From: Stephen Leake
Subject: [elpa] externals/wisi bf11e7dec3 2/4: Update to latest devel version, for ada-mode beta test
Date: Sun, 10 Jul 2022 12:58:55 -0400 (EDT)

branch: externals/wisi
commit bf11e7dec3faeda83f90d4e0a36760befbeb3e00
Author: Stephen Leake <stephen_leake@stephe-leake.org>
Commit: Stephen Leake <stephen_leake@stephe-leake.org>

    Update to latest devel version, for ada-mode beta test
---
 .gitignore                                         |     1 +
 NEWS                                               |    81 +-
 README                                             |     2 +-
 dir                                                |    19 -
 emacs_wisi_common_parse.adb                        |   743 +-
 emacs_wisi_common_parse.ads                        |   167 +-
 gen_emacs_wisi_lr_parse.adb                        |    20 +-
 gen_emacs_wisi_lr_parse.ads                        |    20 +-
 gen_emacs_wisi_lr_text_rep_parse.adb               |    25 +-
 gen_emacs_wisi_lr_text_rep_parse.ads               |    21 +-
 gen_emacs_wisi_packrat_parse.adb                   |   180 +
 gen_emacs_wisi_packrat_parse.ads                   |    42 +
 gen_run_wisi_lr_parse.adb                          |    34 +-
 gen_run_wisi_lr_parse.ads                          |    19 +-
 gen_run_wisi_lr_text_rep_parse.adb                 |    36 +-
 gen_run_wisi_lr_text_rep_parse.ads                 |    20 +-
 gen_run_wisi_packrat_parse.adb                     |   241 +
 gen_run_wisi_packrat_parse.ads                     |    36 +
 gnat-core.el                                       |   487 +
 install.sh                                         |    15 +
 recover_stats.adb                                  |   271 -
 run_wisi_common_parse.adb                          |  1083 +-
 run_wisi_common_parse.ads                          |    54 +-
 ..._definite_doubly_linked_lists-gen_image_aux.adb |    75 +-
 ..._definite_doubly_linked_lists-gen_image_aux.ads |    46 +-
 sal-gen_bounded_definite_doubly_linked_lists.adb   |   348 +
 ...al-gen_bounded_definite_doubly_linked_lists.ads |    94 +-
 sal-gen_bounded_definite_vectors-gen_image.adb     |    40 -
 sal-gen_bounded_definite_vectors.adb               |     5 +-
 ...-gen_definite_doubly_linked_lists-gen_image.adb |     7 +-
 ...-gen_definite_doubly_linked_lists-gen_image.ads |     4 +-
 ..._definite_doubly_linked_lists-gen_image_aux.adb |    89 +-
 ..._definite_doubly_linked_lists-gen_image_aux.ads |    52 +-
 sal-gen_definite_doubly_linked_lists.ads           |     8 +-
 sal-gen_definite_doubly_linked_lists_ref_count.adb |   347 +
 sal-gen_definite_doubly_linked_lists_ref_count.ads |   200 +
 sal-gen_graphs.adb                                 |    22 +-
 ...ndefinite_doubly_linked_lists-gen_image_aux.adb |    37 +-
 ...ndefinite_doubly_linked_lists-gen_image_aux.ads |    56 +-
 sal-gen_indefinite_doubly_linked_lists.adb         |    94 +-
 sal-gen_indefinite_doubly_linked_lists.ads         |    50 +-
 sal-gen_unbounded_definite_hash_tables.adb         |   276 +
 sal-gen_unbounded_definite_hash_tables.ads         |   180 +
 sal-gen_unbounded_definite_min_heaps_fibonacci.adb |     7 +-
 sal-gen_unbounded_definite_min_heaps_fibonacci.ads |    28 +-
 sal-gen_unbounded_definite_queues.adb              |    34 -
 sal-gen_unbounded_definite_queues.ads              |    31 -
 sal-gen_unbounded_definite_red_black_trees.adb     |    12 +-
 sal-gen_unbounded_definite_red_black_trees.ads     |     5 +-
 sal-gen_unbounded_definite_stacks.adb              |    37 +
 sal-gen_unbounded_definite_stacks.ads              |    21 +-
 sal-gen_unbounded_definite_vectors.adb             |    23 +-
 sal-gen_unbounded_definite_vectors.ads             |     7 +-
 sal-gen_unbounded_sparse_ordered_sets.adb          |    85 +
 sal-gen_unbounded_sparse_ordered_sets.ads          |   106 +
 sal-gen_unconstrained_array_image.ads              |    24 -
 sal-unix_text_io.adb                               |    65 +
 sal-unix_text_io.ads                               |    47 +
 sal.adb                                            |    15 +-
 sal.ads                                            |     6 +-
 standard_common.gpr                                |    40 +-
 wisi-fringe.el                                     |    45 +-
 wisi-parse-common.el                               |   398 +-
 wisi-parse_context.adb                             |   787 ++
 wisi-parse_context.ads                             |   143 +
 wisi-prj.el                                        |    92 +-
 wisi-process-parse.el                              |  1774 ++-
 wisi-run-indent-test.el                            |   424 +-
 wisi-skel.el                                       |    19 +-
 wisi-tests.el                                      |     3 +-
 wisi.adb                                           |  3141 +++---
 wisi.ads                                           |   632 +-
 wisi.el                                            |   773 +-
 wisi.gpr.gp                                        |    24 +-
 wisi.texi                                          |   685 +-
 wisitoken-bnf-generate.adb                         |   643 +-
 wisitoken-bnf-generate_grammar.adb                 |     4 +-
 wisitoken-bnf-generate_packrat.adb                 |   117 +-
 wisitoken-bnf-generate_utils.adb                   |   453 +-
 wisitoken-bnf-generate_utils.ads                   |    79 +-
 wisitoken-bnf-output_ada.adb                       |   179 +-
 wisitoken-bnf-output_ada_common.adb                |  1329 ++-
 wisitoken-bnf-output_ada_common.ads                |    30 +-
 wisitoken-bnf-output_ada_emacs.adb                 |   833 +-
 wisitoken-bnf-output_elisp_common.adb              |     4 +-
 wisitoken-bnf.adb                                  |    80 +-
 wisitoken-bnf.ads                                  |   166 +-
 wisitoken-followed_by.adb                          |   207 -
 wisitoken-gen_token_enum.adb                       |   133 -
 wisitoken-gen_token_enum.ads                       |   122 -
 wisitoken-generate-lr-lalr_generate.adb            |   241 +-
 wisitoken-generate-lr-lalr_generate.ads            |    38 +-
 wisitoken-generate-lr-lr1_generate.adb             |   885 +-
 wisitoken-generate-lr-lr1_generate.ads             |    53 +-
 wisitoken-generate-lr.adb                          |  1410 ++-
 wisitoken-generate-lr.ads                          |   245 +-
 wisitoken-generate-lr1_items.adb                   |   312 +-
 wisitoken-generate-lr1_items.ads                   |   186 +-
 wisitoken-generate-packrat.adb                     |    56 +-
 wisitoken-generate-packrat.ads                     |    19 +-
 wisitoken-generate.adb                             |    56 +-
 wisitoken-generate.ads                             |    37 +-
 wisitoken-in_parse_actions.adb                     |   162 +
 wisitoken-in_parse_actions.ads                     |    80 +
 wisitoken-lexer-re2c.adb                           |   320 +-
 wisitoken-lexer-re2c.ads                           |   196 +-
 wisitoken-lexer-regexp.adb                         |   259 -
 wisitoken-lexer-regexp.ads                         |   114 -
 wisitoken-lexer.adb                                |   390 +-
 wisitoken-lexer.ads                                |   452 +-
 wisitoken-parse-lr-mckenzie_recover-base.adb       |   867 +-
 wisitoken-parse-lr-mckenzie_recover-base.ads       |   249 +-
 wisitoken-parse-lr-mckenzie_recover-explore.adb    |  1993 ++--
 wisitoken-parse-lr-mckenzie_recover-explore.ads    |     7 +-
 wisitoken-parse-lr-mckenzie_recover-parse.adb      |   942 +-
 wisitoken-parse-lr-mckenzie_recover-parse.ads      |   151 +-
 wisitoken-parse-lr-mckenzie_recover.adb            |  2122 ++--
 wisitoken-parse-lr-mckenzie_recover.ads            |   420 +-
 wisitoken-parse-lr-parser-parse.adb                |   659 ++
 wisitoken-parse-lr-parser.adb                      |  4037 +++++--
 wisitoken-parse-lr-parser.ads                      |   146 +-
 wisitoken-parse-lr-parser_lists.adb                |   645 +-
 wisitoken-parse-lr-parser_lists.ads                |   248 +-
 wisitoken-parse-lr-parser_no_recover.adb           |   403 +-
 wisitoken-parse-lr-parser_no_recover.ads           |    76 +-
 wisitoken-parse-lr.adb                             |   333 +-
 wisitoken-parse-lr.ads                             |   454 +-
 wisitoken-parse-packrat-generated.adb              |   101 +-
 wisitoken-parse-packrat-generated.ads              |    33 +-
 wisitoken-parse-packrat-procedural.adb             |   200 +-
 wisitoken-parse-packrat-procedural.ads             |    46 +-
 wisitoken-parse-packrat.adb                        |    74 +-
 wisitoken-parse-packrat.ads                        |    34 +-
 wisitoken-parse.adb                                |  1178 +-
 wisitoken-parse.ads                                |   653 +-
 wisitoken-parse_table-mode.el                      |   116 +-
 wisitoken-productions.ads                          |    16 +-
 wisitoken-semantic_checks.adb                      |   152 -
 wisitoken-semantic_checks.ads                      |   106 -
 wisitoken-syntax_trees-lr_utils.adb                |   293 +-
 wisitoken-syntax_trees-lr_utils.ads                |   140 +-
 wisitoken-syntax_trees.adb                         | 11294 ++++++++++++++++---
 wisitoken-syntax_trees.ads                         |  3523 +++++-
 wisitoken-text_io_trace.adb                        |     9 +-
 wisitoken-text_io_trace.ads                        |     5 +-
 wisitoken-to_tree_sitter.adb                       |   528 -
 wisitoken-user_guide.texinfo                       |   589 +-
 wisitoken-wisi_ada.adb                             |    34 +-
 wisitoken-wisi_ada.ads                             |     8 +-
 wisitoken.adb                                      |   252 +-
 wisitoken.ads                                      |   303 +-
 wisitoken_grammar_actions.adb                      |   179 +-
 wisitoken_grammar_actions.ads                      |   213 +-
 wisitoken_grammar_editing.adb                      |  3598 ++++++
 wisitoken_grammar_editing.ads                      |   175 +
 wisitoken_grammar_main.adb                         |  1561 ++-
 wisitoken_grammar_main.ads                         |    17 +-
 wisitoken_grammar_re2c.c                           |  2903 +++--
 wisitoken_grammar_re2c_c.ads                       |    27 +-
 wisitoken_grammar_runtime.adb                      |  3740 ++----
 wisitoken_grammar_runtime.ads                      |   164 +-
 161 files changed, 50203 insertions(+), 20883 deletions(-)

diff --git a/.gitignore b/.gitignore
index cb872719a0..12798c6e35 100644
--- a/.gitignore
+++ b/.gitignore
@@ -4,3 +4,4 @@
 obj/
 wisi.gpr
 wisitoken-user_guide.info
+wisitoken-bnf-generate.exe
diff --git a/NEWS b/NEWS
index 1c86ec83f1..137248793e 100644
--- a/NEWS
+++ b/NEWS
@@ -1,33 +1,68 @@
 GNU Emacs wisi NEWS -- history of user-visible changes.
 
-Copyright (C) 2014 - 2020 Free Software Foundation, Inc.
+Copyright (C) 2014 - 2022 Free Software Foundation, Inc.
 
 Please send wisi bug reports to bug-gnu-emacs@gnu.org, with
 'wisi' in the subject. If possible, use M-x report-emacs-bug.
 
 
-* wisi 3.1.8
-28 May 2022
-
-** Provide project-roots
-
-* wisi 3.1.7
-21 Nov 2021
-
-** Correct last fix (sigh).
-
-* wisi 3.1.6
-21 Nov 2021
-
-** Adapt to change in xref.el xref-file-location.
-
-* wisi 3.1.5
-30 Jul 2021
-
-** Update several SAL files for compatibility with gnat FSF 11, Pro
-   22, Community 2021.
-
-* wisi 3.1.3, 3.1.4 packaging error
+* wisi 4.0.0
+9 Jul 2022
+
+** Major redesign to support incremental parse.
+
+** New user custom variable wisi-incremental-parse-enable; when
+   non-nil, wisi uses incremental parse.
+
+** There is now a log buffer showing all interactions with the parser
+   process. The buffer name is given by
+   (wisi-parser-transaction-log-buffer-name parser); for Ada it is
+   "*Ada-wisi-parser-log*".
+
+** The buffer-local variable containing the wisi parser object is
+   split and renamed from wisi--parser to wisi-parser-shared, and
+   wisi-parser-local. wisi-parser-shared points to a per-language
+   structure; wisi-parser-local contains parser state that is buffer
+   local, such as error messages.
+
+** The new command wisi-parse-tree-query allows querying the stored
+   parse tree for various things.
+
+** Redesign how indents add, and what some indent functions mean; see
+   wisi.info for details. In general, it is easier to arrange indent
+   actions to get the desired result. This may require grammar file
+   indent action changes; most likely add 'wisi-block' in block
+   statements.
+
+** The algorithm for naming conflicts is now more accurate, and the
+   format of the %conflict declaration has changed. The old format is
+   still supported. Some conflicts have the same name, but others will
+   need to be updated.
+
+** wisitoken-parse_table-mode now provides a command
+   wisitoken-parse_table-conflict-goto that will find a conflict in
+   the parse table file. wisitoken-grammar-mode binds that command to
+   "\C-c."
+
+** wisitoken-parse_table-mode now provides a command
+   wisitoken-parse_table-goto that will find a nonterminal or state in
+   the parse table file. It is recommended to bind that command to
+   "\C-c.", either in global-key-map, or in modes that typically have
+   nonterminals or states.
+
+** parser process protocol version 6
+    add zombie_limit
+    add error-pos in Recover
+    replace debug-mode, trace-mckenzie, trace-action with verbosity string
+    delete wisi-mckenzie-disable, -check-limit
+    add incremental parse
+    increase command length to 4 digits
+    add query_tree command
+
+* wisi 3.1.3
+5 Jun 2020
+
+** Fix packaging bug.
 
 * wisi 3.1.2
 4 Jun 2020
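
The NEWS entries above name the main user-visible additions. A minimal
Elisp sketch of how an ada-mode user might exercise them, assuming only
the names documented in NEWS (wisi-incremental-parse-enable,
wisi-parser-shared, wisi-parser-transaction-log-buffer-name,
wisi-parse-tree-query); exact signatures may differ from this sketch:

  ;; Enable the new incremental parse (user custom variable).
  (setq wisi-incremental-parse-enable t)

  ;; Show the parser transaction log buffer for the current buffer's
  ;; shared (per-language) parser object.
  (pop-to-buffer
   (wisi-parser-transaction-log-buffer-name wisi-parser-shared))

  ;; Query the stored parse tree; see wisi.info for the query kinds.
  ;; M-x wisi-parse-tree-query
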
diff --git a/README b/README
index 154c767d75..5898d91ee8 100644
--- a/README
+++ b/README
@@ -1,4 +1,4 @@
-Emacs wisi package 3.1.8
+Emacs wisi package 3.1.2
 
 The wisi package provides utilities for using generalized
 error-correcting LR parsers (in external processes) to do indentation,
diff --git a/dir b/dir
deleted file mode 100644
index fbc9cbbe29..0000000000
--- a/dir
+++ /dev/null
@@ -1,19 +0,0 @@
-This is the file .../info/dir, which contains the
-topmost node of the Info hierarchy, called (dir)Top.
-The first time you invoke Info you start off looking at this node.
-
-File: dir,     Node: Top       This is the top of the INFO tree
-
-  This (the Directory node) gives a menu of major topics.
-  Typing "q" exits, "H" lists all Info commands, "d" returns here,
-  "h" gives a primer for first-timers,
-  "mEmacs<Return>" visits the Emacs manual, etc.
-
-  In Emacs, you can click mouse button 2 on a menu item or cross reference
-  to select it.
-
-* Menu:
-
-Parser generators
-* wisitoken-bnf-generate: (wisitoken-bnf-generate).
-                                Ada and Elisp parser generator
diff --git a/emacs_wisi_common_parse.adb b/emacs_wisi_common_parse.adb
index 59d20e5740..fb751b3dc3 100644
--- a/emacs_wisi_common_parse.adb
+++ b/emacs_wisi_common_parse.adb
@@ -2,7 +2,7 @@
 --
 --  See spec.
 --
---  Copyright (C) 2018 - 2020 Free Software Foundation, Inc.
+--  Copyright (C) 2018 - 2022 Free Software Foundation, Inc.
 --
 --  This program is free software; you can redistribute it and/or
 --  modify it under terms of the GNU General Public License as
@@ -24,10 +24,13 @@ with Ada.Exceptions;
 with Ada.Strings.Fixed;
 with Ada.Text_IO;
 with GNAT.OS_Lib;
+with GNAT.Traceback.Symbolic;
+with GNATCOLL.Memory;
 with SAL;
-with System.Multiprocessors;
 with System.Storage_Elements;
 with WisiToken.Lexer;
+with WisiToken.Parse.LR.Parser;
+with WisiToken.Syntax_Trees;
 package body Emacs_Wisi_Common_Parse is
 
    procedure Usage (Name : in String)
@@ -41,6 +44,8 @@ package body Emacs_Wisi_Common_Parse is
       Put_Line ("See wisi-process-parse.el *--send-parse, *--send-noop for arguments.");
    end Usage;
 
+   Trace_Protocol : Natural := 0;
+
    procedure Read_Input (A : System.Address; N : Integer)
    is
       use System.Storage_Elements;
@@ -69,64 +74,24 @@ package body Emacs_Wisi_Common_Parse is
 
    function Get_Command_Length return Integer
    is
-      Temp : aliased String (1 .. 3) := (others => ' '); -- initialize for error message
+      --  Length must match wisi-process-parse.el
+      --  wisi-process-parse--add-cmd-length. Initialize for error message.
+      Temp : aliased String (1 .. 4) := (others => ' ');
    begin
       Read_Input (Temp'Address, Temp'Length);
       return Integer'Value (Temp);
    exception
    when Constraint_Error =>
       --  From Integer'Value
-      raise Protocol_Error with "invalid command byte count; '" & Temp & "'";
+      raise Wisi.Protocol_Error with "invalid command byte count; '" & Temp & "'";
    end Get_Command_Length;
 
-   function Get_String
-     (Source : in     String;
-      Last   : in out Integer)
-     return String
-   is
-      use Ada.Strings.Fixed;
-      First : constant Integer := Index
-        (Source  => Source,
-         Pattern => """",
-         From    => Last + 1);
-   begin
-      Last := Index
-        (Source  => Source,
-         Pattern => """",
-         From    => First + 1);
-
-      if First = 0 or Last = 0 then
-         raise Protocol_Error with "no '""' found for string";
-      end if;
-
-      return Source (First + 1 .. Last - 1);
-   end Get_String;
-
-   function Get_Integer
-     (Source : in     String;
-      Last   : in out Integer)
-     return Integer
-   is
-      use Ada.Strings.Fixed;
-      First : constant Integer := Last + 2; -- final char of previous item, space
-   begin
-      Last := Index
-        (Source  => Source,
-         Pattern => " ",
-         From    => First);
-
-      if Last = 0 then
-         Last := Source'Last;
-      else
-         Last := Last - 1;
+   procedure Check_Command_Length (Command_Length, Last : in Integer)
+   is begin
+      if Last /= Command_Length then
+         raise Wisi.Protocol_Error with "command length expected" & Command_Length'Image & " got" & Last'Image;
       end if;
-
-      return Integer'Value (Source (First .. Last));
-   exception
-   when others =>
-      Ada.Text_IO.Put_Line ("bad integer '" & Source (First .. Source'Last) & "'");
-      raise;
-   end Get_Integer;
+   end Check_Command_Length;
 
    function Get_Process_Start_Params return Process_Start_Params
    is
@@ -138,6 +103,7 @@ package body Emacs_Wisi_Common_Parse is
          Put_Line (Standard_Error, "process start args:");
          Put_Line (Standard_Error, "--help : put this help");
          Put_Line (Standard_Error, "--recover-log <file_name> : log recover actions to file");
+         Put_Line (Standard_Error, "--trace_protocol <n> : 0 = none, 1 = echo commands");
       end Put_Usage;
 
       Next_Arg : Integer := 1;
@@ -153,6 +119,13 @@ package body Emacs_Wisi_Common_Parse is
             elsif Next_Arg + 1 <= Argument_Count and then Argument (Next_Arg) = "--recover-log" then
                Result.Recover_Log_File_Name := Ada.Strings.Unbounded.To_Unbounded_String (Argument (Next_Arg + 1));
                Next_Arg := Next_Arg + 2;
+
+            elsif Next_Arg + 1 <= Argument_Count and then Argument (Next_Arg) = "--trace_protocol" then
+               Trace_Protocol := Integer'Value (Argument (Next_Arg + 1));
+               Next_Arg := Next_Arg + 2;
+
+            else
+               raise Wisi.Protocol_Error with "invalid process arg '" & Argument (Next_Arg) & "'";
             end if;
          end loop;
       end return;
@@ -160,80 +133,136 @@ package body Emacs_Wisi_Common_Parse is
 
    function Get_Parse_Params (Command_Line : in String; Last : in out Integer) return Parse_Params
    is
+      use Wisi;
       use WisiToken;
+      Kind : constant Parse_Kind := Parse_Kind'Val (Get_Integer (Command_Line, Last));
    begin
-      return Result : Parse_Params do
+      return Result : Parse_Params (Kind) do
          --  We don't use an aggregate, to enforce execution order.
-         --  Match wisi-process-parse.el wisi-process--send-parse
-
-         Result.Post_Parse_Action    := Wisi.Post_Parse_Action_Type'Val (Get_Integer (Command_Line, Last));
-         Result.Source_File_Name     := +Get_String (Command_Line, Last);
-         Result.Begin_Byte_Pos       := Get_Integer (Command_Line, Last);
-
-         --  Emacs end is after last char.
-         Result.End_Byte_Pos         := Get_Integer (Command_Line, Last) - 1;
-
-         Result.Goal_Byte_Pos        := Get_Integer (Command_Line, Last);
-         Result.Begin_Char_Pos       := WisiToken.Buffer_Pos (Get_Integer (Command_Line, Last));
-         Result.Begin_Line           := WisiToken.Line_Number_Type (Get_Integer (Command_Line, Last));
-         Result.End_Line             := WisiToken.Line_Number_Type (Get_Integer (Command_Line, Last));
-         Result.Begin_Indent         := Get_Integer (Command_Line, Last);
-         Result.Partial_Parse_Active := 1 = Get_Integer (Command_Line, Last);
-         Result.Debug_Mode           := 1 = Get_Integer (Command_Line, Last);
-         Result.Parse_Verbosity      := Get_Integer (Command_Line, Last);
-         Result.McKenzie_Verbosity   := Get_Integer (Command_Line, Last);
-         Result.Action_Verbosity     := Get_Integer (Command_Line, Last);
-         Result.McKenzie_Disable     := Get_Integer (Command_Line, Last);
-         Result.Task_Count           := Get_Integer (Command_Line, Last);
-         Result.Check_Limit          := Get_Integer (Command_Line, Last);
-         Result.Enqueue_Limit        := Get_Integer (Command_Line, Last);
-         Result.Max_Parallel         := Get_Integer (Command_Line, Last);
-         Result.Byte_Count           := Get_Integer (Command_Line, Last);
+         --  Match wisi-process-parse.el wisi-process-parse--send-parse, wisi-process-parse--send-incremental-parse
+         case Kind is
+         when Partial =>
+            Result.Post_Parse_Action := Wisi.Post_Parse_Action_Type'Val (Get_Integer (Command_Line, Last));
+            Result.Source_File_Name  := +Get_String (Command_Line, Last);
+            Result.Begin_Byte_Pos    := Get_Integer (Command_Line, Last);
+            Result.End_Byte_Pos      := Get_Integer (Command_Line, Last) - 1;
+            --  Emacs end is after last byte.
+            Result.Goal_Byte_Pos        := Get_Integer (Command_Line, Last);
+            Result.Begin_Char_Pos       := Buffer_Pos (Get_Integer (Command_Line, Last));
+            Result.End_Char_Pos         := Buffer_Pos (Get_Integer (Command_Line, Last)) - 1;
+            Result.Goal_Char_Pos        := Base_Buffer_Pos (Get_Integer (Command_Line, Last));
+            Result.Begin_Line           := Line_Number_Type (Get_Integer (Command_Line, Last));
+            Result.Begin_Indent         := Get_Integer (Command_Line, Last);
+            Result.Partial_Parse_Active := 1 = Get_Integer (Command_Line, Last);
+            Result.Verbosity            := +Get_String (Command_Line, Last);
+            Result.Zombie_Limit         := Get_Integer (Command_Line, Last);
+            Result.Enqueue_Limit        := Get_Integer (Command_Line, Last);
+            Result.Max_Parallel         := Get_Integer (Command_Line, Last);
+
+         when Incremental | Full =>
+            Result.Source_File_Name := +Get_String (Command_Line, Last);
+            Result.Verbosity        := +Get_String (Command_Line, Last);
+            Result.Zombie_Limit     := Get_Integer (Command_Line, Last);
+            Result.Enqueue_Limit    := Get_Integer (Command_Line, Last);
+            Result.Max_Parallel     := Get_Integer (Command_Line, Last);
+
+            case Kind is
+            when Partial => null;
+            when Incremental =>
+               Result.Changes := Wisi.Parse_Context.Get_Emacs_Change_List (Command_Line, Last);
+
+            when Full =>
+               Result.Byte_Count        := Get_Integer (Command_Line, Last);
+               Result.Full_End_Char_Pos := Buffer_Pos (Get_Integer (Command_Line, Last)) - 1;
+            end case;
+         end case;
+
+         Result.Language_Params := +Get_String (Command_Line, Last);
+
+         Enable_Trace (-Result.Verbosity);
+
+         Check_Command_Length (Command_Line'Last, Last);
       end return;
+   exception
+   when Protocol_Error =>
+      raise;
+   when E : others =>
+      raise Protocol_Error with "at" & Last'Image & ": " & Ada.Exceptions.Exception_Message (E);
    end Get_Parse_Params;
 
+   function Get_Post_Parse_Params (Command_Line : in String; Last : in out Integer) return Post_Parse_Params
+   is
+      use Wisi;
+      use WisiToken;
+   begin
+      return Result : Post_Parse_Params do
+
+         Result.Source_File_Name  := +Get_String (Command_Line, Last);
+         Result.Verbosity         := +Get_String (Command_Line, Last);
+         Result.Post_Parse_Action := Wisi.Post_Parse_Action_Type'Val (Get_Integer (Command_Line, Last));
+         Result.Begin_Byte_Pos    := Get_Integer (Command_Line, Last);
+         Result.Begin_Char_Pos    := Get_Integer (Command_Line, Last);
+
+         --  Emacs end is after last char. FIXME: if last char is
+         --  multibyte, this is wrong; add something to wisitoken.utf_8.
+         Result.End_Byte_Pos      := Get_Integer (Command_Line, Last) - 1;
+         Result.End_Char_Pos      := Get_Integer (Command_Line, Last) - 1;
+
+         Result.Language_Params   := +Get_String (Command_Line, Last);
+
+         Enable_Trace (-Result.Verbosity);
+         Check_Command_Length (Command_Line'Last, Last);
+      end return;
+   exception
+   when Protocol_Error =>
+      raise;
+   when E : others =>
+      raise Protocol_Error with "at" & Last'Image & ": " & Ada.Exceptions.Exception_Message (E);
+   end Get_Post_Parse_Params;
+
    function Get_Refactor_Params (Command_Line : in String; Last : in out Integer) return Refactor_Params
    is
+      use Wisi;
       use WisiToken;
    begin
       return Result : Refactor_Params do
          --  We don't use an aggregate, to enforce execution order.
          --  Match wisi-process-parse.el wisi-process--send-refactor
 
-         Result.Refactor_Action    := Get_Integer (Command_Line, Last);
          Result.Source_File_Name   := +Get_String (Command_Line, Last);
-         Result.Parse_Region.First := WisiToken.Buffer_Pos (Get_Integer (Command_Line, Last));
-         Result.Parse_Region.Last  := WisiToken.Buffer_Pos (Get_Integer (Command_Line, Last) - 1);
-
-         Result.Edit_Begin           := WisiToken.Buffer_Pos (Get_Integer (Command_Line, Last));
-         Result.Parse_Begin_Char_Pos := WisiToken.Buffer_Pos (Get_Integer (Command_Line, Last));
-         Result.Parse_Begin_Line     := WisiToken.Line_Number_Type (Get_Integer (Command_Line, Last));
-         Result.Parse_End_Line       := WisiToken.Line_Number_Type (Get_Integer (Command_Line, Last));
-         Result.Parse_Begin_Indent   := Get_Integer (Command_Line, Last);
-         Result.Debug_Mode           := 1 = Get_Integer (Command_Line, Last);
-         Result.Parse_Verbosity      := Get_Integer (Command_Line, Last);
-         Result.Action_Verbosity     := Get_Integer (Command_Line, Last);
-         Result.Max_Parallel         := Get_Integer (Command_Line, Last);
-         Result.Byte_Count           := Get_Integer (Command_Line, Last);
+         Result.Refactor_Action    := Refactor_Action (Get_Integer (Command_Line, Last));
+
+         Result.Edit_Begin := Buffer_Pos (Get_Integer (Command_Line, Last));
+         Result.Verbosity  := +Get_String (Command_Line, Last);
+
+         Enable_Trace (-Result.Verbosity);
+         Check_Command_Length (Command_Line'Last, Last);
       end return;
+   exception
+   when Protocol_Error =>
+      raise;
+   when E : others =>
+      raise Protocol_Error with "at" & Last'Image & ": " & Ada.Exceptions.Exception_Message (E);
    end Get_Refactor_Params;
 
    procedure Process_Stream
-     (Name                      : in     String;
-      Language_Protocol_Version : in     String;
-      Partial_Parse_Active      : in out Boolean;
-      Params                    : in     Process_Start_Params;
-      Parser                    : in out WisiToken.Parse.LR.Parser.Parser;
-      Parse_Data                : in out Wisi.Parse_Data_Type'Class;
-      Descriptor                : in     WisiToken.Descriptor)
+     (Name                      : in String;
+      Language_Protocol_Version : in String;
+      Params                    : in Process_Start_Params;
+      Language                  : in Wisi.Parse_Context.Language;
+      Trace                     : in WisiToken.Trace_Access)
    is
       use Ada.Text_IO;
       use WisiToken; -- "+", "-" Unbounded_string
+      use all type Ada.Strings.Unbounded.String_Access;
+      use all type Wisi.Parse_Context.Parse_Context_Access;
+
+      Recover_Log_File : Ada.Text_IO.File_Type;
 
       procedure Cleanup
       is begin
-         if Is_Open (Parser.Recover_Log_File) then
-            Close (Parser.Recover_Log_File);
+         if Is_Open (Recover_Log_File) then
+            Close (Recover_Log_File);
          end if;
       end Cleanup;
 
@@ -247,14 +276,14 @@ package body Emacs_Wisi_Common_Parse is
             --  to Current_Output, visible from Emacs
 
             if Exists (-Params.Recover_Log_File_Name) then
-               Open (Parser.Recover_Log_File, Append_File, -Params.Recover_Log_File_Name);
+               Open (Recover_Log_File, Append_File, -Params.Recover_Log_File_Name);
             else
-               Create (Parser.Recover_Log_File, Out_File, -Params.Recover_Log_File_Name);
+               Create (Recover_Log_File, Out_File, -Params.Recover_Log_File_Name);
             end if;
          end if;
       end;
 
-      Parser.Trace.Set_Prefix (";; "); -- so debug messages don't confuse Emacs.
+      Trace.Set_Prefix (";; "); -- so debug messages don't confuse Emacs.
 
       Put_Line
         (Name & " protocol: process version " & Protocol_Version & " language version " & Language_Protocol_Version);
@@ -269,17 +298,65 @@ package body Emacs_Wisi_Common_Parse is
             Last           : Integer;
 
             function Match (Target : in String) return Boolean
-            is begin
-               Last := Command_Line'First + Target'Length - 1;
-               return Last <= Command_Line'Last and then Command_Line (Command_Line'First .. Last) = Target;
+            is
+               use Ada.Strings.Fixed;
+            begin
+               Last := Index (Source => Command_Line, Pattern => " ");
+               if Last = 0 then
+                  Last := Command_Line'Last;
+               else
+                  Last := Last - 1;
+               end if;
+
+               return Last = Target'Length and then Command_Line (Command_Line'First .. Last) = Target;
             end Match;
          begin
             Read_Input (Command_Line'Address, Command_Length);
 
-            Put_Line (";; " & Command_Line);
+            if Trace_Protocol > WisiToken.Outline then
+               Trace.Put_Line ("'" & Command_Line & "' length:" & Command_Length'Image);
+            end if;
+
+            if Match ("create-context") then
+               Wisi.Parse_Context.Create_No_Text (Wisi.Get_String (Command_Line, Last), Language, Trace);
+
+            elsif Match ("kill-context") then
+               Wisi.Parse_Context.Kill (File_Name => Wisi.Get_String (Command_Line, Last));
+
+            elsif Match ("enable_memory_report") then
+               --  Args: <none>
+               --  Input: <none>
+               --  Response:
+               --  (message "memory ...)
+               --  prompt
+               GNATCOLL.Memory.Configure
+                 (Activate_Monitor      => True,
+                  Stack_Trace_Depth     => 0,
+                  Reset_Content_On_Free => False);
+
+            elsif Match ("memory_report_reset") then
+               --  Args: <none>
+               --  Input: <none>
+               --  Response:
+               --  (message "memory report reset")
+               --  prompt
+
+               --  GNATCOLL.Memory.Reset does not reset the values returned by
+               --  Get_Ada_Allocations; only those used by Dump. Sigh.
+               Memory_Baseline := GNATCOLL.Memory.Get_Ada_Allocations.Current;
+               Ada.Text_IO.Put_Line ("(message ""memory report reset"")");
+
+            elsif Match ("memory_report") then
+               --  Args: <none>
+               --  Input: <none>
+               --  Response:
+               --  (message "memory ...)
+               --  prompt
+               Report_Memory (Trace.all, Prefix => False);
 
-            if Match ("parse") then
-               --  Args: see wisi-process-parse.el wisi-process-parse--send-parse
+            elsif Match ("parse") then
+               --  Args: see wisi-process-parse.el wisi-process-parse--send-parse,
+               --    wisi-process-parse--send-incremental-parse
                --  Input: <source text>
                --  Response:
                --  [response elisp vector]...
@@ -287,208 +364,338 @@ package body Emacs_Wisi_Common_Parse is
                --  prompt
                declare
                   Params : constant Parse_Params := Get_Parse_Params (Command_Line, Last);
-                  Buffer : Ada.Strings.Unbounded.String_Access;
-
-                  procedure Clean_Up
-                  is
-                     use all type SAL.Base_Peek_Type;
-                  begin
-                     Parser.Lexer.Discard_Rest_Of_Input;
-                     if Parser.Parsers.Count > 0 then
-                        Parse_Data.Put
-                          (Parser.Lexer.Errors,
-                           Parser.Parsers.First.State_Ref.Errors,
-                           Parser.Parsers.First.State_Ref.Tree);
-                     end if;
-                     Ada.Strings.Unbounded.Free (Buffer);
-                  end Clean_Up;
 
-               begin
-                  Trace_Parse    := Params.Parse_Verbosity;
-                  Trace_McKenzie := Params.McKenzie_Verbosity;
-                  Trace_Action   := Params.Action_Verbosity;
-                  Debug_Mode     := Params.Debug_Mode;
-
-                  Partial_Parse_Active        := Params.Partial_Parse_Active;
-                  Parser.Partial_Parse_Active := Params.Partial_Parse_Active;
-
-                  if WisiToken.Parse.LR.McKenzie_Defaulted (Parser.Table.all) 
then
-                     --  There is no McKenzie information; don't override that.
-                     null;
-                  elsif Params.McKenzie_Disable = -1 then
-                     --  Use default
-                     Parser.Enable_McKenzie_Recover := True;
-                  else
-                     Parser.Enable_McKenzie_Recover := Params.McKenzie_Disable 
= 0;
-                  end if;
+                  Parse_Context : constant 
Wisi.Parse_Context.Parse_Context_Access :=
+                    (case Params.Kind is
+                     when Full | Partial => Wisi.Parse_Context.Find_Create
+                       (-Params.Source_File_Name, Language, Trace),
+                     when Incremental => Wisi.Parse_Context.Find
+                       (-Params.Source_File_Name, Language, Have_Text => 
True));
 
-                  Parse_Data.Initialize
-                    (Post_Parse_Action => Params.Post_Parse_Action,
-                     Lexer             => Parser.Lexer,
-                     Descriptor        => Descriptor'Unrestricted_Access,
-                     Base_Terminals    => Parser.Terminals'Unrestricted_Access,
-                     Begin_Line        => Params.Begin_Line,
-                     End_Line          => Params.End_Line,
-                     Begin_Indent      => Params.Begin_Indent,
-                     Params            => Command_Line (Last + 2 .. 
Command_Line'Last));
-
-                  if Params.Task_Count > 0 then
-                     Parser.Table.McKenzie_Param.Task_Count := 
System.Multiprocessors.CPU_Range (Params.Task_Count);
-                  end if;
-                  if Params.Check_Limit > 0 then
-                     Parser.Table.McKenzie_Param.Check_Limit := 
Base_Token_Index (Params.Check_Limit);
+                  Parser     : Parse.LR.Parser.Parser renames 
Parse_Context.Parser;
+                  Parse_Data : Wisi.Parse_Data_Type'Class renames 
Wisi.Parse_Data_Type'Class (Parser.User_Data.all);
+               begin
+                  if Params.Zombie_Limit > 0 then
+                     Parser.Table.McKenzie_Param.Zombie_Limit := 
Params.Zombie_Limit;
                   end if;
                   if Params.Enqueue_Limit > 0 then
                      Parser.Table.McKenzie_Param.Enqueue_Limit := 
Params.Enqueue_Limit;
                   end if;
-
                   if Params.Max_Parallel > 0 then
-                     Parser.Max_Parallel := SAL.Base_Peek_Type 
(Params.Max_Parallel);
+                     Parser.Table.Max_Parallel := SAL.Base_Peek_Type 
(Params.Max_Parallel);
                   end if;
 
-                  Buffer := new String (Params.Begin_Byte_Pos .. 
Params.End_Byte_Pos);
+                  case Params.Kind is
+                  when Partial =>
+                     Parser.Partial_Parse_Active.all    := 
Params.Partial_Parse_Active;
+                     Parser.Partial_Parse_Byte_Goal.all := Buffer_Pos 
(Params.Goal_Byte_Pos);
 
-                  Read_Input (Buffer (Params.Begin_Byte_Pos)'Address, 
Params.Byte_Count);
+                     Parse_Data.Initialize;
 
-                  Parser.Lexer.Reset_With_String_Access
-                    (Buffer, Params.Source_File_Name, Params.Begin_Char_Pos, 
Params.Begin_Line);
+                     Parse_Data.Parse_Language_Params 
(-Params.Language_Params);
 
-                  --  Parser.Line_Begin_Token First, Last set by Lex_All
-                  begin
-                     Parser.Parse;
-                  exception
-                  when WisiToken.Partial_Parse =>
-                     null;
-                  end;
-                  Parser.Execute_Actions;
-                  Parse_Data.Put (Parser);
-                  Clean_Up;
+                     Ada.Strings.Unbounded.Free (Parse_Context.Text_Buffer);
+                     Parse_Context.Text_Buffer := new String 
(Params.Begin_Byte_Pos .. Params.End_Byte_Pos);
+                     Parse_Context.Text_Buffer_Byte_Last := 
Params.End_Byte_Pos;
+                     Parse_Context.Text_Buffer_Char_Last := Integer 
(Params.End_Char_Pos);
+
+                     Read_Input
+                       (Parse_Context.Text_Buffer 
(Params.Begin_Byte_Pos)'Address,
+                        Parse_Context.Text_Buffer'Length);
+
+                     if Ada.Strings.Unbounded.Length 
(Parse_Context.Root_Save_Edited_Name) /= 0 then
+                        Parse_Context.Save_Text_Auto;
+                     end if;
+
+                     Parser.Tree.Lexer.Reset_With_String_Access
+                       (Parse_Context.Text_Buffer, 
Parse_Context.Text_Buffer_Byte_Last, Params.Source_File_Name,
+                        Params.Begin_Char_Pos, Params.Begin_Line);
+
+                     --  Parser.Line_Begin_Token First, Last set by Lex_All in Parse.
+
+                     Parser.Parse (Recover_Log_File);
+                     --  Raises Parse_Error for ambiguous parse and similar errors.
+
+                     Parse_Data.Reset_Post_Parse
+                       (Parser.Tree, Params.Post_Parse_Action,
+                        Action_Region_Bytes =>
+                          (Base_Buffer_Pos (Params.Begin_Byte_Pos), 
Base_Buffer_Pos (Params.Goal_Byte_Pos)),
+                        Action_Region_Chars => (Params.Begin_Char_Pos, 
Params.Goal_Char_Pos),
+                        Begin_Indent        => Params.Begin_Indent);
+
+                     Parser.Execute_Actions (Action_Region_Bytes => 
Parse_Data.Action_Region_Bytes);
+                     Parse_Data.Put (Parser);
+
+                  when Incremental =>
+
+                     if Parse_Context.Text_Buffer = null then
+                        raise Wisi.Parse_Context.Not_Found;
+                     end if;
+
+                     --  IMPROVEME: could do incremental parse after partial parse, to
+                     --  expand the parsed region.
+                     Parser.Partial_Parse_Active.all := False;
+
+                     declare
+                        KMN_List : Parse.KMN_Lists.List;
+                     begin
+                        Wisi.Parse_Context.Edit_Source (Trace.all, 
Parse_Context.all, Params.Changes, KMN_List);
+
+                        if Ada.Strings.Unbounded.Length 
(Parse_Context.Root_Save_Edited_Name) /= 0 then
+                           Parse_Context.Save_Text_Auto;
+                        end if;
+
+                        Parse_Data.Parse_Language_Params 
(-Params.Language_Params);
+
+                        Parser.Tree.Lexer.Reset_With_String_Access
+                          (Parse_Context.Text_Buffer,
+                           Parse_Context.Text_Buffer_Byte_Last,
+                           Params.Source_File_Name);
+
+                        if Parser.Tree.Editable then
+                           Parser.Parse (Recover_Log_File, KMN_List);
+
+                        else
+                           --  Last parse failed; can't edit tree, so do full parse.
+                           --
+                           --  IMPROVEME: Edit_Tree could handle a partially parsed tree, if
+                           --  there is only one stream.
+                           Parser.Parse (Recover_Log_File, 
Parse.KMN_Lists.Empty_List);
+                        end if;
+
+                        --  No Execute_Actions here; that's done in "post-parse" command
+                     end;
+
+                  when Full =>
+                     Parser.Partial_Parse_Active.all := False;
+
+                     Parse_Data.Initialize;
+
+                     Parse_Data.Parse_Language_Params 
(-Params.Language_Params);
+
+                     Ada.Strings.Unbounded.Free (Parse_Context.Text_Buffer);
+                     Parse_Context.Text_Buffer := new String (Integer 
(Buffer_Pos'First) .. Params.Byte_Count);
+                     Parse_Context.Text_Buffer_Byte_Last := Params.Byte_Count;
+                     Parse_Context.Text_Buffer_Char_Last := Integer 
(Params.Full_End_Char_Pos);
+                     if Parse_Context.Text_Buffer'Length > 0 then
+                        Read_Input
+                          (Parse_Context.Text_Buffer 
(Parse_Context.Text_Buffer'First)'Address,
+                           Params.Byte_Count);
+                     end if;
+
+                     Parser.Tree.Lexer.Reset_With_String_Access
+                       (Parse_Context.Text_Buffer, 
Parse_Context.Text_Buffer_Byte_Last, Params.Source_File_Name);
+
+                     declare
+                        KMN_List : Parse.KMN_Lists.List;
+                        --  Leave KMN_List empty to do full parse.
+                     begin
+                        Parser.Parse (Recover_Log_File, KMN_List);
+                        --  No Execute_Actions here; that's done in "post-parse" command
+                     end;
+                  end case;
+
+                  Wisi.Put_Errors (Parser.Tree);
 
                exception
-               when Syntax_Error =>
-                  Clean_Up;
-                  Put_Line ("(parse_error)");
+               when Wisi.Parse_Context.Not_Found =>
+                  raise;
+
+               when WisiToken.Syntax_Error | WisiToken.Parse_Error =>
+                  Wisi.Put_Errors (Parser.Tree);
+                  raise;
+
+               when others =>
+                  Parser.Tree.Lexer.Discard_Rest_Of_Input;
+                  raise;
+               end;
+
+            elsif Match ("post-parse") then
+               --  Args: see wisi-process-parse.el wisi-post-parse
+               --  Input: none
+               --  Response:
+               --  [response elisp vector]...
+               --  [elisp error form]...
+               --  prompt
+               declare
+                  Params : constant Post_Parse_Params := Get_Post_Parse_Params 
(Command_Line, Last);
 
-               when E : Parse_Error =>
-                  Clean_Up;
-                  Put_Line ("(parse_error """ & Ada.Exceptions.Exception_Message (E) & """)");
+                  Parse_Context : constant 
Wisi.Parse_Context.Parse_Context_Access := Wisi.Parse_Context.Find
+                    (-Params.Source_File_Name, Language, Have_Text => True);
 
-               when E : Fatal_Error =>
-                  Clean_Up;
-                  Put_Line ("(error """ & Ada.Exceptions.Exception_Message (E) & """)");
+                  Parser     : Parse.LR.Parser.Parser renames 
Parse_Context.Parser;
+                  Parse_Data : Wisi.Parse_Data_Type'Class renames 
Wisi.Parse_Data_Type'Class (Parser.User_Data.all);
+               begin
+                  Parse_Data.Reset_Post_Parse
+                    (Parser.Tree, Params.Post_Parse_Action,
+                     Action_Region_Bytes =>
+                       (Base_Buffer_Pos (Params.Begin_Byte_Pos), 
Base_Buffer_Pos (Params.End_Byte_Pos)),
+                     Action_Region_Chars =>
+                       (Base_Buffer_Pos (Params.Begin_Char_Pos), 
Base_Buffer_Pos (Params.End_Char_Pos)),
+                     Begin_Indent        => 0);
+
+                  Parse_Data.Parse_Language_Params (-Params.Language_Params);
+
+                  Parser.Execute_Actions (Action_Region_Bytes => 
Parse_Data.Action_Region_Bytes);
+                  Parse_Data.Put (Parser);
                end;
 
             elsif Match ("refactor") then
                --  Args: see wisi-process-parse.el wisi-process-parse--send-refactor
-               --  Input: <source text>
+               --  Input: <none>
                --  Response:
                --  [edit elisp vector]...
                --  prompt
                declare
                   Params : constant Refactor_Params := Get_Refactor_Params 
(Command_Line, Last);
-                  Buffer : Ada.Strings.Unbounded.String_Access;
-
-                  procedure Clean_Up
-                  is
-                     use all type SAL.Base_Peek_Type;
-                  begin
-                     Parser.Lexer.Discard_Rest_Of_Input;
-                     if Parser.Parsers.Count > 0 then
-                        Parse_Data.Put
-                          (Parser.Lexer.Errors,
-                           Parser.Parsers.First.State_Ref.Errors,
-                           Parser.Parsers.First.State_Ref.Tree);
-                     end if;
-                     Ada.Strings.Unbounded.Free (Buffer);
-                  end Clean_Up;
 
+                  Parse_Context : constant 
Wisi.Parse_Context.Parse_Context_Access := Wisi.Parse_Context.Find
+                    (-Params.Source_File_Name, Language);
+
+                  Parse_Data : Wisi.Parse_Data_Type'Class renames 
Wisi.Parse_Data_Type'Class
+                    (Parse_Context.Parser.User_Data.all);
                begin
-                  Trace_Parse  := Params.Parse_Verbosity;
-                  Trace_Action := Params.Action_Verbosity;
-                  Debug_Mode   := Params.Debug_Mode;
-
-                  Partial_Parse_Active := True;
-
-                  Parse_Data.Initialize
-                    (Post_Parse_Action => Wisi.Navigate, -- mostly ignored
-                     Lexer             => Parser.Lexer,
-                     Descriptor        => Descriptor'Unrestricted_Access,
-                     Base_Terminals    => Parser.Terminals'Unrestricted_Access,
-                     Begin_Line        => Params.Parse_Begin_Line,
-                     End_Line          => Params.Parse_End_Line,
-                     Begin_Indent      => Params.Parse_Begin_Indent,
-                     Params            => "");
+                  Parse_Data.Refactor (Parse_Context.Parser.Tree, 
Params.Refactor_Action, Params.Edit_Begin);
+               end;
 
-                  if Params.Max_Parallel > 0 then
-                     Parser.Max_Parallel := SAL.Base_Peek_Type 
(Params.Max_Parallel);
-                  end if;
+            elsif Match ("query-tree") then
+               --  Args: see wisi-process-parse.el wisi-process-parse--send-query
+               --  Input: <none>
+               --  Response:
+               --  [elisp vector]...
+               --  prompt
+               declare
+                  use Wisi;
+                  Source_File_Name : constant 
Ada.Strings.Unbounded.Unbounded_String :=
+                    +Wisi.Get_String (Command_Line, Last);
 
-                  Buffer := new String (Integer (Params.Parse_Region.First) .. 
Integer (Params.Parse_Region.Last));
+                  Label : constant Wisi.Query_Label := Wisi.Query_Label'Val 
(Wisi.Get_Integer (Command_Line, Last));
 
-                  Read_Input (Buffer (Buffer'First)'Address, 
Params.Byte_Count);
+                  Parse_Context : constant 
Wisi.Parse_Context.Parse_Context_Access := Wisi.Parse_Context.Find
+                    (-Source_File_Name, Language);
 
-                  Parser.Lexer.Reset_With_String_Access
-                    (Buffer, Params.Source_File_Name, 
Params.Parse_Begin_Char_Pos, Params.Parse_Begin_Line);
-                  begin
-                     Parser.Parse;
-                  exception
-                  when WisiToken.Partial_Parse =>
-                     null;
-                  end;
-                  Parser.Execute_Actions;
-                  Parse_Data.Refactor (Parser.Parsers.First_State_Ref.Tree, 
Params.Refactor_Action, Params.Edit_Begin);
-                  Clean_Up;
+                  Parse_Data : constant Wisi.Parse_Data_Access_Constant :=
+                    Wisi.Parse_Data_Access_Constant 
(Parse_Context.Parser.User_Data);
+               begin
+                  case Label is
+                  when Point_Query =>
+                     declare
+                        Point : constant WisiToken.Buffer_Pos := 
WisiToken.Buffer_Pos
+                          (Wisi.Get_Integer (Command_Line, Last));
+                        IDs : constant WisiToken.Token_ID_Arrays.Vector :=
+                          (case Point_Query'(Label) is
+                           when Node | Containing_Statement => 
WisiToken.Token_ID_Arrays.Empty_Vector,
+                           when Ancestor => Wisi.Get_Token_IDs 
(Parse_Data.all, Command_Line, Last));
+                        Query : constant Wisi.Query :=
+                          (case Point_Query'(Label) is
+                           when Node => (Node, Point),
+                           when Containing_Statement => (Containing_Statement, 
Point),
+                           when Ancestor => (Ancestor, Point, IDs));
+                     begin
+                        Check_Command_Length (Command_Length, Last);
+
+                        Wisi.Query_Tree (Parse_Data, 
Parse_Context.Parser.Tree, Query);
+                     end;
+
+                  when Parent | Child =>
+                     declare
+                        Address : constant String := Wisi.Get_String 
(Command_Line, Last);
+                        Node    : constant 
WisiToken.Syntax_Trees.Valid_Node_Access := Wisi.To_Node_Access (Address);
+                        N       : constant Integer := Wisi.Get_Integer 
(Command_Line, Last);
+                     begin
+                        Check_Command_Length (Command_Length, Last);
+
+                        Wisi.Query_Tree (Parse_Data, 
Parse_Context.Parser.Tree, (Node_Query'(Label), Node, N));
+                     end;
+
+                  when Print =>
+                     Check_Command_Length (Command_Length, Last);
+
+                     Wisi.Query_Tree (Parse_Data, Parse_Context.Parser.Tree, 
(Label => Print));
+
+                  when Dump =>
+                     declare
+                        File_Name : constant String := Wisi.Get_String 
(Command_Line, Last);
+                     begin
+                        Check_Command_Length (Command_Length, Last);
+
+                        Wisi.Query_Tree
+                          (Parse_Data, Parse_Context.Parser.Tree,
+                           (Label     => Dump,
+                            File_Name => +File_Name));
+                     end;
+                  end case;
+               end;
 
-               exception
-               when Syntax_Error =>
-                  Clean_Up;
-                  Put_Line ("(parse_error ""refactor " & Params.Parse_Region.First'Image &
-                              Params.Parse_Region.Last'Image & ": syntax error"")");
-
-               when E : Parse_Error =>
-                  Clean_Up;
-                  Put_Line ("(parse_error ""refactor " & Params.Parse_Region.First'Image &
-                              Params.Parse_Region.Last'Image & ": " & Ada.Exceptions.Exception_Message (E) & """)");
-
-               when E : others => -- includes Fatal_Error
-                  Clean_Up;
-                  Put_Line ("(error """ & Ada.Exceptions.Exception_Message (E) & """)");
+            elsif Match ("save_text") then
+               --  Args: source_file_name save_file_name
+               --  Input: <none>
+               --  Response:
+               --  (message "text saved ...)
+               --  prompt
+               declare
+                  Source_File_Name : constant String := Wisi.Get_String 
(Command_Line, Last);
+                  Save_File_Name   : constant String := Wisi.Get_String 
(Command_Line, Last);
+
+                  Parse_Context : constant 
Wisi.Parse_Context.Parse_Context_Access := Wisi.Parse_Context.Find
+                    (Source_File_Name, Language);
+               begin
+                  Check_Command_Length (Command_Length, Last);
+
+                  Parse_Context.Save_Text (Save_File_Name);
                end;
 
-            elsif Match ("noop") then
-               --  Args: <source byte count>
-               --  Input: <source text>
-               --  Response: prompt
+            elsif Match ("save_text_auto") then
+               --  Args: source_file_name root_save_file_name
+               --  Input: <none>
+               --  Response:
+               --  prompt
+               --
+               --  Save text after each incremental edit, to
+               --  <root_save_file_name_nnn>, where 'nnn' is a three-digit number
+               --  that increments.
                declare
-                  Byte_Count  : constant Integer                             
:= Get_Integer (Command_Line, Last);
-                  Buffer      : constant Ada.Strings.Unbounded.String_Access 
:= new String (1 .. Byte_Count);
-                  Token       : Base_Token;
-                  Lexer_Error : Boolean;
-                  pragma Unreferenced (Lexer_Error);
+                  Source_File_Name : constant String := Wisi.Get_String 
(Command_Line, Last);
+                  Save_File_Name   : constant String := Wisi.Get_String 
(Command_Line, Last);
+
+                  --  We need "create" here for partial parse.
+                  Parse_Context : constant 
Wisi.Parse_Context.Parse_Context_Access := Wisi.Parse_Context.Find_Create
+                    (Source_File_Name, Language, Trace);
                begin
-                  Token.ID := Invalid_Token_ID;
-                  Read_Input (Buffer (1)'Address, Byte_Count);
-
-                  Parser.Lexer.Reset_With_String_Access (Buffer, +"");
-                  loop
-                     exit when Token.ID = Parser.Trace.Descriptor.EOI_ID;
-                     Lexer_Error := Parser.Lexer.Find_Next (Token);
-                  end loop;
-               exception
-               when Syntax_Error =>
-                  Parser.Lexer.Discard_Rest_Of_Input;
+                  Check_Command_Length (Command_Length, Last);
+
+                  Parse_Context.Root_Save_Edited_Name := +Save_File_Name;
+                  Parse_Context.Save_Edited_Count     := 0;
+
+                  Put_Line ("(message ""auto text save enabled, to '" & Save_File_Name & "_nnn'"")");
                end;
 
             elsif Match ("quit") then
                exit;
 
             else
-               Put_Line ("(error ""bad command: '" & Command_Line & "'"")");
+               raise Wisi.Protocol_Error with  "invalid command: '" & Command_Line & "'";
             end if;
          exception
-         when E : Protocol_Error =>
+         when Wisi.Parse_Context.Not_Found =>
+            --  Tell Emacs to send full text
+            Put_Line ("(file_not_found)");
+
+         when E : Syntax_Error | Parse_Error =>
+            Put_Line ("(parse_error """ & Wisi.Elisp_Escape_Quotes 
(Ada.Exceptions.Exception_Message (E)) & """)");
+
+         when E : Wisi.Protocol_Error =>
             --  don't exit the loop; allow debugging bad elisp
-            Put_Line ("(error ""protocol error "": " & 
Ada.Exceptions.Exception_Message (E) & """)");
+            Put_Line
+              ("(error ""protocol error " & Wisi.Elisp_Escape_Quotes 
(Ada.Exceptions.Exception_Message (E)) & """)");
+
+         when E : others => -- includes Fatal_Error
+            if WisiToken.Debug_Mode then
+               Trace.Put_Line (GNAT.Traceback.Symbolic.Symbolic_Traceback (E));
+            end if;
+            Put_Line
+              ("(error ""error: " & Ada.Exceptions.Exception_Name (E) & " : " &
+                 Wisi.Elisp_Escape_Quotes (Ada.Exceptions.Exception_Message 
(E)) & """)");
          end;
       end loop;
       Cleanup;
@@ -503,6 +710,10 @@ package body Emacs_Wisi_Common_Parse is
       Put_Line
         ("(error ""unhandled exception: " & Ada.Exceptions.Exception_Name (E) 
& ": " &
            Ada.Exceptions.Exception_Message (E) & """)");
+
+      if Debug_Mode then
+         Put_Line (GNAT.Traceback.Symbolic.Symbolic_Traceback (E));
+      end if;
    end Process_Stream;
 
 end Emacs_Wisi_Common_Parse;
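
The save_text / save_text_auto handlers above follow the same pattern as the
other protocol commands: match a leading keyword, then pull the remaining
arguments off the command line and check that the whole line was consumed.
A minimal self-contained sketch of that pattern follows; Match and Next_Word
here are simplified stand-ins, not the Wisi.Get_String / Check_Command_Length
code, and the command text is made up.

with Ada.Text_IO; use Ada.Text_IO;
procedure Dispatch_Sketch is
   Command_Line : constant String := "save_text test.adb test.adb.save";
   Last         : Natural := 0;

   function Match (Target : in String) return Boolean is
   begin
      Last := Command_Line'First + Target'Length - 1;
      return Last <= Command_Line'Last
        and then Command_Line (Command_Line'First .. Last) = Target;
   end Match;

   function Next_Word return String is
      First : Positive := Last + 1;
   begin
      --  Skip separating blanks, then take the next space-delimited token.
      while First <= Command_Line'Last and then Command_Line (First) = ' ' loop
         First := First + 1;
      end loop;
      Last := First;
      while Last <= Command_Line'Last and then Command_Line (Last) /= ' ' loop
         Last := Last + 1;
      end loop;
      Last := Last - 1;
      return Command_Line (First .. Last);
   end Next_Word;
begin
   if Match ("save_text") then
      declare
         Source_File : constant String := Next_Word;
         Save_File   : constant String := Next_Word;
      begin
         Put_Line ("(message ""saving " & Source_File & " to " & Save_File & """)");
      end;
   else
      Put_Line ("(error ""bad command: '" & Command_Line & "'"")");
   end if;
end Dispatch_Sketch;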
diff --git a/emacs_wisi_common_parse.ads b/emacs_wisi_common_parse.ads
index e646eba3c5..fb10f469a9 100644
--- a/emacs_wisi_common_parse.ads
+++ b/emacs_wisi_common_parse.ads
@@ -2,7 +2,7 @@
 --
 --  Common utilities for Gen_Emacs_Wisi_*_Parse
 --
---  Copyright (C) 2018 - 2019 Free Software Foundation, Inc.
+--  Copyright (C) 2018 - 2022 Free Software Foundation, Inc.
 --
 --  This program is free software; you can redistribute it and/or
 --  modify it under terms of the GNU General Public License as
@@ -21,10 +21,11 @@ pragma License (GPL);
 with Ada.Strings.Unbounded;
 with System;
 with Wisi;
-with WisiToken.Parse.LR.Parser;
+with WisiToken.Parse;
+with Wisi.Parse_Context;
 package Emacs_Wisi_Common_Parse is
 
-   Protocol_Version : constant String := "5";
+   Protocol_Version : constant String := "6";
    --  Protocol_Version defines the data sent between elisp and the
    --  background process, except for the language-specific parameters,
    --  which are defined by the Language_Protocol_Version parameter to
@@ -37,29 +38,22 @@ package Emacs_Wisi_Common_Parse is
    --  the implementation of the protocol.
    --
    --  Only changes once per wisi release. Increment as soon as required,
-   --  record new version in NEWS-wisi.text.
+   --  record new version in NEWS-wisi.text. If working on a branch and
+   --  main has already incremented, increment again, in case main is
+   --  released before branch is merged; leave two "increment protocol"
+   --  lines in NEWS-wisi.text to indicate the issue.
 
    Prompt : constant String := ";;> ";
 
-   Protocol_Error : exception;
-   Finish         : exception;
+   Finish : exception;
 
    procedure Usage (Name : in String);
 
    procedure Read_Input (A : System.Address; N : Integer);
+   --  Read N bytes from standard_input to A.
 
    function Get_Command_Length return Integer;
 
-   function Get_String
-     (Source : in     String;
-      Last   : in out Integer)
-     return String;
-
-   function Get_Integer
-     (Source : in     String;
-      Last   : in out Integer)
-     return Integer;
-
    type Process_Start_Params is record
       Recover_Log_File_Name : Ada.Strings.Unbounded.Unbounded_String;
       --  log enabled if non-empty.
@@ -70,86 +64,107 @@ package Emacs_Wisi_Common_Parse is
    --  raising Finish.
 
    procedure Process_Stream
-     (Name                      : in     String;
-      Language_Protocol_Version : in     String;
-      Partial_Parse_Active      : in out Boolean;
-      Params                    : in     Process_Start_Params;
-      Parser                    : in out WisiToken.Parse.LR.Parser.Parser;
-      Parse_Data                : in out Wisi.Parse_Data_Type'Class;
-      Descriptor                : in     WisiToken.Descriptor);
+     (Name                      : in String;
+      Language_Protocol_Version : in String;
+      Params                    : in Process_Start_Params;
+      Language                  : in Wisi.Parse_Context.Language;
+      Trace                     : in WisiToken.Trace_Access);
 
    ----------
    --  Parse command
 
-   type Parse_Params is record
-      Post_Parse_Action : Wisi.Post_Parse_Action_Type;
+   type Parse_Kind is (Partial, Incremental, Full);
+
+   type Parse_Params (Kind : Parse_Kind) is record
+      --  See Get_Parse_Params in body for what elisp this must match.
+
       Source_File_Name  : Ada.Strings.Unbounded.Unbounded_String;
 
-      Begin_Byte_Pos : Integer;
-      --  Source file byte position of first char sent; start parse here.
-
-      End_Byte_Pos : Integer;
-      --  Byte position of last char sent.
-
-      Goal_Byte_Pos : Integer;
-      --  Byte position of end of desired parse region; terminate parse at
-      --  or after here.
-
-      Begin_Char_Pos : WisiToken.Buffer_Pos;
-      --  Char position of first char sent.
-
-      Begin_Line : WisiToken.Line_Number_Type;
-      End_Line   : WisiToken.Line_Number_Type;
-      --  Line number of line containing Begin_Byte_Pos, End_Byte_Pos
-
-      Begin_Indent : Integer;
-      --  Indentation of Line_Begin
-
-      Partial_Parse_Active : Boolean;
-      Debug_Mode           : Boolean;
-      Parse_Verbosity      : Integer;
-      McKenzie_Verbosity   : Integer;
-      Action_Verbosity     : Integer;
-      McKenzie_Disable     : Integer;
-      Task_Count           : Integer;
-      Check_Limit          : Integer;
-      Enqueue_Limit        : Integer;
-      Max_Parallel         : Integer;
-      Byte_Count           : Integer;
-      --  Count of bytes of source file sent.
+      Verbosity     : Ada.Strings.Unbounded.Unbounded_String;
+      Task_Count    : Integer;
+      Zombie_Limit  : Integer;
+      Enqueue_Limit : Integer;
+      Max_Parallel  : Integer;
+
+      Language_Params : Ada.Strings.Unbounded.Unbounded_String;
+
+      case Kind is
+      when Partial =>
+         Post_Parse_Action : Wisi.Post_Parse_Action_Type;
+
+         Begin_Byte_Pos : Integer;
+         --  Source file byte position of first char sent; start parse here.
+
+         End_Byte_Pos : Integer;
+         --  Byte position of last char sent.
+         --  Emacs convention; after last char
+
+         Goal_Byte_Pos : Integer;
+         --  Byte position of end of desired parse region; terminate parse at
+         --  or after here.
+
+         Begin_Char_Pos : WisiToken.Buffer_Pos;
+         End_Char_Pos   : WisiToken.Base_Buffer_Pos;
+         Goal_Char_Pos  : WisiToken.Buffer_Pos;
+         --  Corresponding char positions; end is 0 if buffer empty.
+
+         Begin_Line : WisiToken.Line_Number_Type;
+         --  Line containing Begin_Byte_Pos
+
+         Begin_Indent : Integer;
+         --  Indentation of Begin_Line
+
+         Partial_Parse_Active : Boolean;
+
+      when Incremental =>
+         Changes : Wisi.Parse_Context.Change_Lists.List;
+
+      when Full =>
+         Byte_Count        : Integer;
+         Full_End_Char_Pos : WisiToken.Base_Buffer_Pos; -- 0 if buffer empty.
+
+      end case;
    end record;
 
    function Get_Parse_Params (Command_Line : in String; Last : in out Integer) 
return Parse_Params;
+   --  Raise Protocol_Error if, after processing Command_Line, Last /=
+   --  Command_Line'Last.
+
+   ----------
+   --  Post-Parse command
+
+   type Post_Parse_Params is record
+      Source_File_Name  : Ada.Strings.Unbounded.Unbounded_String;
+      Verbosity         : Ada.Strings.Unbounded.Unbounded_String;
+      Post_Parse_Action : Wisi.Post_Parse_Action_Type;
+
+      Begin_Byte_Pos : Integer;
+      Begin_Char_Pos : Integer;
+      End_Byte_Pos   : Integer;
+      End_Char_Pos   : Integer;
+      --  Region to execute action in.
+      --  Emacs convention; end is after last char
+
+      Language_Params : Ada.Strings.Unbounded.Unbounded_String;
+   end record;
+
+   function Get_Post_Parse_Params (Command_Line : in String; Last : in out 
Integer) return Post_Parse_Params;
+   --  Raise Protocol_Error if, after processing Command_Line, Last /=
+   --  Command_Line'Last.
 
    ----------
    --  Refactor command
 
    type Refactor_Params is record
-      Refactor_Action  : Positive; -- Language-specific
+      Refactor_Action  : Wisi.Refactor_Action; -- Language-specific
       Source_File_Name : Ada.Strings.Unbounded.Unbounded_String;
 
-      Parse_Region : WisiToken.Buffer_Region;
-      --  Source file byte region to parse.
-
       Edit_Begin : WisiToken.Buffer_Pos;
       --  Source file byte position at start of expression to refactor.
 
-      Parse_Begin_Char_Pos : WisiToken.Buffer_Pos;
-      --  Char position of first char sent.
-
-      Parse_Begin_Line : WisiToken.Line_Number_Type;
-      Parse_End_Line   : WisiToken.Line_Number_Type;
-      --  Line numbers of lines containing Parse_Begin_Byte_Pos, 
Parse_End_Byte_Pos
-
-      Parse_Begin_Indent : Integer;
-      --  Indentation of Parse_Begin_Line
+      Verbosity : Ada.Strings.Unbounded.Unbounded_String;
 
-      Debug_Mode       : Boolean;
-      Parse_Verbosity  : Integer;
-      Action_Verbosity : Integer;
-      Max_Parallel     : Integer;
-      Byte_Count       : Integer;
-      --  Count of bytes of source file sent.
+      --  no Language_Params
    end record;
 
    function Get_Refactor_Params (Command_Line : in String; Last : in out 
Integer) return Refactor_Params;
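
Parse_Params above is an Ada variant record: the Kind discriminant fixed when
an object is created decides which of the Partial / Incremental / Full
components exist. A minimal sketch of that pattern, with illustrative type
names and values only:

with Ada.Text_IO; use Ada.Text_IO;
procedure Variant_Sketch is
   type Parse_Kind is (Partial, Incremental, Full);

   type Params (Kind : Parse_Kind) is record
      Source_File_Name : String (1 .. 8);
      case Kind is
      when Full =>
         Byte_Count : Natural;
      when others =>
         null;
      end case;
   end record;

   Full_Parse : constant Params :=
     (Kind => Full, Source_File_Name => "test.adb", Byte_Count => 1024);
begin
   --  Byte_Count is only a legal component when Kind = Full; referencing
   --  it on an object with another Kind raises Constraint_Error.
   Put_Line ("bytes:" & Natural'Image (Full_Parse.Byte_Count));
end Variant_Sketch;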
diff --git a/gen_emacs_wisi_lr_parse.adb b/gen_emacs_wisi_lr_parse.adb
index 0f818688e7..34a83df130 100644
--- a/gen_emacs_wisi_lr_parse.adb
+++ b/gen_emacs_wisi_lr_parse.adb
@@ -2,7 +2,7 @@
 --
 --  See spec.
 --
---  Copyright (C) 2014, 2017 - 2020 All Rights Reserved.
+--  Copyright (C) 2014, 2017 - 2020, 2022 All Rights Reserved.
 --
 --  This program is free software; you can redistribute it and/or
 --  modify it under terms of the GNU General Public License as
@@ -23,17 +23,15 @@ with WisiToken.Parse.LR.Parser;
 with WisiToken.Text_IO_Trace;
 procedure Gen_Emacs_Wisi_LR_Parse
 is
-   Trace      : aliased WisiToken.Text_IO_Trace.Trace 
(Descriptor'Unrestricted_Access);
-   Parser     : WisiToken.Parse.LR.Parser.Parser;
-   Parse_Data : aliased Parse_Data_Type (Parser.Terminals'Access, 
Parser.Line_Begin_Token'Access);
+   Trace               : aliased WisiToken.Text_IO_Trace.Trace;
+   Parse_Data_Template : aliased Parse_Data_Type;
 
    Params : constant Process_Start_Params := Get_Process_Start_Params;
 begin
-   Create_Parser
-     (Parser, Language_Fixes, Language_Matching_Begin_Tokens, 
Language_String_ID_Set,
-      Trace'Unrestricted_Access,
-      Parse_Data'Unchecked_Access);
-
-   Process_Stream (Name, Language_Protocol_Version, Partial_Parse_Active, 
Params, Parser, Parse_Data, Descriptor);
-
+   Process_Stream
+     (Name, Language_Protocol_Version, Params,
+      (Descriptor, Create_Lexer (Trace'Unchecked_Access), Create_Parse_Table, 
Create_Productions,
+       Partial_Parse_Active, Partial_Parse_Byte_Goal, Language_Fixes, 
Language_Matching_Begin_Tokens,
+       Language_String_ID_Set, Parse_Data_Template'Unchecked_Access),
+      Trace'Unchecked_Access);
 end Gen_Emacs_Wisi_LR_Parse;
diff --git a/gen_emacs_wisi_lr_parse.ads b/gen_emacs_wisi_lr_parse.ads
index 02eac211e3..71d5e8afbb 100644
--- a/gen_emacs_wisi_lr_parse.ads
+++ b/gen_emacs_wisi_lr_parse.ads
@@ -14,7 +14,7 @@
 --  [2] On the Ada side, it is defined here, and in
 --  wisitoken-wisi_runtime.adb
 --
---  Copyright (C) 2017 - 2019 Free Software Foundation, Inc.
+--  Copyright (C) 2017 - 2020, 2022 Free Software Foundation, Inc.
 --
 --  This program is free software; you can redistribute it and/or
 --  modify it under terms of the GNU General Public License as
@@ -30,26 +30,24 @@
 
 pragma License (GPL);
 
+with Wisi;
+with WisiToken.Lexer;
 with WisiToken.Parse.LR.Parser;
 with WisiToken.Syntax_Trees;
-with Wisi;
 generic
    type Parse_Data_Type  is new Wisi.Parse_Data_Type with private;
 
    Name                           : in String; --  for Usage, error messages.
    Language_Protocol_Version      : in String; --  Defines language-specific 
parse parameters.
-   Descriptor                     : in WisiToken.Descriptor;
-   Partial_Parse_Active           : in out Boolean;
+   Descriptor                     : in WisiToken.Descriptor_Access_Constant;
+   Partial_Parse_Active           : in WisiToken.Boolean_Access;
+   Partial_Parse_Byte_Goal        : in WisiToken.Buffer_Pos_Access;
    Language_Fixes                 : in 
WisiToken.Parse.LR.Parser.Language_Fixes_Access;
    Language_Matching_Begin_Tokens : in 
WisiToken.Parse.LR.Parser.Language_Matching_Begin_Tokens_Access;
    Language_String_ID_Set         : in 
WisiToken.Parse.LR.Parser.Language_String_ID_Set_Access;
 
-   with procedure Create_Parser
-     (Parser                         :    out          
WisiToken.Parse.LR.Parser.Parser;
-      Language_Fixes                 : in              
WisiToken.Parse.LR.Parser.Language_Fixes_Access;
-      Language_Matching_Begin_Tokens : in              
WisiToken.Parse.LR.Parser.Language_Matching_Begin_Tokens_Access;
-      Language_String_ID_Set         : in              
WisiToken.Parse.LR.Parser.Language_String_ID_Set_Access;
-      Trace                          : not null access WisiToken.Trace'Class;
-      User_Data                      : in              
WisiToken.Syntax_Trees.User_Data_Access);
+   with function Create_Lexer (Trace : in WisiToken.Trace_Access) return 
WisiToken.Lexer.Handle;
+   with function Create_Parse_Table return WisiToken.Parse.LR.Parse_Table_Ptr;
+   with function Create_Productions return 
WisiToken.Syntax_Trees.Production_Info_Trees.Vector;
 
 procedure Gen_Emacs_Wisi_LR_Parse;
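
The reworked generic replaces the Create_Parser formal procedure with
Create_Lexer / Create_Parse_Table / Create_Productions formal functions, so
the instantiation supplies independent factories and each piece can be built
(and, in the run_wisi variants, measured) separately. A small sketch of the
generic-formal-function idea; all names below are made up for illustration.

with Ada.Text_IO; use Ada.Text_IO;
procedure Generic_Factory_Sketch is
   type Table is record
      State_Count : Natural;
   end record;

   generic
      --  Factory supplied by the instantiation, like Create_Parse_Table above.
      with function Create_Table return Table;
   procedure Run (Name : in String);

   procedure Run (Name : in String) is
      T : constant Table := Create_Table;
   begin
      Put_Line (Name & ":" & Natural'Image (T.State_Count) & " states");
   end Run;

   function Make_Table return Table is
   begin
      return (State_Count => 42);
   end Make_Table;

   procedure Run_Demo is new Run (Create_Table => Make_Table);
begin
   Run_Demo ("demo");
end Generic_Factory_Sketch;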
diff --git a/gen_emacs_wisi_lr_text_rep_parse.adb 
b/gen_emacs_wisi_lr_text_rep_parse.adb
index 31f1052ec5..f2211954ea 100644
--- a/gen_emacs_wisi_lr_text_rep_parse.adb
+++ b/gen_emacs_wisi_lr_text_rep_parse.adb
@@ -2,7 +2,7 @@
 --
 --  See spec.
 --
---  Copyright (C) 2014, 2017 - 2020 All Rights Reserved.
+--  Copyright (C) 2014, 2017 - 2020, 2022 All Rights Reserved.
 --
 --  This program is free software; you can redistribute it and/or
 --  modify it under terms of the GNU General Public License as
@@ -21,24 +21,19 @@ pragma License (GPL);
 with Ada.Command_Line;
 with Ada.Directories;
 with Emacs_Wisi_Common_Parse; use Emacs_Wisi_Common_Parse;
-
 with WisiToken.Text_IO_Trace;
 procedure Gen_Emacs_Wisi_LR_Text_Rep_Parse
 is
-   use WisiToken; -- "+", "-" Unbounded_string
-
-   Trace      : aliased WisiToken.Text_IO_Trace.Trace 
(Descriptor'Unrestricted_Access);
-   Parser     : WisiToken.Parse.LR.Parser.Parser;
-   Parse_Data : aliased Parse_Data_Type (Parser.Terminals'Access, 
Parser.Line_Begin_Token'Access);
+   Trace               : aliased WisiToken.Text_IO_Trace.Trace;
+   Parse_Data_Template : aliased Parse_Data_Type;
 
    Params : constant Process_Start_Params := Get_Process_Start_Params;
 begin
-   Create_Parser
-     (Parser, Language_Fixes, Language_Matching_Begin_Tokens, 
Language_String_ID_Set,
-      Trace'Unrestricted_Access,
-      Parse_Data'Unchecked_Access,
-      Ada.Directories.Containing_Directory (Ada.Command_Line.Command_Name) & 
"/" & Text_Rep_File_Name);
-
-   Process_Stream (Name, Language_Protocol_Version, Partial_Parse_Active, 
Params, Parser, Parse_Data, Descriptor);
-
+   Process_Stream
+     (Name, Language_Protocol_Version, Params,
+      (Descriptor, Create_Lexer (Trace'Unchecked_Access), Create_Parse_Table
+         (Ada.Directories.Containing_Directory (Ada.Command_Line.Command_Name) 
& "/" & Text_Rep_File_Name),
+       Create_Productions, Partial_Parse_Active, Partial_Parse_Byte_Goal, 
Language_Fixes,
+       Language_Matching_Begin_Tokens, Language_String_ID_Set, 
Parse_Data_Template'Unchecked_Access),
+     Trace'Unchecked_Access);
 end Gen_Emacs_Wisi_LR_Text_Rep_Parse;
diff --git a/gen_emacs_wisi_lr_text_rep_parse.ads 
b/gen_emacs_wisi_lr_text_rep_parse.ads
index f9ff46873a..83487a72d2 100644
--- a/gen_emacs_wisi_lr_text_rep_parse.ads
+++ b/gen_emacs_wisi_lr_text_rep_parse.ads
@@ -7,7 +7,7 @@
 --
 --  References : see gen_emacs_wisi_lr_parse.ads
 --
---  Copyright (C) 2017, 2018, 2019 Free Software Foundation, Inc.
+--  Copyright (C) 2017 - 2022 Free Software Foundation, Inc.
 --
 --  This program is free software; you can redistribute it and/or
 --  modify it under terms of the GNU General Public License as
@@ -23,28 +23,25 @@
 
 pragma License (GPL);
 
+with Wisi;
+with WisiToken.Lexer;
 with WisiToken.Parse.LR.Parser;
 with WisiToken.Syntax_Trees;
-with Wisi;
 generic
    type Parse_Data_Type  is new Wisi.Parse_Data_Type with private;
 
    Name                           : in String; --  for Usage, error messages. 
"_wisi_parse" will be appended
    Language_Protocol_Version      : in String; --  Defines language-specific 
parse parameters.
-   Descriptor                     : in WisiToken.Descriptor;
-   Partial_Parse_Active           : in out Boolean;
+   Descriptor                     : in WisiToken.Descriptor_Access_Constant;
+   Partial_Parse_Active           : in WisiToken.Boolean_Access;
+   Partial_Parse_Byte_Goal        : in WisiToken.Buffer_Pos_Access;
    Language_Fixes                 : in 
WisiToken.Parse.LR.Parser.Language_Fixes_Access;
    Language_Matching_Begin_Tokens : in 
WisiToken.Parse.LR.Parser.Language_Matching_Begin_Tokens_Access;
    Language_String_ID_Set         : in 
WisiToken.Parse.LR.Parser.Language_String_ID_Set_Access;
    Text_Rep_File_Name             : in String;
 
-   with procedure Create_Parser
-     (Parser                         :    out          
WisiToken.Parse.LR.Parser.Parser;
-      Language_Fixes                 : in              
WisiToken.Parse.LR.Parser.Language_Fixes_Access;
-      Language_Matching_Begin_Tokens : in              
WisiToken.Parse.LR.Parser.Language_Matching_Begin_Tokens_Access;
-      Language_String_ID_Set         : in              
WisiToken.Parse.LR.Parser.Language_String_ID_Set_Access;
-      Trace                          : not null access WisiToken.Trace'Class;
-      User_Data                      : in              
WisiToken.Syntax_Trees.User_Data_Access;
-      Text_Rep_File_Name             : in              String);
+   with function Create_Lexer (Trace : in WisiToken.Trace_Access) return 
WisiToken.Lexer.Handle;
+   with function Create_Parse_Table (Text_Rep_File_Name : in String) return 
WisiToken.Parse.LR.Parse_Table_Ptr;
+   with function Create_Productions return 
WisiToken.Syntax_Trees.Production_Info_Trees.Vector;
 
 procedure Gen_Emacs_Wisi_LR_Text_Rep_Parse;
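
The text_rep variants load their parse table from a file stored next to the
executable, located with Ada.Directories.Containing_Directory applied to
Ada.Command_Line.Command_Name as in the bodies above. A short sketch of just
that path computation; the file name here is illustrative.

with Ada.Command_Line;
with Ada.Directories;
with Ada.Text_IO; use Ada.Text_IO;
procedure Exe_Relative_Sketch is
   Exe_Dir : constant String :=
     Ada.Directories.Containing_Directory (Ada.Command_Line.Command_Name);
   File    : constant String := Exe_Dir & "/" & "parse_table.txt";
begin
   Put_Line ("would read parse table from: " & File);
end Exe_Relative_Sketch;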
diff --git a/gen_emacs_wisi_packrat_parse.adb b/gen_emacs_wisi_packrat_parse.adb
new file mode 100644
index 0000000000..b4e95f604f
--- /dev/null
+++ b/gen_emacs_wisi_packrat_parse.adb
@@ -0,0 +1,180 @@
+--  Abstract :
+--
+--  See spec.
+--
+--  Copyright (C) 2018 All Rights Reserved.
+--
+--  This program is free software; you can redistribute it and/or
+--  modify it under terms of the GNU General Public License as
+--  published by the Free Software Foundation; either version 3, or (at
+--  your option) any later version. This program is distributed in the
+--  hope that it will be useful, but WITHOUT ANY WARRANTY; without even
+--  the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+--  PURPOSE. See the GNU General Public License for more details. You
+--  should have received a copy of the GNU General Public License
+--  distributed with this program; see file COPYING. If not, write to
+--  the Free Software Foundation, 51 Franklin Street, Suite 500, Boston,
+--  MA 02110-1335, USA.
+
+pragma License (GPL);
+
+with Ada.Command_Line;
+with Ada.Exceptions;
+with Ada.Strings.Fixed;
+with Ada.Strings.Unbounded;
+with Ada.Text_IO; use Ada.Text_IO;
+with Emacs_Wisi_Common_Parse; use Emacs_Wisi_Common_Parse;
+with GNAT.OS_Lib;
+with GNAT.Traceback.Symbolic;
+with System.Storage_Elements;
+with WisiToken.Lexer;
+with WisiToken.Parse.Packrat;
+with WisiToken.Text_IO_Trace;
+procedure Gen_Emacs_Wisi_Parse_Packrat
+is
+   use WisiToken; -- "+", "-" Unbounded_string
+
+   Trace      : aliased WisiToken.Text_IO_Trace.Trace (Descriptor'Access);
+   Parser     : WisiToken.Parse.Packrat.Parser;
+   Parse_Data : aliased Parse_Data_Type (Parser.Line_Begin_Token'Access);
+
+begin
+   Create_Parser (Parser, Trace'Unrestricted_Access, 
Parse_Data'Unchecked_Access);
+
+   declare
+      use Ada.Command_Line;
+   begin
+      case Argument_Count is
+      when 0 =>
+         null;
+
+      when others =>
+         Usage (Name);
+         raise Programmer_Error with "invalid option count: " & Integer'Image 
(Argument_Count);
+      end case;
+   end;
+
+   Put_Line (Name & " " & Version & ", protocol version " & Protocol_Version);
+
+   --  Read commands and tokens from standard_input via GNAT.OS_Lib,
+   --  send results to standard_output.
+   loop
+      Put (Prompt); Flush;
+      declare
+         Command_Length : constant Integer := Get_Command_Length;
+         Command_Line   : aliased String (1 .. Command_Length);
+         Last           : Integer;
+
+         function Match (Target : in String) return Boolean
+         is begin
+            Last := Command_Line'First + Target'Length - 1;
+            return Last <= Command_Line'Last and then Command_Line 
(Command_Line'First .. Last) = Target;
+         end Match;
+      begin
+         Read_Input (Command_Line'Address, Command_Length);
+
+         Put_Line (";; " & Command_Line);
+
+         if Match ("parse") then
+            --  Args: see Usage
+            --  Input: <source text>
+            --  Response:
+            --  [response elisp vector]...
+            --  [elisp error form]...
+            --  prompt
+            declare
+               use Wisi;
+               Cl_Params : constant Command_Line_Params := Get_Cl_Params 
(Command_Line, Last);
+               Buffer    : Ada.Strings.Unbounded.String_Access;
+
+               procedure Clean_Up
+               is begin
+                  Parser.Lexer.Discard_Rest_Of_Input;
+                  Parser.Put_Errors (-Cl_Params.Source_File_Name);
+                  Ada.Strings.Unbounded.Free (Buffer);
+               end Clean_Up;
+
+            begin
+               --  Computing Line_Count in elisp allows parsing in parallel 
with
+               --  sending source text.
+
+               Trace_Parse    := Cl_Params.Parse_Verbosity;
+               Trace_McKenzie := Cl_Params.McKenzie_Verbosity;
+               Trace_Action   := Cl_Params.Action_Verbosity;
+               Debug_Mode     := Cl_Params.Debug_Mode;
+
+               Parse_Data.Initialize
+                 (Post_Parse_Action => Cl_Params.Post_Parse_Action,
+                  Descriptor        => Descriptor'Access,
+                  Source_File_Name  => -Cl_Params.Source_File_Name,
+                  Line_Count        => Cl_Params.Line_Count,
+                  Params            => Command_Line (Last + 2 .. 
Command_Line'Last));
+
+               Buffer := new String (1 .. Cl_Params.Byte_Count);
+               Read_Input (Buffer (1)'Address, Cl_Params.Byte_Count);
+
+               Parser.Lexer.Reset_With_String_Access (Buffer);
+               Parser.Parse;
+               Parser.Execute_Actions;
+               Put (Parse_Data);
+               Clean_Up;
+
+            exception
+            when Syntax_Error =>
+               Clean_Up;
+               Put_Line ("(parse_error)");
+
+            when E : Parse_Error =>
+               Clean_Up;
+               Put_Line ("(parse_error """ & Ada.Exceptions.Exception_Message 
(E) & """)");
+
+            when E : Fatal_Error =>
+               Clean_Up;
+               Put_Line ("(error """ & Ada.Exceptions.Exception_Message (E) & 
""")");
+            end;
+
+         elsif Match ("noop") then
+            --  Args: <source byte count>
+            --  Input: <source text>
+            --  Response: prompt
+            declare
+               Byte_Count  : constant Integer                             := 
Get_Integer (Command_Line, Last);
+               Buffer      : constant Ada.Strings.Unbounded.String_Access := 
new String (1 .. Byte_Count);
+               Token       : Base_Token;
+               Lexer_Error : Boolean;
+               pragma Unreferenced (Lexer_Error);
+            begin
+               Token.ID := Invalid_Token_ID;
+               Read_Input (Buffer (1)'Address, Byte_Count);
+
+               Parser.Lexer.Reset_With_String_Access (Buffer);
+               loop
+                  exit when Token.ID = Parser.Trace.Descriptor.EOF_ID;
+                  Lexer_Error := Parser.Lexer.Find_Next (Token);
+               end loop;
+            exception
+            when Syntax_Error =>
+               Parser.Lexer.Discard_Rest_Of_Input;
+            end;
+
+         elsif Match ("quit") then
+            exit;
+
+         else
+            Put_Line ("(error ""bad command: '" & Command_Line & "'"")");
+         end if;
+      exception
+      when E : Protocol_Error =>
+         --  don't exit the loop; allow debugging bad elisp
+         Put_Line ("(error ""protocol error "": " & 
Ada.Exceptions.Exception_Message (E) & """)");
+      end;
+   end loop;
+exception
+when E : others =>
+   Ada.Command_Line.Set_Exit_Status (Ada.Command_Line.Failure);
+   New_Line (2);
+   Put_Line
+     ("(error ""unhandled exception: " & Ada.Exceptions.Exception_Name (E) & 
": " &
+        Ada.Exceptions.Exception_Message (E) & """)");
+   Put_Line (GNAT.Traceback.Symbolic.Symbolic_Traceback (E));
+end Gen_Emacs_Wisi_Parse_Packrat;
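
Read_Input above fills a caller-supplied buffer with N bytes from standard
input, given the buffer's address ("Read commands and tokens from
standard_input via GNAT.OS_Lib"). A sketch of that technique using
GNAT.OS_Lib.Read plus address arithmetic; this illustrates the idea and is
not the wisi Read_Input body.

with GNAT.OS_Lib;
with System.Storage_Elements;
with Ada.Text_IO; use Ada.Text_IO;
procedure Read_Input_Sketch is

   procedure Read_Input (A : System.Address; N : Integer) is
      use GNAT.OS_Lib;
      use System.Storage_Elements;
      Remaining  : Integer        := N;
      Addr       : System.Address := A;
      Read_Bytes : Integer;
   begin
      --  A single Read may deliver fewer bytes than requested, so loop
      --  until N bytes have arrived or input ends.
      while Remaining > 0 loop
         Read_Bytes := Read (Standin, Addr, Remaining);
         exit when Read_Bytes <= 0;
         Remaining := Remaining - Read_Bytes;
         Addr      := Addr + Storage_Offset (Read_Bytes);
      end loop;
   end Read_Input;

   Buffer : String (1 .. 10) := (others => ' ');
begin
   --  Blocks until 10 bytes are read from standard input (or input ends).
   Read_Input (Buffer (1)'Address, Buffer'Length);
   Put_Line ("read: " & Buffer);
end Read_Input_Sketch;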
diff --git a/gen_emacs_wisi_packrat_parse.ads b/gen_emacs_wisi_packrat_parse.ads
new file mode 100644
index 0000000000..79c69ecf89
--- /dev/null
+++ b/gen_emacs_wisi_packrat_parse.ads
@@ -0,0 +1,42 @@
+--  Abstract :
+--
+--  Generic Emacs background process; packrat parse token stream,
+--  return parser actions.
+--
+--  See gen_run_wisi_packrat_parse.ads for a standalone version.
+--
+--  References :
+--
+--  See gen_emacs_wisi_lr_parse.ads
+--
+--  Copyright (C) 2018 Free Software Foundation, Inc.
+--
+--  This program is free software; you can redistribute it and/or
+--  modify it under terms of the GNU General Public License as
+--  published by the Free Software Foundation; either version 3, or (at
+--  your option) any later version. This program is distributed in the
+--  hope that it will be useful, but WITHOUT ANY WARRANTY; without even
+--  the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+--  PURPOSE. See the GNU General Public License for more details. You
+--  should have received a copy of the GNU General Public License
+--  distributed with this program; see file COPYING. If not, write to
+--  the Free Software Foundation, 51 Franklin Street, Suite 500, Boston,
+--  MA 02110-1335, USA.
+
+pragma License (GPL);
+
+with WisiToken.Parse.Packrat;
+with WisiToken.Syntax_Trees;
+with WisiToken.Wisi_Runtime;
+generic
+   type Parse_Data_Type  is new WisiToken.Wisi_Runtime.Parse_Data_Type with 
private;
+
+   Name       : in String; --  for Usage, error messages. 
"_wisi_parse_packrat" will be appended
+   Descriptor : in WisiToken.Descriptor;
+
+   with procedure Create_Parser
+     (Parser    :    out          WisiToken.Parse.Packrat.Parser;
+      Trace     : not null access WisiToken.Trace'Class;
+      User_Data : in              WisiToken.Syntax_Trees.User_Data_Access);
+
+procedure Gen_Emacs_Wisi_Parse_Packrat;
diff --git a/gen_run_wisi_lr_parse.adb b/gen_run_wisi_lr_parse.adb
index 84498de625..803ddb8213 100644
--- a/gen_run_wisi_lr_parse.adb
+++ b/gen_run_wisi_lr_parse.adb
@@ -2,7 +2,7 @@
 --
 --  See spec.
 --
---  Copyright (C) 2017 - 2020 All Rights Reserved.
+--  Copyright (C) 2017 - 2020, 2022 All Rights Reserved.
 --
 --  This program is free software; you can redistribute it and/or
 --  modify it under terms of the GNU General Public License as
@@ -18,20 +18,34 @@
 
 pragma License (GPL);
 
+with GNATCOLL.Memory;
 with Run_Wisi_Common_Parse;
 with WisiToken.Text_IO_Trace;
 procedure Gen_Run_Wisi_LR_Parse
 is
-   Trace      : aliased WisiToken.Text_IO_Trace.Trace 
(Descriptor'Unrestricted_Access);
-   Parser     : WisiToken.Parse.LR.Parser.Parser;
-   Parse_Data : aliased Parse_Data_Type (Parser.Terminals'Access, 
Parser.Line_Begin_Token'Access);
+   Trace               : aliased WisiToken.Text_IO_Trace.Trace;
+   Parse_Data_Template : aliased Parse_Data_Type;
 begin
-   --  Create parser first so Put_Usage has defaults from Parser.Table,
-   --  and Get_CL_Params can override them.
-   Create_Parser
-     (Parser, Language_Fixes, Language_Matching_Begin_Tokens, 
Language_String_ID_Set,
-      Trace'Unrestricted_Access, Parse_Data'Unchecked_Access);
+   --  FIXME: report memory during lexer, parser create
+   --  WisiToken.Trace_Memory            := 2;
+   --  WisiToken.Trace_Incremental_Parse := 1;
+   GNATCOLL.Memory.Configure
+     (Activate_Monitor      => True,
+      Stack_Trace_Depth     => 10,
+      Reset_Content_On_Free => False);
 
-   Run_Wisi_Common_Parse.Parse_File (Parser, Parse_Data, Descriptor);
+   declare
+      Lexer : constant WisiToken.Lexer.Handle := Create_Lexer 
(Trace'Unchecked_Access);
+      --  No point in reporting lexer memory; it's very small
+      Parse_Table : constant WisiToken.Parse.LR.Parse_Table_Ptr := 
Create_Parse_Table;
+   begin
+      Trace.Put_Line ("parse table created");
+      WisiToken.Report_Memory (Trace, Prefix => True);
 
+      Run_Wisi_Common_Parse.Parse_File
+        ((Descriptor, Lexer, Parse_Table, Create_Productions, 
Partial_Parse_Active,
+          Partial_Parse_Byte_Goal, Language_Fixes, 
Language_Matching_Begin_Tokens, Language_String_ID_Set,
+          Parse_Data_Template'Unchecked_Access),
+         Trace'Unchecked_Access);
+   end;
 end Gen_Run_Wisi_LR_Parse;
diff --git a/gen_run_wisi_lr_parse.ads b/gen_run_wisi_lr_parse.ads
index 34fbd3c03a..34b73fa171 100644
--- a/gen_run_wisi_lr_parse.ads
+++ b/gen_run_wisi_lr_parse.ads
@@ -4,7 +4,7 @@
 --
 --  See gen_emacs_wisi_lr_parse.ads for the Emacs background process.
 --
---  Copyright (C) 2017, 2018, 2019 Free Software Foundation, Inc.
+--  Copyright (C) 2017 - 2022 Free Software Foundation, Inc.
 --
 --  This program is free software; you can redistribute it and/or
 --  modify it under terms of the GNU General Public License as
@@ -20,23 +20,22 @@
 
 pragma License (GPL);
 
+with Wisi;
+with WisiToken.Lexer;
 with WisiToken.Parse.LR.Parser;
 with WisiToken.Syntax_Trees;
-with Wisi;
 generic
    type Parse_Data_Type is new Wisi.Parse_Data_Type with private;
 
-   Descriptor                     : in WisiToken.Descriptor;
+   Descriptor                     : in WisiToken.Descriptor_Access_Constant;
+   Partial_Parse_Active           : in WisiToken.Boolean_Access;
+   Partial_Parse_Byte_Goal        : in WisiToken.Buffer_Pos_Access;
    Language_Fixes                 : in 
WisiToken.Parse.LR.Parser.Language_Fixes_Access;
    Language_Matching_Begin_Tokens : in 
WisiToken.Parse.LR.Parser.Language_Matching_Begin_Tokens_Access;
    Language_String_ID_Set         : in 
WisiToken.Parse.LR.Parser.Language_String_ID_Set_Access;
 
-   with procedure Create_Parser
-     (Parser                         :    out          
WisiToken.Parse.LR.Parser.Parser;
-      Language_Fixes                 : in              
WisiToken.Parse.LR.Parser.Language_Fixes_Access;
-      Language_Matching_Begin_Tokens : in              
WisiToken.Parse.LR.Parser.Language_Matching_Begin_Tokens_Access;
-      Language_String_ID_Set         : in              
WisiToken.Parse.LR.Parser.Language_String_ID_Set_Access;
-      Trace                          : not null access WisiToken.Trace'Class;
-      User_Data                      : in              
WisiToken.Syntax_Trees.User_Data_Access);
+   with function Create_Lexer (Trace : in WisiToken.Trace_Access) return 
WisiToken.Lexer.Handle;
+   with function Create_Parse_Table return WisiToken.Parse.LR.Parse_Table_Ptr;
+   with function Create_Productions return 
WisiToken.Syntax_Trees.Production_Info_Trees.Vector;
 
 procedure Gen_Run_Wisi_LR_Parse;
diff --git a/gen_run_wisi_lr_text_rep_parse.adb 
b/gen_run_wisi_lr_text_rep_parse.adb
index 45bc5e12f2..6c2196e83f 100644
--- a/gen_run_wisi_lr_text_rep_parse.adb
+++ b/gen_run_wisi_lr_text_rep_parse.adb
@@ -2,7 +2,7 @@
 --
 --  See spec.
 --
---  Copyright (C) 2017 - 2020 All Rights Reserved.
+--  Copyright (C) 2017 - 2020, 2022 All Rights Reserved.
 --
 --  This program is free software; you can redistribute it and/or
 --  modify it under terms of the GNU General Public License as
@@ -20,25 +20,35 @@ pragma License (GPL);
 
 with Ada.Command_Line;
 with Ada.Directories;
+with GNATCOLL.Memory;
 with Run_Wisi_Common_Parse;
 with WisiToken.Text_IO_Trace;
 procedure Gen_Run_Wisi_LR_Text_Rep_Parse
 is
-   Trace      : aliased WisiToken.Text_IO_Trace.Trace 
(Descriptor'Unrestricted_Access);
-   Parser     : WisiToken.Parse.LR.Parser.Parser;
-   Parse_Data : aliased Parse_Data_Type (Parser.Terminals'Access, 
Parser.Line_Begin_Token'Access);
+   Trace               : aliased WisiToken.Text_IO_Trace.Trace;
+   Parse_Data_Template : aliased Parse_Data_Type;
 begin
-   --  Create parser first so Put_Usage has defaults from Parser.Table,
-   --  and Get_CL_Params can override them.
+   --  FIXME: report memory during lexer, parser create
+   --  WisiToken.Trace_Memory            := 1;
+   --  WisiToken.Trace_Incremental_Parse := 1;
+   GNATCOLL.Memory.Configure
+     (Activate_Monitor      => True,
+      Stack_Trace_Depth     => 0,
+      Reset_Content_On_Free => False);
+
    declare
-      use Ada.Command_Line;
+      Lexer : constant WisiToken.Lexer.Handle := Create_Lexer 
(Trace'Unchecked_Access);
+      --  No point in reporting lexer memory; it's very small
+      Parse_Table : constant WisiToken.Parse.LR.Parse_Table_Ptr := 
Create_Parse_Table
+        (Ada.Directories.Containing_Directory (Ada.Command_Line.Command_Name) 
& "/" & Text_Rep_File_Name);
    begin
-      --  text_rep file is in same directory as exectuable.
-      Create_Parser
-        (Parser, Language_Fixes, Language_Matching_Begin_Tokens, 
Language_String_ID_Set,
-         Trace'Unrestricted_Access, Parse_Data'Unchecked_Access,
-         Ada.Directories.Containing_Directory (Command_Name) & "/" & 
Text_Rep_File_Name);
+      Trace.Put_Line ("parse table created");
+      WisiToken.Report_Memory (Trace, Prefix => True);
 
-      Run_Wisi_Common_Parse.Parse_File (Parser, Parse_Data, Descriptor);
+      Run_Wisi_Common_Parse.Parse_File
+        ((Descriptor, Lexer, Parse_Table,
+          Create_Productions, Partial_Parse_Active, Partial_Parse_Byte_Goal, 
Language_Fixes,
+          Language_Matching_Begin_Tokens, Language_String_ID_Set, 
Parse_Data_Template'Unchecked_Access),
+         Trace'Unchecked_Access);
    end;
 end Gen_Run_Wisi_LR_Text_Rep_Parse;
diff --git a/gen_run_wisi_lr_text_rep_parse.ads 
b/gen_run_wisi_lr_text_rep_parse.ads
index dd45d16822..dea8f54048 100644
--- a/gen_run_wisi_lr_text_rep_parse.ads
+++ b/gen_run_wisi_lr_text_rep_parse.ads
@@ -4,7 +4,7 @@
 --
 --  See gen_emacs_wisi_*_parse.ads for the Emacs background process.
 --
---  Copyright (C) 2018, 2019 Free Software Foundation, Inc.
+--  Copyright (C) 2018 - 2022 Free Software Foundation, Inc.
 --
 --  This program is free software; you can redistribute it and/or
 --  modify it under terms of the GNU General Public License as
@@ -20,25 +20,23 @@
 
 pragma License (GPL);
 
+with Wisi;
+with WisiToken.Lexer;
 with WisiToken.Parse.LR.Parser;
 with WisiToken.Syntax_Trees;
-with Wisi;
 generic
    type Parse_Data_Type is new Wisi.Parse_Data_Type with private;
 
-   Descriptor                     : in WisiToken.Descriptor;
+   Descriptor                     : in WisiToken.Descriptor_Access_Constant;
+   Partial_Parse_Active           : in WisiToken.Boolean_Access;
+   Partial_Parse_Byte_Goal        : in WisiToken.Buffer_Pos_Access;
    Language_Fixes                 : in 
WisiToken.Parse.LR.Parser.Language_Fixes_Access;
    Language_Matching_Begin_Tokens : in 
WisiToken.Parse.LR.Parser.Language_Matching_Begin_Tokens_Access;
    Language_String_ID_Set         : in 
WisiToken.Parse.LR.Parser.Language_String_ID_Set_Access;
    Text_Rep_File_Name             : in String;
 
-   with procedure Create_Parser
-     (Parser                         :    out          
WisiToken.Parse.LR.Parser.Parser;
-      Language_Fixes                 : in              
WisiToken.Parse.LR.Parser.Language_Fixes_Access;
-      Language_Matching_Begin_Tokens : in              
WisiToken.Parse.LR.Parser.Language_Matching_Begin_Tokens_Access;
-      Language_String_ID_Set         : in              
WisiToken.Parse.LR.Parser.Language_String_ID_Set_Access;
-      Trace                          : not null access WisiToken.Trace'Class;
-      User_Data                      : in              
WisiToken.Syntax_Trees.User_Data_Access;
-      Text_Rep_File_Name             : in              String);
+   with function Create_Lexer (Trace : in WisiToken.Trace_Access) return 
WisiToken.Lexer.Handle;
+   with function Create_Parse_Table (Text_Rep_File_Name : in String) return 
WisiToken.Parse.LR.Parse_Table_Ptr;
+   with function Create_Productions return 
WisiToken.Syntax_Trees.Production_Info_Trees.Vector;
 
 procedure Gen_Run_Wisi_LR_Text_Rep_Parse;
diff --git a/gen_run_wisi_packrat_parse.adb b/gen_run_wisi_packrat_parse.adb
new file mode 100644
index 0000000000..fb3e900a09
--- /dev/null
+++ b/gen_run_wisi_packrat_parse.adb
@@ -0,0 +1,241 @@
+--  Abstract :
+--
+--  See spec.
+--
+--  Copyright (C) 2018 All Rights Reserved.
+--
+--  This program is free software; you can redistribute it and/or
+--  modify it under terms of the GNU General Public License as
+--  published by the Free Software Foundation; either version 3, or (at
+--  your option) any later version. This program is distributed in the
+--  hope that it will be useful, but WITHOUT ANY WARRANTY; without even
+--  the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+--  PURPOSE. See the GNU General Public License for more details. You
+--  should have received a copy of the GNU General Public License
+--  distributed with this program; see file COPYING. If not, write to
+--  the Free Software Foundation, 51 Franklin Street, Suite 500, Boston,
+--  MA 02110-1335, USA.
+
+pragma License (GPL);
+
+with Ada.Command_Line;
+with Ada.Exceptions;
+with Ada.IO_Exceptions;
+with Ada.Real_Time;
+with Ada.Strings.Unbounded;
+with Ada.Text_IO; use Ada.Text_IO;
+with GNAT.Traceback.Symbolic;
+with WisiToken.Lexer;
+with WisiToken.Text_IO_Trace;
+procedure Gen_Run_Wisi_Parse_Packrat
+is
+   use WisiToken; -- Token_ID, "+", "-" Unbounded_string
+
+   Trace      : aliased WisiToken.Text_IO_Trace.Trace (Descriptor'Access);
+   Parser     : WisiToken.Parse.Packrat.Parser;
+   Parse_Data : aliased Parse_Data_Type (Parser.Line_Begin_Token'Access);
+
+   procedure Put_Usage
+   is begin
+      Put_Line ("usage: " & Name & "_wisi_parse <file_name> <parse_action> 
[options]");
+      Put_Line ("parse_action: {Navigate | Face | Indent}");
+      Put_Line ("options:");
+      Put_Line ("--verbosity n m l:");
+      Put_Line ("   n: parser; m: mckenzie; l: action");
+      Put_Line ("   0 - only report parse errors");
+      Put_Line ("   1 - shows spawn/terminate parallel parsers, error recovery 
enter/exit");
+      Put_Line ("   2 - add each parser cycle, error recovery enqueue/check");
+      Put_Line ("   3 - parse stack in each cycle, error recovery parse 
actions");
+      Put_Line ("   4 - add lexer debug");
+      Put_Line ("--lang_params <language-specific params>");
+      Put_Line ("--lexer_only : only run lexer, for profiling");
+      Put_Line ("--repeat_count n : repeat parse count times, for profiling; 
default 1");
+      Put_Line ("--pause : when repeating, prompt for <enter> after each 
parse; allows seeing memory leaks");
+      New_Line;
+   end Put_Usage;
+
+   Source_File_Name  : Ada.Strings.Unbounded.Unbounded_String;
+   Post_Parse_Action : WisiToken.Wisi_Runtime.Post_Parse_Action_Type;
+
+   Line_Count   : WisiToken.Line_Number_Type := 1;
+   Lexer_Only   : Boolean                    := False;
+   Repeat_Count : Integer                    := 1;
+   Pause        : Boolean                    := False;
+   Arg          : Integer;
+   Lang_Params  : Ada.Strings.Unbounded.Unbounded_String;
+   Start        : Ada.Real_Time.Time;
+begin
+   Create_Parser (Parser, Trace'Unrestricted_Access, 
Parse_Data'Unchecked_Access);
+
+   declare
+      use Ada.Command_Line;
+   begin
+      if Argument_Count < 1 then
+         Put_Usage;
+         Set_Exit_Status (Failure);
+         return;
+      end if;
+
+      Source_File_Name  := +Ada.Command_Line.Argument (1);
+      Post_Parse_Action := WisiToken.Wisi_Runtime.Post_Parse_Action_Type'Value 
(Ada.Command_Line.Argument (2));
+      Arg               := 3;
+
+      loop
+         exit when Arg > Argument_Count;
+
+         if Argument (Arg) = "--verbosity" then
+            WisiToken.Trace_Parse    := Integer'Value (Argument (Arg + 1));
+            WisiToken.Trace_McKenzie := Integer'Value (Argument (Arg + 2));
+            WisiToken.Trace_Action   := Integer'Value (Argument (Arg + 3));
+            Arg                      := Arg + 4;
+
+         elsif Argument (Arg) = "--lang_params" then
+            Lang_Params := +Argument (Arg + 1);
+            Arg := Arg + 2;
+
+         elsif Argument (Arg) = "--lexer_only" then
+            Lexer_Only := True;
+            Arg := Arg + 1;
+
+         elsif Argument (Arg) = "--pause" then
+            Pause := True;
+            Arg := Arg + 1;
+
+         elsif Argument (Arg) = "--repeat_count" then
+            Repeat_Count := Integer'Value (Argument (Arg + 1));
+            Arg := Arg + 2;
+
+         else
+            Put_Line ("unrecognized option: '" & Argument (Arg) & "'");
+            Put_Usage;
+            return;
+         end if;
+      end loop;
+   end;
+
+   --  Do this after setting Trace_Parse so lexer verbosity is set
+   begin
+      Parser.Lexer.Reset_With_File (-Source_File_Name);
+   exception
+   when Ada.IO_Exceptions.Name_Error =>
+      Put_Line (Standard_Error, "'" & (-Source_File_Name) & "' cannot be 
opened");
+      return;
+   end;
+
+   --  See comment in wisi-wisi_runtime.ads for why we still need this.
+   declare
+      Token : Base_Token;
+      Lexer_Error : Boolean;
+      pragma Unreferenced (Lexer_Error);
+   begin
+      loop
+         begin
+            Lexer_Error := Parser.Lexer.Find_Next (Token);
+            exit when Token.ID = Descriptor.EOF_ID;
+         exception
+         when WisiToken.Syntax_Error =>
+            Parser.Lexer.Discard_Rest_Of_Input;
+            Parser.Put_Errors (-Source_File_Name);
+            Put_Line ("(lexer_error)");
+         end;
+      end loop;
+      Line_Count := Token.Line;
+   end;
+
+   if WisiToken.Trace_Action > WisiToken.Outline then
+      Put_Line ("line_count:" & Line_Number_Type'Image (Line_Count));
+   end if;
+
+   Parse_Data.Initialize
+     (Post_Parse_Action => Post_Parse_Action,
+      Descriptor        => Descriptor'Access,
+      Source_File_Name  => -Source_File_Name,
+      Line_Count        => Line_Count,
+      Params            => -Lang_Params);
+
+   if Repeat_Count > 1 then
+      Start := Ada.Real_Time.Clock;
+   end if;
+
+   for I in 1 .. Repeat_Count loop
+      declare
+         procedure Clean_Up
+         is begin
+            Parser.Lexer.Discard_Rest_Of_Input;
+            if Repeat_Count = 1 then
+               Parser.Put_Errors (-Source_File_Name);
+            end if;
+         end Clean_Up;
+
+      begin
+         Parse_Data.Reset;
+         Parser.Lexer.Reset;
+
+         if Lexer_Only then
+            declare
+               Token : Base_Token;
+               Lexer_Error : Boolean;
+               pragma Unreferenced (Lexer_Error);
+            begin
+               Parser.Lexer.Reset;
+               loop
+                  Lexer_Error := Parser.Lexer.Find_Next (Token);
+                  exit when Token.ID = Descriptor.EOF_ID;
+               end loop;
+               --  We don't handle errors here; that was done in the count 
lines loop
+               --  above.
+            end;
+         else
+            Parser.Parse;
+            Parser.Execute_Actions;
+
+            if Repeat_Count = 1 then
+               Parse_Data.Put;
+               Parser.Put_Errors (-Source_File_Name);
+            end if;
+         end if;
+      exception
+      when WisiToken.Syntax_Error =>
+         Clean_Up;
+         Put_Line ("(parse_error)");
+
+      when E : WisiToken.Parse_Error =>
+         Clean_Up;
+         Put_Line ("(parse_error """ & Ada.Exceptions.Exception_Message (E) & 
""")");
+
+      when E : WisiToken.Fatal_Error =>
+         Clean_Up;
+         Put_Line ("(error """ & Ada.Exceptions.Exception_Message (E) & """)");
+      end;
+
+      if Pause then
+         Put_Line ("Enter to continue:");
+         Flush (Standard_Output);
+         declare
+            Junk : constant String := Get_Line;
+            pragma Unreferenced (Junk);
+         begin
+            null;
+         end;
+      end if;
+   end loop;
+
+   if Repeat_Count > 1 then
+      declare
+         use Ada.Real_Time;
+         Finish : constant Time := Clock;
+      begin
+         Put_Line ("Total time:" & Duration'Image (To_Duration (Finish - 
Start)));
+         Put_Line ("per iteration:" & Duration'Image (To_Duration ((Finish - 
Start) / Repeat_Count)));
+      end;
+   end if;
+
+exception
+when E : others =>
+   Ada.Command_Line.Set_Exit_Status (Ada.Command_Line.Failure);
+   New_Line (2);
+   Put_Line
+     ("(error ""unhandled exception: " & Ada.Exceptions.Exception_Name (E) & 
": " &
+        Ada.Exceptions.Exception_Message (E) & """)");
+   Put_Line (GNAT.Traceback.Symbolic.Symbolic_Traceback (E));
+end Gen_Run_Wisi_Parse_Packrat;
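
The --repeat_count timing above uses Ada.Real_Time Time_Span arithmetic,
including dividing the elapsed span by the iteration count before converting
to Duration for display. A self-contained sketch of just that computation,
with a placeholder delay standing in for one parse:

with Ada.Real_Time; use Ada.Real_Time;
with Ada.Text_IO;   use Ada.Text_IO;
procedure Repeat_Timing_Sketch is
   Repeat_Count : constant := 5;
   Start        : constant Time := Clock;
   Finish       : Time;
begin
   for I in 1 .. Repeat_Count loop
      delay 0.01;  --  stand-in for one parse
   end loop;
   Finish := Clock;
   Put_Line ("Total time:" & Duration'Image (To_Duration (Finish - Start)));
   Put_Line ("per iteration:" &
               Duration'Image (To_Duration ((Finish - Start) / Repeat_Count)));
end Repeat_Timing_Sketch;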
diff --git a/gen_run_wisi_packrat_parse.ads b/gen_run_wisi_packrat_parse.ads
new file mode 100644
index 0000000000..538da1d9ea
--- /dev/null
+++ b/gen_run_wisi_packrat_parse.ads
@@ -0,0 +1,36 @@
+--  Abstract :
+--
+--  Run an Emacs packrat parser as a standalone executable, for debugging.
+--
+--  See gen_emacs_wisi_packrat_parse.ads for the Emacs background process.
+--
+--  Copyright (C) 2018 Free Software Foundation, Inc.
+--
+--  This program is free software; you can redistribute it and/or
+--  modify it under terms of the GNU General Public License as
+--  published by the Free Software Foundation; either version 3, or (at
+--  your option) any later version. This program is distributed in the
+--  hope that it will be useful, but WITHOUT ANY WARRANTY; without even
+--  the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+--  PURPOSE. See the GNU General Public License for more details. You
+--  should have received a copy of the GNU General Public License
+--  distributed with this program; see file COPYING. If not, write to
+--  the Free Software Foundation, 51 Franklin Street, Suite 500, Boston,
+--  MA 02110-1335, USA.
+
+pragma License (GPL);
+
+with WisiToken.Parse.Packrat;
+with WisiToken.Syntax_Trees;
+with WisiToken.Wisi_Runtime;
+generic
+   type Parse_Data_Type is new WisiToken.Wisi_Runtime.Parse_Data_Type with 
private;
+
+   Descriptor : in WisiToken.Descriptor;
+
+   with procedure Create_Parser
+     (Parser    :    out          WisiToken.Parse.Packrat.Parser;
+      Trace     : not null access WisiToken.Trace'Class;
+      User_Data : in              WisiToken.Syntax_Trees.User_Data_Access);
+
+procedure Gen_Run_Wisi_Parse_Packrat;
diff --git a/gnat-core.el b/gnat-core.el
new file mode 100644
index 0000000000..cd71ed33f3
--- /dev/null
+++ b/gnat-core.el
@@ -0,0 +1,487 @@
+;; gnat-core.el --- Support for running GNAT tools, which support multiple 
programming  -*- lexical-binding:t -*-
+;; languages.
+;;
+;; GNAT is provided by AdaCore; see http://libre.adacore.com/
+;;
+;;; Copyright (C) 2012 - 2022  Free Software Foundation, Inc.
+;;
+;; Author: Stephen Leake <stephen_leake@member.fsf.org>
+;; Maintainer: Stephen Leake <stephen_leake@member.fsf.org>
+;;
+;; This file is part of GNU Emacs.
+;;
+;; GNU Emacs is free software: you can redistribute it and/or modify
+;; it under the terms of the GNU General Public License as published by
+;; the Free Software Foundation, either version 3 of the License, or
+;; (at your option) any later version.
+;;
+;; GNU Emacs is distributed in the hope that it will be useful,
+;; but WITHOUT ANY WARRANTY; without even the implied warranty of
+;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+;; GNU General Public License for more details.
+;;
+;; You should have received a copy of the GNU General Public License
+;; along with GNU Emacs.  If not, see <http://www.gnu.org/licenses/>.
+
+(require 'cl-lib)
+(require 'wisi-prj)
+
+;;;;; code
+
+(defcustom ada-gnat-debug-run nil
+  ;; Name implies Ada, which is wrong. Kept for backward compatibility.
+  "If t, compilation buffers containing a GNAT command will show
+the command.  Otherwise, they will show only the output of the
+command.  This applies e.g. to *gnatfind* buffers."
+  :type 'boolean
+  :safe  #'booleanp
+  :group 'ada)
+
+;;;; project file handling
+
+(cl-defstruct gnat-compiler
+  "Used with wisi-compiler-* generic functions."
+
+  gpr-file       ;; absolute file name of GNAT project file.
+  run-buffer-name ;; string; some compiler objects have no gpr file
+  project-path    ;; list of directories from GPR_PROJECT_PATH
+  target         ;; gnat --target argument.
+  runtime        ;; gnat --RTS argument.
+  gnat-stub-opts  ;; options for gnat stub
+  gnat-stub-cargs ;; cargs options for gnat stub
+  )
+
+;;;###autoload
+(cl-defun create-gnat-compiler
+    (&key
+     gpr-file
+     run-buffer-name
+     project-path
+     target
+     runtime
+     gnat-stub-opts
+     gnat-stub-cargs)
+  ;; See note on `create-ada-prj' for why this is not a defalias.
+  (make-gnat-compiler
+   :gpr-file gpr-file
+   :run-buffer-name run-buffer-name
+   :project-path project-path
+   :target target
+   :runtime runtime
+   :gnat-stub-opts gnat-stub-opts
+   :gnat-stub-cargs gnat-stub-cargs
+   ))
+
+(defun gnat-compiler-require-prj ()
+  "Return current `gnat-compiler' object from current project compiler.
+Throw an error if current project does not have a gnat-compiler."
+  (let* ((wisi-prj (wisi-prj-require-prj))
+        (compiler (wisi-prj-compiler wisi-prj)))
+    (if (gnat-compiler-p compiler)
+       compiler
+      (error "no gnat-compiler in current project"))))
+
+(defun gnat-prj-add-prj-dir (project compiler dir)
+  "Add DIR to COMPILER.project_path, and to GPR_PROJECT_PATH in 
PROJECT.file-env"
+  ;; We maintain two project values for this;
+  ;; project-path - a list of directories, for elisp find file
+  ;; GPR_PROJECT_PATH in environment, for gnat-run
+  (let ((process-environment (copy-sequence (wisi-prj-file-env project))))
+    (cl-pushnew dir (gnat-compiler-project-path compiler) :test #'string-equal)
+
+    (setenv "GPR_PROJECT_PATH"
+           (mapconcat 'identity
+                      (gnat-compiler-project-path compiler) path-separator))
+    (setf (wisi-prj-file-env project) (copy-sequence process-environment))
+    ))
+
+;; We need a dynamic variable for 'add-to-list
+(defvar gnat--src-dirs)
+
+(defun gnat-get-paths (project compiler)
+  "Add project and/or compiler source, project paths to PROJECT source-path"
+  (let* ((gnat--src-dirs (wisi-prj-source-path project))
+        (prj-dirs (cl-copy-list (gnat-compiler-project-path compiler))))
+
+    ;; Don't need project plist obj_dirs if using a project file, so
+    ;; not setting obj-dirs.
+    ;;
+    ;; We only need to update prj-dirs if the gpr-file is an aggregate
+    ;; project that sets the project path.
+
+    (condition-case-unless-debug nil
+       (with-current-buffer (gnat-run-buffer project 
(gnat-compiler-run-buffer-name (wisi-prj-compiler project)))
+         ;; gnat list -v -P can return status 0 or 4; always lists compiler 
dirs
+         (gnat-run-gnat project "list" (list "-v") '(0 4))
+
+         (goto-char (point-min))
+
+         ;; Source path
+         (search-forward "Source Search Path:")
+         (forward-line 1)
+         (while (not (looking-at "^$")) ;; terminate on blank line
+           (back-to-indentation) ;; skip whitespace forward
+
+           ;; we use 'add-to-list here, not 'cl-pushnew, because we
+           ;; want to use append to preserve the directory
+           ;; order. Directory order matters for extension projects,
+           ;; which can have duplicate file names.
+            (add-to-list
+            'gnat--src-dirs
+            (if (looking-at "<Current_Directory>")
+                (directory-file-name default-directory)
+              (expand-file-name ; Canonicalize path part.
+               (directory-file-name
+                (buffer-substring-no-properties (point) (point-at-eol)))))
+            t ;; append
+            #'string-equal)
+           (forward-line 1))
+
+          ;; Project path
+         ;;
+         ;; These are also added to src_dir, so compilation errors
+         ;; reported in project files are found.
+         (search-forward "Project Search Path:")
+         (forward-line 1)
+         (while (not (looking-at "^$"))
+           (back-to-indentation)
+           (if (looking-at "<Current_Directory>")
+                (cl-pushnew (directory-file-name default-directory) prj-dirs 
:test #'string-equal)
+             (let ((f (expand-file-name
+                        (buffer-substring-no-properties (point) 
(point-at-eol)))))
+                (cl-pushnew f prj-dirs :test #'string-equal)
+                (cl-pushnew f gnat--src-dirs :test #'string-equal)))
+           (forward-line 1))
+
+         )
+      (error
+       ;; search-forward failed. Possible causes:
+       ;;
+       ;; missing dirs in GPR_PROJECT_PATH => user error
+       ;; missing Object_Dir => gprbuild not run yet; it will be run soon
+       ;; some files are missing string quotes => user error
+       ;;
+       ;; We used to call gpr_query to get src-dirs, prj-dirs here; it
+       ;; is tolerant of the above errors. But ignoring the errors, to
+       ;; let gprbuild run with GPR_PROJECT_PATH set, is simpler.
+       (pop-to-buffer (gnat-run-buffer project (gnat-compiler-run-buffer-name 
(wisi-prj-compiler project))))
+       (message "project search path: %s" prj-dirs)
+       (message "parse gpr failed")
+       ))
+
+    (setf (wisi-prj-source-path project) (delete-dups gnat--src-dirs))
+    (setf (gnat-compiler-project-path compiler) nil)
+    (mapc (lambda (dir) (gnat-prj-add-prj-dir project compiler dir))
+         prj-dirs)
+    ))
+
+(defun gnat-parse-gpr (gpr-file project compiler)
+  "Parse GPR-FILE, append to PROJECT (a `wisi-prj' object).
+GPR-FILE must be an absolute file name.
+source-path will include compiler runtime."
+  ;; this can take a long time; let the user know what's up
+  (if (gnat-compiler-gpr-file compiler)
+      ;; gpr-file previously set; new one must match
+      (when (not (string-equal gpr-file (gnat-compiler-gpr-file compiler)))
+       (error "project file %s defines a different GNAT project file than %s"
+              (gnat-compiler-gpr-file compiler)
+              gpr-file))
+
+    (setf (gnat-compiler-gpr-file compiler) gpr-file))
+
+  (gnat-get-paths project compiler))
+
+(defun gnat-parse-gpr-1 (gpr-file project)
+  "For `wisi-prj-parser-alist'."
+  (let ((compiler (wisi-prj-compiler project)))
+    (setf (gnat-compiler-run-buffer-name compiler) gpr-file)
+    (gnat-parse-gpr gpr-file project compiler)))
+
+;;;; command line tool interface
+
+(defun gnat-run-buffer-name (prj-file-name &optional prefix)
+  ;; We don't use (gnat-compiler-gpr-file compiler), because multiple
+  ;; wisi-prj files can use one gpr-file.
+  (concat (or prefix " *gnat-run-")
+         prj-file-name
+         "*"))
+
+(defun gnat-run-buffer (project name)
+  "Return a buffer suitable for running gnat command line tools for PROJECT"
+  (let* ((buffer (get-buffer name)))
+
+    (unless (buffer-live-p buffer)
+      (setq buffer (get-buffer-create name))
+      (when (gnat-compiler-gpr-file (wisi-prj-compiler project))
+       ;; Otherwise assume `default-directory' is already correct (or
+       ;; doesn't matter).
+       (with-current-buffer buffer
+         (setq default-directory
+               (file-name-directory
+                (gnat-compiler-gpr-file (wisi-prj-compiler project)))))
+       ))
+    buffer))
+
+(defun gnat-run (project exec command &optional err-msg expected-status)
+  "Run a gnat command line tool, as \"EXEC COMMAND\".
+PROJECT  is a `wisi-prj' object.
+EXEC must be an executable found on `exec-path'.
+COMMAND must be a list of strings.
+ERR-MSG must be nil or a string.
+EXPECTED-STATUS must be nil or a list of integers; signals an error if
+the process status is not a member.
+
+Return process status.
+Assumes current buffer is (gnat-run-buffer)."
+  (set 'buffer-read-only nil)
+  (erase-buffer)
+
+  (setq command (cl-delete-if 'null command))
+
+  (let ((process-environment
+        (append
+          (wisi-prj-compile-env project)
+          (wisi-prj-file-env project)
+          (copy-sequence process-environment)))
+       status)
+
+    (when ada-gnat-debug-run
+      (insert (format "GPR_PROJECT_PATH=%s\n%s " (getenv "GPR_PROJECT_PATH") exec))
+      (mapc (lambda (str) (insert (concat str " "))) command)
+      (newline))
+
+    (setq status (apply 'call-process exec nil t nil command))
+    (cond
+     ((memq status (or expected-status '(0))); success
+      nil)
+
+     (t ; failure
+      (pop-to-buffer (current-buffer))
+      (if err-msg
+         (error "%s %s failed; %s" exec (car command) err-msg)
+       (error "%s %s failed" exec (car command))
+       ))
+     )))
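+;; Typical use (illustrative; PROJECT is a selected `wisi-prj' object):
+;;
+;;   (with-current-buffer
+;;       (gnat-run-buffer project (gnat-run-buffer-name "foo.prj"))
+;;     (gnat-run project "gnat" '("list" "-v") "cannot list project"))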
+
+(defun gnat-run-gnat (project command &optional switches-args expected-status)
+  "Run the \"gnat\" command line tool, as \"gnat COMMAND -P<prj> 
SWITCHES-ARGS\".
+COMMAND must be a string, SWITCHES-ARGS a list of strings.
+EXPECTED-STATUS must be nil or a list of integers.
+Return process status.
+Assumes current buffer is (gnat-run-buffer)"
+  (let* ((compiler (wisi-prj-compiler project))
+        (gpr-file (gnat-compiler-gpr-file compiler))
+        (project-file-switch
+         (when gpr-file
+           (concat "-P" (file-name-nondirectory gpr-file))))
+         (target-gnat (concat (gnat-compiler-target compiler) "gnat"))
+         ;; gnat list understands --RTS without a fully qualified
+         ;; path, gnat find (in particular) doesn't (but it doesn't
+         ;; need to, it uses the ALI files found via the GPR)
+         (runtime
+          (when (and (gnat-compiler-runtime compiler) (string= command "list"))
+            (list (concat "--RTS=" (gnat-compiler-runtime compiler)))))
+        (cmd (append (list command) (list project-file-switch) runtime switches-args)))
+
+    (gnat-run project target-gnat cmd nil expected-status)
+    ))
+
+(defun gnat-run-no-prj (command &optional dir)
+  "Run \"gnat COMMAND\", with DIR as current directory.
+Return process status.  Process output goes to current buffer,
+which is displayed on error."
+  (set 'buffer-read-only nil)
+  (erase-buffer)
+
+  (when ada-gnat-debug-run
+    (setq command (cl-delete-if 'null command))
+    (mapc (lambda (str) (insert (concat str " "))) command)
+    (newline))
+
+  (let ((default-directory (or dir default-directory))
+       status)
+
+    (setq status (apply 'call-process "gnat" nil t nil command))
+    (cond
+     ((= status 0); success
+      nil)
+
+     (t ; failure
+      (pop-to-buffer (current-buffer))
+      (error "gnat %s failed" (car command)))
+     )))
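+;; For example (illustrative):
+;;   (gnat-run-no-prj '("stub" "foo.ads") "/tmp/src")
+;; runs "gnat stub foo.ads" with /tmp/src as the current directory.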
+
+(cl-defmethod wisi-compiler-parse-one ((compiler gnat-compiler) project name value)
+  (cond
+   ((or
+     (string= name "ada_project_path") ;; backward compatibility
+     (string= name "gpr_project_path"))
+    (let ((process-environment
+          (append
+           (wisi-prj-compile-env project)
+           (wisi-prj-file-env project))));; reference, for substitute-in-file-name
+      (gnat-prj-add-prj-dir project compiler (expand-file-name (substitute-in-file-name value)))))
+
+   ((string= name "gnat-stub-cargs")
+    (setf (gnat-compiler-gnat-stub-cargs compiler) value))
+
+   ((string= name "gnat-stub-opts")
+    (setf (gnat-compiler-gnat-stub-opts compiler) value))
+
+   ((string= name "gpr_file")
+    ;; The gpr file is parsed in `wisi-compiler-parse-final' below, so
+    ;; it sees all file environment vars. We store the absolute gpr
+    ;; file name, so we can get the correct default-directory from
+    ;; it. Note that gprbuild requires the base name be found on
+    ;; GPR_PROJECT_PATH.
+    (let* ((process-environment
+           (append
+            (wisi-prj-compile-env project)
+            (wisi-prj-file-env project)));; reference, for substitute-in-file-name
+          (gpr-file (substitute-env-vars value)))
+
+      (if (= (aref gpr-file 0) ?$)
+         ;; An environment variable that was not resolved, possibly
+         ;; because the env var is later defined in the project file;
+         ;; it may be resolved in `wisi-compiler-parse-final'.
+         (setf (gnat-compiler-gpr-file compiler) gpr-file)
+
+       ;; else get the absolute path
+       (setf (gnat-compiler-gpr-file compiler)
+             (or (locate-file gpr-file (gnat-compiler-project-path compiler))
+                 (expand-file-name (substitute-env-vars gpr-file))))))
+    t)
+
+   ((string= name "runtime")
+    (setf (gnat-compiler-runtime compiler) value))
+
+   ((string= name "target")
+    (setf (gnat-compiler-target compiler) value))
+
+   ))
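+;; For reference, the NAME and VALUE arguments above come from
+;; "name=value" lines in a wisi project file; for example (values
+;; made up):
+;;
+;;   gpr_project_path=$GNAT_HOME/lib/gnat
+;;   gpr_file=build/foo.gpr
+;;   runtime=zfp
+;;   target=arm-eabi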
+
+(cl-defmethod wisi-compiler-parse-final ((compiler gnat-compiler) project prj-file-name)
+  (setf (gnat-compiler-run-buffer-name compiler) (gnat-run-buffer-name prj-file-name))
+
+  (let ((gpr-file (gnat-compiler-gpr-file compiler)))
+    (if gpr-file
+       (progn
+         (when (= (aref gpr-file 0) ?$)
+           ;; An environment variable that was not resolved earlier,
+           ;; because the env var is defined in the project file.
+           (let ((process-environment
+                  (append
+                   (wisi-prj-compile-env project)
+                   (wisi-prj-file-env project))));; reference, for substitute-in-file-name
+
+             (setq gpr-file
+                   (or
+                    (locate-file (substitute-env-vars gpr-file)
+                                 (gnat-compiler-project-path compiler))
+                    (expand-file-name (substitute-env-vars gpr-file))))
+
+             (setf (gnat-compiler-gpr-file compiler) gpr-file)))
+
+         (gnat-parse-gpr gpr-file project compiler)
+         )
+
+    ;; else add the compiler libraries to project.source-path
+    (gnat-get-paths project compiler)
+    )))
+
+(cl-defmethod wisi-compiler-select-prj ((_compiler gnat-compiler) _project)
+  (add-to-list 'completion-ignored-extensions ".ali") ;; gnat library files
+  (setq compilation-error-regexp-alist
+       ;; gnu matches the summary line from make:
+       ;; make: *** [rules.make:143: wisitoken-bnf-generate.exe] Error 4
+       ;; which is just annoying, but should be up to the user.
+       '(gnu)
+       )
+  )
+
+(cl-defmethod wisi-compiler-deselect-prj ((_compiler gnat-compiler) _project)
+  (setq completion-ignored-extensions (delete ".ali" completion-ignored-extensions))
+  (setq compilation-error-regexp-alist (mapcar #'car compilation-error-regexp-alist-alist))
+  )
+
+(cl-defmethod wisi-compiler-show-prj-path ((compiler gnat-compiler))
+    (if (gnat-compiler-project-path compiler)
+      (progn
+       (pop-to-buffer (get-buffer-create "*project file search path*"))
+       (erase-buffer)
+       (dolist (file (gnat-compiler-project-path compiler))
+         (insert (format "%s\n" file))))
+    (message "no project file search path set")
+    ))
+
+;;;; gnatprep utils
+
+(defun gnatprep-indent ()
+  "If point is on a gnatprep keyword, return indentation column
+for it. Otherwise return nil.  Intended to be added to
+`wisi-indent-calculate-functions' or other indentation function
+list."
+  ;; gnatprep keywords are:
+  ;;
+  ;; #if identifier [then]
+  ;; #elsif identifier [then]
+  ;; #else
+  ;; #end if;
+  ;;
+  ;; they are all indented at column 0.
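+  ;;
+  ;; For illustration (made-up fragment), in a buffer containing:
+  ;;
+  ;;    #if Debug then
+  ;;       Trace.Put_Line ("entering");
+  ;;    #end if;
+  ;;
+  ;; the "#" lines get column 0 from this function, while the enclosed
+  ;; line is indented by the normal wisi indent functions.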
+  (when (equal (char-after) ?\#) 0))
+
+(defun gnatprep-syntax-propertize (start end)
+  (goto-char start)
+  (save-match-data
+    (while (re-search-forward
+           "^[ \t]*\\(#\\(?:if\\|else\\|elsif\\|end\\)\\)"; gnatprep keywords.
+           end t)
+      (cond
+       ((match-beginning 1)
+       (put-text-property
+        (match-beginning 1) (match-end 1) 'syntax-table '(11 . ?\n)))
+       )
+      )))
+
+(defconst gnatprep-preprocessor-keywords
+   (list (list "^[ \t]*\\(#.*\n\\)"  '(1 font-lock-preprocessor-face t))))
+
+;; We assume that if this file is loaded, any ada-mode buffer may have
+;; gnatprep syntax; even with different host/target compilers, both
+;; must run gnatprep first. If support for another preprocessor is
+;; added, we'll need wisi-prj-preprocessor, along with -compiler and
+;; -xref.
+(defun gnatprep-setup ()
+  (add-to-list 'wisi-indent-calculate-functions 'gnatprep-indent)
+  (add-hook 'ada-syntax-propertize-hook #'gnatprep-syntax-propertize)
+  (font-lock-add-keywords 'ada-mode gnatprep-preprocessor-keywords)
+  ;; ada-mode calls font-lock-refresh-defaults after ada-mode-hook
+  )
+
+(add-hook 'ada-mode-hook #'gnatprep-setup)
+
+;;;; Initialization
+
+;; These are shared between ada-compiler-gnat and gpr-query.
+(add-to-list 'wisi-prj-file-extensions  "gpr")
+(add-to-list 'wisi-prj-parser-alist  '("gpr" . gnat-parse-gpr-1))
+
+(add-to-list
+ 'compilation-error-regexp-alist-alist
+ '(gnat
+   ;; typical:
+   ;;   cards_package.adb:45:32: expected private type "System.Address"
+   ;;
+   ;; with full path from a Source_Reference pragma:
+   ;;   d:/maphds/version_x/1773/sbs-abi-dll_lib.ads.gp:39:06: file "interfaces_c.ads" not found
+   ;;
+   ;; gnu cc1: (gnatmake can invoke the C compiler)
+   ;;   foo.c:2: `TRUE' undeclared here (not in a function)
+   ;;   foo.c:2 : `TRUE' undeclared here (not in a function)
+   ;;
+   ;; we can't handle secondary errors here, because a regexp can't distinguish "message" from "filename"
+   "^\\(\\(.:\\)?[^ :\n]+\\):\\([0-9]+\\)\\s-?:?\\([0-9]+\\)?" 1 3 4))
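+;; In the "typical" message above, the regexp groups are: group 1
+;; "cards_package.adb" (file), group 3 "45" (line), group 4 "32"
+;; (column); hence the trailing indices "1 3 4".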
+
+(provide 'gnat-core)
+;; end of file
diff --git a/install.sh b/install.sh
new file mode 100644
index 0000000000..886fc94eed
--- /dev/null
+++ b/install.sh
@@ -0,0 +1,15 @@
+#!/bin/sh
+# Install executables for Ada mode.
+#
+# See build.sh for build (must be run before install).
+
+# $1 : optional --prefix=<dir>
+#
+# If you don't have write permission in the GNAT installation
+# directory, you need to use --prefix=<dir>, or run with root privileges.
+
+WISI_DIR=`ls -d ../wisi-3.1.?`
+
+gprinstall -f -p -P ada_mode_wisi_parse.gpr -aP $WISI_DIR --install-name=ada_mode_wisi_parse $1
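+
+# Example (illustrative): install to a user-writable prefix:
+#   ./install.sh --prefix=$HOME/.local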
+
+# end of file.
diff --git a/recover_stats.adb b/recover_stats.adb
deleted file mode 100644
index e1fed7b4ef..0000000000
--- a/recover_stats.adb
+++ /dev/null
@@ -1,271 +0,0 @@
---  Abstract :
---
---  Summarize error recover log.
---
---  Copyright (C) 2019 - 2020 Stephen Leake All Rights Reserved.
---
---  This program is free software; you can redistribute it and/or
---  modify it under terms of the GNU General Public License as
---  published by the Free Software Foundation; either version 3, or (at
---  your option) any later version. This program is distributed in the
---  hope that it will be useful, but WITHOUT ANY WARRANTY; without even
---  the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
---  PURPOSE. See the GNU General Public License for more details. You
---  should have received a copy of the GNU General Public License
---  distributed with this program; see file COPYING. If not, write to
---  the Free Software Foundation, 51 Franklin Street, Suite 500, Boston,
---  MA 02110-1335, USA.
-
-pragma License (GPL);
-
-with Ada.Command_Line;
-with Ada.Exceptions;
-with Ada.Long_Float_Text_IO;
-with Ada.Strings.Fixed; use Ada.Strings.Fixed;
-with Ada.Strings.Maps;
-with Ada.Text_IO; use Ada.Text_IO;
-with GNAT.Traceback.Symbolic;
-with SAL.Gen_Stats.Gen_Image;
-with SAL.Long_Float_Stats;
-with WisiToken.Parse.LR;
-procedure Recover_Stats
-is
-   subtype Strategies is WisiToken.Parse.LR.Strategies;
-
-   File : File_Type;
-
-   Delimiters : constant Ada.Strings.Maps.Character_Set := 
Ada.Strings.Maps.To_Set (",() ");
-   Number     : constant Ada.Strings.Maps.Character_Set := 
Ada.Strings.Maps.To_Set ("0123456789");
-
-   type Strategy_Counts is array (Strategies) of Natural;
-
-   type Recover_Label is (Full, Partial);
-
-   type Recover_Summary is record
-      Event_Count : Integer := 0;
-      --  1 per recover event (1 line in log file)
-
-      Enqueue_Stats : SAL.Long_Float_Stats.Stats_Type;
-      Check_Stats   : SAL.Long_Float_Stats.Stats_Type;
-
-      Strat_Counts_Total   : Strategy_Counts := (others => 0);
-      Strat_Counts_Present : Strategy_Counts := (others => 0);
-      --  1 per recover event if used
-
-      Recover_Count_Present : Integer := 0;
-      --  1 per parser in recover result
-
-      Recover_Count_Total : Integer := 0;
-      --  Sum of all strategy counts
-
-      Fail_Event_Count      : Integer := 0; -- for all reasons
-      Fail_Enqueue_Limit    : Integer := 0;
-      Fail_No_Configs_Left  : Integer := 0;
-      Fail_Programmer_Error : Integer := 0;
-      Fail_Other            : Integer := 0;
-   end record;
-
-   Summary : array (Recover_Label) of Recover_Summary;
-begin
-   Open (File, In_File, Ada.Command_Line.Argument (1));
-
-   loop
-      exit when End_Of_File (File);
-      declare
-         --  The recover log is written by code in
-         --  wisitoken-parse-lr-parser.adb Parse (search for Recover_Log).
-         --
-         --  A line has the syntax:
-         --  yyyy-mm-dd hh:mm:ss <partial> <success> pre_parser_count 
'<file_name>' (<parser_data>)...
-         --
-         --  where there is one (<parser_data) for each parser active after 
recover. <parser_data> is:
-         --
-         --  (<strategy_counts>) <enqueue_count> <check_count> <success>
-         --
-         --  Note that the per-parser success is always TRUE; it would not be
-         --  active if recover had failed.
-
-         Line  : constant String := Get_Line (File);
-         First : Integer         := Index (Line, " "); -- after date
-         Last  : Integer;
-
-         Label : Recover_Label := Full;
-
-         function Line_Eq (Item : in String) return Boolean
-         is begin
-            return Line (First .. First + Item'Length - 1) = Item;
-         end Line_Eq;
-
-         function Next_Integer return Integer
-         is begin
-            Find_Token
-              (Line, Number,
-               From  => Last + 1,
-               Test  => Ada.Strings.Inside,
-               First => First,
-               Last  => Last);
-            return Integer'Value (Line (First .. Last));
-         exception
-         when Constraint_Error =>
-            raise Constraint_Error with "bad integer '" & Line (First .. Last 
- 1) & "' " &
-              Ada.Text_IO.Count'Image (Ada.Text_IO.Line (File) - 1) & 
First'Image & Last'Image;
-         end Next_Integer;
-
-         function Next_Boolean return  Boolean
-         is begin
-            First := Last + 2;
-            Last  := -1 + Index (Line, Delimiters, First);
-            return Boolean'Value (Line (First .. Last));
-         end Next_Boolean;
-
-         function Read_Strat_Counts (Strategy_Found : out Boolean) return 
Strategy_Counts
-         is begin
-            Strategy_Found := False;
-            Last := Index (Line, "(", Last + 1);
-            return Result : Strategy_Counts do
-               for I in Strategies loop
-                  Result (I) := Next_Integer;
-                  if Result (I) > 0 then
-                     Strategy_Found := True;
-                  end if;
-               end loop;
-               Last := 1 + Index (Line, ")", Last + 1);
-            end return;
-         end Read_Strat_Counts;
-
-      begin
-         First := Index (Line, " ", First + 1); -- after time
-         Last  := Index (Line, " ", First + 1); -- after Partial_Parse_Active
-         if Boolean'Value (Line (First + 1 .. Last - 1)) then
-            Label := Partial;
-         end if;
-
-         Summary (Label).Event_Count := Summary (Label).Event_Count + 1;
-
-         First := Last + 1;
-         if Line (First .. First + 3) = "FAIL" then
-            Summary (Label).Fail_Event_Count := Summary 
(Label).Fail_Event_Count + 1;
-            First := First + 4;
-
-            if Line_Eq ("NO_CONFIGS_LEFT") then
-               Summary (Label).Fail_No_Configs_Left := Summary 
(Label).Fail_No_Configs_Left + 1;
-            elsif Line_Eq ("ENQUEUE_LIMIT") then
-               Summary (Label).Fail_Enqueue_Limit := Summary 
(Label).Fail_Enqueue_Limit + 1;
-            elsif Line_Eq ("PROGRAMMER_ERROR") then
-               Summary (Label).Fail_Programmer_Error := Summary 
(Label).Fail_Programmer_Error + 1;
-            else
-               Summary (Label).Fail_Other := Summary (Label).Fail_Other + 1;
-            end if;
-
-         else
-            --  Process per-parser data
-            Last := Index (Line, "(", Last + 1);
-            loop
-               exit when Line (Last + 1) = ')';
-               declare
-                  Strategy_Found : Boolean;
-                  Strat_Counts   : constant Strategy_Counts := 
Read_Strat_Counts (Strategy_Found);
-                  Enqueue_Count  : constant Integer         := Next_Integer;
-                  Check_Count    : constant Integer         := Next_Integer;
-                  Success        : constant Boolean         := Next_Boolean;
-                  pragma Unreferenced (Success);
-               begin
-                  Summary (Label).Recover_Count_Present := Summary 
(Label).Recover_Count_Present + 1;
-
-                  if not Strategy_Found then
-                     raise SAL.Programmer_Error;
-                  else
-                     Summary (Label).Enqueue_Stats.Accumulate (Long_Float 
(Enqueue_Count));
-                     Summary (Label).Check_Stats.Accumulate (Long_Float 
(Check_Count));
-                     for I in Strategies loop
-                        Summary (Label).Recover_Count_Total    :=
-                          Summary (Label).Recover_Count_Total + Strat_Counts 
(I);
-                        Summary (Label).Strat_Counts_Total (I) :=
-                          Summary (Label).Strat_Counts_Total (I) + 
Strat_Counts (I);
-                        if Strat_Counts (I) > 0 then
-                           Summary (Label).Strat_Counts_Present (I) := Summary 
(Label).Strat_Counts_Present (I) + 1;
-                        end if;
-                     end loop;
-                  end if;
-               end;
-            end loop;
-         end if;
-      end;
-   end loop;
-
-   declare
-      use Ada.Strings;
-
-      Label_Field     : String (1 .. 23); -- fits strategy and fail labels
-      Count_Field     : String (1 .. 8);
-      Percent_Field   : String (1 .. 4);
-      --  Shared by Put_If, Put_Percent
-
-      procedure Put_If
-        (Summary_Label : in Recover_Label;
-         Name          : in String;
-         Count         : in Integer;
-         Always        : in Boolean := False)
-      is
-         Percent_Present : constant Integer :=
-           Integer (100.0 * Float (Count) / Float (Summary 
(Summary_Label).Event_Count));
-      begin
-         if Count > 0 or Always then
-            Move (Name, Label_Field); Put (Label_Field & " => ");
-            Move (Count'Image, Count_Field, Justify => Right); Put 
(Count_Field);
-            Move (Percent_Present'Image & "%", Percent_Field, Justify => 
Right); Put_Line (Percent_Field);
-         end if;
-      end Put_If;
-
-      package Stats_Image is new SAL.Long_Float_Stats.Gen_Image
-        (Real_IO           => Ada.Long_Float_Text_IO,
-         Default_Mean_Fore => 7,
-         Default_Mean_Aft  => 0,
-         Default_Mean_Exp  => 0,
-         Default_Sd_Fore   => 7,
-         Default_Sd_Aft    => 1,
-         Default_Sd_Exp    => 0);
-
-      procedure Put_Percent (Summary_Label : in Recover_Label; Present, Total 
: in Integer; Name : in String)
-      is
-         Percent_Present : constant Integer :=
-           Integer (100.0 * Float (Present) / Float (Summary 
(Summary_Label).Recover_Count_Present));
-         Percent_Total   : constant Integer :=
-           Integer (100.0 * Float (Total) / Float (Summary 
(Summary_Label).Recover_Count_Total));
-      begin
-         Move (Name, Label_Field); Put (Label_Field);
-         Move (Present'Image, Count_Field, Justify => Right); Put 
(Count_Field);
-         Move (Percent_Present'Image & "%", Percent_Field, Justify => Right); 
Put (Percent_Field & " /");
-         Move (Total'Image, Count_Field, Justify => Right); Put (Count_Field);
-         Move (Percent_Total'Image & "%", Percent_Field, Justify => Right); 
Put_Line (Percent_Field);
-      end Put_Percent;
-
-   begin
-      for I in Recover_Label loop
-         Put_Line (I'Image);
-         Put_Line ("present/total:" & Summary (I).Event_Count'Image & " /" & 
Summary (I).Recover_Count_Total'Image);
-         if Summary (I).Event_Count > 0 then
-            Put_Line ("           mean        std. dev.    min     max");
-            Put_Line ("Enqueue: " & Stats_Image.Image (Summary 
(I).Enqueue_Stats.Display));
-            Put_Line ("Check:   " & Stats_Image.Image (Summary 
(I).Check_Stats.Display));
-            Put_If (I, "FAIL", Summary (I).Fail_Event_Count, Always => True);
-            Put_If (I, "FAIL_ENQUEUE_LIMIT", Summary (I).Fail_Enqueue_Limit);
-            Put_If (I, "FAIL_NO_CONFIGS_LEFT", Summary 
(I).Fail_No_Configs_Left);
-            Put_If (I, "FAIL_PROGRAMMER_ERROR", Summary 
(I).Fail_Programmer_Error);
-            Put_If (I, "FAIL_OTHER", Summary (I).Fail_Other);
-            for J in Strategies loop
-               Put_Percent
-                 (I,
-                  Summary (I).Strat_Counts_Present (J),
-                  Summary (I).Strat_Counts_Total (J),
-                  J'Image);
-            end loop;
-         end if;
-         New_Line;
-      end loop;
-   end;
-exception
-when E : others =>
-   Put_Line (Ada.Exceptions.Exception_Name (E) & ": " & 
Ada.Exceptions.Exception_Message (E));
-   Put_Line (GNAT.Traceback.Symbolic.Symbolic_Traceback (E));
-end Recover_Stats;
diff --git a/run_wisi_common_parse.adb b/run_wisi_common_parse.adb
index c79f2e778f..88d410f6db 100644
--- a/run_wisi_common_parse.adb
+++ b/run_wisi_common_parse.adb
@@ -2,7 +2,7 @@
 --
 --  See spec.
 --
---  Copyright (C) 2018 - 2020 Free Software Foundation, Inc.
+--  Copyright (C) 2018 - 2022 Free Software Foundation, Inc.
 --
 --  This program is free software; you can redistribute it and/or
 --  modify it under terms of the GNU General Public License as
@@ -19,208 +19,718 @@
 pragma License (GPL);
 
 with Ada.Command_Line;
+with Ada.Directories;
 with Ada.Exceptions;
 with Ada.IO_Exceptions;
 with Ada.Real_Time;
+with Ada.Strings.Fixed;
+with Ada.Strings.Unbounded;
 with Ada.Text_IO;
+with GNAT.Traceback.Symbolic;
+with GNATCOLL.Memory;
+with GNATCOLL.Mmap;
 with SAL;
-with System.Multiprocessors;
+with WisiToken.Lexer;
+with WisiToken.Parse.LR.McKenzie_Recover;
+with WisiToken.Parse.LR.Parser;
+with WisiToken.Syntax_Trees;
+with WisiToken.Text_IO_Trace;
 package body Run_Wisi_Common_Parse is
 
-   procedure Usage (Parser : in out WisiToken.Parse.LR.Parser.Parser)
+   use Ada.Strings.Unbounded;
+
+   type Command_Type is (Parse_Partial, Parse_Incremental, Refactor, Command_File);
+
+   type Command_Line_Params (Command : Command_Type) is record
+      --  Similar to emacs_wisi_common_parse.ads Parse_Params
+
+      Source_File_Name : Ada.Strings.Unbounded.Unbounded_String;
+      Language_Params  : Ada.Strings.Unbounded.Unbounded_String;
+      Repeat_Count     : Integer                    := 1;
+
+      case Command is
+      when Parse_Partial =>
+         Partial_Post_Parse_Action : Wisi.Base_Post_Parse_Action_Type;
+         Partial_Begin_Byte_Pos    : WisiToken.Buffer_Pos       := 
WisiToken.Invalid_Buffer_Pos;
+         Partial_End_Byte_Pos      : WisiToken.Base_Buffer_Pos  := 
WisiToken.Invalid_Buffer_Pos;
+         Partial_Goal_Byte_Pos     : WisiToken.Buffer_Pos       := 
WisiToken.Invalid_Buffer_Pos;
+         Partial_Begin_Char_Pos    : WisiToken.Buffer_Pos       := 
WisiToken.Invalid_Buffer_Pos;
+         Partial_End_Char_Pos      : WisiToken.Base_Buffer_Pos  := 
WisiToken.Invalid_Buffer_Pos;
+         Partial_Goal_Char_Pos     : WisiToken.Buffer_Pos       := 
WisiToken.Invalid_Buffer_Pos;
+         Partial_Begin_Line        : WisiToken.Line_Number_Type := 
WisiToken.Line_Number_Type'First;
+         Partial_Begin_Indent      : Integer                    := 0;
+
+      when Parse_Incremental =>
+         --  Incremental edit, parse, post_parse_action
+         Changes               : Wisi.Parse_Context.Change_Lists.List;
+         Inc_Post_Parse_Action : Wisi.Base_Post_Parse_Action_Type;
+         Inc_Begin_Byte_Pos    : WisiToken.Buffer_Pos      := 
WisiToken.Invalid_Buffer_Pos;
+         Inc_Begin_Char_Pos    : WisiToken.Buffer_Pos      := 
WisiToken.Invalid_Buffer_Pos;
+         Inc_End_Byte_Pos      : WisiToken.Base_Buffer_Pos := 
WisiToken.Invalid_Buffer_Pos;
+         Inc_End_Char_Pos      : WisiToken.Base_Buffer_Pos := 
WisiToken.Invalid_Buffer_Pos;
+
+      when Refactor =>
+         --  We assume the file contains only the one statement/declaration
+         --  that needs refactoring.
+
+         Refactor_Action : Wisi.Refactor_Action;
+         --  Language-specific
+
+         Edit_Begin : WisiToken.Buffer_Pos;
+         --  Source file byte position at start of expression to refactor.
+
+      when Command_File =>
+         Command_File_Name : Ada.Strings.Unbounded.Unbounded_String;
+      end case;
+   end record;
+
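+   --  For illustration (file names and positions made up), typical
+   --  command lines for these commands look like:
+   --
+   --    parse_partial Indent foo.adb
+   --    parse_incremental Navigate foo.adb <changes> 1 120
+   --    refactor <refactor_action> foo.adb 63
+   --    command_file commands.txt foo.adb
+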
+   procedure Usage_1 (Parse_Data : in Wisi.Parse_Data_Type'Class)
    is
-      use all type WisiToken.Parse.LR.Parse_Table_Ptr;
       use Ada.Text_IO;
    begin
-      Put_Line ("usage: parse <parse_action> <file_name> [partial parse 
params] [options]");
+      Put_Line ("usage: parse_partial <post_parse_action> <file_name> [partial 
parse params] [options]");
+      Put_Line ("   or: parse_incremental <post_parse_action> <file_name> 
<changes> ");
+      Put_Line ("          <action_begin_byte> <action_end_byte> [options]");
       Put_Line ("   or: refactor <refactor_action> <file_name> <edit_begin> 
[options]");
-      Put_Line ("parse_action: {Navigate | Face | Indent}");
-      Put_Line ("partial parse params: begin_byte_pos end_byte_pos 
goal_byte_pos begin_char_pos begin_line" &
-                  " end_line begin_indent");
+      Put_Line ("   or: command_file <command_file_name> [source_file_name]");
+      Put_Line ("post_parse_action: {Navigate | Face | Indent}");
+      Put_Line ("refactor_action:");
+      Parse_Data.Refactor_Help;
+      New_Line;
+   end Usage_1;
+
+   procedure Usage
+     (Parse_Data  : in Wisi.Parse_Data_Type'Class;
+      Parse_Table : in WisiToken.Parse.LR.Parse_Table_Ptr)
+   is
+      use all type WisiToken.Parse.LR.Parse_Table_Ptr;
+      use Ada.Text_IO;
+   begin
+      Usage_1 (Parse_Data);
+      Put_Line ("partial parse params: begin_byte_pos end_byte_pos 
goal_byte_pos begin_char_pos end_char_pos" &
+                  " begin_line begin_indent");
       Put_Line ("options:");
-      Put_Line ("--verbosity n m l: (no 'm' for refactor)");
-      Put_Line ("   n: parser; m: mckenzie; l: action");
-      Put_Line ("   0 - only report parse errors");
-      Put_Line ("   1 - shows spawn/terminate parallel parsers, error recovery 
enter/exit");
-      Put_Line ("   2 - add each parser cycle, error recovery enqueue/check");
-      Put_Line ("   3 - parse stack in each cycle, error recovery parse 
actions");
-      Put_Line ("   4 - add lexer debug, dump syntax tree");
-      Put_Line ("--check_limit n  : set error recover token check limit" &
-                  (if Parser.Table = null then ""
-                   else "; default" & 
Parser.Table.McKenzie_Param.Check_Limit'Image));
-      Put_Line ("--check_delta n  : set error recover delta check limit" &
-                  (if Parser.Table = null then ""
-                   else "; default" & 
Parser.Table.McKenzie_Param.Check_Delta_Limit'Image));
-      Put_Line ("--enqueue_limit n  : set error recover token enqueue limit" &
-                  (if Parser.Table = null then ""
-                   else "; default" & 
Parser.Table.McKenzie_Param.Enqueue_Limit'Image));
-      Put_Line ("--max_parallel n  : set maximum count of parallel parsers 
(default" &
-                  WisiToken.Parse.LR.Parser.Default_Max_Parallel'Image & ")");
-      Put_Line ("--task_count n : worker tasks in error recovery");
-      Put_Line ("--disable_recover : disable error recovery; default enabled");
-      Put_Line ("--debug_mode : tracebacks from unhandled exceptions; default 
disabled");
+      Put_Line ("--verbosity <trace config>");
+      WisiToken.Enable_Trace_Help;
+      Put_Line ("--save_text <file_name> : write edited file text to 
file_name");
       Put_Line ("--lang_params <language-specific params>");
+      Put_Line ("--max_parallel n  : set maximum count of parallel parsers" &
+                  (if Parse_Table = null then ""
+                   else "; default" & Parse_Table.Max_Parallel'Image));
+      Put_Line ("--mckenzie_check_limit n  : set error recover token check 
limit" &
+                  (if Parse_Table = null then ""
+                   else "; default" & 
Parse_Table.McKenzie_Param.Check_Limit'Image));
+      Put_Line ("--mckenzie_check_delta n  : set error recover delta check 
limit" &
+                  (if Parse_Table = null then ""
+                   else "; default" & 
Parse_Table.McKenzie_Param.Check_Delta_Limit'Image));
+      Put_Line ("--mckenzie_enqueue_limit n  : set error recover token enqueue 
limit" &
+                  (if Parse_Table = null then ""
+                   else "; default" & 
Parse_Table.McKenzie_Param.Enqueue_Limit'Image));
+      Put_Line ("--mckenzie_full_explore : force error recover explore all 
solutions");
+      Put_Line ("--mckenzie_high_cost : error recover report high cost 
solutions");
+      Put_Line ("--mckenzie_zombie_limit n  : set error recover token zombie 
limit" &
+                  (if Parse_Table = null then ""
+                   else "; default" & 
Parse_Table.McKenzie_Param.Zombie_Limit'Image));
       Put_Line ("--repeat_count n : repeat parse count times, for profiling; 
default 1");
+      Put_Line ("--log <file_name> : output verbosity trace to <file_name>");
       New_Line;
    end Usage;
 
-   function Get_CL_Params (Parser : in out WisiToken.Parse.LR.Parser.Parser) 
return Command_Line_Params
+   Finish : exception;
+
+   Save_File_Name : Unbounded_String;
+
+   Log_File : Ada.Text_IO.File_Type; -- for Parse recover log; unused
+
+   Trace_File : aliased Ada.Text_IO.File_Type; -- for Trace log; see --log.
+
+   procedure Get_File_Size (Parse_Context : in 
Wisi.Parse_Context.Parse_Context_Access)
+   --  Run lexer to get Parse_Context.Text_Buffer_Char_Last
+   is
+      use all type WisiToken.Token_ID;
+
+      Token       : WisiToken.Lexer.Token;
+      Error_Count : Natural;
+      pragma Unreferenced (Error_Count);
+   begin
+      loop
+         Error_Count := Parse_Context.Parser.Tree.Lexer.Find_Next (Token);
+         exit when Token.ID = 
Parse_Context.Parser.Tree.Lexer.Descriptor.EOI_ID;
+      end loop;
+
+      Parse_Context.Text_Buffer_Byte_Last := Integer (Token.Byte_Region.Last);
+      Parse_Context.Text_Buffer_Char_Last := Integer (Token.Char_Region.Last);
+   end Get_File_Size;
+
+   procedure Read_Source_File (Name : in String; Parse_Context : in 
Wisi.Parse_Context.Parse_Context_Access)
+   is
+      use GNATCOLL.Mmap;
+      File   : Mapped_File   := Open_Read (Name);
+      Region : Mapped_Region := Read (File);
+   begin
+      Free (Parse_Context.Text_Buffer);
+      Parse_Context.Text_Buffer := new String'(Data (Region) (1 .. Last 
(Region)));
+      Parse_Context.Text_Buffer_Byte_Last := Parse_Context.Text_Buffer'Last;
+      --  Text_Buffer_Char_Last is set by lexer, below.
+
+      if 0 /= Ada.Strings.Fixed.Index (Parse_Context.Text_Buffer.all, ASCII.CR 
& "") then
+         --  Test case: ada_mode-recover_partial_14.adb
+         Parse_Context.Text_Buffer_Char_Last := Parse_Context.Text_Buffer'Last;
+
+         --  wisi-process-parse.el wisi-parse-require-process sets the coding
+         --  convention for sending data to the parser process to utf-8-unix.
+         --  So we have to convert DOS line endings to Unix here to match.
+         Wisi.To_Unix_Line_Endings
+           (Parse_Context.Text_Buffer, Parse_Context.Text_Buffer_Byte_Last,
+            Parse_Context.Text_Buffer_Char_Last);
+      end if;
+
+      Parse_Context.Parser.Tree.Lexer.Reset_With_String_Access
+        (Parse_Context.Text_Buffer, Parse_Context.Text_Buffer_Byte_Last, 
To_Unbounded_String (Name));
+      Free (Region);
+      Close (File);
+   end Read_Source_File;
+
+   function Command_File_Name
+     (Parse_Data : in     Wisi.Parse_Data_Type'Class;
+      Next_Arg   :    out Integer)
+     return Command_Line_Params
+   --  Read command and source file name from command line.
    is
       use Ada.Command_Line;
       use WisiToken;
-      Arg     : Integer := 1;
       Command : Command_Type;
    begin
-      if Argument_Count < 1 then
-         Usage (Parser);
-         Set_Exit_Status (Failure);
-         raise Finish;
-
-      elsif Argument (Arg) = "--help" then
-         Usage (Parser);
-         raise Finish;
-
-      elsif Argument_Count < 2 then
-         Usage (Parser);
+      if Argument_Count < 2 then
+         Usage (Parse_Data, null);
          Set_Exit_Status (Failure);
          raise Finish;
       end if;
 
-      Command := Command_Type'Value (Ada.Command_Line.Argument (1));
+      Command := Command_Type'Value (Argument (1));
 
       return Result : Command_Line_Params (Command) do
-         Result.Source_File_Name  := +Ada.Command_Line.Argument (3);
-
          case Command is
-         when Parse =>
-            Result.Post_Parse_Action := Wisi.Post_Parse_Action_Type'Value 
(Ada.Command_Line.Argument (2));
-
-            if Argument_Count >= 4 and then Argument (4)(1) /= '-' then
-               Result.Begin_Byte_Pos := WisiToken.Buffer_Pos'Value (Argument 
(4));
-               Result.End_Byte_Pos   := WisiToken.Buffer_Pos'Value (Argument 
(5)) - 1; -- match emacs region
-               Result.Goal_Byte_Pos  := WisiToken.Buffer_Pos'Value (Argument 
(6));
-               Result.Begin_Char_Pos := WisiToken.Buffer_Pos'Value (Argument 
(7));
-               Result.Begin_Line     := WisiToken.Line_Number_Type'Value 
(Argument (8));
-               Result.End_Line       := WisiToken.Line_Number_Type'Value 
(Argument (9));
-               Result.Begin_Indent   := Integer'Value (Argument (10));
-               Arg                   := 11;
-            else
-               Result.Begin_Byte_Pos := WisiToken.Invalid_Buffer_Pos;
-               Result.End_Byte_Pos   := WisiToken.Invalid_Buffer_Pos;
-               Result.Begin_Char_Pos := WisiToken.Buffer_Pos'First;
-               Result.Begin_Line     := WisiToken.Line_Number_Type'First;
-               Arg                   := 4;
-            end if;
+         when Parse_Partial =>
+            Result.Partial_Post_Parse_Action := 
Wisi.Base_Post_Parse_Action_Type'Value (Argument (2));
+            Result.Source_File_Name  := +Argument (3);
+            Next_Arg := 4;
+
+         when Parse_Incremental =>
+            Result.Inc_Post_Parse_Action := 
Wisi.Base_Post_Parse_Action_Type'Value (Argument (2));
+            Result.Source_File_Name      := +Argument (3);
+            Next_Arg                     := 4;
 
          when Refactor =>
-            Result.Refactor_Action := Integer'Value (Argument (2));
-            Result.Edit_Begin      := WisiToken.Buffer_Pos'Value (Argument 
(4));
-            Arg                    := 5;
+            Result.Refactor_Action  := Wisi.Refactor_Action'Value (Argument 
(2));
+            Result.Source_File_Name := +Argument (3);
+            Next_Arg := 4;
+
+         when Command_File =>
+            Result.Command_File_Name := +Argument (2);
+            if Argument_Count > 2 and then
+              Argument (3)'Length > 2 and then
+              Argument (3)(1 .. 2) /= "--"
+            then
+               Result.Source_File_Name  := +Argument (3);
+               Next_Arg                 := 4;
+            else
+               Next_Arg                 := 3;
+            end if;
          end case;
+      end return;
+   exception
+   when Finish =>
+      raise;
 
-         loop
-            exit when Arg > Argument_Count;
+   when E : others =>
+      Ada.Text_IO.Put_Line (Ada.Exceptions.Exception_Name (E) & ": " & 
Ada.Exceptions.Exception_Message (E));
+      Usage (Parse_Data, null);
+      Set_Exit_Status (Failure);
+      raise SAL.Parameter_Error;
+   end Command_File_Name;
 
-            if Argument (Arg) = "--verbosity" then
-               WisiToken.Trace_Parse    := Integer'Value (Argument (Arg + 1));
-               case Command is
-               when Parse =>
-                  WisiToken.Trace_McKenzie := Integer'Value (Argument (Arg + 
2));
-                  WisiToken.Trace_Action   := Integer'Value (Argument (Arg + 
3));
-                  Arg                      := Arg + 4;
-               when Refactor =>
-                  WisiToken.Trace_Action   := Integer'Value (Argument (Arg + 
2));
-                  Arg                      := Arg + 3;
-               end case;
+   procedure Remaining_Command_Params
+     (Parser : in out WisiToken.Parse.LR.Parser.Parser;
+      Params : in out Command_Line_Params;
+      Arg    : in out Integer)
+   --  Command_File_Name reads the first few command line arguments
+   is
+      use Ada.Command_Line;
+      use WisiToken;
+   begin
+      if Argument_Count >= Arg and then Argument (Arg) = "--help" then
+         Usage (Wisi.Parse_Data_Type'Class (Parser.User_Data.all), 
Parser.Table);
+         raise Finish;
+      end if;
 
-               WisiToken.Debug_Mode := WisiToken.Trace_Parse > Outline or 
WisiToken.Trace_McKenzie > Outline;
+      case Params.Command is
+      when Parse_Partial =>
+         if Argument_Count >= 4 and then Argument (4)(1) /= '-' then
+            Params.Partial_Begin_Byte_Pos := WisiToken.Buffer_Pos'Value 
(Argument (4));
+            Params.Partial_End_Byte_Pos   := WisiToken.Buffer_Pos'Value 
(Argument (5)) - 1; -- match emacs region
+            Params.Partial_Goal_Byte_Pos  := WisiToken.Buffer_Pos'Value 
(Argument (6));
+            Params.Partial_Begin_Char_Pos := WisiToken.Buffer_Pos'Value 
(Argument (7));
+            Params.Partial_End_Char_Pos   := WisiToken.Buffer_Pos'Value 
(Argument (8));
+            Params.Partial_Goal_Char_Pos  := WisiToken.Buffer_Pos'Value 
(Argument (9));
+            Params.Partial_Begin_Line     := WisiToken.Line_Number_Type'Value 
(Argument (10));
+            Params.Partial_Begin_Indent   := Integer'Value (Argument (11));
+            Arg                           := 12;
+         else
+            Params.Partial_Begin_Byte_Pos := WisiToken.Invalid_Buffer_Pos;
+            Params.Partial_End_Byte_Pos   := WisiToken.Invalid_Buffer_Pos;
+            Params.Partial_Begin_Char_Pos := WisiToken.Buffer_Pos'First;
+            Params.Partial_Begin_Line     := WisiToken.Line_Number_Type'First;
+         end if;
 
-            elsif Argument (Arg) = "--check_limit" then
-               Parser.Table.McKenzie_Param.Check_Limit := Token_Index'Value 
(Argument (Arg + 1));
-               Arg := Arg + 2;
+      when Parse_Incremental =>
+         declare
+            Text : constant String := Argument (4);
+            Last : Integer := Text'First - 1;
+         begin
+            Params.Changes := Wisi.Parse_Context.Get_Emacs_Change_List (Text, 
Last);
+         end;
 
-            elsif Argument (Arg) = "--check_delta" then
-               Parser.Table.McKenzie_Param.Check_Delta_Limit := Integer'Value 
(Argument (Arg + 1));
-               Arg := Arg + 2;
+         Params.Inc_Begin_Byte_Pos := WisiToken.Buffer_Pos'Value (Argument 
(5));
+         Params.Inc_End_Byte_Pos   := WisiToken.Buffer_Pos'Value (Argument 
(6)) - 1; -- match emacs region
+         Arg                       := 7;
 
-            elsif Argument (Arg) = "--debug_mode" then
-               WisiToken.Debug_Mode := True;
-               Arg := Arg + 1;
+      when Refactor =>
+         Params.Edit_Begin := WisiToken.Buffer_Pos'Value (Argument (4));
+         Arg               := 5;
 
-            elsif Argument (Arg) = "--disable_recover" then
-               Parser.Enable_McKenzie_Recover := False;
-               Arg := Arg + 1;
+      when Command_File =>
+         null;
 
-            elsif Argument (Arg) = "--enqueue_limit" then
-               Parser.Table.McKenzie_Param.Enqueue_Limit := Integer'Value 
(Argument (Arg + 1));
-               Arg := Arg + 2;
+      end case;
 
-            elsif Argument (Arg) = "--lang_params" then
-               Result.Lang_Params := +Argument (Arg + 1);
-               Arg := Arg + 2;
+   exception
+   when Finish | SAL.Parameter_Error =>
+      raise;
 
-            elsif Argument (Arg) = "--max_parallel" then
-               Parser.Max_Parallel := SAL.Base_Peek_Type'Value (Argument (Arg 
+ 1));
-               Arg := Arg + 2;
+   when E : others =>
+      Ada.Text_IO.Put_Line (Ada.Exceptions.Exception_Name (E) & ": " & 
Ada.Exceptions.Exception_Message (E));
+      Usage (Wisi.Parse_Data_Type'Class (Parser.User_Data.all), Parser.Table);
+      Set_Exit_Status (Failure);
+      raise SAL.Parameter_Error;
+   end Remaining_Command_Params;
 
-            elsif Argument (Arg) = "--repeat_count" then
-               Result.Repeat_Count := Integer'Value (Argument (Arg + 1));
-               Arg := Arg + 2;
+   procedure Command_Options
+     (Parser : in out WisiToken.Parse.LR.Parser.Parser;
+      Params : in out Command_Line_Params;
+      Arg    : in out Integer)
+   is
+      use Ada.Command_Line;
+      use WisiToken;
+   begin
+      loop
+         exit when Arg > Argument_Count;
 
-            elsif Argument (Arg) = "--task_count" then
-               Parser.Table.McKenzie_Param.Task_Count := 
System.Multiprocessors.CPU_Range'Value (Argument (Arg + 1));
-               Arg := Arg + 2;
+         if Argument (Arg) = "--verbosity" then
+            WisiToken.Enable_Trace (Argument (Arg + 1));
+            Arg := @ + 2;
 
-            else
-               Ada.Text_IO.Put_Line ("unrecognized option: '" & Argument (Arg) 
& "'");
-               Usage (Parser);
-               Set_Exit_Status (Failure);
-               raise SAL.Parameter_Error;
+            Parser.Tree.Lexer.Set_Verbosity (WisiToken.Trace_Lexer - 1);
+
+            if Trace_Memory > 0 then
+               GNATCOLL.Memory.Configure
+                 (Activate_Monitor      => True,
+                  Stack_Trace_Depth     => 10, -- gets to "new <type>"
+                  Reset_Content_On_Free => False);
             end if;
-         end loop;
-      end return;
+
+         elsif Argument (Arg) = "--save_text" then
+            Save_File_Name := +Argument (Arg + 1);
+            Arg := @ + 2;
+
+         elsif Argument (Arg) = "--lang_params" then
+            Params.Language_Params := +Argument (Arg + 1);
+            Arg := @ + 2;
+
+         elsif Argument (Arg) = "--max_parallel" then
+            Parser.Table.Max_Parallel := SAL.Base_Peek_Type'Value (Argument 
(Arg + 1));
+            Arg := @ + 2;
+
+         elsif Argument (Arg) = "--mckenzie_check_delta" then
+            Parser.Table.McKenzie_Param.Check_Delta_Limit := Integer'Value 
(Argument (Arg + 1));
+            Arg := @ + 2;
+
+         elsif Argument (Arg) = "--mckenzie_check_limit" then
+            Parser.Table.McKenzie_Param.Check_Limit := 
WisiToken.Syntax_Trees.Sequential_Index'Value
+              (Argument (Arg + 1));
+            Arg := @ + 2;
+
+         elsif Argument (Arg) = "--mckenzie_enqueue_limit" then
+            Parser.Table.McKenzie_Param.Enqueue_Limit := Integer'Value 
(Argument (Arg + 1));
+            Arg := @ + 2;
+
+         elsif Argument (Arg) = "--mckenzie_full_explore" then
+            WisiToken.Parse.LR.McKenzie_Recover.Force_Full_Explore := True;
+            Arg := @ + 1;
+
+         elsif Argument (Arg) = "--mckenzie_high_cost" then
+            WisiToken.Parse.LR.McKenzie_Recover.Force_High_Cost_Solutions := 
True;
+            Arg := @ + 1;
+
+         elsif Argument (Arg) = "--mckenzie_zombie_limit" then
+            Parser.Table.McKenzie_Param.Zombie_Limit := Integer'Value 
(Argument (Arg + 1));
+            Arg := @ + 2;
+
+         elsif Argument (Arg) = "--repeat_count" then
+            Params.Repeat_Count := Integer'Value (Argument (Arg + 1));
+            Arg := @ + 2;
+
+         elsif Argument (Arg) = "--log" then
+            declare
+               Log_File_Name : constant String := Argument (Arg + 1);
+            begin
+               Arg := @ + 2;
+               Ada.Text_IO.Open (Trace_File, Ada.Text_IO.Out_File, 
Log_File_Name);
+               WisiToken.Text_IO_Trace.Trace 
(Parser.Tree.Lexer.Trace.all).Set_File (Trace_File'Access);
+            end;
+
+         else
+            Ada.Text_IO.Put_Line ("unrecognized option: '" & Argument (Arg) & 
"'");
+            Usage (Wisi.Parse_Data_Type'Class (Parser.User_Data.all), 
Parser.Table);
+            Set_Exit_Status (Failure);
+            raise SAL.Parameter_Error;
+         end if;
+      end loop;
    exception
-   when Finish =>
+   when SAL.Parameter_Error =>
       raise;
 
    when E : others =>
       Ada.Text_IO.Put_Line (Ada.Exceptions.Exception_Name (E) & ": " & 
Ada.Exceptions.Exception_Message (E));
-      Usage (Parser);
+      Usage (Wisi.Parse_Data_Type'Class (Parser.User_Data.all), Parser.Table);
       Set_Exit_Status (Failure);
       raise SAL.Parameter_Error;
-   end Get_CL_Params;
+   end Command_Options;
+
+   procedure Put_Errors (Parser : in WisiToken.Parse.LR.Parser.Parser)
+   is begin
+      if Parser.Tree.Stream_Count = 0 then
+         Parser.Put_Errors;
 
-   procedure Parse_File
-     (Parser     : in out WisiToken.Parse.LR.Parser.Parser;
-      Parse_Data : in out Wisi.Parse_Data_Type'Class;
-      Descriptor : in     WisiToken.Descriptor)
+      elsif Parser.Tree.Stream_Count >= 2 then
+         Parser.Put_Errors (Parser.Tree.First_Parse_Stream);
+
+      else
+         --  Probably an error in Edit_Tree
+         Parser.Put_Errors (Parser.Tree.Shared_Stream);
+      end if;
+   end Put_Errors;
+
+   procedure Process_Command
+     (Parse_Context : in out Wisi.Parse_Context.Parse_Context_Access;
+      Language      : in     Wisi.Parse_Context.Language;
+      Line          : in     String;
+      Trace         : in     WisiToken.Trace_Access)
    is
-      use Ada.Text_IO;
-      use WisiToken;
+      use Ada.Strings.Fixed;
+      use WisiToken; -- "+" unbounded
+      use all type WisiToken.Lexer.Handle;
+
+      type File_Command_Type is
+        (File, Kill_Context, Language_Params, McKenzie_Options, 
Memory_Report_Reset, Memory_Report, Parse_Full,
+         Parse_Incremental, Post_Parse, Read_Tree, Refactor, Query_Tree, 
Save_Text, Save_Text_Auto, Verbosity);
 
-      Start     : Ada.Real_Time.Time;
-      End_Line  : WisiToken.Line_Number_Type;
+      Last  : Integer := Index (Line, " ");
+      First : Integer;
 
-      function Image_Augmented (Aug : in Base_Token_Class_Access) return String
-      is begin
-         --  For Syntax_Trees.Print_Tree
-         return Wisi.Image (Aug, Descriptor);
-      end Image_Augmented;
+      function Get_Command return File_Command_Type
+      is
+         use Ada.Text_IO;
+         Cmd_String : constant String := Line (Line'First .. (if Last = 0 then 
Line'Last else Last - 1));
+      begin
+         return File_Command_Type'Value (Cmd_String);
+      exception
+      when Constraint_Error =>
+         Put_Line ("invalid file command '" & Cmd_String & "'");
+         Put ("expecting ");
+         for Cmd in File_Command_Type'Range loop
+            Put (Cmd'Image & ", ");
+         end loop;
+         raise SAL.Parameter_Error;
+      end Get_Command;
 
+      Command : constant File_Command_Type := Get_Command;
    begin
-      Parser.Trace.Set_Prefix (";; "); -- so we get the same debug messages as 
Emacs_Wisi_Common_Parse
+      case Command is
+      when File =>
+         if Ada.Strings.Unbounded.Length (Parse_Context.File_Name) > 0 then
+            --  Changing files
+            Parse_Context := Wisi.Parse_Context.Create_No_File (Language, 
Trace);
+         end if;
+
+         declare
+            Source_File_Name : constant String := Line (Last + 1 .. Line'Last);
+         begin
+            Read_Source_File (Source_File_Name, Parse_Context);
+            if Trace_Memory > Detail then
+               Ada.Text_IO.Put_Line ("file read");
+               Report_Memory (Trace.all, Prefix => False);
+            end if;
+            Get_File_Size (Parse_Context);
+            Wisi.Parse_Context.Set_File (Source_File_Name, Parse_Context);
+         exception
+         when Ada.IO_Exceptions.Name_Error =>
+            Ada.Text_IO.Put_Line ("'" & Source_File_Name & "' cannot be 
opened");
+            return;
+         end;
+
+      when Kill_Context =>
+         declare
+            Source_File_Name : constant String := Line (Last + 1 .. Line'Last);
+         begin
+            if Source_File_Name = -Parse_Context.File_Name then
+               Parse_Context := null;
+            end if;
+            Wisi.Parse_Context.Kill (Source_File_Name);
+         end;
+
+      when Language_Params =>
+         Wisi.Parse_Data_Type'Class 
(Parse_Context.Parser.User_Data.all).Parse_Language_Params
+           (Line (Last + 1 .. Line'Last));
+
+      when McKenzie_Options =>
+         WisiToken.Parse.LR.Set_McKenzie_Options
+           (Parse_Context.Parser.Table.McKenzie_Param, Line (Last + 1 .. 
Line'Last));
+
+      when Memory_Report_Reset =>
+         WisiToken.Memory_Baseline := 
GNATCOLL.Memory.Get_Ada_Allocations.Current;
+         Ada.Text_IO.Put_Line ("(message ""memory report reset"")");
+
+      when Memory_Report =>
+         Report_Memory (Trace.all, Prefix => False);
+
+      when Parse_Full =>
+         --  Force a dispatching call.
+         Wisi.Parse_Data_Type'Class 
(Parse_Context.Parser.User_Data.all).Initialize;
+
+         Parse_Context.Parser.User_Data.Reset;
+         Parse_Context.Parser.Tree.Lexer.Reset;
+         begin
+            Parse_Context.Parser.Parse (Log_File);
+            Wisi.Put_Errors (Parse_Context.Parser.Tree);
+         exception
+         when WisiToken.Syntax_Error =>
+            Put_Errors (Parse_Context.Parser);
+            Ada.Text_IO.Put_Line ("(parse_error)");
+
+         when E : WisiToken.Parse_Error =>
+            Put_Errors (Parse_Context.Parser);
+            Ada.Text_IO.Put_Line
+              ("(parse_error """ & Ada.Exceptions.Exception_Name (E) & " " &
+                 Ada.Exceptions.Exception_Message (E) & """)");
+         end;
+
+      when Parse_Incremental =>
+         declare
+            Changes  : constant Wisi.Parse_Context.Change_Lists.List :=
+              Wisi.Parse_Context.Get_Emacs_Change_List (Line, Last);
+            KMN_List : WisiToken.Parse.KMN_Lists.List;
+         begin
+            Wisi.Parse_Context.Edit_Source (Trace.all, Parse_Context.all, 
Changes, KMN_List);
+
+            if Length (Parse_Context.Root_Save_Edited_Name) /= 0 then
+               Parse_Context.Save_Text_Auto;
+            end if;
+
+            Parse_Context.Parser.Tree.Lexer.Reset_With_String_Access
+              (Parse_Context.Text_Buffer,
+               Parse_Context.Text_Buffer_Byte_Last,
+               +Parse_Context.Parser.Tree.Lexer.File_Name);
+
+            --  Same logic as emacs_wisi_common_parse.adb
+            if Parse_Context.Parser.Tree.Editable then
+               Parse_Context.Parser.Parse (Log_File, KMN_List);
+            else
+               --  Last parse failed; can't edit tree, so do full parse.
+               Parse_Context.Parser.Parse (Log_File, 
Parse.KMN_Lists.Empty_List);
+            end if;
+
+            Wisi.Put_Errors (Parse_Context.Parser.Tree);
+         exception
+         when E : WisiToken.Syntax_Error | WisiToken.Parse_Error =>
+            Put_Errors (Parse_Context.Parser);
+            Ada.Text_IO.Put_Line
+              ("(parse_error """ & Ada.Exceptions.Exception_Name (E) & " " &
+                 Ada.Exceptions.Exception_Message (E) & """)");
+         end;
+
+      when Post_Parse =>
 
+         First := Last + 1;
+         Last  := Index (Line, " ", From => First);
+         declare
+            use all type Wisi.Post_Parse_Action_Type;
+
+            Action : constant Wisi.Post_Parse_Action_Type := 
Wisi.Post_Parse_Action_Type'Value (Line (First .. Last));
+
+            Begin_Byte_Pos : constant WisiToken.Buffer_Pos := 
WisiToken.Buffer_Pos (Wisi.Get_Integer (Line, Last));
+            Begin_Char_Pos : constant WisiToken.Buffer_Pos := 
WisiToken.Buffer_Pos (Wisi.Get_Integer (Line, Last));
+
+            --  Emacs end is after last char. FIXME: if last char is multibyte,
+            --  this is wrong; add something in wisitoken-utf_8.
+            End_Byte_Pos   : constant WisiToken.Buffer_Pos := 
WisiToken.Buffer_Pos (Wisi.Get_Integer (Line, Last)) - 1;
+            End_Char_Pos   : constant WisiToken.Buffer_Pos := 
WisiToken.Buffer_Pos (Wisi.Get_Integer (Line, Last)) - 1;
+         begin
+            Wisi.Parse_Data_Type'Class 
(Parse_Context.Parser.User_Data.all).Reset_Post_Parse
+              (Parse_Context.Parser.Tree, Action,
+               Action_Region_Bytes => (Begin_Byte_Pos, End_Byte_Pos),
+               Action_Region_Chars => (Begin_Char_Pos, End_Char_Pos),
+               Begin_Indent        => 0);
+
+            Parse_Context.Parser.Execute_Actions (Action_Region_Bytes => 
(Begin_Byte_Pos, End_Byte_Pos));
+
+            if Trace_Memory > Detail then
+               Trace.Put_Line ("post_parse action done");
+               Report_Memory (Trace.all, Prefix => False);
+            end if;
+
+            Wisi.Parse_Data_Type'Class 
(Parse_Context.Parser.User_Data.all).Put (Parse_Context.Parser);
+         end;
+
+      when Read_Tree =>
+         declare
+            Dump_File : constant String := Line (Last + 1 .. Line'Last);
+         begin
+            --  We assume a corresponding File command is also present.
+            Parse_Context.Parser.Tree.Get_Tree (Dump_File);
+         end;
+
+      when Refactor =>
+         declare
+            Action     : constant Wisi.Refactor_Action := 
Wisi.Parse_Data_Type'Class
+              (Parse_Context.Parser.User_Data.all).Refactor_Parse 
(Wisi.Get_Enum (Line, Last));
+            Edit_Begin : constant WisiToken.Buffer_Pos := WisiToken.Buffer_Pos 
(Wisi.Get_Integer (Line, Last));
+         begin
+            Wisi.Parse_Data_Type'Class 
(Parse_Context.Parser.User_Data.all).Refactor
+              (Parse_Context.Parser.Tree, Action, Edit_Begin);
+         end;
+
+      when Query_Tree =>
+         declare
+            use Wisi;
+            Label : constant Wisi.Query_Label := Wisi.Query_Label'Value 
(Wisi.Get_Enum (Line, Last));
+
+            Parse_Data : constant Wisi.Parse_Data_Access_Constant :=
+              Wisi.Parse_Data_Access_Constant (Parse_Context.Parser.User_Data);
+         begin
+            case Label is
+            when Point_Query =>
+               declare
+                  Point : constant WisiToken.Buffer_Pos := 
WisiToken.Buffer_Pos (Wisi.Get_Integer (Line, Last));
+                  IDs : constant WisiToken.Token_ID_Arrays.Vector :=
+                    (case Point_Query'(Label) is
+                     when Node | Containing_Statement => 
WisiToken.Token_ID_Arrays.Empty_Vector,
+                     when Ancestor => Wisi.Get_Token_IDs (Parse_Data.all, 
Line, Last));
+                  Query : constant Wisi.Query :=
+                    (case Point_Query'(Label) is
+                     when Node => (Node, Point),
+                     when Containing_Statement => (Containing_Statement, 
Point),
+                     when Ancestor => (Ancestor, Point, IDs));
+               begin
+                  Wisi.Query_Tree (Parse_Data, Parse_Context.Parser.Tree, 
Query);
+               end;
+
+            when Parent | Child =>
+               declare
+                  Address : constant String := Wisi.Get_String (Line, Last);
+                  Node    : constant WisiToken.Syntax_Trees.Valid_Node_Access 
:= Wisi.To_Node_Access (Address);
+                  N       : constant Integer := Wisi.Get_Integer (Line, Last);
+               begin
+                  Wisi.Query_Tree (Parse_Data, Parse_Context.Parser.Tree, 
(Node_Query'(Label), Node, N));
+               end;
+
+            when Print =>
+               Wisi.Query_Tree (Parse_Data, Parse_Context.Parser.Tree, (Label 
=> Print));
+
+            when Dump =>
+               declare
+                  File_Name : constant String := Line (Last + 1 .. Line'Last);
+               begin
+                  Wisi.Query_Tree
+                    (Parse_Data,
+                     Parse_Context.Parser.Tree,
+                     (Label     => Dump,
+                      File_Name => +File_Name));
+               end;
+            end case;
+         end;
+
+      when Save_Text =>
+         declare
+            Save_File_Name : constant String := Line (Last + 1 .. Line'Last);
+         begin
+            Parse_Context.Save_Text (Save_File_Name);
+         end;
+
+      when Save_Text_Auto =>
+         declare
+            Save_File_Name : constant String := Line (Last + 1 .. Line'Last);
+         begin
+            Parse_Context.Root_Save_Edited_Name := +Save_File_Name;
+            Parse_Context.Save_Edited_Count     := 0;
+            Ada.Text_IO.Put_Line ("auto text save enabled, to '" & 
Save_File_Name & "_nnn'");
+         end;
+
+      when Verbosity =>
+         WisiToken.Enable_Trace (Line (Last + 1 .. Line'Last));
+         if Parse_Context.Parser.Tree.Lexer /= null then
+            Parse_Context.Parser.Tree.Lexer.Set_Verbosity 
(WisiToken.Trace_Lexer - 1);
+         end if;
+         if Trace_Memory > 0 then
+            GNATCOLL.Memory.Configure
+              (Activate_Monitor      => True,
+               Stack_Trace_Depth     => 10, -- gets to "new <type>"
+               Reset_Content_On_Free => False);
+         end if;
+
+      end case;
+   end Process_Command;
+
+   procedure Parse_File (Language : in Wisi.Parse_Context.Language; Trace : in 
WisiToken.Trace_Access)
+   is
+      use Ada.Text_IO;
+      use WisiToken;
+
+      Start : Ada.Real_Time.Time;
+   begin
       declare
-         Cl_Params : constant Command_Line_Params := Get_CL_Params (Parser);
+         use all type Wisi.Base_Post_Parse_Action_Type;
+
+         Arg       : Integer;
+         Cl_Params : Command_Line_Params := Command_File_Name 
(Language.Parse_Data_Template.all, Arg);
+
+         Parse_Context : Wisi.Parse_Context.Parse_Context_Access :=
+           (if Length (Cl_Params.Source_File_Name) > 0 -- can be empty for 
Command_File
+            then Wisi.Parse_Context.Find_Create (-Cl_Params.Source_File_Name, 
Language, Trace)
+            else Wisi.Parse_Context.Create_No_File (Language, Trace));
+
+         Parser : WisiToken.Parse.LR.Parser.Parser renames 
Parse_Context.Parser;
+
+         Parse_Data : Wisi.Parse_Data_Type'Class renames 
Wisi.Parse_Data_Type'Class (Parser.User_Data.all);
       begin
+         Remaining_Command_Params (Parser, Cl_Params, Arg);
+
+         Trace.Set_Prefix (";; "); -- so we get the same debug messages as 
Emacs_Wisi_Common_Parse
+
          begin
             case Cl_Params.Command is
-            when Parse =>
-               Parser.Lexer.Reset_With_File
-                 (-Cl_Params.Source_File_Name, Cl_Params.Begin_Byte_Pos, 
Cl_Params.End_Byte_Pos,
-                  Cl_Params.Begin_Char_Pos, Cl_Params.Begin_Line);
+            when Parse_Partial =>
+               Parser.Tree.Lexer.Reset_With_File
+                 (-Cl_Params.Source_File_Name, 
Cl_Params.Partial_Begin_Byte_Pos, Cl_Params.Partial_End_Byte_Pos,
+                  Cl_Params.Partial_Begin_Char_Pos, 
Cl_Params.Partial_Begin_Line);
+
             when Refactor =>
-               Parser.Lexer.Reset_With_File (-Cl_Params.Source_File_Name);
+               Parser.Tree.Lexer.Reset_With_File (-Cl_Params.Source_File_Name);
+
+            when Parse_Incremental | Command_File =>
+               if Length (Cl_Params.Source_File_Name) > 0 then
+                  Read_Source_File (-Cl_Params.Source_File_Name, 
Parse_Context);
+               end if;
             end case;
          exception
          when Ada.IO_Exceptions.Name_Error =>
@@ -228,118 +738,235 @@ package body Run_Wisi_Common_Parse is
             return;
          end;
 
-         --  Parser.Line_Begin_Token First, Last set by Lex_All
-
-         if Cl_Params.Command = Refactor or else Cl_Params.End_Line = 
Invalid_Line_Number then
-            --  User did not provide; run lexer to get end line.
-            declare
-               Token       : Base_Token;
-               Lexer_Error : Boolean;
-               pragma Unreferenced (Lexer_Error);
-            begin
-               loop
-                  Lexer_Error := Parser.Lexer.Find_Next (Token);
-                  exit when Token.ID = Descriptor.EOI_ID;
-               end loop;
-               End_Line := Token.Line;
-            end;
-         else
-            End_Line := Cl_Params.End_Line;
+         if Length (Cl_Params.Source_File_Name) > 0 then
+            Get_File_Size (Parse_Context);
          end if;
 
-         Parse_Data.Initialize
-           (Post_Parse_Action =>
-              (case Cl_Params.Command is
-               when Parse    => Cl_Params.Post_Parse_Action,
-               when Refactor => Wisi.Navigate),
-            Lexer            => Parser.Lexer,
-            Descriptor       => Descriptor'Unrestricted_Access,
-            Base_Terminals   => Parser.Terminals'Unrestricted_Access,
-            Begin_Line       =>
-              (case Cl_Params.Command is
-               when Parse => Cl_Params.Begin_Line,
-               when Refactor => WisiToken.Line_Number_Type'First),
-            End_Line         => End_Line,
-            Begin_Indent     =>
-              (case Cl_Params.Command is
-               when Parse    => Cl_Params.Begin_Indent,
-               when Refactor => 0),
-            Params            => -Cl_Params.Lang_Params);
-
-         if Cl_Params.Repeat_Count > 1 then
-            Start := Ada.Real_Time.Clock;
-         end if;
+         case Cl_Params.Command is
+         when Parse_Partial =>
+            if Cl_Params.Partial_Begin_Byte_Pos = WisiToken.Invalid_Buffer_Pos 
then
+               Cl_Params.Partial_Begin_Byte_Pos := WisiToken.Buffer_Pos'First;
+               Cl_Params.Partial_Begin_Char_Pos := WisiToken.Buffer_Pos'First;
+               Cl_Params.Partial_End_Byte_Pos   := Base_Buffer_Pos 
(Parse_Context.Text_Buffer_Byte_Last);
+               Cl_Params.Partial_End_Char_Pos   := Base_Buffer_Pos 
(Parse_Context.Text_Buffer_Char_Last);
+            else
+               Parser.Partial_Parse_Active.all    := True;
+               Parser.Partial_Parse_Byte_Goal.all := 
Cl_Params.Partial_Goal_Byte_Pos;
+            end if;
+         when Parse_Incremental =>
+            if Cl_Params.Inc_Begin_Byte_Pos = WisiToken.Invalid_Buffer_Pos then
+               Cl_Params.Inc_Begin_Byte_Pos := WisiToken.Buffer_Pos'First;
+               Cl_Params.Inc_Begin_Char_Pos := WisiToken.Buffer_Pos'First;
+               Cl_Params.Inc_End_Byte_Pos   := Parser.Tree.Byte_Region
+                 (Parser.Tree.EOI, Trailing_Non_Grammar => False).Last;
+               Cl_Params.Inc_End_Char_Pos   := Parser.Tree.Char_Region
+                 (Parser.Tree.EOI, Trailing_Non_Grammar => False).Last;
+            end if;
+         when Refactor | Command_File =>
+            null;
+         end case;
 
-         for I in 1 .. Cl_Params.Repeat_Count loop
-            declare
-               procedure Clean_Up
-               is
-                  use all type SAL.Base_Peek_Type;
+         case Cl_Params.Command is
+         when Parse_Partial =>
+
+            Parse_Data.Initialize;
+
+            Command_Options (Parser, Cl_Params, Arg);
+            Parse_Data.Parse_Language_Params (-Cl_Params.Language_Params);
+
+            if Cl_Params.Repeat_Count > 1 then
+               Start := Ada.Real_Time.Clock;
+            end if;
+
+            for I in 1 .. Cl_Params.Repeat_Count loop
                begin
-                  Parser.Lexer.Discard_Rest_Of_Input;
-                  if Cl_Params.Repeat_Count = 1 and Parser.Parsers.Count > 0 
then
-                     Parse_Data.Put
-                       (Parser.Lexer.Errors,
-                        Parser.Parsers.First.State_Ref.Errors,
-                        Parser.Parsers.First.State_Ref.Tree);
+                  Parse_Data.Reset;
+                  Parser.Tree.Lexer.Reset;
+
+                  Parser.Parse (Log_File);
+                  --  Raises Parse_Error for ambiguous parse and similar 
errors.
+
+                  Wisi.Put_Errors (Parser.Tree);
+
+                  if Trace_Memory > 0 then
+                     Report_Memory (Trace.all, Prefix => False);
                   end if;
-               end Clean_Up;
 
-            begin
-               Parse_Data.Reset;
-               Parser.Lexer.Reset;
+                  if Cl_Params.Partial_Post_Parse_Action /= None then
+                     Parse_Data.Reset_Post_Parse
+                       (Parser.Tree,
+                        Post_Parse_Action   => 
Cl_Params.Partial_Post_Parse_Action,
+                        Action_Region_Bytes => 
(Cl_Params.Partial_Begin_Byte_Pos, Cl_Params.Partial_Goal_Byte_Pos),
+                        Action_Region_Chars => 
(Cl_Params.Partial_Begin_Char_Pos, Cl_Params.Partial_Goal_Char_Pos),
+                        Begin_Indent        => Cl_Params.Partial_Begin_Indent);
 
-               begin
-                  Parser.Parse;
+                     Parser.Execute_Actions (Action_Region_Bytes => 
Parse_Data.Action_Region_Bytes);
+
+                     if Cl_Params.Repeat_Count = 1 then
+                        Parse_Data.Put (Parser);
+                     end if;
+                  end if;
                exception
-               when WisiToken.Partial_Parse =>
-                  null;
+               when WisiToken.Syntax_Error =>
+                  Put_Errors (Parser);
+                  Put_Line ("(parse_error)");
+
+               when E : WisiToken.Parse_Error =>
+                  Put_Errors (Parser);
+                  Put_Line ("(parse_error """ & Ada.Exceptions.Exception_Name 
(E) & " " &
+                              Ada.Exceptions.Exception_Message (E) & """)");
+
+               when E : others => -- includes Fatal_Error
+                  Put_Errors (Parser);
+                  Put_Line ("(error """ & Ada.Exceptions.Exception_Name (E) & 
" " &
+                              Ada.Exceptions.Exception_Message (E) & """)");
+               end;
+            end loop;
+
+            if Cl_Params.Repeat_Count > 1 then
+               declare
+                  use Ada.Real_Time;
+                  Finish : constant Time := Clock;
+               begin
+                  Put_Line ("Total time:" & Duration'Image (To_Duration 
(Finish - Start)));
+                  Put_Line
+                    ("per iteration:" & Duration'Image (To_Duration ((Finish - 
Start) / Cl_Params.Repeat_Count)));
                end;
+            end if;
 
-               Parser.Execute_Actions (Image_Augmented'Unrestricted_Access);
+         when Parse_Incremental | Refactor =>
+            Command_Options (Parser, Cl_Params, Arg);
 
-               case Cl_Params.Command is
-               when Parse =>
-                  if Cl_Params.Repeat_Count = 1 then
-                     Parse_Data.Put (Parser);
-                     Parse_Data.Put
-                       (Parser.Lexer.Errors,
-                        Parser.Parsers.First.State_Ref.Errors,
-                        Parser.Parsers.First.State_Ref.Tree);
-                  end if;
+            if Cl_Params.Command /= Refactor then
+               Parse_Data.Parse_Language_Params (-Cl_Params.Language_Params);
+            end if;
+
+            --  First do a full parse to get the syntax tree
+            begin
+               Parse_Data.Initialize;
+               Parser.Tree.Lexer.Reset;
+               Parser.Parse (Log_File);
+               Wisi.Put_Errors (Parse_Context.Parser.Tree);
+               if Trace_Memory > 0 then
+                  Put ("initial full parse ");
+                  Report_Memory (Trace.all, Prefix => False);
+               end if;
 
-               when Refactor =>
-                  Parse_Data.Refactor
-                    (Parser.Parsers.First_State_Ref.Tree,
-                     Cl_Params.Refactor_Action, Cl_Params.Edit_Begin);
-               end case;
             exception
             when WisiToken.Syntax_Error =>
-               Clean_Up;
+               Put_Errors (Parser);
                Put_Line ("(parse_error)");
 
             when E : WisiToken.Parse_Error =>
-               Clean_Up;
+               Put_Errors (Parser);
                Put_Line ("(parse_error """ & Ada.Exceptions.Exception_Name (E) 
& " " &
                            Ada.Exceptions.Exception_Message (E) & """)");
-
-            when E : others => -- includes Fatal_Error
-               Clean_Up;
-               Put_Line ("(error """ & Ada.Exceptions.Exception_Name (E) & " " 
&
-                           Ada.Exceptions.Exception_Message (E) & """)");
             end;
-         end loop;
 
-         if Cl_Params.Repeat_Count > 1 then
+            case Cl_Params.Command is
+            when Parse_Incremental =>
+               declare
+                  KMN_List : WisiToken.Parse.KMN_Lists.List;
+               begin
+                  Wisi.Parse_Context.Edit_Source (Trace.all, 
Parse_Context.all, Cl_Params.Changes, KMN_List);
+
+                  if -Save_File_Name /= "" then
+                     declare
+                        use Ada.Directories;
+                        Save_File : File_Type;
+                     begin
+                        if Exists (-Save_File_Name) then
+                           Delete_File (-Save_File_Name);
+                        end if;
+                        Create (Save_File, Out_File, -Save_File_Name);
+                        Put (Save_File, Parse_Context.Text_Buffer (1 .. 
Parse_Context.Text_Buffer_Byte_Last));
+                        Close (Save_File);
+                     end;
+                  end if;
+
+                  Parse_Data.Parse_Language_Params 
(-Cl_Params.Language_Params);
+
+                  Parser.Tree.Lexer.Reset_With_String_Access
+                    (Parse_Context.Text_Buffer, 
Parse_Context.Text_Buffer_Byte_Last, Cl_Params.Source_File_Name);
+
+                  Parser.Parse (Log_File, KMN_List);
+                  Wisi.Put_Errors (Parse_Context.Parser.Tree);
+
+                  if Cl_Params.Inc_Post_Parse_Action /= None then
+                     Parse_Data.Reset_Post_Parse
+                       (Parser.Tree, Cl_Params.Inc_Post_Parse_Action,
+                        Action_Region_Bytes => (Cl_Params.Inc_Begin_Byte_Pos, 
Cl_Params.Inc_End_Byte_Pos),
+                        Action_Region_Chars => (Cl_Params.Inc_Begin_Char_Pos, 
Cl_Params.Inc_End_Char_Pos),
+                        Begin_Indent        => 0);
+
+                     Parser.Execute_Actions
+                       (Action_Region_Bytes => (Cl_Params.Inc_Begin_Byte_Pos, 
Cl_Params.Inc_End_Byte_Pos));
+
+                     Parse_Data.Put (Parser);
+                  end if;
+
+                  if Trace_Memory > 0 then
+                     Put ("incremental parse ");
+                     Report_Memory (Trace.all, Prefix => False);
+                  end if;
+
+               exception
+               when WisiToken.Syntax_Error =>
+                  Put_Errors (Parser);
+                  Put_Line ("(parse_error)");
+
+               when E : WisiToken.Parse_Error =>
+                  Put_Errors (Parser);
+                  Put_Line ("(parse_error """ & Ada.Exceptions.Exception_Name 
(E) & " " &
+                              Ada.Exceptions.Exception_Message (E) & """)");
+               end;
+
+            when Refactor =>
+               Parse_Data.Refactor
+                 (Parser.Tree,
+                  Cl_Params.Refactor_Action, Cl_Params.Edit_Begin);
+
+            when others =>
+               null;
+            end case;
+
+         when Command_File =>
+            Command_Options (Parser, Cl_Params, Arg);
+
+            --  We don't do a full parse here, so the .cmd file can set debug params before the full parse.
+
+            if Length (Cl_Params.Source_File_Name) > 0 then
+               Ada.Text_IO.Put_Line ('"' & (-Cl_Params.Source_File_Name) & '"' 
& (-Cl_Params.Language_Params));
+               Ada.Text_IO.New_Line;
+            end if;
             declare
-               use Ada.Real_Time;
-               Finish : constant Time := Clock;
+               Cmd_File : Ada.Text_IO.File_Type;
             begin
-               Put_Line ("Total time:" & Duration'Image (To_Duration (Finish - 
Start)));
-               Put_Line ("per iteration:" & Duration'Image (To_Duration 
((Finish - Start) / Cl_Params.Repeat_Count)));
+               Open (Cmd_File, In_File, -Cl_Params.Command_File_Name);
+               Ada.Directories.Set_Directory 
(Ada.Directories.Containing_Directory (-Cl_Params.Command_File_Name));
+               loop
+                  exit when End_Of_File (Cmd_File);
+                  declare
+                     Line : constant String := Get_Line (Cmd_File);
+                  begin
+                     if Line'Length > 0 then
+                        Trace.Put_Line (Line);
+                        if Line (1 .. 2) = "--" then
+                           null;
+                        else
+                           Process_Command (Parse_Context, Language, Line, 
Trace);
+                           Trace.New_Line;
+                        end if;
+                     end if;
+                  end;
+               end loop;
             end;
-         end if;
+         end case;
       end;
+
+      if Ada.Text_IO.Is_Open (Trace_File) then
+         Ada.Text_IO.Close (Trace_File);
+      end if;
    exception
    when SAL.Parameter_Error | Finish =>
       --  From Get_CL_Params; already handled.
@@ -351,6 +978,12 @@ package body Run_Wisi_Common_Parse is
       Put_Line
         ("(error ""unhandled exception: " & Ada.Exceptions.Exception_Name (E) 
& ": " &
            Ada.Exceptions.Exception_Message (E) & """)");
+
+      Trace.Put_Line (GNAT.Traceback.Symbolic.Symbolic_Traceback (E)); -- 
includes Prefix
+      Trace.New_Line;
+      if Ada.Text_IO.Is_Open (Trace_File) then
+         Ada.Text_IO.Close (Trace_File);
+      end if;
    end Parse_File;
 
 end Run_Wisi_Common_Parse;
diff --git a/run_wisi_common_parse.ads b/run_wisi_common_parse.ads
index c00d0071a2..b6023b04ff 100644
--- a/run_wisi_common_parse.ads
+++ b/run_wisi_common_parse.ads
@@ -2,7 +2,7 @@
 --
 --  Common utilities for Gen_Run_Wisi_*_Parse
 --
---  Copyright (C) 2018 - 2019 Free Software Foundation, Inc.
+--  Copyright (C) 2018 - 2022 Free Software Foundation, Inc.
 --
 --  This program is free software; you can redistribute it and/or
 --  modify it under terms of the GNU General Public License as
@@ -18,56 +18,12 @@
 
 pragma License (GPL);
 
-with Ada.Strings.Unbounded;
 with Wisi;
-with WisiToken.Parse.LR.Parser;
+with WisiToken;
+with Wisi.Parse_Context;
 package Run_Wisi_Common_Parse is
 
-   Finish : exception;
-
-   procedure Usage (Parser : in out WisiToken.Parse.LR.Parser.Parser);
-   --  Puts parameter description to Current_Output.
-
-   type Command_Type is (Parse, Refactor);
-
-   type Command_Line_Params (Command : Command_Type) is record
-
-      Source_File_Name : Ada.Strings.Unbounded.Unbounded_String;
-      Lang_Params      : Ada.Strings.Unbounded.Unbounded_String;
-      Repeat_Count     : Integer := 1;
-
-      case Command is
-      when Parse =>
-         Post_Parse_Action : Wisi.Post_Parse_Action_Type;
-         Begin_Byte_Pos    : WisiToken.Buffer_Pos       := 
WisiToken.Invalid_Buffer_Pos;
-         End_Byte_Pos      : WisiToken.Buffer_Pos       := 
WisiToken.Invalid_Buffer_Pos;
-         Goal_Byte_Pos     : WisiToken.Buffer_Pos       := 
WisiToken.Invalid_Buffer_Pos;
-         Begin_Char_Pos    : WisiToken.Buffer_Pos       := 
WisiToken.Buffer_Pos'First;
-         Begin_Line        : WisiToken.Line_Number_Type := 
WisiToken.Line_Number_Type'First;
-         End_Line          : WisiToken.Line_Number_Type := 
WisiToken.Invalid_Line_Number;
-         Begin_Indent      : Integer                    := 0;
-
-      when Refactor =>
-         --  We assume the file contains only the one statement/declaration
-         --  that needs refactoring.
-
-         Refactor_Action : Positive;
-         --  Language-specific
-
-         Edit_Begin : WisiToken.Buffer_Pos;
-         --  Source file byte position at start of expression to refactor.
-      end case;
-   end record;
-
-   function Get_CL_Params (Parser : in out WisiToken.Parse.LR.Parser.Parser) 
return Command_Line_Params;
-   --  For any errors, calls Usage, raises SAL.Parameter_Error.
-   --
-   --  Handles --help by outputing help, raising Finish.
-
-   procedure Parse_File
-     (Parser     : in out WisiToken.Parse.LR.Parser.Parser;
-      Parse_Data : in out Wisi.Parse_Data_Type'Class;
-      Descriptor : in     WisiToken.Descriptor);
-   --  Calls Get_CL_Params, reads in file, parses, does post-parse actions.
+   procedure Parse_File (Language : in Wisi.Parse_Context.Language; Trace : in 
WisiToken.Trace_Access);
+   --  Reads command line, processes command(s).
 
 end Run_Wisi_Common_Parse;
diff --git a/sal-gen_unbounded_definite_queues-gen_image_aux.adb 
b/sal-gen_bounded_definite_doubly_linked_lists-gen_image_aux.adb
similarity index 64%
rename from sal-gen_unbounded_definite_queues-gen_image_aux.adb
rename to sal-gen_bounded_definite_doubly_linked_lists-gen_image_aux.adb
index 2a1990b1ae..5c8cc8062b 100644
--- a/sal-gen_unbounded_definite_queues-gen_image_aux.adb
+++ b/sal-gen_bounded_definite_doubly_linked_lists-gen_image_aux.adb
@@ -1,35 +1,40 @@
---  Abstract :
---
---  See spec.
---
---  Copyright (C) 2018 Free Software Foundation, Inc.
---
---  This library is free software;  you can redistribute it and/or modify it
---  under terms of the  GNU General Public License  as published by the Free
---  Software  Foundation;  either version 3,  or (at your  option) any later
---  version. This library is distributed in the hope that it will be useful,
---  but WITHOUT ANY WARRANTY;  without even the implied warranty of MERCHAN-
---  TABILITY or FITNESS FOR A PARTICULAR PURPOSE.
-
---  As a special exception under Section 7 of GPL version 3, you are granted
---  additional permissions described in the GCC Runtime Library Exception,
---  version 3.1, as published by the Free Software Foundation.
-
-pragma License (Modified_GPL);
-
-with Ada.Strings.Unbounded;
-function SAL.Gen_Unbounded_Definite_Queues.Gen_Image_Aux (Item : in Queue; Aux 
: in Aux_Data) return String
-is
-   use Ada.Strings.Unbounded;
-   Result : Unbounded_String        := To_Unbounded_String ("(");
-   Last   : constant Base_Peek_Type := Item.Count;
-begin
-   for I in 1 .. Last loop
-      Result := Result & Element_Image (Item.Peek (I), Aux);
-      if I /= Last then
-         Result := Result & ", ";
-      end if;
-   end loop;
-   Result := Result & ")";
-   return To_String (Result);
-end SAL.Gen_Unbounded_Definite_Queues.Gen_Image_Aux;
+--  Abstract :
+--
+--  See spec.
+--
+--  Copyright (C) 2020 Free Software Foundation, Inc.
+--
+--  This library is free software;  you can redistribute it and/or modify it
+--  under terms of the  GNU General Public License  as published by the Free
+--  Software  Foundation;  either version 3,  or (at your  option) any later
+--  version. This library is distributed in the hope that it will be useful,
+--  but WITHOUT ANY WARRANTY;  without even the implied warranty of MERCHAN-
+--  TABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+
+--  As a special exception under Section 7 of GPL version 3, you are granted
+--  additional permissions described in the GCC Runtime Library Exception,
+--  version 3.1, as published by the Free Software Foundation.
+
+pragma License (Modified_GPL);
+
+with Ada.Strings.Unbounded;
+function SAL.Gen_Bounded_Definite_Doubly_Linked_Lists.Gen_Image_Aux (Item : in 
List; Aux : in Aux_Data) return String
+is
+   use Ada.Strings.Unbounded;
+   Result : Unbounded_String        := To_Unbounded_String ("(");
+   Node   : Base_Peek_Type := Item.Head;
+begin
+   if Node /= Invalid_Peek_Index then
+      loop
+         Result := Result & Element_Image (Item.Nodes (Node).Element, Aux);
+
+         Node := Item.Nodes (Node).Next;
+
+         exit when Node = Invalid_Peek_Index;
+
+         Result := Result & ", ";
+      end loop;
+   end if;
+   Result := Result & ")";
+   return To_String (Result);
+end SAL.Gen_Bounded_Definite_Doubly_Linked_Lists.Gen_Image_Aux;
diff --git a/sal-gen_unbounded_definite_queues-gen_image_aux.ads 
b/sal-gen_bounded_definite_doubly_linked_lists-gen_image_aux.ads
similarity index 79%
rename from sal-gen_unbounded_definite_queues-gen_image_aux.ads
rename to sal-gen_bounded_definite_doubly_linked_lists-gen_image_aux.ads
index dafa8d9fe4..b4e36b73e8 100644
--- a/sal-gen_unbounded_definite_queues-gen_image_aux.ads
+++ b/sal-gen_bounded_definite_doubly_linked_lists-gen_image_aux.ads
@@ -1,23 +1,23 @@
---  Abstract :
---
---  Image with auxiliary data for instantiations of parent.
---
---  Copyright (C) 2018 Free Software Foundation, Inc.
---
---  This library is free software;  you can redistribute it and/or modify it
---  under terms of the  GNU General Public License  as published by the Free
---  Software  Foundation;  either version 3,  or (at your  option) any later
---  version. This library is distributed in the hope that it will be useful,
---  but WITHOUT ANY WARRANTY;  without even the implied warranty of MERCHAN-
---  TABILITY or FITNESS FOR A PARTICULAR PURPOSE.
-
---  As a special exception under Section 7 of GPL version 3, you are granted
---  additional permissions described in the GCC Runtime Library Exception,
---  version 3.1, as published by the Free Software Foundation.
-
-pragma License (Modified_GPL);
-
-generic
-   type Aux_Data (<>) is private;
-   with function Element_Image (Item : in Element_Type; Aux : in Aux_Data) 
return String;
-function SAL.Gen_Unbounded_Definite_Queues.Gen_Image_Aux (Item : in Queue; Aux 
: in Aux_Data) return String;
+--  Abstract :
+--
+--  Image with auxiliary data for instantiations of parent.
+--
+--  Copyright (C) 2020 Free Software Foundation, Inc.
+--
+--  This library is free software;  you can redistribute it and/or modify it
+--  under terms of the  GNU General Public License  as published by the Free
+--  Software  Foundation;  either version 3,  or (at your  option) any later
+--  version. This library is distributed in the hope that it will be useful,
+--  but WITHOUT ANY WARRANTY;  without even the implied warranty of MERCHAN-
+--  TABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+
+--  As a special exception under Section 7 of GPL version 3, you are granted
+--  additional permissions described in the GCC Runtime Library Exception,
+--  version 3.1, as published by the Free Software Foundation.
+
+pragma License (Modified_GPL);
+
+generic
+   type Aux_Data (<>) is limited private;
+   with function Element_Image (Item : in Element_Type; Aux : in Aux_Data) 
return String;
+function SAL.Gen_Bounded_Definite_Doubly_Linked_Lists.Gen_Image_Aux (Item : in 
List; Aux : in Aux_Data) return String;
diff --git a/sal-gen_bounded_definite_doubly_linked_lists.adb 
b/sal-gen_bounded_definite_doubly_linked_lists.adb
new file mode 100644
index 0000000000..9d1c51069f
--- /dev/null
+++ b/sal-gen_bounded_definite_doubly_linked_lists.adb
@@ -0,0 +1,348 @@
+--  Abstract :
+--
+--  see spec
+--
+--  Copyright (C) 2020, 2021 Free Software Foundation, Inc.
+--
+--  This library is free software; you can redistribute it and/or
+--  modify it under terms of the GNU General Public License as
+--  published by the Free Software Foundation; either version 3, or (at
+--  your option) any later version. This library is distributed in the
+--  hope that it will be useful, but WITHOUT ANY WARRANTY; without even
+--  the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+--  PURPOSE. See the GNU General Public License for more details. You
+--  should have received a copy of the GNU General Public License
+--  distributed with this program; see file COPYING. If not, write to
+--  the Free Software Foundation, 59 Temple Place - Suite 330, Boston,
+--  MA 02111-1307, USA.
+--
+--  As a special exception under Section 7 of GPL version 3, you are granted
+--  additional permissions described in the GCC Runtime Library Exception,
+--  version 3.1, as published by the Free Software Foundation.
+
+pragma License (Modified_GPL);
+
+package body SAL.Gen_Bounded_Definite_Doubly_Linked_Lists is
+
+   function Allocate (Container : in out List) return Peek_Type
+   is begin
+      if Container.Free_Last = 0 then
+         raise SAL.Container_Full;
+      end if;
+      return Result : constant Peek_Type := Container.Free_List 
(Container.Free_Last) do
+         Container.Free_List (Container.Free_Last) := Invalid_Peek_Index;
+         Container.Free_Last := @ - 1;
+      end return;
+   end Allocate;
+
+   procedure Free (Container : in out List; Node : in out Base_Peek_Type)
+   is begin
+      Container.Free_Last := @ + 1;
+      Container.Free_List (Container.Free_Last) := Node;
+      Node := Invalid_Peek_Index;
+   end Free;
+
+   procedure Delete_Node (Container : in out List; Node : in out 
Base_Peek_Type)
+   is begin
+      if Container.Nodes (Node).Next = Invalid_Peek_Index then
+         Container.Tail := Container.Nodes (Node).Prev;
+      else
+         Container.Nodes (Container.Nodes (Node).Next).Prev := Container.Nodes 
(Node).Prev;
+      end if;
+      if Container.Nodes (Node).Prev = Invalid_Peek_Index then
+         Container.Head := Container.Nodes (Node).Next;
+      else
+         Container.Nodes (Container.Nodes (Node).Prev).Next := Container.Nodes 
(Node).Next;
+      end if;
+      Free (Container, Node);
+   end Delete_Node;
+
+   ---------
+   --  Public operations, declaration order.
+
+   procedure Initialize (Container : in out List)
+   is begin
+      for I in 1 .. Container.Size loop
+         Container.Free_List (I) := I;
+      end loop;
+      Container.Free_Last := Container.Size;
+   end Initialize;
+
+   function Empty_List (Size : in Peek_Type) return List
+   is begin
+      return Result : List (Size) do
+         for I in 1 .. Size loop
+            Result.Free_List (I) := I;
+         end loop;
+         Result.Free_Last := Size;
+      end return;
+   end Empty_List;
+
+   procedure Clear (Container : in out List)
+   is begin
+      Container.Head := Invalid_Peek_Index;
+      Container.Tail := Invalid_Peek_Index;
+      for I in 1 .. Container.Size loop
+         Container.Free_List (I) := I;
+      end loop;
+      Container.Free_Last := Container.Size;
+
+      for I in 1 .. Container.Size loop
+         Container.Nodes (I).Next := Invalid_Peek_Index;
+         Container.Nodes (I).Prev := Invalid_Peek_Index;
+      end loop;
+
+      Container.Count := 0;
+   end Clear;
+
+   function Length (Container : in List) return Ada.Containers.Count_Type
+   is begin
+      return Container.Count;
+   end Length;
+
+   procedure Append (Container : in out List; Element : in Element_Type)
+   is
+      use all type Ada.Containers.Count_Type;
+      New_Node : constant Peek_Type := Allocate (Container);
+   begin
+      Container.Nodes (New_Node) :=
+        (Element => Element,
+         Prev    => Container.Tail,
+         Next    => Invalid_Peek_Index);
+
+      if Container.Tail = Invalid_Peek_Index then
+         Container.Head := New_Node;
+         Container.Tail := New_Node;
+      else
+         Container.Nodes (Container.Tail).Next := New_Node;
+         Container.Tail                        := New_Node;
+      end if;
+      Container.Count := Container.Count + 1;
+   end Append;
+
+   procedure Prepend (Container : in out List; Element : in Element_Type)
+   is
+      use all type Ada.Containers.Count_Type;
+      New_Node : constant Peek_Type := Allocate (Container);
+   begin
+      Container.Nodes (New_Node) :=
+        (Element => Element,
+         Prev    => Invalid_Peek_Index,
+         Next    => Container.Head);
+
+      if Container.Tail = Invalid_Peek_Index then
+         Container.Head := New_Node;
+         Container.Tail := New_Node;
+      else
+         Container.Nodes (Container.Head).Prev := New_Node;
+         Container.Head                        := New_Node;
+      end if;
+      Container.Count := Container.Count + 1;
+   end Prepend;
+
+   function To_List (Element : in Element_Type; Size : in Peek_Type) return 
List
+   is begin
+      return Result : List (Size) do
+         Result.Append (Element);
+      end return;
+   end To_List;
+
+   function Has_Element (Position : in Cursor) return Boolean
+   is begin
+      return Position.Ptr /= Invalid_Peek_Index;
+   end Has_Element;
+
+   function First (Container : in List) return Cursor
+   is begin
+      if Container.Head = Invalid_Peek_Index then
+         return (Ptr => Invalid_Peek_Index);
+      else
+         return (Ptr => Container.Head);
+      end if;
+   end First;
+
+   function Last (Container : in List) return Cursor
+   is begin
+      if Container.Tail = Invalid_Peek_Index then
+         return (Ptr => Invalid_Peek_Index);
+      else
+         return (Ptr => Container.Tail);
+      end if;
+   end Last;
+
+   procedure Next (Container : in List; Position : in out Cursor)
+   is begin
+      if Position.Ptr /= Invalid_Peek_Index then
+         if Container.Nodes (Position.Ptr).Next = Invalid_Peek_Index then
+            Position.Ptr := Invalid_Peek_Index;
+         else
+            Position.Ptr := Container.Nodes (Position.Ptr).Next;
+         end if;
+      end if;
+   end Next;
+
+   function Next (Container : in List; Position : in Cursor) return Cursor
+   is begin
+      if Position.Ptr = Invalid_Peek_Index then
+         return Position;
+      else
+         if Container.Nodes (Position.Ptr).Next = Invalid_Peek_Index then
+            return (Ptr => Invalid_Peek_Index);
+         else
+            return (Ptr => Container.Nodes (Position.Ptr).Next);
+         end if;
+      end if;
+   end Next;
+
+   procedure Previous (Container : in List; Position : in out Cursor)
+   is begin
+      if Position.Ptr = Invalid_Peek_Index then
+         return;
+      else
+         if Container.Nodes (Position.Ptr).Prev = Invalid_Peek_Index then
+            Position.Ptr := Invalid_Peek_Index;
+         else
+            Position.Ptr := Container.Nodes (Position.Ptr).Prev;
+         end if;
+      end if;
+   end Previous;
+
+   function Previous (Container : in List; Position : in Cursor) return Cursor
+   is begin
+      if Position.Ptr = Invalid_Peek_Index then
+         return Position;
+      else
+         if Container.Nodes (Position.Ptr).Prev = Invalid_Peek_Index then
+            return (Ptr => Invalid_Peek_Index);
+         else
+            return (Ptr => Container.Nodes (Position.Ptr).Prev);
+         end if;
+      end if;
+   end Previous;
+
+   function Element (Container : in List; Position : in Cursor) return 
Element_Type
+   is begin
+      return Container.Nodes (Position.Ptr).Element;
+   end Element;
+
+   procedure Delete (Container : in out List; Position : in out Cursor)
+   is
+      use all type Ada.Containers.Count_Type;
+   begin
+      Delete_Node (Container, Position.Ptr);
+      Position        := (Ptr => Invalid_Peek_Index);
+      Container.Count := Container.Count - 1;
+   end Delete;
+
+   procedure Delete_First (Container : in out List)
+   is
+      use all type Ada.Containers.Count_Type;
+      Node : Base_Peek_Type := Container.Head;
+   begin
+      Delete_Node (Container, Node);
+      Container.Count := Container.Count - 1;
+   end Delete_First;
+
+   function Append (Container : in out List; Element : in Element_Type) return 
Cursor
+   is begin
+      Append (Container, Element);
+      return (Ptr => Container.Tail);
+   end Append;
+
+   function Insert
+     (Container : in out List;
+      Before    : in     Cursor;
+      Element   : in     Element_Type)
+     return Cursor
+   is
+      use all type Ada.Containers.Count_Type;
+   begin
+      if Before = (Ptr => Invalid_Peek_Index) then
+         return Container.Append (Element);
+      else
+         return Result : Cursor do
+            if Before.Ptr = Container.Head then
+               declare
+                  --  old list: before ...
+                  --  newlist:  new  before ...
+                  New_Node : constant Peek_Type := Allocate (Container);
+               begin
+                  Container.Nodes (New_Node) :=
+                    (Element => Element,
+                     Prev    => Invalid_Peek_Index,
+                     Next    => Before.Ptr);
+
+                  Container.Nodes (Before.Ptr).Prev := New_Node;
+                  Container.Head                    := New_Node;
+                  Result.Ptr                        := New_Node;
+               end;
+            else
+               declare
+                  --  old list: ... prev  before ...
+                  --  newlist:  ... prev  new  before ...
+                  New_Node : constant Peek_Type := Allocate (Container);
+               begin
+                  Container.Nodes (New_Node) :=
+                    (Element => Element,
+                     Prev    => Container.Nodes (Before.Ptr).Prev,
+                     Next    => Before.Ptr);
+
+                  Container.Nodes (Container.Nodes (Before.Ptr).Prev).Next := 
New_Node;
+
+                  Container.Nodes (Before.Ptr).Prev := New_Node;
+
+                  Result.Ptr := New_Node;
+               end;
+            end if;
+            Container.Count := Container.Count + 1;
+         end return;
+      end if;
+   end Insert;
+
+   procedure Insert
+     (Container : in out List;
+      Before    : in     Cursor;
+      Element   : in     Element_Type)
+   is
+      Junk : Cursor := Insert (Container, Before, Element);
+      pragma Unreferenced (Junk);
+   begin
+      null;
+   end Insert;
+
+   function Constant_Ref (Container : aliased in List; Position : in Cursor) 
return Constant_Reference_Type
+   is begin
+      return (Element => Container.Nodes (Position.Ptr).Element'Access, Dummy 
=> 1);
+   end Constant_Ref;
+
+   function Variable_Ref (Container : aliased in out List; Position : in 
Cursor) return Variable_Reference_Type
+   is begin
+      return (Element => Container.Nodes (Position.Ptr).Element'Access, Dummy 
=> 1);
+   end Variable_Ref;
+
+   function Iterate (Container : aliased in List) return 
Iterator_Interfaces.Reversible_Iterator'Class
+   is begin
+      return Iterator'(Container => Container'Access);
+   end Iterate;
+
+   overriding function First (Object : Iterator) return Cursor
+   is begin
+      return First (Object.Container.all);
+   end First;
+
+   overriding function Last  (Object : Iterator) return Cursor
+   is begin
+      return Last (Object.Container.all);
+   end Last;
+
+   overriding function Next (Object : in Iterator; Position : in Cursor) 
return Cursor
+   is begin
+      return Next (Object.Container.all, Position);
+   end Next;
+
+   overriding function Previous (Object : in Iterator; Position : in Cursor) 
return Cursor
+   is begin
+      return Previous (Object.Container.all, Position);
+   end Previous;
+
+end SAL.Gen_Bounded_Definite_Doubly_Linked_Lists;
diff --git a/sal-gen_definite_doubly_linked_lists.ads 
b/sal-gen_bounded_definite_doubly_linked_lists.ads
similarity index 62%
copy from sal-gen_definite_doubly_linked_lists.ads
copy to sal-gen_bounded_definite_doubly_linked_lists.ads
index 4d68707007..949820bfc6 100644
--- a/sal-gen_definite_doubly_linked_lists.ads
+++ b/sal-gen_bounded_definite_doubly_linked_lists.ads
@@ -1,7 +1,6 @@
 --  Abstract :
 --
---  A generic doubly linked list with definite elements, allowing
---  permanent references to elements.
+--  A generic bounded doubly linked list with definite elements; no dynamic 
memory.
 --
 --  Copyright (C) 2017 - 2021 Free Software Foundation, Inc.
 --
@@ -24,17 +23,20 @@
 pragma License (Modified_GPL);
 
 with Ada.Containers;
-with Ada.Finalization;
 with Ada.Iterator_Interfaces;
-with Ada.Unchecked_Deallocation;
 generic
    type Element_Type is private;
-package SAL.Gen_Definite_Doubly_Linked_Lists is
-
-   type List is new Ada.Finalization.Controlled with private
+package SAL.Gen_Bounded_Definite_Doubly_Linked_Lists is
+
+   type List (Size : Peek_Type) is tagged private
+   --  We'd like to force initialization; one choice is to use
+   --  Ada.Finalization, but that's pretty heavy. Another is to use <>
+   --  instead of Size, but then we can't declare a List in a record type
+   --  (i.e. a WisiToken Configuration). So we do this, and let the
+   --  user cope.
    with
-      Constant_Indexing => Constant_Reference,
-      Variable_Indexing => Variable_Reference,
+      Constant_Indexing => Constant_Ref,
+      Variable_Indexing => Variable_Ref,
       Default_Iterator  => Iterate,
       Iterator_Element  => Element_Type;
 
@@ -44,23 +46,24 @@ package SAL.Gen_Definite_Doubly_Linked_Lists is
    type List_Access is access all List;
    for List_Access'Storage_Size use 0;
 
-   Empty_List : constant List;
-
-   overriding procedure Adjust (Container : in out List);
-   --  Deep copy.
+   procedure Initialize (Container : in out List);
 
-   overriding procedure Finalize (Container : in out List);
-   --  Free all items in List.
+   function Empty_List (Size : in Peek_Type) return List;
 
-   procedure Clear (Container : in out List) renames Finalize;
+   procedure Clear (Container : in out List);
+   --  Set Container to empty.
 
    function Length (Container : in List) return Ada.Containers.Count_Type;
 
    procedure Append (Container : in out List; Element : in Element_Type);
+   --  Raises SAL.Container_Full if Container is full, or if it is not
+   --  initialized.
 
    procedure Prepend (Container : in out List; Element : in Element_Type);
+   --  Raises SAL.Container_Full if Container is full, or if it is not
+   --  initialized.
 
-   function To_List (Element : in Element_Type) return List;
+   function To_List (Element : in Element_Type; Size : in Peek_Type) return 
List;
 
    type Cursor is private;
 
@@ -70,19 +73,19 @@ package SAL.Gen_Definite_Doubly_Linked_Lists is
    function First (Container : in List) return Cursor;
    function Last (Container : in List) return Cursor;
 
-   procedure Next (Position : in out Cursor)
+   procedure Next (Container : in List; Position : in out Cursor)
    with Pre => Has_Element (Position);
 
-   function Next (Position : in Cursor) return Cursor
+   function Next (Container : in List; Position : in Cursor) return Cursor
    with Pre => Has_Element (Position);
 
-   procedure Previous (Position : in out Cursor)
+   procedure Previous (Container : in List; Position : in out Cursor)
    with Pre => Has_Element (Position);
 
-   function Previous (Position : in Cursor) return Cursor
+   function Previous (Container : in List; Position : in Cursor) return Cursor
    with Pre => Has_Element (Position);
 
-   function Element (Position : in Cursor) return Element_Type
+   function Element (Container : in List; Position : in Cursor) return 
Element_Type
    with Pre => Has_Element (Position);
 
    procedure Delete (Container : in out List; Position : in out Cursor)
@@ -103,26 +106,16 @@ package SAL.Gen_Definite_Doubly_Linked_Lists is
      return Cursor;
    --  If Before is No_Element, insert after Last.
 
-   function Persistent_Ref (Position : in Cursor) return access Element_Type
-   with Pre => Has_Element (Position);
-
    type Constant_Reference_Type (Element : not null access constant 
Element_Type) is private with
      Implicit_Dereference => Element;
 
-   function Constant_Reference (Container : in List; Position : in Cursor) 
return Constant_Reference_Type
-   with Inline, Pre => Has_Element (Position);
-   --  Not 'Constant_Ref' because that is taken, and it is wrong for 
Constant_Indexing
-
-   function Constant_Ref (Position : in Cursor) return Constant_Reference_Type
+   function Constant_Ref (Container : aliased in List; Position : in Cursor) 
return Constant_Reference_Type
    with Inline, Pre => Has_Element (Position);
 
    type Variable_Reference_Type (Element : not null access Element_Type) is 
private with
      Implicit_Dereference => Element;
 
-   function Variable_Reference (Container : in List; Position : in Cursor) 
return Variable_Reference_Type
-   with Inline, Pre => Has_Element (Position);
-
-   function Variable_Ref (Position : in Cursor) return Variable_Reference_Type
+   function Variable_Ref (Container : aliased in out List; Position : in 
Cursor) return Variable_Reference_Type
    with Inline, Pre => Has_Element (Position);
 
    package Iterator_Interfaces is new Ada.Iterator_Interfaces (Cursor, 
Has_Element);
@@ -130,26 +123,33 @@ package SAL.Gen_Definite_Doubly_Linked_Lists is
    function Iterate (Container : aliased in List) return 
Iterator_Interfaces.Reversible_Iterator'Class;
 
 private
-   type Node_Type;
-
-   type Node_Access is access Node_Type;
 
    type Node_Type is record
       Element : aliased Element_Type;
-      Prev    : Node_Access;
-      Next    : Node_Access;
+      Prev    : Base_Peek_Type := Invalid_Peek_Index; -- index in List.Nodes
+      Next    : Base_Peek_Type := Invalid_Peek_Index;
    end record;
 
-   procedure Free is new Ada.Unchecked_Deallocation (Node_Type, Node_Access);
+   type Node_Array_Type is array (Peek_Type range <>) of Node_Type;
+
+   type Index_Array is array (Peek_Type range <>) of Base_Peek_Type;
+   --  We'd like to use a different index type for Free_List, but Ada
+   --  does not allow Integer (Size); "discriminant in constraint must
+   --  appear alone"
+
+   type List (Size : Peek_Type) is tagged record
+      Head : Base_Peek_Type := Invalid_Peek_Index;
+      Tail : Base_Peek_Type := Invalid_Peek_Index;
+
+      Nodes     : Node_Array_Type (1 .. Size);
+      Free_List : Index_Array (1 .. Size);
+      Free_Last : Base_Peek_Type := 0; --  Append raises Container_Full if the user does not call Initialize.
 
-   type List is new Ada.Finalization.Controlled with record
-      Head  : Node_Access               := null;
-      Tail  : Node_Access               := null;
       Count : Ada.Containers.Count_Type := 0;
    end record;
 
    type Cursor is record
-      Ptr : Node_Access;
+      Ptr : Base_Peek_Type := Invalid_Peek_Index; -- index of Node in 
List.Nodes
    end record;
 
    type Constant_Reference_Type (Element : not null access constant 
Element_Type) is
@@ -162,9 +162,7 @@ private
       Dummy : Integer := raise Program_Error with "uninitialized reference";
    end record;
 
-   No_Element : constant Cursor := (Ptr => null);
-
-   Empty_List : constant List := (Ada.Finalization.Controlled with null, null, 
0);
+   No_Element : constant Cursor := (Ptr => Invalid_Peek_Index);
 
    type Iterator (Container : not null access constant List) is new 
Iterator_Interfaces.Reversible_Iterator with
    null record;
@@ -180,4 +178,4 @@ private
      (Object   : Iterator;
       Position : Cursor) return Cursor;
 
-end SAL.Gen_Definite_Doubly_Linked_Lists;
+end SAL.Gen_Bounded_Definite_Doubly_Linked_Lists;
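
The bounded list above carries its capacity in the Size discriminant, and Initialize (or Empty_List) must be called before the first Append or Prepend; otherwise the free list is empty and SAL.Container_Full is raised, as the spec comments note. A minimal usage sketch, assuming an Integer instantiation (the procedure and package names here are placeholders, not part of the library):

   with Ada.Text_IO;
   with SAL.Gen_Bounded_Definite_Doubly_Linked_Lists;
   procedure Bounded_List_Demo is
      --  Placeholder instantiation; any definite type works for Element_Type.
      package Int_Lists is new SAL.Gen_Bounded_Definite_Doubly_Linked_Lists
        (Element_Type => Integer);
      L : aliased Int_Lists.List (Size => 8);
   begin
      --  Build the free list; without this Free_Last stays 0 and the first
      --  Append raises SAL.Container_Full (see the spec comments above).
      L.Initialize;
      L.Append (1);
      L.Prepend (0);
      for E of L loop  --  uses Default_Iterator / Constant_Ref from the spec
         Ada.Text_IO.Put_Line (Integer'Image (E));
      end loop;
   end Bounded_List_Demo;

Empty_List (Size) returns an already-initialized list and can be used as an initializer instead of calling Initialize.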
diff --git a/sal-gen_bounded_definite_vectors-gen_image.adb 
b/sal-gen_bounded_definite_vectors-gen_image.adb
deleted file mode 100644
index ae901fe2ac..0000000000
--- a/sal-gen_bounded_definite_vectors-gen_image.adb
+++ /dev/null
@@ -1,40 +0,0 @@
---  Abstract :
---
---  See spec.
---
---  Copyright (C) 2018 - 2019 Free Software Foundation, Inc.
---
---  This library is free software;  you can redistribute it and/or modify it
---  under terms of the  GNU General Public License  as published by the Free
---  Software  Foundation;  either version 3,  or (at your  option) any later
---  version. This library is distributed in the hope that it will be useful,
---  but WITHOUT ANY WARRANTY;  without even the implied warranty of MERCHAN-
---  TABILITY or FITNESS FOR A PARTICULAR PURPOSE.
-
---  As a special exception under Section 7 of GPL version 3, you are granted
---  additional permissions described in the GCC Runtime Library Exception,
---  version 3.1, as published by the Free Software Foundation.
-
-pragma License (Modified_GPL);
-
-with Ada.Strings.Fixed;
-with Ada.Strings.Unbounded;
-function SAL.Gen_Bounded_Definite_Vectors.Gen_Image (Item : in Vector) return 
String
-is
-   use Ada.Strings;
-   use Ada.Strings.Unbounded;
-   Result : Unbounded_String        := To_Unbounded_String ("(");
-   Last   : constant Base_Peek_Type := To_Peek_Index (Item.Last);
-begin
-   for I in 1 .. Last loop
-      Result := Result &
-        (if Trim
-         then Fixed.Trim (Element_Image (Item.Elements (I)), Left)
-         else Element_Image (Item.Elements (I)));
-      if I /= Last then
-         Result := Result & ", ";
-      end if;
-   end loop;
-   Result := Result & ")";
-   return To_String (Result);
-end SAL.Gen_Bounded_Definite_Vectors.Gen_Image;
diff --git a/sal-gen_bounded_definite_vectors.adb 
b/sal-gen_bounded_definite_vectors.adb
index b6163a6f4e..9130b6d96a 100644
--- a/sal-gen_bounded_definite_vectors.adb
+++ b/sal-gen_bounded_definite_vectors.adb
@@ -2,7 +2,7 @@
 --
 --  See spec.
 --
---  Copyright (C) 2017 - 2020 Free Software Foundation, Inc.
+--  Copyright (C) 2017 - 2021 Free Software Foundation, Inc.
 --
 --  This library is free software;  you can redistribute it and/or modify it
 --  under terms of the  GNU General Public License  as published by the Free
@@ -98,7 +98,8 @@ is
       --  WORKAROUND: If init Result with ":= Left", GNAT Community 2019
       --  checks Default_Initial_Condition (which fails when Left is not
       --  empty)! That is only supposed to be checked when initialized by
-      --  default. Reported to AdaCore as ticket S724-042.
+      --  default. Reported to AdaCore as ticket S724-042. Fixed in GNAT
+      --  Community 2021; keeping workaround until we drop support for 2020.
       return Result : Vector do
          Result := Left;
          Append (Result, Right);
diff --git a/sal-gen_definite_doubly_linked_lists_sorted-gen_image.adb 
b/sal-gen_definite_doubly_linked_lists-gen_image.adb
similarity index 85%
rename from sal-gen_definite_doubly_linked_lists_sorted-gen_image.adb
rename to sal-gen_definite_doubly_linked_lists-gen_image.adb
index 6e6efb3c11..b715019d74 100644
--- a/sal-gen_definite_doubly_linked_lists_sorted-gen_image.adb
+++ b/sal-gen_definite_doubly_linked_lists-gen_image.adb
@@ -2,7 +2,7 @@
 --
 --  See spec.
 --
---  Copyright (C) 2018 Free Software Foundation, Inc.
+--  Copyright (C) 2019 Free Software Foundation, Inc.
 --
 --  This library is free software;  you can redistribute it and/or modify it
 --  under terms of the  GNU General Public License  as published by the Free
@@ -18,9 +18,10 @@
 pragma License (Modified_GPL);
 
 with Ada.Strings.Unbounded;
-function SAL.Gen_Definite_Doubly_Linked_Lists_Sorted.Gen_Image
+function SAL.Gen_Definite_Doubly_Linked_Lists.Gen_Image
   (Item : in List; Strict : in Boolean := False) return String
 is
+   use all type Ada.Containers.Count_Type;
    use Ada.Strings;
    use Ada.Strings.Unbounded;
    Result     : Unbounded_String := To_Unbounded_String ("(");
@@ -44,4 +45,4 @@ begin
       Result := Result & ")";
       return To_String (Result);
    end if;
-end SAL.Gen_Definite_Doubly_Linked_Lists_Sorted.Gen_Image;
+end SAL.Gen_Definite_Doubly_Linked_Lists.Gen_Image;
diff --git a/sal-gen_definite_doubly_linked_lists_sorted-gen_image.ads 
b/sal-gen_definite_doubly_linked_lists-gen_image.ads
similarity index 87%
rename from sal-gen_definite_doubly_linked_lists_sorted-gen_image.ads
rename to sal-gen_definite_doubly_linked_lists-gen_image.ads
index 4080707dfb..6c2ff2dd02 100644
--- a/sal-gen_definite_doubly_linked_lists_sorted-gen_image.ads
+++ b/sal-gen_definite_doubly_linked_lists-gen_image.ads
@@ -2,7 +2,7 @@
 --
 --  Image of parent.
 --
---  Copyright (C) 2018 Free Software Foundation, Inc.
+--  Copyright (C) 2019 Free Software Foundation, Inc.
 --
 --  This library is free software;  you can redistribute it and/or modify it
 --  under terms of the  GNU General Public License  as published by the Free
@@ -19,7 +19,7 @@ pragma License (Modified_GPL);
 
 generic
    with function Element_Image (Item : in Element_Type) return String;
-function SAL.Gen_Definite_Doubly_Linked_Lists_Sorted.Gen_Image
+function SAL.Gen_Definite_Doubly_Linked_Lists.Gen_Image
   (Item : in List; Strict : in Boolean := False) return String;
 --  Image of Item, in Ada aggregate syntax. If Strict, use correct
 --  syntax for 0 and 1 item; otherwise, use () and (item).
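
A sketch of how Gen_Image is instantiated and how Strict changes the output, based on the spec comment above (names are illustrative, and the exact strict-mode text is not shown in this diff):

   with Ada.Text_IO;
   with SAL.Gen_Definite_Doubly_Linked_Lists;
   with SAL.Gen_Definite_Doubly_Linked_Lists.Gen_Image;
   procedure Gen_Image_Demo is
      package Int_Lists is new SAL.Gen_Definite_Doubly_Linked_Lists (Integer);
      function Image is new Int_Lists.Gen_Image (Element_Image => Integer'Image);
      L : Int_Lists.List;
   begin
      L.Append (1);
      --  Default: compact "( 1)", which is not legal Ada for a one-element aggregate.
      Ada.Text_IO.Put_Line (Image (L));
      --  Strict: uses syntax that is legal Ada for 0 or 1 item, per the spec comment.
      Ada.Text_IO.Put_Line (Image (L, Strict => True));
   end Gen_Image_Demo;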
diff --git a/sal-gen_unbounded_definite_stacks-gen_image_aux.adb 
b/sal-gen_definite_doubly_linked_lists-gen_image_aux.adb
similarity index 56%
rename from sal-gen_unbounded_definite_stacks-gen_image_aux.adb
rename to sal-gen_definite_doubly_linked_lists-gen_image_aux.adb
index c4b52a683b..0354ce2092 100644
--- a/sal-gen_unbounded_definite_stacks-gen_image_aux.adb
+++ b/sal-gen_definite_doubly_linked_lists-gen_image_aux.adb
@@ -1,42 +1,47 @@
---  Abstract :
---
---  See spec.
---
---  Copyright (C) 2018 Free Software Foundation, Inc.
---
---  This library is free software;  you can redistribute it and/or modify it
---  under terms of the  GNU General Public License  as published by the Free
---  Software  Foundation;  either version 3,  or (at your  option) any later
---  version. This library is distributed in the hope that it will be useful,
---  but WITHOUT ANY WARRANTY;  without even the implied warranty of MERCHAN-
---  TABILITY or FITNESS FOR A PARTICULAR PURPOSE.
-
---  As a special exception under Section 7 of GPL version 3, you are granted
---  additional permissions described in the GCC Runtime Library Exception,
---  version 3.1, as published by the Free Software Foundation.
-
-pragma License (Modified_GPL);
-
-with Ada.Strings.Unbounded;
-function SAL.Gen_Unbounded_Definite_Stacks.Gen_Image_Aux
-  (Item  : in Stack;
-   Aux   : in Aux_Data;
-   Depth : in SAL.Base_Peek_Type := 0)
-  return String
-is
-   use Ada.Strings.Unbounded;
-   Result : Unbounded_String        := To_Unbounded_String ("(");
-   Last   : constant Base_Peek_Type :=
-     (if Depth = 0
-      then Item.Top
-      else Base_Peek_Type'Min (Depth, Item.Top));
-begin
-   for I in 1 .. Last loop
-      Result := Result & Element_Image (Item.Peek (I), Aux);
-      if I /= Last then
-         Result := Result & ", ";
-      end if;
-   end loop;
-   Result := Result & ")";
-   return To_String (Result);
-end SAL.Gen_Unbounded_Definite_Stacks.Gen_Image_Aux;
+--  Abstract :
+--
+--  See spec.
+--
+--  Copyright (C) 2021, 2022 Free Software Foundation, Inc.
+--
+--  This library is free software;  you can redistribute it and/or modify it
+--  under terms of the  GNU General Public License  as published by the Free
+--  Software  Foundation;  either version 3,  or (at your  option) any later
+--  version. This library is distributed in the hope that it will be useful,
+--  but WITHOUT ANY WARRANTY;  without even the implied warranty of MERCHAN-
+--  TABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+
+--  As a special exception under Section 7 of GPL version 3, you are granted
+--  additional permissions described in the GCC Runtime Library Exception,
+--  version 3.1, as published by the Free Software Foundation.
+
+pragma License (Modified_GPL);
+
+with Ada.Strings.Unbounded;
+function SAL.Gen_Definite_Doubly_Linked_Lists.Gen_Image_Aux
+  (Item  : in List;
+   Aux   : in Aux_Data;
+   First : in Cursor := No_Element)
+  return String
+is
+   use Ada.Strings.Unbounded;
+   Result : Unbounded_String := To_Unbounded_String ("(");
+   Node   : Node_Access      :=
+     (if First = No_Element
+      then Item.Head
+      else First.Ptr);
+begin
+   if Node /= null then
+      loop
+         Append (Result, Element_Image (Node.Element, Aux));
+
+         Node := Node.Next;
+
+         exit when Node = null;
+
+         Append (Result, ", ");
+      end loop;
+   end if;
+   Append (Result, ")");
+   return To_String (Result);
+end SAL.Gen_Definite_Doubly_Linked_Lists.Gen_Image_Aux;
diff --git a/sal-gen_bounded_definite_vectors-gen_image.ads 
b/sal-gen_definite_doubly_linked_lists-gen_image_aux.ads
similarity index 58%
rename from sal-gen_bounded_definite_vectors-gen_image.ads
rename to sal-gen_definite_doubly_linked_lists-gen_image_aux.ads
index 950b9d0793..0b8b445028 100644
--- a/sal-gen_bounded_definite_vectors-gen_image.ads
+++ b/sal-gen_definite_doubly_linked_lists-gen_image_aux.ads
@@ -1,23 +1,29 @@
---  Abstract :
---
---  Image for instantiations of parent.
---
---  Copyright (C) 2018 Free Software Foundation, Inc.
---
---  This library is free software;  you can redistribute it and/or modify it
---  under terms of the  GNU General Public License  as published by the Free
---  Software  Foundation;  either version 3,  or (at your  option) any later
---  version. This library is distributed in the hope that it will be useful,
---  but WITHOUT ANY WARRANTY;  without even the implied warranty of MERCHAN-
---  TABILITY or FITNESS FOR A PARTICULAR PURPOSE.
-
---  As a special exception under Section 7 of GPL version 3, you are granted
---  additional permissions described in the GCC Runtime Library Exception,
---  version 3.1, as published by the Free Software Foundation.
-
-pragma License (Modified_GPL);
-
-generic
-   with function Element_Image (Item : in Element_Type) return String;
-   Trim : in Boolean;
-function SAL.Gen_Bounded_Definite_Vectors.Gen_Image (Item : in Vector) return 
String;
+--  Abstract :
+--
+--  Image with auxiliary data for instantiations of parent.
+--
+--  Copyright (C) 2021, 2022 Free Software Foundation, Inc.
+--
+--  This library is free software;  you can redistribute it and/or modify it
+--  under terms of the  GNU General Public License  as published by the Free
+--  Software  Foundation;  either version 3,  or (at your  option) any later
+--  version. This library is distributed in the hope that it will be useful,
+--  but WITHOUT ANY WARRANTY;  without even the implied warranty of MERCHAN-
+--  TABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+
+--  As a special exception under Section 7 of GPL version 3, you are granted
+--  additional permissions described in the GCC Runtime Library Exception,
+--  version 3.1, as published by the Free Software Foundation.
+
+pragma License (Modified_GPL);
+
+generic
+   type Aux_Data (<>) is limited private;
+   with function Element_Image (Item : in Element_Type; Aux : in Aux_Data) 
return String;
+function SAL.Gen_Definite_Doubly_Linked_Lists.Gen_Image_Aux
+  (Item  : in List;
+   Aux   : in Aux_Data;
+   First : in Cursor := No_Element)
+  return String;
+--  If First /= No_Element, include First .. Item.Last. Otherwise
+--  include Item.First .. Item.Last.
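
A minimal usage sketch for the new Gen_Image_Aux child above (not itself part
of the patch; Image_Aux_Demo and Int_Lists are illustrative names). It assumes
the parent list package's usual Append and First/Next cursor operations, which
lie outside this hunk; Aux_Data is instantiated with String purely as a prefix
passed through to Element_Image.

   with Ada.Text_IO;
   with SAL.Gen_Definite_Doubly_Linked_Lists;
   with SAL.Gen_Definite_Doubly_Linked_Lists.Gen_Image_Aux;
   procedure Image_Aux_Demo is
      package Int_Lists is new SAL.Gen_Definite_Doubly_Linked_Lists (Integer);

      function Int_Image (Item : in Integer; Aux : in String) return String
      is (Aux & Integer'Image (Item));

      function Image is new Int_Lists.Gen_Image_Aux
        (Aux_Data => String, Element_Image => Int_Image);

      Items : Int_Lists.List;
   begin
      Items.Append (1);
      Items.Append (2);

      --  Whole list: "(n 1, n 2)"
      Ada.Text_IO.Put_Line (Image (Items, Aux => "n"));

      declare
         Second : Int_Lists.Cursor := Items.First;
      begin
         Int_Lists.Next (Second);
         --  From the second element on: "(n 2)"
         Ada.Text_IO.Put_Line (Image (Items, Aux => "n", First => Second));
      end;
   end Image_Aux_Demo;
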
diff --git a/sal-gen_definite_doubly_linked_lists.ads 
b/sal-gen_definite_doubly_linked_lists.ads
index 4d68707007..75016694f1 100644
--- a/sal-gen_definite_doubly_linked_lists.ads
+++ b/sal-gen_definite_doubly_linked_lists.ads
@@ -33,10 +33,10 @@ package SAL.Gen_Definite_Doubly_Linked_Lists is
 
    type List is new Ada.Finalization.Controlled with private
    with
-      Constant_Indexing => Constant_Reference,
-      Variable_Indexing => Variable_Reference,
-      Default_Iterator  => Iterate,
-      Iterator_Element  => Element_Type;
+     Constant_Indexing => Constant_Reference,
+     Variable_Indexing => Variable_Reference,
+     Default_Iterator  => Iterate,
+     Iterator_Element  => Element_Type;
 
    type List_Access_Constant is access constant List;
    for List_Access_Constant'Storage_Size use 0;
diff --git a/sal-gen_definite_doubly_linked_lists_ref_count.adb 
b/sal-gen_definite_doubly_linked_lists_ref_count.adb
new file mode 100644
index 0000000000..2e325d9f89
--- /dev/null
+++ b/sal-gen_definite_doubly_linked_lists_ref_count.adb
@@ -0,0 +1,347 @@
+--  Abstract :
+--
+--  see spec
+--
+--  Copyright (C) 2017 - 2022 Free Software Foundation, Inc.
+--
+--  This library is free software; you can redistribute it and/or
+--  modify it under terms of the GNU General Public License as
+--  published by the Free Software Foundation; either version 3, or (at
+--  your option) any later version. This library is distributed in the
+--  hope that it will be useful, but WITHOUT ANY WARRANTY; without even
+--  the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+--  PURPOSE. See the GNU General Public License for more details. You
+--  should have received a copy of the GNU General Public License
+--  distributed with this program; see file COPYING. If not, write to
+--  the Free Software Foundation, 59 Temple Place - Suite 330, Boston,
+--  MA 02111-1307, USA.
+--
+--  As a special exception under Section 7 of GPL version 3, you are granted
+--  additional permissions described in the GCC Runtime Library Exception,
+--  version 3.1, as published by the Free Software Foundation.
+
+pragma License (Modified_GPL);
+
+package body SAL.Gen_Definite_Doubly_Linked_Lists_Ref_Count is
+
+   ---------
+   --  Public operations, declaration order.
+
+   overriding
+   procedure Adjust (Container : in out List)
+   is
+      Next_Source : Node_Access := Container.Head;
+      New_Node    : Node_Access;
+   begin
+      if Next_Source = null then
+         return;
+      end if;
+
+      Container.Tail := null;
+
+      loop
+         New_Node := new Node_Type'
+           (Element   => Next_Source.Element,
+            Next      => null,
+            Prev      => Container.Tail,
+            Ref_Count => 0);
+
+         if Container.Tail = null then
+            Container.Head := New_Node;
+            Container.Tail := New_Node;
+         else
+            Container.Tail.Next := New_Node;
+            Container.Tail      := New_Node;
+         end if;
+         Next_Source := Next_Source.Next;
+         exit when Next_Source = null;
+      end loop;
+   end Adjust;
+
+   overriding
+   procedure Finalize (Container : in out List)
+   is
+      Next : Node_Access := Container.Head;
+   begin
+      loop
+         exit when Next = null;
+         Next := Container.Head.Next;
+         --  We raise an exception here, even though Finalize never should,
+         --  because Finalize is also renamed to Clear, and called as a
+         --  normal procedure.
+         if Container.Enable_Checks and Container.Head.Ref_Count /= 0 then
+            raise Invalid_Operation with "ref_count " & Container.Head.Ref_Count'Image;
+         end if;
+         Free (Container.Head);
+         Container.Head := Next;
+      end loop;
+      Container.Tail  := null;
+      Container.Count := 0;
+   end Finalize;
+
+   procedure Enable_Ref_Count_Check (Container : in out List; Enable : in 
Boolean)
+   is begin
+      Container.Enable_Checks := Enable;
+   end Enable_Ref_Count_Check;
+
+   procedure Check_Ref_Counts (Container : in out List)
+   is
+      Next : Node_Access := Container.Head;
+   begin
+      loop
+         exit when Next = null;
+         if Container.Enable_Checks and Next.Ref_Count /= 0 then
+            raise Invalid_Operation with "ref_count " & Next.Ref_Count'Image;
+         end if;
+         Next := Next.Next;
+      end loop;
+   end Check_Ref_Counts;
+
+   function Length (Container : in List) return Ada.Containers.Count_Type
+   is begin
+      return Container.Count;
+   end Length;
+
+   procedure Append (Container : in out List; Element : in Element_Type)
+   is
+      use all type Ada.Containers.Count_Type;
+      New_Node : constant Node_Access := new Node_Type'
+        (Element   => Element,
+         Prev      => Container.Tail,
+         Next      => null,
+         Ref_Count => 0);
+   begin
+      if Container.Tail = null then
+         Container.Head := New_Node;
+         Container.Tail := New_Node;
+      else
+         Container.Tail.Next := New_Node;
+         Container.Tail      := New_Node;
+      end if;
+      Container.Count := Container.Count + 1;
+   end Append;
+
+   procedure Prepend (Container : in out List; Element : in Element_Type)
+   is
+      use all type Ada.Containers.Count_Type;
+      New_Node : constant Node_Access := new Node_Type'
+        (Element   => Element,
+         Prev      => null,
+         Next      => Container.Head,
+         Ref_Count => 0);
+   begin
+      if Container.Tail = null then
+         Container.Head := New_Node;
+         Container.Tail := New_Node;
+      else
+         Container.Head.Prev := New_Node;
+         Container.Head      := New_Node;
+      end if;
+      Container.Count := Container.Count + 1;
+   end Prepend;
+
+   function To_List (Element : in Element_Type) return List
+   is begin
+      return Result : List do
+         Result.Append (Element);
+      end return;
+   end To_List;
+
+   function Has_Element (Position : in Cursor) return Boolean
+   is begin
+      return Position.Ptr /= null;
+   end Has_Element;
+
+   function First (Container : in List'Class) return Cursor
+   is begin
+      if Container.Head = null then
+         return (Ada.Finalization.Controlled with Ptr => null);
+      else
+         Container.Head.Ref_Count := @ + 1;
+         return (Ada.Finalization.Controlled with Ptr => Container.Head);
+      end if;
+   end First;
+
+   function Last (Container : in List'Class) return Cursor
+   is begin
+      if Container.Tail = null then
+         return (Ada.Finalization.Controlled with Ptr => null);
+      else
+         Container.Tail.Ref_Count := @ + 1;
+         return (Ada.Finalization.Controlled with Ptr => Container.Tail);
+      end if;
+   end Last;
+
+   procedure Next (Position : in out Cursor)
+   is begin
+      Position.Ptr.Ref_Count := @ - 1;
+      if Position.Ptr.Next /= null then
+         Position.Ptr.Next.Ref_Count := @ + 1;
+      end if;
+
+      Position.Ptr := Position.Ptr.Next;
+   end Next;
+
+   function Next (Position : in Cursor) return Cursor
+   is begin
+      if Position.Ptr.Next /= null then
+         Position.Ptr.Next.Ref_Count := @ + 1;
+      end if;
+
+      return (Ada.Finalization.Controlled with Ptr => Position.Ptr.Next);
+   end Next;
+
+   procedure Previous (Position : in out Cursor)
+   is begin
+      Position.Ptr.Ref_Count := @ - 1;
+      if Position.Ptr.Prev /= null then
+         Position.Ptr.Prev.Ref_Count := @ + 1;
+      end if;
+
+      Position.Ptr := Position.Ptr.Prev;
+   end Previous;
+
+   function Previous (Position : in Cursor) return Cursor
+   is begin
+      return Result : constant Cursor := (Ada.Finalization.Controlled with Ptr => Position.Ptr.Prev) do
+         if Result.Ptr /= null then
+            Result.Ptr.Ref_Count := @ + 1;
+         end if;
+      end return;
+   end Previous;
+
+   function Element (Position : in Cursor) return Element_Type
+   is begin
+      return Position.Ptr.Element;
+   end Element;
+
+   procedure Delete (Container : in out List; Position : in out Cursor'Class)
+   is
+      use all type Ada.Containers.Count_Type;
+      Node : Node_Access renames Position.Ptr;
+   begin
+      if Container.Enable_Checks and Node.Ref_Count /= 1 then
+         raise Invalid_Operation with "ref_count " & Node.Ref_Count'Image;
+      end if;
+
+      if Node.Next = null then
+         Container.Tail := Node.Prev;
+      else
+         Node.Next.Prev := Node.Prev;
+      end if;
+      if Node.Prev = null then
+         Container.Head := Node.Next;
+      else
+         Node.Prev.Next := Node.Next;
+      end if;
+      Free (Node);
+
+      Container.Count := Container.Count - 1;
+   end Delete;
+
+   procedure Replace_Element
+     (Position    : in Cursor'Class;
+      New_Element : in Element_Type)
+   is begin
+      Position.Ptr.Element := New_Element;
+   end Replace_Element;
+
+   function Append (Container : in out List'Class; Element : in Element_Type) 
return Cursor
+   is begin
+      Append (Container, Element);
+      Container.Tail.Ref_Count := @ + 1;
+      return (Ada.Finalization.Controlled with Ptr => Container.Tail);
+   end Append;
+
+   function Insert
+     (Container : in out List'Class;
+      Before    : in     Cursor'Class;
+      Element   : in     Element_Type)
+     return Cursor
+   is
+      use all type Ada.Containers.Count_Type;
+   begin
+      if Before.Ptr = null then
+         return Container.Append (Element);
+      else
+         return Result : Cursor do
+            if Before.Ptr = Container.Head then
+               declare
+                  --  old list: before ...
+                  --  new list: new  before ...
+                  New_Node : constant Node_Access := new Node_Type'
+                    (Element   => Element,
+                     Prev      => null,
+                     Next      => Before.Ptr,
+                     Ref_Count => 1);
+               begin
+                  Before.Ptr.Prev := New_Node;
+                  Container.Head  := New_Node;
+                  Result.Ptr      := New_Node;
+               end;
+            else
+               declare
+                  --  old list: ... prev  before ...
+                  --  new list: ... prev  new  before ...
+                  New_Node : constant Node_Access := new Node_Type'
+                    (Element   => Element,
+                     Prev      => Before.Ptr.Prev,
+                     Next      => Before.Ptr,
+                     Ref_Count => 1);
+               begin
+                  Before.Ptr.Prev.Next := New_Node;
+                  Before.Ptr.Prev      := New_Node;
+                  Result.Ptr           := New_Node;
+               end;
+            end if;
+            Container.Count := Container.Count + 1;
+         end return;
+      end if;
+   end Insert;
+
+   procedure Insert
+     (Container : in out List;
+      Before    : in     Cursor'Class;
+      Element   : in     Element_Type)
+   is
+      Junk : Cursor := Insert (Container, Before, Element);
+      pragma Unreferenced (Junk);
+   begin
+      null;
+   end Insert;
+
+   function Contains
+     (Container : in List;
+      Item      : in Cursor'Class)
+     return Boolean
+   is
+      Node : Node_Access := Container.Head;
+   begin
+      loop
+         exit when Node = null;
+         if Node = Item.Ptr then
+            return True;
+         end if;
+         Node := Node.Next;
+      end loop;
+      return False;
+   end Contains;
+
+   ----------
+   --  Private operations, declaration order
+
+   overriding procedure Finalize (Object : in out Cursor)
+   is begin
+      if Object.Ptr /= null then
+         Object.Ptr.Ref_Count := @ - 1;
+      end if;
+   end Finalize;
+
+   overriding procedure Adjust (Object : in out Cursor)
+   is begin
+      if Object.Ptr /= null then
+         Object.Ptr.Ref_Count := @ + 1;
+      end if;
+   end Adjust;
+
+end SAL.Gen_Definite_Doubly_Linked_Lists_Ref_Count;
diff --git a/sal-gen_definite_doubly_linked_lists_ref_count.ads 
b/sal-gen_definite_doubly_linked_lists_ref_count.ads
new file mode 100644
index 0000000000..2542f44c58
--- /dev/null
+++ b/sal-gen_definite_doubly_linked_lists_ref_count.ads
@@ -0,0 +1,200 @@
+--  Abstract :
+--
+--  A generic doubly linked list with definite elements, with
+--  reference counting on cursors to detect dangling references.
+--
+--  WORKAROUND: there is a bug in GNAT Community 2020 (Eurocontrol
+--  ticket V107-045) that causes reference counting to be inaccurate
+--  in some cases, so we support turning off the reference counting.
+--
+--  Rationale for not implementing reference types and iterators:
+--  Consider a typical reference type use:
+--
+--  declare
+--     A : Element_Type renames List.First;
+--     To_Delete : Cursor := List.First;
+--  begin
+--     Delete (To_Delete);
+--  end;
+--
+--  The reference object exists only while evaluating the renames, so
+--  it cannot assert any kind of lock on the element or list that
+--  survives through the call to Delete and is then released. We would
+--  have to use something like:
+--
+--  declare
+--     A_Ref : constant Reference_Type := List.First;
+--     A : Element_Type renames Element (A_Ref);
+--     To_Delete : Cursor := List.First;
+--  begin
+--     Delete (To_Delete);
+--  end;
+--
+--  Where "Reference_Type" is opaque, and thus cannot be used for iterators.
+--
+--  Copyright (C) 2017 - 2022 Free Software Foundation, Inc.
+--
+--  This library is free software; you can redistribute it and/or
+--  modify it under terms of the GNU General Public License as
+--  published by the Free Software Foundation; either version 3, or (at
+--  your option) any later version. This library is distributed in the
+--  hope that it will be useful, but WITHOUT ANY WARRANTY; without even
+--  the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+--  PURPOSE. See the GNU General Public License for more details. You
+--  should have received a copy of the GNU General Public License
+--  distributed with this program; see file COPYING. If not, write to
+--  the Free Software Foundation, 59 Temple Place - Suite 330, Boston,
+--  MA 02111-1307, USA.
+--
+--  As a special exception under Section 7 of GPL version 3, you are granted
+--  additional permissions described in the GCC Runtime Library Exception,
+--  version 3.1, as published by the Free Software Foundation.
+
+pragma License (Modified_GPL);
+
+with Ada.Containers;
+with Ada.Finalization;
+with Ada.Unchecked_Deallocation;
+generic
+   type Element_Type is private;
+package SAL.Gen_Definite_Doubly_Linked_Lists_Ref_Count is
+
+   type List is new Ada.Finalization.Controlled with private;
+   --  We cannot implement reference counting that detects reference
+   --  types (see discussion above), so no reference types, no iterators.
+
+   type List_Access_Constant is access constant List;
+   for List_Access_Constant'Storage_Size use 0;
+
+   type List_Access is access all List;
+   for List_Access'Storage_Size use 0;
+
+   Empty_List : constant List;
+
+   overriding procedure Adjust (Container : in out List);
+   --  Deep copy.
+
+   overriding procedure Finalize (Container : in out List);
+   --  Free all items in List.
+
+   procedure Enable_Ref_Count_Check (Container : in out List; Enable : in 
Boolean);
+   --  Enable or disable checks; default is enabled.
+   --
+   --  This is useful when there are bugs that you want to ignore.
+
+   procedure Check_Ref_Counts (Container : in out List);
+   --  Raises SAL.Invalid_Operation if any ref count is non-zero.
+
+   procedure Clear (Container : in out List) renames Finalize;
+
+   function Length (Container : in List) return Ada.Containers.Count_Type;
+
+   procedure Append (Container : in out List; Element : in Element_Type);
+
+   procedure Prepend (Container : in out List; Element : in Element_Type);
+
+   function To_List (Element : in Element_Type) return List;
+
+   type Cursor is tagged private;
+   --  Cursor is not simply 'private', to allow implementing reference
+   --  counting.
+
+   function Has_Element (Position : in Cursor) return Boolean;
+
+   No_Element : constant Cursor;
+
+   function Ref_Count (Position : in Cursor) return Integer;
+   --  For debugging, tests, preconditions.
+
+   function First (Container : in List'Class) return Cursor;
+   function Last (Container : in List'Class) return Cursor;
+
+   procedure Next (Position : in out Cursor)
+   with Pre => Has_Element (Position);
+
+   function Next (Position : in Cursor) return Cursor
+   with Pre => Has_Element (Position);
+
+   procedure Previous (Position : in out Cursor)
+   with Pre => Has_Element (Position);
+
+   function Previous (Position : in Cursor) return Cursor
+   with Pre => Has_Element (Position);
+
+   function Element (Position : in Cursor) return Element_Type
+   with Pre => Has_Element (Position);
+
+   procedure Delete (Container : in out List; Position : in out Cursor'Class)
+   with Pre => Has_Element (Position) and then Ref_Count (Position) = 1;
+   --  Raises SAL.Invalid_Operation if any other cursors, iterators, or
+   --  reference objects reference the same element as Position.
+
+   procedure Replace_Element
+     (Position    : in Cursor'Class;
+      New_Element : in Element_Type)
+   with Pre => Has_Element (Position);
+
+   function Append (Container : in out List'Class; Element : in Element_Type) 
return Cursor;
+
+   procedure Insert
+     (Container : in out List;
+      Before    : in     Cursor'Class;
+      Element   : in     Element_Type);
+   function Insert
+     (Container : in out List'Class;
+      Before    : in     Cursor'Class;
+      Element   : in     Element_Type)
+     return Cursor;
+   --  If Before is No_Element, insert after Last.
+
+   ----------
+   --  Iterator replacements, since we don't have Ada.Iterator_Interfaces
+   --  iterators.
+
+   function Contains
+     (Container : in List;
+      Item      : in Cursor'Class)
+     return Boolean;
+
+private
+   type Node_Type;
+
+   type Node_Access is access Node_Type;
+
+   type Node_Type is record
+      Element   : aliased Element_Type;
+      Prev      : Node_Access;
+      Next      : Node_Access;
+      Ref_Count : Integer := 0;
+      --  Ref_Count does not include the internal pointers Prev, Next, Head,
+      --  Tail, since there are always two of those for each node.
+   end record;
+
+   procedure Free is new Ada.Unchecked_Deallocation (Node_Type, Node_Access);
+
+   type List is new Ada.Finalization.Controlled with record
+      Head  : Node_Access               := null;
+      Tail  : Node_Access               := null;
+      Count : Ada.Containers.Count_Type := 0;
+
+      Enable_Checks : Boolean := True;
+   end record;
+
+   Empty_List : constant List := (Ada.Finalization.Controlled with others => <>);
+
+   type Cursor is new Ada.Finalization.Controlled with record
+      Ptr : Node_Access;
+   end record;
+
+   overriding procedure Finalize (Object : in out Cursor);
+   --  Decrement external reference count of Object.Ptr.all.
+
+   overriding procedure Adjust (Object : in out Cursor);
+   --  Increment external reference count of Object.Ptr.all.
+
+   No_Element : constant Cursor := (Ada.Finalization.Controlled with Ptr => null);
+
+   function Ref_Count (Position : in Cursor) return Integer
+   is (Position.Ptr.Ref_Count);
+
+end SAL.Gen_Definite_Doubly_Linked_Lists_Ref_Count;
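
The cursor reference counting described in the spec above can be exercised as
follows (a sketch, not part of the patch; Ref_Count_Demo and Int_Lists are
illustrative names). Only operations declared in this spec are used: with
checks enabled, deleting a node that another live cursor still designates
raises SAL.Invalid_Operation, and Check_Ref_Counts passes once every cursor
has been finalized.

   with SAL.Gen_Definite_Doubly_Linked_Lists_Ref_Count;
   procedure Ref_Count_Demo is
      package Int_Lists is new SAL.Gen_Definite_Doubly_Linked_Lists_Ref_Count (Integer);

      Items : Int_Lists.List;
   begin
      Items.Append (1);
      Items.Append (2);

      declare
         A : Int_Lists.Cursor := Items.First;  --  first node's ref count is now 1
         B : Int_Lists.Cursor := Items.First;  --  ... and now 2
      begin
         --  "Items.Delete (A);" here would raise SAL.Invalid_Operation (and
         --  violate Delete's precondition): B still designates the same node.
         Int_Lists.Next (A);   --  A now designates the second node
         Items.Delete (B);     --  legal: B is the only cursor on the first node
      end;                     --  A and B are finalized; ref counts drop to 0

      Items.Check_Ref_Counts;  --  no dangling cursors remain; does not raise
   end Ref_Count_Demo;
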
diff --git a/sal-gen_graphs.adb b/sal-gen_graphs.adb
index c3892819e3..858b3ca7c0 100644
--- a/sal-gen_graphs.adb
+++ b/sal-gen_graphs.adb
@@ -2,7 +2,7 @@
 --
 --  See spec.
 --
---  Copyright (C) 2017, 2019, 2020 Free Software Foundation All Rights 
Reserved.
+--  Copyright (C) 2017, 2019, 2020, 2022 Free Software Foundation All Rights 
Reserved.
 --
 --  This library is free software;  you can redistribute it and/or modify it
 --  under terms of the  GNU General Public License  as published by the Free
@@ -46,25 +46,9 @@ package body SAL.Gen_Graphs is
       Data     : in     Edge_Data)
    is
       Multigraph : Boolean := False;
-
-      procedure Update_First_Last (Vertex : in Vertex_Index)
-      is
-         use all type Ada.Containers.Count_Type;
-      begin
-         if Graph.Vertices.Length = 0 then
-            Graph.Vertices.Set_First_Last (Vertex, Vertex);
-         else
-            if Vertex < Graph.Vertices.First_Index then
-               Graph.Vertices.Set_First_Last (Vertex, Graph.Vertices.Last_Index);
-            elsif Vertex > Graph.Vertices.Last_Index then
-               Graph.Vertices.Set_First_Last (Graph.Vertices.First_Index, Vertex);
-            end if;
-         end if;
-      end Update_First_Last;
-
    begin
-      Update_First_Last (Vertex_A);
-      Update_First_Last (Vertex_B);
+      Graph.Vertices.Extend (Vertex_A);
+      Graph.Vertices.Extend (Vertex_B);
 
       Graph.Last_Edge_ID := Graph.Last_Edge_ID + 1;
       if (for some E of Graph.Vertices (Vertex_A) => E.Vertex_B = Vertex_B) then
diff --git a/sal-gen_unconstrained_array_image.adb 
b/sal-gen_indefinite_doubly_linked_lists-gen_image_aux.adb
similarity index 54%
rename from sal-gen_unconstrained_array_image.adb
rename to sal-gen_indefinite_doubly_linked_lists-gen_image_aux.adb
index 7ea1c830f1..5e71aa280b 100644
--- a/sal-gen_unconstrained_array_image.adb
+++ b/sal-gen_indefinite_doubly_linked_lists-gen_image_aux.adb
@@ -2,7 +2,7 @@
 --
 --  See spec.
 --
---  Copyright (C) 2019 Free Software Foundation, Inc.
+--  Copyright (C) 2021, 2022 Free Software Foundation, Inc.
 --
 --  This library is free software;  you can redistribute it and/or modify it
 --  under terms of the  GNU General Public License  as published by the Free
@@ -17,18 +17,31 @@
 
 pragma License (Modified_GPL);
 
-with Ada.Strings.Unbounded; use Ada.Strings.Unbounded;
-function SAL.Gen_Unconstrained_Array_Image (Item : in Array_Type) return String
+with Ada.Strings.Unbounded;
+function SAL.Gen_Indefinite_Doubly_Linked_Lists.Gen_Image_Aux
+  (Item  : in List;
+   Aux   : in Aux_Data;
+   First : in Cursor := No_Element)
+  return String
 is
+   use Ada.Strings.Unbounded;
    Result : Unbounded_String := To_Unbounded_String ("(");
+   Node   : Node_Access      :=
+     (if First = No_Element
+      then Item.Head
+      else First.Ptr);
 begin
-   for I in Item'Range loop
-      Result := Result & Element_Image (Item (I));
-      if I = Item'Last then
-         Result := Result & ")";
-      else
-         Result := Result & ", ";
-      end if;
-   end loop;
+   if Node /= null then
+      loop
+         Append (Result, Element_Image (Node.Element.all, Aux));
+
+         Node := Node.Next;
+
+         exit when Node = null;
+
+         Append (Result, ", ");
+      end loop;
+   end if;
+   Append (Result, ")");
    return To_String (Result);
-end SAL.Gen_Unconstrained_Array_Image;
+end SAL.Gen_Indefinite_Doubly_Linked_Lists.Gen_Image_Aux;
diff --git a/sal-gen_unbounded_definite_stacks-gen_image_aux.ads 
b/sal-gen_indefinite_doubly_linked_lists-gen_image_aux.ads
similarity index 74%
rename from sal-gen_unbounded_definite_stacks-gen_image_aux.ads
rename to sal-gen_indefinite_doubly_linked_lists-gen_image_aux.ads
index 801ffd3763..eea5077938 100644
--- a/sal-gen_unbounded_definite_stacks-gen_image_aux.ads
+++ b/sal-gen_indefinite_doubly_linked_lists-gen_image_aux.ads
@@ -1,27 +1,29 @@
---  Abstract :
---
---  Image with auxiliary data for instantiations of parent.
---
---  Copyright (C) 2018 Free Software Foundation, Inc.
---
---  This library is free software;  you can redistribute it and/or modify it
---  under terms of the  GNU General Public License  as published by the Free
---  Software  Foundation;  either version 3,  or (at your  option) any later
---  version. This library is distributed in the hope that it will be useful,
---  but WITHOUT ANY WARRANTY;  without even the implied warranty of MERCHAN-
---  TABILITY or FITNESS FOR A PARTICULAR PURPOSE.
-
---  As a special exception under Section 7 of GPL version 3, you are granted
---  additional permissions described in the GCC Runtime Library Exception,
---  version 3.1, as published by the Free Software Foundation.
-
-pragma License (Modified_GPL);
-
-generic
-   type Aux_Data (<>) is private;
-   with function Element_Image (Item : in Element_Type; Aux : in Aux_Data) 
return String;
-function SAL.Gen_Unbounded_Definite_Stacks.Gen_Image_Aux
-  (Item  : in Stack;
-   Aux   : in Aux_Data;
-   Depth : in SAL.Base_Peek_Type := 0)
-  return String;
+--  Abstract :
+--
+--  Image with auxiliary data for instantiations of parent.
+--
+--  Copyright (C) 2022 Free Software Foundation, Inc.
+--
+--  This library is free software;  you can redistribute it and/or modify it
+--  under terms of the  GNU General Public License  as published by the Free
+--  Software  Foundation;  either version 3,  or (at your  option) any later
+--  version. This library is distributed in the hope that it will be useful,
+--  but WITHOUT ANY WARRANTY;  without even the implied warranty of MERCHAN-
+--  TABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+
+--  As a special exception under Section 7 of GPL version 3, you are granted
+--  additional permissions described in the GCC Runtime Library Exception,
+--  version 3.1, as published by the Free Software Foundation.
+
+pragma License (Modified_GPL);
+
+generic
+   type Aux_Data (<>) is limited private;
+   with function Element_Image (Item : in Element_Type; Aux : in Aux_Data) 
return String;
+function SAL.Gen_Indefinite_Doubly_Linked_Lists.Gen_Image_Aux
+  (Item  : in List;
+   Aux   : in Aux_Data;
+   First : in Cursor := No_Element)
+  return String;
+--  If First /= No_Element, include First .. Item.Last. Otherwise
+--  include Item.First .. Item.Last.
diff --git a/sal-gen_indefinite_doubly_linked_lists.adb 
b/sal-gen_indefinite_doubly_linked_lists.adb
index 0ff37640c7..fdfef03c33 100644
--- a/sal-gen_indefinite_doubly_linked_lists.adb
+++ b/sal-gen_indefinite_doubly_linked_lists.adb
@@ -2,7 +2,7 @@
 --
 --  see spec
 --
---  Copyright (C) 2018 - 2021 Free Software Foundation, Inc.
+--  Copyright (C) 2018 - 2022 Free Software Foundation, Inc.
 --
 --  This library is free software; you can redistribute it and/or
 --  modify it under terms of the GNU General Public License as
@@ -111,6 +111,13 @@ package body SAL.Gen_Indefinite_Doubly_Linked_Lists is
       Container.Count := Container.Count + 1;
    end Prepend;
 
+   function To_List (Element : in Element_Type) return List
+   is begin
+      return Result : List do
+         Result.Append (Element);
+      end return;
+   end To_List;
+
    function Has_Element (Position : in Cursor) return Boolean
    is begin
       return Position.Ptr /= null;
@@ -125,6 +132,15 @@ package body SAL.Gen_Indefinite_Doubly_Linked_Lists is
       end if;
    end First;
 
+   function Last (Container : in List) return Cursor
+   is begin
+      if Container.Tail = null then
+         return (Ptr => null);
+      else
+         return (Ptr => Container.Tail);
+      end if;
+   end Last;
+
    procedure Next (Position : in out Cursor)
    is begin
       if Position.Ptr /= null then
@@ -149,11 +165,30 @@ package body SAL.Gen_Indefinite_Doubly_Linked_Lists is
       end if;
    end Next;
 
+   function Previous (Position : in Cursor) return Cursor
+   is begin
+      if Position.Ptr = null then
+         return Position;
+      else
+         if Position.Ptr.Prev = null then
+            return (Ptr => null);
+         else
+            return (Ptr => Position.Ptr.Prev);
+         end if;
+      end if;
+   end Previous;
+
    function Element (Position : in Cursor) return Element_Type
    is begin
       return Position.Ptr.Element.all;
    end Element;
 
+   function Append (Container : in out List; Element : in Element_Type) return 
Cursor
+   is begin
+      Append (Container, Element);
+      return (Ptr => Container.Tail);
+   end Append;
+
    procedure Delete (Container : in out List; Position : in out Cursor)
    is
       Node : Node_Access renames Position.Ptr;
@@ -174,39 +209,58 @@ package body SAL.Gen_Indefinite_Doubly_Linked_Lists is
       Container.Count := Container.Count - 1;
    end Delete;
 
-   function Persistent_Ref (Position : in Cursor) return access Element_Type
+   function Unchecked_Ref (Position : in Cursor) return access Element_Type
    is begin
       return Position.Ptr.Element;
-   end Persistent_Ref;
+   end Unchecked_Ref;
 
    function Constant_Ref (Position : in Cursor) return Constant_Reference_Type
    is begin
       return (Element => Position.Ptr.all.Element, Dummy => 1);
    end Constant_Ref;
 
-   function Constant_Reference (Container : in List; Position : in Peek_Type) 
return Constant_Reference_Type
-   is
-      Ptr : Node_Access := Container.Head;
-   begin
-      for I in 2 .. Position loop
-         Ptr := Ptr.Next;
-      end loop;
-      return (Element => Ptr.all.Element, Dummy => 1);
+   function Constant_Reference (Container : in List; Position : in Cursor) 
return Constant_Reference_Type
+   is begin
+      return (Element => Position.Ptr.Element, Dummy => 1);
    end Constant_Reference;
 
-   function Variable_Reference (Container : in List; Position : in Peek_Type) 
return Variable_Reference_Type
-   is
-      Ptr : Node_Access := Container.Head;
-   begin
-      for I in 2 .. Position loop
-         Ptr := Ptr.Next;
-      end loop;
-      return (Element => Ptr.all.Element, Dummy => 1);
+   function Variable_Reference (Container : in List; Position : in Cursor) 
return Variable_Reference_Type
+   is begin
+      return (Element => Position.Ptr.Element, Dummy => 1);
    end Variable_Reference;
 
    function Variable_Ref (Position : in Cursor) return Variable_Reference_Type
    is begin
-      return (Element => Position.Ptr.all.Element, Dummy => 1);
+      return (Element => Position.Ptr.Element, Dummy => 1);
    end Variable_Ref;
 
+   function Iterate (Container : aliased in List) return 
Iterator_Interfaces.Reversible_Iterator'Class
+   is begin
+      return Iterator'(Container => Container'Access);
+   end Iterate;
+
+   overriding function First (Object : Iterator) return Cursor
+   is begin
+      return First (Object.Container.all);
+   end First;
+
+   overriding function Last  (Object : Iterator) return Cursor
+   is begin
+      return Last (Object.Container.all);
+   end Last;
+
+   overriding function Next (Object : in Iterator; Position : in Cursor) 
return Cursor
+   is
+      pragma Unreferenced (Object);
+   begin
+      return Next (Position);
+   end Next;
+
+   overriding function Previous (Object : in Iterator; Position : in Cursor) 
return Cursor
+   is
+      pragma Unreferenced (Object);
+   begin
+      return Previous (Position);
+   end Previous;
+
 end SAL.Gen_Indefinite_Doubly_Linked_Lists;
diff --git a/sal-gen_indefinite_doubly_linked_lists.ads 
b/sal-gen_indefinite_doubly_linked_lists.ads
index b1d1cdb29e..b9d3022256 100644
--- a/sal-gen_indefinite_doubly_linked_lists.ads
+++ b/sal-gen_indefinite_doubly_linked_lists.ads
@@ -1,9 +1,8 @@
 --  Abstract :
 --
---  A generic doubly linked list with indefinite elements, allowing
---  permanent references to elements.
+--  A generic doubly linked list with indefinite elements.
 --
---  Copyright (C) 2018 - 2020 Free Software Foundation, Inc.
+--  Copyright (C) 2018 - 2022 Free Software Foundation, Inc.
 --
 --  This library is free software; you can redistribute it and/or
 --  modify it under terms of the GNU General Public License as
@@ -24,14 +23,18 @@
 pragma License (Modified_GPL);
 
 with Ada.Finalization;
+with Ada.Iterator_Interfaces;
 with Ada.Unchecked_Deallocation;
 generic
    type Element_Type (<>) is private;
 package SAL.Gen_Indefinite_Doubly_Linked_Lists is
 
-   type List is new Ada.Finalization.Controlled with private with
+   type List is new Ada.Finalization.Controlled with private
+   with
      Constant_Indexing => Constant_Reference,
-     Variable_Indexing => Variable_Reference;
+     Variable_Indexing => Variable_Reference,
+     Default_Iterator  => Iterate,
+     Iterator_Element  => Element_Type;
 
    Empty_List : constant List;
 
@@ -47,6 +50,8 @@ package SAL.Gen_Indefinite_Doubly_Linked_Lists is
 
    procedure Prepend (Container : in out List; Element : in Element_Type);
 
+   function To_List (Element : in Element_Type) return List;
+
    type Cursor is private;
 
    No_Element : constant Cursor;
@@ -54,19 +59,24 @@ package SAL.Gen_Indefinite_Doubly_Linked_Lists is
    function Has_Element (Position : in Cursor) return Boolean;
 
    function First (Container : in List) return Cursor;
+   function Last (Container : in List) return Cursor;
 
    procedure Next (Position : in out Cursor);
 
    function Next (Position : in Cursor) return Cursor;
+   function Previous (Position : in Cursor) return Cursor;
 
    function Element (Position : in Cursor) return Element_Type
    with Pre => Has_Element (Position);
 
+   function Append (Container : in out List; Element : in Element_Type) return 
Cursor;
+
    procedure Delete (Container : in out List; Position : in out Cursor)
    with Pre => Has_Element (Position);
 
-   function Persistent_Ref (Position : in Cursor) return access Element_Type
+   function Unchecked_Ref (Position : in Cursor) return access Element_Type
    with Pre => Has_Element (Position);
+   --  For use in building higher-level containers.
 
    type Constant_Reference_Type (Element : not null access constant 
Element_Type) is private with
      Implicit_Dereference => Element;
@@ -74,18 +84,22 @@ package SAL.Gen_Indefinite_Doubly_Linked_Lists is
    function Constant_Ref (Position : in Cursor) return Constant_Reference_Type
    with Inline, Pre => Has_Element (Position);
 
-   function Constant_Reference (Container : in List; Position : in Peek_Type) 
return Constant_Reference_Type
-   with Inline, Pre => Position <= Container.Length;
+   function Constant_Reference (Container : in List; Position : in Cursor) 
return Constant_Reference_Type
+   with Inline, Pre => Has_Element (Position);
 
    type Variable_Reference_Type (Element : not null access Element_Type) is 
private with
      Implicit_Dereference => Element;
 
-   function Variable_Reference (Container : in List; Position : in Peek_Type) 
return Variable_Reference_Type
-   with Inline, Pre => Position <= Container.Length;
+   function Variable_Reference (Container : in List; Position : in Cursor) 
return Variable_Reference_Type
+   with Inline, Pre => Has_Element (Position);
 
    function Variable_Ref (Position : in Cursor) return Variable_Reference_Type
    with Inline, Pre => Has_Element (Position);
 
+   package Iterator_Interfaces is new Ada.Iterator_Interfaces (Cursor, Has_Element);
+
+   function Iterate (Container : aliased in List) return 
Iterator_Interfaces.Reversible_Iterator'Class;
+
 private
    type Node_Type;
    type Node_Access is access Node_Type;
@@ -121,8 +135,22 @@ private
       Dummy : Integer := raise Program_Error with "uninitialized reference";
    end record;
 
-   Empty_List : constant List := (Ada.Finalization.Controlled with null, null, 0);
+   Empty_List : aliased constant List := (Ada.Finalization.Controlled with null, null, 0);
 
    No_Element : constant Cursor := (Ptr => null);
 
+   type Iterator (Container : not null access constant List) is new 
Iterator_Interfaces.Reversible_Iterator with
+   null record;
+
+   overriding function First (Object : Iterator) return Cursor;
+   overriding function Last  (Object : Iterator) return Cursor;
+
+   overriding function Next
+     (Object   : Iterator;
+      Position : Cursor) return Cursor;
+
+   overriding function Previous
+     (Object   : Iterator;
+      Position : Cursor) return Cursor;
+
 end SAL.Gen_Indefinite_Doubly_Linked_Lists;
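
With the Default_Iterator, Iterator_Element and indexing aspects added above,
instances of this package now work in container element iterators. A sketch
(not part of the patch; Iterate_Demo and String_Lists are illustrative names,
and the package's existing Append procedure is assumed):

   with Ada.Text_IO;
   with SAL.Gen_Indefinite_Doubly_Linked_Lists;
   procedure Iterate_Demo is
      package String_Lists is new SAL.Gen_Indefinite_Doubly_Linked_Lists (String);

      Names : aliased String_Lists.List;  --  aliased; Iterate takes an aliased parameter
   begin
      Names.Append ("ada");
      Names.Append ("mode");

      for Name of Names loop             --  forward, via Iterate and Variable_Reference
         Ada.Text_IO.Put_Line (Name);
      end loop;

      for Name of reverse Names loop     --  reverse, via the Reversible_Iterator
         Ada.Text_IO.Put_Line (Name);
      end loop;
   end Iterate_Demo;
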
diff --git a/sal-gen_unbounded_definite_hash_tables.adb 
b/sal-gen_unbounded_definite_hash_tables.adb
new file mode 100644
index 0000000000..2446b76648
--- /dev/null
+++ b/sal-gen_unbounded_definite_hash_tables.adb
@@ -0,0 +1,276 @@
+--  Abstract :
+--
+--  See spec
+--
+--  Notice
+--
+--  Copyright (C) 2020 - 2021 Free Software Foundation, Inc. All Rights 
Reserved.
+--
+--  This library is free software;  you can redistribute it and/or modify it
+--  under terms of the  GNU General Public License  as published by the Free
+--  Software  Foundation;  either version 3,  or (at your  option) any later
+--  version. This library is distributed in the hope that it will be useful,
+--  but WITHOUT ANY WARRANTY;  without even the implied warranty of MERCHAN-
+--  TABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+
+--  As a special exception under Section 7 of GPL version 3, you are granted
+--  additional permissions described in the GCC Runtime Library Exception,
+--  version 3.1, as published by the Free Software Foundation.
+
+pragma License (Modified_GPL);
+
+package body SAL.Gen_Unbounded_Definite_Hash_Tables is
+
+   --  Local subprograms
+
+   Row_Sizes : constant array (Positive range <>) of Positive :=
+     --  List of primes > 2 * previous. From [1]. Max is given by Ada LR1
+     --  parse table number of states; ~ 500_000, with an average row
+     --  content of 3; we allow for twice that => (* 2 (/ 500000 3)) =
+     --  333_332.
+     (113, 227, 467, 937, 1877, 3761, 7523, 15_053, 31_013, 62_039, 124_087, 248_177, 496_381);
+
+   function Find_Prime (Rows_Target : in Positive) return Positive
+   is begin
+      for P of Row_Sizes loop
+         if P >= Rows_Target then
+            return P;
+         end if;
+      end loop;
+      raise Programmer_Error with Rows_Target'Image & " not found in Primes table (max" &
+        Row_Sizes (Row_Sizes'Last)'Image & "); need better hash function, or more primes";
+   end Find_Prime;
+
+   procedure Grow (Table : in out Hash_Table; New_Rows : in Positive := 
Positive'First)
+   is
+      Rows_Target : constant Positive := (if New_Rows = Positive'First then 2 * Table.Table.Last_Index else New_Rows);
+
+      Prime_New_Rows : constant Positive := Find_Prime (Rows_Target);
+
+      New_Table : Hash_Table;
+   begin
+      New_Table.Table.Set_First_Last (Table.Table.First_Index, Prime_New_Rows);
+
+      for Row of Table.Table loop
+         for Element of Row loop
+            New_Table.Insert (Element, Duplicate => Error);
+         end loop;
+      end loop;
+
+      Table := New_Table;
+   end Grow;
+
+   ----------
+   --  Public subprograms, in spec order
+
+   procedure Set_Rows
+     (Table : in out Hash_Table;
+      Rows  : in     Positive)
+   is begin
+      if Table.Table.Is_Empty then
+         Table.Table.Set_First_Last (1, Find_Prime (Rows));
+
+      elsif Table.Table.Last_Index = Rows then
+         --  No change
+         null;
+
+      else
+         Grow (Table, Rows);
+      end if;
+   end Set_Rows;
+
+   procedure Clear (Table : in out Hash_Table)
+   is begin
+      if Table.Table.Is_Empty then
+         return;
+      else
+         for Row of Table.Table loop
+            Row.Clear;
+         end loop;
+      end if;
+   end Clear;
+
+   procedure Insert
+     (Table     : in out Hash_Table;
+      Element   : in     Element_Type;
+      Duplicate : in     Ignore_Error_Type)
+   is begin
+      if Table.Table.Is_Empty then
+         Set_Rows (Table, Default_Init_Rows);
+      end if;
+
+      declare
+         Found    : Boolean;
+         Tree_Cur : constant Element_Trees.Cursor := Table.Table (Hash (Element, Table.Table.Last_Index))
+           .Find_Or_Insert (Element, Found);
+         pragma Unreferenced (Tree_Cur);
+      begin
+         if Found then
+            case Duplicate is
+            when Ignore =>
+               null;
+            when Error =>
+               raise Duplicate_Key;
+            end case;
+         end if;
+      end;
+   end Insert;
+
+   function Find_Or_Insert
+     (Table   : in out Hash_Table;
+      Element : in     Element_Type;
+      Found   :    out Boolean)
+     return Constant_Reference_Type
+   is begin
+      if Table.Table.Is_Empty then
+         Set_Rows (Table, Default_Init_Rows);
+      end if;
+
+      declare
+         Tree : Element_Trees.Tree renames Table.Table (Hash (Element, Table.Table.Last_Index));
+         Tree_Cur : constant Element_Trees.Cursor := Tree.Find_Or_Insert (Element, Found);
+      begin
+         return Tree.Unchecked_Const_Ref (Tree_Cur);
+      end;
+   end Find_Or_Insert;
+
+   function Find_Or_Insert_Var
+     (Table   : in out Hash_Table;
+      Element : in     Element_Type;
+      Found   :    out Boolean)
+     return Variable_Reference_Type
+   is begin
+      if Table.Table.Is_Empty then
+         Set_Rows (Table, Default_Init_Rows);
+      end if;
+
+      declare
+         Tree : Element_Trees.Tree renames Table.Table (Hash (Element, Table.Table.Last_Index));
+         Tree_Cur : constant Element_Trees.Cursor := Tree.Find_Or_Insert (Element, Found);
+      begin
+         return Variable_Reference_Type (Tree.Unchecked_Var_Ref (Tree_Cur));
+      end;
+   end Find_Or_Insert_Var;
+
+   function Has_Element (Cursor : in Pkg.Cursor) return Boolean
+   is begin
+      return Cursor /= No_Element;
+   end Has_Element;
+
+   function Constant_Ref
+     (Table    : aliased in Hash_Table;
+      Position :         in Cursor)
+     return Constant_Reference_Type
+   is
+      Tree : Element_Trees.Tree renames Table.Table (Position.Row);
+   begin
+      return Tree.Unchecked_Const_Ref (Position.Cur);
+   end Constant_Ref;
+
+   function Variable_Ref
+     (Table    : aliased in Hash_Table;
+      Position :         in Cursor)
+     return Variable_Reference_Type
+   is
+      Tree : Element_Trees.Tree renames Table.Table (Position.Row);
+   begin
+      return Tree.Unchecked_Var_Ref (Position.Cur);
+   end Variable_Ref;
+
+   function Find
+     (Table   : aliased in Hash_Table;
+      Element :         in Element_Type)
+     return Cursor
+   is begin
+      if Table.Table.Is_Empty then
+         return No_Element;
+      end if;
+
+      declare
+         Row : constant Positive := Hash (Element, Table.Table.Last_Index);
+         Tree : Element_Trees.Tree renames Table.Table (Row);
+         Tree_Cur : constant Element_Trees.Cursor := Tree.Find (Key (Element));
+      begin
+         if Element_Trees.Has_Element (Tree_Cur) then
+            return (Row, Tree_Cur);
+         else
+            return No_Element;
+         end if;
+      end;
+   end Find;
+
+   function Iterate (Table : aliased in Pkg.Hash_Table'Class) return Iterator
+   is begin
+      return (Table => Table'Access);
+   end Iterate;
+
+   overriding function First (Iterator : in Pkg.Iterator) return Cursor
+   is begin
+      for Row in Iterator.Table.Table.First_Index .. Iterator.Table.Table.Last_Index loop
+         declare
+            Iter     : constant Element_Trees.Iterator := Iterator.Table.Table (Row).Iterate;
+            Tree_Cur : constant Element_Trees.Cursor   := Iter.First;
+         begin
+            if Element_Trees.Has_Element (Tree_Cur) then
+               return (Row, Tree_Cur);
+            end if;
+         end;
+      end loop;
+      return No_Element;
+   end First;
+
+   overriding function Next (Iterator : in Pkg.Iterator; Position : in Cursor) return Cursor
+   is
+      Iter      : constant Element_Trees.Iterator := Iterator.Table.Table (Position.Row).Iterate;
+      Tree_Next : constant Element_Trees.Cursor   := Iter.Next (Position.Cur);
+   begin
+      if Element_Trees.Has_Element (Tree_Next) then
+         return (Position.Row, Tree_Next);
+      end if;
+
+      --  The current row is exhausted; skip over any empty rows that follow.
+      for Row in Position.Row + 1 .. Iterator.Table.Table.Last_Index loop
+         declare
+            Row_First : constant Element_Trees.Cursor :=
+              Iterator.Table.Table (Row).Iterate.First;
+         begin
+            if Element_Trees.Has_Element (Row_First) then
+               return (Row, Row_First);
+            end if;
+         end;
+      end loop;
+      return No_Element;
+   end Next;
+
+   procedure Sizes
+     (Table             : in     Hash_Table;
+      Elements          :    out Ada.Containers.Count_Type;
+      Rows              :    out Integer;
+      Max_Row_Depth     :    out Ada.Containers.Count_Type;
+      Average_Row_Depth :    out Ada.Containers.Count_Type;
+      Empty_Rows        :    out Integer)
+   is
+      use Ada.Containers;
+   begin
+      Elements          := 0;
+      Rows              := Table.Table.Last_Index;
+      Max_Row_Depth     := 0;
+      Average_Row_Depth := 0;
+      Empty_Rows        := 0;
+
+      for Row of Table.Table loop
+         declare
+            Count : Count_Type;
+            Depth : Count_Type;
+         begin
+            Row.Count_Depth (Count, Depth);
+            Elements := @ + Count;
+            if Count = 0 then
+               Empty_Rows := @ + 1;
+            else
+               if Max_Row_Depth < Depth then
+                  Max_Row_Depth := Depth;
+               end if;
+               Average_Row_Depth := @ + Depth;
+            end if;
+         end;
+      end loop;
+
+      if Rows > 0 and Rows > Empty_Rows then
+         Average_Row_Depth := @ / Count_Type (Rows - Empty_Rows);
+      end if;
+   end Sizes;
+
+end SAL.Gen_Unbounded_Definite_Hash_Tables;
diff --git a/sal-gen_unbounded_definite_hash_tables.ads 
b/sal-gen_unbounded_definite_hash_tables.ads
new file mode 100644
index 0000000000..5c8a9f1dac
--- /dev/null
+++ b/sal-gen_unbounded_definite_hash_tables.ads
@@ -0,0 +1,180 @@
+--  Abstract:
+--
+--  Generic Hash Table, using red-black trees for collisions.
+--
+--  Design
+--
+--  We assume computing Key from Element is free (for example, Element
+--  is (Key, Index to actual store)), and computing Hash from Key is
+--  cheap. Hashes are recomputed for all elements when the table is
+--  grown.
+--
+--  References
+--
+--  [1] Prime numbers 
http://compoasso.free.fr/primelistweb/page/prime/liste_online_en.php
+--
+--  Notice
+--
+--  Copyright (C) 2020 - 2021 Free Software Foundation, Inc. All Rights 
Reserved.
+--
+--  This library is free software;  you can redistribute it and/or modify it
+--  under terms of the  GNU General Public License  as published by the Free
+--  Software  Foundation;  either version 3,  or (at your  option) any later
+--  version. This library is distributed in the hope that it will be useful,
+--  but WITHOUT ANY WARRANTY;  without even the implied warranty of MERCHAN-
+--  TABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+
+--  As a special exception under Section 7 of GPL version 3, you are granted
+--  additional permissions described in the GCC Runtime Library Exception,
+--  version 3.1, as published by the Free Software Foundation.
+
+pragma License (Modified_GPL);
+
+with Ada.Containers;
+with Ada.Iterator_Interfaces;
+private with SAL.Gen_Unbounded_Definite_Red_Black_Trees;
+private with SAL.Gen_Unbounded_Definite_Vectors;
+generic
+   type Element_Type is private;
+   type Key_Type (<>) is private;
+   with function Key (Element : in Element_Type) return Key_Type is <>;
+   with function Key_Compare (Left, Right : in Key_Type) return Compare_Result;
+
+   with function Hash (Element : Element_Type; Rows : Positive) return 
Positive;
+   --  WORKAROUND: GNAT community 2019 doesn't allow 'with post' here
+   --  with Release compilation switches. Fixed in GNAT community 2021.
+   --  with Post => Hash'Result in 1 .. Rows;
+   --
+   --  Takes Element, not Key, to allow storing Hash in Element so it is
+   --  only computed once.
+   --
+   --  1 + (Some_hash (Key) mod Rows) works.
+
+   Default_Init_Rows : Positive := 113;
+
+package SAL.Gen_Unbounded_Definite_Hash_Tables is
+
+   package Pkg renames Gen_Unbounded_Definite_Hash_Tables;
+
+   Default_Rows : constant Positive := Pkg.Default_Init_Rows;
+
+   type Hash_Table is tagged private
+   with
+     Constant_Indexing => Constant_Ref,
+     Default_Iterator  => Iterate,
+     Iterator_Element  => Element_Type;
+
+   procedure Set_Rows
+     (Table : in out Hash_Table;
+      Rows  : in     Positive);
+   --  Set the hash table size. If Table is not empty, all hashes are
+   --  recomputed; this renders any Constant_Refs invalid.
+
+   function Rows (Table : in Hash_Table) return Positive;
+
+   procedure Clear (Table : in out Hash_Table);
+   --  Set Table to empty.
+
+   procedure Insert
+     (Table     : in out Hash_Table;
+      Element   : in     Element_Type;
+      Duplicate : in     Ignore_Error_Type);
+   --  If Key (Element) is already in Table: if Duplicate is Ignore, does
+   --  nothing; otherwise, raises Duplicate_Key.
+
+   type Constant_Reference_Type is access constant Element_Type;
+   --  The name lies; this is not a "reference type" as defined by Ada.
+   --  But gnat pro 22.0w 20201222 does not support using a real
+   --  reference type here. See AdaCore ticket U117-010 (on the
+   --  Eurocontrol contract).
+
+   type Variable_Reference_Type is access all Element_Type;
+   --  Similarly, this is not a "reference type"; therefore we cannot
+   --  implement aspect Variable_Indexing.
+
+   function Find_Or_Insert
+     (Table   : in out Hash_Table;
+      Element : in     Element_Type;
+      Found   :    out Boolean)
+     return Constant_Reference_Type;
+
+   function Find_Or_Insert_Var
+     (Table   : in out Hash_Table;
+      Element : in     Element_Type;
+      Found   :    out Boolean)
+     return Variable_Reference_Type;
+   --  User must not change Key or Hash via this reference.
+
+   type Cursor is private;
+
+   No_Element : constant Cursor;
+
+   function Has_Element (Cursor : in Pkg.Cursor) return Boolean;
+
+   function Constant_Ref
+     (Table    : aliased in Hash_Table;
+      Position :         in Cursor)
+     return Constant_Reference_Type
+   with Inline, Pre => Has_Element (Position);
+
+   function Variable_Ref
+     (Table    : aliased in Hash_Table;
+      Position :         in Cursor)
+     return Variable_Reference_Type
+   with Inline, Pre => Has_Element (Position);
+
+   function Find
+     (Table   : aliased in Hash_Table;
+      Element :         in Element_Type)
+     return Cursor;
+   --  Result is No_Element if Key is not in Table.
+   --
+   --  Takes Element instead of Key to allow storing Hash in Element.
+
+   package Iterators is new Ada.Iterator_Interfaces (Cursor, Has_Element);
+
+   type Iterator (<>) is new Iterators.Forward_Iterator with private;
+
+   function Iterate (Table : aliased in Pkg.Hash_Table'Class) return Iterator;
+
+   overriding function First (Iterator : in Pkg.Iterator) return Cursor;
+   overriding function Next (Iterator : in Pkg.Iterator; Position : in Cursor) 
return Cursor
+   with Pre => Has_Element (Position);
+
+   procedure Sizes
+     (Table             : in     Hash_Table;
+      Elements          :    out Ada.Containers.Count_Type;
+      Rows              :    out Integer;
+      Max_Row_Depth     :    out Ada.Containers.Count_Type;
+      Average_Row_Depth :    out Ada.Containers.Count_Type;
+      Empty_Rows        :    out Integer);
+
+private
+
+   package Element_Trees is new SAL.Gen_Unbounded_Definite_Red_Black_Trees
+     (Element_Type, Key_Type, Key, Key_Compare);
+   --  Holds elements for a row
+
+   package Hash_Arrays is new SAL.Gen_Unbounded_Definite_Vectors
+     (Natural, Element_Trees.Tree, Element_Trees.Empty_Tree);
+
+   type Hash_Table is tagged record
+      --  Directly deriving Hash_Table from Hash_Arrays.Vector would mean we
+      --  have to implement Iterate.
+      Table : Hash_Arrays.Vector;
+   end record;
+
+   function Rows (Table : in Hash_Table) return Positive
+   is (if Table.Table.Last_Index = Hash_Arrays.No_Index then Default_Init_Rows else Table.Table.Last_Index);
+
+   type Cursor is record
+      Row : Integer              := Hash_Arrays.No_Index; --  index into Table.Table.
+      Cur : Element_Trees.Cursor := Element_Trees.No_Element;
+   end record;
+
+   No_Element : constant Cursor := (others => <>);
+
+   type Iterator (Table : not null access constant Hash_Table) is new 
Iterators.Forward_Iterator
+     with null record;
+
+end SAL.Gen_Unbounded_Definite_Hash_Tables;
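
A usage sketch for the hash table (not part of the patch; Hash_Table_Demo,
Counter and Counter_Tables are illustrative names). The Hash actual follows
the "1 + (hash mod Rows)" suggestion in the comment on the Hash formal; the
Ignore / Error literals are those used in the package body above, and
SAL.Compare_Result is assumed to provide the usual Less / Equal / Greater
literals.

   with SAL.Gen_Unbounded_Definite_Hash_Tables;
   procedure Hash_Table_Demo is
      type Counter is record
         Name  : Character;
         Count : Natural := 0;
      end record;

      function Name_Of (Element : in Counter) return Character
      is (Element.Name);

      function Compare (Left, Right : in Character) return SAL.Compare_Result
      is (if Left < Right then SAL.Less elsif Left = Right then SAL.Equal else SAL.Greater);

      function Hash (Element : Counter; Rows : Positive) return Positive
      is (1 + (Character'Pos (Element.Name) mod Rows));

      package Counter_Tables is new SAL.Gen_Unbounded_Definite_Hash_Tables
        (Element_Type => Counter,
         Key_Type     => Character,
         Key          => Name_Of,
         Key_Compare  => Compare,
         Hash         => Hash);

      Table : Counter_Tables.Hash_Table;
      Found : Boolean;
   begin
      Table.Insert ((Name => 'a', Count => 1), Duplicate => SAL.Error);

      declare
         Ref : constant Counter_Tables.Variable_Reference_Type :=
           Table.Find_Or_Insert_Var ((Name => 'a', Count => 0), Found);
      begin
         --  Found is True; update the payload, never the key component.
         Ref.Count := Ref.Count + 1;
      end;
   end Hash_Table_Demo;
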
diff --git a/sal-gen_unbounded_definite_min_heaps_fibonacci.adb 
b/sal-gen_unbounded_definite_min_heaps_fibonacci.adb
index c88a0cb133..73731519ef 100644
--- a/sal-gen_unbounded_definite_min_heaps_fibonacci.adb
+++ b/sal-gen_unbounded_definite_min_heaps_fibonacci.adb
@@ -2,7 +2,7 @@
 --
 --  See spec.
 --
---  Copyright (C) 2017 - 2019 Free Software Foundation, Inc.
+--  Copyright (C) 2017 - 2019, 2022 Free Software Foundation, Inc.
 --
 --  This library is free software;  you can redistribute it and/or modify it
 --  under terms of the  GNU General Public License  as published by the Free
@@ -320,6 +320,11 @@ package body SAL.Gen_Unbounded_Definite_Min_Heaps_Fibonacci is
       return (Element => Heap.Min.all.Element'Access, Dummy => 1);
    end Peek;
 
+   function Peek_Var (Heap : in Heap_Type) return Variable_Reference_Type
+   is begin
+      return (Element => Heap.Min.all.Element'Access, Dummy => 1);
+   end Peek_Var;
+
    procedure Process (Heap : in Heap_Type; Process_Element : access procedure 
(Element : in Element_Type))
    is
       type Cursor is record
diff --git a/sal-gen_unbounded_definite_min_heaps_fibonacci.ads 
b/sal-gen_unbounded_definite_min_heaps_fibonacci.ads
index 44174b5359..7c621e2234 100644
--- a/sal-gen_unbounded_definite_min_heaps_fibonacci.ads
+++ b/sal-gen_unbounded_definite_min_heaps_fibonacci.ads
@@ -7,7 +7,7 @@
 --  [1] Introduction to Algorithms, Third Edition. Thomas H. Cormen,
 --  Charles E. Leiserson, Ronald L. Rivest, Clifford Stein. Chapter 19.
 --
---  Copyright (C) 2017 - 2020 Free Software Foundation, Inc.
+--  Copyright (C) 2017 - 2022 Free Software Foundation, Inc.
 --
 --  This library is free software;  you can redistribute it and/or modify it
 --  under terms of the  GNU General Public License  as published by the Free
@@ -51,15 +51,18 @@ package SAL.Gen_Unbounded_Definite_Min_Heaps_Fibonacci is
    function Count (Heap : in Heap_Type) return Base_Peek_Type;
    --  Return count of elements in Heap.
 
-   function Remove (Heap : in out Heap_Type) return Element_Type;
+   function Remove (Heap : in out Heap_Type) return Element_Type
+   with Pre => Heap.Count > 0;
    --  Remove minimum element in Heap, return it.
 
-   function Min_Key (Heap : in out Heap_Type) return Key_Type;
+   function Min_Key (Heap : in out Heap_Type) return Key_Type
+   with Pre => Heap.Count > 0;
    --  Return a copy of the minimum key value.
 
    function Get (Heap : in out Heap_Type) return Element_Type renames Remove;
 
-   procedure Drop (Heap : in out Heap_Type);
+   procedure Drop (Heap : in out Heap_Type)
+   with Pre => Heap.Count > 0;
    --  Remove minimum element in Heap, discard it.
 
    procedure Add (Heap : in out Heap_Type; Item : in Element_Type);
@@ -75,7 +78,17 @@ package SAL.Gen_Unbounded_Definite_Min_Heaps_Fibonacci is
    --  Return a constant reference to the min element.
    pragma Inline (Peek);
 
-   --  We don't provide a Cursor/Iterator interface; to complex to
+   type Variable_Reference_Type (Element : not null access Element_Type) is 
private with
+     Implicit_Dereference => Element;
+
+   function Peek_Var (Heap : in Heap_Type) return Variable_Reference_Type
+   with Pre => Heap.Count > 0;
+   pragma Inline (Peek_Var);
+   --  Return a variable reference to the min element. User must not change 
Key.
+   --
+   --  Not named "Peek" to avoid ambiguity with the constant version.
+
+   --  We don't provide a Cursor/Iterator interface; too complex to
    --  implement. So far, we only need a read-only forward iterator,
    --  which Process provides.
 
@@ -108,6 +121,11 @@ private
       Dummy : Integer := raise Program_Error with "uninitialized reference";
    end record;
 
+   type Variable_Reference_Type (Element : not null access Element_Type) is
+   record
+      Dummy : Integer := raise Program_Error with "uninitialized reference";
+   end record;
+
    Empty_Heap : constant Heap_Type := (Ada.Finalization.Controlled with Min => 
null, Count => 0);
 
 end SAL.Gen_Unbounded_Definite_Min_Heaps_Fibonacci;
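A minimal usage sketch for the new Peek_Var, assuming a hypothetical
instantiation Job_Heaps of this generic whose Element_Type is a record with a
Priority key component and a mutable Payload component:

    declare
       Heap : Job_Heaps.Heap_Type;
    begin
       Heap.Add ((Priority => 1, Payload => 0));
       --  Peek_Var yields a writable reference to the minimum element via
       --  Implicit_Dereference; only non-key components may be modified.
       Heap.Peek_Var.Payload := Heap.Peek_Var.Payload + 1;
       --  Heap.Peek_Var.Priority := 2;  --  would violate the Peek_Var contract
    end;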
diff --git a/sal-gen_unbounded_definite_queues.adb 
b/sal-gen_unbounded_definite_queues.adb
index af53823ee8..4c4b60b029 100644
--- a/sal-gen_unbounded_definite_queues.adb
+++ b/sal-gen_unbounded_definite_queues.adb
@@ -50,40 +50,6 @@ package body SAL.Gen_Unbounded_Definite_Queues is
       Queue.Data.Delete_First;
    end Drop;
 
-   function Peek (Queue : in Pkg.Queue; N : Peek_Type := 1) return 
Constant_Reference_Type
-   is
-      use Ada.Containers;
-      use Element_Lists;
-      I : Cursor := Queue.Data.First;
-   begin
-      if Count_Type (N) > Queue.Data.Length then
-         raise Parameter_Error;
-      end if;
-
-      for K in 2 .. N loop
-         Next (I);
-      end loop;
-
-      return (Element => Element_Lists.Constant_Reference (Queue.Data, 
I).Element, Dummy => 1);
-   end Peek;
-
-   function Variable_Peek (Queue : in out Pkg.Queue; N : Peek_Type := 1) 
return Variable_Reference_Type
-   is
-      use Ada.Containers;
-      use Element_Lists;
-      I : Cursor := Queue.Data.First;
-   begin
-      if Count_Type (N) > Queue.Data.Length then
-         raise Parameter_Error;
-      end if;
-
-      for K in 2 .. N loop
-         Next (I);
-      end loop;
-
-      return (Element => Element_Lists.Variable_Reference (Queue.Data, 
I).Element, Dummy => 1);
-   end Variable_Peek;
-
    procedure Add (Queue : in out Pkg.Queue; Item : in Element_Type)
    is begin
       Queue.Data.Append (Item);
diff --git a/sal-gen_unbounded_definite_queues.ads 
b/sal-gen_unbounded_definite_queues.ads
index 237800daf9..304506abda 100644
--- a/sal-gen_unbounded_definite_queues.ads
+++ b/sal-gen_unbounded_definite_queues.ads
@@ -54,27 +54,6 @@ package SAL.Gen_Unbounded_Definite_Queues is
    --
    --  Raise Container_Empty if Is_Empty.
 
-   type Constant_Reference_Type (Element : not null access constant 
Element_Type) is private
-   with
-      Implicit_Dereference => Element;
-
-   function Peek (Queue : in Pkg.Queue; N : Peek_Type := 1) return 
Constant_Reference_Type;
-   pragma Inline (Peek);
-   --  Return a constant reference to a queue item. N = 1 is the queue
-   --  head.
-   --
-   --  Raise Parameter_Error if N > Count
-
-   type Variable_Reference_Type (Element : not null access Element_Type) is 
private
-   with Implicit_Dereference => Element;
-
-   function Variable_Peek (Queue : in out Pkg.Queue; N : Peek_Type := 1) 
return Variable_Reference_Type;
-   pragma Inline (Variable_Peek);
-   --  Return a variable reference to a queue item. N = 1 is the queue
-   --  head.
-   --
-   --  Raises Parameter_Error if N > Count
-
    procedure Add (Queue : in out Pkg.Queue; Item : in Element_Type);
    --  Add Element to the tail/back of Queue.
 
@@ -95,16 +74,6 @@ private
       --  Add at Tail/Back = Last, remove at Head/Front = First.
    end record;
 
-   type Constant_Reference_Type (Element : not null access constant 
Element_Type) is
-   record
-      Dummy : Integer := raise Program_Error with "uninitialized reference";
-   end record;
-
-   type Variable_Reference_Type (Element : not null access Element_Type) is
-   record
-      Dummy : Integer := raise Program_Error with "uninitialized reference";
-   end record;
-
    Empty_Queue : constant Queue := (Data => Element_Lists.Empty_List);
 
 end SAL.Gen_Unbounded_Definite_Queues;
diff --git a/sal-gen_unbounded_definite_red_black_trees.adb 
b/sal-gen_unbounded_definite_red_black_trees.adb
index 533b23cd90..b5190cc186 100644
--- a/sal-gen_unbounded_definite_red_black_trees.adb
+++ b/sal-gen_unbounded_definite_red_black_trees.adb
@@ -450,12 +450,12 @@ package body SAL.Gen_Unbounded_Definite_Red_Black_Trees is
       if Node = null then
          raise Not_Found;
       else
-         --  WORKAROUND: GNAT Community 2019 requires Node.all.Element here,
-         --  GNAT Community 2020 and GNAT Pro 21.0w 20200426 require .all _not_
-         --  be here. The code is technically legal either way, so both
-         --  compilers have a bug. Matching 2020 for now. GNAT Pro 22, GNAT
-         --  Community 2021 fix the bug. AdaCore ticket T503-001 on Eurocontrol
-         --  support contract.
+         --  WORKAROUND: GNAT Community 2019 requires "Node.all.Element'Access"
+         --  here, GNAT Community 2020 and GNAT Pro 21.0w 20200426 require .all
+         --  _not_ be here. The code is technically legal either way, so both
+         --  compilers have a bug. Matching 2020 for now. AdaCore ticket
+         --  T503-001 on Eurocontrol support contract. Fixed in GNAT Pro 22,
+         --  GNAT Community 2021.
          return (Element => Node.Element'Access, Dummy => 1);
       end if;
    end Constant_Ref;
diff --git a/sal-gen_unbounded_definite_red_black_trees.ads 
b/sal-gen_unbounded_definite_red_black_trees.ads
index 05b6d60b53..9323e67e0e 100644
--- a/sal-gen_unbounded_definite_red_black_trees.ads
+++ b/sal-gen_unbounded_definite_red_black_trees.ads
@@ -35,6 +35,9 @@ with Ada.Iterator_Interfaces;
 with Ada.Unchecked_Deallocation;
 generic
    type Element_Type is private;
+   --  Element_Type must have valid default initialization; one
+   --  non-initialized object of this type is declared in Tree.Nil.
+
    type Key_Type (<>) is private;
    with function Key (Element : in Element_Type) return Key_Type is <>;
    with function Key_Compare (Left, Right : in Key_Type) return Compare_Result 
is <>;
@@ -228,7 +231,7 @@ private
       --  'null'. This simplifies several algorithms (for example,
       --  Node.Left.Color is always valid). Its parent, left, right links
       --  are used as temp storage for some algorithms (especially Delete).
-      --  Nil.Color is Black.
+      --  Nil.Color is Black. Nil.Element is never accessed.
    end record;
 
    type Cursor is record
diff --git a/sal-gen_unbounded_definite_stacks.adb 
b/sal-gen_unbounded_definite_stacks.adb
index e13bc2c6f6..f128e713dc 100644
--- a/sal-gen_unbounded_definite_stacks.adb
+++ b/sal-gen_unbounded_definite_stacks.adb
@@ -143,6 +143,16 @@ package body SAL.Gen_Unbounded_Definite_Stacks is
       end if;
    end Top;
 
+   procedure Bottom_Pop (Stack : in out Sguds.Stack)
+   is begin
+      if Stack.Top = 0 then
+         raise Container_Empty;
+      else
+         Stack.Top := Stack.Top - 1;
+         Stack.Data (1 .. Stack.Top) := Stack.Data (2 .. Stack.Top + 1);
+      end if;
+   end Bottom_Pop;
+
    procedure Set_Depth
      (Stack : in out Sguds.Stack;
       Depth : in     Peek_Type)
@@ -165,6 +175,33 @@ package body SAL.Gen_Unbounded_Definite_Stacks is
       Stack.Data (Depth - Index + 1) := Element;
    end Set;
 
+   function Invert (Stack : in Sguds.Stack) return Sguds.Stack
+   is
+   begin
+      return Result : constant Sguds.Stack := Stack do
+         for I in 1 .. Result.Top loop
+            Result.Data (Result.Top - I + 1) := Stack.Data (I);
+         end loop;
+      end return;
+   end Invert;
+
+   procedure Copy_Slice
+     (Source             : in     Stack;
+      Target             : in out Stack;
+      Source_Start_Depth : in     Peek_Type;
+      Target_Start_Depth : in     Peek_Type;
+      Count              : in     Peek_Type)
+   is
+      S : Base_Peek_Type := Source.Top - Source_Start_Depth + 1;
+      T : Base_Peek_Type := Target.Top - Target_Start_Depth + 1;
+   begin
+      for I in 1 .. Count  loop
+         Target.Data (T) := Source.Data (S);
+         S := S + 1;
+         T := T + 1;
+      end loop;
+   end Copy_Slice;
+
    function Constant_Reference
      (Container : aliased in Stack'Class;
       Position  :         in Peek_Type)
diff --git a/sal-gen_unbounded_definite_stacks.ads 
b/sal-gen_unbounded_definite_stacks.ads
index d2b69027e3..d8c5017fcd 100644
--- a/sal-gen_unbounded_definite_stacks.ads
+++ b/sal-gen_unbounded_definite_stacks.ads
@@ -92,6 +92,11 @@ package SAL.Gen_Unbounded_Definite_Stacks is
    --
    --  Raises Container_Empty if Is_Empty.
 
+   procedure Bottom_Pop (Stack : in out Sguds.Stack)
+   with Pre => Stack.Depth >= 1;
+   --  Remove one item from the bottom of the stack (element
+   --  Stack.Depth), discard it.
+
    procedure Set_Depth
      (Stack : in out Sguds.Stack;
       Depth : in     Peek_Type);
@@ -112,6 +117,20 @@ package SAL.Gen_Unbounded_Definite_Stacks is
    --
    --  Useful when creating a stack from pre-existing data.
 
+   function Invert (Stack : in Sguds.Stack) return Sguds.Stack;
+   --  Return a new stack with the same elements as Stack, in inverted order.
+
+   procedure Copy_Slice
+     (Source             : in     Stack;
+      Target             : in out Stack;
+      Source_Start_Depth : in     Peek_Type;
+      Target_Start_Depth : in     Peek_Type;
+      Count              : in     Peek_Type);
+   --  Copy elements Source.Peek (Source_Start_Depth) .. Source.Peek
+   --  (Source_Start_Depth - Count + 1) to Target (Target_Start_Depth) ..
+   --  Target (Target_Start_Depth - Count + 1), overwriting existing
+   --  Target elements.
+
    type Constant_Reference_Type (Element : not null access constant 
Element_Type) is private with
      Implicit_Dereference => Element;
 
@@ -145,7 +164,7 @@ private
       Top  : Base_Peek_Type := Invalid_Peek_Index; -- empty
       Data : Element_Array_Access;
 
-      --  Top of stack is at Data (Top).
+      --  Top of stack is at Data (Top); bottom at Data (1).
       --  Data (1 .. Top) has been set at some point.
    end record;
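A minimal usage sketch for the new stack operations, assuming the generic takes
a single Element_Type formal and provides the usual Push (both assumptions;
see the full spec):

    declare
       package Int_Stacks is new SAL.Gen_Unbounded_Definite_Stacks (Integer);
       A, B : Int_Stacks.Stack;
    begin
       A.Push (1);
       A.Push (2);
       A.Push (3);       --  A, top to bottom: 3 2 1
       B := A.Invert;    --  B, top to bottom: 1 2 3
       A.Bottom_Pop;     --  discards bottom element 1; A is now 3 2
    end;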
 
diff --git a/sal-gen_unbounded_definite_vectors.adb 
b/sal-gen_unbounded_definite_vectors.adb
index be86525d6d..5eac77f1a6 100644
--- a/sal-gen_unbounded_definite_vectors.adb
+++ b/sal-gen_unbounded_definite_vectors.adb
@@ -2,7 +2,7 @@
 --
 --  See spec.
 --
---  Copyright (C) 2018 - 2021 Free Software Foundation, Inc.
+--  Copyright (C) 2018 - 2022 Free Software Foundation, Inc.
 --
 --  This library is free software;  you can redistribute it and/or modify it
 --  under terms of the  GNU General Public License  as published by the Free
@@ -471,6 +471,23 @@ package body SAL.Gen_Unbounded_Definite_Vectors is
       Set_Last (Container, Last);
    end Set_First_Last;
 
+   procedure Extend
+     (Container : in out Vector;
+      Index     : in     Index_Type)
+   is begin
+      if Container.First = No_Index then
+         Set_First (Container, Index);
+         Set_Last (Container, Index);
+
+      elsif Index < Container.First then
+         Set_First (Container, Index);
+
+      elsif Index > Container.Last then
+         Set_Last (Container, Index);
+
+      end if;
+   end Extend;
+
    procedure Delete (Container : in out Vector; Index : in Index_Type)
    is
       J : constant Peek_Type := To_Peek_Type (Index);
@@ -488,8 +505,8 @@ package body SAL.Gen_Unbounded_Definite_Vectors is
       if Container.Length = 0 then
          return False;
       else
-         for It of Container.Elements.all loop
-            if It = Element then
+         for I in To_Peek_Type (Container.First) .. To_Peek_Type 
(Container.Last) loop
+            if Container.Elements (I) = Element then
                return True;
             end if;
          end loop;
diff --git a/sal-gen_unbounded_definite_vectors.ads 
b/sal-gen_unbounded_definite_vectors.ads
index 67c071aafe..4239dd67a4 100644
--- a/sal-gen_unbounded_definite_vectors.ads
+++ b/sal-gen_unbounded_definite_vectors.ads
@@ -24,7 +24,7 @@
 --  several subprogram argument modes, and why Container must be an
 --  access discriminant in Cursor and Iterator.
 --
---  Copyright (C) 2018 - 2021 Free Software Foundation, Inc.
+--  Copyright (C) 2018 - 2022 Free Software Foundation, Inc.
 --
 --  This library is free software;  you can redistribute it and/or modify it
 --  under terms of the  GNU General Public License  as published by the Free
@@ -156,6 +156,11 @@ package SAL.Gen_Unbounded_Definite_Vectors is
    --  Elements in the expansion from previous First .. Last are set to
    --  Default_Element.
 
+   procedure Extend
+     (Container : in out Vector;
+      Index     : in     Index_Type);
+   --  Extend Container index range (if needed) to include Index.
+
    procedure Delete (Container : in out Vector; Index : in Index_Type);
    --  Replace Index element contents with Default_Element. If Index =
    --  Container.Last_Index, Container.Last_Index is decremented.
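A sketch of Extend, using the same three generic actuals (index type, element
type, default element) as the Hash_Arrays instantiation earlier in this patch;
the behavior follows the Extend body above:

    declare
       package Int_Vectors is new SAL.Gen_Unbounded_Definite_Vectors
         (Natural, Integer, 0);
       V : Int_Vectors.Vector;
    begin
       Int_Vectors.Extend (V, 5);  --  empty vector; range becomes 5 .. 5
       Int_Vectors.Extend (V, 2);  --  below First; range becomes 2 .. 5
       Int_Vectors.Extend (V, 4);  --  already in range; no change
    end;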
diff --git a/sal-gen_unbounded_sparse_ordered_sets.adb 
b/sal-gen_unbounded_sparse_ordered_sets.adb
new file mode 100644
index 0000000000..9ec5e5301b
--- /dev/null
+++ b/sal-gen_unbounded_sparse_ordered_sets.adb
@@ -0,0 +1,85 @@
+--  Abstract :
+--
+--  See spec.
+--
+--  Copyright (C) 2020 - 2021 Free Software Foundation All Rights Reserved.
+--
+--  This library is free software;  you can redistribute it and/or modify it
+--  under terms of the  GNU General Public License  as published by the Free
+--  Software  Foundation;  either version 3,  or (at your  option) any later
+--  version. This library is distributed in the hope that it will be useful,
+--  but WITHOUT ANY WARRANTY;  without even the implied warranty of MERCHAN-
+--  TABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+
+pragma License (Modified_GPL);
+
+package body SAL.Gen_Unbounded_Sparse_Ordered_Sets is
+
+   procedure Clear (Set : in out Pkg.Set)
+   is begin
+      Set.Tree.Finalize;
+      Set.Tree.Initialize;
+   end Clear;
+
+   function Count (Set : in Pkg.Set) return Ada.Containers.Count_Type
+   is
+      use Ada.Containers;
+      Result : Count_Type := 0;
+   begin
+      for N of Set loop
+         Result := @ + 1;
+      end loop;
+      return Result;
+   end Count;
+
+   procedure Insert (Set : in out Pkg.Set; Item : in Index_Type)
+   is begin
+      Set.Tree.Insert (Item, Duplicate => Ignore);
+   end Insert;
+
+   function Contains (Set : in Pkg.Set; Item : in Index_Type) return Boolean
+   is begin
+      return Boolean_Trees.Has_Element (Set.Tree.Find (Item));
+   end Contains;
+
+   procedure Delete (Set : in out Pkg.Set; Item : in Index_Type)
+   is begin
+      Set.Tree.Delete (Item);
+   end Delete;
+
+   function Has_Element (Position : in Cursor) return Boolean
+   is begin
+      return Boolean_Trees.Has_Element (Position.Cur);
+   end Has_Element;
+
+   function Element (Position : in Cursor) return Index_Type
+   is begin
+      return Boolean_Trees.Key (Position.Cur);
+   end Element;
+
+   function Constant_Ref
+     (Container : aliased in Set;
+      Position  :         in Cursor)
+     return Constant_Reference_Type
+   is begin
+      return (Element => Container.Tree.Unchecked_Const_Ref (Position.Cur), 
Dummy => 0);
+   end Constant_Ref;
+
+   function Iterate (Container : aliased in Pkg.Set'Class) return Iterator
+   is begin
+      return
+        (Container => Container.Tree'Access,
+         Iter      => Container.Tree.Iterate);
+   end Iterate;
+
+   overriding function First (Iterator : in Pkg.Iterator) return Cursor
+   is begin
+      return (Cur => Iterator.Iter.First);
+   end First;
+
+   overriding function Next (Iterator : in Pkg.Iterator; Position : in Cursor) 
return Cursor
+   is begin
+      return (Cur => Iterator.Iter.Next (Position.Cur));
+   end Next;
+
+end SAL.Gen_Unbounded_Sparse_Ordered_Sets;
diff --git a/sal-gen_unbounded_sparse_ordered_sets.ads 
b/sal-gen_unbounded_sparse_ordered_sets.ads
new file mode 100644
index 0000000000..d8c130e3b3
--- /dev/null
+++ b/sal-gen_unbounded_sparse_ordered_sets.ads
@@ -0,0 +1,106 @@
+--  Abstract :
+--
+--  Unbounded sparse sets.
+--
+--  Copyright (C) 2020 - 2022 Free Software Foundation All Rights Reserved.
+--
+--  This library is free software;  you can redistribute it and/or modify it
+--  under terms of the  GNU General Public License  as published by the Free
+--  Software  Foundation;  either version 3,  or (at your  option) any later
+--  version. This library is distributed in the hope that it will be useful,
+--  but WITHOUT ANY WARRANTY;  without even the implied warranty of MERCHAN-
+--  TABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+
+pragma License (Modified_GPL);
+
+with Ada.Iterator_Interfaces;
+with SAL.Gen_Unbounded_Definite_Red_Black_Trees;
+generic
+   type Index_Type is private;
+   --  Index_Type must have a valid default initialization; it is used as
+   --  Gen_Unbounded_Definite_Red_Black_Trees.Element_Type.
+
+   with function Index_Compare (Left, Right : in Index_Type) return 
Compare_Result;
+package SAL.Gen_Unbounded_Sparse_Ordered_Sets is
+
+   package Pkg renames Gen_Unbounded_Sparse_Ordered_Sets;
+
+   type Set is tagged private
+   with
+     Constant_Indexing => Constant_Ref,
+     Default_Iterator  => Iterate,
+     Iterator_Element  => Index_Type;
+   --  We'd like to have Constant_Indexing return a Boolean, so we could
+   --  use 'if Set (Item) then'. But then the default iterator would
+   --  always return True, instead of Index_Type; we can't specify a
+   --  different Constant_Indexing function for the default iterator.
+
+   procedure Clear (Set : in out Pkg.Set);
+   --  Set Set to empty.
+
+   function Count (Set : in Pkg.Set) return Ada.Containers.Count_Type;
+
+   procedure Insert (Set : in out Pkg.Set; Item : in Index_Type);
+   --  No error if already present.
+
+   function Contains (Set : in Pkg.Set; Item : in Index_Type) return Boolean;
+
+   procedure Delete (Set : in out Pkg.Set; Item : in Index_Type);
+   --  Invalid while an iterator is active; not enforced.
+
+   type Cursor is private;
+
+   function Has_Element (Position : in Cursor) return Boolean;
+
+   function Element (Position : in Cursor) return Index_Type;
+
+   type Constant_Reference_Type (Element : not null access constant 
Index_Type) is private with
+     Implicit_Dereference => Element;
+
+   function Constant_Ref
+     (Container : aliased in Set;
+      Position  :         in Cursor)
+     return Constant_Reference_Type
+   with Inline;
+
+   package Iterators is new Ada.Iterator_Interfaces (Cursor, Has_Element);
+
+   type Iterator (<>) is new Iterators.Forward_Iterator with private;
+
+   function Iterate (Container : aliased in Set'Class) return Iterator;
+   --  Iterating yields the Index_Type values that have been inserted.
+
+   overriding function First (Iterator : in Pkg.Iterator) return Cursor;
+   overriding function Next (Iterator : in Pkg.Iterator; Position : in Cursor) 
return Cursor;
+
+private
+   function Key (Item : in Index_Type) return Index_Type
+   is (Item);
+
+   package Boolean_Trees is new SAL.Gen_Unbounded_Definite_Red_Black_Trees
+     (Element_Type => Index_Type,
+      Key_Type     => Index_Type,
+      Key          => Key,
+      Key_Compare  => Index_Compare);
+
+   type Set is tagged record
+      Tree : aliased Boolean_Trees.Tree;
+   end record;
+
+   type Constant_Reference_Type (Element : not null access constant Index_Type)
+   is record
+      Dummy : Integer := raise Program_Error with "uninitialized reference";
+   end record;
+
+   type Cursor is record
+      Cur : Boolean_Trees.Cursor;
+   end record;
+
+   Empty_Set : constant Set := (Tree => Boolean_Trees.Empty_Tree);
+
+   type Iterator (Container : not null access constant Boolean_Trees.Tree) is 
new Iterators.Forward_Iterator
+   with record
+      Iter : Boolean_Trees.Iterator (Container);
+   end record;
+
+end SAL.Gen_Unbounded_Sparse_Ordered_Sets;
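A minimal instantiation sketch; Index_Compare uses the SAL.Gen_Compare_Integer
generic added to sal.ads later in this patch:

    with SAL;
    with SAL.Gen_Unbounded_Sparse_Ordered_Sets;
    procedure Sparse_Set_Demo is
       function Compare is new SAL.Gen_Compare_Integer (Natural);
       package Line_Sets is new SAL.Gen_Unbounded_Sparse_Ordered_Sets
         (Index_Type => Natural, Index_Compare => Compare);
       Set : Line_Sets.Set;
    begin
       Set.Insert (3);
       Set.Insert (42);
       Set.Insert (3);                     --  no error; already present
       pragma Assert (Set.Contains (42));
       for Line of Set loop                --  yields inserted indices in order
          null;
       end loop;
       Set.Delete (3);
    end Sparse_Set_Demo;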
diff --git a/sal-gen_unconstrained_array_image.ads 
b/sal-gen_unconstrained_array_image.ads
deleted file mode 100644
index 226ba00d93..0000000000
--- a/sal-gen_unconstrained_array_image.ads
+++ /dev/null
@@ -1,24 +0,0 @@
---  Abstract :
---
---  Image for unconstrained Ada array types
---
---  Copyright (C) 2019 Free Software Foundation, Inc.
---
---  This library is free software;  you can redistribute it and/or modify it
---  under terms of the  GNU General Public License  as published by the Free
---  Software  Foundation;  either version 3,  or (at your  option) any later
---  version. This library is distributed in the hope that it will be useful,
---  but WITHOUT ANY WARRANTY;  without even the implied warranty of MERCHAN-
---  TABILITY or FITNESS FOR A PARTICULAR PURPOSE.
-
---  As a special exception under Section 7 of GPL version 3, you are granted
---  additional permissions described in the GCC Runtime Library Exception,
---  version 3.1, as published by the Free Software Foundation.
-
-pragma License (Modified_GPL);
-generic
-   type Index_Type is (<>);
-   type Element_Type is private;
-   type Array_Type is array (Index_Type range <>) of Element_Type;
-   with function Element_Image (Item : in Element_Type) return String;
-function SAL.Gen_Unconstrained_Array_Image (Item : in Array_Type) return 
String;
diff --git a/sal-unix_text_io.adb b/sal-unix_text_io.adb
new file mode 100644
index 0000000000..8622e66d9d
--- /dev/null
+++ b/sal-unix_text_io.adb
@@ -0,0 +1,65 @@
+--  Abstract :
+--
+--  see spec.
+--
+--  Copyright (C) 2020 Free Software Foundation All Rights Reserved.
+--
+--  This library is free software;  you can redistribute it and/or modify it
+--  under terms of the  GNU General Public License  as published by the Free
+--  Software  Foundation;  either version 3,  or (at your  option) any later
+--  version. This library is distributed in the hope that it will be useful,
+--  but WITHOUT ANY WARRANTY;  without even the implied warranty of MERCHAN-
+--  TABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+
+pragma License (Modified_GPL);
+
+package body SAL.Unix_Text_IO is
+
+   use Ada.Streams.Stream_IO;
+
+   procedure Create
+     (File : in out File_Type;
+      Mode : in     File_Mode := Out_File;
+      Name : in     String    := "";
+      Form : in     String    := "")
+   is begin
+      Create
+        (File.Stream,
+         (case Mode is
+          when In_File => Ada.Streams.Stream_IO.In_File,
+          when Out_File => Ada.Streams.Stream_IO.Out_File,
+          when Append_File => Ada.Streams.Stream_IO.Append_File),
+        Name, Form);
+   end Create;
+
+   procedure Close (File : in out File_Type)
+   is begin
+      Close (File.Stream);
+   end Close;
+
+   procedure Put (File : in File_Type; Item : in Character)
+   is begin
+      Write (File.Stream, (1 => Ada.Streams.Stream_Element (Character'Pos 
(Item))));
+   end Put;
+
+   procedure Put (File : in File_Type; Item : in String)
+   is
+      use Ada.Streams;
+      Stream_Item : constant Stream_Element_Array :=
+        (for I in Stream_Element_Offset (Item'First) .. Stream_Element_Offset 
(Item'Last) =>
+         Stream_Element (Character'Pos (Item (Integer (I)))));
+   begin
+      Write (File.Stream, Stream_Item);
+   end Put;
+
+   procedure Put_Line (File : in File_Type; Item : in String)
+   is begin
+      Put (File, Item); New_Line (File);
+   end Put_Line;
+
+   procedure New_Line (File : in File_Type)
+   is begin
+      Write (File.Stream, (1 => Ada.Streams.Stream_Element (Character'Pos 
(ASCII.LF))));
+   end New_Line;
+
+end SAL.Unix_Text_IO;
diff --git a/sal-unix_text_io.ads b/sal-unix_text_io.ads
new file mode 100644
index 0000000000..0912c14620
--- /dev/null
+++ b/sal-unix_text_io.ads
@@ -0,0 +1,47 @@
+--  Abstract :
+--
+--  Replacement for subset of Ada.Text_IO, using Unix line endings
+--  on all platforms.
+--
+--  For very large files, this is significantly faster than Text_IO
+--  output followed by dos2unix.
+--
+--  Copyright (C) 2020 Free Software Foundation All Rights Reserved.
+--
+--  This library is free software;  you can redistribute it and/or modify it
+--  under terms of the  GNU General Public License  as published by the Free
+--  Software  Foundation;  either version 3,  or (at your  option) any later
+--  version. This library is distributed in the hope that it will be useful,
+--  but WITHOUT ANY WARRANTY;  without even the implied warranty of MERCHAN-
+--  TABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+
+pragma License (Modified_GPL);
+
+private with Ada.Streams.Stream_IO;
+package SAL.Unix_Text_IO is
+
+   type File_Type is limited private;
+
+   type File_Mode is (In_File, Out_File, Append_File);
+
+   procedure Create
+     (File : in out File_Type;
+      Mode : in     File_Mode := Out_File;
+      Name : in     String    := "";
+      Form : in     String    := "");
+
+   procedure Close (File : in out File_Type);
+
+   procedure Put (File : in File_Type; Item : in Character);
+   procedure Put (File : in File_Type; Item : in String);
+
+   procedure Put_Line (File : in File_Type; Item : in String);
+
+   procedure New_Line (File : in File_Type);
+
+private
+   type File_Type is limited record
+      Stream : Ada.Streams.Stream_IO.File_Type;
+   end record;
+
+end SAL.Unix_Text_IO;
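A short usage sketch for the new package; every line terminator is a bare LF,
regardless of platform:

    with SAL.Unix_Text_IO;
    procedure Unix_IO_Demo is
       use SAL.Unix_Text_IO;
       File : File_Type;
    begin
       Create (File, Out_File, Name => "demo.txt");
       Put_Line (File, "first line");   --  terminated with LF, never CR LF
       Put (File, "second l");
       Put (File, 'i');
       Put_Line (File, "ne");
       Close (File);
    end Unix_IO_Demo;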
diff --git a/sal.adb b/sal.adb
index d889f4f1c6..d90195600d 100644
--- a/sal.adb
+++ b/sal.adb
@@ -2,7 +2,7 @@
 --
 --  See spec.
 --
---  Copyright (C) 1997 - 2004, 2006, 2009, 2019, 2020 Free Software 
Foundation, Inc.
+--  Copyright (C) 1997 - 2004, 2006, 2009, 2019 - 2022 Free Software 
Foundation, Inc.
 --
 --  SAL is free software; you can redistribute it and/or modify it
 --  under terms of the GNU General Public License as published by the
@@ -28,7 +28,7 @@ package body SAL is
 
    function Version return String is
    begin
-      return "SAL 3.5";
+      return "SAL 3.6";
    end Version;
 
    function String_Compare (Left, Right : in String) return Compare_Result
@@ -67,4 +67,15 @@ package body SAL is
       end if;
    end String_Compare;
 
+   function Gen_Compare_Integer (Left, Right : in Item_Type) return 
Compare_Result
+   is begin
+      if Left < Right then
+         return Less;
+      elsif Left > Right then
+         return Greater;
+      else
+         return Equal;
+      end if;
+   end Gen_Compare_Integer;
+
 end SAL;
diff --git a/sal.ads b/sal.ads
index 843ba59675..a683a1de6b 100644
--- a/sal.ads
+++ b/sal.ads
@@ -9,7 +9,7 @@
 --
 --  Contact Stephe at stephen_leake@stephe-leake.org.
 --
---  Copyright (C) 1997 - 2004, 2008, 2009, 2015, 2017, 2018, 2020 Free 
Software Foundation, Inc.
+--  Copyright (C) 1997 - 2004, 2008, 2009, 2015, 2017, 2018, 2020, 2021 Free 
Software Foundation, Inc.
 --
 --  SAL is free software; you can redistribute it and/or modify it
 --  under terms of the GNU General Public License as published by the
@@ -76,4 +76,8 @@ package SAL is
 
    function String_Compare (Left, Right : in String) return Compare_Result;
 
+   generic
+   type Item_Type is (<>);
+   function Gen_Compare_Integer (Left, Right : in Item_Type) return 
Compare_Result;
+
 end SAL;
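A minimal sketch of instantiating the new Gen_Compare_Integer for a discrete
type:

    with SAL; use SAL;
    procedure Compare_Demo is
       type Line_Number is range 1 .. 1_000_000;
       function Compare is new Gen_Compare_Integer (Line_Number);
    begin
       pragma Assert (Compare (3, 7) = Less);
       pragma Assert (Compare (7, 7) = Equal);
       pragma Assert (Compare (9, 7) = Greater);
       null;
    end Compare_Demo;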
diff --git a/standard_common.gpr b/standard_common.gpr
index ce019c7683..3d4652ee6c 100644
--- a/standard_common.gpr
+++ b/standard_common.gpr
@@ -2,7 +2,7 @@
 --
 --  Standard settings for all of Stephe's Ada projects.
 --
---  Copyright (C) 2018 - 2020 Free Software Foundation, Inc.
+--  Copyright (C) 2018 - 2021 Free Software Foundation, Inc.
 --
 --  This program is free software; you can redistribute it and/or
 --  modify it under terms of the GNU General Public License as
@@ -22,6 +22,9 @@ project Standard_Common is
    type Build_Type is ("Debug", "Normal");
    Build : Build_Type := External ("Standard_Common_Build", "Normal");
 
+   type Assertions_Type is ("Build", "Off");
+   Assertions : Assertions_Type := External ("Standard_Common_Assertions", 
"Build");
+
    type Profile_Type is ("On", "Off");
    Profile : Profile_Type := External ("Standard_Common_Profile", "Off");
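Like the existing Build and Profile scenario variables, the new Assertions
variable is set on the gprbuild command line; for example (the project file
name here is only a placeholder):

    gprbuild -P my_project.gpr -XStandard_Common_Build=Debug -XStandard_Common_Assertions=Off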
 
@@ -40,6 +43,12 @@ project Standard_Common is
    package Compiler is
       -- Switches for gcc
 
+      -- "-gnatd*" generates compiler debug info; see compiler source
+      -- debug.adb, in Archive/GNAT/*
+      --
+      --   From 20.2:
+      --   -gnatdO - Output immediate error messages
+
       Base_Style_Checks := "-gnaty3abcefhiklnOprtx";
       Line_Length := "-gnatyM120";
 
@@ -49,25 +58,35 @@ project Standard_Common is
         (
          "-fstack-check",
          "-g",
-         "-gnat2012",
          "-gnatfqQ", -- f = all error messages, qQ = process semantics, 
generate .ali if syntax errors
          "-gnatw.d", -- warnings about tags
-         "-gnatwaBCeJL", -- wa = most warnings, wB = no warn on bad fixed 
values, wC = no warn on conditionals
-                         -- we = error on warning, wJ = no warn on 
obsolescent, wL = no warn on elaboration
-         "-gnatyO" -- warn on overriding
+         "-gnatwaBCeJL" -- wa = most warnings, wB = no warn on bad fixed 
values, wC = no warn on conditionals
+                        -- we = error on warning, wJ = no warn on obsolescent, 
wL = no warn on elaboration
         );
 
       --  -gnatVa causes some inline procedures to be non-inlineable;
       --  suppress that warning with -gnatwP.
-      Debug_Switches := Common_Switches &
+      Base_Debug_Switches := Common_Switches &
         (
          "-O0", --  we don't use -Og because that causes gdb to report 
incorrect results in some cases in Ada.
-         "-gnata",  -- assertions, pre/post-conditions
          "-gnatVa", -- validity checks
          "-gnateE", -- extra info in exceptions
          "-gnatwaP" -- no warn on Inline
         );
 
+      case Assertions is
+      when "Build" =>
+         Base_Debug_Switches := Base_Debug_Switches &
+           ("-gnata");  -- assertions, pre/post-conditions
+      when "Off" =>
+         --  Allow turning off assertions with debug on, for execution speed.
+         null;
+      end case;
+
+      Debug_Switches := Base_Debug_Switches &
+        ("-gnat2020"); -- @, declare_expression. Community 2021 says it's 
-gnat2022, but accepts -gnat2020
+      -- Don't use -gnatX; it allows stuff that may not become standard
+
       --  -O3 is measurably faster than -O2 for wisitoken generate
       --  LR1. We include -fstack-check because it catches
       --  hard-to-find bugs, and the processors are so fast.
@@ -76,13 +95,14 @@ project Standard_Common is
       Base_Release_Switches := Common_Switches &
         (
          "-O3",
-         "-fno-var-tracking-assignments",
-         "-gnatyO"
+         "-fno-var-tracking-assignments"
         );
 
       Inlining := ("-gnatn");
 
-      Release_Switches := Base_Release_Switches & Inlining;
+      Release_Switches := Base_Release_Switches & Inlining &
+        "-gnat2020" -- @
+      ;
 
       -- No -ansi; GNAT 7.1 compiler C header files are mingw 64, which don't 
support -ansi
       Debug_Switches_C := ("-Wall", "-Wstrict-prototypes", "-pedantic", 
"-Werror", "-g", "-O0", "-funwind-tables");
diff --git a/wisi-fringe.el b/wisi-fringe.el
index 2194b09f8f..c7b1715334 100644
--- a/wisi-fringe.el
+++ b/wisi-fringe.el
@@ -1,6 +1,6 @@
 ;;; wisi-fringe.el --- show approximate error locations in the fringe
 ;;
-;; Copyright (C) 2018 - 2019  Free Software Foundation, Inc.
+;; Copyright (C) 2018 - 2019, 2021 - 2022  Free Software Foundation, Inc.
 ;;
 ;; This file is part of GNU Emacs.
 ;;
@@ -120,33 +120,34 @@ in the window."
   (remove-overlays (point-min) (point-max) 'wisi-fringe t))
 
 (defun wisi-fringe-display-errors (positions)
-  "Display markers in the left and right fringe for each buffer position in 
POSITIONS.
+  "Display markers in the fringe for each buffer position in POSITIONS.
 The buffer containing POSITIONS must be current, and the window
 displaying that buffer must be current."
   ;; We don't recompute fringe display on scroll, because the user
   ;; will probably have edited the code by then, triggering a new
   ;; parse.
   (wisi-fringe-clean)
-  (let (scaled-posns
-       (buffer-lines (line-number-at-pos (point-max)))
-       (window-lines (window-height))
-       (window-pos-first (window-start))
-       (window-pos-last  (window-end))
-       (window-line-first (line-number-at-pos (window-start))))
-    (dolist (pos positions)
-      (let* ((line (line-number-at-pos pos))
-            (scaled-pos (wisi-fringe--scale line buffer-lines 
window-line-first window-lines)))
-       (when (and (>= pos window-pos-first)
-                  (<= pos window-pos-last))
-         (wisi-fringe--put-left line))
-       (if (and scaled-posns
-                (= (caar scaled-posns) (car scaled-pos)))
-           (setcdr (car scaled-posns) (logior (cdar scaled-posns) (cdr 
scaled-pos)))
-         (push scaled-pos scaled-posns))
-       ))
+  (when positions
+    (let (scaled-posns
+         (buffer-lines (line-number-at-pos (point-max) t))
+         (window-lines (window-height))
+         (window-pos-first (window-start))
+         (window-pos-last  (window-end))
+         (window-line-first (line-number-at-pos (window-start) t)))
+      (dolist (pos positions)
+       (let* ((line (line-number-at-pos (max (point-min) (min (point-max) 
pos)) t))
+              (scaled-pos (wisi-fringe--scale line buffer-lines 
window-line-first window-lines)))
+         (when (and (>= pos window-pos-first)
+                    (<= pos window-pos-last))
+           (wisi-fringe--put-left line))
+         (if (and scaled-posns
+                  (= (caar scaled-posns) (car scaled-pos)))
+             (setcdr (car scaled-posns) (logior (cdar scaled-posns) (cdr 
scaled-pos)))
+           (push scaled-pos scaled-posns))
+         ))
 
-    (dolist (pos scaled-posns)
-      (wisi-fringe--put-right (car pos) (1- (cdr pos))))
-    ))
+      (dolist (pos scaled-posns)
+       (wisi-fringe--put-right (car pos) (1- (cdr pos))))
+      )))
 
 (provide 'wisi-fringe)
diff --git a/wisi-parse-common.el b/wisi-parse-common.el
index 4e71be9933..28df0aa01b 100644
--- a/wisi-parse-common.el
+++ b/wisi-parse-common.el
@@ -1,6 +1,6 @@
 ;;; wisi-parse-common.el --- declarations used by wisi-parse.el, 
wisi-ada-parse.el, and wisi.el -*- lexical-binding:t -*-
 ;;
-;; Copyright (C) 2014, 2015, 2017 - 2019, 2021  Free Software Foundation, Inc.
+;; Copyright (C) 2014, 2015, 2017 - 2022  Free Software Foundation, Inc.
 ;;
 ;; Author: Stephen Leake <stephen_leake@member.fsf.org>
 ;;
@@ -20,6 +20,7 @@
 ;; along with GNU Emacs.  If not, see <http://www.gnu.org/licenses/>.
 
 ;;; Code:
+(require 'cl-lib)
 
 (defcustom wisi-partial-parse-threshold 100001
   "Minimum size that will be parsed by each call to the parser.
@@ -35,6 +36,16 @@ and parse the whole buffer."
   :safe 'integerp)
 (make-variable-buffer-local 'wisi-partial-parse-threshold)
 
+(defcustom wisi-save-all-changes nil
+  "When non-nil, save all changes sent to the parser for each
+buffer, to aid in reproducing bugs. Also writes a copy of the
+buffer to a file at each full parse, to give the starting point
+for the changes. The filename is the visited file name with
+\"-wisi-change-start\" appended."
+  :type 'boolean
+  :group 'wisi
+  :safe 'booleanp)
+
 (cl-defstruct (wisi--lexer-error)
   pos ;; position (integer) in buffer where error was detected.
   message  ;; string error message
@@ -52,11 +63,35 @@ and parse the whole buffer."
   ;; Includes information derived from compiler error recovery to edit
   ;; text to fix one error. Used by ’wisi-repair-error’ to edit buffer.
   pos      ;; position (integer or marker) in buffer where error was detected.
+  pos-2    ;; secondary error position
   message  ;; string error message
   repair   ;; list of wisi--parse-error-repair.
   )
 
+(defconst wisi-parser-transaction-log-buffer-size-default 300000)
+
 (cl-defstruct wisi-parser
+  ;; Per-language values for a wisi parser. Also holds transient
+  ;; values set by the current parse, that must be used before the
+  ;; next parse starts.
+
+  repair-image
+  ;; alist of (TOKEN-ID . STRING); used by repair error
+
+  transaction-log-buffer
+  ;; Buffer holding history of communications with parser; one log per
+  ;; parser instance.
+
+  (transaction-log-buffer-size wisi-parser-transaction-log-buffer-size-default)
+  ;; Max character count to retain in transaction-log-buffer. Set to 0
+  ;; to disable log. Default is large enough for all transactions in
+  ;; test/ada_mode-incremental_parse.adb with lots of verbosity.
+)
+
+(cl-defstruct wisi-parser-local
+  ;; Per-buffer values set by the wisi parser, that must persist past
+  ;; the current parse.
+
   ;; Separate lists for lexer and parse errors, because lexer errors
   ;; must be repaired first, before parse errors can be repaired. And
   ;; they have different structures.
@@ -67,27 +102,65 @@ and parse the whole buffer."
   ;; List of wisi--parse-errors from last parse. Can be more than one if
   ;; parser supports error recovery.
 
-  repair-image
-  ;; alist of (TOKEN-ID . STRING); used by repair error
+  (all-changes -1)
+  ;; List of all changes sent to parser for the current buffer since
+  ;; the last full parse for that buffer (last change at head of
+  ;; list). Used to reproduce bugs. Value of -1 means keeping this
+  ;; list is disabled; it can easily get huge if enabled. See
+  ;; `wisi-save-all-changes', `wisi-process-all-changes-to-cmd'.
 )
 
-(cl-defgeneric wisi-parse-format-language-options ((parser wisi-parser))
+(cl-defgeneric wisi-parser-transaction-log-buffer-name (parser)
+  "Return a buffer name for the transaction log buffer.")
+
+(defun wisi-parse-log-message (parser message)
+  "Write MESSAGE (a string) to PARSER transaction-log-buffer.
+Text properties on MESSAGE are preserved."
+  (let ((max (wisi-parser-transaction-log-buffer-size parser)))
+    (when (> max 0)
+      (unless (buffer-live-p (wisi-parser-transaction-log-buffer parser))
+       (setf (wisi-parser-transaction-log-buffer parser)
+             (get-buffer-create (wisi-parser-transaction-log-buffer-name 
parser)))
+       (with-current-buffer (wisi-parser-transaction-log-buffer parser)
+         (read-only-mode 1)
+         (buffer-disable-undo)))
+      (with-current-buffer (wisi-parser-transaction-log-buffer parser)
+       (goto-char (point-max))
+       (let ((inhibit-read-only t))
+         (insert (format "%s:\n%s\n" (format-time-string "%H:%M:%S.%3N") 
message))
+         (when (> (buffer-size) max)
+           (save-excursion
+             (goto-char (- (buffer-size) max))
+             ;; search for tail of time stamp "mm:ss.mmm:\n"
+             (search-forward-regexp ":[0-9][0-9]:[0-9][0-9].[0-9][0-9][0-9]:$" 
nil t)
+             (forward-line -1)
+             (delete-region (point-min) (point)))))))))
+
+(cl-defgeneric wisi-parse-format-language-options (parser)
   "Return a string to be sent to the parser, containing settings
-for the language-specific parser options."
-  )
+for the language-specific parser options.")
 
-(cl-defgeneric wisi-parse-expand-region ((_parser wisi-parser) begin end)
+(cl-defgeneric wisi-parse-expand-region (_parser begin end)
   "Return a cons SEND-BEGIN . SEND-END that is an expansion of
 region BEGIN END that starts and ends at points the parser can
 handle gracefully."
   (cons begin end))
 
-(defvar-local wisi--parser nil
-  "The current wisi parser; a ‘wisi-parser’ object.")
+(defvar-local wisi-parser-shared nil
+  "The current shared wisi parser; a ‘wisi-parser-shared’ object.
+There is one parser object per language; `wisi-parser-shared' is a
+buffer-local reference to that shared object.")
+
+(defvar-local wisi-parser-local nil
+  "Buffer-local values used by the wisi parser; a ‘wisi-parser-local’ object.")
+
+(defconst wisi-post-parse-actions '(navigate face indent none refactor query 
debug)
+  "Actions that the parser can perform after parsing.
+Only navigate thru indent are valid for partial parse.")
 
 (defun wisi-read-parse-action ()
   "Read a parse action symbol from the minibuffer."
-  (intern-soft (completing-read "parse action (indent): " '(face navigate 
indent) nil t nil nil 'indent)))
+  (intern-soft (completing-read "parse action (indent): " 
wisi-post-parse-actions nil t nil nil 'indent)))
 
 (defun wisi-search-backward-skip (regexp skip-p)
   "Search backward for REGEXP. If SKIP-P returns non-nil, search again.
@@ -112,35 +185,131 @@ Return nil if no match found before eob."
 (defun wisi-show-expanded-region ()
   "For debugging. Expand currently selected region."
   (interactive)
-  (let ((region (wisi-parse-expand-region wisi--parser (region-beginning) 
(region-end))))
+  (let ((region (wisi-parse-expand-region wisi-parser-shared 
(region-beginning) (region-end))))
     (message "pre (%d . %d) post %s" (region-beginning) (region-end) region)
     (set-mark (car region))
     (goto-char (cdr region))
     ))
 
-(cl-defgeneric wisi-parse-adjust-indent ((_parser wisi-parser) indent _repair)
+(cl-defgeneric wisi-parse-adjust-indent (_parser indent _repair)
   "Adjust INDENT for REPAIR (a wisi--parse-error-repair struct). Return new 
indent."
   indent)
 
-(cl-defgeneric wisi-parse-current ((parser wisi-parser) begin send-end 
parse-end)
+(cl-defgeneric wisi-parse-require-process (parser &key nowait)
+    "If PARSER uses an external process, start the process for PARSER.
+If NOWAIT is non-nil, does not wait for the process to respond.")
+
+(cl-defgeneric wisi-parse-current (parser parse-action begin send-end 
parse-end)
   "Parse current buffer starting at BEGIN, continuing at least thru PARSE-END.
-If using an external parser, send it BEGIN thru SEND-END.")
+Send the parser BEGIN thru SEND-END, which does a full or partial
+parse, and performs post-parse action PARSE-ACTION (one of
+`wisi-post-parse-actions') on region BEGIN PARSE-END.  Returns
+parsed region.")
+
+(defvar wisi-parse-full-active nil
+  ;; Only one buffer can be doing a full parse.
+  "Non-nil if `wisi-parse-incremental' was called with full and nowait.
+The value is a list (source-buffer (font-lock-begin
+. font-lock-end)), where (FONT-LOCK-BEGIN . FONT-LOCK-END) is the
+region font-lock attempted to fontify while the parser was
+busy.")
+
+(cl-defgeneric wisi-parse-incremental (parser parser-action &key full nowait)
+  "Incrementally parse current buffer.
+PARSER-ACTION (one of `wisi-post-parse-actions') is used to
+decide whether to wait if parser is busy.  If FULL, do initial
+full parse.  If FULL and NOWAIT, don't wait for parse to
+complete; buffer is read-only until full parse completes.  Text
+changes for incremental parse are stored in `wisi--changes',
+created by `wisi-after-change'.")
+
+(cl-defgeneric wisi-post-parse (parser parse-action begin end)
+  "Perform PARSE-ACTION on region BEGIN END.
+PARSE-ACTION is one of `wisi-post-parse-actions'. Buffer must
+have been previously parsed by `wisi-parse-current' or
+`wisi-parse-incremental'.")
+
+(cl-defgeneric wisi-refactor (parser refactor-action pos)
+  "Perform REFACTOR-ACTION at point POS.")
+
+(defconst wisi-parse-tree-queries
+  ;; Must match wisi.ads Query_Label. Results defined in doc string of 
`wisi-parse-tree-query'.
+  '((node                   .   0)
+    (containing-statement   .  1)
+    (ancestor              .   2)
+    (parent                .   3)
+    (child                 .   4)
+    (print                 .   5)
+    (dump                  .   6)
+    )
+  "Query values for `wisi-parse-tree-query'.")
+
+(cl-defstruct wisi-tree-node
+  "A syntax tree node"
+  address     ;; hexadecimal string
+  id          ;; token_id, a symbol
+  char-region ;; cons (start_pos . end_pos)
+  )
+
+(cl-defgeneric wisi-parse-tree-query (parser query &rest args)
+  "Return result of parse tree query QUERY with ARGS:
+
+- node: ARGS is a buffer position. Return the wisi-tree-node for
+  the terminal at ARGS. If ARGS is in whitespace or comment,
+  preceding terminal.
+
+- containing-statement: ARGS is a buffer position. Return the
+  wisi-tree-node for the statement ancestor of the terminal at
+  ARGS, or nil if no such ancestor. A \"statement\" is one of the
+  statement ids declared by the language-specific grammar
+  backend.
+
+- ancestor: ARGS are a buffer position and a list of ids. Return
+  the wisi-tree-node for the ancestor of the terminal at that pos
+  that is one of the ids, or nil if no such ancestor.
+
+- parent: ARGS are (node-address n). Return the wisi-tree-node
+  for the nth parent of the node, or nil if no such parent.
 
-(cl-defgeneric wisi-refactor ((parser wisi-parser) refactor-action parse-begin 
parse-end edit-begin)
-  "Perform REFACTOR-ACTION at point EDIT_BEGIN.
-STMT-START, STMT-END are the start and end positions of the
-statement containing EDIT_BEGIN.")
+- child: ARGS are (node-address n). Return wisi-tree-node for the
+  nth child of the node, or nil if no such child.
 
-(cl-defgeneric wisi-parse-kill ((parser wisi-parser))
+- print: ARGS ignored. Output parse tree and any errors in the
+  tree to the trace log. Returns t.
+
+- dump: ARGS are (file-name). Dump text representation of parse
+  tree to file file-name, overwriting any existing
+  file. Returns t.
+
+\"terminal at pos\" means pos is in the region defined by the
+terminal token text plus following non_grammar and whitespace."
+  ;; wisi-indent-statement requires this definition of 'terminal at pos'.
+  )
+
+(cl-defgeneric wisi-parse-kill-buffer (parser)
+  "Tell parser the current buffer is being deleted.
+Called by `wisi-parse-kill-buf'.")
+
+(defun wisi-parse-kill-buf ()
+  "Tell parser the current buffer is being deleted.
+For `kill-buffer-hook'."
+  (when wisi-parser-shared
+    (wisi-parse-kill-buffer wisi-parser-shared)))
+
+(cl-defgeneric wisi-parse-reset (parser)
+  "Ensure parser is ready to process a new parse.")
+
+(cl-defgeneric wisi-parse-kill (parser)
   "Kill any external process associated with parser.")
 
-(cl-defgeneric wisi-parse-find-token ((parser wisi-parser) token-symbol)
-  "Find token with TOKEN-SYMBOL on current parser stack, return token struct.
-For use in grammar actions.")
+(cl-defgeneric wisi-parse-enable-memory-report (parser)
+  "Configure parser to report memory use.")
+
+(cl-defgeneric wisi-parse-memory-report-reset (parser)
+  "Reset memory report base.")
 
-(cl-defgeneric wisi-parse-stack-peek ((parser wisi-parser) n)
-  "Return the Nth token on the parse stack.
-For use in grammar actions.")
+(cl-defgeneric wisi-parse-memory-report (parser)
+  "Display memory use since last reset.")
 
 (cl-defstruct
   (wisi-cache
@@ -169,16 +338,34 @@ For use in grammar actions.")
   "Return `wisi-cache' struct from the `wisi-cache' text property at POS."
   (get-text-property pos 'wisi-cache))
 
+(defun wisi-prev-cache (pos)
+  "Return previous cache before POS, or nil if none."
+  (let (cache)
+    ;; If pos is not near cache, p-s-p-c will return pos just after
+    ;; cache, so 1- is the beginning of cache.
+    ;;
+    ;; If pos is just after end of cache, p-s-p-c will return pos at
+    ;; start of cache.
+    ;;
+    ;; So we test for the property before subtracting 1.
+    (setq pos (previous-single-property-change pos 'wisi-cache))
+    (cond
+     ((null pos)
+       nil)
+
+     ((setq cache (get-text-property pos 'wisi-cache))
+      cache)
+
+     (t
+      (setq pos (1- pos))
+      (setq cache (get-text-property pos 'wisi-cache))
+      cache)
+     )))
+
 (defun wisi-backward-cache ()
-  "Move point backward to the beginning of the first token preceding point 
that has a cache.
-Returns cache, or nil if at beginning of buffer."
-  ;; If point is not near cache, p-s-p-c will return pos just after
-  ;; cache, so 1- is the beginning of cache.
-  ;;
-  ;; If point is just after end of cache, p-s-p-c will return pos at
-  ;; start of cache.
-  ;;
-  ;; So we test for the property before subtracting 1.
+  "Move point backward to the first token cache preceding point.
+Returns cache, or nil if at beginning of buffer.
+Assumes the buffer is fully parsed."
   (let ((pos (previous-single-property-change (point) 'wisi-cache))
        cache)
     (cond
@@ -197,9 +384,26 @@ Returns cache, or nil if at beginning of buffer."
       cache)
      )))
 
+(defun wisi-next-cache (pos)
+  "Return next cache after POS, or nil if none."
+  (let (cache)
+    (when (get-text-property pos 'wisi-cache)
+      ;; on a cache; get past it
+      (setq pos (1+ pos)))
+
+    (setq cache (get-text-property pos 'wisi-cache))
+    (unless cache
+      (setq pos (next-single-property-change pos 'wisi-cache))
+      (when pos
+       (setq cache (get-text-property pos 'wisi-cache)))
+      )
+    cache
+    ))
+
 (defun wisi-forward-cache ()
-  "Move point forward to the beginning of the first token after point that has 
a cache.
-Returns cache, or nil if at end of buffer."
+  "Move point forward to the first token cache after point.
+Returns cache, or nil if at end of buffer.
+Assumes the buffer is fully parsed."
   (let (cache pos)
     (when (get-text-property (point) 'wisi-cache)
       ;; on a cache; get past it
@@ -226,89 +430,64 @@ Returns cache, or nil if at end of buffer."
   (unless start (setq start (point)))
   (cons start (+ start (wisi-cache-last cache))))
 
-(defvar wisi-debug 0
+(defcustom wisi-debug 0
   "wisi debug mode:
 0 : normal - ignore parse errors, for indenting new code
-1 : report parse errors (for running tests)
-2 : show parse states, position point at parse errors
-3 : also show top 10 items of parser stack.")
-
-;; The following parameters are easily changeable for debugging.
-(defvar wisi-action-disable nil
-  "If non-nil, disable all elisp actions during parsing.
-Allows timing parse separate from actions.")
-
-(defvar-local wisi-trace-mckenzie 0
-  "McKenzie trace level; 0 for none")
-
-(defvar-local wisi-trace-action 0
-  "Parse action trace level; 0 for none")
-
-(defvar-local wisi-mckenzie-disable nil
-  "If non-nil, disable McKenzie error recovery. Otherwise, use parser 
default.")
-
-(defcustom wisi-mckenzie-task-count nil
-  "If integer, sets McKenzie error recovery task count.
-Higher value (up to system processor limit) runs error recovery
-faster, but may encounter race conditions.  Using only one task
-makes error recovery repeatable; useful for tests.  If nil, uses
-value from grammar file."
+1 : report parse errors (for running tests)"
   :type 'integer
   :group 'wisi
   :safe 'integerp)
-(make-variable-buffer-local 'wisi-mckenzie-task-count)
-
-(defcustom wisi-mckenzie-check-limit nil
-  "If integer, sets McKenzie error recovery algorithm token check limit.
-This sets the number of tokens past the error point that must be
-parsed successfully for a solution to be deemed successful.
-Higher value gives better solutions, but may fail if there are
-two errors close together.  If nil, uses value from grammar
-file."
+
+;; The following parameters are easily changeable for debugging.
+(defcustom wisi-parser-verbosity ""
+  "WisiToken trace config; empty string for none.
+See WisiToken Trace_Enable for complete set of options.
+Examples:
+debug=1 lexer=1 parse=2 action=3"
+  :type 'string
+  :group 'wisi
+  :safe 'stringp)
+(make-variable-buffer-local 'wisi-parser-verbosity)
+
+(defcustom wisi-mckenzie-zombie-limit nil
+  "If integer, overrides %mckenzie_zombie_limit.
+This sets the number of tokens past the error point that other
+parsers accept before the error parser is terminated.  Higher
+value gives better solutions, but may cause too many parsers to
+be active at once.  If nil, uses %mckenzie_zombie_limit value from grammar 
file."
   :type 'integer
   :group 'wisi
   :safe 'integerp)
-(make-variable-buffer-local 'wisi-mckenzie-check-limit)
+(make-variable-buffer-local 'wisi-mckenzie-zombie-limit)
 
 (defcustom wisi-mckenzie-enqueue-limit nil
-  "If integer, sets McKenzie error recovery algorithm enqueue limit.
+  "If integer, overrides %mckenzie_enqueue_limit.
 This sets the maximum number of solutions that will be considered.
 Higher value has more recover power, but will be slower to fail.
-If nil, uses value from grammar file."
+If nil, uses %mckenzie_enqueue_limit value from grammar file."
   :type 'integer
   :group 'wisi
   :safe 'integerp)
 (make-variable-buffer-local 'wisi-mckenzie-enqueue-limit)
 
-(defcustom wisi-parse-max-parallel 15
+(defcustom wisi-parse-max-parallel nil
   "Maximum number of parallel parsers during regular parsing.
-Parallel parsers are used to resolve redundancy in the grammar.
+Parallel parsers are used to resolve conflicts in the grammar.
 If a file needs more than this, it's probably an indication that
-the grammar is excessively redundant."
+the grammar has excessive conflicts. If nil, uses %max_parallel
+value from grammar file (default 15)."
   :type 'integer
   :group 'wisi
   :safe 'integerp)
+(make-variable-buffer-local 'wisi-parse-max-parallel)
 
-(defvar wisi-parse-max-stack-size 500
-  "Maximum parse stack size.
-Larger stack size allows more deeply nested constructs.")
 ;; end of easily changeable parameters
 
-(defvar wisi--parse-action nil
-  ;; not buffer-local; only let-bound in wisi-indent-region, 
wisi-validate-cache
-  "Reason current parse is begin run; one of
-{indent, face, navigate}.")
-
 (defvar-local wisi-indent-comment-col-0 nil
   "If non-nil, comments currently starting in column 0 are left in column 0.
 Otherwise, they are indented with previous comments or code.
 Normally set from a language-specific option.")
 
-(defconst wisi-eoi-term 'Wisi_EOI
-  ;; must match FastToken wisi-output_elisp.adb EOI_Name, which must
-  ;; be part of a valid Ada identifer.
-  "End Of Input token.")
-
 (defconst wisi-class-list
   [motion ;; motion-action
    statement-end
@@ -397,4 +576,45 @@ Normally set from a language-specific option.")
     (format "(%s)" (wisi-tok-token tok)))
    ))
 
+(defun wisi-save-kbd-macro (file-name)
+  "Write `last-kbd-macro' to FILE-NAME."
+  (interactive "F")
+  (with-temp-buffer
+    (insert (format "%s" last-kbd-macro))
+    (write-file file-name))
+  (message "keyboard macro saved to file '%s'" file-name))
+
+(defun wisi-parse-incremental-none ()
+  "Force an incremental parse.
+Signals an error if `wisi-incremental-parse-enable' is nil."
+  (unless wisi-incremental-parse-enable
+    (user-error "wisi-parse-incremental-none with 
wisi-incremental-parse-enable nil"))
+  (save-excursion (wisi-parse-incremental wisi-parser-shared 'none)))
+
+(defun wisi-replay-kbd-macro (macro)
+  "Replay keyboard macro MACRO into current buffer,
+with incremental parse after each key event."
+  (unless wisi-incremental-parse-enable
+    (user-error "wisi-incremental-parse-enable nil; use EMACS_SKIP_UNLESS"))
+  (let ((i 0))
+    (while (< i  (length macro))
+      (execute-kbd-macro (make-vector 1 (aref macro i)))
+      (save-excursion
+       (condition-case err
+           (wisi-parse-incremental wisi-parser-shared 'none)
+         (wisi-parse-error
+          (when (< 0 wisi-debug)
+            ;; allow continuing when parser throws parse-error
+            (signal (car err) (cdr err))))))
+      (setq i (1+ i)))))
+
+(defun wisi-replay-undo (count)
+  "Execute `undo' COUNT times, delaying in between each."
+  (let ((i 0))
+    (undo-start)
+    (while (< i count)
+      (undo-more 1)
+      (sit-for 0.1)
+      (setq i (1+ i)))))
+
 (provide 'wisi-parse-common)
diff --git a/wisi-parse_context.adb b/wisi-parse_context.adb
new file mode 100644
index 0000000000..5a908f0599
--- /dev/null
+++ b/wisi-parse_context.adb
@@ -0,0 +1,787 @@
+--  Abstract :
+--
+--  See spec.
+--
+--  Copyright (C) 2020 - 2022 Free Software Foundation All Rights Reserved.
+--
+--  This library is free software;  you can redistribute it and/or modify it
+--  under terms of the  GNU General Public License  as published by the Free
+--  Software  Foundation;  either version 3,  or (at your  option) any later
+--  version. This library is distributed in the hope that it will be useful,
+--  but WITHOUT ANY WARRANTY;  without even the implied warranty of MERCHAN-
+--  TABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+
+pragma License (GPL);
+
+with Ada.Directories;
+with Ada.Exceptions;
+with Ada.Finalization;
+with Ada.Tags;
+with GNAT.OS_Lib;
+with SAL.Gen_Unbounded_Definite_Red_Black_Trees;
+package body Wisi.Parse_Context is
+
+   function Source_File_Name (Item : in Parse_Context_Access) return String
+   is (Ada.Strings.Unbounded.To_String (Item.File_Name));
+
+   package File_Parse_Context_Maps is new 
SAL.Gen_Unbounded_Definite_Red_Black_Trees
+     (Element_Type => Parse_Context_Access,
+      Key_Type     => String,
+      Key          => Source_File_Name,
+      Key_Compare  => SAL.String_Compare);
+
+   Map : File_Parse_Context_Maps.Tree;
+
+   function Create_No_File
+     (Language : in Wisi.Parse_Context.Language;
+      Trace    : in WisiToken.Trace_Access)
+     return Parse_Context_Access
+   is
+      use WisiToken;
+   begin
+      return Result : constant Parse_Context_Access :=
+        (new Parse_Context'
+           (File_Name                         => +"",
+            Text_Buffer                       => null,
+            Text_Buffer_Byte_Last             => 0,
+            Text_Buffer_Char_Last             => 0,
+            Parser                            => 
WisiToken.Parse.LR.Parser.Parser'
+              (Ada.Finalization.Limited_Controlled with
+               User_Data                      => Wisi.New_User_Data 
(Language.Parse_Data_Template.all),
+               Table                          => Language.Table,
+               Productions                    => Language.Productions,
+               Language_Fixes                 => Language.Fixes,
+               Language_Matching_Begin_Tokens => 
Language.Matching_Begin_Tokens,
+               Language_String_ID_Set         => Language.String_ID_Set,
+               Partial_Parse_Active           => Language.Partial_Parse_Active,
+               Partial_Parse_Byte_Goal        => 
Language.Partial_Parse_Byte_Goal,
+               others                         => <>),
+            Root_Save_Edited_Name             => <>,
+            Save_Edited_Count                 => <>))
+      do
+         Result.Parser.Tree.Lexer := Language.Lexer;
+         if Trace_Incremental_Parse > Outline then
+            Trace.Put_Line
+              ("parse_context (no file) created, language " & 
Ada.Tags.Expanded_Name
+                 (Language.Parse_Data_Template.all'Tag));
+            if Trace_Memory > Outline then
+               Report_Memory (Trace.all, Prefix => True);
+            end if;
+         end if;
+      end return;
+   end Create_No_File;
+
+   procedure Create_No_Text
+     (File_Name : in String;
+      Language  : in Wisi.Parse_Context.Language;
+      Trace     : in WisiToken.Trace_Access)
+   is
+      Temp : constant Parse_Context_Access := Create_No_File (Language, Trace);
+   begin
+      Set_File (File_Name, Temp);
+   end Create_No_Text;
+
+   procedure Set_File (File_Name : in String; Parse_Context : in 
Parse_Context_Access)
+   is
+      use File_Parse_Context_Maps;
+      use WisiToken;
+      use Ada.Strings.Unbounded;
+   begin
+      if Length (Parse_Context.File_Name) > 0 then
+         raise Protocol_Error;
+      end if;
+
+      Parse_Context.File_Name := +File_Name;
+      Map.Insert (Parse_Context);
+   end Set_File;
+
+   function Find_Create
+     (File_Name : in String;
+      Language  : in Wisi.Parse_Context.Language;
+      Trace     : in WisiToken.Trace_Access)
+     return Parse_Context_Access
+   is begin
+      if File_Name'Length = 0 then
+         raise Wisi.Protocol_Error with "no file name given";
+      end if;
+
+      declare
+         use File_Parse_Context_Maps;
+         use WisiToken;
+
+         Found : constant Cursor := Map.Find (File_Name);
+      begin
+         if Has_Element (Found) then
+            return Result : constant Parse_Context_Access := Element (Found) do
+               if Language.Descriptor /= Result.Parser.Tree.Lexer.Descriptor 
then
+                  raise WisiToken.User_Error with "language does not match for 
buffer '" & File_Name & "'";
+               end if;
+               if Trace_Incremental_Parse > Outline then
+                  Trace.Put_Line ("parse_context found");
+               end if;
+            end return;
+         end if;
+
+         return Result : constant Parse_Context_Access :=
+           (new Parse_Context'
+              (File_Name                         => +File_Name,
+               Text_Buffer                       => null,
+               Text_Buffer_Byte_Last             => 0,
+               Text_Buffer_Char_Last             => 0,
+               Parser                            => 
WisiToken.Parse.LR.Parser.Parser'
+                 (Ada.Finalization.Limited_Controlled with
+                  User_Data                      => Wisi.New_User_Data 
(Language.Parse_Data_Template.all),
+                  Table                          => Language.Table,
+                  Productions                    => Language.Productions,
+                  Language_Fixes                 => Language.Fixes,
+                  Language_Matching_Begin_Tokens => 
Language.Matching_Begin_Tokens,
+                  Language_String_ID_Set         => Language.String_ID_Set,
+                  Partial_Parse_Active           => 
Language.Partial_Parse_Active,
+                  Partial_Parse_Byte_Goal        => 
Language.Partial_Parse_Byte_Goal,
+                  others                         => <>),
+               Root_Save_Edited_Name             => <>,
+               Save_Edited_Count                 => <>))
+         do
+            Result.Parser.Tree.Lexer := Language.Lexer;
+            Map.Insert (Result);
+            if Trace_Incremental_Parse > Outline then
+               Trace.Put_Line
+                 ("parse_context created, language " & Ada.Tags.Expanded_Name 
(Language.Parse_Data_Template.all'Tag));
+               if Trace_Memory > Outline then
+                  Report_Memory (Trace.all, Prefix => True);
+               end if;
+            end if;
+         end return;
+      end;
+   end Find_Create;
+
+   function Find
+     (File_Name : in String;
+      Language  : in Wisi.Parse_Context.Language;
+      Have_Text : in Boolean := False)
+     return Parse_Context_Access
+   is begin
+      if File_Name'Length = 0 then
+         raise Wisi.Protocol_Error with "no file name given";
+      end if;
+
+      declare
+         use File_Parse_Context_Maps;
+         use WisiToken;
+         use all type Ada.Strings.Unbounded.String_Access;
+
+         Found : constant Cursor := Map.Find (File_Name);
+      begin
+         if Has_Element (Found) then
+            return Result : constant Parse_Context_Access := Element (Found) do
+               if Language.Descriptor /= Result.Parser.Tree.Lexer.Descriptor 
then
+                  raise WisiToken.User_Error with "language does not match for 
buffer '" & File_Name & "'";
+               end if;
+               if Have_Text and (Result.Text_Buffer = null or else 
Result.Text_Buffer'Length = 0) then
+                  if Trace_Incremental_Parse > Outline then
+                     Result.Parser.Tree.Lexer.Trace.Put_Line ("parse_context 
found, but text buffer empty");
+                  end if;
+                  raise Not_Found;
+               end if;
+               if Trace_Incremental_Parse > Outline then
+                  Result.Parser.Tree.Lexer.Trace.Put_Line
+                    ("parse_context found" & (if Have_Text then " and text 
present" else ""));
+               end if;
+            end return;
+         else
+            raise Not_Found;
+         end if;
+      end;
+   end Find;
+
+   procedure Kill (File_Name : in String)
+   is begin
+      if File_Name'Length = 0 then
+         raise Wisi.Protocol_Error with "no file name given";
+      end if;
+
+      declare
+         use File_Parse_Context_Maps;
+
+         Found : constant Cursor := Map.Find (File_Name);
+      begin
+         if not Has_Element (Found) then
+            --  already killed, or never opened
+            null;
+         else
+            declare
+               Context : Parse_Context_Access := Element (Found);
+            begin
+               Map.Delete (File_Name);
+               Ada.Strings.Unbounded.Free (Context.Text_Buffer);
+               Free (Context);
+            end;
+         end if;
+      end;
+   end Kill;
+
+   procedure Clear
+   is begin
+      Map.Clear;
+   end Clear;
+
+   function Image (Item : in Change) return String
+   is
+      use WisiToken;
+   begin
+      return "(" &
+        Item.Begin_Byte_Pos'Image & "," &
+        Item.Begin_Char_Pos'Image & "," &
+        Item.Inserted_End_Byte_Pos'Image & "," &
+        Item.Inserted_End_Char_Pos'Image & "," &
+        " +""" & (-Item.Inserted_Text) & """," &
+        Item.Deleted_Bytes'Image & "," &
+        Item.Deleted_Chars'Image & ")";
+   end Image;
+
+   function Get_Emacs_Change_List
+     (Command_Line : in     String;
+      Last         : in out Integer)
+     return Change_Lists.List
+   is
+      function Substitute_Escapes (Item : in String) return String
+      is begin
+         if Item'Length = 0 then
+            return Item;
+         else
+            declare
+               I      : Integer := Item'First;
+               J      : Integer := Item'First;
+               Result : String (Item'Range);
+            begin
+               loop
+                  --  See test_edit_source.adb String_Escape for rationale of 
what is
+                  --  handled here.
+                  if Item (I) = '\' and I < Item'Last then
+                     if Item (I + 1) = 'n' then
+                        Result (J) := ASCII.LF;
+                        I := @ + 2;
+                     elsif Item (I + 1) = '"' then
+                        Result (J) := '"';
+                        I := @ + 2;
+                     elsif Item (I + 1) = '\' then
+                        Result (J) := '\';
+                        I := @ + 2;
+                     else
+                        Result (J) := Item (I);
+                        I := @ + 1;
+                     end if;
+                  else
+                     Result (J) := Item (I);
+                     I := @ + 1;
+                  end if;
+                  exit when I > Item'Last;
+                  J := @ + 1;
+               end loop;
+               return Result (Result'First .. J);
+            end;
+         end if;
+      end Substitute_Escapes;
+
+   begin
+      return Result : Change_Lists.List do
+         Skip (Command_Line, Last, '('); --  start of changes list
+         loop
+            exit when Last = Command_Line'Last;
+            exit when Command_Line (Last + 1) = ')';
+
+            declare
+               use WisiToken;
+               Item : Change;
+            begin
+               Skip (Command_Line, Last, '(');
+               Item.Begin_Byte_Pos        := Base_Buffer_Pos (Get_Integer 
(Command_Line, Last));
+               Item.Begin_Char_Pos        := Base_Buffer_Pos (Get_Integer 
(Command_Line, Last));
+               Item.Inserted_End_Byte_Pos := Base_Buffer_Pos (Get_Integer 
(Command_Line, Last));
+               Item.Inserted_End_Char_Pos := Base_Buffer_Pos (Get_Integer 
(Command_Line, Last));
+               Item.Deleted_Bytes         := Get_Integer (Command_Line, Last);
+               Item.Deleted_Chars         := Get_Integer (Command_Line, Last);
+               Item.Inserted_Text         := +Substitute_Escapes (Get_String 
(Command_Line, Last));
+               Skip (Command_Line, Last, ')');
+
+               if Integer (Item.Inserted_End_Byte_Pos - Item.Begin_Byte_Pos) /=
+                 Ada.Strings.Unbounded.Length (Item.Inserted_Text)
+               then
+                  raise Protocol_Error with "invalid change; begin, end does 
not match inserted_text length" &
+                    Item.Begin_Byte_Pos'Image & 
Item.Inserted_End_Byte_Pos'Image & Integer'Image
+                      (Ada.Strings.Unbounded.Length (Item.Inserted_Text));
+               end if;
+               Result.Append (Item);
+            end;
+         end loop;
+         Skip (Command_Line, Last, ')'); --  end of changes list
+      end return;
+   end Get_Emacs_Change_List;
+
+   procedure Edit_Source
+     (Trace         : in out WisiToken.Trace'Class;
+      Parse_Context : in out Wisi.Parse_Context.Parse_Context;
+      Changes       : in     Change_Lists.List;
+      KMN_List      :    out WisiToken.Parse.KMN_Lists.List)
+   is
+      use Ada.Containers;
+      use WisiToken;
+
+      --  Changes is in increasing time order (ie _not_ in buffer pos
+      --  order); KMN_List is in buffer pos order.
+
+      Source           : Ada.Strings.Unbounded.String_Access renames 
Parse_Context.Text_Buffer;
+      Source_Byte_Last : Integer renames Parse_Context.Text_Buffer_Byte_Last;
+      Source_Char_Last : Integer renames Parse_Context.Text_Buffer_Char_Last;
+
+      Initial_Text_Byte_Region : constant Buffer_Region := (1, Base_Buffer_Pos 
(Source_Byte_Last));
+      Initial_Text_Char_Region : constant Buffer_Region := (1, Base_Buffer_Pos 
(Source_Char_Last));
+
+      Gap_First : Integer := Source_Byte_Last + 1;
+      Gap_Last  : Integer := Source'Last;
+
+      function Gap_Invariant return Boolean
+      is (Gap_Last - Gap_First = Source'Last - (Source_Byte_Last + 1));
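+
+      --  Source is managed as a gap buffer: Source (Gap_First .. Gap_Last)
+      --  is unused space, moved to each edit point by Move_Gap below, so
+      --  insertions and deletions need not shift the whole tail of the text.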
+
+      Total_Inserted_Bytes : Integer := 0;
+
+      function Reallocate return Boolean
+      is begin
+         --  This is a conservative analysis; Total_Inserted_Bytes is correct
+         --  if all Changes are pure insertions with no overlap and no deletes;
+         --  otherwise it is too large. But the savings from being more
+         --  accurate are not large, and this simplifies the editing algorithm.
+         if Changes.Length = 0 then
+            return False;
+         end if;
+
+         for Change of Changes loop
+            Total_Inserted_Bytes := @ + Ada.Strings.Unbounded.Length 
(Change.Inserted_Text);
+         end loop;
+
+         if Source_Byte_Last + Total_Inserted_Bytes > Source'Last then
+            return True;
+         else
+            return False;
+         end if;
+      end Reallocate;
+
+      procedure Move_Gap (New_Gap_First : in Integer)
+      with Pre =>
+        New_Gap_First /= Gap_First and Gap_Invariant and
+        (if New_Gap_First < Gap_First
+         then
+            (New_Gap_First + Gap_Last - Gap_First) + 1 in Source'First .. 
Source'Last and
+              New_Gap_First in Source'First .. Source'Last
+         else
+            New_Gap_First - 1 in Source'First .. Source'Last and
+              Gap_Last + 1 in Source'First .. Source'Last),
+        Post => Gap_Invariant
+      is
+         --  Examples:
+         --  gap_first : 15
+         --  gap_last  : 19
+         --
+         --  new_gap_first: 5
+         --     new_gap_last := 9
+         --     source (10 .. 19) := source (5 .. 14)
+         --
+         --  new_gap_first: 25
+         --  new_gap_last : 29
+         --      source (15 .. 24) := source (20 .. 29)
+
+         New_Gap_Last : constant Integer := New_Gap_First + Gap_Last - 
Gap_First;
+      begin
+         if New_Gap_First < Gap_First then
+            Source (New_Gap_Last + 1 .. Gap_Last) := Source (New_Gap_First .. 
Gap_First - 1);
+         else
+            Source (Gap_First .. New_Gap_First - 1) := Source (Gap_Last + 1 .. 
New_Gap_Last);
+         end if;
+
+         Gap_First := New_Gap_First;
+         Gap_Last  := New_Gap_Last;
+      end Move_Gap;
+
+      procedure Edit_Text (Change : in Wisi.Parse_Context.Change)
+      with Pre => Gap_Invariant, Post => Gap_Invariant
+      --  Apply Change to Source. Leaves Gap at edit point.
+      is
+         use Ada.Strings.Unbounded;
+         Inserted_Bytes : constant Integer := Ada.Strings.Unbounded.Length 
(Change.Inserted_Text);
+      begin
+         if Gap_First /= Integer (Change.Begin_Byte_Pos) then
+            Move_Gap (Integer (Change.Begin_Byte_Pos));
+         end if;
+
+         if Change.Deleted_Bytes > 0 then
+            Gap_Last         := @ + Change.Deleted_Bytes;
+            pragma Assert (Gap_Last <= Source'Last);
+            Source_Byte_Last := @ - Change.Deleted_Bytes;
+            Source_Char_Last := @ - Change.Deleted_Chars;
+         end if;
+
+         if Inserted_Bytes > 0 then
+            pragma Assert (Gap_Last + 1 - Gap_First >= Inserted_Bytes);
+            Source (Gap_First .. Gap_First + Inserted_Bytes - 1) := 
-Change.Inserted_Text;
+
+            Gap_First        := Gap_First + Inserted_Bytes;
+            Source_Byte_Last := @ + Inserted_Bytes;
+            Source_Char_Last := @ + Integer (Change.Inserted_End_Char_Pos - 
Change.Begin_Char_Pos);
+         end if;
+      end Edit_Text;
+
+      procedure Delete_KMNs
+        (KMN_Last_Byte     : in     Zero_Buffer_Pos;
+         KMN_Last_Char     : in     Zero_Buffer_Pos;
+         After             : in     Parse.KMN_Lists.Cursor;
+         Last_Deleted_Byte : in     Buffer_Pos;
+         Last_Deleted_Char : in     Buffer_Pos;
+         KMN               : in out Parse.KMN)
+      --  Last_Deleted_Byte is deleted from current text by current
+      --  Change. Delete KMNs after After whose Stable are entirely within
+      --  Last_Deleted_Byte; merge into KMN (initially the current Change).
+      --  Adjust following KMN if stable contains Last_Deleted_Byte.
+      is
+         use Parse.KMN_Lists;
+         use all type Parse.KMN;
+
+         Last_Byte : Zero_Buffer_Pos := KMN_Last_Byte + KMN_List 
(After).Stable_Bytes +
+           KMN_List (After).Inserted_Bytes; -- end of After KMN and subsequent 
deleted KMN
+         Last_Char : Zero_Buffer_Pos := KMN_Last_Char + KMN_List 
(After).Stable_Chars +
+           KMN_List (After).Inserted_Chars;
+
+         Cur : Cursor := Next (After);
+      begin
+         loop
+            exit when not Has_Element (Cur);
+            if Last_Byte + KMN_List (Cur).Stable_Bytes + KMN_List 
(Cur).Inserted_Bytes <=
+              Last_Deleted_Byte
+            then
+               --  All of cur inserted are deleted, and some of next.
+               --  test_edit_source.adb Edit_06.
+               KMN.Deleted_Bytes := @ + KMN_List (Cur).Deleted_Bytes - 
KMN_List (Cur).Inserted_Bytes;
+               KMN.Deleted_Chars := @ + KMN_List (Cur).Deleted_Chars - 
KMN_List (Cur).Inserted_Chars;
+
+               Last_Byte := @ + KMN_List (Cur).Stable_Bytes + KMN_List 
(Cur).Inserted_Bytes;
+               Last_Char := @ + KMN_List (Cur).Stable_Chars + KMN_List 
(Cur).Inserted_Chars;
+
+               declare
+                  To_Delete : Cursor := Cur;
+               begin
+                  Cur := Next (Cur);
+                  KMN_List.Delete (To_Delete);
+               end;
+
+            elsif Last_Byte + KMN_List (Cur).Stable_Bytes <= Last_Deleted_Byte 
then
+               --  Some of Cur.inserted are deleted. test_edit_source.adb 
Edit_05.
+               declare
+                  Deleted_Bytes : constant Zero_Buffer_Pos := 
Last_Deleted_Byte -
+                    (Last_Byte + KMN_List (Cur).Stable_Bytes); -- bytes of 
cur.inserted that are deleted
+                  Deleted_Chars : constant Zero_Buffer_Pos := 
Last_Deleted_Char -
+                    (Last_Char + KMN_List (Cur).Stable_Chars);
+               begin
+                  KMN.Inserted_Bytes := @ - Deleted_Bytes + KMN_List 
(Cur).Inserted_Bytes;
+                  KMN.Inserted_Chars := @ - Deleted_Chars + KMN_List 
(Cur).Inserted_Chars;
+
+                  KMN.Deleted_Bytes := @ + KMN_List (Cur).Deleted_Bytes - 
Deleted_Bytes;
+                  KMN.Deleted_Chars := @ + KMN_List (Cur).Deleted_Chars - 
Deleted_Chars;
+
+                  KMN_List.Delete (Cur);
+                  exit;
+               end;
+            else
+               --  Last_Byte is in Cur.stable
+               KMN_List (Cur).Stable_Bytes := @ - (Last_Deleted_Byte - 
Last_Byte);
+               KMN_List (Cur).Stable_Chars := @ - (Last_Deleted_Char - 
Last_Char);
+
+               if KMN_List (Cur) = (others => 0) then
+                  KMN_List.Delete (Cur);
+               end if;
+               exit;
+            end if;
+         end loop;
+      end Delete_KMNs;
+
+      procedure Edit_KMN (Change : in Wisi.Parse_Context.Change)
+      --  Apply Change to KMN list
+      is
+         use Parse.KMN_Lists;
+         use all type Parse.KMN;
+
+         Cur : Cursor := KMN_List.First;
+
+         KMN_Last_Byte : Base_Buffer_Pos := 0; --  Last byte of prev KMN.
+         KMN_Last_Char : Base_Buffer_Pos := 0; --  Last char of prev KMN.
+
+         function To_KMN (Item : in Wisi.Parse_Context.Change) return Parse.KMN
+         --  Assuming Change does not overlap any current KMN non-stable,
+         --  return a new KMN for it.
+         is (Stable_Bytes   => Item.Begin_Byte_Pos - KMN_Last_Byte - 1, -- 
Begin_Byte_Pos is deleted or inserted
+             Stable_Chars   => Item.Begin_Char_Pos - KMN_Last_Char - 1,
+             Inserted_Bytes => Item.Inserted_End_Byte_Pos - 
Item.Begin_Byte_Pos, -- End_Byte_Pos is after last inserted
+             Inserted_Chars => Item.Inserted_End_Char_Pos - 
Item.Begin_Char_Pos,
+             Deleted_Bytes  => Base_Buffer_Pos (Item.Deleted_Bytes),
+             Deleted_Chars  => Base_Buffer_Pos (Item.Deleted_Chars));
+
+      begin
+         loop
+            declare
+               Cur_KMN : Parse.KMN renames KMN_List (Cur);
+               KMN     : Parse.KMN := To_KMN (Change);
+
+               Cur_Last_Inserted_Byte : constant Base_Buffer_Pos :=
+                 KMN_Last_Byte + Cur_KMN.Stable_Bytes + Cur_KMN.Inserted_Bytes;
+               Cur_Last_Inserted_Char : constant Base_Buffer_Pos :=
+                 KMN_Last_Char + Cur_KMN.Stable_Chars + Cur_KMN.Inserted_Chars;
+
+               Change_Last_Deleted_Byte : constant Base_Buffer_Pos :=
+                 Change.Begin_Byte_Pos + Base_Buffer_Pos 
(Change.Deleted_Bytes) - 1;
+
+               Change_Last_Deleted_Char : constant Base_Buffer_Pos :=
+                 Change.Begin_Char_Pos + Base_Buffer_Pos 
(Change.Deleted_Chars) - 1;
+            begin
+               pragma Assert (KMN_Last_Byte < Change.Begin_Byte_Pos);
+
+               if Change.Begin_Byte_Pos + Base_Buffer_Pos 
(Change.Deleted_Bytes) - 1 <
+                 KMN_Last_Byte + Cur_KMN.Stable_Bytes
+               then
+                  --  Change is entirely within Cur_KMN.Stable_Bytes;
+                  --  test_edit_source.adb Edit_01
+                  --
+                  --  Or Change is inserting at end of text; Edit_10.
+                  Cur_KMN.Stable_Bytes := @ - (KMN.Stable_Bytes + 
KMN.Deleted_Bytes);
+                  Cur_KMN.Stable_Chars := @ - (KMN.Stable_Chars + 
KMN.Deleted_Chars);
+
+                  if KMN_List (Cur) = (others => 0) then
+                     Cur_KMN := KMN;
+                  else
+                     KMN_List.Insert (Before => Cur, Element => KMN);
+                  end if;
+                  exit;
+
+               elsif Change.Begin_Byte_Pos <= KMN_Last_Byte + 
Cur_KMN.Stable_Bytes + 1 then
+                  --  Change starts in or immediately after 
Cur_KMN.Stable_Bytes, ends
+                  --  in or after Cur_KMN.Insert; merge Change into Cur_KMN.
+
+                  if Cur_Last_Inserted_Byte >= Change_Last_Deleted_Byte then
+                     --  Some of Cur_KMN.Inserted are preserved; 
test_edit_source.adb
+                     --  Edit_02, _03, Deindent.
+                     --
+                     --   cur_kmn       next_kmn
+                     --  stable|  ins| stable| ins| ...
+                     --
+                     --   change
+                     --     | ins     |
+                     --     | del  |
+
+                     Cur_KMN.Inserted_Bytes := KMN.Inserted_Bytes + 
Cur_Last_Inserted_Byte - Change_Last_Deleted_Byte;
+                     Cur_KMN.Inserted_Chars := KMN.Inserted_Chars + 
Cur_Last_Inserted_Char - Change_Last_Deleted_Char;
+
+                     Cur_KMN.Deleted_Bytes := @ + KMN_Last_Byte + 
Cur_KMN.Stable_Bytes + 1 - Change.Begin_Byte_Pos;
+                     Cur_KMN.Deleted_Chars := @ + KMN_Last_Char + 
Cur_KMN.Stable_Chars + 1 - Change.Begin_Char_Pos;
+                  else
+                     --  All of Cur_KMN.Inserted and some of following KMN are 
deleted;
+                     --  test_edit_source.adb Edit_04, _05, _06.
+
+                     --  cur_kmn      next_kmn
+                     --  stable|   ins| stable| ins| ...
+                     --
+                     --   change
+                     --     | ins    |
+                     --     | del                          |
+
+                     Delete_KMNs
+                       (KMN_Last_Byte, KMN_Last_Char, Cur,
+                        Last_Deleted_Byte => Change.Begin_Byte_Pos + 
KMN.Deleted_Bytes - 1,
+                        Last_Deleted_Char => Change.Begin_Char_Pos + 
KMN.Deleted_Chars - 1,
+                        KMN               => KMN);
+
+                     Cur_KMN.Deleted_Bytes := @ + KMN.Deleted_Bytes - 
Cur_KMN.Inserted_Bytes;
+                     Cur_KMN.Deleted_Chars := @ + KMN.Deleted_Chars - 
Cur_KMN.Inserted_Chars;
+
+                     Cur_KMN.Inserted_Bytes := KMN.Inserted_Bytes;
+                     Cur_KMN.Inserted_Chars := KMN.Inserted_Chars;
+                  end if;
+
+                  Cur_KMN.Stable_Bytes := KMN.Stable_Bytes;
+                  Cur_KMN.Stable_Chars := KMN.Stable_Chars;
+                  exit;
+
+               elsif Change.Begin_Byte_Pos <= KMN_Last_Byte + 
Cur_KMN.Stable_Bytes + Cur_KMN.Inserted_Bytes + 1 then
+                  --  Change starts in or immediately after Cur_KMN inserted; 
merge
+                  --  Change into Cur_KMN. test_edit_source.adb Edit_07, _08, 
_09,
+                  --  Insert_Deindent
+
+                  if Cur_Last_Inserted_Byte >= Change_Last_Deleted_Byte then
+                     --  Beginning and end of Cur_KMN.Inserted are preserved; 
test_edit_source.adb
+                     --  Edit_07.
+                     --
+                     --   cur_kmn          next_kmn
+                     --  stable|  ins   | stable| ins| ...
+                     --
+                     --   change
+                     --          | ins     |
+                     --          | del|
+
+                     Cur_KMN.Inserted_Bytes := KMN.Inserted_Bytes + 
Cur_KMN.Inserted_Bytes - KMN.Deleted_Bytes;
+                     Cur_KMN.Inserted_Chars := KMN.Inserted_Chars + 
Cur_KMN.Inserted_Chars - KMN.Deleted_Chars;
+
+                     --  Cur_KMN.Deleted_Bytes unchanged
+                  else
+                     --  Remainder of Cur_KMN.Inserted and some of following 
KMN are deleted;
+                     --  test_edit_source.adb Edit_08, _09
+
+                     --  cur_kmn      next_kmn
+                     --  stable|   ins| stable| ins| ...
+                     --
+                     --   change
+                     --         | ins    |
+                     --         | del                          |
+
+                     Delete_KMNs
+                       (KMN_Last_Byte, KMN_Last_Char, Cur,
+                        Last_Deleted_Byte => Change.Begin_Byte_Pos + 
KMN.Deleted_Bytes - 1,
+                        Last_Deleted_Char => Change.Begin_Char_Pos + 
KMN.Deleted_Chars - 1,
+                        KMN               => KMN);
+
+                     declare
+                        Remaining_Cur_Ins_Bytes : constant Zero_Buffer_Pos :=
+                          Change.Begin_Byte_Pos - (KMN_Last_Byte + 
Cur_KMN.Stable_Bytes + 1);
+
+                        Remaining_Cur_Ins_Chars : constant Zero_Buffer_Pos :=
+                          Change.Begin_Char_Pos - (KMN_Last_Char + 
Cur_KMN.Stable_Chars + 1);
+                     begin
+                        Cur_KMN.Deleted_Bytes := @ + KMN.Deleted_Bytes -
+                          (Cur_KMN.Inserted_Bytes - Remaining_Cur_Ins_Bytes);
+
+                        Cur_KMN.Deleted_Chars := @ + KMN.Deleted_Chars -
+                          (Cur_KMN.Inserted_Chars  - Remaining_Cur_Ins_Chars);
+
+                        Cur_KMN.Inserted_Bytes := Remaining_Cur_Ins_Bytes + 
KMN.Inserted_Bytes;
+                        Cur_KMN.Inserted_Chars := Remaining_Cur_Ins_Chars + 
KMN.Inserted_Chars;
+                     end;
+                  end if;
+
+                  exit;
+
+               else
+                  --  Change is entirely after Cur_KMN
+                  KMN_Last_Byte := @ + Cur_KMN.Stable_Bytes + 
Cur_KMN.Inserted_Bytes;
+                  KMN_Last_Char := @ + Cur_KMN.Stable_Chars + 
Cur_KMN.Inserted_Chars;
+
+                  Next (Cur);
+
+                  if not Has_Element (Cur) then
+                     --  Since KMN_List starts with one KMN covering all of 
Source, we
+                     --  should never get here.
+                     raise SAL.Programmer_Error;
+                  end if;
+               end if;
+            end;
+         end loop;
+
+         if Cur /= KMN_List.Last and then (KMN_List (Cur).Inserted_Bytes = 0 
and KMN_List (Cur).Deleted_Bytes = 0) then
+            --  Change undone; merge stable with next KMN. 
test_edit_source.adb Edit_11
+            declare
+               To_Delete : Cursor := Cur;
+            begin
+               Next (Cur);
+               KMN_List (Cur).Stable_Bytes := @ + KMN_List 
(To_Delete).Stable_Bytes;
+               KMN_List (Cur).Stable_Chars := @ + KMN_List 
(To_Delete).Stable_Chars;
+               KMN_List.Delete (To_Delete);
+            end;
+         end if;
+
+         if Debug_Mode then
+            begin
+               WisiToken.Parse.Validate_KMN
+                 (List                     => KMN_List,
+                  Initial_Text_Byte_Region => Initial_Text_Byte_Region,
+                  Initial_Text_Char_Region => Initial_Text_Char_Region,
+                  Edited_Text_Byte_Region  => Buffer_Region'(1, 
Base_Buffer_Pos (Source_Byte_Last)),
+                  Edited_Text_Char_Region  => Buffer_Region'(1, 
Base_Buffer_Pos (Source_Char_Last)));
+            exception
+            when E : WisiToken.User_Error =>
+               raise Protocol_Error with Ada.Exceptions.Exception_Message (E);
+            end;
+         end if;
+      end Edit_KMN;
+
+   begin
+      if Reallocate then
+         declare
+            New_Source : constant Ada.Strings.Unbounded.String_Access := new 
String
+              (Source'First .. Source_Byte_Last + Total_Inserted_Bytes);
+         begin
+            New_Source (Source'First .. Source_Byte_Last) := Source 
(Source'First .. Source_Byte_Last);
+            Ada.Strings.Unbounded.Free (Source);
+            Source := New_Source;
+         end;
+
+         Gap_Last := Source'Last;
+      end if;
+
+      --  Start with one KMN with stable region = entire source.
+      KMN_List.Append
+        ((Stable_Bytes   => Base_Buffer_Pos (Source_Byte_Last),
+          Stable_Chars   => Base_Buffer_Pos (Source_Char_Last),
+          Deleted_Bytes  => 0,
+          Deleted_Chars  => 0,
+          Inserted_Bytes => 0,
+          Inserted_Chars => 0));
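+
+      --  Illustrative example (not from the test suite): for a 100 byte /
+      --  100 char buffer and a single change inserting "foo" at byte/char
+      --  position 10, deleting nothing, Edit_KMN turns the initial list
+      --  ((stable 100, ins 0, del 0)) into
+      --  ((stable 9, ins 3, del 0), (stable 91, ins 0, del 0));
+      --  9 + 3 + 91 = 103 = the edited text length.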
+
+      for Change of Changes loop
+         Edit_Text (Change);
+         Edit_KMN (Change);
+
+         if Trace_Incremental_Parse > Detail then
+            Trace.Put_Line ("change:" & Image (Change));
+            Trace.Put_Line ("kmn_list:");
+            for KMN of KMN_List loop
+               Trace.Put_Line (Parse.Image (KMN));
+            end loop;
+         end if;
+      end loop;
+
+      if Gap_Last /= Source'Last then
+         --  Remove the gap
+         Source (Gap_First .. Source_Byte_Last) := Source (Gap_Last + 1 .. 
Source'Last);
+      end if;
+   end Edit_Source;
+
+   procedure Save_Text
+     (Context   : in Parse_Context;
+      File_Name : in String)
+   is
+      use GNAT.OS_Lib;
+      File : File_Descriptor;
+      Written : Integer;
+      pragma Unreferenced (Written);
+   begin
+      if Ada.Directories.Exists (File_Name) then
+         Ada.Directories.Delete_File (File_Name);
+      end if;
+      File := Create_New_File (File_Name, Fmode => Binary);
+      Written := Write (File, Context.Text_Buffer 
(Context.Text_Buffer'First)'Address,
+             N => Context.Text_Buffer_Byte_Last - Context.Text_Buffer'First + 
1);
+      --  Written /= N on disk full; we don't check for that, because there's
+      --  nothing to do.
+      Close (File);
+
+      Context.Parser.Tree.Lexer.Trace.Put_Line ("text saved to '" & File_Name 
& "'");
+   end Save_Text;
+
+   procedure Save_Text_Auto (Context : in out Parse_Context)
+   is begin
+      Context.Save_Edited_Count := @ + 1;
+
+      declare
+         Save_File_Name : constant String :=
+           Ada.Strings.Unbounded.To_String (Context.Root_Save_Edited_Name) & 
"_" &
+           Wisi.Integer_Filled_Image (Item => Context.Save_Edited_Count, Width 
=> 3);
+      begin
+         Save_Text (Context, Save_File_Name);
+      end;
+   end Save_Text_Auto;
+
+end Wisi.Parse_Context;
diff --git a/wisi-parse_context.ads b/wisi-parse_context.ads
new file mode 100644
index 0000000000..d9ec6f9740
--- /dev/null
+++ b/wisi-parse_context.ads
@@ -0,0 +1,143 @@
+--  Abstract :
+--
+--  Parse context for one source file.
+--
+--  Copyright (C) 2020 - 2022 Free Software Foundation All Rights Reserved.
+--
+--  This library is free software;  you can redistribute it and/or modify it
+--  under terms of the  GNU General Public License  as published by the Free
+--  Software  Foundation;  either version 3,  or (at your  option) any later
+--  version. This library is distributed in the hope that it will be useful,
+--  but WITHOUT ANY WARRANTY;  without even the implied warranty of MERCHAN-
+--  TABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+
+pragma License (Modified_GPL);
+
+with Ada.Strings.Unbounded;
+with Ada.Unchecked_Deallocation;
+with WisiToken.Lexer;
+with WisiToken.Parse.LR.Parser;
+with WisiToken.Syntax_Trees;
+package Wisi.Parse_Context is
+
+   Not_Found : exception;
+
+   type Language is record
+      Descriptor              : WisiToken.Descriptor_Access_Constant;
+      Lexer                   : WisiToken.Lexer.Handle;
+      Table                   : WisiToken.Parse.LR.Parse_Table_Ptr;
+      Productions             : 
WisiToken.Syntax_Trees.Production_Info_Trees.Vector;
+      Partial_Parse_Active    : access Boolean;
+      Partial_Parse_Byte_Goal : access WisiToken.Buffer_Pos;
+      Fixes                   : 
WisiToken.Parse.LR.Parser.Language_Fixes_Access;
+      Matching_Begin_Tokens   : 
WisiToken.Parse.LR.Parser.Language_Matching_Begin_Tokens_Access;
+      String_ID_Set           : 
WisiToken.Parse.LR.Parser.Language_String_ID_Set_Access;
+      Parse_Data_Template     : Wisi.Parse_Data_Access;
+   end record;
+
+   type Parse_Context is tagged limited record
+      --  'tagged' for Object.Method notation
+
+      File_Name   : Ada.Strings.Unbounded.Unbounded_String;
+      Text_Buffer : Ada.Strings.Unbounded.String_Access;
+      --  Text_Buffer is encoded in UTF-8. Text_Buffer may hold all or part
+      --  of the actual Emacs buffer content. If partial, the lexer holds
+      --  the mapping from Text_Buffer index to Emacs buffer position.
+
+      Text_Buffer_Byte_Last : Integer := Integer'First;
+      Text_Buffer_Char_Last : Integer := Integer'Last;
+      --  For Incremental parse; after editing, there may be empty space at
+      --  the end of Text_Buffer.
+
+      Parser : WisiToken.Parse.LR.Parser.Parser;
+
+      Root_Save_Edited_Name : Ada.Strings.Unbounded.Unbounded_String;
+      --  If not "", save source text after the edit in a parse_incremental 
command,
+      --  to <root_save_edited_name_nnn>, where 'nnn' is a three-digit number 
that
+      --  increments.
+
+      Save_Edited_Count : Integer := 0;
+   end record;
+   type Parse_Context_Access is access all Parse_Context;
+
+   function Create_No_File
+     (Language : in Wisi.Parse_Context.Language;
+      Trace    : in WisiToken.Trace_Access)
+     return Parse_Context_Access;
+
+   procedure Create_No_Text
+     (File_Name : in String;
+      Language  : in Wisi.Parse_Context.Language;
+      Trace     : in WisiToken.Trace_Access);
+
+   procedure Set_File (File_Name : in String; Parse_Context : in 
Parse_Context_Access);
+
+   function Find_Create
+     (File_Name : in String;
+      Language  : in Wisi.Parse_Context.Language;
+      Trace     : in WisiToken.Trace_Access)
+     return Parse_Context_Access;
+   --  If a context for File_Name exists, return it if Language matches.
+   --
+   --  If no context found for File_Name, create one, return it.
+   --
+   --  Raise Protocol_Error if File_Name is an empty string.
+   --
+   --  Raise WisiToken.User_Error if context found for File_Name, but Language 
does not match.
+
+   function Find
+     (File_Name : in String;
+      Language  : in Wisi.Parse_Context.Language;
+      Have_Text : in Boolean := False)
+     return Parse_Context_Access;
+   --  If a context for File_Name exists, return it if Language matches.
+   --
+   --  Raise Protocol_Error if File_Name is an empty string.
+   --
+   --  Raise WisiToken.User_Error if context found for File_Name, but Language 
does not match.
+   --
+   --  Raise Not_Found if no context found for File_Name.
+   --  If Have_Text, raise Not_Found if Text_Buffer is empty.
+
+   procedure Kill (File_Name : in String);
+
+   procedure Clear;
+   --  Delete all contexts.
+
+   type Change is record
+      Begin_Byte_Pos        : WisiToken.Buffer_Pos; -- inserted or deleted
+      Begin_Char_Pos        : WisiToken.Buffer_Pos;
+      Inserted_End_Byte_Pos : WisiToken.Buffer_Pos;
+      Inserted_End_Char_Pos : WisiToken.Buffer_Pos; --  emacs convention: end 
is after last inserted char
+      Inserted_Text         : Ada.Strings.Unbounded.Unbounded_String;
+      Deleted_Bytes         : Natural;
+      Deleted_Chars         : Natural;
+   end record;
+
+   function Image (Item : in Change) return String;
+
+   package Change_Lists is new Ada.Containers.Doubly_Linked_Lists (Change);
+
+   function Get_Emacs_Change_List
+     (Command_Line : in     String;
+      Last         : in out Integer)
+     return Change_Lists.List;
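+   --  Read a parenthesized list of changes from Command_Line, advancing
+   --  Last. Each change gives begin byte and char positions, inserted-end
+   --  byte and char positions, deleted byte and char counts, then the
+   --  inserted text. Illustrative example (exact quoting is defined by
+   --  the Emacs side): inserting "foo" at byte/char position 10 with
+   --  nothing deleted might be sent as ((10 10 13 13 0 0 "foo")).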
+
+   procedure Edit_Source
+     (Trace         : in out WisiToken.Trace'Class;
+      Parse_Context : in out Wisi.Parse_Context.Parse_Context;
+      Changes       : in     Change_Lists.List;
+      KMN_List      :    out WisiToken.Parse.KMN_Lists.List);
+   --  Changes must be UTF-8.
+
+   procedure Save_Text
+     (Context   : in Parse_Context;
+      File_Name : in String);
+   --  Write Context.Text_Buffer to File_Name.
+
+   procedure Save_Text_Auto (Context : in out Parse_Context);
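+   --  Increment Context.Save_Edited_Count, then write Context.Text_Buffer
+   --  to Context.Root_Save_Edited_Name & "_" & the count as a three-digit
+   --  number; for example /tmp/edited_001 (the path is illustrative).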
+
+   procedure Free is new Ada.Unchecked_Deallocation (Parse_Context, 
Parse_Context_Access);
+   --  Declared last to avoid freezing rules.
+
+end Wisi.Parse_Context;
diff --git a/wisi-prj.el b/wisi-prj.el
index 6561c4618d..744a4ab8d3 100644
--- a/wisi-prj.el
+++ b/wisi-prj.el
@@ -40,8 +40,8 @@
 
   file-env
   ;; Environment (list of strings NAME=VALUE) set in project file;
-  ;; prepended to `process-environment' running tools in an external
-  ;; process.
+  ;; prepended to `process-environment' for running tools in an
+  ;; external process.
 
   compiler
   xref
@@ -336,22 +336,22 @@ If no symbol at point, or with prefix arg, prompt for 
symbol, goto spec."
       ;; from xref-backend-identifier-at-point; desired location is 'other'
       (let ((item (wisi-xref-item identifier prj)))
        (condition-case err
-           ;; WORKAROUND: xref 1.3.2 in Emacs 28 xref-location
-           ;; changed from defclass to cl-defstruct. If drop emacs
-           ;; 26, use 'with-suppressed-warnings'.
+           ;; WORKAROUND: xref 1.3.2 xref-location changed from
+           ;; defclass to cl-defstruct. When we drop Emacs 26 support,
+           ;; use 'with-suppressed-warnings'.
            (with-no-warnings ;; "unknown slot"
-             (let ((summary (if (functionp 'xref-item-summary) 
(xref-item-summary item) (oref item :summary)))
-                   (location (if (functionp 'xref-item-location) 
(xref-item-location item) (oref item :location)))
+             (let ((summary (if (functionp 'xref-item-summary) 
(xref-item-summary item) (oref item summary)))
+                   (location (if (functionp 'xref-item-location) 
(xref-item-location item) (oref item location)))
                    (eieio-skip-typecheck t)) ;; 'location' may have line, 
column nil
                (let ((file (if (functionp 'xref-file-location-file)
                                (xref-file-location-file location)
-                             (oref location :file)))
+                             (oref location file)))
                      (line (if (functionp 'xref-file-location-line)
                                (xref-file-location-line location)
-                             (oref location :line)))
+                             (oref location line)))
                      (column (if (functionp 'xref-file-location-column)
                                  (xref-file-location-column location)
-                               (oref location :column))))
+                               (oref location column))))
                  (let ((target
                         (wisi-xref-other
                          (wisi-prj-xref prj) prj
@@ -449,7 +449,7 @@ Displays a buffer in compilation-mode giving locations of 
the
 parent type declarations.")
 
 (defun wisi-show-declaration-parents ()
-  "Display the locations of the parent type declarations of the type 
identifier around point."
+  "Display the parent type declarations of the type identifier around point."
   (interactive)
   (let* ((project (wisi-check-current-project (buffer-file-name)))
         (id (wisi-prj-identifier-at-point project)))
@@ -566,10 +566,6 @@ COLUMN - Emacs column of the start of the identifier")
    ;; Not meaningful, but some project functions insist on a valid directory
    (car (wisi-prj-source-path project)))
 
-(cl-defmethod project-roots ((project wisi-prj))
-   ;; Not meaningful, but some project functions insist on a valid directory 
list
-   (wisi-prj-source-path project))
-
 (cl-defmethod project-files ((project wisi-prj) &optional dirs)
   (let (result)
     (dolist (dir (or dirs
@@ -586,6 +582,23 @@ COLUMN - Emacs column of the start of the identifier")
         (directory-files dir t))))
     result))
 
+(defun wisi-prj-kill-buffer-condition (buffer)
+  "Return non-nil if BUFFER should be killed.
+For `project-kill-buffer-conditions'."
+  (let* ((source-path (wisi-prj-source-path (project-current)))
+        (buf-file-name (buffer-file-name buffer))
+        (done (not (buffer-file-name buffer)))
+        (result nil)
+        dir)
+    (while (and source-path
+               (not done))
+      (setq dir (pop source-path))
+      (when (and dir
+                (file-in-directory-p buf-file-name dir))
+       (setq done t)
+       (setq result t)))
+    result))
+
 (defun wisi-refresh-prj-cache (not-full)
   "Refresh all cached data in the current project, and re-select it.
 With prefix arg, very slow refresh operations may be skipped."
@@ -705,8 +718,8 @@ Called with three args: PROJECT NAME VALUE.")
       ;; ignore lines that don't have the format "name=value", put
       ;; 'name', 'value' in match-string.
       (when (looking-at "^\\([^= \n]+\\)=\\(.*\\)")
-       (let ((name (match-string 1))
-             (value (match-string 2))
+       (let ((name (match-string-no-properties 1))
+             (value (match-string-no-properties 2))
              result)
 
          ;; Both compiler and xref need to see some settings; eg gpr_file, env 
vars.
@@ -1142,30 +1155,24 @@ strings as code, and treat `wisi-case-strict' as t in 
code."
                     (funcall wisi-case-adjust-p-function typed-char))
                 ))
 
-      ;; The indentation engine may trigger a reparse on
-      ;; non-whitespace changes, but we know we don't need to reparse
-      ;; for this change (assuming the user has not abused case
-      ;; exceptions!).
-      (let ((inhibit-modification-hooks t))
-       (cond
-        ;; Some attributes are also keywords, but captialized as
-        ;; attributes. So check for attribute first.
-        ((and
-          (not in-comment)
-          (save-excursion
-            (skip-syntax-backward "w_")
-            (eq (char-before) ?')))
-         (wisi-case-adjust-identifier in-comment))
-
-        ((and
-          (not in-comment)
-          (not (eq typed-char ?_))
-          (wisi-after-keyword-p))
-         (wisi-case-adjust-keyword))
-
-        (t (wisi-case-adjust-identifier in-comment))
-        ))
-      )))
+      (cond
+       ;; Some attributes are also keywords, but capitalized as
+       ;; attributes. So check for attribute first.
+       ((and
+        (not in-comment)
+        (save-excursion
+          (skip-syntax-backward "w_")
+          (eq (char-before) ?')))
+       (wisi-case-adjust-identifier in-comment))
+
+       ((and
+        (not in-comment)
+        (not (eq typed-char ?_))
+        (wisi-after-keyword-p))
+       (wisi-case-adjust-keyword))
+
+       (t (wisi-case-adjust-identifier in-comment))
+       ))))
 
 (defun wisi-case-adjust-at-point (&optional in-comment)
   "If ’wisi-auto-case’ is non-nil, adjust case of symbol at point.
@@ -1431,7 +1438,8 @@ Also add DOMINATING-FILE (default current buffer file 
name) to
 
 ;;;###autoload
 (defun wisi-prj-current-parse (_dir)
-  "For `project-find-functions'; parse the current project file, select and 
return the project"
+  "Parse the current project file, select and return the project.
+For `project-find-functions'."
   (let ((prj (wisi-prj-parse-file
              :prj-file wisi-prj--current-file
              :init-prj (cdr (assoc-string wisi-prj--current-file 
wisi-prj--default))
diff --git a/wisi-process-parse.el b/wisi-process-parse.el
index bddd03388a..cf6ee963ab 100644
--- a/wisi-process-parse.el
+++ b/wisi-process-parse.el
@@ -1,6 +1,6 @@
 ;;; wisi-process-parse.el --- interface to external parse program
 ;;
-;; Copyright (C) 2014, 2017 - 2020 Free Software Foundation, Inc.
+;; Copyright (C) 2014, 2017 - 2022 Free Software Foundation, Inc.
 ;;
 ;; Author: Stephen Leake <stephen_leake@member.fsf.org>
 ;;
@@ -22,19 +22,7 @@
 (require 'cl-lib)
 (require 'wisi-parse-common)
 
-(defgroup wisi nil
-  "Options for Wisi package."
-  :group 'programming)
-
-(defcustom wisi-process-time-out 5.0
-  "Time out waiting for parser response. An error occurs if there
-  is no response from the parser after waiting this amount (in
-  seconds)."
-  :type 'float
-  :safe 'numberp)
-(make-variable-buffer-local 'wisi-process-time-out)
-
-(defconst wisi-process-parse-protocol-version "5"
+(defconst wisi-process-parse-protocol-version "6"
   "Defines data exchanged between this package and the background process.
 Must match emacs_wisi_common_parse.ads Protocol_Version.")
 
@@ -64,21 +52,32 @@ Must match emacs_wisi_common_parse.ads Protocol_Version.")
   (busy nil)              ;; t while parser is active
   (process nil)          ;; running *_wisi_parse executable
   (buffer nil)                   ;; receives output of executable
-  line-begin              ;; vector of beginning-of-line positions in buffer
+  (source-buffer nil)    ;; buffer being parsed
+  version-checked         ;; nil if not checked, t if checked and passed.
   (total-wait-time 0.0)   ;; total time during last parse spent waiting for 
subprocess output.
   (response-count 0)      ;; responses received from subprocess during last 
parse; for profiling.
   end-pos                 ;; last character position parsed
   language-action-table   ;; array of function pointers, each taking an sexp 
sent by the process
+  query-result            ;; holds result of wisi-process-parse--Query
   )
 
+(cl-defmethod wisi-parser-transaction-log-buffer-name ((parser 
wisi-process--parser))
+  (concat "*"(wisi-process--parser-label parser) "-wisi-parser-log*"))
+
 (defvar wisi-process--alist nil
   "Alist mapping string label to ‘wisi-process--session’ struct")
 
+(defvar wisi-file_not_found nil
+  "Signal handled internally by functions in this file")
+(put 'wisi-file_not_found
+     'error-conditions
+     '(wisi-file_not_found))
+
 ;;;###autoload
 (defun wisi-process-parse-get (parser)
-  "Return a ‘wisi-process--parser’ object matching PARSER label.
-If label found in ‘wisi-process--alist’, return that.
-Otherwise add PARSER to ‘wisi-process--alist’, return it."
+  "Return a `wisi-process--parser' object matching PARSER label.
+If label found in `wisi-process--alist', return that.
+Otherwise add PARSER to `wisi-process--alist', return it."
   (or (cdr (assoc (wisi-process--parser-label parser) wisi-process--alist))
       (let ((exec-file (locate-file (wisi-process--parser-exec-file parser) 
exec-path '("" ".exe"))))
 
@@ -98,29 +97,83 @@ Otherwise add PARSER to ‘wisi-process--alist’, return it."
       (wisi-parse-kill parser)
       (setf (wisi-process--parser-exec-file parser) exec-file))))
 
-(defun wisi-process-parse--check-version (parser)
-  "Verify protocol version reported by process."
-  ;; The process has just started; the first non-comment line in the
-  ;; process buffer contains the process and language protocol versions.
-  (with-current-buffer (wisi-process--parser-buffer parser)
-    (goto-char (point-min))
-    (if (search-forward-regexp "protocol: process version \\([0-9]+\\) 
language version \\([0-9]+\\)" nil t)
-       (unless (and (match-string 1)
-                    (string-equal (match-string 1) 
wisi-process-parse-protocol-version)
-                    (match-string 2)
-                    (string-equal (match-string 2) 
(wisi-process--parser-language-protocol-version parser)))
-         (wisi-parse-kill parser)
-         (error "%s parser process protocol version mismatch: elisp %s %s, 
process %s %s"
-                (wisi-process--parser-label parser)
-                wisi-process-parse-protocol-version 
(wisi-process--parser-language-protocol-version parser)
-                (match-string 1) (match-string 2)))
-      ;; Search failed
-      (error "%s parser process protocol version message not found"
-            (wisi-process--parser-label parser))
-    )))
-
-(defun wisi-process-parse--require-process (parser)
-  "Start the process for PARSER if not already started."
+(defun wisi-process-parse--filter (process text)
+  "Process filter for parser process."
+  (when (buffer-live-p (process-buffer process))
+    (with-current-buffer (process-buffer process)
+      (let ((search-start (marker-position (process-mark process))))
+        (goto-char (process-mark process))
+        (insert text)
+        (set-marker (process-mark process) (point))
+
+       (goto-char search-start)
+       ;; back up a line in case we got part of the line previously.
+       (forward-line -1)
+       (while (re-search-forward wisi-process-parse-prompt (point-max) t)
+         (cond
+          ((not (wisi-process--parser-version-checked wisi-parser-shared))
+           (when (< 1 wisi-debug)
+              (wisi-parse-log-message
+               wisi-parser-shared
+               (format "parse--filter found prompt - protocol; 
wisi-parse-full-active %s" wisi-parse-full-active)))
+           (save-excursion
+             ;; The process has just started; the first non-comment line in the
+             ;; process buffer contains the process and language protocol 
versions.
+             (goto-char (point-min))
+             (if (search-forward-regexp "protocol: process version 
\\([0-9]+\\) language version \\([0-9]+\\)" nil t)
+                 (unless (and (match-string 1)
+                              (string-equal (match-string 1) 
wisi-process-parse-protocol-version)
+                              (match-string 2)
+                              (string-equal (match-string 2)
+                                            
(wisi-process--parser-language-protocol-version wisi-parser-shared)))
+                   (wisi-parse-kill wisi-parser-shared)
+                   (error "%s parser process protocol version mismatch: elisp 
%s %s, process %s %s"
+                          (wisi-process--parser-label wisi-parser-shared)
+                          wisi-process-parse-protocol-version
+                          (wisi-process--parser-language-protocol-version 
wisi-parser-shared)
+                          (match-string 1) (match-string 2)))
+               ;; Search failed
+               (error "%s parser process protocol version message not found"
+                      (wisi-process--parser-label wisi-parser-shared))))
+
+           (delete-region (point-min) (point))
+            (set-marker (process-mark process) (point-min))
+           (setf (wisi-process--parser-version-checked wisi-parser-shared) t)
+           (unless wisi-parse-full-active
+             ;; if active, filter must handle second prompt as well
+             (set-process-filter process nil)
+             (setf (wisi-process--parser-busy wisi-parser-shared) nil)))
+
+          ((and (wisi-process--parser-version-checked wisi-parser-shared)
+                wisi-parse-full-active)
+           (when (< 1 wisi-debug)
+             (wisi-parse-log-message wisi-parser-shared "parse--filter found 
prompt - initial full"))
+           (if (buffer-live-p (car wisi-parse-full-active))
+               (with-current-buffer (car wisi-parse-full-active)
+                 (read-only-mode -1)
+                 (let ((region (cdr wisi-parse-full-active)))
+                   (when (and (>= (cdr region) (car region))
+                              (>= (cdr region) (point-min))
+                              (<= (car region) (point-max)))
+                     (font-lock-flush (car region) (cdr region))))
+
+                 (set-process-filter process nil)
+
+                 ;; handle syntax error messages from full parse
+                 (set-marker (process-mark process) (point-min))
+                 (condition-case err
+                     (progn
+                       (wisi-process-parse--handle-messages wisi-parser-shared)
+                       (setq wisi-parse-full-active nil))
+                   (error
+                    (setq wisi-parse-full-active nil)
+                    (signal (car err) (cdr err))))
+                 )
+             (setq wisi-parse-full-active nil)
+             ))))
+         ))))
+
+(cl-defmethod wisi-parse-require-process (parser &key nowait)
   (unless (process-live-p (wisi-process--parser-process parser))
     (let ((process-connection-type nil) ;; use a pipe, not a pty; avoid 
line-by-line reads
          (process-name (format " *%s_wisi_parse*" (wisi-process--parser-label 
parser))))
@@ -128,45 +181,71 @@ Otherwise add PARSER to ‘wisi-process--alist’, return it."
       (unless (buffer-live-p (wisi-process--parser-buffer parser))
        ;; User may have killed buffer to kill parser.
        (setf (wisi-process--parser-buffer parser)
-             (get-buffer-create process-name)))
+             (get-buffer-create process-name))
+       (with-current-buffer (wisi-process--parser-buffer parser)
+         (emacs-lisp-mode) ;; for comment syntax
+         (setq wisi-parser-shared parser) ;; for process filter
+         ))
 
       (with-current-buffer (wisi-process--parser-buffer parser)
-       (erase-buffer)); delete any previous messages, prompt
+       (erase-buffer));; delete any previous messages, prompt
 
+      (when (or (not nowait) (>= wisi-debug 2))
+       (message "starting parser ..."))
+      (wisi-parse-log-message parser "create process")
+
+      (setf (wisi-process--parser-version-checked parser) nil)
       (setf (wisi-process--parser-process parser)
            (make-process
             :name process-name
             :buffer (wisi-process--parser-buffer parser)
+
+            :coding 'utf-8-unix
+            ;; We don't need utf-8-dos for reading when the parser is
+            ;; compiled for Windows; ASCII.CR is easy to ignore.
+            ;;
+            ;; See test/mixed_unix_dos_line_ends.adb; we'd like to
+            ;; have truly "no conversion" here, so the count of sent
+            ;; bytes is the same as computed in wpp--send-*. But
+            ;; utf-8-emacs strips the stray ^M from that buffer, and
+            ;; binary gets something else wrong.
+
             :command (append (list (wisi-process--parser-exec-file parser))
                              (wisi-process--parser-exec-opts parser))))
 
      (set-process-query-on-exit-flag (wisi-process--parser-process parser) nil)
+      (set-process-filter (wisi-process--parser-process parser) #'wisi-process-parse--filter)
 
-      (wisi-process-parse--wait parser)
-      (wisi-process-parse--check-version parser)
+      (unless nowait
+       (wisi-process-parse--wait parser)
+       (message "starting parser ... done"))
       )))
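
For illustration only (not part of the patch): from a caller's point of view the two startup modes differ only in whether the call blocks; wisi-parser-shared is the buffer-local parser object this file already uses.

;; Sketch, assuming wisi-parser-shared is bound to a live wisi-process--parser.
(let ((parser wisi-parser-shared))
  ;; Blocking start: returns after the parser prints its first prompt.
  (wisi-parse-require-process parser)
  ;; Non-blocking start: returns immediately; wisi-process-parse--filter
  ;; finishes the version check when the prompt arrives.
  (wisi-parse-require-process parser :nowait t))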
 
 (defun wisi-process-parse--wait (parser)
   "Wait for the current command to complete."
-  (let ((process (wisi-process--parser-process parser))
-       (search-start (point-min))
-       (wait-count 0)
-       (found nil))
-
+  (let* ((process (wisi-process--parser-process parser))
+        search-start
+        (filter-active (not (eq #'internal-default-process-filter (process-filter process))))
+        (wait-count 0)
+        (found nil))
     (with-current-buffer (wisi-process--parser-buffer parser)
-      (while (and (process-live-p process)
-                 (progn
-                   ;; process output is inserted before point, so move back over it to search it
-                   (goto-char search-start)
-                   (not (setq found (re-search-forward wisi-process-parse-prompt (point-max) t)))))
+      (setq search-start (point-min))
+      (while (if filter-active
+                (not (eq #'internal-default-process-filter (process-filter process)));; wait for filter to finish
+
+             (and (process-live-p process)
+                  (progn
+                    ;; process output is inserted before point, so move back over it to search it
+                    (goto-char search-start)
+                    (not (setq found (re-search-forward wisi-process-parse-prompt (point-max) t))))))
        (setq search-start (point));; don't search same text again
        (setq wait-count (1+ wait-count))
-       (accept-process-output process 0.1))
+       (accept-process-output process 0.1)))
 
-      (unless found
-       (wisi-process-parse-show-buffer parser)
-       (error "%s process died" (wisi-process--parser-exec-file parser)))
-      )))
+    (unless (or filter-active found)
+      (wisi-parse-log-message parser "process died")
+      (error "%s process died" (wisi-process--parser-exec-file parser)))
+    ))
 
 (defun wisi-process-parse-show-buffer (parser)
   "Show PARSER buffer."
@@ -174,98 +253,240 @@ Otherwise add PARSER to ‘wisi-process--alist’, return it."
       (pop-to-buffer (wisi-process--parser-buffer parser))
     (error "wisi-process-parse process not active")))
 
-(defun wisi-process-parse--send-parse (parser begin send-end parse-end)
-  "Send a parse command to PARSER external process, followed by
-the content of the current buffer from BEGIN thru SEND-END.  Does
-not wait for command to complete. PARSE-END is end of desired
-parse region."
-  ;; Must match "parse" command arguments read by
+(defun wisi-process-parse--add-cmd-length (cmd)
+  "Return CMD (a string) with length prefixed."
+  ;; The number of digits in the length prefix must match
+  ;; emacs_wisi_common_parse.adb Get_Command_Length. If the actual
+  ;; length overflows the allotted space, the parser eventually
+  ;; reports a protocol_error. The caller should prevent that and
+  ;; send an alternate command instead.
+  (format "%04d%s" (string-bytes cmd) cmd))
+
+(defun wisi-process-parse--send-parse (parser parse-action begin send-end parse-end)
+  "Send a partial PARSE-ACTION command to PARSER external process.
+The command is followed by the content of the current buffer from
+BEGIN thru SEND-END.  Does not wait for command to
+complete. PARSE-END is end of desired parse region."
+  ;; Must match "full/partial parse" command arguments read by
   ;; emacs_wisi_common_parse.adb Get_Parse_Params.
-  (let* ((cmd (format "parse %d \"%s\" %d %d %d %d %d %d %d %d %d %d %d %d %d 
%d %d %d %d %d %s"
-                     (cl-ecase wisi--parse-action
+  ;; Parse_Kind is always Partial here; that really means "legacy".
+  (let* ((cmd (format "parse 0 %d \"%s\" %d %d %d %d %d %d %d %d %d \"%s\" %d 
%d %d %d \"%s\""
+                     (cl-ecase parse-action
                        (navigate 0)
                        (face 1)
                        (indent 2))
-                     (if (buffer-file-name) (file-name-nondirectory (buffer-file-name)) "")
+                     (or (buffer-file-name) (buffer-name))
                      (position-bytes begin)
                      (position-bytes send-end)
-                     (position-bytes parse-end)
-                     begin ;; char_pos
-                     (line-number-at-pos begin)
-                     (line-number-at-pos send-end)
-                     (save-excursion (goto-char begin) (back-to-indentation) (current-column));; indent-begin
+                     (position-bytes (min (point-max) parse-end)) ;; goal_byte_pos
+                     begin ;; begin_char_pos
+                     send-end ;; end_char_pos
+                     (min (point-max) parse-end) ;; goal_char_pos
+
+                     (line-number-at-pos begin t)
+
+                     ;; begin_indent. Example:
+                     ;;
+                     ;; end if;
+                     ;;
+                     ;;    if ...
+                     ;;    end if;
+                     ;;
+                     ;; Indenting 'if ...'; ada-wisi-expand-region
+                     ;; returns BEGIN after first 'end if;', SEND-END
+                     ;; after second 'end if'. Begin_indent is first
+                     ;; 'end if;'
+                     (save-excursion
+                       (goto-char begin)
+                       (back-to-indentation)
+                       (current-column))
+
                     (if (or (and (= begin (point-min)) (= parse-end (point-max)))
                              (< (point-max) wisi-partial-parse-threshold))
                          0 1) ;; partial parse active
-                     (if (> wisi-debug 0) 1 0) ;; debug_mode
-                     (1- wisi-debug) ;; trace_parse
-                     wisi-trace-mckenzie
-                     wisi-trace-action
-                     (if wisi-mckenzie-disable 1 0)
-                     (or wisi-mckenzie-task-count -1)
-                     (or wisi-mckenzie-check-limit -1)
+                     wisi-parser-verbosity
+                     (or wisi-mckenzie-zombie-limit -1)
                      (or wisi-mckenzie-enqueue-limit -1)
                      (or wisi-parse-max-parallel -1)
-                     (- (position-bytes send-end) (position-bytes begin)) ;; send-end is after last byte
+                     (- (position-bytes send-end) (position-bytes begin)) ;; byte_count: send-end is after last byte
                      (wisi-parse-format-language-options parser)
                      ))
-        (msg (format "%03d%s" (length cmd) cmd))
         (process (wisi-process--parser-process parser)))
+
     (with-current-buffer (wisi-process--parser-buffer parser)
       (erase-buffer))
 
-    (process-send-string process msg)
+    (wisi-parse-log-message parser cmd)
+    (process-send-string process (wisi-process-parse--add-cmd-length cmd))
+
+    ;; we don't log the buffer text; may be huge
     (process-send-string process (buffer-substring-no-properties begin send-end))
 
-    ;; We don’t wait for the send to complete; the external process
-    ;; may start parsing and send an error message.
+    ;; We don't wait for the send to complete here.
+    ))
+
+(defun wisi-process-parse--send-incremental-parse (parser full)
+  "Send an incremental parse command to PARSER external process.
+If FULL, do initial full parse.  Does not wait for command to
+complete."
+  ;; First check length of changes list. For example, in almost any
+  ;; buffer, changing the indent of every line (done in unit tests)
+  ;; builds a change list that is longer than the buffer text, and no
+  ;; faster to parse. So for long change lists, fall back to full
+  ;; parse.
+  ;;
+  ;; For now, we define "long" by the command length character count
+  ;; limit set by `wisi-process-parse--add-cmd-length'. For 4
+  ;; characters, this is hit by
+  ;; ada_mode-conditional_expressions.adb.
+  (let ((changes
+        ;; wisi--changes is in reverse time order.
+        (if wisi--changes
+            (prin1-to-string (nreverse wisi--changes))
+          "()")))
+    (when (> (length changes) 9999)
+      (setq full t))
+
+    (unless (equal -1 (wisi-parser-local-all-changes wisi-parser-local))
+      (cond
+       (full
+       (setf (wisi-parser-local-all-changes wisi-parser-local) nil)
+       (when (buffer-file-name)
+         (write-region (point-min) (point-max) (concat (buffer-file-name) "-wisi-change-start"))))
+
+       (t
+        (setf (wisi-parser-local-all-changes wisi-parser-local)
+             (append wisi--changes (wisi-parser-local-all-changes wisi-parser-local))))
+       ))
+
+    ;; Must match "incremental parse" command arguments read by
+    ;; emacs_wisi_common_parse.adb Get_Parse_Params.
+    (let* ((cmd
+           (apply #'format
+                  (concat
+                   "parse %d \"%s\" \"%s\" %d %d %d "
+                   (if full "%d %d " "%s ")
+                   "\"%s\""
+                   )
+                  (append
+                   (list
+                    (if full 2 1) ;; Parse_Kind
+                    (if (buffer-file-name) (buffer-file-name) (buffer-name))
+                    wisi-parser-verbosity
+                    (or wisi-mckenzie-zombie-limit -1)
+                    (or wisi-mckenzie-enqueue-limit -1)
+                    (or wisi-parse-max-parallel -1)
+                    )
+                   (if full
+                       (list
+                        (- (position-bytes (point-max)) (position-bytes (point-min))) ;; byte_count
+                        (point-max) ;; end_char_pos (after last char)
+                        )
+                     (list changes))
+                   (list (wisi-parse-format-language-options parser))
+                   )))
+          (process (wisi-process--parser-process parser)))
+
+      (with-current-buffer (wisi-process--parser-buffer parser)
+       (erase-buffer))
+
+      (wisi-parse-log-message parser cmd)
+      (process-send-string process (wisi-process-parse--add-cmd-length cmd))
+      (setq wisi--changes nil)
+      (when full
+       (process-send-string process (buffer-substring-no-properties (point-min) (point-max))))
+
+      ;; We don't wait for the send to complete here.
+      )))
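
For reference, a sketch of the two unframed command forms the function above produces; the file name, option values, and positions are invented, and the change list is whatever prin1-to-string yields for wisi--changes:

;; Illustration only.
;;   full parse  : parse 2 "foo.adb" "" -1 -1 -1 15200 15037 ""
;;                 (byte_count and end_char_pos; the buffer text is sent next)
;;   incremental : parse 1 "foo.adb" "" -1 -1 -1 <printed change list> ""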
+
+(defun wisi-process--kill-context (parser)
+  "Send a \"kill-context\" command for the current buffer to PARSER.
+Does not wait for command to complete."
+  (let* ((cmd (format "kill-context \"%s\""
+                     (if (buffer-file-name) (buffer-file-name) (buffer-name))
+                     ))
+        (process (wisi-process--parser-process parser)))
+
+    ;; We can get here from an mmm-mode buffer; then the
+    ;; current-buffer is "mmm-temp-buffer", but we are still waiting
+    ;; for the main buffer to finish the initial parse.
+    (when (and wisi-parse-full-active
+              (equal (car wisi-parse-full-active)
+                     (current-buffer)))
+      (setq wisi-parse-full-active nil))
+
+    (with-current-buffer (wisi-process--parser-buffer parser)
+      (erase-buffer))
+
+    (wisi-parse-log-message parser cmd)
+    (process-send-string process (wisi-process-parse--add-cmd-length cmd))
+
+    ;; We don't wait for the send to complete here.
     ))
 
-(defun wisi-process-parse--send-refactor (parser refactor-action parse-begin parse-end edit-begin)
-  "Send a refactor command to PARSER external process, followed
-by the content of the current buffer from PARSE-BEGIN thru
-PARSE-END, wait for command to complete. PARSER will respond with
-one or more Edit messages."
+(defun wisi-process-parse--send-refactor (parser refactor-action pos)
+  "Send a refactor command to PARSER external process, wait for
+command to complete. PARSER will respond with one or more Edit
+messages."
   ;; Must match "refactor" command arguments read by
   ;; emacs_wisi_common_parse.adb Get_Refactor_Params.
-  (let* ((cmd (format "refactor %d \"%s\" %d %d %d %d %d %d %d %d %d %d %d %d"
+  (let* ((cmd (format "refactor \"%s\" %d %d \"%s\""
+                     (if (buffer-file-name) (buffer-file-name) (buffer-name))
                      refactor-action
-                     (if (buffer-file-name) (file-name-nondirectory (buffer-file-name)) "")
-                     (position-bytes parse-begin)
-                     (position-bytes parse-end)
-                     (position-bytes edit-begin)
-                     parse-begin ;; char_pos
-                     (line-number-at-pos parse-begin)
-                     (line-number-at-pos parse-end)
-                     (save-excursion (goto-char parse-begin) (back-to-indentation) (current-column));; indent-begin
-                     (if (> wisi-debug 0) 1 0) ;; debug-mode
-                     (1- wisi-debug) ;; trace_parse
-                     wisi-trace-action
-                     (or wisi-parse-max-parallel -1)
-                     (- (position-bytes parse-end) (position-bytes parse-begin)) ;; parse-end is after last byte
+                     (position-bytes pos)
+                     wisi-parser-verbosity
                      ))
-        (msg (format "%03d%s" (length cmd) cmd))
         (process (wisi-process--parser-process parser)))
+
     (with-current-buffer (wisi-process--parser-buffer parser)
       (erase-buffer))
 
-    (process-send-string process msg)
-    (process-send-string process (buffer-substring-no-properties parse-begin parse-end))
+    (wisi-parse-log-message parser cmd)
+    (process-send-string process (wisi-process-parse--add-cmd-length cmd))
     (wisi-process-parse--wait parser)
     ))
 
-(defun wisi-process-parse--send-noop (parser)
-  "Send a noop command to PARSER external process, followed by
-the content of the current buffer.  Does not wait for command to
-complete."
-  (let* ((cmd (format "noop %d" (1- (position-bytes (point-max)))))
-        (msg (format "%03d%s" (length cmd) cmd))
+(defun wisi-process-parse--send-query (parser query &rest args)
+  "Send a query command to PARSER external process, wait for command to 
complete.
+PARSER will respond with one or more Query messages."
+  ;; Must match "query-tree" command arguments read by
+  ;; emacs_wisi_common_parse.adb Process_Stream "query-tree"
+  (let* ((cmd (format "query-tree \"%s\" %d"
+                     (if (buffer-file-name) (buffer-file-name) (buffer-name))
+                     (cdr (assoc query wisi-parse-tree-queries))
+                     ))
         (process (wisi-process--parser-process parser)))
+
+    (cl-ecase query
+      ((node containing-statement)
+       ;; arg is a buffer-pos. If eob, query will fail because eob is
+       ;; after the char-region of the tree.
+       (let* ((arg (car args))
+             (query-point (if (= (point-max) arg) (point-min) arg)))
+        (setq cmd (concat cmd (format " %d" query-point)))))
+
+      (ancestor
+       (setq cmd
+            (concat cmd
+                    (format " %d (%s_id )"
+                            (nth 0 args)
+                            (mapconcat (lambda (s) (symbol-name s)) (nth 1 args) "_id ")))))
+
+      ((parent child)
+       (setq cmd (concat cmd (format " \"%s\" %d" (car args) (nth 1 args)))))
+
+      (print nil)
+
+      (dump
+       (setq cmd (concat cmd (format " \"%s\"" (car args)))))
+      )
+
     (with-current-buffer (wisi-process--parser-buffer parser)
       (erase-buffer))
 
-    (process-send-string process msg)
-    (process-send-string process (buffer-substring-no-properties (point-min) (point-max)))
+    (wisi-parse-log-message parser cmd)
+    (process-send-string process (wisi-process-parse--add-cmd-length cmd))
+    (wisi-process-parse--wait parser)
     ))
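
A usage sketch of the query interface built on the command above (the position argument and the result shape are assumptions based on the handlers later in this file):

;; Usage sketch; requires wisi-incremental-parse-enable.
;; (wisi-parse-tree-query wisi-parser-shared 'containing-statement (point))
;;   => a wisi-tree-node with :id, :address and :char-region, or nil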
 
 (defun wisi-process-parse--marker-or-nil (item)
@@ -273,12 +494,12 @@ complete."
 
 (defun wisi-process-parse--Navigate_Cache (parser sexp)
   ;; sexp is [Navigate_Cache pos statement_id id length class containing_pos prev_pos next_pos end_pos]
-  ;; see ‘wisi-process-parse--execute’
+  ;; see `wisi-process-parse--execute'
   (let ((pos (aref sexp 1)))
     (with-silent-modifications
       (put-text-property
        pos
-       (1+ pos)
+       (min (1+ pos) (point-max))
        'wisi-cache
        (wisi-cache-create
       :nonterm    (aref (wisi-process--parser-token-table parser) (aref sexp 2))
@@ -294,40 +515,52 @@ complete."
 
 (defun wisi-process-parse--Name_Property (parser sexp)
   ;; sexp is [Name_Property first-pos last-pos]
-  ;; see ‘wisi-process-parse--execute’
+  ;; see `wisi-process-parse--execute'
   ;; implements wisi-name-action
   (with-silent-modifications
     (put-text-property (aref sexp 1) (1+ (aref sexp 2)) 'wisi-name t)))
 
 (defun wisi-process-parse--Face_Property (parser sexp)
   ;; sexp is [Face_Property first-pos last-pos face-index]
-  ;; see ‘wisi-process-parse--execute’
+  ;; see `wisi-process-parse--execute'
   ;; implements wisi--face-action-1
-  (with-silent-modifications
-    (add-text-properties
-     (aref sexp 1)
-     (1+ (aref sexp 2))
-     (list 'font-lock-face (aref (wisi-process--parser-face-table parser) (aref sexp 3))
-          'fontified t)
-     )))
+  (let ((first (aref sexp 1))
+       (last (1+ (aref sexp 2))))
+
+    (when (< 0 wisi-debug)
+      (unless (and
+              (<= (point-min) first (point-max))
+              (<= (point-min) last (point-max)))
+       (message "Face_Property out of buffer bounds: %s" sexp)))
+
+    (when (and
+          (<= (point-min) first (point-max))
+          (<= (point-min) last (point-max)))
+      (with-silent-modifications
+       (add-text-properties
+        first
+        (1+ (aref sexp 2))
+        (list 'font-lock-face (aref (wisi-process--parser-face-table parser) (aref sexp 3))
+              'fontified t)
+        )))))
 
 (defun wisi-process-parse--Indent (parser sexp)
-  ;; sexp is [Indent line-number indent]
-  ;; see ‘wisi-process-parse--execute’
-  (let ((pos (aref (wisi-process--parser-line-begin parser) (1- (aref sexp 1)))))
+  ;; sexp is [Indent line-number line-begin-char-pos indent]
+  ;; see `wisi-process-parse--execute'
+  (let ((pos (aref sexp 2)))
     (with-silent-modifications
       (when (< (point-min) pos)
        (put-text-property
         (1- pos)
         pos
         'wisi-indent
-        (aref sexp 2)))
+        (aref sexp 3)))
       )))
 
 (defun wisi-process-parse--Lexer_Error (parser sexp)
   ;; sexp is [Lexer_Error char-position <message> <repair-char>]
-  ;; see ‘wisi-process-parse--execute’
-  (let ((pos (aref sexp 1))
+  ;; see `wisi-process-parse--execute'
+  (let ((pos (min (point-max) (aref sexp 1)))
        err)
 
     (goto-char pos);; for current-column
@@ -339,91 +572,145 @@ complete."
           (format "%s:%d:%d: %s"
                   (if (buffer-file-name) (file-name-nondirectory (buffer-file-name)) "")
                   ;; file-name can be nil during vc-resolve-conflict
-                  (line-number-at-pos pos)
+                  (line-number-at-pos pos t)
                   (current-column)
                   (aref sexp 2))
           :inserted (when (= 4 (length sexp)) (aref sexp 3))))
 
-    (push err (wisi-parser-lexer-errors parser))
+    (push err (wisi-parser-local-lexer-errors wisi-parser-local))
     ))
 
 (defun wisi-process-parse--Parser_Error (parser sexp)
   ;; sexp is [Parser_Error char-position <string>]
-  ;; see ‘wisi-process-parse--execute’
-  (let ((pos (aref sexp 1))
+  ;; see `wisi-process-parse--execute'
+  (let ((pos (min (point-max) (aref sexp 1)))
+       ;; pos from parser can be > (point-max) if user has edited
+       ;; the buffer since the parse; we don't force read-only except
+       ;; during initial full parse. This happens during dvc ediff; dvc
+       ;; creates an empty buffer, puts it in ada-mode, which inserts
+       ;; the {header} placeholder. font-lock requests a parse, dvc
+       ;; erases the buffer preparing to insert old revision.
        err)
 
-    (goto-char pos);; for current-column
-
-    (setq err
-         (make-wisi--parse-error
-          :pos (copy-marker pos)
-          :message
-          (format "%s:%d:%d: %s"
-                  (if (buffer-file-name) (file-name-nondirectory (buffer-file-name)) "")
-                  ;; file-name can be nil during vc-resolve-conflict
-                  (line-number-at-pos pos)
-                  (1+ (current-column))
-                  (aref sexp 2))))
-
-    (push err (wisi-parser-parse-errors parser))
+    (save-excursion
+      (goto-char pos);; for current-column
+
+      (setq err
+           (make-wisi--parse-error
+            :pos (copy-marker pos)
+            :message
+            (format "%s:%d:%d: %s"
+                    (if (buffer-file-name) (file-name-nondirectory (buffer-file-name)) "")
+                    ;; file-name can be nil during vc-resolve-conflict
+                    (line-number-at-pos pos t)
+                    (1+ (current-column))
+                    (aref sexp 2)))))
+
+    (push err (wisi-parser-local-parse-errors wisi-parser-local))
     ))
 
-(defun wisi-process-parse--Check_Error (parser sexp)
-  ;; sexp is [Check_Error code name-1-pos name-2-pos <string>]
-  ;; see ‘wisi-process-parse--execute’
-  (let* ((name-1-pos (aref sexp 2))
-       (name-1-col (1+ (progn (goto-char name-1-pos)(current-column)))) ;; gnat columns are 1 + emacs columns
+(defun wisi-process-parse--In_Parse_Action_Error (parser sexp)
+  ;; sexp is [In_Parse_Action_Error code name-1-pos name-2-pos <string>]
+  ;; see `wisi-process-parse--execute'
+  (let ((name-1-pos (aref sexp 2))
        (name-2-pos (aref sexp 3))
-       (name-2-col (1+ (progn (goto-char name-2-pos)(current-column))))
-       (file-name (if (buffer-file-name) (file-name-nondirectory (buffer-file-name)) ""))
-       ;; file-name can be nil during vc-resolve-conflict
-       (err (make-wisi--parse-error
-             :pos (copy-marker name-1-pos)
-             :message
-             (format "%s:%d:%d: %s %s:%d:%d"
-                     file-name (line-number-at-pos name-1-pos) name-1-col
-                     (aref sexp 4)
-                     file-name (line-number-at-pos name-2-pos) name-2-col)))
-       )
+       (column-at-pos (lambda (pos) (goto-char pos)(current-column)))
+       (file-name (if (buffer-file-name) (file-name-nondirectory (buffer-file-name)) "")))
+    ;; file-name can be nil during vc-resolve-conflict
+
+    (when (not name-1-pos)
+      (setq name-1-pos name-2-pos)
+      (setq name-2-pos 0))
+
+    (when (not name-2-pos)
+      (setq name-2-pos 0))
 
-    (push err (wisi-parser-parse-errors parser))
+    (push (make-wisi--parse-error
+          :pos (copy-marker name-1-pos)
+          :pos-2 (copy-marker name-2-pos)
+          :message
+          (format
+           (concat "%s:%d:%d: %s"
+                   (when (> 0 name-2-pos) " %s:%d:%d"))
+           file-name (line-number-at-pos name-1-pos t) (funcall column-at-pos name-1-pos)
+           (aref sexp 4)
+           (when (> 0 name-2-pos)
+             file-name (line-number-at-pos name-2-pos t) (funcall column-at-pos name-2-pos))))
+         (wisi-parser-local-parse-errors wisi-parser-local))
     ))
 
+(defun wisi-process-parse--find-err (pos errors)
+  (let ((result))
+    (dolist (err errors)
+      (when (or (= pos (wisi--parse-error-pos err))
+               (and (wisi--parse-error-pos-2 err) (= pos (wisi--parse-error-pos-2 err))))
+       (setq result err)))
+    result))
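
A minimal sketch of the lookup above (positions and messages are invented); an error matches on either its primary position or its secondary pos-2 position:

;; Illustration only.
(let ((errors (list (make-wisi--parse-error :pos 10 :message "first")
                    (make-wisi--parse-error :pos 20 :pos-2 27 :message "second"))))
  (wisi--parse-error-message (wisi-process-parse--find-err 27 errors)))
;; => "second"  (matched via pos-2)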
+
 (defun wisi-process-parse--Recover (parser sexp)
-  ;; sexp is [Recover [pos [inserted] [deleted] deleted-region]...]
-  ;; see ‘wisi-process-parse--execute’
-  ;; convert to list of wisi--parse-error-repair, add to last error
-  (let* ((token-table (wisi-process--parser-token-table parser))
-        (last-error (car (wisi-parser-parse-errors parser))))
+  ;; sexp is [Recover [error-pos edit-pos [inserted] [deleted] deleted-region]...]
+  ;; see `wisi-process-parse--execute'
+  ;; convert to list of wisi--parse-error-repair, add to corresponding error
+  (let ((token-table (wisi-process--parser-token-table parser)))
+
     (unless (= 1 (length sexp))
       (cl-do ((i 1 (1+ i))) ((= i (length sexp)))
-       (push
-        (make-wisi--parse-error-repair
-         :pos (copy-marker (aref (aref sexp i) 0))
-         :inserted (mapcar (lambda (id) (aref token-table id)) (aref (aref sexp i) 1))
-         :deleted  (mapcar (lambda (id) (aref token-table id)) (aref (aref sexp i) 2))
-         :deleted-region (aref (aref sexp i) 3))
-        (wisi--parse-error-repair last-error)))
-      )))
+       (let* ((error-pos (aref (aref sexp i) 0))
+              (edit-pos (aref (aref sexp i) 1))
+              (err (wisi-process-parse--find-err error-pos (wisi-parser-local-parse-errors wisi-parser-local))))
+          (when err
+           (cl-nsubst
+            (push
+             (make-wisi--parse-error-repair
+              :pos (copy-marker edit-pos)
+              :inserted (mapcar (lambda (id) (aref token-table id)) (aref (aref sexp i) 2))
+              :deleted  (mapcar (lambda (id) (aref token-table id)) (aref (aref sexp i) 3))
+              :deleted-region (aref (aref sexp i) 4))
+             (wisi--parse-error-repair err)) ;; new
+            err ;; old
+            (wisi-parser-local-parse-errors wisi-parser-local) ;; tree
+            :test (lambda (old el) (= (wisi--parse-error-pos old) (wisi--parse-error-pos err)))))
+          )))
+    ))
 
 (defun wisi-process-parse--End (parser sexp)
   ;; sexp is [End pos]
-  ;; see ‘wisi-process-parse--execute’
+  ;; see `wisi-process-parse--execute'
   (setf (wisi-process--parser-end-pos parser) (1+ (aref sexp 1))))
 
 (defun wisi-process-parse--Edit (parser sexp)
   ;; sexp is [Edit begin end text]
-  (delete-region (aref sexp 1) (1+ (aref sexp 2)))
-  (goto-char (aref sexp 1))
-  (insert (aref sexp 3)))
+  (save-excursion
+    (delete-region (aref sexp 1) (1+ (aref sexp 2)))
+    (goto-char (aref sexp 1))
+    (insert (aref sexp 3))))
 
 (defun wisi-process-parse--Language (parser sexp)
   ;; sexp is [Language language-action ...]
   (funcall (aref (wisi-process--parser-language-action-table parser) (aref sexp 1)) sexp))
 
+(defun wisi-process-parse--to-tree-node (parser sexp)
+  "Return a `wisi-tree-node' or nil."
+  (when (aref sexp 2)
+    (make-wisi-tree-node
+     :address     (aref sexp 2)
+     :id          (aref (wisi-process--parser-token-table parser) (aref sexp 3))
+     :char-region (cons (aref sexp 4) (aref sexp 5)))))
+
+(defun wisi-process-parse--Query (parser sexp)
+  ;; sexp is [Query query-label ...] see `wisi-parse-tree-queries'
+  (cl-ecase (car (rassoc (aref sexp 1) wisi-parse-tree-queries))
+    ((node containing-statement ancestor parent child)
+     (setf (wisi-process--parser-query-result parser)
+          (wisi-process-parse--to-tree-node parser sexp)))
+    (print
+     (setf (wisi-process--parser-query-result parser)
+          t))
+    ))
+
 (defun wisi-process-parse--execute (parser sexp)
-  "Execute encoded SEXP sent from external process."
+  "Execute encoded SEXP sent from external process.
+Source buffer is current."
   ;; sexp is [action arg ...]; an encoded instruction that we need to execute
   ;;
   ;; Actions:
@@ -457,27 +744,30 @@ complete."
   ;;    If error recovery is successful, there can be more than one
   ;;    error reported during a parse.
   ;;
-  ;; [Check_Error code name-1-pos name-2-pos <string>]
-  ;;    The parser detected a semantic check error; save information
-  ;;    for later reporting.
+  ;; [In_Parse_Action_Error code name-1-pos name-2-pos <string>]
+  ;;    The parser detected an in-parse action error; save information
+  ;;    for later reporting. Either of the name-*-pos may be 0,
+  ;;    indicating a missing name.
   ;;
   ;;    If error recovery is successful, there can be more than one
   ;;    error reported during a parse.
   ;;
-  ;; [Recover [pos [inserted] [deleted] deleted-region]...]
+  ;; [Recover [error-pos edit-pos [inserted] [deleted] deleted-region]...]
   ;;    The parser finished a successful error recovery.
   ;;
-  ;;    pos: Buffer position
+  ;;    error-pos: Buffer position where error was detected
+  ;;
+  ;;    edit-pos: Buffer position of inserted/deleted tokens
   ;;
   ;;    inserted: Virtual tokens (terminal or non-terminal) inserted
-  ;;    before pos.
+  ;;    before edit-pos.
   ;;
-  ;;    deleted: Tokens deleted after pos.
+  ;;    deleted: Tokens deleted after edit-pos.
   ;;
-  ;;    deleted-region: source buffer region containing deleted tokens
+  ;;    deleted-region: source buffer char region containing deleted tokens
   ;;
   ;;    Args are token ids; index into parser-token-table. Save the
-  ;;    information for later use by ’wisi-repair-error’.
+  ;;    information for later use by `wisi-repair-error'.
   ;;
   ;; [Edit begin end text]
   ;;    Replace region BEGIN . END with TEXT; normally the result of a
@@ -487,210 +777,270 @@ complete."
   ;;    Dispatch to a language-specific action, via
   ;;    `wisi-process--parser-language-action-table'.
   ;;
+  ;; [Query query-label ...]
   ;;
   ;; Numeric action codes are given in the case expression below
 
-  (cl-ecase (aref sexp 0)
-    (1  (wisi-process-parse--Navigate_Cache parser sexp))
-    (2  (wisi-process-parse--Face_Property parser sexp))
-    (3  (wisi-process-parse--Indent parser sexp))
-    (4  (wisi-process-parse--Lexer_Error parser sexp))
-    (5  (wisi-process-parse--Parser_Error parser sexp))
-    (6  (wisi-process-parse--Check_Error parser sexp))
-    (7  (wisi-process-parse--Recover parser sexp))
-    (8  (wisi-process-parse--End parser sexp))
-    (9  (wisi-process-parse--Name_Property parser sexp))
-    (10 (wisi-process-parse--Edit parser sexp))
-    (11 (wisi-process-parse--Language parser sexp))
-    ))
+  (condition-case err
+      (cl-ecase (aref sexp 0)
+       (1  (wisi-process-parse--Navigate_Cache parser sexp))
+       (2  (wisi-process-parse--Face_Property parser sexp))
+       (3  (wisi-process-parse--Indent parser sexp))
+       (4  (wisi-process-parse--Lexer_Error parser sexp))
+       (5  (wisi-process-parse--Parser_Error parser sexp))
+       (6  (wisi-process-parse--In_Parse_Action_Error parser sexp))
+       (7  (wisi-process-parse--Recover parser sexp))
+       (8  (wisi-process-parse--End parser sexp))
+       (9  (wisi-process-parse--Name_Property parser sexp))
+       (10 (wisi-process-parse--Edit parser sexp))
+       (11 (wisi-process-parse--Language parser sexp))
+       (12 (wisi-process-parse--Query parser sexp))
+       )
+    (error
+     (when (< 0 wisi-debug)
+       (message "wi-pr-pa-Execute '%s' error: %s" sexp err))
+     (signal (car err) (cdr err)))))
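
To make the dispatch concrete, a small sketch of decoding one encoded response (the message content is invented):

;; Illustration only.
(let ((sexp (car (read-from-string "[3 12 340 4]"))))
  ;; (aref sexp 0) => 3, so the real loop would call
  ;; wisi-process-parse--Indent: line 12 starts at char position 340
  ;; and receives indent 4.
  (aref sexp 0))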
 
 ;;;;; main
 
+(cl-defmethod wisi-parse-kill-buffer ((parser wisi-process--parser))
+  (when (process-live-p (wisi-process--parser-process parser))
+    (wisi-process--kill-context parser)))
+
+(cl-defmethod wisi-parse-reset ((parser wisi-process--parser))
+  (setf (wisi-process--parser-busy parser) nil)
+  (wisi-parse-require-process parser)
+  (wisi-process--kill-context parser)
+  (wisi-process-parse--wait parser))
+
 (cl-defmethod wisi-parse-kill ((parser wisi-process--parser))
   (when (process-live-p (wisi-process--parser-process parser))
     ;; We used to send a quit command first, to be nice. But there's
     ;; no timeout on that, so it would hang when the process
     ;; executable is not reading command input.
-    (when (process-live-p (wisi-process--parser-process parser))
-      (kill-process (wisi-process--parser-process parser)))
-    )
-  (setf (wisi-process--parser-busy parser) nil))
-
-(defvar wisi--lexer nil) ;; wisi-elisp-lexer.el
-(declare-function wisi-elisp-lexer-reset "wisi-elisp-lexer")
-
-(defun wisi-process-parse--prepare (parser)
-  ;; font-lock can trigger a face parse while navigate or indent parse
-  ;; is active, due to ‘accept-process-output’ below. Signaling an
-  ;; error tells font-lock to try again later.
-  (if (wisi-process--parser-busy parser)
-      (progn
-       (setf (wisi-parser-parse-errors parser)
-             (list
-              (make-wisi--parse-error
-               :pos 0
-               :message (format "%s:%d:%d: parser busy (try ’wisi-kill-parser’)"
-                                (if (buffer-file-name) (file-name-nondirectory (buffer-file-name)) "") 1 1))
-              ))
-       (error "%s parse abandoned; parser busy - use partial parse?" wisi--parse-action)
-       )
 
-    ;; It is not possible for a background elisp function (ie
-    ;; font-lock) to interrupt this code between checking and setting
-    ;; parser-busy; background elisp can only run when we call
-    ;; accept-process-output below.
+    ;; Don't let font-lock start a parse for face while waiting for
+    ;; the process to die. FIXME: that just means font-lock will
+    ;; restart the process immediately; tell font-lock not to do that?
     (setf (wisi-process--parser-busy parser) t)
+    (wisi-parse-log-message parser "kill process")
+    (kill-process (wisi-process--parser-process parser)))
+  (setf (wisi-process--parser-busy parser) nil))
 
-    ;; If the parser process has not started yet,
-    ;; wisi-process-parse--require-process calls
-    ;; wisi-process-parse--wait, which can let font-lock invoke the
-    ;; parser again. Thus this call must be after we set
-    ;; wisi-process--parser-busy t
-    (wisi-process-parse--require-process parser)
-
-    (setf (wisi-process--parser-total-wait-time parser) 0.0)
-    (setf (wisi-parser-lexer-errors parser) nil)
-    (setf (wisi-parser-parse-errors parser) nil)
-    ))
+(cl-defun wisi-process-parse--prepare (parser parse-action &key nowait)
+  "Check for parser busy and startup, mark parser busy, require parser 
process."
+  (when (wisi-process--parser-busy parser)
+    (when (< 1 wisi-debug)
+      (wisi-parse-log-message parser (format "parse--prepare %s in %s parser 
busy" parse-action (current-buffer))))
+
+    (when
+       ;; Decide whether to wait or signal.
+       (cl-ecase parse-action
+         (face
+          ;; font-lock can trigger a face parse during initial full
+          ;; parse, or via a timer delay while navigate or indent
+          ;; parse is active, due to `accept-process-output' in
+          ;; w-p-p--handle-messages. If we signal here, it will try
+          ;; again later.
+          t)
+
+         ((navigate indent none refactor query debug)
+          (cond
+           (wisi-parse-full-active
+            ;; User just opened a buffer and is doing some action
+            ;; that requires a parse; wait for full parse in
+            ;; other buffer to complete.
+            (wisi-parse-log-message
+             parser
+             (format "parse %s in %s waiting for full parse of %s to complete 
..."
+                     parse-action
+                     (current-buffer)
+                     (car wisi-parse-full-active)))
+            (wisi-process-parse--wait parser)
+            nil)
+
+           ((eq #'wisi-process-parse--filter (process-filter (wisi-process--parser-process parser)))
+            ;; Initial process start still running
+            (message "parse %s in %s waiting for parser process start ..." 
parse-action (current-buffer))
+            (wisi-process-parse--wait parser)
+            nil)
+
+           (t t))))
+      (error "parse %s in %s abandoned; parser busy" parse-action 
(current-buffer))))
+
+  ;; parser is now not busy
+  (when (< 1 wisi-debug)
+    (wisi-parse-log-message parser (format "parse--prepare %s in %s parser not 
busy" parse-action (current-buffer))))
+
+  ;; It is not possible for a background elisp function (ie
+  ;; font-lock run from a timer) to interrupt this code between
+  ;; checking and setting parser-busy; background elisp can only run
+  ;; when we call accept-process-output in w-p-p--handle-messages.
+  (setf (wisi-process--parser-busy parser) t)
+  (setf (wisi-process--parser-source-buffer parser) (current-buffer))
+
+  ;; If the parser process has not started yet,
+  ;; wisi-parse-require-process calls wisi-process-parse--wait, which
+  ;; can let font-lock invoke the parser again. Thus this call must be
+  ;; after we set wisi-process--parser-busy t
+  (wisi-parse-require-process parser :nowait nowait)
+
+  (setf (wisi-process--parser-total-wait-time parser) 0.0)
+
+  ;; We don't clear errors here; errors are only cleared before a parse, not before a post-parse.
+
+  ;; We don't erase the parser-buffer here, because we call --send*
+  ;; without --prepare in response to wisi-file_not_found.
+  )
 
 (defun wisi-process-parse--handle-messages (parser)
-  (condition-case-unless-debug err
-      (let* ((source-buffer (current-buffer))
-            (response-buffer (wisi-process--parser-buffer parser))
-            (process (wisi-process--parser-process parser))
-            (w32-pipe-read-delay 0) ;; fastest subprocess read
-            response
-            response-end
-            (response-count 0)
-            sexp-start
-            (need-more nil) ;; point-max if need more, to check for new input
-            (done nil)
-            start-wait-time)
-
-       (set-buffer response-buffer)
-       (setq sexp-start (point-min))
-
-       ;; process responses until prompt received
-       (while (not done)
-
-         ;; process all complete responses currently in buffer
-         (while (and (not need-more)
-                     (not done))
-
-           (goto-char sexp-start)
-
-           (cond
-            ((eobp)
-             (setq need-more (point-max)))
-
-            ((looking-at wisi-process-parse-prompt)
-             (setq done t))
-
-            ((or (looking-at "\\[") ;; encoded action
-                 (looking-at "(")) ;; error or other elisp expression to eval
-             (condition-case nil
-                 (setq response-end (scan-sexps (point) 1))
-               (error
-                ;; incomplete response
-                (setq need-more (point-max))
-                nil))
-
-             (unless need-more
-               (setq response-count (1+ response-count))
-               (setq response (car (read-from-string (buffer-substring-no-properties (point) response-end))))
-               (goto-char response-end)
+  ;; signals 'wisi-file_not_found if parser reports (file-not-found)
+  (let ((response-buffer (wisi-process--parser-buffer parser))
+        (source-buffer (wisi-process--parser-source-buffer parser))
+       log-start)
+    (condition-case err
+       (let* ((process (wisi-process--parser-process parser))
+              (w32-pipe-read-delay 0) ;; fastest subprocess read
+              response
+              response-end
+              (response-count 0)
+              sexp-start
+              (need-more nil) ;; t need more input to complete current sexp
+              (done nil)
+              start-wait-time)
+
+         (set-buffer response-buffer)
+         (setq log-start (point-min))
+
+         ;; User will set wisi-process-time-out in source-buffer, but
+         ;; we reference it from response-buffer.
+         (setq-local wisi-process-time-out (with-current-buffer source-buffer wisi-process-time-out))
+
+         (setq sexp-start (point-min))
+
+         ;; process responses until prompt received
+         (while (not done)
+
+           ;; process all complete responses currently in buffer
+           (while (and (not need-more)
+                       (not done))
+
+             (goto-char sexp-start)
+
+             (cond
+              ((eobp)
+               (setq need-more t))
+
+              ((looking-at wisi-process-parse-prompt)
                (forward-line 1)
-               (setq sexp-start (point))
+               (if (eobp)
+                   (setq done t)
+                 (if wisi-parse-full-active
+                     ;; Messages are probably from a parse command
+                     ;; sent from the mode hook, before the full
+                     ;; parse was started.
+                     nil
+                   (wisi-parse-log-message
+                    parser
+                    (format "extra messages \"%s\"" (buffer-substring (point) 
(point-max))))
+                   (error "wisi-process-parse: extra messages \"%s\"" 
(buffer-substring (point) (point-max))))))
+
+              ((wisi-in-comment-p)
+               ;; In debug output. Just move to beginning of comment;
+               ;; sexp loop will handle moving forward.
+               ;;
+               ;; (must be after prompt check; the prompt is a comment)
+               (goto-char (line-beginning-position))
+               (setq sexp-start (point)))
+
+              ((or (looking-at "\\[") ;; encoded action
+                   (looking-at "(")) ;; error or other elisp expression to eval
+               (condition-case nil
+                   (setq response-end (scan-sexps (point) 1))
+                 (error
+                  ;; incomplete response
+                  (setq need-more t)
+                  nil))
+
+               (unless need-more
+                 (setq response-count (1+ response-count))
+                 (setq response (car (read-from-string (buffer-substring-no-properties (point) response-end))))
+
+                 (goto-char response-end)
+                 (forward-line 1)
+                 (setq sexp-start (point))
 
-               (set-buffer source-buffer) ;; for put-text-property in actions
-               (cond
-                ((listp response)
-                 ;; non-syntax error of some sort
                  (cond
-                  ((equal '(parse_error) response)
-                   ;; Parser detected a syntax error, and recovery failed, so signal it.
-
-                   (when (> wisi-debug 0)
-                     ;; Save a copy of parser output; may be overwritten by subsequent parse face attempts.
-                     (set-buffer response-buffer)
-                     (let ((content (buffer-substring-no-properties (point-min) (point-max)))
-                           (buf-name (concat (buffer-name) "-save-error")))
-                       (set-buffer (get-buffer-create buf-name))
-                       (insert content)))
-
-                   (if (wisi-parser-parse-errors parser)
-                       (signal 'wisi-parse-error
-                               (wisi--parse-error-message (car (wisi-parser-parse-errors parser))))
-
-                     ;; can have no errors when testing a new parser
-                     (push
-                      (make-wisi--parse-error :pos 0 :message "parser failed 
with no message")
-                      (wisi-parser-parse-errors parser))
-                     (signal 'wisi-parse-error "parser failed with no 
message")))
-
-                  ((equal 'parse_error (car response))
-                   ;; Parser detected some other error non-fatal error, so signal it.
-                   (push
-                    (make-wisi--parse-error :pos 0 :message (cadr response))
-                    (wisi-parser-parse-errors parser))
-                   (signal 'wisi-parse-error (cdr response)))
-
-                  ((and (eq 'error (car response))
-                        (string-prefix-p "bad command:" (cadr response)))
-                   ;; Parser dropped bytes, is treating buffer
-                   ;; content bytes as commands. Kill the process
-                   ;; to kill the pipes; there is no other way to
-                   ;; flush them.
-                   (kill-process (wisi-process--parser-process parser))
-                   (signal 'wisi-parse-error "parser lost sync; killed"))
-
-                  (t
-                   ;; Some other error
-                   (condition-case-unless-debug err
-                       (eval response)
-                     (error
-                      (push (make-wisi--parse-error :pos (point) :message (cadr err)) (wisi-parser-parse-errors parser))
-                      (signal (car err) (cdr err)))))
-                  ))
-
-                ((arrayp response)
-                 ;; encoded action
-                 (condition-case-unless-debug err
-                     (wisi-process-parse--execute parser response)
-                   (wisi-parse-error
-                    (push (make-wisi--parse-error :pos (point) :message (cadr err)) (wisi-parser-parse-errors parser))
-                    (signal (car err) (cdr err)))
-
-                   (error ;; ie from un-commented [C:\Windows\system32\KERNEL32.DLL], or bug in action code above.
-                    (set-buffer response-buffer)
-                    (let ((content (buffer-substring-no-properties (point-min) (point-max)))
-                          (buf-name (concat (buffer-name) "-save-error")))
-                      (set-buffer (get-buffer-create buf-name))
-                      (insert content)
-                      (insert (format "%s" err))
-                      (error "parser failed; error messages in %s" buf-name)))
-                   ))
-                )
+                  ((listp response)
+                   ;; error or message
+                   (cond
+                    ((equal 'file_not_found (car response))
+                     ;; Parser does not have full text for file;
+                     ;; signal it, caller will do a full parse.
+                     (wisi-parse-log-message parser (buffer-substring log-start (point)))
+                     (set-buffer source-buffer)
+                     (signal 'wisi-file_not_found nil))
+
+                    ((equal 'parse_error (car response))
+                     ;; Parse failed for some reason; signal it, and report it in the error list.
+                     (with-current-buffer source-buffer
+                       (push
+                        (make-wisi--parse-error :pos 0 :message (cadr response))
+                        (wisi-parser-local-parse-errors wisi-parser-local)))
+                     (signal 'wisi-parse-error (cadr response)))
+
+                    ((and (eq 'error (car response))
+                          (string-prefix-p "bad command:" (cadr response)))
+                     ;; Parser dropped bytes, is treating buffer
+                     ;; content bytes as commands. Kill the process
+                     ;; to kill the pipes; there is no other way to
+                     ;; flush them.
+                     (wisi-parse-log-message parser "parser lost sync; killed")
+                     (kill-process (wisi-process--parser-process parser))
+                     (signal 'wisi-parse-error "parser lost sync; killed"))
+
+                    (t
+                     ;; Something else
+                     (condition-case-unless-debug err
+                         (eval response)
+                       (error
+                        (wisi-parse-log-message parser (cadr err))
+                        (with-current-buffer source-buffer
+                          (push (make-wisi--parse-error
+                                 :pos (point)
+                                 :message (cadr err))
+                                (wisi-parser-local-parse-errors wisi-parser-local)))
+                        (signal (car err) (cdr err)))))
+                    ))
+
+                  ((arrayp response)
+                   ;; encoded action
+                   (set-buffer source-buffer) ;; for put-text-property in actions
+                   (condition-case err
+                       (wisi-process-parse--execute parser response)
+
+                     (t ;; error from bug in action code above, or bad data from parser.
+                      (let ((msg (format "elisp processing of message '%s' failed in %s" response source-buffer)))
+                        (wisi-parse-log-message parser msg)
+                        (error msg)))
+                     ))
+                  )
 
-               (set-buffer response-buffer)
-               ))
+                 (set-buffer response-buffer)
+                 ))
 
-            (t
-             ;; debug output
-             (forward-line 1)
-             (setq sexp-start (point)))
-            )
+              (t
+               ;; debug output
+               (forward-line 1)
+               (setq sexp-start (point)))
+              )
              )
 
            (unless done
              ;; end of response buffer
              (unless (process-live-p process)
-               (set-buffer response-buffer)
-               (let ((content (buffer-substring-no-properties (point-min) (point-max)))
-                     (buf-name (concat (buffer-name) "-save-error")))
-                 (set-buffer (get-buffer-create buf-name))
-                 (insert content)
-                 (error "parser failed; error messages in %s" buf-name)))
+               (wisi-parse-log-message parser "process died")
+               (error "parser process died"))
 
              (setq start-wait-time (float-time))
 
@@ -710,135 +1060,603 @@ complete."
                    (+ (wisi-process--parser-total-wait-time parser)
                       (- (float-time) start-wait-time)))
 
-             (when (and (= (point-max) need-more)
+             (when (and need-more
                         (> (wisi-process--parser-total-wait-time parser) 
wisi-process-time-out))
-               (error "wisi-process-parse timing out; increase 
`wisi-process-time-out'? (or bad syntax in process output)"))
+               (error (concat "wisi-process-parse timing out; increase 
`wisi-process-time-out'?"
+                              " (or bad syntax in process output)")))
 
              (setq need-more nil))
            );; while not done
 
          ;; got command prompt
-         (unless (process-live-p process)
-           (wisi-process-parse-show-buffer parser)
-           (error "wisi-process-parse process died"))
+         (set-buffer response-buffer)
+         (wisi-parse-log-message parser (buffer-substring log-start (point)))
 
          (setf (wisi-process--parser-response-count parser) response-count)
-
          (setf (wisi-process--parser-busy parser) nil)
          (set-buffer source-buffer)
-         ;; If we get here, the parse succeeded (possibly with error
-         ;; recovery); move point to end of parsed region.
-         (goto-char (wisi-process--parser-end-pos parser))
+         (when (< 1 wisi-debug)
+           (wisi-parse-log-message parser (format "parse--handle-messages in 
%s done" source-buffer)))
          )
 
+      ;; These do _not_ catch 'wisi-file_not_found
       (wisi-parse-error
+       (set-buffer response-buffer)
+       (wisi-parse-log-message parser (buffer-substring log-start (point)))
        (setf (wisi-process--parser-busy parser) nil)
+       (set-buffer source-buffer)
        (signal (car err) (cdr err)))
 
       (error
+       (wisi-parse-log-message parser (cadr err))
+       (set-buffer response-buffer)
+       (wisi-parse-log-message parser (buffer-substring log-start (point)))
        (setf (wisi-process--parser-busy parser) nil)
-       (signal (car err) (cdr err))
-       )))
-
-(cl-defmethod wisi-parse-current ((parser wisi-process--parser) begin send-end parse-end)
-  (wisi-process-parse--prepare parser)
-  (let ((total-line-count (1+ (count-lines (point-max) (point-min)))))
-    (setf (wisi-process--parser-line-begin parser) (wisi--set-line-begin total-line-count))
-    (wisi-process-parse--send-parse parser begin send-end parse-end)
-
-    ;; We reset the elisp lexer, because post-parse actions may use it.
-    (when wisi--lexer
-      (wisi-elisp-lexer-reset total-line-count wisi--lexer))
-    )
+       (set-buffer source-buffer)
+       (signal (car err) (cdr err)))
+      )))
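
As an illustration of the read loop above, a toy version of pulling one complete response out of a buffer of parser output; the stream content and the prompt text are assumptions, not taken from this patch:

;; Illustration only.
(with-temp-buffer
  (insert "[8 2040]\n;;> \n")
  (goto-char (point-min))
  (car (read-from-string
        (buffer-substring-no-properties (point) (scan-sexps (point) 1)))))
;; => [8 2040], an End message; the real loop hands it to
;; wisi-process-parse--execute and stops when it reaches the prompt.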
+
+(cl-defun wisi-process-parse--handle-messages-file-not-found (parser action &key no-text)
+  (funcall action)
+  (condition-case _err
+      (wisi-process-parse--handle-messages parser)
+    ('wisi-file_not_found
+     (cond
+      (no-text
+       (let ((cmd (format "create-context \"%s\"" (if (buffer-file-name) 
(buffer-file-name) (buffer-name))))
+            (process (wisi-process--parser-process parser)))
+        (with-current-buffer (wisi-process--parser-buffer parser)
+          (erase-buffer))
+        (wisi-parse-log-message parser cmd)
+        (process-send-string process (wisi-process-parse--add-cmd-length cmd)))
+       (wisi-process-parse--wait parser)
+       (wisi-process-parse--handle-messages parser)
+       (funcall action)
+       (wisi-process-parse--handle-messages parser)
+       )
+      (t
+       (message "parsing buffer ...")
+       (wisi-process-parse--send-incremental-parse parser t)
+       (wisi-process-parse--wait parser)
+       (wisi-process-parse--handle-messages parser)
+       (message "parsing buffer ... done")
+       (funcall action)
+       (wisi-process-parse--handle-messages parser)
+       )))))
+
+(cl-defmethod wisi-parse-enable-memory-report ((parser wisi-parser))
+  (wisi-process-parse--prepare parser 'debug)
+  (let* ((cmd "enable_memory_report")
+        (process (wisi-process--parser-process parser)))
+    (with-current-buffer (wisi-process--parser-buffer parser)
+      (erase-buffer))
+
+    (wisi-parse-log-message parser cmd)
+    (process-send-string process (wisi-process-parse--add-cmd-length cmd))
+    (wisi-process-parse--handle-messages parser)))
+
+(cl-defmethod wisi-parse-memory-report-reset ((parser wisi-parser))
+  (wisi-process-parse--prepare parser 'debug)
+  (let* ((cmd "memory_report_reset")
+        (process (wisi-process--parser-process parser)))
+    (with-current-buffer (wisi-process--parser-buffer parser)
+      (erase-buffer))
+
+    (wisi-parse-log-message parser cmd)
+    (process-send-string process (wisi-process-parse--add-cmd-length cmd))
+    (wisi-process-parse--handle-messages parser)))
+
+(cl-defmethod wisi-parse-memory-report ((parser wisi-process--parser))
+  (wisi-process-parse--prepare parser 'debug)
+  (let* ((cmd "memory_report")
+        (process (wisi-process--parser-process parser)))
+    (with-current-buffer (wisi-process--parser-buffer parser)
+      (erase-buffer))
+
+    (wisi-parse-log-message parser cmd)
+    (process-send-string process (wisi-process-parse--add-cmd-length cmd))
+    (wisi-process-parse--handle-messages parser)))
+
+(cl-defmethod wisi-parse-current ((parser wisi-process--parser) parse-action begin send-end parse-end)
+  (wisi-process-parse--prepare parser parse-action)
+  (setf (wisi-parser-local-lexer-errors wisi-parser-local) nil)
+  (setf (wisi-parser-local-parse-errors wisi-parser-local) nil)
+  (wisi-process-parse--send-parse parser parse-action begin send-end parse-end)
   (wisi-process-parse--handle-messages parser)
-  (cons begin (point))
+  (cons begin (wisi-process--parser-end-pos parser))
   )
 
-(cl-defmethod wisi-refactor ((parser wisi-process--parser) refactor-action parse-begin parse-end edit-begin)
-  (save-excursion
-    (wisi-process-parse--prepare parser)
-    (wisi-process-parse--send-refactor parser refactor-action parse-begin parse-end edit-begin)
-    (wisi-process-parse--handle-messages parser))
-  )
+(cl-defmethod wisi-parse-incremental ((parser wisi-process--parser) parse-action &key full nowait)
+  (when (and full nowait (not (wisi-process--parser-version-checked wisi-parser-shared)))
+    ;; The parser process has not finished starting up, or has not yet
+    ;; been started. If this is the very first Ada file in the current
+    ;; project, and there is more text in the file than the process
+    ;; send buffer holds, w-p-p--send-* hangs waiting for the process
+    ;; to start reading, which is after it loads the parse table,
+    ;; which can take noticeable time for Ada.
+    (message "starting parser ..."))
+  (wisi-process-parse--prepare parser parse-action :nowait nowait)
+  (setf (wisi-parser-local-lexer-errors wisi-parser-local) nil)
+  (setf (wisi-parser-local-parse-errors wisi-parser-local) nil)
+  (cond
+   ((and full nowait)
+    (set-process-filter (wisi-process--parser-process parser) #'wisi-process-parse--filter)
+    (setq wisi-parse-full-active (cons (current-buffer) (cons (point-max) (point-min))))
+    (read-only-mode 1)
+    (wisi-process-parse--send-incremental-parse parser full))
+   (t
+    (wisi-process-parse--send-incremental-parse parser full)
+    (condition-case _err
+       (wisi-process-parse--handle-messages parser)
+      ('wisi-file_not_found
+       (message "parsing buffer ...")
+       (wisi-process-parse--send-incremental-parse parser t)
+       (wisi-process-parse--handle-messages parser)
+       (message "parsing buffer ... done")
+       )))))
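
A usage sketch of the two ways a major mode might request the initial parse; the 'none parse-action is one of the actions accepted by wisi-process-parse--prepare, and wisi-parser-shared is the buffer-local parser object:

;; Illustration only.
;; Foreground: block until the full parse completes.
(wisi-parse-incremental wisi-parser-shared 'none :full t)
;; Background: return immediately; the buffer stays read-only until
;; wisi-process-parse--filter sees the prompt and clears
;; wisi-parse-full-active.
(wisi-parse-incremental wisi-parser-shared 'none :full t :nowait t)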
+
+(cl-defmethod wisi-post-parse ((parser wisi-process--parser) parse-action begin end)
+  (wisi-process-parse--prepare parser parse-action)
+  (wisi-process-parse--handle-messages-file-not-found
+   parser
+   (lambda ()
+     ;; Must match emacs_wisi_common_parse.adb Get_Parse_Action.
+     (let* ((cmd (format "post-parse \"%s\" \"%s\" %d %d %d %d %d \"%s\""
+                     (if (buffer-file-name) (buffer-file-name) (buffer-name))
+                     wisi-parser-verbosity
+                     (cl-ecase parse-action
+                       (navigate 0) ;; same order as wisi-parse-common 
wisi-post-parse-actions
+                       (face 1)
+                       (indent 2))
+                     (position-bytes begin)
+                     ;; indent-region passes markers
+                     (if (markerp begin) (marker-position begin) begin)
+                     (position-bytes (min (point-max) end))
+                     (min (point-max) (if (markerp end) (marker-position end) 
end))
+                     (wisi-parse-format-language-options parser)
+                     ))
+        (process (wisi-process--parser-process parser)))
 
-(defvar wisi--parser nil) ;; wisi.el
-
-(defun wisi-process-send-tokens-noop ()
-  "Run lexer, send tokens to subprocess; otherwise no operation.
-For use with ’wisi-time’."
-  (wisi-process-parse--require-process wisi--parser)
-  (if (wisi-process--parser-busy wisi--parser)
-      (error "%s parser busy" wisi--parse-action)
-
-    ;; not busy
-    (let* ((source-buffer (current-buffer))
-          (action-buffer (wisi-process--parser-buffer wisi--parser))
-          (process (wisi-process--parser-process wisi--parser))
-          (sexp-start (point-min))
-          (need-more nil)
-          (done nil))
-
-      (setf (wisi-process--parser-busy wisi--parser) t)
-      (wisi-process-parse--send-noop wisi--parser)
-
-      (set-buffer action-buffer)
-      (while (and (process-live-p process)
-                 (not done))
-       (goto-char sexp-start)
-       (cond
-        ((eobp)
-         (setq need-more t))
-
-        ((looking-at wisi-process-parse-prompt)
-         (setq done t))
-
-        (t
-         (forward-line 1)
-         (setq sexp-start (point)))
-        )
-
-       (unless done
-         ;; end of response buffer
-         (unless (process-live-p process)
-           (wisi-process-parse-show-buffer wisi--parser)
-           (error "wisi-process-parse process died"))
-
-         (accept-process-output process 1.0 nil nil)
-         (setq need-more nil))
-       )
-      (set-buffer source-buffer)
-      (setf (wisi-process--parser-busy wisi--parser) nil)
-      )))
+    (with-current-buffer (wisi-process--parser-buffer parser)
+      (erase-buffer))
+
+    (wisi-parse-log-message parser cmd)
+    (process-send-string process (wisi-process-parse--add-cmd-length cmd))
+
+    ;; We don't wait for the send to complete here.
+    ))))
+
+(cl-defmethod wisi-refactor ((parser wisi-process--parser) refactor-action pos)
+  (when (and wisi-incremental-parse-enable wisi--changes)
+    (wisi-parse-incremental parser 'refactor))
+
+  (wisi-process-parse--prepare parser 'refactor)
+  (wisi-process-parse--handle-messages-file-not-found
+   parser
+   (lambda () (wisi-process-parse--send-refactor parser refactor-action pos))))
+
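A hedged sketch of calling the new three-argument wisi-refactor. The encoding of REFACTOR-ACTION must match wisi-ada.ads Refactor_Label (the labels are listed in the log converter further down); passing it as the integer label is an assumption here:

    ;; Sketch: apply the first refactoring at point
    ;; (0 = Method_Object_To_Object_Method per wisi-ada.ads).
    (wisi-refactor wisi-parser-shared 0 (point))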
+(cl-defmethod wisi-parse-tree-query ((parser wisi-process--parser) query &rest args)
+  (cl-assert wisi-incremental-parse-enable)
+  (cl-assert (assoc query wisi-parse-tree-queries))
+  (when wisi--changes
+    (wisi-parse-incremental parser 'query))
+
+  (wisi-process-parse--prepare parser 'query)
+  (setf (wisi-process--parser-query-result parser) nil)
+  (wisi-process-parse--handle-messages-file-not-found
+   parser
+   (lambda ()
+     (apply 'wisi-process-parse--send-query parser query args)))
+  (wisi-process--parser-query-result parser))
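The only caller in this patch is the 'dump query in wisi-process-parse-dump-tree below; as a sketch (the per-query argument lists come from wisi-parse-tree-queries and are an assumption here), other queries such as those named in the log converter would be issued the same way:

    ;; Sketch: ask the parser which statement contains point.
    (wisi-parse-tree-query wisi-parser-shared 'containing_statement (point))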
 
 ;;;;; debugging
-(defun wisi-process-parse-ids-to-enum (token-table &rest int-ids)
-  "Translate INT-IDS from process integer token ids to elisp enumeral ids.
-Returns reversed sequence."
-  (let ((enum-ids nil))
-    (cl-dolist (i int-ids)
-      (push (aref token-table i) enum-ids))
-    enum-ids))
-
-(defun wisi-process-parse-show-args ()
-  "Show the partial parse command-line args for run_ada_[lalr | lr1]_parse for 
current region.
-Also add it to the kill ring."
+(defun wisi-process-parse-save-text (parser save-file-name auto)
+  (wisi-process-parse--prepare parser 'debug)
+  (let* ((cmd
+         (format (concat (if auto "save_text_auto" "save_text")
+                           " \"%s\" \"%s\"")
+                 (if (buffer-file-name) (buffer-file-name) (buffer-name))
+                 save-file-name))
+        (process (wisi-process--parser-process parser)))
+    (with-current-buffer (wisi-process--parser-buffer parser)
+      (erase-buffer))
+
+    (wisi-parse-log-message parser cmd)
+    (process-send-string process (wisi-process-parse--add-cmd-length cmd))
+    (wisi-process-parse--handle-messages parser)))
+
+(defun wisi-process-parse-dump-tree (save-file-root)
+  (interactive "Fsave-file-root: ")
+  (let ((parser wisi-parser-shared))
+    (wisi-process-parse--prepare parser 'debug)
+    ;; Also save the source text, so we have a complete test case
+    ;; starting point.
+    (let* ((cmd
+           (format (concat "save_text" " \"%s\" \"%s\"")
+                   (if (buffer-file-name) (buffer-file-name) (buffer-name))
+                   (concat save-file-root ".source")))
+          (process (wisi-process--parser-process parser)))
+      (with-current-buffer (wisi-process--parser-buffer parser)
+       (erase-buffer))
+
+      (wisi-parse-log-message parser cmd)
+      (process-send-string process (wisi-process-parse--add-cmd-length cmd))
+      (wisi-process-parse--handle-messages parser))
+
+    (wisi-parse-tree-query parser 'dump (concat save-file-root ".tree_text"))))
+
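For example, M-x wisi-process-parse-dump-tree with a root of "/tmp/case1" writes the buffer text to /tmp/case1.source and the tree dump to /tmp/case1.tree_text (the /tmp path is only an illustration); from Lisp:

    ;; Sketch: save a complete test-case starting point for the current buffer.
    (wisi-process-parse-dump-tree "/tmp/case1")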
+(defun wisi-process-all-changes-to-cmd (&optional cmd-buffer-name)
+  "Convert wisi-parser-local-all-changes in current buffer to command file
+in CMD-BUFFER-NAME."
   (interactive)
-  (let* ((begin (region-beginning))
-        (end   (region-end))
-        (parse-action (wisi-read-parse-action))
-        (msg
-         (format "%s %s %d %d %d %d %d %d %d"
-                 (file-name-nondirectory (buffer-file-name))
-                 parse-action
-                 (position-bytes begin)
-                 (position-bytes end)
-                 (position-bytes end)
-                 begin ;; char_pos
-                 (line-number-at-pos begin)
-                 (line-number-at-pos end)
-                 (save-excursion (goto-char begin) (back-to-indentation) 
(current-column));; indent-begin
-                 )))
-    (kill-new msg)
-    (message msg)))
+  (when (equal -1 (wisi-parser-local-all-changes wisi-parser-local))
+    (user-error "saving all changes is disabled"))
+  (unless cmd-buffer-name
+    (setq cmd-buffer-name "debug.cmd"))
+  (let ((changes (nreverse (copy-sequence (wisi-parser-local-all-changes 
wisi-parser-local))))
+       (cmd-buffer (get-buffer-create cmd-buffer-name))
+       (source-file (buffer-file-name))
+       (verbosity wisi-parser-verbosity)
+       (mckenzie_zombie_limit wisi-mckenzie-zombie-limit)
+       (mckenzie_enqueue_limit wisi-mckenzie-enqueue-limit)
+       (language_options (wisi-parse-format-language-options 
wisi-parser-shared))
+       edit begin end)
+    (set-buffer cmd-buffer)
+    (erase-buffer)
+
+    (setq-local comment-start "-- ")
+
+    (insert "-- -*- comment-start: \"" comment-start "\" -*-" "\n")
+
+    (insert "file " source-file "-wisi-change-start\n")
+
+    (when (not (string-equal verbosity ""))
+      (insert "verbosity " verbosity "\n"))
+
+    (insert "save_text_auto debug_edited\n")
+
+    (when (or mckenzie_zombie_limit mckenzie_enqueue_limit)
+      (insert "mckenzie_options ")
+
+      (when mckenzie_zombie_limit
+       (insert (format "zombie_limit=%d " mckenzie_zombie_limit)))
+
+      (when mckenzie_enqueue_limit
+       (insert (format "enqueue_limit=%d " mckenzie_enqueue_limit)))
+
+      ;; parse-max-parallel is not specified by a file command,
+      ;; only by a command line option.
+      (insert "\n"))
+
+    (insert "language_params " language_options "\n\n")
+
+    (insert "parse_full\n\n")
+
+    (dolist (change changes)
+       (insert "parse_incremental (")
+       (setq begin (point))
+       (prin1 change cmd-buffer)
+       (insert ")")
+       (setq end (copy-marker (point) t))
+       (goto-char begin)
+       (while (search-forward "\n" end t)
+         (delete-char -1)
+         (insert "\\n"))
+       (goto-char (point-max))
+       (insert "\n\n")
+      )
+    (with-current-buffer cmd-buffer
+      (if (buffer-file-name)
+         (save-buffer)
+       (write-file cmd-buffer-name)))))
+
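Read straight off the insert calls above, the generated command file looks roughly like the following; angle-bracket fields are placeholders for the current buffer's values, and the printed form of each change comes from prin1, so it is not spelled out here:

    -- -*- comment-start: "-- " -*-
    file <source-file>-wisi-change-start
    verbosity <wisi-parser-verbosity>
    save_text_auto debug_edited
    mckenzie_options zombie_limit=<n> enqueue_limit=<n>
    language_params <language options>
    parse_full

    parse_incremental (<printed change>)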
+(defun wisi-process-log-to-cmd (&optional prompt)
+  "Convert parser log in current buffer to command file.
+Command file name defaults to \"debug.cmd\"; with user arg,
+prompt for it."
+  (interactive "P")
+  (let* ((cmd-buffer-name
+        (if prompt
+            (read-string "command file name: " "debug.cmd")
+          "debug.cmd"))
+       (log-buffer (current-buffer))
+       (log-buffer-point (point))
+       (cmd-buffer (get-buffer-create cmd-buffer-name))
+       edit begin end source-file)
+    (set-buffer cmd-buffer)
+    (erase-buffer)
+
+    (set-buffer log-buffer)
+    (goto-char (point-min))
+
+    (when (search-forward-regexp "kill-context " nil t)
+      ;; IMPROVEME: use last auto-save file to restore context
+      (user-error "kill-context not implemented"))
+
+    ;; get options from full parse line; we assume they don't change
+    (unless (search-forward-regexp "parse \\([02]\\)" nil t)
+      (user-error "log buffer overflowed; increase 
wisi-parser-transaction-log-buffer-size"))
+
+    (cl-ecase (string-to-number (match-string 1))
+      (0 ;; partial parse
+       (user-error "can't create command file for partial parse"))
+
+      (2 ;; full parse
+       (looking-at " \"\\([^\"]*\\)\" \"\\([^\"]*\\)\" \\([-0-9]+\\) 
\\([-0-9]+\\) \\([-0-9]+\\) [-0-9]+ [-0-9]+ \"\\([^\"]*\\)\"")
+       (setq source-file (match-string 1))
+       (let ((verbosity (match-string 2))
+            (mckenzie_zombie_limit (match-string 3))
+            (mckenzie_enqueue_limit (match-string 4))
+            (parse_max_parallel (match-string 5))
+            (language_param (match-string 6)))
+
+        (set-buffer cmd-buffer)
+        (setq-local comment-start "-- ")
+
+        (insert "-- -*- comment-start: \"" comment-start "\" -*-" "\n")
+
+        (insert "file " source-file "\n")
+
+        (when (not (string-equal verbosity ""))
+          (insert "verbosity " verbosity "\n"))
+
+        (insert "save_text_auto debug_edited\n")
+
+        (when (or (not (string-equal mckenzie_zombie_limit "-1"))
+                  (not (string-equal mckenzie_enqueue_limit "-1")))
+          (insert "mckenzie_options ")
+
+          (when (not (string-equal mckenzie_zombie_limit "-1"))
+            (insert "zombie_limit=" mckenzie_zombie_limit " "))
+
+          (when (not (string-equal mckenzie_enqueue_limit "-1"))
+            (insert "enqueue_limit=" mckenzie_enqueue_limit))
+
+          ;; parse-max-parallel is not available
+          (insert "\n"))
+
+        (insert "language_params " language_param "\n\n")
+
+        (insert "parse_full\n\n"))))
+
+    (set-buffer log-buffer)
+    (goto-char (point-min))
+    (while (search-forward-regexp "^\\(parse 
1\\|post-parse\\|query-tree\\|refactor\\) \"\\([^\"]+\\)\"" nil t)
+      (cond
+       ((string-equal (match-string 1) "parse 1")
+       (search-forward-regexp "((")
+       (setq begin (match-beginning 0))
+       (search-forward-regexp "\")) ") ;; avoid matching 'is (Float (A));'
+       (setq edit (buffer-substring begin (match-end 0)))
+
+       (set-buffer cmd-buffer)
+       (goto-char (point-max))
+       (insert "parse_incremental ")
+       (setq begin (point))
+       (insert edit)
+       (setq end (copy-marker (point) t))
+       (goto-char begin)
+       (while (search-forward "\n" end t)
+         (delete-char -1)
+         (insert "\\n"))
+       (goto-char (point-max))
+       (insert "\n\n")
+
+       (set-buffer log-buffer))
+
+       ((and (string-equal (match-string 1) "post-parse")
+            (string-equal (match-string 2) source-file))
+       (goto-char (match-end 0))
+       (looking-at " \"[^\"]*\" \\([0-9]+\\) \\([0-9]+ [0-9]+ [0-9]+ 
[0-9]+\\)")
+       (let ((action (string-to-number (match-string 1)))
+             (args (match-string 2)))
+         (set-buffer cmd-buffer)
+         (goto-char (point-max))
+         (insert "--  post_parse "
+                 (cl-ecase action
+                   (0 "navigate ")
+                   (1 "face ")
+                   (2 "indent "))
+                 args "\n\n")
+         (set-buffer log-buffer)))
+
+       ((and (string-equal (match-string 1) "query-tree")
+            (string-equal (match-string 2) source-file))
+       (goto-char (match-end 0))
+       (looking-at " \\([0-9]+\\) \\([0-9]+\\)")
+       (let ((label (string-to-number (match-string 1)))
+             (pos (match-string 2)))
+         (set-buffer cmd-buffer)
+         (goto-char (point-max))
+         (insert "--  query_tree "
+                 (cl-ecase label
+                   (0 "node ")
+                   (1 "containing_statement ")
+                   (2 "ancestor ")
+                   (3 "parent ")
+                   (4 "child ")
+                   (5 "print "))
+                 pos "\n\n")
+         (set-buffer log-buffer)))
+
+       ((and (string-equal (match-string 1) "refactor")
+            (string-equal (match-string 2) source-file))
+       (goto-char (match-end 0))
+       (looking-at " \\([0-9]+\\) \\([0-9]+\\)")
+       (let ((label (string-to-number (match-string 1)))
+             (pos (match-string 2)))
+         (set-buffer cmd-buffer)
+         (goto-char (point-max))
+         (insert "--  refactor " ;; must match wisi-ada.ads Refactor_Label
+                 (cl-ecase label
+                   (0 "Method_Object_To_Object_Method ")
+                   (1 "Object_Method_To_Method_Object ")
+                   (2 "Element_Object_To_Object_Index ")
+                   (3 "Object_Index_To_Element_Object ")
+                   (4 "Format_Parameter_List "))
+                 pos "\n\n")
+         (set-buffer log-buffer)))
+
+       (t
+       ;; other command or other file - ignore
+       ))
+
+      )
+    (with-current-buffer cmd-buffer
+      (if (buffer-file-name)
+         (save-buffer)
+       (write-file cmd-buffer-name)))
+    (goto-char log-buffer-point)))
+
+(defun wisi-process-log-to-cmd-1 ()
+  "Convert parser command at point to a run_* command line or
+command_file command in the kill ring."
+  (interactive)
+  (forward-line 0)
+  (cond
+   ((looking-at "parse 0 \\([-0-9]+\\) \"\\([^\"]*\\)\" \\([-0-9]+\\) 
\\([-0-9]+\\) \\([-0-9]+\\) \\([-0-9]+\\) \\([-0-9]+\\) \\([-0-9]+\\) 
\\([-0-9]+\\) \\([-0-9]+\\) \\([-0-9]+\\) \"\\([^\"]*\\)\" \\([-0-9]+\\) 
\\([-0-9]+\\) \\([-0-9]+\\) \\([-0-9]+\\) \"\\([^\"]*\\)\"")
+    (let ((parse-action (string-to-number (match-string 1)))
+         (source-file (match-string 2))
+         (begin-byte-pos (match-string 3))
+         (end-byte-pos (match-string 4))
+         (goal-byte-pos (match-string 5))
+         (begin-char-pos (match-string 6))
+         (end-char-pos (match-string 7))
+         (goal-char-pos (match-string 8))
+         (begin-line (match-string 9))
+         (begin-indent (match-string 10))
+         (_partial-parse-active (match-string 11))
+         (verbosity (match-string 12))
+         (mckenzie_zombie_limit (string-to-number (match-string 13)))
+         (mckenzie_enqueue_limit (string-to-number (match-string 14)))
+         (parse_max_parallel (string-to-number (match-string 15)))
+         (_byte-count (match-string 16))
+         (language_param (match-string 17))
+         cmd)
+
+      (setq cmd
+           (concat "parse_partial "
+                   (cl-ecase parse-action
+                     (0 "navigate ")
+                     (1 "face ")
+                     (2 "indent "))
+                   source-file " "
+                   begin-byte-pos " "
+                   end-byte-pos " "
+                   goal-byte-pos " "
+                   begin-char-pos " "
+                   end-char-pos " "
+                   goal-char-pos " "
+                   begin-line " "
+                   begin-indent " "
+                   (when (not (string-equal "" verbosity)) (format 
"--verbosity \"%s\" " verbosity))
+                   (when (not (= -1 mckenzie_zombie_limit)) (format 
"--mckenzie_zombie_limit %d" mckenzie_zombie_limit))
+                   " "
+                   (when (not (= -1 mckenzie_enqueue_limit))
+                     (format "--mckenzie_enqueue_limit %d" 
mckenzie_enqueue_limit))
+                   " "
+                   (when (not (= -1 parse_max_parallel)) (format 
"--parse_max_parallel %d" parse_max_parallel)) " "
+                   (when (not (string-equal "" language_param)) (format 
"--lang_params \"%s\" " language_param))
+                   ))
+      (kill-new cmd)))
+
+   ((looking-at "parse 2 \"\\([^\"]*\\)\" \"\\([^\"]*\\)\" \\([-0-9]+\\) 
\\([-0-9]+\\) \\([-0-9]+\\) [-0-9]+ [-0-9]+ \"\\([^\"]*\\)\"")
+    (let ((source-file (match-string 1))
+         (verbosity (match-string 2))
+         (mckenzie_zombie_limit (match-string 3))
+         (mckenzie_enqueue_limit (match-string 4))
+         (parse_max_parallel (match-string 5))
+         (language_param (match-string 6))
+         cmd)
+
+      (setq cmd
+           (concat "parse_partial none "
+                   source-file " "
+                   (when (not (string-equal "" verbosity)) (format 
"--verbosity \"%s\" " verbosity))
+                   (when (not (string-equal "" language_param)) (format 
"--lang_params \"%s\" " language_param))
+                   ))
+      (kill-new cmd)))
+
+   ((looking-at "post-parse \"\\([^\"]*\\)\" \"\\([^\"]*\\)\" \\([-0-9]+\\) 
\\([-0-9]+\\) \\([-0-9]+\\) \\([-0-9]+\\) \\([-0-9]+\\) \"\\([^\"]*\\)\"")
+    ;; see wisi-process-parse--send-action above
+    (let* ((source-file (match-string 1))
+          (verbosity (match-string 2))
+          (action (string-to-number (match-string 3)))
+          (begin-bytes (match-string 4))
+          (begin-chars (match-string 5))
+          (end-bytes (match-string 6))
+          (end-chars (match-string 7))
+          (language-opts (match-string 8))
+          (cmd
+           (concat "post_parse "
+                   (cl-ecase action (0 "Navigate") (1 "Face") (2 "Indent") (3 
"None")) " "
+                   begin-bytes " "
+                   begin-chars " "
+                   end-bytes " "
+                   end-chars
+                   )))
+      (kill-new cmd)))
+
+   (t
+    (user-error "unrecognized parser command"))
+   ))
+
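As an illustration (the field values are invented; the field order follows the concat above: action, source file, begin/end/goal byte positions, begin/end/goal char positions, begin line, begin indent, then options), a logged "parse 0" line converts to a partial-parse command line such as:

    parse_partial indent foo.adb 1 500 500 1 480 480 1 0 --mckenzie_zombie_limit 40 --mckenzie_enqueue_limit 10000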
+(cl-defun wisi-time (func count &key report-wait-time)
+  "call FUNC COUNT times, show total time"
+  (interactive "afunction \nncount ")
+
+  (let ((start-time (float-time))
+       (start-gcs gcs-done)
+       (cum-wait-time 0.0)
+        (i 0)
+        diff-time
+       diff-gcs)
+    (while (not (eq (1+ count) (setq i (1+ i))))
+      (save-excursion
+        (funcall func))
+      (when report-wait-time
+       (setq cum-wait-time (+ cum-wait-time 
(wisi-process--parser-total-wait-time wisi-parser-shared)))))
+    (setq diff-time (- (float-time) start-time))
+    (setq diff-gcs (- gcs-done start-gcs))
+    (if report-wait-time
+       (progn
+         (message "Total %f seconds, %d gcs; per iteration %f seconds %d gcs 
%d responses %f wait"
+                  diff-time
+                  diff-gcs
+                  (/ diff-time count)
+                  (/ (float diff-gcs) count)
+                  (wisi-process--parser-response-count wisi-parser-shared)
+                  (/ cum-wait-time count)))
+
+      (message "Total %f seconds, %d gcs; per iteration %f seconds %d gcs"
+              diff-time
+              diff-gcs
+              (/ diff-time count)
+              (/ (float diff-gcs) count))
+      ))
+  nil)
+
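For example (mirroring the call in wisi-time-indent-middle-line-warm-cache below), timing ten reindents of the current line and reporting the per-iteration process wait time:

    ;; Sketch: report total and per-iteration time for 10 calls.
    (wisi-time #'wisi-indent-line 10 :report-wait-time t)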
+(defun wisi-time-indent-middle-line-cold-cache (count &optional report-wait-time)
+  (goto-char (point-min))
+  (forward-line (1- (/ (count-lines (point-min) (point-max)) 2)))
+  (let ((cum-wait-time 0.0))
+    (wisi-time
+     (lambda ()
+       (wisi-set-parse-try t 'indent)
+       (wisi-invalidate-cache 'indent (point-min))
+       (wisi-indent-line)
+       (when (wisi-process--parser-p wisi-parser-shared)
+        (setq cum-wait-time (+ cum-wait-time 
(wisi-process--parser-total-wait-time wisi-parser-shared)))))
+     count
+     report-wait-time)
+    ))
+
+(defun wisi-time-indent-middle-line-warm-cache (count)
+  (wisi-set-parse-try t 'indent)
+  (wisi-invalidate-cache 'indent (point-min))
+  (goto-char (point-min))
+  (forward-line (/ (count-lines (point-min) (point-max)) 2))
+  (wisi-indent-line)
+  (wisi-time #'wisi-indent-line count))
 
 (provide 'wisi-process-parse)
diff --git a/wisi-run-indent-test.el b/wisi-run-indent-test.el
index 8c0eef098c..b4a82ddaca 100644
--- a/wisi-run-indent-test.el
+++ b/wisi-run-indent-test.el
@@ -1,6 +1,6 @@
 ;;; wisi-run-indent-test.el --- utils for automating indentation and casing 
tests
 ;;
-;; Copyright (C) 2018 - 2020  Free Software Foundation, Inc.
+;; Copyright (C) 2018 - 2022  Free Software Foundation, Inc.
 ;;
 ;; This file is part of GNU Emacs.
 ;;
@@ -19,6 +19,7 @@
 
 (require 'wisi-tests)
 (require 'wisi-prj)
+(require 'wisi-process-parse)
 
 ;; user can set these to t in an EMACSCMD
 (defvar skip-cmds nil)
@@ -26,6 +27,13 @@
 (defvar skip-recase-test nil)
 (defvar skip-write nil)
 
+(defvar save-parser-log nil
+  "If non-nil, a file name telling where to save wisi parser transaction log")
+
+(defvar save-edited-text nil
+  "If non-nil, a file name telling where to save wisi parser edited
+text, after each edit in an incremental parse, and before each partial parse.")
+
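A sketch of how a test driver might set these; the log-buffer name in the list form is hypothetical, and wisi-test-save-log below also accepts a list of (LOG-BUFFER-NAME LOG-FILE-NAME) pairs:

    ;; Sketch: save the shared parser's transaction log to one file ...
    (setq save-parser-log "debug-parser.log")
    ;; ... or name specific log buffers and their destination files.
    (setq save-parser-log '(("*wisi-parser-log*" "debug-parser.log")))
    ;; Save the edited text after each edit / before each partial parse.
    (setq save-edited-text "debug_edited")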
 (defun test-in-comment-p ()
   (nth 4 (syntax-ppss)))
 
@@ -44,7 +52,7 @@ FACE may be a list."
     (save-match-data
       (wisi-validate-cache (line-beginning-position) (line-end-position) nil 
'face)
       (font-lock-ensure (line-beginning-position) (line-end-position)))
-    
+
     ;; We don't use face-at-point, because it doesn't respect
     ;; font-lock-face set by the parser! And we want to check for
     ;; conflicts between font-lock-keywords and the parser.
@@ -112,7 +120,7 @@ FACE may be a list."
     )))
 
 (defun test-cache-containing (containing contained)
-  "Test if CONTAINING in next code line has wisi-cache with that contains 
CONTAINED."
+  "Test if CONTAINING in next code line has wisi-cache that contains 
CONTAINED."
   (save-excursion
     (wisi-validate-cache (line-beginning-position 0) (line-end-position 3) nil 
'navigate)
     (beginning-of-line)
@@ -136,201 +144,248 @@ Each item is a list (ACTION PARSE-BEGIN PARSE-END 
EDIT-BEGIN)")
 (defun test-refactor-1 (action inverse-action search-string refactor-string)
   (beginning-of-line)
   (forward-comment (point-max)) ;; forward-comment does not work from inside 
comment
-  (search-forward search-string (line-end-position 7))
+  (when search-string
+    (search-forward search-string (line-end-position 7)))
   (wisi-validate-cache (line-end-position -7) (line-end-position 7) t 
'navigate)
   (search-forward refactor-string (line-end-position 7))
-  (let* ((edit-begin (match-beginning 0))
-        (cache (wisi-goto-statement-start))
-        (parse-begin (point))
-        (parse-end (wisi-cache-end cache)))
-    (setq parse-end (+ parse-end (wisi-cache-last (wisi-get-cache 
(wisi-cache-end cache)))))
+  (let ((edit-begin (match-beginning 0)))
     (push (list
           inverse-action
-          (copy-marker parse-begin nil)
-          (copy-marker parse-end nil)
           (copy-marker edit-begin nil))
          test-refactor-markers)
-    (wisi-refactor wisi--parser action parse-begin parse-end edit-begin)
+    (wisi-refactor wisi-parser-shared action edit-begin)
     ))
 
 (defun test-refactor-inverse ()
   "Reverse refactors done by recent set of `test-refactor-1'."
+  ;; Force parse of forward refactor for partial parse
+  (wisi-validate-cache (line-end-position -7) (line-end-position 7) t 
'navigate)
   (save-excursion
-    (condition-case-unless-debug nil
-       (dolist (item test-refactor-markers)
-         (wisi-refactor wisi--parser
-                        (nth 0 item)
-                        (marker-position (nth 1 item))
-                        (marker-position (nth 2 item))
-                        (marker-position (nth 3 item))))
-      (error nil))
+    (dolist (item test-refactor-markers)
+      (wisi-refactor wisi-parser-shared
+                    (nth 0 item)
+                    (marker-position (nth 1 item))))
     (setq test-refactor-markers nil)))
 
+(defun wisi-test-save-log-1 (buffer log-file-name)
+    (with-current-buffer buffer
+      (message "saving parser transaction log '%s' to '%s'" (buffer-name) 
log-file-name)
+      (write-region nil nil log-file-name)))
+
+(defun wisi-test-save-log ()
+  (interactive)
+  (cond
+   ((stringp save-parser-log)
+    (when (buffer-live-p (wisi-parser-transaction-log-buffer 
wisi-parser-shared))
+      (wisi-test-save-log-1 (wisi-parser-transaction-log-buffer 
wisi-parser-shared) save-parser-log)))
+
+   (t ;; save-parser-log is a list of (LOG-BUFFER-NAME LOG-FILE-NAME)
+    (dolist (item save-parser-log)
+      (wisi-test-save-log-1 (get-buffer (nth 0 item)) (nth 1 item))))
+    ))
+
 (defun run-test-here ()
   "Run an indentation and casing test on the current buffer."
   (interactive)
-  (setq indent-tabs-mode nil)
-  (setq jit-lock-context-time 0.0);; for test-face
-
-  ;; Test files use wisi-prj-select-cached to parse and select a project file.
-  (setq project-find-functions (list #'wisi-prj-current-cached))
-  (setq xref-backend-functions (list #'wisi-prj-xref-backend))
-
-
-  (let ((error-count 0)
-       (test-buffer (current-buffer))
-       cmd-line
-       last-result last-cmd expected-result)
-    ;; Look for EMACS* comments in the file:
-    ;;
-    ;; EMACSCMD: <form>
-    ;;    Executes the lisp form inside a save-excursion, saves the result as 
a lisp object.
-    ;;
-    ;; EMACSRESULT: <form>
-    ;;    point is moved to end of line, <form> is evaluated inside
-    ;;    save-excursion and compared (using `equal') with the result
-    ;;    of the previous EMACSCMD, and the test fails if they don't
-    ;;    match.
-    ;;
-    ;; EMACSRESULT_START:<first list element>
-    ;; EMACSRESULT_ADD:  <list element>
-    ;; EMACSRESULT_FINISH:
-    ;;    build a list, compare it to the result of the previous EMACSCMD.
-    ;;
-    ;; EMACS_SKIP_UNLESS: <form>
-    ;;   skip entire test if form evals nil
-    ;;
-    ;; EMACSDEBUG: <form>
-    ;;    Eval form, display result. Also used for setting breakpoint.
-
-    (goto-char (point-min))
-    (while (and (not skip-cmds)
-               (re-search-forward (concat comment-start "EMACS\\([^:]+\\):") 
nil t))
-      (cond
-       ((string= (match-string 1) "CMD")
-       (looking-at ".*$")
-       (save-excursion
-         (setq cmd-line (line-number-at-pos)
-               last-cmd (match-string 0)
-               last-result
-               (condition-case-unless-debug err
-                   (eval (car (read-from-string last-cmd)))
-                 (error
-                    (setq error-count (1+ error-count))
-                    (message "%s:%d: command: %s"
-                             (buffer-file-name) cmd-line last-cmd)
-                    (message "%s:%d: %s: %s"
-                             (buffer-file-name)
-                             (line-number-at-pos)
-                             (car err)
-                             (cdr err))))
-               )
-         ;; save-excursion does not preserve mapping of buffer to
-         ;; window, but some tests depend on that. For example,
-         ;; execute-kbd-macro doesn’t work properly if current buffer
-         ;; is not visible..
-         (pop-to-buffer test-buffer)))
-
-       ((string= (match-string 1) "RESULT")
-       (looking-at ".*$")
-       (setq expected-result (save-excursion (end-of-line 1) (eval (car 
(read-from-string (match-string 0))))))
-       (unless (equal expected-result last-result)
-         (setq error-count (1+ error-count))
-         (message
-          (concat
-           (format "error: %s:%d:\n" (buffer-file-name) (line-number-at-pos))
-           (format "Result of '%s' does not match.\nGot    '%s',\nexpect '%s'"
-                   last-cmd
-                   last-result
-                   expected-result)
-           ))))
-
-       ((string= (match-string 1) "RESULT_START")
-       (looking-at ".*$")
-       (setq expected-result (list (save-excursion (end-of-line 1) (eval (car 
(read-from-string (match-string 0))))))))
-
-       ((string= (match-string 1) "RESULT_ADD")
-       (looking-at ".*$")
-       (let ((val (save-excursion (end-of-line 1)
-                                  (eval (car (read-from-string (match-string 
0)))))))
-         (when val
-           (setq expected-result (append expected-result (list val))))))
-
-       ((string= (match-string 1) "RESULT_FINISH")
-       (unless (equal (length expected-result) (length last-result))
-         (setq error-count (1+ error-count))
-         (message
-          (concat
-           (format "error: %s:%d:\n" (buffer-file-name) (line-number-at-pos))
-           (format "Length of result of '%s' does not match.\nGot    
'%s',\nexpect '%s'"
-                   last-cmd
-                   (length last-result)
-                   (length expected-result)))))
-
-       (let ((i 0))
-         (while (< i (length expected-result))
-           (unless (equal (nth i expected-result) (nth i last-result))
+  (condition-case-unless-debug err
+      (progn
+       (setq indent-tabs-mode nil)
+       (setq jit-lock-context-time 0.0);; for test-face
+
+       ;; Test files use wisi-prj-select-cached to parse and select a project 
file.
+       (setq project-find-functions (list #'wisi-prj-current-cached))
+       (setq xref-backend-functions (list #'wisi-prj-xref-backend))
+
+       (when (stringp save-edited-text)
+         (wisi-process-parse-save-text wisi-parser-shared save-edited-text t))
+
+       (let ((error-count 0)
+             (pass-count 0)
+             (test-buffer (current-buffer))
+             cmd-line
+             last-result last-cmd expected-result force-fail)
+         ;; Look for EMACS* comments in the file:
+         ;;
+         ;; EMACSCMD: <form>
+         ;;    Executes the lisp form inside a save-excursion, saves the 
result as a lisp object.
+         ;;
+         ;; EMACSRESULT: <form>
+         ;;    point is moved to end of line, <form> is evaluated inside
+         ;;    save-excursion and compared (using `equal') with the result
+         ;;    of the previous EMACSCMD, and the test fails if they don't
+         ;;    match.
+         ;;
+         ;; EMACSRESULT_START:<first list element>
+         ;; EMACSRESULT_ADD:  <list element>
+         ;; EMACSRESULT_FINISH:
+         ;;    build a list, compare it to the result of the previous EMACSCMD.
+         ;;
+         ;; EMACS_SKIP_UNLESS: <form>
+         ;;   skip entire test if form evals nil
+         ;;
+         ;; EMACSDEBUG: <form>
+         ;;    Eval form, display result. Also used for setting breakpoint.
+
+         (goto-char (point-min))
+         (while (and (not skip-cmds)
+                     (re-search-forward (concat comment-start 
"EMACS\\([^:]+\\):") nil t))
+           (cond
+            ((string= (match-string 1) "CMD")
+             (looking-at ".*$")
+             (setq cmd-line (line-number-at-pos)
+                   last-cmd (match-string 0))
+             (let ((msg (format "%s:%d: test %s" (buffer-file-name) cmd-line 
last-cmd)))
+               (wisi-parse-log-message wisi-parser-shared msg)
+               (message "%s" msg)
+               (save-excursion
+                 (setq last-result
+                       (condition-case-unless-debug err
+                           (prog1
+                             (eval (car (read-from-string last-cmd)))
+                             (when (> wisi-debug 1)
+                               (setq msg (concat msg " ... done"))
+                                (wisi-parse-log-message wisi-parser-shared msg)
+                                (message msg)))
+                         ((error wisi-parse-error)
+                          (setq error-count (1+ error-count))
+                          (setq msg (concat msg " ... signaled"))
+                          (setq force-fail t)
+                          (wisi-parse-log-message wisi-parser-shared msg)
+                          (message msg)
+                          (setq msg (format "... %s: %s" (car err) (cdr err)))
+                          (wisi-parse-log-message wisi-parser-shared msg)
+                          (message msg)
+                          nil)))
+                 ))
+               ;; save-excursion does not preserve mapping of buffer to
+               ;; window, but some tests depend on that. For example,
+               ;; execute-kbd-macro doesn’t work properly if current buffer
+               ;; is not visible.
+               (pop-to-buffer test-buffer))
+
+            ((string= (match-string 1) "RESULT")
+             (looking-at ".*$")
+             (setq expected-result (save-excursion (end-of-line 1) (eval (car 
(read-from-string (match-string 0))))))
+             (if (and (not force-fail)
+                      (equal expected-result last-result))
+                 (let ((msg (format "test passes %s:%d:\n" (buffer-file-name) 
(line-number-at-pos))))
+                   (setq pass-count (1+ pass-count))
+                   (wisi-parse-log-message wisi-parser-shared msg)
+                   (message msg))
+
+               (setq error-count (1+ error-count))
+
+               (let ((msg (concat
+                           (format "error: %s:%d:\n" (buffer-file-name) 
(line-number-at-pos))
+                           (if force-fail
+                               "... failed due to signal"
+                             (format "... result of '%s' does not match.\n... 
Got    '%s',\n... expect '%s'"
+                                     last-cmd
+                                     last-result
+                                     expected-result)))))
+                 (wisi-parse-log-message wisi-parser-shared msg)
+                 (message "%s" msg))
+               (setq force-fail nil)))
+
+            ((string= (match-string 1) "RESULT_START")
+             (looking-at ".*$")
+             (setq expected-result
+                   (list (save-excursion (end-of-line 1) (eval (car 
(read-from-string (match-string 0))))))))
+
+            ((string= (match-string 1) "RESULT_ADD")
+             (looking-at ".*$")
+             (let ((val (save-excursion (end-of-line 1)
+                                        (eval (car (read-from-string 
(match-string 0)))))))
+               (when val
+                 (setq expected-result (append expected-result (list val))))))
+
+            ((string= (match-string 1) "RESULT_FINISH")
+             (unless (equal (length expected-result) (length last-result))
+               (setq error-count (1+ error-count))
+               ;; this is used for gpr-query tests, not parser tests,
+               ;; so we don't write to the parser log.
+               (message
+                (concat
+                 (format "error: %s:%d:\n" (buffer-file-name) 
(line-number-at-pos))
+                 (format "Length of result of '%s' does not match.\nGot    
'%s',\nexpect '%s'"
+                         last-cmd
+                         (length last-result)
+                         (length expected-result)))))
+
+             (let ((i 0))
+               (while (< i (length expected-result))
+                 (unless (equal (nth i expected-result) (nth i last-result))
+                   (setq error-count (1+ error-count))
+                   (message
+                    (concat
+                     (format "error: %s:%d:\n" (buffer-file-name) 
(line-number-at-pos))
+                     (format "Nth (%d) result of '%s' does not match.\nGot    
'%s',\nexpect '%s'"
+                             i
+                             last-cmd
+                             (nth i last-result)
+                             (nth i expected-result))
+                     )))
+                 (setq i (1+ i)))))
+
+            ((string= (match-string 1) "_SKIP_UNLESS")
+             (looking-at ".*$")
+             (unless (eval (car (read-from-string (match-string 0))))
+               (setq skip-cmds t)
+               (setq skip-reindent-test t)
+               (setq skip-recase-test t)
+               ;; We don’t set ‘skip-write’ t here, so the *.diff Make target 
succeeds.
+               ))
+
+            ((string= (match-string 1) "DEBUG")
+             (looking-at ".*$")
+             (message "DEBUG: %s:%d %s"
+                      (current-buffer)
+                      (line-number-at-pos)
+                      (save-excursion
+                        (eval (car (read-from-string (match-string 0)))))))
+
+            (t
              (setq error-count (1+ error-count))
-             (message
-              (concat
-               (format "error: %s:%d:\n" (buffer-file-name) 
(line-number-at-pos))
-               (format "Nth (%d) result of '%s' does not match.\nGot    
'%s',\nexpect '%s'"
-                       i
-                       last-cmd
-                       (nth i last-result)
-                       (nth i expected-result))
-               )))
-           (setq i (1+ i)))))
-
-       ((string= (match-string 1) "_SKIP_UNLESS")
-       (looking-at ".*$")
-       (unless (eval (car (read-from-string (match-string 0))))
-         (setq skip-cmds t)
-         (setq skip-reindent-test t)
-         (setq skip-recase-test t)
-         ;; We don’t set ‘skip-write’ t here, so the *.diff Make target 
succeeds.
-         ))
-
-       ((string= (match-string 1) "DEBUG")
-       (looking-at ".*$")
-       (message "DEBUG: %s:%d %s"
-                (current-buffer)
-                (line-number-at-pos)
-                (save-excursion
-                 (eval (car (read-from-string (match-string 0)))))))
-
-       (t
-       (setq error-count (1+ error-count))
-       (error (concat "Unexpected EMACS test command " (match-string 1))))))
-
-    (when (> error-count 0)
-      (error
-       "%s:%d: aborting due to previous errors (%d)"
-       (buffer-file-name) (line-number-at-pos (point)) error-count))
-    )
-
-  (unless skip-reindent-test
-    ;; Reindent the buffer
-    (message "indenting")
-
-    ;; first unindent; if the indentation rules do nothing, the test
-    ;; would pass, otherwise!  Only unindent by 1 column, so comments
-    ;; not currently in column 0 are still not in column 0, in case
-    ;; the mode supports a special case for comments in column 0.
-    (indent-rigidly (point-min) (point-max) -1)
-
-    ;; indent-region uses save-excursion, so we can't goto an error location
-    (indent-region (point-min) (point-max))
-
-    ;; Cleanup the buffer; indenting often leaves trailing whitespace;
-    ;; files must be saved without any.
-    (delete-trailing-whitespace)
-    )
-
-  (when (and wisi-auto-case (not skip-recase-test))
-    (message "casing")
-    (wisi-case-adjust-buffer))
-  )
+             (error (concat "Unexpected EMACS test command " (match-string 
1))))))
+
+         (let ((msg (format "%s:%d tests passed %d"
+                            (buffer-file-name) (line-number-at-pos (point)) 
pass-count)))
+           (wisi-parse-log-message wisi-parser-shared msg)
+           (message msg))
+
+         (when (> error-count 0)
+           (error
+            "%s:%d: aborting due to previous errors (%d)"
+            (buffer-file-name) (line-number-at-pos (point)) error-count))
+         )
+
+       (unless skip-reindent-test
+         ;; Reindent the buffer
+         (message "indenting")
+
+         ;; first unindent; if the indentation rules do nothing, the test
+         ;; would pass, otherwise!  Only unindent by 1 column, so comments
+         ;; not currently in column 0 are still not in column 0, in case
+         ;; the mode supports a special case for comments in column 0.
+         (indent-rigidly (point-min) (point-max) -1)
+
+         ;; indent-region uses save-excursion, so we can't goto an error 
location
+         (indent-region (point-min) (point-max))
+
+         ;; Cleanup the buffer; indenting often leaves trailing whitespace;
+         ;; files must be saved without any.
+         (delete-trailing-whitespace)
+         )
+
+       (when (and wisi-auto-case (not skip-recase-test))
+         (message "casing")
+         (wisi-case-adjust-buffer))
+
+       (wisi-test-save-log))
+    (error
+     (wisi-test-save-log)
+     (signal (car err) (cdr err)))
+    ))
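To make the EMACS* conventions documented in run-test-here concrete, a test source file fragment might look like the following. This is a sketch only: it assumes an Ada buffer whose comment-start is "--", and the forms shown are illustrative.

    --EMACSCMD:(progn (forward-line 1)(looking-at "procedure"))
    procedure Example is
    --EMACSRESULT:t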
 
 (defvar cl-print-readably); cl-print.el, used by edebug
 
@@ -343,13 +398,13 @@ Each item is a list (ACTION PARSE-BEGIN PARSE-END 
EDIT-BEGIN)")
        (cons 'height 71) ;; characters
        (cons 'left 0) ;; pixels
        (cons 'top 0))))
-(define-key global-map "\C-cp" 'large-screen)
+(define-key global-map "\C-cp" 'large-frame)
 
 (defun run-test (file-name)
   "Run an indentation and casing test on FILE-NAME."
   (interactive "f")
 
-  (package-initialize) ;; for uniquify-files
+  (setq-default indent-tabs-mode nil) ;; no tab chars in files
 
   ;; Let edebug display strings full-length, and show internals of records
   (setq cl-print-readably t)
@@ -378,6 +433,9 @@ Each item is a list (ACTION PARSE-BEGIN PARSE-END 
EDIT-BEGIN)")
   (setq xref-prompt-for-identifier nil)
 
   (let ((dir default-directory))
+    ;; Always wait for initial full parse to complete.
+    (setq wisi-parse-full-background nil)
+
     (find-file file-name) ;; sets default-directory
 
     (run-test-here)
diff --git a/wisi-skel.el b/wisi-skel.el
index 9af5b9b91d..4bfdb88715 100644
--- a/wisi-skel.el
+++ b/wisi-skel.el
@@ -1,6 +1,6 @@
 ;;; wisi-skel.el --- Extensions skeleton  -*- lexical-binding:t -*-
 
-;; Copyright (C) 1987, 1993, 1994, 1996-2020  Free Software Foundation, Inc.
+;; Copyright (C) 1987, 1993, 1994, 1996-2021  Free Software Foundation, Inc.
 
 ;; Authors: Stephen Leake <stephen_leake@stephe-leake.org>
 
@@ -62,8 +62,8 @@ after AFTER-1. If AFTER-1 is a nested alist, add the new 
entry after AFTER-2."
 
 (defun wisi-skel-build-prompt (alist count)
   "Build a prompt from the keys of the ALIST.
-The prompt consists of the first COUNT keys from the alist, separated by `|', 
with
-trailing `...' if there are more keys."
+The prompt consists of the first COUNT keys from the alist,
+separated by `|', with trailing `...' if there are more keys."
   (if (>= count (length alist))
       (concat (mapconcat 'car alist " | ") " : ")
     (let ((alist-1 (butlast alist (- (length alist) count))))
@@ -75,6 +75,9 @@ trailing `...' if there are more keys."
   ;; see test/ada_skel.adb
   )
 
+(defun wisi-skel-enable-parse ()
+  (setq wisi-inhibit-parse nil));
+
 (defun wisi-skel-expand (&optional name)
   "Expand the token or placeholder before point to a skeleton.
 Tokens are defined by `wisi-skel-token-alist'; they must have
@@ -88,7 +91,12 @@ before that as the token."
   ;; Standard comment end included for languages where that is newline.
   (skip-syntax-backward " !>")
 
-  (let* ((wisi-inhibit-parse t) ;; don't parse until skeleton is fully inserted
+  (let* ((wisi-inhibit-parse t)
+        ;; Don't parse until skeleton is fully inserted. However,
+        ;; this is still set when skeleton-end-hook is called; user
+        ;; will probably put wisi-indent-statement on that hook. So
+        ;; we add wisi-skel-enable-parse.
+
         (end (point))
         ;; Include punctuation here, to handle a dotted name (ie Ada.Text_IO)
         (token (progn (skip-syntax-backward "w_.")
@@ -183,5 +191,8 @@ before that as the token."
   (interactive)
   (skip-syntax-backward "^!"))
 
+;;;###autoload
+(add-hook 'skeleton-end-hook #'wisi-skel-enable-parse 90)
+
 (provide 'wisi-skel)
 ;;; wisi-skel.el ends here
diff --git a/wisi-tests.el b/wisi-tests.el
index b730cf13cb..97f0eaba8e 100644
--- a/wisi-tests.el
+++ b/wisi-tests.el
@@ -1,6 +1,6 @@
 ;;; wisi-tests.el --- Common utils for wisi tests -*- lexical-binding:t -*-
 ;;
-;; Copyright (C) 2012 - 2019  Free Software Foundation, Inc.
+;; Copyright (C) 2012 - 2020  Free Software Foundation, Inc.
 ;;
 ;; Author: Stephen Leake <stephen_leake@stephe-leake.org>
 ;; Maintainer: Stephen Leake <stephen_leake@stephe-leake.org>
@@ -98,7 +98,6 @@
        :face-table (symbol-value (intern-soft (concat grammar-name 
"-process-face-table")))
        :token-table (symbol-value (intern-soft (concat grammar-name 
"-process-token-table")))
        )))
-     (setq wisi-mckenzie-disable nil)
      )
     )
 
diff --git a/wisi.adb b/wisi.adb
index b874532cc9..2b16301271 100644
--- a/wisi.adb
+++ b/wisi.adb
@@ -2,7 +2,7 @@
 --
 --  See spec.
 --
---  Copyright (C) 2017 - 2021 Free Software Foundation, Inc.
+--  Copyright (C) 2017 - 2022 Free Software Foundation, Inc.
 --
 --  This library is free software;  you can redistribute it and/or modify it
 --  under terms of the  GNU General Public License  as published by the Free
@@ -17,109 +17,38 @@
 
 pragma License (Modified_GPL);
 
-with Ada.Exceptions;
+with Ada.Directories;
 with Ada.Strings.Bounded;
-with Ada.Strings.Unbounded;
+with Ada.Strings.Fixed;
+with Ada.Strings.Maps;
 with Ada.Text_IO;
 with SAL;
-with WisiToken.Semantic_Checks;
+with System.Address_To_Access_Conversions;
+with System.Storage_Elements;
+with WisiToken.Lexer;
 package body Wisi is
    use WisiToken;
 
    Chars_Per_Int : constant Integer := Integer'Width;
 
-   ----------
-   --  body subprogram specs (as needed), alphabetical
-
-   function Indent_Nil_P (Indent : in Indent_Type) return Boolean;
-
-   function Max_Anchor_ID
-     (Data       : in out Parse_Data_Type;
-      First_Line : in     Line_Number_Type;
-      Last_Line  : in     Line_Number_Type)
-     return Integer;
-
-   function Paren_In_Anchor_Line
-     (Data         : in out Parse_Data_Type'Class;
-      Tree         : in     Syntax_Trees.Tree'Class;
-      Anchor_Token : in     Augmented_Token;
-      Offset       : in     Integer)
-     return Integer;
-
    ----------
    --  body subprograms bodies, alphabetical
 
-   procedure Adjust_Paren_State
-     (Data              : in out Parse_Data_Type;
-      Tree              : in     Syntax_Trees.Tree'Class;
-      First_Token_Index : in     Token_Index;
-      First_Line        : in     Line_Number_Type;
-      Adjust            : in     Integer)
-   is begin
-      for I in First_Token_Index .. Data.Terminals.Last_Index loop
-         declare
-            Aug : Augmented_Token renames Get_Aug_Token_Var (Data, Tree, I);
-         begin
-            Aug.Paren_State := Aug.Paren_State + Adjust;
-         end;
-      end loop;
-
-      for Line in First_Line .. Data.Line_Paren_State.Last_Index loop
-         Data.Line_Paren_State (Line) := Data.Line_Paren_State (Line) + Adjust;
-      end loop;
-   end Adjust_Paren_State;
-
-   function Image (Aug : in WisiToken.Base_Token_Class_Access; Descriptor : in 
WisiToken.Descriptor) return String
-   is begin
-      return Image (Augmented_Token_Access (Aug).all, Descriptor);
-   end Image;
-
-   function Image (Action : in WisiToken.Syntax_Trees.Semantic_Action) return 
String
-   is
-      pragma Unreferenced (Action);
-   begin
-      return "action";
-   end Image;
-
-   function Image (Anchor_IDs : in Anchor_ID_Vectors.Vector) return String
+   function Image (Indent : in Indent_Type) return String
    is
-      use Ada.Strings.Unbounded;
-      Result : Unbounded_String := +"(";
+      Prefix : constant String := "(" & Trimmed_Image 
(Indent.Controlling_Token_Line) & ": " &
+        Indent_Label'Image (Indent.Label);
    begin
-      for I in Anchor_IDs.First_Index .. Anchor_IDs.Last_Index loop
-         Result := Result & Integer'Image (Anchor_IDs (I));
-         if I /= Anchor_IDs.Last_Index then
-            Result := Result & ", ";
-         else
-            Result := Result & ")";
-         end if;
-      end loop;
-      return -Result;
-   end Image;
-
-   function Image (Indent : in Indent_Type) return String
-   is begin
       case Indent.Label is
       when Not_Set =>
-         return "(" & Indent_Label'Image (Indent.Label) & ")";
+         return Prefix & ")";
 
       when Int =>
-         return "(" & Indent_Label'Image (Indent.Label) & Integer'Image 
(Indent.Int_Indent) & ")";
-
-      when Anchor_Nil =>
-         return "(" & Indent_Label'Image (Indent.Label) & ", " & Image 
(Indent.Anchor_Nil_IDs) & ", nil)";
-
-      when Anchor_Int =>
-         return "(" & Indent_Label'Image (Indent.Label) & ", " & Image 
(Indent.Anchor_Int_IDs) & ", " & Integer'Image
-           (Indent.Anchor_Int_Indent) & ")";
+         return Prefix & Integer'Image (Indent.Int_Indent) & ")";
 
       when Anchored =>
-         return "(" & Indent_Label'Image (Indent.Label) & ", " & Integer'Image 
(Indent.Anchored_ID) & ", " &
-           Integer'Image (Indent.Anchored_Delta) & ")";
+         return Prefix & "," & Indent.Anchor_Line'Image & "," & 
Indent.Anchor_Delta'Image & ")";
 
-      when Anchor_Anchored =>
-         return "(" & Indent_Label'Image (Indent.Label) & ", " & Image 
(Indent.Anchor_Anchored_IDs) & Integer'Image
-           (Indent.Anchor_Anchored_ID) & ", " & Integer'Image 
(Indent.Anchor_Anchored_Delta) & ")";
       end case;
    end Image;
 
@@ -128,69 +57,56 @@ package body Wisi is
       Indent       : in out Indent_Type)
    with Pre => Delta_Indent.Label = Anchored
    is begin
-      --  Add Delta_Indent to Indent
-
       case Indent.Label is
       when Not_Set =>
-         Indent := (Anchored, Delta_Indent.Anchored_ID, 
Delta_Indent.Anchored_Delta);
+         Indent :=
+           (Anchored, Delta_Indent.Controlling_Token_Line, 
Delta_Indent.Anchor_Line, Delta_Indent.Anchored_Delta);
 
       when Int =>
-         if Delta_Indent.Anchored_Accumulate then
-            Indent := (Anchored, Delta_Indent.Anchored_ID, Indent.Int_Indent + 
Delta_Indent.Anchored_Delta);
-         end if;
-
-      when Anchor_Nil =>
          Indent :=
-           (Anchor_Anchored,
-            Indent.Anchor_Nil_IDs,
-            Delta_Indent.Anchored_ID,
-            Delta_Indent.Anchored_Delta);
-
-      when Anchor_Int =>
-         if Delta_Indent.Anchored_Accumulate then
-            Indent :=
-              (Anchor_Anchored,
-               Indent.Anchor_Int_IDs,
-               Delta_Indent.Anchored_ID,
-               Delta_Indent.Anchored_Delta + Indent.Anchor_Int_Indent);
-         end if;
+           (Anchored, Delta_Indent.Controlling_Token_Line, 
Delta_Indent.Anchor_Line,
+            Delta_Indent.Anchored_Delta + Indent.Int_Indent);
 
-      when Anchored | Anchor_Anchored =>
-         --  already anchored
+      when Anchored =>
+         --  Already anchored, as in nested parens.
          null;
       end case;
    end Indent_Apply_Anchored;
 
-   procedure Indent_Apply_Int (Indent : in out Indent_Type; Offset : in 
Integer)
+   procedure Indent_Apply_Int
+     (Indent                 : in out Indent_Type;
+      Offset                 : in     Integer;
+      Controlling_Token_Line : in     Base_Line_Number_Type)
    is begin
       --  Add an Int indent to Indent
       case Indent.Label is
       when Not_Set =>
-         Indent := (Int, Offset);
+         Indent := (Int, Controlling_Token_Line, Offset);
 
       when Int =>
-         Indent.Int_Indent := Indent.Int_Indent + Offset;
-
-      when Anchor_Nil         =>
-         Indent :=
-           (Label             => Anchor_Int,
-            Anchor_Int_IDs    => Indent.Anchor_Nil_IDs,
-            Anchor_Int_Indent => Offset);
-
-      when Anchor_Int =>
-         Indent.Anchor_Int_Indent := Indent.Anchor_Int_Indent + Offset;
+         if Controlling_Token_Line = Invalid_Line_Number or
+           Indent.Controlling_Token_Line = Invalid_Line_Number or
+           Controlling_Token_Line /= Indent.Controlling_Token_Line
+         then
+            Indent.Controlling_Token_Line := Controlling_Token_Line;
+            Indent.Int_Indent := Indent.Int_Indent + Offset;
+         end if;
 
-      when Anchored | Anchor_Anchored =>
+      when Anchored =>
          null;
       end case;
    end Indent_Apply_Int;
 
    procedure Indent_Line
-     (Data         : in out Parse_Data_Type;
-      Line         : in     Line_Number_Type;
-      Delta_Indent : in     Delta_Type)
+     (Data              : in out Parse_Data_Type;
+      Line              : in     Line_Number_Type;
+      Delta_Indent      : in     Delta_Type;
+      Indenting_Comment : in     Indenting_Comment_Label;
+      Trace             : in     WisiToken.Trace_Access)
    is
-      --  See note in Indent_Anchored_2 for why we can't use renames here.
+      --  We can't use a Reference here, because the Element in reference
+      --  types is constrained (as are all allocated objects of access
+      --  types; AARM 4.8 (6/3)), and we may need to change the Label.
       Indent : Indent_Type := Data.Indents (Line);
    begin
       case Delta_Indent.Label is
@@ -200,148 +116,112 @@ package body Wisi is
             null;
 
          when Int =>
-            Indent_Apply_Int (Indent, Delta_Indent.Simple_Delta.Int_Delta);
+            Indent_Apply_Int
+              (Indent, Delta_Indent.Simple_Delta.Int_Delta, 
Delta_Indent.Simple_Delta.Controlling_Token_Line);
 
          when Anchored =>
             Indent_Apply_Anchored (Delta_Indent.Simple_Delta, Indent);
          end case;
 
       when Hanging =>
-         if Delta_Indent.Hanging_Accumulate or Indent_Nil_P (Data.Indents 
(Line)) then
-            if Line = Delta_Indent.Hanging_First_Line then
-               --  Apply delta_1
+         declare
+            procedure Apply_Delta_1
+            is begin
                case Delta_Indent.Hanging_Delta_1.Label is
                when None =>
                   null;
                when Int =>
-                  Indent_Apply_Int (Indent, 
Delta_Indent.Hanging_Delta_1.Int_Delta);
+                  Indent_Apply_Int
+                    (Indent, Delta_Indent.Hanging_Delta_1.Int_Delta,
+                     Delta_Indent.Hanging_Delta_1.Controlling_Token_Line);
                when Anchored =>
                   Indent_Apply_Anchored (Delta_Indent.Hanging_Delta_1, Indent);
                end case;
-            else
-               if Delta_Indent.Hanging_Paren_State = Data.Line_Paren_State 
(Line) then
-                  case Delta_Indent.Hanging_Delta_2.Label is
-                  when None =>
-                     null;
-                  when Int =>
-                     Indent_Apply_Int (Indent, 
Delta_Indent.Hanging_Delta_2.Int_Delta);
-                  when Anchored =>
-                     Indent_Apply_Anchored (Delta_Indent.Hanging_Delta_2, 
Indent);
-                  end case;
+            end Apply_Delta_1;
+
+            procedure Apply_Delta_2
+            is begin
+               case Delta_Indent.Hanging_Delta_2.Label is
+               when None =>
+                  null;
+               when Int =>
+                  Indent_Apply_Int
+                    (Indent, Delta_Indent.Hanging_Delta_2.Int_Delta,
+                     Delta_Indent.Hanging_Delta_2.Controlling_Token_Line);
+               when Anchored =>
+                  Indent_Apply_Anchored (Delta_Indent.Hanging_Delta_2, Indent);
+               end case;
+            end Apply_Delta_2;
+
+         begin
+            case Indenting_Comment is
+            when None =>
+               if Line = Delta_Indent.Hanging_First_Line then
+                  Apply_Delta_1;
+               else
+                  Apply_Delta_2;
                end if;
-            end if;
-         end if;
-      end case;
 
+            when Leading =>
+               Apply_Delta_1;
+
+            when Trailing =>
+               Apply_Delta_2;
+            end case;
+         end;
+      end case;
       if Trace_Action > Extra then
-         Ada.Text_IO.Put_Line (";; indent_line: " & Line_Number_Type'Image 
(Line) & " => " & Image (Indent));
+         Trace.Put_Line ("indent_line: " & Line_Number_Type'Image (Line) & " 
=> " & Image (Indent));
       end if;
 
       Data.Indents.Replace_Element (Line, Indent);
    end Indent_Line;
 
-   function Indent_Nil_P (Indent : in Indent_Type) return Boolean
-   is begin
-      return Indent.Label in Not_Set | Anchor_Nil;
-   end Indent_Nil_P;
-
-   function Max_Anchor_ID
-     (Data       : in out Parse_Data_Type;
-      First_Line : in     Line_Number_Type;
-      Last_Line  : in     Line_Number_Type)
-     return Integer
-   is
-      Result : Integer := First_Anchor_ID - 1;
-   begin
-      for Line in First_Line .. Last_Line loop
-         declare
-            Indent : Indent_Type renames Data.Indents (Line);
-         begin
-            case Indent.Label is
-            when Not_Set | Int =>
-               null;
-            when Anchor_Nil =>
-               Result := Integer'Max (Result, Indent.Anchor_Nil_IDs (Indent.Anchor_Nil_IDs.First_Index));
-            when Anchor_Int =>
-               Result := Integer'Max (Result, Indent.Anchor_Int_IDs (Indent.Anchor_Int_IDs.First_Index));
-            when Anchored =>
-               Result := Integer'Max (Result, Indent.Anchored_ID);
-            when Anchor_Anchored =>
-               Result := Integer'Max (Result, Indent.Anchor_Anchored_ID);
-            end case;
-         end;
-      end loop;
-      return Result;
-   end Max_Anchor_ID;
-
    function Paren_In_Anchor_Line
      (Data         : in out Parse_Data_Type'Class;
-      Tree         : in     Syntax_Trees.Tree'Class;
-      Anchor_Token : in     Augmented_Token;
+      Tree         : in     WisiToken.Syntax_Trees.Tree;
+      Anchor_Token : in     Syntax_Trees.Valid_Node_Access;
       Offset       : in     Integer)
      return Integer
+   --  If there is a left_paren in Anchor_Token.Line_Region.First
+   --  containing Anchor_Token, return offset of that paren from first
+   --  char in line + Offset. Else return Offset.
    is
-      use Valid_Node_Index_Arrays;
-      use all type Ada.Containers.Count_Type;
-
       Left_Paren_ID  : Token_ID renames Data.Left_Paren_ID;
       Right_Paren_ID : Token_ID renames Data.Right_Paren_ID;
 
-      I              : Base_Token_Index := Anchor_Token.First_Terminals_Index;
-      Paren_Count    : Integer          := 0;
-      Paren_Char_Pos : Buffer_Pos       := Invalid_Buffer_Pos;
-      Text_Begin_Pos : Buffer_Pos       := Invalid_Buffer_Pos;
+      Begin_Token : constant Syntax_Trees.Valid_Node_Access := Tree.Line_Begin_Token
+        (Tree.Line_Region (Anchor_Token, Trailing_Non_Grammar => True).First);
+
+      I : Syntax_Trees.Node_Access := Tree.First_Terminal (Anchor_Token);
+
+      Paren_Count    : Integer    := 0;
+      Paren_Char_Pos : Buffer_Pos := Invalid_Buffer_Pos;
+      Text_Begin_Pos : Buffer_Pos := Invalid_Buffer_Pos;
    begin
-      Find_First :
       loop
          declare
-            Tok : Aug_Token_Const_Ref renames Get_Aug_Token_Const (Data, Tree, I);
+            Tok_Char_Region : constant Buffer_Region := Tree.Char_Region (I, Trailing_Non_Grammar => False);
          begin
-            if Tok.Deleted then
-               null;
-
-            elsif Tok.ID = Left_Paren_ID then
+            if Tree.ID (I) = Left_Paren_ID then
                Paren_Count := Paren_Count + 1;
                if Paren_Count = 1 then
-                  Paren_Char_Pos := Tok.Char_Region.First;
+                  Paren_Char_Pos := Tok_Char_Region.First;
                end if;
 
-            elsif Tok.ID = Right_Paren_ID then
+            elsif Tree.ID (I) = Right_Paren_ID then
                Paren_Count := Paren_Count - 1;
 
             end if;
 
-            if Tok.First then
-               Text_Begin_Pos := Tok.Char_Region.First;
-               exit Find_First;
-            else
-               if Length (Tok.Inserted_Before) > 0 then
-                  for Node of Tok.Inserted_Before loop
-                     declare
-                        Ins_Tok : Augmented_Token renames Augmented_Token (Tree.Augmented (Node).all);
-                     begin
-                        if Ins_Tok.ID = Left_Paren_ID then
-                           Paren_Count := Paren_Count + 1;
-                           if Paren_Count = 1 then
-                              Paren_Char_Pos := Tok.Char_Region.First;
-                           end if;
-
-                        elsif Ins_Tok.ID = Right_Paren_ID then
-                           Paren_Count := Paren_Count - 1;
-
-                        end if;
-
-                        if Ins_Tok.First then
-                           Text_Begin_Pos := Tok.Char_Region.First;
-                           exit Find_First;
-                        end if;
-                     end;
-                  end loop;
-               end if;
+            if I = Begin_Token then
+               Text_Begin_Pos := Tok_Char_Region.First;
+               exit;
             end if;
          end;
-         I := I - 1;
-      end loop Find_First;
+
+         I := Tree.Prev_Terminal (I);
+      end loop;
 
       if Paren_Char_Pos /= Invalid_Buffer_Pos and Text_Begin_Pos /= Invalid_Buffer_Pos then
          return 1 + Offset + Integer (Paren_Char_Pos - Text_Begin_Pos);
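
   A worked example of that return expression (an illustration for this review,
   not part of the patch; all values are hypothetical): if the anchor line is
   "Foo (Bar," with its first character at buffer position 101 and Offset is 0:

      Text_Begin_Pos : constant Buffer_Pos := 101;  --  the 'F'
      Paren_Char_Pos : constant Buffer_Pos := 105;  --  the '('
      Offset         : constant Integer    := 0;
      Result         : constant Integer    :=
        1 + Offset + Integer (Paren_Char_Pos - Text_Begin_Pos);
      --  Result = 5: the column just past the '(' when the line starts in column 0.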
@@ -403,7 +283,10 @@ package body Wisi is
       end if;
    end Put;
 
-   procedure Put (Line_Number : in Line_Number_Type; Item : in Indent_Type)
+   procedure Put
+     (Tree : in Syntax_Trees.Tree;
+      Line : in Line_Number_Type;
+      Item : in Indent_Type)
    is begin
       --  All Anchors must be resolved at this point, but not all lines have
       --  an indent computed. A negative indent is an error in either the
@@ -417,27 +300,48 @@ package body Wisi is
          declare
             --  We can easily get negative indents when there are syntax errors.
             Ind : constant Integer := Integer'Max (0, Item.Int_Indent);
+            Line_Begin_Char_Pos : Base_Buffer_Pos :=
+              (if Line = Line_Number_Type'First
+               then Buffer_Pos'First
+               else Invalid_Buffer_Pos);
+            Node : constant Syntax_Trees.Node_Access :=
+              (if Line = Line_Number_Type'First
+               then Syntax_Trees.Invalid_Node_Access
+               else Tree.Find_New_Line (Line, Line_Begin_Char_Pos));
+            pragma Unreferenced (Node);
          begin
+            if Debug_Mode then
+               if Ind > 100 then
+                  --  This is better than hanging Emacs by returning a huge bogus indent.
+                  raise SAL.Programmer_Error with "indent > 100";
+               elsif Line_Begin_Char_Pos = Invalid_Buffer_Pos then
+                  raise SAL.Programmer_Error with "Line_Begin_Char_Pos = Invalid_Buffer_Pos, line" & Line'Image;
+               end if;
+            end if;
             Ada.Text_IO.Put_Line
-              ('[' & Indent_Code & Line_Number_Type'Image (Line_Number) & Integer'Image (Ind) & ']');
+              --  elisp doesn't need line number, but it is very helpful for debugging
+              ('[' & Indent_Code & Line'Image & Line_Begin_Char_Pos'Image & Ind'Image & ']');
          end;
 
-      when Anchor_Nil | Anchor_Int | Anchored | Anchor_Anchored =>
+      when Anchored =>
          raise SAL.Programmer_Error with "Indent item has non-int label: " & Indent_Label'Image (Item.Label);
       end case;
    end Put;
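
   For orientation (an illustration, not part of the patch): each call emits one
   bracketed form per line for the Emacs side to apply. With line 12 beginning at
   character position 340 and a computed indent of 2, and assuming Indent_Code is
   the digit "3" (its actual value is defined elsewhere in wisi.ads), the emitted
   text would be

      [3 12 340 2]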
 
    procedure Put
-     (Item : in Parse.LR.Configuration;
-      Data : in Parse_Data_Type;
-      Tree : in Syntax_Trees.Tree)
+     (Item      : in Parse.Recover_Op_Nodes_Arrays.Vector;
+      Error_Pos : in Buffer_Pos;
+      Tree      : in Syntax_Trees.Tree)
    is
       use Ada.Strings.Unbounded;
-      use Parse.LR;
-      use Parse.LR.Config_Op_Arrays, Parse.LR.Config_Op_Array_Refs;
+      use WisiToken.Parse;
+      use WisiToken.Parse.Recover_Op_Nodes_Arrays;
+      use all type Ada.Containers.Count_Type;
+
+      Descriptor : WisiToken.Descriptor renames Tree.Lexer.Descriptor.all;
 
       --  Output is a sequence of edit regions; each is:
-      --  [edit-pos [inserted token-ids] [deleted token-ids] deleted-region]
+      --  [error-pos edit-pos [inserted token-ids] [deleted token-ids] deleted-region]
 
       type State_Label is
         (None,     -- not started yet
@@ -447,94 +351,124 @@ package body Wisi is
       State : State_Label := None;
       --  State of the current edit region.
 
+      Last_Edit_Pos  : Buffer_Pos       := Invalid_Buffer_Pos;
       Line           : Unbounded_String := To_Unbounded_String ("[");
       Deleted_Region : Buffer_Region    := Null_Buffer_Region;
-      Last_Deleted : Config_Op (Delete) := (Delete, Invalid_Token_ID, Invalid_Token_Index);
-
-      procedure Start_Edit_Region (Op : in Insert_Delete_Op)
+      Last_Deleted   : Recover_Op_Nodes :=
+        (Op               => Delete,
+         Input_Node_Index => Syntax_Trees.Invalid_Node_Index,
+         Del_ID           => Invalid_Token_ID,
+         Del_Index        => Syntax_Trees.Sequential_Index'Last,
+         Del_Node         => Syntax_Trees.Invalid_Node_Access);
+
+      procedure Start_Edit_Region (Error_Pos, Edit_Pos : in Buffer_Pos)
       is begin
          Append (Line, "[");
-         Append (Line, Get_Aug_Token_Const (Data, Tree, Parse.LR.Token_Index (Op)).Char_Region.First'Image);
+         Append (Line, Trimmed_Image (Error_Pos));
+         Append (Line, Edit_Pos'Image);
          Append (Line, "[");
       end Start_Edit_Region;
 
-      function Deleted_Region_Image return String
-      is begin
-         return "(" & Deleted_Region.First'Image & " . " & Buffer_Pos'Image 
(Deleted_Region.Last + 1) & ")";
-      end Deleted_Region_Image;
-
       procedure Terminate_Edit_Region
       is begin
          case State is
          when None =>
             null;
          when Inserted =>
-            Append (Line, "][]" & Deleted_Region_Image & "]");
+            Append (Line, "][]" & Image (Deleted_Region) & "]");
          when Deleted =>
-            Append (Line, "]" & Deleted_Region_Image & "]");
+            --  Emacs (cdr (region)) is after last char to be deleted.
+            Append
+              (Line, "]" & "(" & Trimmed_Image (Integer (Deleted_Region.First)) & " ." &
+                 Buffer_Pos'Image (Deleted_Region.Last + 1) & ")" & "]");
          end case;
          Deleted_Region := Null_Buffer_Region;
       end Terminate_Edit_Region;
    begin
       if Trace_Action > Outline then
-         Ada.Text_IO.Put_Line (";; " & Parse.LR.Image (Item.Ops, 
Data.Descriptor.all));
+         Tree.Lexer.Trace.Put_Line ("recover: " & WisiToken.Parse.Image (Item, 
Tree));
+      end if;
+
+      if Item.Length = 0 or not Tree.Parents_Set then
+         --  Parents not set due to failed recover.
+         return;
       end if;
 
       Append (Line, Recover_Code);
-      for I in First_Index (Item.Ops) .. Last_Index (Item.Ops) loop
+      for I in Item.First_Index .. Item.Last_Index loop
          declare
-            Op : Config_Op renames Constant_Ref (Item.Ops, I);
+            use WisiToken.Syntax_Trees;
+
+            Op : constant Recover_Op_Nodes := Element (Item, I);
+
+            Edit_Pos_Node : constant Node_Access :=
+              --  Can be Invalid_Node_Access when recover fails.
+              (case Op.Op is
+               when Insert =>
+                 (if Op.Ins_Node = Invalid_Node_Access
+                  then Invalid_Node_Access
+                  else Tree.First_Source_Terminal (Op.Ins_Node, Trailing_Non_Grammar => True, Following => True)),
+
+               when Delete => Op.Del_Node);
+
+            Edit_Pos : constant Buffer_Pos :=
+              (if Edit_Pos_Node = Invalid_Node_Access
+               then Invalid_Buffer_Pos
+               else Tree.Char_Region (Edit_Pos_Node, Trailing_Non_Grammar => True).First);
          begin
-            case Op.Op is
-            when Fast_Forward =>
-               Terminate_Edit_Region;
-               State := None;
+            if Last_Edit_Pos = Invalid_Buffer_Pos then
+               Last_Edit_Pos := Edit_Pos;
 
-            when Undo_Reduce | Push_Back =>
-               null;
+            elsif Edit_Pos /= Last_Edit_Pos then
+               Terminate_Edit_Region;
+               State         := None;
+               Last_Edit_Pos := Edit_Pos;
+            end if;
 
+            case Op.Op is
             when Insert =>
                case State is
                when None =>
-                  Start_Edit_Region (Op);
+                  Start_Edit_Region (Error_Pos, Edit_Pos);
 
                when Inserted =>
                   null;
 
                when Deleted =>
                   Terminate_Edit_Region;
-                  Start_Edit_Region (Op);
+                  Start_Edit_Region (Error_Pos, Edit_Pos);
 
                end case;
                Append (Line, Token_ID'Image (Op.Ins_ID));
                State := Inserted;
 
             when Delete =>
-               Deleted_Region := Deleted_Region and Get_Aug_Token_Const (Data, Tree, Op.Del_Token_Index).Char_Region;
+               Deleted_Region := Deleted_Region and Tree.Char_Region (Op.Del_Node, Trailing_Non_Grammar => False);
                declare
                   Skip : Boolean := False;
                begin
                   case State is
                   when None =>
-                     Start_Edit_Region (Op);
+                     Start_Edit_Region (Error_Pos, Edit_Pos);
                      Append (Line, "][");
 
                   when Inserted =>
                      Append (Line, "][");
 
                   when Deleted =>
-                     if Data.Embedded_Quote_Escape_Doubled and then
-                       ((Last_Deleted.Del_ID = Data.Descriptor.String_1_ID and
-                           Op.Del_ID = Data.Descriptor.String_1_ID) or
-                          (Last_Deleted.Del_ID = Data.Descriptor.String_2_ID and
-                             Op.Del_ID = Data.Descriptor.String_2_ID))
+                     if Tree.Lexer.Escape_Delimiter_Doubled (Last_Deleted.Del_ID) and then
+                       ((Last_Deleted.Del_ID = Descriptor.String_1_ID and
+                           Op.Del_ID = Descriptor.String_1_ID) or
+                          (Last_Deleted.Del_ID = Descriptor.String_2_ID and
+                             Op.Del_ID = Descriptor.String_2_ID))
                      then
                         declare
-                           Tok_1 : Augmented_Token renames Get_Aug_Token_Const
-                             (Data, Tree, Last_Deleted.Del_Token_Index);
-                           Tok_2 : Augmented_Token renames Get_Aug_Token_Const (Data, Tree, Op.Del_Token_Index);
+                           Tok_1_Char_Region : constant Buffer_Region := Tree.Char_Region
+                             (Last_Deleted.Del_Node, Trailing_Non_Grammar => False);
+                           Tok_2_Char_Region : constant Buffer_Region := Tree.Char_Region
+                             (Op.Del_Node, Trailing_Non_Grammar => False);
                        begin
-                           if Tok_1.Char_Region.Last + 1 = Tok_2.Char_Region.First then
+                           if Tok_1_Char_Region.Last + 1 = Tok_2_Char_Region.First then
                              --  Buffer text was '"""', lexer repair changed it to '""""'. The
                              --  repaired text looks like a single string with an embedded quote.
                              --  But here, it is two STRING_LITERAL tokens. Don't send the second
@@ -565,67 +499,70 @@ package body Wisi is
       Ada.Text_IO.Put_Line (To_String (Line));
    end Put;
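
   Schematically (an illustration, not part of the patch): for an error at buffer
   position 118 where recovery inserted one token with id 34 at edit position 120
   and deleted nothing, the loop above emits one edit region of roughly the form

      [<recover-code>[118 120[ 34][]<deleted-region-image>]]

   where the <recover-code> prefix and the image of the empty deleted region are
   placeholders; their exact text is defined elsewhere in wisi and WisiToken.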
 
-   procedure Resolve_Anchors (Data : in out Parse_Data_Type)
+   procedure Resolve_Anchors
+     (Data : in out Parse_Data_Type;
+      Tree : in     Syntax_Trees.Tree)
    is
-      Begin_Indent  : Integer renames Data.Begin_Indent;
-      Anchor_Indent : array (First_Anchor_ID .. Data.Max_Anchor_ID) of Integer;
+      Begin_Indent : Integer renames Data.Begin_Indent;
    begin
       if Trace_Action > Outline then
-         Ada.Text_IO.New_Line;
-         Ada.Text_IO.Put_Line (";; Begin_Indent: " & Integer'Image 
(Data.Begin_Indent));
+         Tree.Lexer.Trace.New_Line;
+         Tree.Lexer.Trace.Put_Line ("Begin_Indent: " & Integer'Image 
(Data.Begin_Indent));
          for I in Data.Indents.First_Index .. Data.Indents.Last_Index loop
-            Ada.Text_IO.Put_Line (";; " & Line_Number_Type'Image (I) & ", " & 
Image (Data.Indents (I)));
+            Tree.Lexer.Trace.Put_Line (Line_Number_Type'Image (I) & ", " & 
Image (Data.Indents (I)));
          end loop;
-         Ada.Text_IO.Put_Line (";; resolve anchors");
+         Tree.Lexer.Trace.Put_Line ("resolve anchors");
       end if;
 
-      for I in Data.Indents.First_Index .. Data.Indents.Last_Index loop
+      for Line in Data.Indents.First_Index .. Data.Indents.Last_Index loop
          declare
-            Indent : constant Indent_Type := Data.Indents (I);
+            Indent : constant Indent_Type := Data.Indents (Line);
          begin
             case Indent.Label is
             when Not_Set =>
-               --  Indent not computed, therefore not output.
-               null;
+               --  We get here in partial_parse when there is no action to set indent
+               --  for the first few lines; they are comments or low-level statements
+               --  or declarations. ada_mode-recover_partial_28.adb
+               Data.Indents.Replace_Element (Line, (Int, Invalid_Line_Number, Data.Begin_Indent));
 
             when Int =>
-               Data.Indents.Replace_Element (I, (Int, Indent.Int_Indent + Begin_Indent));
-
-            when Anchor_Nil =>
-               for I of Indent.Anchor_Nil_IDs loop
-                  Anchor_Indent (I) := Begin_Indent;
-               end loop;
-               Data.Indents.Replace_Element (I, (Int, Begin_Indent));
-
-            when Anchor_Int =>
-               for I of Indent.Anchor_Int_IDs loop
-                  Anchor_Indent (I) := Indent.Anchor_Int_Indent + Begin_Indent;
-               end loop;
-               Data.Indents.Replace_Element (I, (Int, Indent.Anchor_Int_Indent + Begin_Indent));
+               Data.Indents.Replace_Element (Line, (Int, Invalid_Line_Number, Indent.Int_Indent + Begin_Indent));
 
             when Anchored =>
-               Data.Indents.Replace_Element
-                 (I, (Int, Anchor_Indent (Indent.Anchored_ID) + Indent.Anchored_Delta));
-
-            when Anchor_Anchored =>
                declare
-                  Temp : constant Integer :=
-                    Anchor_Indent (Indent.Anchor_Anchored_ID) + Indent.Anchor_Anchored_Delta;
+                  Anchor_Line_Indent : Indent_Type renames Data.Indents (Indent.Anchor_Line);
                begin
-                  for I of Indent.Anchor_Anchored_IDs loop
-                     Anchor_Indent (I) := Temp;
-                  end loop;
-                  Data.Indents.Replace_Element (I, (Int, Temp));
+                  case Anchor_Line_Indent.Label is
+                  when Not_Set | Anchored =>
+                     raise SAL.Programmer_Error with
+                       "indent line" & Line'Image &
+                       " uses anchor line" & Indent.Anchor_Line'Image &
+                       " which has non-int anchor";
+
+                  when Int =>
+                     Data.Indents.Replace_Element
+                       (Line, (Int, Invalid_Line_Number, Anchor_Line_Indent.Int_Indent + Indent.Anchor_Delta));
+                  end case;
                end;
+
             end case;
          end;
       end loop;
+
+      if Trace_Action > Outline then
+         for I in Data.Indents.First_Index .. Data.Indents.Last_Index loop
+            if I in Data.Action_Region_Lines.First .. Data.Action_Region_Lines.Last then
+               Tree.Lexer.Trace.Put_Line (Line_Number_Type'Image (I) & ", " & Image (Data.Indents (I)));
+            end if;
+         end loop;
+      end if;
    end Resolve_Anchors;
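
   A small worked example (illustration only, not part of the patch), with
   Begin_Indent = 2 and lines processed in order, so the anchor line is already
   resolved when the anchored line reads it:

      --  before Resolve_Anchors:
      --    line 10 => (Int, 3)
      --    line 11 => (Anchored, Anchor_Line => 10, Anchor_Delta => 2)
      --  after Resolve_Anchors:
      --    line 10 => (Int, 5)   --  3 + Begin_Indent
      --    line 11 => (Int, 7)   --  line 10 indent + Anchor_Delta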
 
    procedure Set_End
      (Data           : in out Parse_Data_Type;
       Containing_Pos : in     Buffer_Pos;
-      End_Pos        : in     Buffer_Pos)
+      End_Pos        : in     Buffer_Pos;
+      Trace          : in     WisiToken.Trace_Access)
    is
       use Navigate_Cursor_Lists;
       I            : Cursor := Data.End_Positions.First;
@@ -639,6 +576,9 @@ package body Wisi is
          begin
             if Cache.Pos in Containing_Pos .. End_Pos then
                Cache.End_Pos := (True, End_Pos);
+               if Trace_Action > Detail then
+                  Trace.Put_Line ("   " & Cache.Pos'Image & " end to " & Cache.End_Pos.Item'Image);
+               end if;
                Delete_Cache := True;
             else
                Delete_Cache := False;
@@ -656,243 +596,314 @@ package body Wisi is
       end loop;
    end Set_End;
 
+   function To_Delta (Indent : in Indent_Type) return Delta_Type
+   is begin
+      return
+        (Label                  => Simple,
+         Simple_Delta           =>
+           (case Indent.Label is
+            when Not_Set  => (None, Invalid_Line_Number),
+            when Int      => (Int, Invalid_Line_Number, Indent.Int_Indent),
+            when Anchored => (Anchored, Invalid_Line_Number, Indent.Anchor_Line, Indent.Anchor_Delta)));
+   end To_Delta;
+
    ----------
    --  public subprograms (declaration order)
 
-   procedure Initialize
-     (Data              : in out Parse_Data_Type;
-      Lexer             : in     WisiToken.Lexer.Handle;
-      Descriptor        : access constant WisiToken.Descriptor;
-      Base_Terminals    : in     Base_Token_Array_Access;
-      Post_Parse_Action : in     Post_Parse_Action_Type;
-      Begin_Line        : in     Line_Number_Type;
-      End_Line          : in     Line_Number_Type;
-      Begin_Indent      : in     Integer;
-      Params            : in     String)
+   procedure Skip
+     (Source : in     String;
+      Last   : in out Integer;
+      Char   : in     Character)
+   is begin
+      loop
+         if Last = Source'Last then
+            raise Protocol_Error with "at" & Last'Image & ": expecting '" & Char & "' found EOI";
+
+         elsif Source (Last + 1) = ' ' then
+            Last := Last + 1;
+            exit when Char = ' ';
+
+         elsif Source (Last + 1) = Char then
+            Last := Last + 1;
+            exit;
+         else
+            raise Protocol_Error with
+              "at" & Last'Image & ": expecting '" & Char & "' found '" & Source (Last + 1) & "'";
+         end if;
+      end loop;
+   end Skip;
+
+   function Get_String
+     (Source : in     String;
+      Last   : in out Integer)
+     return String
    is
-      pragma Unreferenced (Params);
+      use Ada.Strings.Fixed;
+      --  First we find the starting '"'; typically at Last + 1, but we
+      --  allow for other cases.
+      First : constant Integer := Index
+        (Source  => Source,
+         Pattern => """",
+         From    => Last + 1);
+
+      --  We must handle an arbitrary sequence of '\', and find the
+      --  terminating '"'; so we search for either.
+      Set  : constant Ada.Strings.Maps.Character_Set := Ada.Strings.Maps.To_Set ("\""");
+
+      Temp : Integer := First + 1;
    begin
-      Data.Line_Begin_Char_Pos.Set_First_Last
-        (First   => Begin_Line,
-         Last    => End_Line);
+      Find_End :
+      loop
+         Last := Index
+           (Source => Source,
+            Set    => Set,
+            From   => Temp);
 
-      --  + 1 for data on line following last line; see Lexer_To_Augmented.
-      Data.Line_Paren_State.Set_First_Last
-        (First   => Begin_Line,
-         Last    => End_Line + 1);
+         exit Find_End when Last = 0;
 
-      Data.Lexer             := Lexer;
-      Data.Descriptor        := Descriptor;
-      Data.Base_Terminals    := Base_Terminals;
-      Data.Post_Parse_Action := Post_Parse_Action;
+         case Source (Last) is
+         when '\' =>
+            declare
+               subtype Mod_2_Result is Integer range 0 .. 1;
+               Escape : Integer := 1;
+            begin
+               loop
+                  exit Find_End when Source'Last < Last + Escape;
 
-      case Post_Parse_Action is
-      when Navigate | Face =>
-         null;
-      when Indent =>
-         Data.Indents.Set_First_Last
-           (First   => Begin_Line,
-            Last    => End_Line);
+                  exit when Source (Last + Escape) /= '\';
+                  Escape := @ + 1;
+               end loop;
+               Last := @ + Escape - 1;
 
-         Data.Begin_Indent := Begin_Indent;
-      end case;
+               case Mod_2_Result'(Escape mod 2) is
+               when 0 =>
+                  --  Even number of '\'; next char is not escaped.
+                  null;
 
-      Data.Reset;
-   exception
-   when E : others =>
-      raise SAL.Programmer_Error with "wisi.initialize: " & 
Ada.Exceptions.Exception_Name (E) & ": " &
-        Ada.Exceptions.Exception_Message (E);
-   end Initialize;
+               when 1 =>
+                  --  Odd number of '\'; next char is escaped.
+                  Last := @ + 1;
 
-   overriding procedure Reset (Data : in out Parse_Data_Type)
-   is begin
-      Data.Last_Terminal_Node := WisiToken.Invalid_Node_Index;
+               end case;
+            end;
+
+         when '"' =>
+            exit Find_End;
+         when others =>
+            raise SAL.Programmer_Error;
+         end case;
+         Temp := Last + 1;
+      end loop Find_End;
 
-      Data.Leading_Non_Grammar.Clear;
+      if First = 0 or Last = 0 then
+         raise Protocol_Error with "at" & Last'Image & ": no '""' found for 
string";
+      end if;
 
-      --  Data.Line_Begin_Char_Pos  set in Initialize, overwritten in Lexer_To_Augmented
+      return Source (First + 1 .. Last - 1);
+   end Get_String;
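
   The backslash handling above only locates the terminating '"'; the escapes
   themselves are kept in the result. A hedged usage sketch (illustration only,
   assuming the visibility of this package body):

      declare
         Source : constant String := "name ""a \""b\"" c"" rest";
         --  character content:      name "a \"b\" c" rest
         Last   : Integer := 4;  --  just past "name"
         Item   : constant String := Get_String (Source, Last);
      begin
         --  Item is  a \"b\" c  (9 characters, backslashes kept);
         --  Last now indexes the closing '"'.
         null;
      end;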
 
-      for S of Data.Line_Paren_State loop
-         S := 0;
-      end loop;
-      Data.Current_Paren_State := 0;
+   function Get_Enum
+     (Source : in     String;
+      Last   : in out Integer)
+     return String
+   is
+      use Ada.Strings.Fixed;
+      First : constant Integer := Last + 1;
+   begin
+      Last := Index
+        (Source  => Source,
+         Pattern => " ",
+         From    => First + 1); -- Skip a leading space if present.
 
-      Data.Navigate_Caches.Finalize;
-      Data.Navigate_Caches.Initialize;
+      if Last = 0 then
+         Last := Source'Last;
+      else
+         Last := Last - 1;
+      end if;
+      return Source (First .. Last);
+   end Get_Enum;
 
-      Data.Name_Caches.Finalize;
-      Data.Name_Caches.Initialize;
+   function Get_Integer
+     (Source : in     String;
+      Last   : in out Integer)
+     return Integer
+   is
+      use Ada.Strings.Fixed;
+      First : constant Integer := Last + 1;
+   begin
+      Last := Index
+        (Source  => Source,
+         Pattern => " ",
+         From    => First + 1); -- Skip a leading space if present.
 
-      Data.End_Positions.Clear;
+      if Last = 0 then
+         Last := Source'Last;
+      else
+         Last := Last - 1;
+      end if;
 
-      Data.Face_Caches.Finalize;
-      Data.Face_Caches.Initialize;
+      return Integer'Value (Source (First .. Last));
+   exception
+   when others =>
+      raise Protocol_Error with "at" & First'Image & ": bad integer '" & 
Source (First .. Last) & "'";
+   end Get_Integer;
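
   These helpers walk a command string left to right, each advancing Last. A
   hedged usage sketch (illustration only; the command text is invented and the
   block assumes the visibility of this package body):

      declare
         Source : constant String := "parse 2 ""test.adb""";
         --  character content:      parse 2 "test.adb"
         Last   : Integer := Source'First - 1;
      begin
         Ada.Text_IO.Put_Line (Get_Enum (Source, Last));                    --  parse
         Ada.Text_IO.Put_Line (Integer'Image (Get_Integer (Source, Last))); --   2
         Ada.Text_IO.Put_Line (Get_String (Source, Last));                  --  test.adb
      end;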
 
-      for I in Data.Indents.First_Index .. Data.Indents.Last_Index loop
-         Data.Indents.Replace_Element (I, (Label => Not_Set));
+   function To_Unix_Line_Endings (Source : in out String) return Integer
+   --  Return count of line endings converted.
+   is
+      Read              : Integer := Source'First;
+      Write             : Integer := Source'First - 1;
+      Line_Ending_Count : Integer := 0;
+   begin
+      loop
+         exit when Read > Source'Last;
+         if Source (Read) = ASCII.CR and (Read < Source'Last and then Source (Read + 1) = ASCII.LF) then
+            Write             := @ + 1;
+            Source (Write)    := ASCII.LF;
+            Read              := @ + 2;
+            Line_Ending_Count := @ + 1;
+         else
+            Write          := @ + 1;
+            Source (Write) := Source (Read);
+            Read           := @ + 1;
+         end if;
       end loop;
-      Data.Max_Anchor_ID := First_Anchor_ID - 1;
-   end Reset;
-
-   function Source_File_Name (Data : in Parse_Data_Type) return String
-   is begin
-      return Data.Lexer.File_Name;
-   end Source_File_Name;
+      return Line_Ending_Count;
+   end To_Unix_Line_Endings;
 
-   function Post_Parse_Action (Data : in Parse_Data_Type) return Post_Parse_Action_Type
-   is begin
-      return Data.Post_Parse_Action;
-   end Post_Parse_Action;
+   procedure To_Unix_Line_Endings
+     (Source           : in     Ada.Strings.Unbounded.String_Access;
+      Source_Byte_Last : in out Integer;
+      Source_Char_Last : in out Integer)
+   is
+      Line_End_Count : constant Integer := To_Unix_Line_Endings (Source (Source'First .. Source_Byte_Last));
+   begin
+      Source_Byte_Last := @ - Line_End_Count;
+      Source_Char_Last := @ - Line_End_Count;
+   end To_Unix_Line_Endings;
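
   A minimal sketch of the conversion (illustration only, not part of the patch):

      declare
         S     : String := 'a' & ASCII.CR & ASCII.LF & 'b';
         Count : constant Integer := To_Unix_Line_Endings (S);
      begin
         --  Count = 1; S (1 .. S'Last - Count) is now 'a' & ASCII.LF & 'b'.
         --  The procedure overload above shortens the byte and char counts
         --  by Count in the same way.
         null;
      end;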
 
-   overriding
-   procedure Lexer_To_Augmented
-     (Data  : in out          Parse_Data_Type;
-      Tree  : in out          Syntax_Trees.Tree'Class;
-      Token : in              Base_Token;
-      Lexer : not null access WisiToken.Lexer.Instance'Class)
+   function Image_Action (Action : in Syntax_Trees.Post_Parse_Action) return String
    is
-      use all type Ada.Containers.Count_Type;
+      pragma Unreferenced (Action);
    begin
-      if Lexer.First then
-         Data.Line_Begin_Char_Pos (Token.Line) := Token.Char_Region.First;
+      return "action";
+   end Image_Action;
 
-         if Token.Line > Data.Line_Begin_Char_Pos.First_Index and then
-           Data.Line_Begin_Char_Pos (Token.Line - 1) = Invalid_Buffer_Pos
-         then
-            --  Previous token contains multiple lines; ie %code in wisitoken_grammar.wy
-            declare
-               First_Set_Line : Line_Number_Type;
-            begin
-               for Line in reverse Data.Line_Begin_Char_Pos.First_Index .. 
Token.Line - 1 loop
-                  if Data.Line_Begin_Char_Pos (Line) /= Invalid_Buffer_Pos then
-                     First_Set_Line := Line;
-                     exit;
-                  end if;
-               end loop;
-               for Line in First_Set_Line + 1 .. Token.Line - 1 loop
-                  Data.Line_Begin_Char_Pos (Line) := Data.Line_Begin_Char_Pos 
(First_Set_Line); -- good enough
-               end loop;
-            end;
-         end if;
+   procedure Reset_Post_Parse
+     (Data                : in out Parse_Data_Type;
+      Tree                : in     WisiToken.Syntax_Trees.Tree'Class;
+      Post_Parse_Action   : in     Post_Parse_Action_Type;
+      Action_Region_Bytes : in     WisiToken.Buffer_Region;
+      Action_Region_Chars : in     WisiToken.Buffer_Region;
+      Begin_Indent        : in     Integer)
+   is begin
+      if not Tree.Editable then
+         raise Parse_Error with "previous parse failed; can't execute 
post_parse action.";
       end if;
 
-      if Token.ID < Data.Descriptor.First_Terminal then
-         --  Non-grammar token
-
-         if Token.ID = Data.Descriptor.New_Line_ID then
-            Data.Line_Paren_State (Token.Line + 1) := Data.Current_Paren_State;
-         end if;
+      Data.Post_Parse_Action   := Post_Parse_Action;
+      Data.Action_Region_Bytes := Action_Region_Bytes;
+      Data.Action_Region_Chars := Action_Region_Chars;
+      Data.Begin_Indent := Begin_Indent;
 
-         if Data.Last_Terminal_Node = Invalid_Node_Index then
-            Data.Leading_Non_Grammar.Append ((Token with Lexer.First));
-         else
-            declare
-               Containing_Token : Aug_Token_Var_Ref renames Get_Aug_Token_Var 
(Tree, Data.Last_Terminal_Node);
+      case Post_Parse_Action is
+      when Navigate =>
+         Data.Navigate_Caches.Clear;
+         Data.End_Positions.Clear;
+         Data.Name_Caches.Clear;
 
-               Trailing_Blank : constant Boolean :=
-                 Token.ID = Data.Descriptor.New_Line_ID and
-                 (Containing_Token.Non_Grammar.Length > 0 and then
-                    Containing_Token.Non_Grammar
-                      (Containing_Token.Non_Grammar.Last_Index).ID = 
Data.Descriptor.New_Line_ID);
-            begin
-               if Lexer.First and
-                 (Token.ID in Data.First_Comment_ID .. Data.Last_Comment_ID or
-                    Trailing_Blank)
-               then
-                  if Containing_Token.First_Trailing_Comment_Line = 
Invalid_Line_Number then
-                     Containing_Token.First_Trailing_Comment_Line := 
Token.Line;
-                  end if;
-                  Containing_Token.Last_Trailing_Comment_Line  := Token.Line;
-               end if;
+      when Face =>
+         Data.Face_Caches.Clear;
 
-               Containing_Token.Non_Grammar.Append ((Token with Lexer.First));
-            end;
-         end if;
+      when Indent =>
+         Data.Action_Region_Lines :=
+           (First => Tree.Line_At_Byte_Pos (Action_Region_Bytes.First),
+            Last  => Tree.Line_At_Byte_Pos (Action_Region_Bytes.Last));
 
-      else
-         --  grammar token
+         --  We need more lines in Indents than in Action_Region, for nonterms
+         --  that extend outside the action region.
          declare
-            Temp : constant Augmented_Token_Access := new Augmented_Token'
-              (Token with
-               Deleted                     => False,
-               First                       => Lexer.First,
-               Paren_State                 => Data.Current_Paren_State,
-               First_Terminals_Index       => Data.Terminals.Last_Index,
-               Last_Terminals_Index        => Data.Terminals.Last_Index,
-               First_Indent_Line           => (if Lexer.First then Token.Line 
else Invalid_Line_Number),
-               Last_Indent_Line            => (if Lexer.First then Token.Line 
else Invalid_Line_Number),
-               First_Trailing_Comment_Line => Invalid_Line_Number, -- Set by 
Reduce
-               Last_Trailing_Comment_Line  => Invalid_Line_Number,
-               Non_Grammar                 => 
Non_Grammar_Token_Arrays.Empty_Vector,
-               Inserted_Before             => 
Valid_Node_Index_Arrays.Empty_Vector);
+            Tree_Line_Region : constant WisiToken.Line_Region := Tree.Line_Region
+              (Tree.Root, Trailing_Non_Grammar => True);
          begin
-            Data.Last_Terminal_Node := Token.Tree_Index;
-
-            if Token.ID = Data.Left_Paren_ID then
-               Data.Current_Paren_State := Data.Current_Paren_State + 1;
+            Data.Indents.Set_First_Last
+              (First => Tree_Line_Region.First,
+               Last  => Tree_Line_Region.Last);
+         end;
 
-            elsif Token.ID = Data.Right_Paren_ID then
-               Data.Current_Paren_State := Data.Current_Paren_State - 1;
-            end if;
+         for I in Data.Indents.First_Index .. Data.Indents.Last_Index loop
+            Data.Indents.Replace_Element (I, (Not_Set, Invalid_Line_Number));
+         end loop;
+      end case;
 
-            Tree.Set_Augmented (Token.Tree_Index, Base_Token_Class_Access 
(Temp));
-         end;
+      if Data.Augmented_Cache_Version = Cache_Version'Last then
+         Tree.Free_Augmented;
+         Data.Augmented_Cache_Version := Cache_Version'First + 1;
+      else
+         Data.Augmented_Cache_Version := @ + 1;
       end if;
-   end Lexer_To_Augmented;
+   end Reset_Post_Parse;
+
+   function Post_Parse_Action (Data : in Parse_Data_Type) return Post_Parse_Action_Type
+   is begin
+      return Data.Post_Parse_Action;
+   end Post_Parse_Action;
+
+   function Action_Region_Bytes (Data : in Parse_Data_Type) return WisiToken.Buffer_Region
+   is begin
+      return Data.Action_Region_Bytes;
+   end Action_Region_Bytes;
 
    overriding
-   procedure Insert_Token
-     (Data  : in out Parse_Data_Type;
-      Tree  : in out Syntax_Trees.Tree'Class;
-      Token : in     Valid_Node_Index)
+   function Copy_Augmented
+     (User_Data : in Parse_Data_Type;
+      Augmented : in Syntax_Trees.Augmented_Class_Access)
+     return Syntax_Trees.Augmented_Class_Access
    is
-      use Valid_Node_Index_Arrays;
-
-      Before_Index : constant Token_Index := Tree.Before (Token);
-      Before_Aug : Aug_Token_Var_Ref renames Get_Aug_Token_Var (Data, Tree, 
Before_Index);
-
-      --  Set data that allows using Token when computing indent.
-
-      Indent_Line : constant Line_Number_Type :=
-        (if Before_Aug.First
-         then Before_Aug.Line
-         else Invalid_Line_Number);
-
-      --  Set for Insert_After False; see below for True.
-      New_Aug : constant Augmented_Token_Access := new Augmented_Token'
-        (ID                          => Tree.ID (Token),
-         Tree_Index                  => Token,
-         Byte_Region                 => (First | Last => 
Before_Aug.Byte_Region.First),
-         Line                        => Before_Aug.Line,
-         Column                      => Before_Aug.Column,
-         Char_Region                 => (First | Last => 
Before_Aug.Char_Region.First),
-         Deleted                     => False,
-         First                       => Before_Aug.First,
-         Paren_State                 => Before_Aug.Paren_State,
-         First_Terminals_Index       => Invalid_Token_Index,
-         Last_Terminals_Index        => Invalid_Token_Index,
-         First_Indent_Line           => Indent_Line,
-         Last_Indent_Line            => Indent_Line,
-         First_Trailing_Comment_Line => Invalid_Line_Number,
-         Last_Trailing_Comment_Line  => Invalid_Line_Number,
-         Non_Grammar                 => Non_Grammar_Token_Arrays.Empty_Vector,
-         Inserted_Before             => Valid_Node_Index_Arrays.Empty_Vector);
-
-      Prev_Terminal : constant Node_Index := Tree.Prev_Terminal (Token);
-      --  Invalid_Node_Index if Token is inserted before first grammar token
-
-      Insert_After : Boolean := False;
+      Old_Aug : constant Augmented_Access := Augmented_Access (Augmented);
+      New_Aug : constant Augmented_Access := new Wisi.Augmented'(Old_Aug.all);
    begin
-      Tree.Set_Augmented (Token, Base_Token_Class_Access (New_Aug));
+      return Syntax_Trees.Augmented_Class_Access (New_Aug);
+   end Copy_Augmented;
 
-      Append (Before_Aug.Inserted_Before, Token);
+   overriding
+   procedure Initialize_Actions
+     (Data : in out Parse_Data_Type;
+      Tree : in     WisiToken.Syntax_Trees.Tree'Class)
+   is begin
+      --  Parsing is complete, with error recover insert/delete tokens in
+      --  the parse tree. Insert_Token, Delete_Token have been called;
 
-      if Prev_Terminal /= Invalid_Node_Index and Before_Aug.First then
-         declare
-            use all type Ada.Containers.Count_Type;
-            use all type Ada.Text_IO.Count;
+      if Trace_Action > Outline then
+         Tree.Lexer.Trace.Put_Line ("action_region_bytes: " & Image 
(Data.Action_Region_Bytes));
+         Tree.Lexer.Trace.Put_Line ("action_region_lines: " & Image 
(Data.Action_Region_Lines));
+      end if;
+   end Initialize_Actions;
+
+   overriding
+   procedure Insert_Token
+     (Data           : in out Parse_Data_Type;
+      Tree           : in out Syntax_Trees.Tree'Class;
+      Inserted_Token : in     Syntax_Trees.Valid_Node_Access)
+   --  Set data that allows using Inserted_Token when computing indent.
+   is
+      use Syntax_Trees;
+
+      Descriptor : WisiToken.Descriptor renames Tree.Lexer.Descriptor.all;
 
-            --  See test/ada_mode-interactive_2.adb, "Typing ..."; three tests.
+      Inserted_Before : constant Valid_Node_Access := Tree.Next_Terminal (Inserted_Token);
+
+      First_Token : constant Node_Access := Tree.Line_Begin_Token
+        (Tree.Line_Region (Inserted_Before, Trailing_Non_Grammar => True).First);
+
+      Insert_Location : WisiToken.Insert_Location := Before_Next;
+   begin
+      if First_Token = Inserted_Token then
+         declare
+            use all type Ada.Containers.Count_Type;
+            use all type SAL.Base_Peek_Type;
+
+            --  See ada_mode-interactive_02.adb, "Typing ..."; three tests.
             --
             --  When typing new code, we want a new blank line to be indented as
             --  if the code was there already. To accomplish that, we put the
@@ -902,303 +913,185 @@ package body Wisi is
             --  Compare to test/ada_mode-recover_20.adb. There we are not typing
             --  new code, but there is a blank line; the right paren is placed at
             --  the end of the blank line, causing the comment to be indented.
-
-            Prev_Aug : Aug_Token_Var_Ref renames Get_Aug_Token_Var (Tree, 
Prev_Terminal);
-
-            --  Prev_Aug.Non_Grammar must have at least one New_Line, since
-            --  Before_Aug.First is True. The whitespace after the New_Line is not
-            --  given a token.
             --
-            --  If the first two tokens in Prev_Non_Grammar are both New_Lines,
-            --  there is a blank line after the code line (and before any
-            --  comments); assume that is the edit point.
-            Insert_On_Blank_Line : constant Boolean := Prev_Aug.Non_Grammar.Length >= 2 and then
-              (Prev_Aug.Non_Grammar (Prev_Aug.Non_Grammar.First_Index).ID = Data.Descriptor.New_Line_ID and
-                 Prev_Aug.Non_Grammar (Prev_Aug.Non_Grammar.First_Index + 1).ID = Data.Descriptor.New_Line_ID);
-
-            --  In Ada, 'end' is Insert_After except when Insert_On_Blank_Line is
-            --  True (see test/ada_mode-interactive_2.adb Record_1), so Insert_After
-            --  needs Insert_On_Blank_Line.
-         begin
-            Insert_After := Parse_Data_Type'Class (Data).Insert_After (Tree, 
Token, Insert_On_Blank_Line);
-
-            if Insert_After then
-               if Insert_On_Blank_Line then
-                  declare
-                     Prev_Non_Grammar : constant Non_Grammar_Token :=
-                       Prev_Aug.Non_Grammar (Prev_Aug.Non_Grammar.First_Index 
+ 1);
-                     --  The newline nominally after the inserted token.
-                  begin
-                     New_Aug.Byte_Region := (First | Last => 
Prev_Non_Grammar.Byte_Region.Last - 1);
-                     New_Aug.Char_Region := (First | Last => 
Prev_Non_Grammar.Char_Region.Last - 1);
-
-                     New_Aug.First  := True;
-                     New_Aug.Line   := Prev_Non_Grammar.Line;
-                     New_Aug.Column := Prev_Aug.Column + Ada.Text_IO.Count 
(Length (New_Aug.Char_Region)) - 1;
-
-                     New_Aug.First_Indent_Line := Prev_Non_Grammar.Line;
-                     New_Aug.Last_Indent_Line  := Prev_Non_Grammar.Line;
-
-                     for I in Prev_Aug.Non_Grammar.First_Index + 1 .. 
Prev_Aug.Non_Grammar.Last_Index loop
-                        New_Aug.Non_Grammar.Append (Prev_Aug.Non_Grammar (I));
-                     end loop;
-
-                     Prev_Aug.Non_Grammar.Set_First_Last
-                       (Prev_Aug.Non_Grammar.First_Index, 
Prev_Aug.Non_Grammar.First_Index);
-                  end;
-               else
-                  New_Aug.Byte_Region := (First | Last => 
Prev_Aug.Byte_Region.Last);
-                  New_Aug.Char_Region := (First | Last => 
Prev_Aug.Char_Region.Last);
-
-                  New_Aug.First  := False;
-                  New_Aug.Line   := Prev_Aug.Line;
-                  New_Aug.Column := Prev_Aug.Column + Ada.Text_IO.Count 
(Length (Prev_Aug.Char_Region)) - 1;
-
-                  New_Aug.First_Indent_Line := Invalid_Line_Number;
-                  New_Aug.Last_Indent_Line  := Invalid_Line_Number;
-
-                  New_Aug.Non_Grammar  := Prev_Aug.Non_Grammar;
-                  Prev_Aug.Non_Grammar := 
Non_Grammar_Token_Arrays.Empty_Vector;
+            --  Also test/ada_mode-interactive_05.adb Proc_2; error recover
+            --  inserts "null;" before "end"; we want it on the blank line. So
+            --  Insert_After has to see the next source_terminal, which may not be
+            --  Inserted_Before.
 
-               end if;
-
-               New_Aug.First_Trailing_Comment_Line := 
Prev_Aug.First_Trailing_Comment_Line;
-               New_Aug.Last_Trailing_Comment_Line  := 
Prev_Aug.Last_Trailing_Comment_Line;
-
-               Prev_Aug.First_Trailing_Comment_Line := Invalid_Line_Number;
-               Prev_Aug.Last_Trailing_Comment_Line  := Invalid_Line_Number;
-            end if;
-         end;
-      end if;
-
-      if New_Aug.First and not Insert_After then
-         Before_Aug.First             := False;
-         Before_Aug.First_Indent_Line := Invalid_Line_Number;
-         Before_Aug.Last_Indent_Line  := Invalid_Line_Number;
-      end if;
-
-      if New_Aug.ID = Data.Left_Paren_ID then
-         Adjust_Paren_State (Data, Tree, Before_Index, New_Aug.Line + 1, +1);
-
-      elsif New_Aug.ID = Data.Right_Paren_ID then
-         Adjust_Paren_State (Data, Tree, Before_Index, New_Aug.Line + 1, -1);
-      end if;
-   end Insert_Token;
-
-   overriding
-   procedure Delete_Token
-     (Data                : in out Parse_Data_Type;
-      Tree                : in out Syntax_Trees.Tree'Class;
-      Deleted_Token_Index : in     WisiToken.Token_Index)
-   is
-      use all type Ada.Containers.Count_Type;
-      Deleted_Token    : Augmented_Token renames Get_Aug_Token_Var (Data, 
Tree, Deleted_Token_Index);
-      Prev_Token_Index : Base_Token_Index := Deleted_Token_Index - 1;
-      Next_Token_Index : Base_Token_Index := Deleted_Token_Index + 1;
-   begin
-      if Deleted_Token.Deleted then
-         --  This can happen if error recovery screws up.
-         if WisiToken.Trace_Action > WisiToken.Detail then
-            Ada.Text_IO.Put_Line (";; delete token again; ignored " & Image 
(Deleted_Token, Data.Descriptor.all));
-         end if;
-         return;
-      end if;
-      if WisiToken.Trace_Action > WisiToken.Detail then
-         Ada.Text_IO.Put_Line (";; delete token " & Image (Deleted_Token, 
Data.Descriptor.all));
-      end if;
+            Next_Source_Terminal : constant Valid_Node_Access :=
+              (if Tree.Label (Inserted_Before) = Syntax_Trees.Source_Terminal
+               then Inserted_Before
+               else Tree.Next_Source_Terminal (Inserted_Before, Trailing_Non_Grammar => False));
 
-      Deleted_Token.Deleted := True;
+            Prev_Terminal : constant Valid_Node_Access := Tree.Prev_Terminal (Inserted_Token);
+            --  Tree.SOI if Inserted_Token is inserted before first grammar token
 
-      if Deleted_Token.Non_Grammar.Length > 0 then
-         --  Move Non_Grammar to previous non-deleted token
-
-         loop
-            exit when Prev_Token_Index = Base_Token_Index'First;
-            exit when Get_Aug_Token_Const (Data, Tree, 
Prev_Token_Index).Deleted = False;
-            Prev_Token_Index := Prev_Token_Index - 1;
-         end loop;
+            Prev_Non_Grammar  : Token_Array_Var_Ref renames Tree.Non_Grammar_Var (Prev_Terminal);
+            Token_Non_Grammar : Token_Array_Var_Ref renames Tree.Non_Grammar_Var (Inserted_Token);
 
-         if Prev_Token_Index = Base_Token_Index'First then
-            Deleted_Token.Non_Grammar 
(Deleted_Token.Non_Grammar.First_Index).First := Deleted_Token.First;
-            Data.Leading_Non_Grammar.Append (Deleted_Token.Non_Grammar);
-         else
-            declare
-               Prev_Token : Augmented_Token renames Get_Aug_Token_Var (Data, 
Tree, Prev_Token_Index);
+            --  Prev_Non_Grammar must have at least one New_Line, since First
+            --  (Inserted_Token) is True. The whitespace after the New_Line is not
+            --  given a token, but comments are.
+            --
+            --  If the first two tokens in Prev_Non_Grammar are both New_Lines,
+            --  there is a blank line after the code line (and before any
+            --  comments); assume that is the edit point; see
+            --  test/ada_mode-interactive_2.adb "A := B \n+C;"
+            Insert_Line      : Base_Line_Number_Type := Invalid_Line_Number;
+            Blank_Line_Index : SAL.Base_Peek_Type    := 0; -- new_line ending blank line
+            Comment_Index    : SAL.Base_Peek_Type    := 0; -- first comment not on code line
+
+            procedure Check_Non_Grammar
+            --  Set Insert_Line, Blank_Line_Index, Comment_Index
+            is
+               I : SAL.Base_Peek_Type := Prev_Non_Grammar.First_Index;
             begin
-               Prev_Token.Non_Grammar.Append (Deleted_Token.Non_Grammar);
-
-               if Deleted_Token.First_Trailing_Comment_Line /= 
Invalid_Line_Number then
-                  if Prev_Token.First_Trailing_Comment_Line = 
Invalid_Line_Number then
-                     Prev_Token.First_Trailing_Comment_Line := 
Deleted_Token.First_Trailing_Comment_Line;
+               loop
+                  exit when I > Prev_Non_Grammar.Last_Index;
+
+                  if Comment_Index = 0 and
+                    I > Prev_Non_Grammar.First_Index and
+                    Prev_Non_Grammar (I).ID /= Descriptor.New_Line_ID
+                  then
+                     --  Exclude comment on same line as code. test/ads_mode-recover_13.adb
+                     Insert_Line   := Prev_Non_Grammar (I).Line_Region.First;
+                     Comment_Index := I;
                   end if;
-                  Prev_Token.Last_Trailing_Comment_Line  := 
Deleted_Token.Last_Trailing_Comment_Line;
-               end if;
-            end;
-         end if;
-      end if;
 
-      --  Data.Terminals.Last_Index is Wisi_EOI; it is never deleted
-      loop
-         exit when Get_Aug_Token_Const (Data, Tree, Next_Token_Index).Deleted 
= False;
-         Next_Token_Index := Next_Token_Index + 1;
-         exit when Next_Token_Index = Data.Terminals.Last_Index;
-      end loop;
-
-      if Deleted_Token.First and
-        (Next_Token_Index = Data.Terminals.Last_Index or else
-           Get_Aug_Token_Const (Data, Tree, Next_Token_Index).Line > 
Deleted_Token.Line)
-      then
-         --  Deleted_Token.Line is now blank; add to previous token non
-         --  grammar.
-         if Prev_Token_Index > Base_Token_Index'First then
-            declare
-               Prev_Token : Augmented_Token renames Get_Aug_Token_Var (Data, 
Tree, Prev_Token_Index);
-            begin
-               if Prev_Token.First_Trailing_Comment_Line = Invalid_Line_Number 
then
-                  Prev_Token.First_Trailing_Comment_Line := Deleted_Token.Line;
-                  Prev_Token.Last_Trailing_Comment_Line  := Deleted_Token.Line;
-               else
-                  if Prev_Token.First_Trailing_Comment_Line > 
Deleted_Token.Line then
-                     Prev_Token.First_Trailing_Comment_Line := 
Deleted_Token.Line;
-                  end if;
-                  if Prev_Token.Last_Trailing_Comment_Line < 
Deleted_Token.Line then
-                     Prev_Token.Last_Trailing_Comment_Line := 
Deleted_Token.Line;
+                  if (Blank_Line_Index = 0 and
+                        I < Prev_Non_Grammar.Last_Index) and then
+                    (Tree.Lexer.Terminated_By_New_Line (Prev_Non_Grammar (I).ID) and
+                       Prev_Non_Grammar (I + 1).ID = Descriptor.New_Line_ID)
+                  then
+                     Insert_Line      := Prev_Non_Grammar (I + 1).Line_Region.First;
+                     Blank_Line_Index := I + 1;
                   end if;
-               end if;
-            end;
-         end if;
-      end if;
 
-      if Deleted_Token.First and Next_Token_Index < Data.Terminals.Last_Index 
then
-         declare
-            Next_Token : Augmented_Token renames Get_Aug_Token_Var (Data, 
Tree, Next_Token_Index);
-         begin
-            if not Next_Token.First then
-               Next_Token.First             := True;
-               Next_Token.First_Indent_Line := Deleted_Token.First_Indent_Line;
-               Next_Token.Last_Indent_Line  := Deleted_Token.Last_Indent_Line;
-            end if;
-         end;
-      end if;
-
-      if Deleted_Token.ID = Data.Left_Paren_ID then
-         Adjust_Paren_State (Data, Tree, Deleted_Token_Index + 1, 
Deleted_Token.Line + 1, -1);
+                  exit when Blank_Line_Index /= 0 and Comment_Index /= 0;
 
-      elsif Deleted_Token.ID = Data.Right_Paren_ID then
-         Adjust_Paren_State (Data, Tree, Deleted_Token_Index + 1, 
Deleted_Token.Line + 1, +1);
+                  I := I + 1;
+               end loop;
+            end Check_Non_Grammar;
+         begin
+            Check_Non_Grammar;
+
+            Insert_Location := Parse_Data_Type'Class (Data).Insert_After
+              (Tree,
+               Insert_Token        => Inserted_Token,
+               Insert_Before_Token => Next_Source_Terminal,
+               Comment_Present     => Comment_Index > 0,
+               Blank_Line_Present  => Blank_Line_Index > 0);
+
+            pragma Assert (Prev_Non_Grammar.Length > 0); --  else First would be false in condition above.
+
+            case Insert_Location is
+            when Between =>
+               --  Insert on blank line or comment line
+               --
+               --  test/ada_mode-interactive_2.adb Function_Access_2,
+               --  ada_mode-recover_17.adb missing 'end if' at end.
+               --  Indent for new code line extending previous code.
+               declare
+                  New_Non_Grammar : WisiToken.Lexer.Token_Arrays.Vector;
+                  Start_Index : constant SAL.Peek_Type :=
+                    (if Blank_Line_Index > 0 then Blank_Line_Index else Comment_Index);
+               begin
+                  for I in Start_Index .. Prev_Non_Grammar.Last_Index loop
+                     New_Non_Grammar.Append (Prev_Non_Grammar (I));
+                  end loop;
+                  New_Non_Grammar.Append (Token_Non_Grammar);
 
-      end if;
-   end Delete_Token;
+                  Token_Non_Grammar := New_Non_Grammar;
 
-   overriding
-   procedure Reduce
-     (Data    : in out Parse_Data_Type;
-      Tree    : in out Syntax_Trees.Tree'Class;
-      Nonterm : in     Valid_Node_Index;
-      Tokens  : in     Valid_Node_Index_Array)
-   is
-      Aug_Nonterm : constant Augmented_Token_Access := new Augmented_Token'
-        (ID          => Tree.ID (Nonterm),
-         Byte_Region => Tree.Byte_Region (Nonterm),
-         others      => <>);
+                  Prev_Non_Grammar.Set_First_Last (Prev_Non_Grammar.First_Index, Start_Index - 1);
 
-      Trailing_Comment_Done : Boolean := False;
-   begin
-      Tree.Set_Augmented (Nonterm, Base_Token_Class_Access (Aug_Nonterm));
+                  Tree.Set_Insert_Location (Inserted_Token, Between);
 
-      for I in reverse Tokens'Range loop
-         --  'reverse' to find token containing trailing comments; last
-         --  non-empty token.
-         declare
-            Aug_Token : Aug_Token_Const_Ref renames Get_Aug_Token_Const_1 
(Tree, Tokens (I));
-         begin
+                  if Trace_Action > WisiToken.Outline then
+                     Tree.Lexer.Trace.Put_Line
+                       ("insert token " & Tree.Image (Inserted_Token, Node_Numbers => True, Non_Grammar => True) &
+                          " on line" & Insert_Line'Image & "; move some non_grammar from " &
+                          Tree.Image (Prev_Terminal, Node_Numbers => True, Non_Grammar => True));
+                  end if;
+               end;
 
-            if Data.Post_Parse_Action = Indent then
-               if Aug_Token.First_Terminals_Index /= Invalid_Token_Index then
-                  Aug_Nonterm.First_Terminals_Index := 
Aug_Token.First_Terminals_Index;
-               end if;
+            when After_Prev =>
+               if Prev_Non_Grammar (Prev_Non_Grammar.First_Index).ID = Tree.Lexer.Descriptor.SOI_ID then
+                  --  Don't move SOI non_grammar
 
-               if Aug_Nonterm.Last_Terminals_Index = Invalid_Token_Index then
-                  Aug_Nonterm.Last_Terminals_Index := 
Aug_Token.Last_Terminals_Index;
-               end if;
+                  for I in Prev_Non_Grammar.First_Index + 1 .. Prev_Non_Grammar.Last_Index loop
+                     Token_Non_Grammar.Append (Prev_Non_Grammar (I));
+                  end loop;
+                  Prev_Non_Grammar.Set_First_Last (Prev_Non_Grammar.First_Index, Prev_Non_Grammar.First_Index);
 
-               Aug_Nonterm.First := Aug_Nonterm.First or Aug_Token.First;
+               else
+                  Token_Non_Grammar := Prev_Non_Grammar;
 
-               if Aug_Token.First_Indent_Line /= Invalid_Line_Number then
-                  Aug_Nonterm.First_Indent_Line := Aug_Token.First_Indent_Line;
-               elsif Trailing_Comment_Done and Aug_Token.First_Trailing_Comment_Line /= Invalid_Line_Number then
-                  Aug_Nonterm.First_Indent_Line := Aug_Token.First_Trailing_Comment_Line;
+                  Prev_Non_Grammar := WisiToken.Lexer.Token_Arrays.Empty_Vector;
                end if;
 
-               if Aug_Nonterm.Last_Indent_Line = Invalid_Line_Number then
-                  if Trailing_Comment_Done and Aug_Token.Last_Trailing_Comment_Line /= Invalid_Line_Number then
-                     Aug_Nonterm.Last_Indent_Line := Aug_Token.Last_Trailing_Comment_Line;
-                  elsif Aug_Token.Last_Indent_Line /= Invalid_Line_Number then
-                     Aug_Nonterm.Last_Indent_Line := Aug_Token.Last_Indent_Line;
-                  end if;
-               end if;
+               Tree.Set_Insert_Location (Inserted_Token, After_Prev);
 
-               if not Trailing_Comment_Done then
-                  Aug_Nonterm.First_Trailing_Comment_Line := Aug_Token.First_Trailing_Comment_Line;
-                  Aug_Nonterm.Last_Trailing_Comment_Line  := Aug_Token.Last_Trailing_Comment_Line;
-                  Trailing_Comment_Done := True;
+               if Trace_Action > WisiToken.Outline then
+                  Tree.Lexer.Trace.Put_Line
+                    ("insert token " & Tree.Image (Inserted_Token, Node_Numbers => True, Non_Grammar => True) &
+                       " after " & Tree.Image (Prev_Terminal, Node_Numbers => True, Non_Grammar => True));
                end if;
 
-            end if; --  Compute_Indent
-
-            if Aug_Token.Line /= Invalid_Line_Number then
-               Aug_Nonterm.Line   := Aug_Token.Line;
-               Aug_Nonterm.Column := Aug_Token.Column;
-            end if;
-
-            if Aug_Nonterm.Char_Region.First > Aug_Token.Char_Region.First then
-               Aug_Nonterm.Char_Region.First := Aug_Token.Char_Region.First;
-            end if;
-
-            if Aug_Nonterm.Char_Region.Last < Aug_Token.Char_Region.Last then
-               Aug_Nonterm.Char_Region.Last := Aug_Token.Char_Region.Last;
-            end if;
-
-            Aug_Nonterm.Paren_State := Aug_Token.Paren_State;
+            when Before_Next =>
+               null;
+            end case;
          end;
-      end loop;
-   end Reduce;
+      end if;
+
+      if Insert_Location = Before_Next and Trace_Action > WisiToken.Outline then
+         Tree.Lexer.Trace.Put_Line
+           ("insert token " & Tree.Image (Inserted_Token, Node_Numbers => True, Non_Grammar => True) &
+              " before " & Tree.Image (Inserted_Before, Node_Numbers => True, Non_Grammar => True));
+      end if;
+   end Insert_Token;
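
The Between and After_Prev branches above move part of the previous terminal's non_grammar vector onto the inserted virtual token, so the later indent and comment handling sees the blank and comment lines attached to the right node. A minimal stand-alone sketch of that split, using the standard Ada.Containers.Vectors in place of the SAL token vectors used here; the element type, names, and split index are illustrative only:

    with Ada.Containers.Vectors;
    with Ada.Text_IO;
    procedure Split_Demo is
       package Int_Vectors is new Ada.Containers.Vectors (Positive, Integer);

       Prev     : Int_Vectors.Vector;  --  stands in for the previous terminal's non_grammar
       Inserted : Int_Vectors.Vector;  --  stands in for the inserted token's non_grammar
       Split_At : constant Positive := 3;  --  hypothetical index of the first blank/comment line
    begin
       for I in 1 .. 5 loop
          Prev.Append (I);
       end loop;

       --  Move Prev (Split_At .. Last_Index) to Inserted, preserving order.
       for I in Split_At .. Prev.Last_Index loop
          Inserted.Append (Prev (I));
       end loop;

       --  Truncate Prev to the elements before the split point; the code above
       --  does the same job with SAL's Set_First_Last.
       Prev.Set_Length (Ada.Containers.Count_Type (Split_At - 1));

       Ada.Text_IO.Put_Line
         ("prev length:" & Ada.Containers.Count_Type'Image (Prev.Length) &
            "  inserted length:" & Ada.Containers.Count_Type'Image (Inserted.Length));
    end Split_Demo;
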
 
    procedure Statement_Action
      (Data    : in out Parse_Data_Type;
       Tree    : in     Syntax_Trees.Tree;
-      Nonterm : in     Valid_Node_Index;
-      Tokens  : in     Valid_Node_Index_Array;
+      Nonterm : in     Syntax_Trees.Valid_Node_Access;
       Params  : in     Statement_Param_Array)
    is
-      Nonterm_Tok        : Aug_Token_Const_Ref renames Get_Aug_Token_Const_1 
(Tree, Nonterm);
-      First_Item         : Boolean                := True;
-      Start_Set          : Boolean                := False;
-      Override_Start_Set : Boolean                := False;
-      Containing_Pos     : Nil_Buffer_Pos         := Nil;
+      use all type SAL.Base_Peek_Type;
+
+      Descriptor  : WisiToken.Descriptor renames Tree.Lexer.Descriptor.all;
+
+      First_Item         : Boolean        := True;
+      Start_Set          : Boolean        := False;
+      Override_Start_Set : Boolean        := False;
+      Containing_Pos     : Nil_Buffer_Pos := Nil;
    begin
+      if Trace_Action > Outline then
+         Tree.Lexer.Trace.Put_Line ("Statement_Action " & Tree.Image (Nonterm, Children => True));
+      end if;
+
       for Pair of Params loop
-         if not (Pair.Index in Tokens'Range) then
-            raise Fatal_Error with Error_Message
-              (File_Name => Data.Lexer.File_Name,
-               Line      => Nonterm_Tok.Line,
-               Column    => Nonterm_Tok.Column,
-               Message   => "wisi-statement-action: " & Trimmed_Image 
(Tree.Production_ID (Nonterm)) &
+         if Pair.Index > Tree.Child_Count (Nonterm) then
+            raise Fatal_Error with Tree.Error_Message
+              (Nonterm,
+               "wisi-statement-action: " & Trimmed_Image (Tree.Production_ID (Nonterm)) &
                  " token index" & SAL.Peek_Type'Image (Pair.Index) &
-                 " not in tokens range (" & SAL.Peek_Type'Image (Tokens'First) & " .." &
-                 SAL.Peek_Type'Image (Tokens'Last) & "); bad grammar action.");
+                 " not in tokens range (1 .." & Tree.Child_Count (Nonterm)'Image & "); bad grammar action.");
 
-         elsif Tree.Byte_Region (Tokens (Pair.Index)) /= Null_Buffer_Region then
+         elsif Overlaps
+           (Tree.Char_Region (Tree.Child (Nonterm, Pair.Index), Trailing_Non_Grammar => False),
+            Data.Action_Region_Chars)
+         then
             declare
-               use all type WisiToken.Syntax_Trees.Node_Label;
-               Token  : Aug_Token_Const_Ref renames Get_Aug_Token_Const_1
-                 (Tree,
-                  (if Pair.Class = Statement_End and then
-                     Tree.Label (Tokens (Pair.Index)) = 
WisiToken.Syntax_Trees.Nonterm
-                   then Tree.Last_Terminal (Tokens (Pair.Index))
-                   else Tokens (Pair.Index)));
-
-               Cache_Pos : constant Buffer_Pos         := 
Token.Char_Region.First;
+               use all type Syntax_Trees.Node_Label;
+               Token  : constant Syntax_Trees.Node_Access :=
+                 (if Pair.Class = Statement_End and then
+                    Tree.Label (Tree.Child (Nonterm, Pair.Index)) = Syntax_Trees.Nonterm
+                  then Tree.Last_Terminal (Tree.Child (Nonterm, Pair.Index))
+                  else Tree.Child (Nonterm, Pair.Index));
+
+               Cache_Pos : constant Buffer_Pos         := Tree.Char_Region (Token, Trailing_Non_Grammar => False).First;
                Cursor    : Navigate_Cache_Trees.Cursor := 
Navigate_Cache_Trees.Find
                  (Data.Navigate_Caches.Iterate, Cache_Pos,
                   Direction => Navigate_Cache_Trees.Unknown);
@@ -1207,7 +1100,7 @@ package body Wisi is
                   declare
                      Cache : Navigate_Cache_Type renames Data.Navigate_Caches 
(Cursor);
                   begin
-                     if Pair.Class = Statement_Start then
+                     if Pair.Class in Statement_Start | Statement_Override then
                         if Start_Set then
                            Cache.Class := Motion;
                         else
@@ -1222,33 +1115,61 @@ package body Wisi is
                      end if;
                      Cache.Statement_ID   := Tree.ID (Nonterm);
                      Cache.Containing_Pos := Containing_Pos;
+                     if Trace_Action > Detail then
+                        Tree.Lexer.Trace.Put_Line
+                          ("   " & Cache.Pos'Image & " nonterm to " & Image (Cache.Statement_ID, Descriptor) &
+                             " containing to" & Image (Cache.Containing_Pos));
+                     end if;
                   end;
                else
                   Cursor := Data.Navigate_Caches.Insert
                     ((Pos            => Cache_Pos,
                       Statement_ID   => Tree.ID (Nonterm),
-                      ID             => Token.ID,
-                      Length         => Length (Token.Char_Region),
-                      Class          => (if Override_Start_Set then 
Statement_Start else Pair.Class),
+                      ID             => Tree.ID (Token),
+                      Length         => Length (Tree.Char_Region (Token, Trailing_Non_Grammar => False)),
+                      Class          =>
+                        (if Override_Start_Set then Statement_Start
+                         else
+                           (case Pair.Class is
+                            when Statement_Start | Statement_Override =>
+                              (if Start_Set then Motion else Statement_Start),
+                            when others => Pair.Class)),
                       Containing_Pos => Containing_Pos,
                       others         => Nil));
+
+                  if Trace_Action > Detail then
+                     declare
+                        Cache : Navigate_Cache_Type renames Data.Navigate_Caches.Constant_Ref (Cursor);
+                     begin
+                        Tree.Lexer.Trace.Put_Line
+                          ("   " & Cache.Pos'Image & " create " & Image (Cache.ID, Descriptor) &
+                             ", containing to " & Image (Data.Navigate_Caches.Constant_Ref (Cursor).Containing_Pos));
+                     end;
+                  end if;
                end if;
 
                Data.End_Positions.Append (Cursor);
 
                if First_Item then
                   First_Item := False;
-                  if Override_Start_Set or Pair.Class = Statement_Start then
+                  if Override_Start_Set or Pair.Class in Statement_Start | Statement_Override then
                      Override_Start_Set := False;
-                     Containing_Pos     := (True, Token.Char_Region.First);
+                     Containing_Pos     := (True, Tree.Char_Region (Token, Trailing_Non_Grammar => False).First);
 
                      --  Set containing on all contained caches
                      declare
                         use Navigate_Cache_Trees;
                         Iterator : constant Navigate_Cache_Trees.Iterator := 
Data.Navigate_Caches.Iterate;
-                        Cursor   : Navigate_Cache_Trees.Cursor            := 
Find_In_Range
-                          (Iterator, Ascending, Nonterm_Tok.Char_Region.First 
+ 1, -- don't set containing on start
-                           Nonterm_Tok.Char_Region.Last);
+
+                        Nonterm_Char_Region : constant Buffer_Region := Tree.Char_Region
+                          (Nonterm, Trailing_Non_Grammar => False);
+
+                        Cursor : Navigate_Cache_Trees.Cursor :=
+                          (if Length (Nonterm_Char_Region) = 0
+                           then No_Element
+                           else Find_In_Range
+                             (Iterator, Ascending, Nonterm_Char_Region.First + 1, -- don't set containing on start
+                              Nonterm_Char_Region.Last));
                      begin
                         loop
                            exit when not Has_Element (Cursor);
@@ -1257,8 +1178,13 @@ package body Wisi is
                            begin
                               if not Cache.Containing_Pos.Set then
                                  Cache.Containing_Pos := Containing_Pos;
+                                 if Trace_Action > Detail then
+                                    Tree.Lexer.Trace.Put_Line
+                                      ("   " & Cache.Pos'Image & " containing to " & Image
+                                         (Data.Navigate_Caches.Constant_Ref (Cursor).Containing_Pos));
+                                 end if;
                               end if;
-                              exit when Nonterm_Tok.Char_Region.Last < Cache.Pos + 1;
+                              exit when Nonterm_Char_Region.Last < Cache.Pos + 1;
                            end;
                            Cursor := Iterator.Next (Cursor);
                         end loop;
@@ -1267,12 +1193,12 @@ package body Wisi is
                end if;
 
                if Pair.Class = Statement_End and Containing_Pos.Set then
-                  Set_End (Data, Containing_Pos.Item, Cache_Pos);
+                  Set_End (Data, Containing_Pos.Item, Cache_Pos, Tree.Lexer.Trace);
                end if;
             end;
 
          else
-            --  Token.Byte_Region is null
+            --  Token.Char_Region is empty or outside action_region
             if First_Item and Pair.Class = Statement_Start then
                Override_Start_Set := True;
             end if;
@@ -1282,66 +1208,63 @@ package body Wisi is
 
    procedure Name_Action
      (Data    : in out Parse_Data_Type;
-      Tree    : in     WisiToken.Syntax_Trees.Tree;
-      Nonterm : in     Valid_Node_Index;
-      Tokens  : in     WisiToken.Valid_Node_Index_Array;
+      Tree    : in     Syntax_Trees.Tree;
+      Nonterm : in     Syntax_Trees.Valid_Node_Access;
       Name    : in     WisiToken.Positive_Index_Type)
-   is begin
-      if not (Name in Tokens'Range) then
-         declare
-            Token : Aug_Token_Const_Ref renames Get_Aug_Token_Const_1 (Tree, 
Tokens (Tokens'First));
-         begin
-            raise Fatal_Error with Error_Message
-              (File_Name => Data.Lexer.File_Name,
-               Line      => Token.Line,
-               Column    => Token.Column,
-               Message   => "wisi-name-action: " & Trimmed_Image 
(Tree.Production_ID (Nonterm)) & " name (" &
-                 Trimmed_Image (Name) & ") not in Tokens range (" & 
SAL.Peek_Type'Image (Tokens'First) & " .." &
-                 SAL.Peek_Type'Image (Tokens'Last) & "); bad grammar action.");
-         end;
+   is
+      use all type SAL.Base_Peek_Type;
+      use all type Syntax_Trees.Node_Label;
+   begin
+      if Name > Tree.Child_Count (Nonterm) then
+         raise Grammar_Error with Tree.Error_Message
+           (Nonterm,
+            "wisi-name-action: " & Trimmed_Image (Tree.Production_ID (Nonterm)) & " name (" &
+              Trimmed_Image (Name) & ") not in child range (1 .." &
+              Tree.Child_Count (Nonterm)'Image & "); bad grammar action.");
       end if;
 
-      if Tree.Is_Virtual (Tokens (Name)) then
-         --  Virtual tokens have the same Char_Region as the token they are
-         --  inserted before (for indent purposes), which leads to Name_Action
-         --  appearing to be applied twice. test/ada_mode-fatal_error_1.adb.
-         --  They also don't appear in the actual buffer, so setting a face or
-         --  completing on them is pointless.
+      if Length (Tree.Char_Region (Tree.Child (Nonterm, Name), Trailing_Non_Grammar => False)) = 0 then
+         --  Token is virtual; it does not appear in the actual buffer, so we
+         --  can't set a text property on it.
+         return;
+      elsif not Overlaps
+        (Tree.Char_Region (Tree.Child (Nonterm, Name), Trailing_Non_Grammar => False), Data.Action_Region_Chars)
+      then
          return;
       end if;
 
+      pragma Assert (Tree.Label (Tree.Child (Nonterm, Name)) in Source_Terminal | Syntax_Trees.Nonterm);
+
       declare
          use Name_Cache_Trees;
-         Name_Token : Aug_Token_Const_Ref renames Get_Aug_Token_Const_1 (Tree, 
Tokens (Name));
+         Name_Char_Region : constant Buffer_Region := Tree.Char_Region
+           (Tree.Child (Nonterm, Name), Trailing_Non_Grammar => False);
          Cursor     : constant Name_Cache_Trees.Cursor := Find
-           (Data.Name_Caches.Iterate, Name_Token.Char_Region.First,
+           (Data.Name_Caches.Iterate, Name_Char_Region.First,
             Direction => Name_Cache_Trees.Unknown);
       begin
-         if Name_Token.Char_Region = Null_Buffer_Region then
-            return;
-         elsif Has_Element (Cursor) then
-            raise Fatal_Error with Error_Message
-              (File_Name            => Data.Lexer.File_Name,
-               Line                 => Name_Token.Line,
-               Column               => Name_Token.Column,
-               Message              => Tree.Image
-                 (Tokens (Name), Data.Descriptor.all,
-                  Node_Numbers      => WisiToken.Trace_Action > Extra,
-                  Include_RHS_Index => WisiToken.Trace_Action > Extra)
+         if Has_Element (Cursor) then
+            raise Fatal_Error with Tree.Error_Message
+              (Tree.Child (Nonterm, Name), Tree.Image
+                 (Node         => Tree.Child (Nonterm, Name),
+                  Node_Numbers => Trace_Action > Extra,
+                  RHS_Index    => Trace_Action > Extra)
                  & ": wisi-name-action: name set twice.");
          else
             if Trace_Action > Detail then
-               Ada.Text_IO.Put_Line
+               Tree.Lexer.Trace.Put_Line
                  ("Name_Action " & Tree.Image
-                    (Nonterm, Data.Descriptor.all,
-                     Node_Numbers      => WisiToken.Trace_Action > Extra,
-                     Include_RHS_Index => WisiToken.Trace_Action > Extra) & " 
" & Tree.Image
-                       (Tokens (Name), Data.Descriptor.all,
-                        Node_Numbers      => WisiToken.Trace_Action > Extra,
-                        Include_RHS_Index => WisiToken.Trace_Action > Extra));
+                    (Nonterm,
+                     Node_Numbers    => Trace_Action > Extra,
+                     RHS_Index       => Trace_Action > Extra) & " " & 
Tree.Image
+                       (Tree.Child (Nonterm, Name),
+                        Node_Numbers => Trace_Action > Extra,
+                        RHS_Index    => Trace_Action > Extra));
             end if;
 
-            Data.Name_Caches.Insert (Name_Token.Char_Region);
+            if Name_Char_Region /= Null_Buffer_Region then
+               Data.Name_Caches.Insert (Name_Char_Region);
+            end if;
          end if;
       end;
    end Name_Action;
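
Name_Action records each name's character region in an ordered cache keyed on the region's start position, and raises "name set twice" when the same start is recorded again. A rough stand-alone analogue of that duplicate check, assuming only the standard Ada.Containers.Ordered_Sets; the wisi cache is a red-black tree holding full buffer regions, so this is an approximation, not the wisi API:

    with Ada.Containers.Ordered_Sets;
    with Ada.Text_IO;
    procedure Name_Cache_Demo is
       --  Key the cache on the region start position, as the wisi name cache does.
       package Pos_Sets is new Ada.Containers.Ordered_Sets (Natural);

       Names : Pos_Sets.Set;

       procedure Record_Name (First : in Natural) is
       begin
          if Names.Contains (First) then
             raise Program_Error with "name set twice at" & Natural'Image (First);
          end if;
          Names.Insert (First);
       end Record_Name;
    begin
       Record_Name (10);
       Record_Name (25);
       Record_Name (10);  --  same start position again; raises Program_Error
    exception
       when Program_Error =>
          Ada.Text_IO.Put_Line ("duplicate name detected");
    end Name_Cache_Demo;
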
@@ -1349,109 +1272,132 @@ package body Wisi is
    procedure Motion_Action
      (Data    : in out Parse_Data_Type;
       Tree    : in     Syntax_Trees.Tree;
-      Nonterm : in     Valid_Node_Index;
-      Tokens  : in     Valid_Node_Index_Array;
+      Nonterm : in     Syntax_Trees.Valid_Node_Access;
       Params  : in     Motion_Param_Array)
    is
       use Navigate_Cache_Trees;
+      Descriptor  : WisiToken.Descriptor renames Tree.Lexer.Descriptor.all;
 
-      Start          : Nil_Buffer_Pos    := (Set => False);
       Iter           : constant Iterator := Data.Navigate_Caches.Iterate;
       Prev_Cache_Cur : Cursor;
-      Cache_Cur      : Cursor;
    begin
-      if WisiToken.Trace_Action > Outline then
-         Ada.Text_IO.Put_Line
-           ("Motion_Action " & Image (Tree.ID (Nonterm), Data.Descriptor.all) 
& " " &
-              Image (Tree.Byte_Region (Nonterm)));
+      if Trace_Action > Outline then
+         Tree.Lexer.Trace.Put_Line
+           ("Motion_Action " & Image (Tree.ID (Nonterm), Descriptor) & " " &
+              Image (Tree.Byte_Region (Nonterm, Trailing_Non_Grammar => 
False)));
       end if;
       for Param of Params loop
-         if Tree.Byte_Region (Tokens (Param.Index)) /= Null_Buffer_Region then
+         if Overlaps
+           (Tree.Char_Region (Tree.Child (Nonterm, Param.Index), Trailing_Non_Grammar => False),
+            Data.Action_Region_Chars)
+         then
             declare
-               use all type WisiToken.Syntax_Trees.Node_Label;
-               Token  : Aug_Token_Const_Ref renames Get_Aug_Token_Const_1 
(Tree, Tokens (Param.Index));
-               Region : constant Buffer_Region := Token.Char_Region;
-               Skip   : Boolean                := False;
+               use all type Syntax_Trees.Node_Label;
+               Token     : constant Syntax_Trees.Valid_Node_Access := Tree.Child (Nonterm, Param.Index);
+               Region    : constant Buffer_Region := Tree.Char_Region (Token, Trailing_Non_Grammar => False);
+               Cache_Cur : Cursor;
+               Skip      : Boolean;
+               Done      : Boolean := False;
             begin
-               if not Start.Set then
-                  Start := (True, Region.First);
-               end if;
-
-               case Tree.Label (Tokens (Param.Index)) is
-               when Shared_Terminal =>
-                  Cache_Cur := Find (Iter, Region.First);
-               when Virtual_Terminal | Virtual_Identifier =>
-                  return;
+               loop
+                  Skip := False;
 
-               when Syntax_Trees.Nonterm =>
-                  if Param.ID = Invalid_Token_ID then
+                  case Tree.Label (Tree.Child (Nonterm, Param.Index)) is
+                  when Source_Terminal =>
                      Cache_Cur := Find (Iter, Region.First);
+                     Done      := True;
 
-                  else
-                     Skip      := True;
-                     Cache_Cur := Find_In_Range (Iter, Ascending, 
Region.First, Region.Last);
-                     loop
-                        exit when not Has_Element (Cache_Cur);
-                        if Data.Navigate_Caches (Cache_Cur).Pos > Region.Last 
then
-                           Cache_Cur := No_Element;
-                           exit;
-
-                        elsif Data.Navigate_Caches (Cache_Cur).ID = Param.ID 
and
-                          not Data.Navigate_Caches (Cache_Cur).Prev_Pos.Set
-                        then
-                           Skip := False;
-                           exit;
-                        end if;
+                  when Virtual_Terminal | Virtual_Identifier =>
+                     Skip := True;
+                     Done := True;
 
-                        Cache_Cur := Next (Iter, Cache_Cur);
-                     end loop;
-                  end if;
-               end case;
+                  when Syntax_Trees.Nonterm =>
+                     if Param.ID = Invalid_Token_ID then
+                        Cache_Cur := Find (Iter, Region.First);
+                        Done      := True;
 
-               if not Skip then
-                  if not Has_Element (Cache_Cur) then
-                     raise Fatal_Error with Error_Message
-                       (File_Name => Data.Lexer.File_Name,
-                        Line      => Token.Line,
-                        Column    => Token.Column,
-                        Message   => "wisi-motion-action: token " &
-                          WisiToken.Image (Token.ID, Data.Descriptor.all) &
-                          " has no cache; add to statement-action for " &
-                          Trimmed_Image (Tree.Production_ID (Nonterm)) & ".");
-                  end if;
+                     else
+                        Skip := True;
 
-                  if Has_Element (Prev_Cache_Cur) then
-                     declare
-                        Cache      : Navigate_Cache_Type renames 
Data.Navigate_Caches (Cache_Cur);
-                        Prev_Cache : Navigate_Cache_Type renames 
Data.Navigate_Caches (Prev_Cache_Cur);
-                     begin
-                        if not Cache.Prev_Pos.Set then
-                           Cache.Prev_Pos := (True, Prev_Cache.Pos);
-                           if WisiToken.Trace_Action > Detail then
-                              Ada.Text_IO.Put_Line ("   " & Cache.Pos'Image & 
" prev to " & Cache.Prev_Pos.Item'Image);
-                           end if;
+                        if not Has_Element (Cache_Cur) then
+                           Cache_Cur := Find_In_Range (Iter, Ascending, 
Region.First, Region.Last);
                         end if;
 
-                        if not Prev_Cache.Next_Pos.Set then
-                           Prev_Cache.Next_Pos := (True, Cache.Pos);
-                           if WisiToken.Trace_Action > Detail then
-                              Ada.Text_IO.Put_Line
-                                ("   " & Prev_Cache.Pos'Image & " next to " & 
Prev_Cache.Next_Pos.Item'Image);
+                        loop
+                           exit when not Has_Element (Cache_Cur);
+                           if Data.Navigate_Caches (Cache_Cur).Pos > 
Region.Last then
+                              Cache_Cur := No_Element;
+                              exit;
+
+                           elsif Data.Navigate_Caches (Cache_Cur).ID = 
Param.ID and
+                             not Data.Navigate_Caches (Cache_Cur).Prev_Pos.Set
+                           then
+                              Skip := False;
+                              exit;
                            end if;
-                        end if;
-                     end;
-                  end if;
 
-                  loop
-                     --  Set Prev_Cache_Cur to last motion cache in nonterm 
chain
-                     exit when not Data.Navigate_Caches 
(Cache_Cur).Next_Pos.Set;
+                           Cache_Cur := Next (Iter, Cache_Cur);
+                        end loop;
+                     end if;
+                  end case;
+
+                  if not Skip then
+                     if not Has_Element (Cache_Cur) then
+                        raise Fatal_Error with Tree.Error_Message
+                          (Tree.Child (Nonterm, Param.Index),
+                           Message   => "wisi-motion-action: token " &
+                             WisiToken.Image (Tree.ID (Token), Descriptor) &
+                             " has no cache; add to statement-action for " &
+                             Trimmed_Image (Tree.Production_ID (Nonterm)) & ".");
+                     end if;
 
-                     Cache_Cur := Find (Iter, Data.Navigate_Caches 
(Cache_Cur).Next_Pos.Item);
-                     pragma Assert (Has_Element (Cache_Cur)); --  otherwise 
there's a bug in this subprogram.
+                     if Has_Element (Prev_Cache_Cur) then
+                        declare
+                           Cache      : Navigate_Cache_Type renames Data.Navigate_Caches (Cache_Cur);
+                           Prev_Cache : Navigate_Cache_Type renames Data.Navigate_Caches (Prev_Cache_Cur);
+                        begin
+                           if Cache.Prev_Pos.Set then
+                              if Trace_Action > Detail then
+                                 Tree.Lexer.Trace.Put_Line
+                                   ("   " & Cache.Pos'Image & " prev already 
at " & Cache.Prev_Pos.Item'Image);
+                              end if;
+                           else
+                              Cache.Prev_Pos := (True, Prev_Cache.Pos);
+                              if Trace_Action > Detail then
+                                 Tree.Lexer.Trace.Put_Line
+                                   ("   " & Cache.Pos'Image & " prev to " & 
Cache.Prev_Pos.Item'Image);
+                              end if;
+                           end if;
 
-                  end loop;
-                  Prev_Cache_Cur := Cache_Cur;
-               end if;
+                           if Prev_Cache.Next_Pos.Set then
+                              if Trace_Action > Detail then
+                                 Tree.Lexer.Trace.Put_Line
+                                   ("   " & Prev_Cache.Pos'Image & " next 
already at " &
+                                      Prev_Cache.Next_Pos.Item'Image);
+                              end if;
+                           else
+                              Prev_Cache.Next_Pos := (True, Cache.Pos);
+                              if Trace_Action > Detail then
+                                 Tree.Lexer.Trace.Put_Line
+                                   ("   " & Prev_Cache.Pos'Image & " next to " 
& Prev_Cache.Next_Pos.Item'Image);
+                              end if;
+                           end if;
+                        end;
+                     end if;
+
+                     if Data.Navigate_Caches (Cache_Cur).Next_Pos.Set then
+                        --  Set Cache_Cur to end of Cache_Cur.Next chain.
+                        --  Handles 'elsif ... then' in if_statement.
+                        loop
+                           Cache_Cur := Find (Iter, Data.Navigate_Caches (Cache_Cur).Next_Pos.Item);
+                           exit when not Data.Navigate_Caches (Cache_Cur).Next_Pos.Set;
+                        end loop;
+                     end if;
+                     Prev_Cache_Cur := Cache_Cur;
+                     Cache_Cur := Iter.Next (Cache_Cur);
+                  end if;
+                  exit when Done or not Has_Element (Cache_Cur);
+               end loop;
             end;
          end if;
       end loop;
@@ -1460,12 +1406,9 @@ package body Wisi is
    procedure Face_Apply_Action
      (Data    : in out Parse_Data_Type;
       Tree    : in     Syntax_Trees.Tree;
-      Nonterm : in     Valid_Node_Index;
-      Tokens  : in     Valid_Node_Index_Array;
+      Nonterm : in     Syntax_Trees.Valid_Node_Access;
       Params  : in     Face_Apply_Param_Array)
    is
-      pragma Unreferenced (Nonterm);
-
       use Face_Cache_Trees;
 
       Iter       : constant Iterator := Data.Face_Caches.Iterate;
@@ -1473,11 +1416,22 @@ package body Wisi is
       Suffix_Cur : Cursor;
    begin
       for Param of Params loop
-         if Tree.Byte_Region (Tokens (Param.Index)) /= Null_Buffer_Region then
+         if Overlaps
+           (Tree.Char_Region (Tree.Child (Nonterm, Param.Index), Trailing_Non_Grammar => False),
+            Data.Action_Region_Chars)
+         then
+            if Trace_Action > Outline then
+               Tree.Lexer.Trace.Put_Line
+                 ("face_apply_action: " & Tree.Image
+                    (Tree.Child (Nonterm, Param.Index), Node_Numbers => True) &
+                    " " & Param.Prefix_Face'Image & " " & 
Param.Suffix_Face'Image);
+            end if;
+
             declare
-               Token : Aug_Token_Const_Ref renames Get_Aug_Token_Const_1 (Tree, Tokens (Param.Index));
+               Token_Char_Region : constant Buffer_Region := Tree.Char_Region
+                 (Tree.Child (Nonterm, Param.Index), Trailing_Non_Grammar => False);
             begin
-               Cache_Cur := Find (Iter, Token.Char_Region.First, Direction => Ascending);
+               Cache_Cur := Find (Iter, Token_Char_Region.First, Direction => Ascending);
                if Has_Element (Cache_Cur) then
                   declare
                      Cache : Face_Cache_Type renames Data.Face_Caches 
(Cache_Cur);
@@ -1493,7 +1447,7 @@ package body Wisi is
                               Suf_Cache : Face_Cache_Type renames 
Data.Face_Caches (Suffix_Cur);
                            begin
                               if Suffix = Suf_Cache.Class and
-                                Inside (Suf_Cache.Char_Region.First, Token.Char_Region)
+                                Contains (Token_Char_Region, Suf_Cache.Char_Region.First)
                               then
                                  Suf_Cache.Face := (True, Param.Suffix_Face);
                               end if;
@@ -1505,7 +1459,7 @@ package body Wisi is
                      end case;
                   end;
                else
-                  Data.Face_Caches.Insert ((Token.Char_Region, Suffix, (True, Param.Suffix_Face)));
+                  Data.Face_Caches.Insert ((Token_Char_Region, Suffix, (True, Param.Suffix_Face)));
                end if;
             end;
          end if;
@@ -1515,25 +1469,27 @@ package body Wisi is
    procedure Face_Apply_List_Action
      (Data    : in out Parse_Data_Type;
       Tree    : in     Syntax_Trees.Tree;
-      Nonterm : in     Valid_Node_Index;
-      Tokens  : in     Valid_Node_Index_Array;
+      Nonterm : in     Syntax_Trees.Valid_Node_Access;
       Params  : in     Face_Apply_Param_Array)
    is
-      pragma Unreferenced (Nonterm);
       use Face_Cache_Trees;
 
       Iter      : constant Iterator := Data.Face_Caches.Iterate;
       Cache_Cur : Cursor;
    begin
       for Param of Params loop
-         if Tree.Byte_Region (Tokens (Param.Index)) /= Null_Buffer_Region then
+         if Overlaps
+           (Tree.Char_Region (Tree.Child (Nonterm, Param.Index), 
Trailing_Non_Grammar => False),
+            Data.Action_Region_Chars)
+         then
             declare
-               Token : Aug_Token_Const_Ref renames Get_Aug_Token_Const_1 (Tree, Tokens (Param.Index));
+               Token_Char_Region : constant Buffer_Region := Tree.Char_Region
+                 (Tree.Child (Nonterm, Param.Index), Trailing_Non_Grammar => False);
             begin
-               Cache_Cur := Find_In_Range (Iter, Ascending, Token.Char_Region.First, Token.Char_Region.Last);
+               Cache_Cur := Find_In_Range (Iter, Ascending, Token_Char_Region.First, Token_Char_Region.Last);
                loop
                   exit when not Has_Element (Cache_Cur) or else
-                    Data.Face_Caches (Cache_Cur).Char_Region.First > Token.Char_Region.Last;
+                    Data.Face_Caches (Cache_Cur).Char_Region.First > Token_Char_Region.Last;
                   declare
                      Cache : Face_Cache_Type renames Data.Face_Caches 
(Cache_Cur);
                   begin
@@ -1555,46 +1511,47 @@ package body Wisi is
    procedure Face_Mark_Action
      (Data    : in out Parse_Data_Type;
       Tree    : in     Syntax_Trees.Tree;
-      Nonterm : in     Valid_Node_Index;
-      Tokens  : in     Valid_Node_Index_Array;
+      Nonterm : in     Syntax_Trees.Valid_Node_Access;
       Params  : in     Face_Mark_Param_Array)
    is
-      pragma Unreferenced (Nonterm);
-
       use Face_Cache_Trees;
 
       Iter      : constant Iterator := Data.Face_Caches.Iterate;
       Cache_Cur : Cursor;
    begin
       for Param of Params loop
-         if Tree.Byte_Region (Tokens (Param.Index)) /= Null_Buffer_Region then
+         if Overlaps
+           (Tree.Char_Region (Tree.Child (Nonterm, Param.Index), 
Trailing_Non_Grammar => False),
+            Data.Action_Region_Chars)
+         then
             declare
-               Token : Aug_Token_Const_Ref renames Get_Aug_Token_Const_1 (Tree, Tokens (Param.Index));
+               Token_Char_Region : constant Buffer_Region := Tree.Char_Region
+                 (Tree.Child (Nonterm, Param.Index), Trailing_Non_Grammar => False);
             begin
-               Cache_Cur := Find (Iter, Token.Char_Region.First, Direction => 
Ascending);
+               Cache_Cur := Find (Iter, Token_Char_Region.First, Direction => 
Ascending);
                if Has_Element (Cache_Cur) then
                   declare
                      Cache : Face_Cache_Type renames Data.Face_Caches 
(Cache_Cur);
                      Other_Cur : Cursor := Find_In_Range
-                       (Iter, Ascending, Cache.Char_Region.Last + 1, Token.Char_Region.Last);
+                       (Iter, Ascending, Cache.Char_Region.Last + 1, Token_Char_Region.Last);
                      To_Delete : Buffer_Pos_Lists.List;
                   begin
                      loop
                         exit when not Has_Element (Other_Cur) or else
-                          Data.Face_Caches (Other_Cur).Char_Region.First > Token.Char_Region.Last;
+                          Data.Face_Caches (Other_Cur).Char_Region.First > Token_Char_Region.Last;
                         To_Delete.Append (Data.Face_Caches 
(Other_Cur).Char_Region.First);
                         Other_Cur := Next (Iter, Other_Cur);
                      end loop;
 
                      Cache.Class            := Param.Class;
-                     Cache.Char_Region.Last := Token.Char_Region.Last;
+                     Cache.Char_Region.Last := Token_Char_Region.Last;
 
                      for Face of To_Delete loop
                         Data.Face_Caches.Delete (Face);
                      end loop;
                   end;
                else
-                  Data.Face_Caches.Insert ((Token.Char_Region, Param.Class, (Set => False)));
+                  Data.Face_Caches.Insert ((Token_Char_Region, Param.Class, (Set => False)));
                end if;
             end;
          end if;
@@ -1604,26 +1561,27 @@ package body Wisi is
    procedure Face_Remove_Action
      (Data    : in out Parse_Data_Type;
       Tree    : in     Syntax_Trees.Tree;
-      Nonterm : in     Valid_Node_Index;
-      Tokens  : in     Valid_Node_Index_Array;
+      Nonterm : in     Syntax_Trees.Valid_Node_Access;
       Params  : in     Face_Remove_Param_Array)
    is
-      pragma Unreferenced (Nonterm);
       use Face_Cache_Trees;
 
       Iter      : constant Iterator := Data.Face_Caches.Iterate;
       Cache_Cur : Cursor;
    begin
       for I of Params loop
-         if Tree.Byte_Region (Tokens (I)) /= Null_Buffer_Region then
+         if Overlaps
+           (Tree.Char_Region (Tree.Child (Nonterm, I), Trailing_Non_Grammar => False), Data.Action_Region_Chars)
+         then
             declare
-               Token     : Aug_Token_Const_Ref renames Get_Aug_Token_Const_1 
(Tree, Tokens (I));
+               Token_Char_Region : constant Buffer_Region := Tree.Char_Region
+                 (Tree.Child (Nonterm, I), Trailing_Non_Grammar => False);
                To_Delete : Buffer_Pos_Lists.List;
             begin
-               Cache_Cur := Find_In_Range (Iter, Ascending, Token.Char_Region.First, Token.Char_Region.Last);
+               Cache_Cur := Find_In_Range (Iter, Ascending, Token_Char_Region.First, Token_Char_Region.Last);
                loop
                   exit when not Has_Element (Cache_Cur) or else
-                    Data.Face_Caches (Cache_Cur).Char_Region.First > Token.Char_Region.Last;
+                    Data.Face_Caches (Cache_Cur).Char_Region.First > Token_Char_Region.Last;
                   To_Delete.Append (Data.Face_Caches 
(Cache_Cur).Char_Region.First);
                   Cache_Cur := Next (Iter, Cache_Cur);
                end loop;
@@ -1662,19 +1620,87 @@ package body Wisi is
       return "(" & Simple_Indent_Param_Label'Image (Item.Label) &
         (case Item.Label is
          when None => "",
-         when Int => Integer'Image (Item.Int_Delta),
-         when Anchored_Label => Positive_Index_Type'Image (Item.Anchored_Index) & "," &
-              Integer'Image (Item.Anchored_Delta),
+         when Block | Int => ", " & Trimmed_Image (Item.Int_Delta),
+         when Simple_Param_Anchored => Positive_Index_Type'Image (Item.Anchored_Index) & "," &
+           Integer'Image (Item.Anchored_Delta),
          when Language => "<language_function>") & ")";
    end Image;
 
+   function Add_Simple_Indent_Param (Left, Right : in Simple_Indent_Param) return Simple_Indent_Param
+   is begin
+      case Left.Label is
+      when None =>
+         return Right;
+
+      when Block =>
+         case Right.Label is
+         when None =>
+            return Left;
+
+         when Block | Int =>
+            return (Block, Left.Int_Delta + Right.Int_Delta);
+
+         when Anchored_0 =>
+            return (Anchored_0, Right.Anchored_Index, Left.Int_Delta + Right.Anchored_Delta);
+
+         when Anchored_1 =>
+            return (Anchored_1, Right.Anchored_Index, Left.Int_Delta + Right.Anchored_Delta);
+
+         when Language =>
+            raise Grammar_Error with "adding incompatible indent params";
+         end case;
+
+      when Int =>
+         case Right.Label is
+         when None =>
+            return Left;
+
+         when Block =>
+            return (Block, Left.Int_Delta + Right.Int_Delta);
+
+         when Int =>
+            return (Int, Left.Int_Delta + Right.Int_Delta);
+
+         when Anchored_0 =>
+            return (Anchored_0, Right.Anchored_Index, Left.Int_Delta + Right.Anchored_Delta);
+
+         when Anchored_1 =>
+            return (Anchored_1, Right.Anchored_Index, Left.Int_Delta + Right.Anchored_Delta);
+
+         when Language =>
+            raise Grammar_Error with "adding incompatible indent params";
+         end case;
+
+      when Simple_Param_Anchored =>
+         case Right.Label is
+         when None =>
+            return Left;
+
+         when Block | Int =>
+            case Simple_Param_Anchored'(Left.Label) is
+            when Anchored_0 =>
+               return (Anchored_0, Left.Anchored_Index, Left.Anchored_Delta + Right.Int_Delta);
+            when Anchored_1 =>
+               return (Anchored_1, Left.Anchored_Index, Left.Anchored_Delta + Right.Int_Delta);
+            end case;
+
+         when Simple_Param_Anchored | Language =>
+            raise Grammar_Error with "adding incompatible indent params";
+         end case;
+
+      when Language =>
+         raise Grammar_Error with "adding incompatible indent params";
+      end case;
+   end Add_Simple_Indent_Param;
+
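
Add_Simple_Indent_Param combines two simple indent parameters: None is the identity, Block/Int deltas add numerically, a numeric delta added to an anchored parameter is folded into the anchored delta, and combinations involving Language or two anchored parameters raise Grammar_Error. A simplified stand-alone mirror of those rules, using a three-way label of its own rather than the wisi type:

    with Ada.Text_IO;
    procedure Indent_Param_Demo is
       --  Simplified mirror of Simple_Indent_Param, just enough to show how
       --  numeric and anchored deltas combine.
       type Param_Label is (None, Int, Anchored);
       type Param (Lab : Param_Label := None) is record
          case Lab is
             when None =>
                null;
             when Int =>
                Int_Delta : Integer;
             when Anchored =>
                Index          : Positive;
                Anchored_Delta : Integer;
          end case;
       end record;

       function Add (Left, Right : in Param) return Param is
       begin
          if Left.Lab = None then
             return Right;
          elsif Right.Lab = None then
             return Left;
          elsif Left.Lab = Int and Right.Lab = Int then
             return (Int, Left.Int_Delta + Right.Int_Delta);
          elsif Left.Lab = Int and Right.Lab = Anchored then
             return (Anchored, Right.Index, Left.Int_Delta + Right.Anchored_Delta);
          elsif Left.Lab = Anchored and Right.Lab = Int then
             return (Anchored, Left.Index, Left.Anchored_Delta + Right.Int_Delta);
          else
             raise Constraint_Error with "adding incompatible indent params";
          end if;
       end Add;

       Result : constant Param := Add ((Int, 2), (Anchored, 1, 3));
    begin
       --  (Int, 2) + (Anchored, 1, 3) folds the numeric delta into the anchored
       --  delta, giving (Anchored, 1, 5); compare the Int / Anchored_0 branch above.
       Ada.Text_IO.Put_Line (Integer'Image (Result.Anchored_Delta));
    end Indent_Param_Demo;
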
    function Image (Item : in Indent_Param) return String
    is begin
       return "(" & Indent_Param_Label'Image (Item.Label) & ", " &
         (case Item.Label is
          when Simple => Image (Item.Param),
          when Hanging_Label =>
-            Image (Item.Hanging_Delta_1) & ", "  & Image (Item.Hanging_Delta_2) & ")");
+            Image (Item.Hanging_Delta_1) & ", "  & Image (Item.Hanging_Delta_2))
+        & ")";
    end Image;
 
    function Image (Item : in Indent_Pair) return String
@@ -1688,57 +1714,115 @@ package body Wisi is
    procedure Indent_Action_0
      (Data    : in out Parse_Data_Type'Class;
       Tree    : in     Syntax_Trees.Tree;
-      Nonterm : in     Valid_Node_Index;
-      Tokens  : in     Valid_Node_Index_Array;
+      Nonterm : in     Syntax_Trees.Valid_Node_Access;
       Params  : in     Indent_Param_Array)
-   is begin
+   is
+      use all type SAL.Base_Peek_Type;
+
+      function In_Line_Region (Node : in Syntax_Trees.Valid_Node_Access) return Boolean
+      is
+         Node_Region : constant Line_Region := Tree.Line_Region (Node, Trailing_Non_Grammar => True);
+      begin
+         return Node_Region.First > Node_Region.Last -- null region is always in active region
+           or else
+           (Contains (Data.Action_Region_Lines, Node_Region.First) or
+              Contains (Data.Action_Region_Lines, Node_Region.Last) or
+              (Node_Region.First < Data.Action_Region_Lines.First and
+                 Node_Region.Last > Data.Action_Region_Lines.Last));
+      end In_Line_Region;
+
+   begin
       if Trace_Action > Outline then
-         Ada.Text_IO.Put_Line (";; indent_action_0: " & Tree.Image (Nonterm, 
Data.Descriptor.all));
+         Tree.Lexer.Trace.Put_Line
+           ("indent_action_0 " & Tree.Image
+              (Nonterm, RHS_Index => True, Node_Numbers => True, Augmented => 
True, Line_Numbers => True));
       end if;
 
-      for I in Tokens'Range loop
-         if (Tree.Is_Virtual_Terminal (Tokens (I)) or
-               Tree.Byte_Region (Tokens (I)) /= Null_Buffer_Region) and
-           I in Params'Range -- in some translated EBNF, not every token has 
an indent param
+      for I in 1 .. Tree.Child_Count (Nonterm) loop
+         if not (Tree.SOI = Tree.Child (Nonterm, I) or Tree.EOI = Tree.Child (Nonterm, I)) and then
+           --  We see these in a partial parse.
+
+           (I in Params'Range and then
+              --  In some translated EBNF, not every token has an indent param.
+              In_Line_Region (Tree.Child (Nonterm, I)))
          then
             declare
-               use all type SAL.Base_Peek_Type;
-               Tree_Token        : constant Valid_Node_Index := Tokens (I);
-               Token             : Aug_Token_Const_Ref renames 
Get_Aug_Token_Const_1 (Tree, Tree_Token);
-               Pair              : Indent_Pair renames Params (I);
-               Code_Delta        : Delta_Type;
+               Child : constant Syntax_Trees.Valid_Node_Access := Tree.Child (Nonterm, I);
+
+               Indenting : constant Wisi.Indenting := Compute_Indenting (Data, Tree, Child);
+
+               Code_Delta : constant Delta_Type :=
+                 (if Indenting.Code = Null_Line_Region
+                  then Null_Delta
+                  else Indent_Compute_Delta
+                    (Data, Tree, Nonterm, Params (I).Code_Delta, Child, Indenting_Comment => False));
+
+               Controlling_Token : Syntax_Trees.Node_Access;
                Comment_Param     : Indent_Param;
-               Comment_Param_Set : Boolean                                := 
False;
+               Comment_Param_Set : Boolean := False;
                Comment_Delta     : Delta_Type;
             begin
                if Trace_Action > Detail then
-                  Ada.Text_IO.Put_Line
-                    (";; indent_action_0 a: " & Tree.Image (Tree_Token, 
Data.Descriptor.all) & ": " & Image (Pair));
+                  Tree.Lexer.Trace.Put_Line
+                    ("...    code " & Tree.Image
+                       (Child,
+                        Node_Numbers => Trace_Action > Extra,
+                        Line_Numbers => True) & ": " &
+                       Image (Params (I).Code_Delta));
                end if;
 
-               if Token.First_Indent_Line /= Invalid_Line_Number then
-                  Code_Delta := Indent_Compute_Delta
-                    (Data, Tree, Tokens, Pair.Code_Delta, Tree_Token, 
Indenting_Comment => False);
-
-                  Indent_Token_1 (Data, Tree, Token, Code_Delta, 
Indenting_Comment => False);
+               if Code_Delta /= Null_Delta then
+                  Indent_Token_1
+                    (Data, Tree,
+                     Line_Region       => Indenting.Code,
+                     Delta_Indent      => Code_Delta,
+                     Indenting_Comment => None);
                end if;
 
-               if Token.First_Trailing_Comment_Line /= Invalid_Line_Number then
-                  if Pair.Comment_Present then
-                     Comment_Param     := Pair.Comment_Delta;
+               if Indenting.Comment /= Null_Line_Region then
+                  if Params (I).Comment_Present then
+                     Comment_Param     := Params (I).Comment_Delta;
+                     Controlling_Token := Child;
                      Comment_Param_Set := True;
 
-                  elsif I < Tokens'Last then
+                  elsif I < Tree.Child_Count (Nonterm) then
                      Comment_Param     := Params (I + 1).Code_Delta;
+                     Controlling_Token := Tree.Child (Nonterm, I + 1);
                      Comment_Param_Set := True;
-
                   end if;
 
                   if Comment_Param_Set then
-                     Comment_Delta := Indent_Compute_Delta
-                       (Data, Tree, Tokens, Comment_Param, Tree_Token, 
Indenting_Comment => True);
+                     if Trace_Action > Detail then
+                        Tree.Lexer.Trace.Put_Line
+                          ("... comment " & Tree.Image
+                             (Controlling_Token,
+                              Node_Numbers => Trace_Action > Extra,
+                              Line_Numbers => True) & ": " &
+                             Image (Comment_Param));
+                     end if;
 
-                     Indent_Token_1 (Data, Tree, Token, Comment_Delta, 
Indenting_Comment => True);
+                     Comment_Delta := Indent_Compute_Delta
+                       (Data, Tree, Nonterm, Comment_Param,
+                        Indenting_Token   => Controlling_Token,
+                        Indenting_Comment => True);
+
+                     if Comment_Delta /= Null_Delta then
+                        Indent_Token_1
+                          (Data, Tree,
+                           Line_Region       => Indenting.Comment,
+                           Delta_Indent      => Comment_Delta,
+                           Controlling_Delta => To_Delta
+                             (Data.Indents
+                                (Line_Number_Type'
+                                   (if Params (I).Comment_Present
+                                    then -- ada_mode-conditional_expressions.adb case expression for K, if
+                                          --  expression blank line.
+                                       Tree.Line_Region (Controlling_Token, Trailing_Non_Grammar => True).Last
+
+                                    else --  ada_mode-conditional_expressions.adb case expression for K.
+                                       Tree.Line_Region (Controlling_Token, Trailing_Non_Grammar => True).First))),
+                           Indenting_Comment => (if Params (I).Comment_Present then Trailing else Leading));
+                     end if;
                   end if;
                end if;
             end;
@@ -1746,292 +1830,592 @@ package body Wisi is
       end loop;
    end Indent_Action_0;
 
-   procedure Indent_Action_1
-     (Data    : in out Parse_Data_Type'Class;
-      Tree    : in     Syntax_Trees.Tree;
-      Nonterm : in     Valid_Node_Index;
-      Tokens  : in     Valid_Node_Index_Array;
-      N       : in     Positive_Index_Type;
-      Params  : in     Indent_Param_Array)
-   is
-      use all type Syntax_Trees.Node_Label;
-   begin
-      for I in Tokens'First .. N loop
-         declare
-            Aug : Aug_Token_Const_Ref renames Wisi.Get_Aug_Token_Const_1 
(Tree, Tokens (I));
-         begin
-            if Tree.Label (Tokens (I)) /= Virtual_Terminal and then Aug.First 
then
-               Indent_Action_0 (Data, Tree, Nonterm, Tokens, Params);
-               return;
-            end if;
-         end;
-      end loop;
-   end Indent_Action_1;
-
    function Indent_Hanging_1
      (Data              : in out Parse_Data_Type;
       Tree              : in     Syntax_Trees.Tree;
-      Tokens            : in     Valid_Node_Index_Array;
-      Tree_Indenting    : in     Valid_Node_Index;
+      Nonterm           : in     Syntax_Trees.Valid_Node_Access;
+      Indenting_Token   : in     Syntax_Trees.Valid_Node_Access;
       Indenting_Comment : in     Boolean;
       Delta_1           : in     Simple_Indent_Param;
       Delta_2           : in     Simple_Indent_Param;
-      Option            : in     Boolean;
-      Accumulate        : in     Boolean)
+      Label             : in     Hanging_Label)
      return Delta_Type
    is
-      Indenting_Token : Aug_Token_Const_Ref renames Get_Aug_Token_Const_1 (Tree, Tree_Indenting);
+      Indenting_Line_Region : constant WisiToken.Line_Region := Tree.Line_Region
+        (Indenting_Token, Trailing_Non_Grammar => True);
+
+      Indenting : constant Wisi.Indenting := Compute_Indenting (Data, Tree, Indenting_Token);
+
+      function Compute_Hanging_2 return Simple_Indent_Param
+      is begin
+         --  WORKAROUND: GNAT Community 2020 gives a bogus compile error when
+         --  we try to inline this with an if_expression.
+         if Indenting_Line_Region.First = Indenting.Code.First then
+            return Add_Simple_Indent_Param (Delta_1, Delta_2);
+         else
+            return Delta_2;
+         end if;
+      end Compute_Hanging_2;
+
    begin
-      if Indenting_Comment then
-         return Indent_Compute_Delta
-           (Data, Tree, Tokens, (Simple, Delta_1), Tree_Indenting, 
Indenting_Comment);
-      else
-         return
-           (Hanging,
-            Hanging_First_Line  => Indenting_Token.Line,
-            Hanging_Paren_State => Indenting_Token.Paren_State,
-            Hanging_Delta_1     => Indent_Compute_Delta
-              (Data, Tree, Tokens, (Simple, Delta_1), Tree_Indenting, 
Indenting_Comment).Simple_Delta,
-            Hanging_Delta_2     =>
-              (if (not Option) or
-                 Indenting_Token.Line = Indenting_Token.First_Indent_Line -- 
first token in tok is first on line
-               then Indent_Compute_Delta
-                 (Data, Tree, Tokens, (Simple, Delta_2), Tree_Indenting, 
Indenting_Comment).Simple_Delta
-               else Indent_Compute_Delta
-                 (Data, Tree, Tokens, (Simple, Delta_1), Tree_Indenting, 
Indenting_Comment).Simple_Delta),
-            Hanging_Accumulate => Accumulate);
-      end if;
+      return Result : Delta_Type :=
+        (Hanging,
+         Hanging_First_Line  => Indenting_Line_Region.First,
+         Hanging_Delta_1     => Indent_Compute_Delta
+           (Data, Tree, Nonterm,
+            (Simple,
+             (if Indenting_Line_Region.First = Indenting.Code.First
+              then Delta_1
+              else (Label => None))),
+            Indenting_Token, Indenting_Comment).Simple_Delta,
+         Hanging_Delta_2     =>
+           Indent_Compute_Delta
+             (Data, Tree, Nonterm,
+              (Simple,
+               (case Label is
+                when Hanging_0 => Delta_2,
+                when Hanging_1 =>
+                  (if Indenting_Line_Region.First = Indenting.Code.First
+                   then Delta_2 else Delta_1),
+                when Hanging_2 =>
+                   Compute_Hanging_2)),
+              Indenting_Token, Indenting_Comment)
+             .Simple_Delta)
+      do
+         --  Controlling_Token_Line for Delta_2 is the first non-comment
+         --  line indented by Delta_2.
+         if Label = Hanging_1 and
+           Indenting_Line_Region.First /= Indenting.Code.First
+         then
+            --  Only using Delta_1
+            null;
+         else
+            for Line in Indenting.Code.First +
+              (if Indenting_Line_Region.First = Indenting.Code.First then 1 else 0)
+              .. Indenting.Code.Last
+            loop
+               if Tree.Line_Begin_Token (Line) /= Syntax_Trees.Invalid_Node_Access then
+                  Result.Hanging_Delta_2.Controlling_Token_Line := Line;
+                  exit;
+               end if;
+            end loop;
+         end if;
+      end return;
    end Indent_Hanging_1;
 
-   procedure Put_Language_Action
-     (Data    : in Parse_Data_Type;
-      Content : in String)
+   package Node_Access_Address is new System.Address_To_Access_Conversions (WisiToken.Syntax_Trees.Node);
+
+   function Address_Image (Item : in WisiToken.Syntax_Trees.Valid_Node_Access) return String
    is
-      pragma Unreferenced (Data);
+      use Ada.Strings, Ada.Strings.Fixed;
+      Int : constant System.Storage_Elements.Integer_Address := System.Storage_Elements.To_Integer
+        (Node_Access_Address.To_Address (Node_Access_Address.Object_Pointer (Item)));
    begin
-      Ada.Text_IO.Put_Line ("[" & Language_Action_Code & Content & "]");
-   end Put_Language_Action;
+      return """" & Trim (Int'Image, Both) & """";
+   end Address_Image;
 
-   procedure Put (Data : in out Parse_Data_Type; Parser : in Parse.Base_Parser'Class)
+   function To_Node_Access (Item : in String) return WisiToken.Syntax_Trees.Valid_Node_Access
    is
-      use all type Ada.Containers.Count_Type;
+      Int : constant System.Address := System.Storage_Elements.To_Address
+        (System.Storage_Elements.Integer_Address'Value (Item));
+   begin
+      return WisiToken.Syntax_Trees.Valid_Node_Access (Node_Access_Address.To_Pointer (Int));
+   end To_Node_Access;
 
-      Last_Term : constant Node_Index := Parser.Tree.Last_Terminal 
(Parser.Tree.Root);
+   procedure Query_Tree
+     (Data  : in Parse_Data_Access_Constant;
+      Tree  : in WisiToken.Syntax_Trees.Tree;
+      Query : in Wisi.Query)
+   is
+      --  See wisi-parse-common.el wisi-parse-tree query for definition of
+      --  queries.
+      use Syntax_Trees;
+   begin
+      if Trace_Action > Outline then
+         Tree.Lexer.Trace.Put_Line ("post-parse tree:");
+         Tree.Print_Tree
+           (Non_Grammar  => True,
+            Line_Numbers => True);
+      end if;
+      case Query.Label is
+      when Point_Query =>
+         declare
+            --  IMPROVEME: if Query.Char_Point is in the whitespace between a
+            --  token A non_grammar and the next grammar token B, this will return
+            --  the next grammar token, which is not right; wisi-parse-tree says
+            --  it should return A. Let's see if that's actually a problem. This
+            --  is better than setting After False; then it would return
+            --  Invalid_Node_Access.
+            Terminal : constant Node_Access := Tree.Find_Char_Pos
+              (Query.Char_Point,
+               Trailing_Non_Grammar => True,
+               After                => True);
+         begin
+            if Terminal = Invalid_Node_Access then
+               Ada.Text_IO.Put_Line ("[" & Query_Tree_Code & Query_Label'Pos (Query.Label)'Image & " nil]");
+               return;
+            else
+               declare
+                  Result   : Node_Access :=
+                    (case Point_Query'(Query.Label) is
+                     when Node => Terminal,
+                     when Containing_Statement => Tree.Find_Ancestor (Terminal, To_Array (Data.Statement_IDs)),
+                     when Ancestor => Tree.Find_Ancestor (Terminal, To_Array (Query.IDs)));
+                  Char_Region : Buffer_Region;
+               begin
+                  case Point_Query'(Query.Label) is
+                  when Node | Ancestor =>
+                     if Result = Invalid_Node_Access then
+                        --  Node is tree.root, or ancestor not found.
+                        Ada.Text_IO.Put_Line ("[" & Query_Tree_Code & 
Query_Label'Pos (Query.Label)'Image & " nil]");
+                        return;
+                     end if;
 
-      function Get_Last_Char_Pos return Buffer_Pos
-      is begin
+                     Char_Region := Tree.Char_Region (Result, 
Trailing_Non_Grammar => False);
 
-         if Last_Term = Invalid_Node_Index then
-            --  All comments, or empty
-            if Data.Leading_Non_Grammar.Length > 0 then
-               return Data.Leading_Non_Grammar 
(Data.Leading_Non_Grammar.Last_Index).Char_Region.Last;
-            else
-               return Buffer_Pos'First;
+                  when Containing_Statement =>
+                     loop
+                        if Result = Invalid_Node_Access then
+                           --  ancestor not found.
+                           Ada.Text_IO.Put_Line ("[" & Query_Tree_Code & 
Query_Label'Pos (Query.Label)'Image & " nil]");
+                           return;
+                        end if;
+
+                        Char_Region := Tree.Char_Region (Result, 
Trailing_Non_Grammar => False);
+
+                        if Query.Char_Point = Char_Region.First and
+                          Tree.ID (Result) /= Tree.Lexer.Descriptor.Accept_ID
+                        then
+                           --  Find container of this statement
+                           Result := Tree.Find_Ancestor (Result, To_Array 
(Data.Statement_IDs));
+                        else
+                           exit;
+                        end if;
+                     end loop;
+                  end case;
+
+                  Ada.Text_IO.Put_Line
+                    ("[" & Query_Tree_Code &
+                       Query_Label'Pos (Query.Label)'Image & " " &
+                       Address_Image (Result) & " " &
+                       Tree.ID (Result)'Image & " " &
+                       Char_Region.First'Image & " " &
+                       Buffer_Pos'Image (Char_Region.Last + 1) & -- Emacs 
region end convention.
+                       "]");
+               end;
             end if;
+         end;
+
+      when Parent | Child =>
+         declare
+            Result   : constant Node_Access :=
+              (if Query.Label = Parent
+               then Tree.Parent (Query.Node, Query.N)
+               else Tree.Child (Query.Node, Positive_Index_Type (Query.N)));
+            Char_Region : constant Buffer_Region := Tree.Char_Region (Result, 
Trailing_Non_Grammar => False);
+         begin
+            Ada.Text_IO.Put_Line
+              ("[" & Query_Tree_Code &
+                 Query_Label'Pos (Query.Label)'Image & " " &
+                 Address_Image (Result) & " " &
+                 Tree.ID (Result)'Image & " " &
+                 Char_Region.First'Image & " " &
+                 Char_Region.Last'Image & "]");
+         end;
+
+      when Print =>
+         Tree.Print_Tree (Line_Numbers => True, Non_Grammar => True);
+         if Tree.Editable then
+            WisiToken.Parse.Put_Errors (Tree);
          else
             declare
-               Aug : Aug_Token_Const_Ref renames Get_Aug_Token_Const_1 
(Parser.Tree, Last_Term);
+               Stream : Stream_ID := Tree.First_Parse_Stream;
             begin
-               if Aug.Non_Grammar.Length = 0 then
-                  return Aug.Char_Region.Last;
-               else
-                  return Aug.Non_Grammar 
(Aug.Non_Grammar.Last_Index).Char_Region.Last;
-               end if;
+               loop
+                  exit when Stream = Invalid_Stream_ID;
+                  WisiToken.Parse.Put_Errors (Tree, Stream);
+                  Tree.Next_Parse_Stream (Stream);
+               end loop;
             end;
          end if;
-      end Get_Last_Char_Pos;
 
-      Last_Char_Pos : constant Buffer_Pos := Get_Last_Char_Pos;
-
-      function Get_Last_Line return Line_Number_Type
-      is begin
-         for I in Data.Line_Begin_Char_Pos.First_Index .. 
Data.Line_Begin_Char_Pos.Last_Index loop
-            if Data.Line_Begin_Char_Pos (I) = Invalid_Buffer_Pos then
-               raise SAL.Programmer_Error with "line_begin_pos" & 
Line_Number_Type'Image (I) & " invalid";
-            end if;
-            if Data.Line_Begin_Char_Pos (I) > Last_Char_Pos then
-               if I > Line_Number_Type'First then
-                  return I - 1;
-               else
-                  return I;
-               end if;
+      when Dump =>
+         declare
+            use Ada.Directories;
+            File_Name : constant String := -Query.File_Name;
+            Normalized_Tree : WisiToken.Syntax_Trees.Tree;
+         begin
+            if Exists (File_Name) then
+               Delete_File (File_Name);
             end if;
-         end loop;
-         return Data.Line_Begin_Char_Pos.Last_Index;
-      end Get_Last_Line;
 
+            WisiToken.Syntax_Trees.Copy_Tree
+              (Source      => Tree,
+               Destination => Normalized_Tree,
+               User_Data   => Syntax_Trees.User_Data_Access_Constant (Data));
+            Normalized_Tree.Put_Tree (-Query.File_Name);
+         end;
+      end case;
+   end Query_Tree;
+
+   procedure Put_Language_Action
+     (Data    : in Parse_Data_Type;
+      Content : in String)
+   is
+      pragma Unreferenced (Data);
    begin
-      if Trace_Action > Outline then
-         Ada.Text_IO.Put_Line
-           (";; last_char_pos:" & Buffer_Pos'Image (Last_Char_Pos + 1) &
-              " last_line:" & Line_Number_Type'Image (Get_Last_Line));
-      end if;
+      Ada.Text_IO.Put_Line ("[" & Language_Action_Code & Content & "]");
+   end Put_Language_Action;
 
+   procedure Put (Data : in out Parse_Data_Type; Parser : in 
Parse.Base_Parser'Class)
+   is
+      Tree : WisiToken.Syntax_Trees.Tree renames Parser.Tree;
+   begin
       --  +1 to match Emacs region
-      Ada.Text_IO.Put_Line ('[' & End_Code & Buffer_Pos'Image (Last_Char_Pos + 
1) & ']');
+      Ada.Text_IO.Put_Line
+        ('[' & End_Code & Buffer_Pos'Image (Tree.Char_Region (Tree.EOI, 
Trailing_Non_Grammar => False).Last + 1) & ']');
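+      --  For example, if the last character in the parsed text is at char
+      --  position 100, the end position sent is 101; Emacs region ends are
+      --  exclusive.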
 
+      --  Caches are populated by Execute_Actions.
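+      --  Only caches inside the action region are sent; each container is
+      --  cleared after it is output.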
       case Data.Post_Parse_Action is
       when Navigate =>
          for Cache of Data.Navigate_Caches loop
-            Put (Cache);
+            if Contains (Data.Action_Region_Chars, Cache.Pos) then
+               Put (Cache);
+            end if;
          end loop;
+         Data.Navigate_Caches.Clear;
+
          for Cache of Data.Name_Caches loop
-            Put (Cache);
+            if Contains (Outer => Data.Action_Region_Chars, Inner => Cache) 
then
+               Put (Cache);
+            end if;
          end loop;
+         Data.Name_Caches.Clear;
 
       when Face =>
          for Cache of Data.Face_Caches loop
-            Put (Cache);
-         end loop;
-
-      when Indent =>
-
-         Resolve_Anchors (Data);
-
-         if Trace_Action > Outline then
-            Ada.Text_IO.Put_Line (";; indent leading non_grammar");
-         end if;
-         for Token of Data.Leading_Non_Grammar loop
-            if Token.First then
-               Put (Token.Line, (Int, Data.Begin_Indent));
+            if Overlaps (Cache.Char_Region, Data.Action_Region_Chars) then
+               Put (Cache);
             end if;
          end loop;
+         Data.Face_Caches.Clear;
 
-         --  It may be that not all lines in Data.Indents were parsed.
-         if Trace_Action > Outline then
-            Ada.Text_IO.Put_Line (";; indent grammar");
-         end if;
-         for I in Data.Indents.First_Index .. Get_Last_Line loop
-            Put (I, Data.Indents (I));
+      when Indent =>
+         Resolve_Anchors (Data, Tree);
+         for Line in Data.Action_Region_Lines.First .. 
Data.Action_Region_Lines.Last loop
+            Put (Tree, Line, Data.Indents (Line));
          end loop;
+         Data.Indents.Clear;
       end case;
    end Put;
 
-   procedure Put (Lexer_Errors : in Lexer.Error_Lists.List)
+   procedure Put (Item : in WisiToken.Parse.Lexer_Error)
    is begin
-      for Item of Lexer_Errors loop
-         Ada.Text_IO.Put_Line
-           ('[' & Lexer_Error_Code & Buffer_Pos'Image (Item.Char_Pos) &
-              " ""lexer error" &
-              (if Item.Recover_Char (1) = ASCII.NUL
-               then """"
-               elsif Item.Recover_Char (1) = '"'
-               then """ ?\"""
-               else """ ?" & Item.Recover_Char (1)) &
-              "]");
-         if Item.Recover_Char (2) /= ASCII.NUL then
-            raise SAL.Programmer_Error with "lexer error with non-ascii or 
multiple repair char";
-         end if;
-      end loop;
+      Ada.Text_IO.Put_Line
+        ('[' & Lexer_Error_Code & Buffer_Pos'Image (Item.Error.Char_Pos) &
+           " ""lexer error" &
+           (if Item.Error.Recover_Char (1) = ASCII.NUL
+            then """"
+            elsif Item.Error.Recover_Char (1) = '"'
+            then """ ?\"""
+            else """ ?" & Item.Error.Recover_Char (1)) &
+           "]");
+      if Item.Error.Recover_Char (2) /= ASCII.NUL then
+         raise SAL.Programmer_Error with "lexer error with non-ascii or 
multiple repair char";
+      end if;
    end Put;
 
-   procedure Put
-     (Data         : in Parse_Data_Type;
-      Lexer_Errors : in Lexer.Error_Lists.List;
-      Parse_Errors : in Parse.LR.Parse_Error_Lists.List;
-      Tree         : in Syntax_Trees.Tree)
+   procedure Put_Errors (Tree : in Syntax_Trees.Tree)
    is
-      use all type SAL.Base_Peek_Type;
       use Ada.Text_IO;
-      use Semantic_Checks;
+      Descriptor  : WisiToken.Descriptor renames Tree.Lexer.Descriptor.all;
 
-      function Safe_Pos (Node : in Valid_Node_Index) return Buffer_Pos
-      is
-         --  Return a reasonable position for the error at Node.
-         --
-         --  In a successful parse with error recovery, Node is a terminal with
-         --  an augmented token in Data.Terminals, so that is the first
-         --  choice.
-         --
-         --  If this is an error due to a bad recovery, Node may be a virtual
-         --  token, with no position information, so we try to get information
-         --  from its parent.
-         use Syntax_Trees;
+      function Safe_Pos (Token : in Syntax_Trees.Node_Access) return Buffer_Pos
+      is begin
+         if Token = Syntax_Trees.Invalid_Node_Access then
+            return Buffer_Pos'First;
+         else
+            declare
+               Result : constant Buffer_Region := Tree.Char_Region (Token, 
Trailing_Non_Grammar => False);
+            begin
+               if Result = Null_Buffer_Region then
+                  return Buffer_Pos'First;
+               else
+                  return Result.First;
+               end if;
+            end;
+         end if;
+      end Safe_Pos;
 
-         N : Node_Index := Node;
-      begin
-         loop
-            if Tree.Label (N) /= Virtual_Terminal then
-               declare
-                  Ref : Aug_Token_Const_Ref renames Get_Aug_Token_Const_1 
(Tree, N);
-               begin
-                  if Ref.Char_Region /= Null_Buffer_Region then
-                     return Ref.Element.Char_Region.First;
-                  end if;
+      procedure Handle_Error
+        (Err        : in Syntax_Trees.Error_Data'Class;
+         Error_Node : in Syntax_Trees.Valid_Node_Access)
+      is
+         Error_Pos : constant Buffer_Pos := Tree.Char_Region (Error_Node, 
Trailing_Non_Grammar => False).First;
 
-               end;
+         procedure Put_Recover (Item : in 
WisiToken.Parse.Recover_Op_Nodes_Arrays.Vector)
+         is begin
+            Put (Item, Error_Pos, Tree);
+            if Trace_Action > Outline  or WisiToken.Debug_Mode then
+               Tree.Lexer.Trace.Put_Line ("recover: " & Parse.Image (Item, 
Tree));
             end if;
-            N := Tree.Parent (N);
-            exit when N = Invalid_Node_Index;
-         end loop;
-         return Buffer_Pos'First;
-      end Safe_Pos;
+         end Put_Recover;
 
-      function Safe_Pos (Token : in Recover_Token) return Buffer_Pos
-      is begin
-         if Token.Name /= Null_Buffer_Region then
-            return Token.Name.First;
+      begin
+         if Err in WisiToken.Parse.Lexer_Error then
+            Put (WisiToken.Parse.Lexer_Error (Err));
 
-         elsif Token.Byte_Region = Null_Buffer_Region then
-            return Buffer_Pos'First;
+         elsif Err in WisiToken.Parse.Parse_Error then
+            declare
+               Item : WisiToken.Parse.Parse_Error renames 
WisiToken.Parse.Parse_Error (Err);
+            begin
+               Put_Line
+                 ('[' & Parser_Error_Code & Base_Buffer_Pos'Image (Safe_Pos 
(Error_Node)) &
+                    " ""syntax error: expecting " & Image (Item.Expecting, 
Descriptor) &
+                    ", found '" & Image (Tree.ID (Error_Node), Descriptor) & 
"'""]");
+               Put_Recover (Item.Recover_Ops);
+            end;
 
-         else
-            return Token.Byte_Region.First;
+         elsif Err in WisiToken.Parse.In_Parse_Action_Error then
+            declare
+               use all type 
WisiToken.Syntax_Trees.In_Parse_Actions.Status_Label;
+               Item : WisiToken.Parse.In_Parse_Action_Error renames 
WisiToken.Parse.In_Parse_Action_Error
+                 (Err);
+            begin
+               Put_Line
+                 ('[' & In_Parse_Action_Error_Code & Integer'Image
+                    (Syntax_Trees.In_Parse_Actions.Status_Label'Pos 
(Item.Status.Label)) &
+                    (case Item.Status.Label is
+                     when WisiToken.Syntax_Trees.In_Parse_Actions.Ok => "",
+                     when WisiToken.Syntax_Trees.In_Parse_Actions.Error =>
+                        Safe_Pos (Tree.Child (Error_Node, 
Item.Status.Begin_Name))'Image &
+                          Safe_Pos (Tree.Child (Error_Node, 
Item.Status.End_Name))'Image & " """ &
+                          (case 
WisiToken.Syntax_Trees.In_Parse_Actions.Error'(Item.Status.Label) is
+                           when Missing_Name_Error => "missing",
+                           when Extra_Name_Error => "extra",
+                           when Match_Names_Error => "match") &
+                          " name error""]"));
+               Put_Recover (Item.Recover_Ops);
+            end;
+
+         elsif Err in WisiToken.Parse.Error_Message then
+            --  FIXME: convert moved In_Parse_Action_Error to 
In_Parse_Action_Error_Code?
+            declare
+               Item : WisiToken.Parse.Error_Message renames 
WisiToken.Parse.Error_Message (Err);
+            begin
+               Put_Line
+                 ('[' & Parser_Error_Code & Buffer_Pos'Image 
(Buffer_Pos'First) &
+                    " """ & (-Item.Msg) & """]");
+               Put_Recover (Item.Recover_Ops);
+            end;
          end if;
-      end Safe_Pos;
+      end Handle_Error;
 
    begin
-      Put (Lexer_Errors);
-
-      for Item of Parse_Errors loop
-         case Item.Label is
-         when Parse.LR.Action =>
-            Put_Line
-              ('[' & Parser_Error_Code & Buffer_Pos'Image (Safe_Pos 
(Item.Error_Token)) &
-                 " ""syntax error: expecting " & Image (Item.Expecting, 
Data.Descriptor.all) &
-                 ", found '" & Image (Tree.ID (Item.Error_Token), 
Data.Descriptor.all) & "'""]");
-
-         when Parse.LR.Check =>
-            Put_Line
-              ('[' & Check_Error_Code & Integer'Image
-                 (Semantic_Checks.Check_Status_Label'Pos 
(Item.Check_Status.Label)) &
-                 (case Item.Check_Status.Label is
-                  when Ok => "",
-                  when Error =>
-                     Buffer_Pos'Image (Safe_Pos 
(Item.Check_Status.Begin_Name)) &
-                       Buffer_Pos'Image (Safe_Pos 
(Item.Check_Status.End_Name)) &
-                       " ""block name error""]"));
-
-         when Parse.LR.Message =>
-            Put_Line
-              ('[' & Parser_Error_Code & Buffer_Pos'Image (Buffer_Pos'First) &
-                 " """ & (-Item.Msg) & """]");
-         end case;
-
-         if Item.Recover.Stack.Depth > 0 then
-            Put (Item.Recover, Data, Tree);
+      if Tree.Is_Empty then
+         --  No errors.
+         if WisiToken.Debug_Mode then
+            Tree.Lexer.Trace.Put_Line ("empty tree");
          end if;
-      end loop;
-   end Put;
 
-   procedure Put_Error (Data : in Parse_Data_Type; Line_Number : in 
Line_Number_Type; Message : in String)
+      elsif Tree.Editable then
+         for Err_Ref in Tree.Error_Iterate loop
+            Handle_Error (Syntax_Trees.Error (Err_Ref), Tree.Error_Node 
(Err_Ref));
+         end loop;
+      else
+         for Err_Cur in Tree.Stream_Error_Iterate
+           ((if Tree.Stream_Count >= 2
+             then Tree.First_Parse_Stream
+             else Tree.Shared_Stream))
+         loop
+            declare
+               Stream_Err_Ref : constant Syntax_Trees.Stream_Error_Ref := 
Syntax_Trees.Error (Err_Cur);
+            begin
+               Handle_Error (Syntax_Trees.Error (Stream_Err_Ref), 
Tree.Error_Node (Stream_Err_Ref));
+            end;
+         end loop;
+      end if;
+   end Put_Errors;
+
+   procedure Put_Error
+     (Tree        : in Syntax_Trees.Tree;
+      Line_Number : in Line_Number_Type;
+      Message     : in String)
    is
       use Ada.Text_IO;
    begin
-      Put_Line ("(error """ & Error_Message (Data.Lexer.File_Name, 
Line_Number, 0, Message) & """)");
+      Put_Line ("(error """ & Error_Message (Tree.Lexer.File_Name, 
Line_Number, 0, Message) & """)");
    end Put_Error;
 
    ----------
    --  Spec visible private subprograms, alphabetical
 
+   function Compute_Indenting
+     (Data : in Parse_Data_Type'Class;
+      Tree : in Syntax_Trees.Tree;
+      Node : in Syntax_Trees.Valid_Node_Access)
+     return Wisi.Indenting
+   is
+      Aug : constant Augmented_Access := Get_Augmented (Tree, Node);
+   begin
+      if Aug.Cache_Version = Data.Augmented_Cache_Version then
+         return Aug.Indenting;
+      end if;
+
+      declare
+         use all type Ada.Containers.Count_Type;
+         use all type SAL.Base_Peek_Type;
+         use Syntax_Trees;
+         use Lexer;
+
+         --  The precondition guarantees that Prev_Non_Grammar and
+         --  Next_Non_Grammar exist.
+
+         Prev_Non_Grammar  : constant Valid_Node_Access := 
Tree.Prev_Non_Grammar (Node);
+         Next_Non_Grammar  : constant Valid_Node_Access := 
Tree.Next_Non_Grammar (Node);
+         Prev_Terminal     : constant Valid_Node_Access := Tree.Prev_Terminal 
(Node);
+         First_Non_Grammar : constant Node_Access       := 
Tree.First_Non_Grammar (Node);
+         Last_Terminal     : constant Node_Access       := Tree.Last_Terminal 
(Node);
+
+         function Has_New_Line (Node : in Valid_Node_Access) return Boolean
+         is begin
+            return (for some Token of Tree.Non_Grammar_Const (Node) =>
+                      Contains_New_Line (Token.Line_Region));
+         end Has_New_Line;
+
+         function Get_Last (Node : in Valid_Node_Access) return 
Base_Line_Number_Type
+         is
+            Non_Grammar  : Token_Arrays.Vector renames Tree.Non_Grammar_Const 
(Node);
+         begin
+            if Non_Grammar.Length = 0 then
+               return Invalid_Line_Number;
+            else
+               return Non_Grammar (Non_Grammar.Last_Index).Line_Region.Last;
+            end if;
+         end Get_Last;
+
+         function Get_First (Node : in Valid_Node_Access) return 
Base_Line_Number_Type
+         is
+            Non_Grammar : Token_Arrays.Vector renames Tree.Non_Grammar_Const 
(Node);
+         begin
+            if Non_Grammar.Length = 0 then
+               return Invalid_Line_Number;
+            else
+               return Non_Grammar (Non_Grammar.First_Index).Line_Region.First;
+            end if;
+         end Get_First;
+
+         First_Code_Line : constant Base_Line_Number_Type :=
+           --  Correct even if not first in line.
+           (if Prev_Non_Grammar = Prev_Terminal and Has_New_Line 
(Prev_Non_Grammar)
+            then  --  First terminal in Node is first on a line
+               Get_Last (Prev_Non_Grammar)
+            elsif First_Non_Grammar = Invalid_Node_Access
+            then Invalid_Line_Number
+            elsif Last_Terminal = Invalid_Node_Access
+            then Invalid_Line_Number -- No grammar terminals after 
first_non_grammar
+            else Get_First (First_Non_Grammar));
+
+         Last_Code_Line : constant Base_Line_Number_Type :=
+           (if Last_Terminal = Invalid_Node_Access
+            then Get_First (Next_Non_Grammar)
+            elsif Get_First (Last_Terminal) = Invalid_Line_Number
+            then Get_First (Next_Non_Grammar)
+            else Get_First (Last_Terminal));
+
+      begin
+         return Result : Wisi.Indenting do
+            if Last_Terminal = Invalid_Node_Access then
+               --  Node is an empty nonterm
+               Result.Code    := Null_Line_Region;
+               Result.Comment := Null_Line_Region;
+            else
+               if First_Code_Line = Invalid_Line_Number or Last_Code_Line = 
Invalid_Line_Number then
+                  Result.Code := Null_Line_Region;
+               else
+                  if Prev_Non_Grammar = Prev_Terminal then
+                     --  First terminal in Node is first on line.
+                     Result.Code := (First_Code_Line, Last_Code_Line);
+
+                  elsif First_Code_Line = Last_Code_Line then
+                     --  Not first on line, none on next line
+                     Result.Code := Null_Line_Region;
+                  else
+                     --  Not first on line, some on next line
+                     Result.Code :=
+                       (First => First_Code_Line + 1,
+                        Last  => Last_Code_Line);
+                  end if;
+               end if;
+
+               if Last_Terminal = Invalid_Node_Access then
+                  Result.Comment := Null_Line_Region;
+
+               else
+                  declare
+                     Trailing_Non_Grammar : Token_Arrays.Vector renames 
Tree.Non_Grammar_Const (Last_Terminal);
+                  begin
+                     if Trailing_Non_Grammar.Length in 0 | 1 then
+                        --  A single non_grammar either contains a single
+                        --  new_line, a new_line comment, or a non-new_line
+                        --  comment (i.e. a placeholder); none need indenting.
+                        --  FIXME: handle multi-line comments
+                        Result.Comment := Null_Line_Region;
+
+                     else
+                        if Contains_New_Line (Trailing_Non_Grammar 
(Trailing_Non_Grammar.First_Index).Line_Region)
+                        then
+                           --  First non_grammar terminates code line.
+                           Result.Comment.First := Trailing_Non_Grammar
+                             
(Trailing_Non_Grammar.First_Index).Line_Region.Last;
+                        else
+                           --  First non_grammar is a block comment (i.e. a
+                           --  placeholder) on the code line; find the first
+                           --  blank or comment line, if any.
+                           declare
+                              First_Line : constant Line_Number_Type := 
Trailing_Non_Grammar
+                                
(Trailing_Non_Grammar.First_Index).Line_Region.Last;
+                           begin
+                              for I in Trailing_Non_Grammar.First_Index + 1 .. 
Trailing_Non_Grammar.Last_Index loop
+                                 if Trailing_Non_Grammar (I).Line_Region.First 
/= First_Line then
+                                    Result.Comment.First := 
Trailing_Non_Grammar (I).Line_Region.First;
+                                    exit;
+                                 end if;
+                              end loop;
+                           end;
+                        end if;
+
+                        Result.Comment.Last :=
+                          Trailing_Non_Grammar 
(Trailing_Non_Grammar.Last_Index).Line_Region.First;
+                     end if;
+                  end;
+               end if;
+            end if;
+
+            Aug.Cache_Version := Data.Augmented_Cache_Version;
+            Aug.Indenting     := Result;
+         end return;
+      end;
+   end Compute_Indenting;
+
+   function Get_Augmented
+     (Tree : in Syntax_Trees.Tree'Class;
+      Node : in Syntax_Trees.Valid_Node_Access)
+     return Augmented_Access
+   is
+      Aug : Augmented_Access := Augmented_Access (Tree.Augmented (Node));
+   begin
+      if Aug = null then
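+         --  First use of this node; allocate the augmented data and cache it
+         --  on the node.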
+         Aug := new Augmented;
+         Tree.Set_Augmented (Node, Syntax_Trees.Augmented_Class_Access (Aug));
+      end if;
+      return Aug;
+   end Get_Augmented;
+
    function Image (Item : in Simple_Delta_Type) return String
    is begin
-      return "(" & Simple_Delta_Labels'Image (Item.Label) &
+      return "(" & Trimmed_Image (Item.Controlling_Token_Line) & ": " & 
Simple_Delta_Labels'Image (Item.Label) &
         (case Item.Label is
          when None => "",
          when Int => Integer'Image (Item.Int_Delta),
-         when Anchored => Integer'Image (Item.Anchored_ID) & Integer'Image 
(Item.Anchored_Delta) & " " &
-              Boolean'Image (Item.Anchored_Accumulate))
+         when Anchored => Item.Anchor_Line'Image & Item.Anchored_Delta'Image)
         & ")";
    end Image;
 
@@ -2040,81 +2424,37 @@ package body Wisi is
       return "(" & Delta_Labels'Image (Item.Label) &
         (case Item.Label is
          when Simple => " " & Image (Item.Simple_Delta),
-         when Hanging => Line_Number_Type'Image (Item.Hanging_First_Line) & 
Integer'Image (Item.Hanging_Paren_State) &
-              " " & Image (Item.Hanging_Delta_1) & " " & Image 
(Item.Hanging_Delta_2) & " " &
-              Boolean'Image (Item.Hanging_Accumulate)) & ")";
+         when Hanging => Line_Number_Type'Image (Item.Hanging_First_Line) &
+           " " & Image (Item.Hanging_Delta_1) & " " & Image 
(Item.Hanging_Delta_2)) & ")";
    end Image;
 
    function Current_Indent_Offset
-     (Data         : in Parse_Data_Type;
-      Anchor_Token : in Augmented_Token'Class;
+     (Tree         : in Syntax_Trees.Tree'Class;
+      Anchor_Token : in Syntax_Trees.Valid_Node_Access;
       Offset       : in Integer)
      return Integer
-   is begin
-      return Offset + Integer (Anchor_Token.Char_Region.First - 
Data.Line_Begin_Char_Pos (Anchor_Token.Line));
+   is
+      Line_Begin_Token : constant Syntax_Trees.Node_Access := 
Tree.Line_Begin_Token
+        (Tree.Line_Region (Anchor_Token, Trailing_Non_Grammar => True).First);
+   begin
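+      --  For example (hypothetical positions): with Offset = 2, an anchor
+      --  token starting at char 105, and the first grammar token on the
+      --  anchor's line starting at char 101, the result is 2 + (105 - 101) = 6.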
+      return Offset + Integer
+        (Tree.Char_Region (Anchor_Token, Trailing_Non_Grammar => False).First -
+           (if Line_Begin_Token = WisiToken.Syntax_Trees.Invalid_Node_Access
+            then 0
+            else Tree.Char_Region (Line_Begin_Token, Trailing_Non_Grammar => 
False).First));
    end Current_Indent_Offset;
 
-   function First_Line
-     (Token             : in Augmented_Token;
-      Indenting_Comment : in Boolean)
-     return Line_Number_Type
-   is begin
-      return
-        (if Indenting_Comment then
-           (if Token.First_Trailing_Comment_Line = Invalid_Line_Number
-            then Token.Line
-            else Token.First_Trailing_Comment_Line)
-         else
-           (if Token.First_Indent_Line = Invalid_Line_Number
-            then Token.Line
-            else Token.First_Indent_Line));
-   end First_Line;
-
-   function Get_Aug_Token_Const_1
-     (Tree       : in Syntax_Trees.Tree'Class;
-      Tree_Index : in Valid_Node_Index)
-     return Aug_Token_Const_Ref
-   is begin
-      return To_Aug_Token_Const_Ref (Tree.Augmented (Tree_Index));
-   end Get_Aug_Token_Const_1;
-
-   function Get_Aug_Token_Const
-     (Data  : in Parse_Data_Type;
-      Tree  : in WisiToken.Syntax_Trees.Tree'Class;
-      Token : in WisiToken.Token_Index)
-     return Aug_Token_Const_Ref
-   is begin
-      return Get_Aug_Token_Const_1 (Tree, Data.Terminals.all 
(Token).Tree_Index);
-   end Get_Aug_Token_Const;
-
-   function Get_Aug_Token_Var
-     (Tree       : in Syntax_Trees.Tree'Class;
-      Tree_Index : in Valid_Node_Index)
-     return Aug_Token_Var_Ref
-   is begin
-      return To_Aug_Token_Var_Ref (Tree.Augmented (Tree_Index));
-   end Get_Aug_Token_Var;
-
-   function Get_Aug_Token_Var
-     (Data  : in Parse_Data_Type;
-      Tree  : in WisiToken.Syntax_Trees.Tree'Class;
-      Token : in WisiToken.Token_Index)
-     return Aug_Token_Var_Ref
-   is begin
-      return Get_Aug_Token_Var (Tree, Data.Terminals.all (Token).Tree_Index);
-   end Get_Aug_Token_Var;
-
    function Get_Text
      (Data       : in Parse_Data_Type;
-      Tree       : in WisiToken.Syntax_Trees.Tree;
-      Tree_Index : in WisiToken.Valid_Node_Index)
+      Tree       : in Syntax_Trees.Tree;
+      Tree_Index : in WisiToken.Syntax_Trees.Valid_Node_Access)
      return String
    is
       use all type Syntax_Trees.Node_Label;
    begin
       case Tree.Label (Tree_Index) is
-      when Shared_Terminal | Nonterm =>
-         return Data.Lexer.Buffer_Text (Tree.Byte_Region (Tree_Index));
+      when Source_Terminal | Nonterm =>
+         return Tree.Lexer.Buffer_Text (Tree.Byte_Region (Tree_Index, 
Trailing_Non_Grammar => False));
 
       when Virtual_Terminal | Virtual_Identifier =>
          raise SAL.Programmer_Error;
@@ -2138,269 +2478,137 @@ package body Wisi is
       return Result (Result'First .. Last);
    end Elisp_Escape_Quotes;
 
-   overriding
-   function Image
-     (Item       : in Augmented_Token;
-      Descriptor : in WisiToken.Descriptor)
-     return String
-   is
-      ID_Image : constant String := Image (Item.ID, Descriptor);
-   begin
-      if Item.Line /= Invalid_Line_Number then
-         return "(" & ID_Image &
-           Line_Number_Type'Image (Item.Line) & ":" & Trimmed_Image (Integer 
(Item.Column)) & ")";
-
-      elsif Item.Char_Region = Null_Buffer_Region then
-         if Item.Byte_Region = Null_Buffer_Region then
-            return "(" & ID_Image & ")";
-         else
-            return "(" & ID_Image & ", " & Image (Item.Byte_Region) & ")";
-         end if;
-      else
-         return "(" & ID_Image & ", " & Image (Item.Char_Region) & ")";
-      end if;
-   end Image;
-
    function Indent_Anchored_2
-     (Data        : in out Parse_Data_Type;
-      Anchor_Line : in     Line_Number_Type;
-      Last_Line   : in     Line_Number_Type;
-      Offset      : in     Integer;
-      Accumulate  : in     Boolean)
+     (Data              : in Parse_Data_Type'Class;
+      Tree              : in Syntax_Trees.Tree;
+      Anchor_Token      : in Syntax_Trees.Valid_Node_Access;
+      Indenting_Token   : in Syntax_Trees.Valid_Node_Access;
+      Indenting_Comment : in Boolean;
+      Offset            : in Integer)
      return Delta_Type
    is
-      --  Return an anchored delta
-      use Anchor_ID_Vectors;
-      --  We can't use a Reference here, because the Element in reference
-      --  types is constrained (as are all allocated objects of access
-      --  types; AARM 4.8 (6/3)), and we may need to change the Label.
-      Indent    : Indent_Type      := Data.Indents (Anchor_Line);
-      Anchor_ID : constant Integer := 1 + Max_Anchor_ID (Data, Anchor_Line, 
Last_Line);
+      Anchor_Line    : constant Line_Number_Type := Tree.Line_Region
+        (Anchor_Token, Trailing_Non_Grammar => True).First;
+      Indenting_Line : constant Line_Number_Type :=
+        (if Indenting_Comment
+         then Compute_Indenting (Data, Tree, Indenting_Token).Comment.First
+         else Tree.Line_Region (Indenting_Token, Trailing_Non_Grammar => 
True).Last);
    begin
-      Data.Max_Anchor_ID := Integer'Max (Data.Max_Anchor_ID, Anchor_ID);
-
-      case Indent.Label is
-      when Not_Set =>
-         Indent := (Anchor_Nil, To_Vector (Anchor_ID, 1));
-
-         if Trace_Action > Extra then
-            Ada.Text_IO.Put_Line
-              (";; indent_anchored: " & Line_Number_Type'Image (Anchor_Line) & 
" => " & Image (Indent));
-         end if;
-
-      when Int =>
-         Indent := (Anchor_Int, To_Vector (Anchor_ID, 1), Indent.Int_Indent);
-
-         if Trace_Action > Extra then
-            Ada.Text_IO.Put_Line
-              (";; indent_anchored: " & Line_Number_Type'Image (Anchor_Line) & 
" => " & Image (Indent));
-         end if;
-
-      when Anchor_Nil =>
-         Indent.Anchor_Nil_IDs := Anchor_ID & Indent.Anchor_Nil_IDs;
-
-      when Anchor_Int =>
-         Indent.Anchor_Int_IDs := Anchor_ID & Indent.Anchor_Int_IDs;
-
-      when Anchored =>
-         Indent := (Anchor_Anchored, To_Vector (Anchor_ID, 1), 
Indent.Anchored_ID, Indent.Anchored_Delta);
-
-      when Anchor_Anchored =>
-         Indent.Anchor_Anchored_IDs := Anchor_ID & Indent.Anchor_Anchored_IDs;
-      end case;
-
-      Data.Indents.Replace_Element (Anchor_Line, Indent);
-
-      return (Simple, (Anchored, Anchor_ID, Offset, Accumulate));
+      if Anchor_Line = Indenting_Line then
+         --  test/ada_mode-interactive_1.adb:
+         --  E := (1 =>
+         --          'A');
+         --
+         --  The expression is anchored to itself, which is needed for
+         --  multi-line expressions (ada_annex_p.wy assoc_expression).
+         --  FIXME: need clearer example; what is "the expression"?
+         --  FIXME: need comment example.
+         return Null_Delta;
+      else
+         return
+           (Simple,
+            (Anchored,
+             Controlling_Token_Line => Anchor_Line,
+             Anchor_Line            => Anchor_Line,
+             Anchored_Delta         => Offset));
+      end if;
    end Indent_Anchored_2;
 
    function Indent_Compute_Delta
      (Data              : in out Parse_Data_Type'Class;
       Tree              : in     Syntax_Trees.Tree;
-      Tokens            : in     Valid_Node_Index_Array;
+      Nonterm           : in     Syntax_Trees.Valid_Node_Access;
       Param             : in     Indent_Param;
-      Tree_Indenting    : in     Valid_Node_Index;
+      Indenting_Token   : in     Syntax_Trees.Valid_Node_Access;
       Indenting_Comment : in     Boolean)
      return Delta_Type
-   is
-      Indenting_Token : Aug_Token_Const_Ref renames Get_Aug_Token_Const_1 
(Tree, Tree_Indenting);
-   begin
+   is begin
       --  Evaluate wisi-anchored*, wisi-hanging*.
       case Param.Label is
       when Simple =>
          case Param.Param.Label is
          when None =>
-            return (Simple, (Label => None));
+            return Null_Delta;
 
-         when Int =>
-            return (Simple, (Int, Param.Param.Int_Delta));
+         when Block =>
+            return (Simple, (Int, Invalid_Line_Number, Param.Param.Int_Delta));
 
-         when Anchored_Label =>
+         when Int =>
+            return
+              (Simple,
+               (Int,
+                Tree.Line_Region (Indenting_Token, Trailing_Non_Grammar => 
True).First,
+                Param.Param.Int_Delta));
+
+         when Simple_Param_Anchored =>
+            --  [2] wisi-anchored, wisi-anchored%
             declare
-               Anchor_Token : Aug_Token_Const_Ref renames Get_Aug_Token_Const_1
-                 (Tree, Tokens (Param.Param.Anchored_Index));
+               Anchor_Token  : constant Syntax_Trees.Valid_Node_Access := 
Tree.Child
+                 (Nonterm, Param.Param.Anchored_Index);
             begin
-               case Anchored_Label'(Param.Param.Label) is
-               when Anchored_0 =>
-                  --  [2] wisi-anchored
-                  return Indent_Anchored_2
-                    (Data,
-                     Anchor_Line => Anchor_Token.Line,
-                     Last_Line   => Indenting_Token.Last_Line 
(Indenting_Comment),
-                     Offset      => Current_Indent_Offset (Data, Anchor_Token, 
Param.Param.Anchored_Delta),
-                     Accumulate  => True);
-
-               when Anchored_1 =>
-                  --  [2] wisi-anchored%
-                  return Indent_Anchored_2
-                    (Data,
-                     Anchor_Line => Anchor_Token.Line,
-                     Last_Line   => Indenting_Token.Last_Line 
(Indenting_Comment),
-                     Offset      => Paren_In_Anchor_Line (Data, Tree, 
Anchor_Token, Param.Param.Anchored_Delta),
-                     Accumulate  => True);
-
-               when Anchored_2 =>
-                  --  [2] wisi-anchored%-
-                  return Indent_Anchored_2
-                    (Data,
-                     Anchor_Line => Anchor_Token.Line,
-                     Last_Line   => Indenting_Token.Last_Line 
(Indenting_Comment),
-                     Offset      => Paren_In_Anchor_Line (Data, Tree, 
Anchor_Token, Param.Param.Anchored_Delta),
-                     Accumulate  => False);
-
-               when Anchored_3 =>
-                  --  [2] wisi-anchored*
-                  if Indenting_Token.First then
-                     return Indent_Anchored_2
-                       (Data,
-                        Anchor_Line => Anchor_Token.Line,
-                        Last_Line   => Indenting_Token.Last_Line 
(Indenting_Comment),
-                        Offset      => Current_Indent_Offset (Data, 
Anchor_Token, Param.Param.Anchored_Delta),
-                        Accumulate  => True);
-
-                  else
-                     return Null_Delta;
-                  end if;
-
-               when Anchored_4 =>
-                  --  [2] wisi-anchored*-
-                  return Indent_Anchored_2
-                    (Data,
-                     Anchor_Line => Anchor_Token.Line,
-                     Last_Line   => Indenting_Token.Last_Line 
(Indenting_Comment),
-                     Offset      => Current_Indent_Offset (Data, Anchor_Token, 
Param.Param.Anchored_Delta),
-                     Accumulate  => False);
-
-               end case;
+               return Indent_Anchored_2
+                 (Data, Tree, Anchor_Token, Indenting_Token, Indenting_Comment,
+                  Offset         =>
+                    (case Simple_Param_Anchored'(Param.Param.Label) is
+                     when Anchored_0 =>
+                        --  test/ada_mode-interactive_2.adb 'if (A and B -- 
Comment 1'
+                        Current_Indent_Offset (Tree, Anchor_Token, 
Param.Param.Anchored_Delta),
+                     when Anchored_1 =>
+                        Paren_In_Anchor_Line (Data, Tree, Anchor_Token, 
Param.Param.Anchored_Delta)));
             end;
 
          when Language =>
             return Param.Param.Function_Ptr
-              (Data, Tree, Tokens, Tree_Indenting, Indenting_Comment, 
Param.Param.Args);
+              (Data, Tree, Nonterm, Indenting_Token, Indenting_Comment, 
Param.Param.Args);
          end case;
 
       when Hanging_Label =>
-         case Hanging_Label'(Param.Label) is
-         when Hanging_0 => -- wisi-hanging
-            return Indent_Hanging_1
-              (Data, Tree, Tokens, Tree_Indenting, Indenting_Comment, 
Param.Hanging_Delta_1,
-               Param.Hanging_Delta_2,
-               Option => False, Accumulate => True);
-         when Hanging_1 => -- wisi-hanging-
-            return Indent_Hanging_1
-              (Data, Tree, Tokens, Tree_Indenting, Indenting_Comment, 
Param.Hanging_Delta_1,
-               Param.Hanging_Delta_2,
-               Option => False, Accumulate => False);
-         when Hanging_2 => -- wisi-hanging%
-            return Indent_Hanging_1
-              (Data, Tree, Tokens, Tree_Indenting, Indenting_Comment, 
Param.Hanging_Delta_1,
-               Param.Hanging_Delta_2,
-               Option => True, Accumulate => True);
-         when Hanging_3 => -- wisi-hanging%-
-            return Indent_Hanging_1
-              (Data, Tree, Tokens, Tree_Indenting, Indenting_Comment, 
Param.Hanging_Delta_1,
-               Param.Hanging_Delta_2,
-               Option => True, Accumulate => False);
-         end case;
+         return Indent_Hanging_1
+           (Data, Tree, Nonterm, Indenting_Token, Indenting_Comment, 
Param.Hanging_Delta_1,
+            Param.Hanging_Delta_2, Param.Label);
       end case;
    end Indent_Compute_Delta;
 
    procedure Indent_Token_1
      (Data              : in out Parse_Data_Type;
       Tree              : in     Syntax_Trees.Tree;
-      Indenting_Token   : in     Augmented_Token'Class;
+      Line_Region       : in     WisiToken.Line_Region;
       Delta_Indent      : in     Delta_Type;
-      Indenting_Comment : in     Boolean)
+      Indenting_Comment : in     Indenting_Comment_Label;
+      Controlling_Delta : in     Delta_Type := Null_Delta)
    is
-      --  Aplly Delta_Indent to Indenting_Token
-      First_Line : constant Line_Number_Type := Indenting_Token.First_Line 
(Indenting_Comment);
-      Last_Line  : constant Line_Number_Type := Indenting_Token.Last_Line 
(Indenting_Comment);
+      Indent : Boolean := True;
    begin
       if Trace_Action > Detail then
-         Ada.Text_IO.Put_Line
-           (";; indent_token_1: " & Indenting_Token.Image 
(Data.Descriptor.all) & " " & Image (Delta_Indent) &
-              Line_Number_Type'Image (First_Line) & " .." & 
Line_Number_Type'Image (Last_Line) &
-              (if Indenting_Comment then " comment" else ""));
+         Tree.Lexer.Trace.Put_Line
+           ("indent_token_1:      " &
+              Image (Line_Region) & " " & Image (Delta_Indent) &
+              (if Indenting_Comment /= None then " comment" else " code"));
       end if;
 
-      for Line in First_Line .. Last_Line loop
+      for Line in Line_Region.First .. Line_Region.Last loop
          if Data.Indent_Comment_Col_0 then
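+            --  A comment already at column 0 on this line stays at column 0;
+            --  the loop below detects it and emits an indent of 0.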
+            Indent := True;
             declare
+               use all type Ada.Containers.Count_Type;
                use all type Ada.Text_IO.Count;
+               Line_Begin_Char_Pos : Buffer_Pos;
 
-               function Containing_Token return Base_Token_Index
-               is
-                  --  Return token index of terminal containing non_grammer on 
Line;
-                  --  Invalid_Token_Index if none.
-                  I : Line_Number_Type := Line;
-                  J : Base_Token_Index;
-               begin
-                  if Line < Data.Line_Begin_Token.First_Index then
-                     --  Line is before first grammar token; 
Leading_Non_Grammar checked
-                     --  below.
-                     return Invalid_Token_Index;
-                  end if;
+               Containing : constant Syntax_Trees.Valid_Node_Access := 
Tree.Find_New_Line
+                 (Line, Line_Begin_Char_Pos);
+               --  Line_Begin_Char_Pos is either in a multi-line grammar
+               --  token, or a non_grammar token.
 
-                  loop
-                     exit when Data.Line_Begin_Token.all (I) /= 
Base_Token_Arrays.No_Index;
-                     --  No_Index means Line is in a multi-line token, which 
could be a block comment.
-                     I := I - 1;
-                  end loop;
-
-                  J := Data.Line_Begin_Token.all (I);
-                  declare
-                     Aug : Augmented_Token renames Get_Aug_Token_Const (Data, 
Tree, J);
-                  begin
-                     if Line in Aug.First_Trailing_Comment_Line .. 
Aug.Last_Trailing_Comment_Line then
-                        return J;
-                     else
-                        return Invalid_Token_Index;
-                     end if;
-                  end;
-               end Containing_Token;
-
-               Indent     : Boolean                   := True;
-               Containing : constant Base_Token_Index := Containing_Token;
+               Non_Grammar : WisiToken.Lexer.Token_Arrays.Vector renames 
Tree.Non_Grammar_Const (Containing);
             begin
-               if Line < Data.Line_Begin_Token.First_Index then
-                  --  Line is before the first grammar token. We may be doing 
a partial
-                  --  parse where the initial indent is non-zero, so we still 
have to
-                  --  check for column 0.
-                  for Tok of Data.Leading_Non_Grammar loop
-                     if Tok.Line = Line and then
-                       Tok.ID in Data.First_Comment_ID .. Data.Last_Comment_ID 
and then
-                       Tok.Column = 0
-                     then
-                        Indent := False;
-                        exit;
-                     end if;
-                  end loop;
-
-               elsif Containing /= Invalid_Token_Index then
-                  for Tok of Get_Aug_Token_Const (Data, Tree, 
Containing).Non_Grammar loop
-                     if Tok.Line = Line and then
+               if Non_Grammar.Length > 0 and then Line_Begin_Char_Pos in
+                 Non_Grammar (Non_Grammar.First_Index).Byte_Region.First ..
+                   Non_Grammar (Non_Grammar.Last_Index).Byte_Region.Last
+               then
+                  for Tok of Non_Grammar loop
+                     if Tok.Line_Region.First = Line and then
                        Tok.ID in Data.First_Comment_ID .. Data.Last_Comment_ID 
and then
-                       Tok.Column = 0
+                       Lexer.Column (Tok, Line_Begin_Char_Pos) = 0
                      then
                         Indent := False;
                         exit;
@@ -2408,32 +2616,37 @@ package body Wisi is
                   end loop;
                end if;
 
-               if Indent then
-                  Indent_Line (Data, Line, Delta_Indent);
-               else
-                  Indent_Line (Data, Line, (Simple, (Int, 0)));
+               if not Indent then
+                  Indent_Line
+                    (Data, Line, (Simple, (Int, Invalid_Line_Number, 0)), 
Indenting_Comment, Tree.Lexer.Trace);
                end if;
             end;
-         else
-            Indent_Line (Data, Line, Delta_Indent);
+         end if;
+
+         if Indent then
+            if Indenting_Comment /= None and Data.Indents (Line).Label = 
Not_Set then
+               --  In ada_mode-conditional_expressions.adb, case expression
+               --  for K, comment before "(if J > 42", the if expression is
+               --  indented by ada-indent-aggregate, which returns -1. We
+               --  need to apply that to the comment also.
+               --
+               --  However, in ada_mode-nominal.adb, line "-- Comment before
+               --  'end case'", we don't want to add Controlling_Delta; that
+               --  applies the same indent twice.
+               Indent_Line (Data, Line, Controlling_Delta, Indenting_Comment, 
Tree.Lexer.Trace);
+            end if;
+
+            Indent_Line (Data, Line, Delta_Indent, Indenting_Comment, 
Tree.Lexer.Trace);
          end if;
       end loop;
    end Indent_Token_1;
 
-   function Last_Line
-     (Token             : in Augmented_Token;
-      Indenting_Comment : in Boolean)
-     return Line_Number_Type
-   is begin
-      return
-        (if Indenting_Comment then
-           (if Token.Last_Trailing_Comment_Line = Invalid_Line_Number
-            then Token.Line
-            else Token.Last_Trailing_Comment_Line)
-         else
-           (if Token.Last_Indent_Line = Invalid_Line_Number
-            then Token.Line
-            else Token.Last_Indent_Line));
-   end Last_Line;
+   function Refactor_Parse  (Data : in Parse_Data_Type; Item : in String) 
return Refactor_Action
+   is
+      pragma Unreferenced (Item);
+      pragma Unreferenced (Data);
+   begin
+      return Refactor_Action'Last;
+   end Refactor_Parse;
 
 end Wisi;
diff --git a/wisi.ads b/wisi.ads
index e707ab8aaf..28827cfd5d 100644
--- a/wisi.ads
+++ b/wisi.ads
@@ -10,7 +10,7 @@
 --
 --  [3] wisi-process-parse.el - defines elisp/process API
 --
---  Copyright (C) 2017 - 2020 Free Software Foundation, Inc.
+--  Copyright (C) 2017 - 2022 Free Software Foundation, Inc.
 --
 --  This library is free software;  you can redistribute it and/or modify it
 --  under terms of the  GNU General Public License  as published by the Free
@@ -27,69 +27,114 @@ pragma License (Modified_GPL);
 
 with Ada.Containers.Doubly_Linked_Lists;
 with Ada.Containers.Vectors;
+with Ada.Strings.Unbounded;
 with SAL.Gen_Unbounded_Definite_Red_Black_Trees;
 with SAL.Gen_Unbounded_Definite_Vectors;
-with WisiToken.Parse.LR;
-with WisiToken.Lexer;
+with SAL.Generic_Decimal_Image;
+with WisiToken.Parse;
 with WisiToken.Syntax_Trees;
 package Wisi is
+   use all type WisiToken.Syntax_Trees.Node_Access;
+   use all type WisiToken.Line_Region;
+   use all type WisiToken.Cache_Version;
+   use all type WisiToken.Syntax_Trees.Augmented_Class_Access;
    use all type WisiToken.Base_Buffer_Pos;
 
-   function Image (Aug : in WisiToken.Base_Token_Class_Access; Descriptor : in 
WisiToken.Descriptor) return String;
-   function Image (Action : in WisiToken.Syntax_Trees.Semantic_Action) return 
String;
-   --  For Syntax_Trees.Print_Tree, Parser.Execute_Action
+   Protocol_Error : exception;
 
-   type Post_Parse_Action_Type is (Navigate, Face, Indent);
+   procedure Skip
+     (Source : in     String;
+      Last   : in out Integer;
+      Char   : in     Character);
+   --  Check that Source (Last + 1) = Char. If so, increment Last.
+   --  If not, raise Protocol_Error.
 
-   type Parse_Data_Type
-     (Terminals        : not null access constant 
WisiToken.Base_Token_Arrays.Vector;
-      Line_Begin_Token : not null access constant 
WisiToken.Line_Begin_Token_Vectors.Vector)
-     is new WisiToken.Syntax_Trees.User_Data_Type with private;
+   function Get_String
+     (Source : in     String;
+      Last   : in out Integer)
+     return String;
+   --  Returns content of quoted string in Source at Last + 1 ... Handles
+   --  all '\' escapes by copying them literally into result, while using
+   --  them to find the terminating quote.
+   --
+   --  Raises Protocol_Error for a missing end quote.
 
-   procedure Initialize
-     (Data              : in out Parse_Data_Type;
-      Lexer             : in     WisiToken.Lexer.Handle;
-      Descriptor        : access constant WisiToken.Descriptor;
-      Base_Terminals    : in     WisiToken.Base_Token_Array_Access;
-      Post_Parse_Action : in     Post_Parse_Action_Type;
-      Begin_Line        : in     WisiToken.Line_Number_Type;
-      End_Line          : in     WisiToken.Line_Number_Type;
-      Begin_Indent      : in     Integer;
-      Params            : in     String);
-   --  Begin_Line, Begin_Indent, Line_Count only used for Indent. Params
-   --  contains language-specific indent parameter values.
-
-   overriding procedure Reset (Data : in out Parse_Data_Type);
-   --  Reset for a new parse, with data from previous Initialize.
-
-   function Source_File_Name (Data : in Parse_Data_Type) return String;
-   function Post_Parse_Action (Data : in Parse_Data_Type) return 
Post_Parse_Action_Type;
+   function Get_Enum
+     (Source : in     String;
+      Last   : in out Integer)
+     return String;
+   --  Returns next space-delimited word (nominally the value of some
+   --  enumeration type) in Source at Last + 1 ...
 
-   overriding
-   procedure Lexer_To_Augmented
-     (Data  : in out          Parse_Data_Type;
-      Tree  : in out          WisiToken.Syntax_Trees.Tree'Class;
-      Token : in              WisiToken.Base_Token;
-      Lexer : not null access WisiToken.Lexer.Instance'Class);
+   function Get_Integer
+     (Source : in     String;
+      Last   : in out Integer)
+     return Integer;
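+
+   --  A hedged usage sketch (hypothetical command text; assumes each helper
+   --  leaves Last at the last character it consumed):
+   --
+   --     Skip (Source, Last, ' ');  --  require a blank after the command name
+   --     declare
+   --        File  : constant String  := Get_String (Source, Last);
+   --        Count : constant Integer := Get_Integer (Source, Last);
+   --     begin
+   --        ...
+   --     end;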
+
+   procedure To_Unix_Line_Endings
+     (Source           : in     Ada.Strings.Unbounded.String_Access;
+      Source_Byte_Last : in out Integer;
+      Source_Char_Last : in out Integer);
+   --  Source is assumed to have DOS line endings; convert them to Unix.
+
+   function Image_Action (Action : in 
WisiToken.Syntax_Trees.Post_Parse_Action) return String;
+   --  For Image_Action in Syntax_Trees.Image
+
+   function Elisp_Escape_Quotes (Item : in String) return String;
+   --  Prefix any '"' in Item with '\' for elisp.
+
+   type Base_Post_Parse_Action_Type is (Navigate, Face, Indent, None);
+   --  Must match first few items in wisi-parse-common.el
+   --  wisi-post-parse-actions.
+
+   subtype Post_Parse_Action_Type is Base_Post_Parse_Action_Type range 
Navigate .. Indent;
+
+   type Parse_Data_Type is abstract new WisiToken.Syntax_Trees.User_Data_Type 
with private;
+   type Parse_Data_Access is access all Parse_Data_Type'Class;
+   type Parse_Data_Access_Constant is access constant Parse_Data_Type'Class;
+
+   procedure Initialize (Data : in out Parse_Data_Type)
+   is null;
+   --  Initialize Data before parse.
+   --
+   --  User should later call Reset_Post_Parse before any post_parse
+   --  action.
+
+   procedure Parse_Language_Params
+     (Data   : in out Parse_Data_Type;
+      Params : in     String)
+   is null;
+   --  If Params /= "", set all language-specific parameters from Params,
+   --  in declaration order; otherwise keep default values. Boolean is
+   --  represented by 0 | 1. Parameter values are space delimited.
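+   --  For example (hypothetical): Params = "3 1" sets an integer parameter
+   --  to 3 and a Boolean parameter to True, in declaration order.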
+
+   procedure Reset_Post_Parse
+     (Data                : in out Parse_Data_Type;
+      Tree                : in     WisiToken.Syntax_Trees.Tree'Class;
+      Post_Parse_Action   : in     Post_Parse_Action_Type;
+      Action_Region_Bytes : in     WisiToken.Buffer_Region;
+      Action_Region_Chars : in     WisiToken.Buffer_Region;
+      Begin_Indent        : in     Integer);
+   --  Reset for a new post-parse action.
+
+   function Post_Parse_Action (Data : in Parse_Data_Type) return 
Post_Parse_Action_Type;
+   function Action_Region_Bytes (Data : in Parse_Data_Type) return 
WisiToken.Buffer_Region;
 
    overriding
-   procedure Insert_Token
-     (Data  : in out Parse_Data_Type;
-      Tree  : in out WisiToken.Syntax_Trees.Tree'Class;
-      Token : in     WisiToken.Valid_Node_Index);
+   function Copy_Augmented
+     (User_Data : in Parse_Data_Type;
+      Augmented : in WisiToken.Syntax_Trees.Augmented_Class_Access)
+     return WisiToken.Syntax_Trees.Augmented_Class_Access;
 
    overriding
-   procedure Delete_Token
-     (Data                : in out Parse_Data_Type;
-      Tree                : in out WisiToken.Syntax_Trees.Tree'Class;
-      Deleted_Token_Index : in     WisiToken.Token_Index);
+   procedure Initialize_Actions
+     (Data : in out Parse_Data_Type;
+      Tree : in     WisiToken.Syntax_Trees.Tree'Class);
 
    overriding
-   procedure Reduce
-     (Data    : in out Parse_Data_Type;
-      Tree    : in out WisiToken.Syntax_Trees.Tree'Class;
-      Nonterm : in     WisiToken.Valid_Node_Index;
-      Tokens  : in     WisiToken.Valid_Node_Index_Array);
+   procedure Insert_Token
+     (Data           : in out Parse_Data_Type;
+      Tree           : in out WisiToken.Syntax_Trees.Tree'Class;
+      Inserted_Token : in     WisiToken.Syntax_Trees.Valid_Node_Access);
 
    type Navigate_Class_Type is (Motion, Statement_End, Statement_Override, 
Statement_Start, Misc);
    --  Matches [1] wisi-class-list.
@@ -104,16 +149,14 @@ package Wisi is
    procedure Statement_Action
      (Data    : in out Parse_Data_Type;
       Tree    : in     WisiToken.Syntax_Trees.Tree;
-      Nonterm : in     WisiToken.Valid_Node_Index;
-      Tokens  : in     WisiToken.Valid_Node_Index_Array;
+      Nonterm : in     WisiToken.Syntax_Trees.Valid_Node_Access;
       Params  : in     Statement_Param_Array);
    --  Implements [2] wisi-statement-action.
 
    procedure Name_Action
      (Data    : in out Parse_Data_Type;
       Tree    : in     WisiToken.Syntax_Trees.Tree;
-      Nonterm : in     WisiToken.Valid_Node_Index;
-      Tokens  : in     WisiToken.Valid_Node_Index_Array;
+      Nonterm : in     WisiToken.Syntax_Trees.Valid_Node_Access;
       Name    : in     WisiToken.Positive_Index_Type);
    --  Implements [2] wisi-name-action.
 
@@ -140,8 +183,7 @@ package Wisi is
    procedure Motion_Action
      (Data    : in out Parse_Data_Type;
       Tree    : in     WisiToken.Syntax_Trees.Tree;
-      Nonterm : in     WisiToken.Valid_Node_Index;
-      Tokens  : in     WisiToken.Valid_Node_Index_Array;
+      Nonterm : in     WisiToken.Syntax_Trees.Valid_Node_Access;
       Params  : in     Motion_Param_Array);
    --  Implements [2] wisi-motion-action.
 
@@ -156,16 +198,14 @@ package Wisi is
    procedure Face_Apply_Action
      (Data    : in out Parse_Data_Type;
       Tree    : in     WisiToken.Syntax_Trees.Tree;
-      Nonterm : in     WisiToken.Valid_Node_Index;
-      Tokens  : in     WisiToken.Valid_Node_Index_Array;
+      Nonterm : in     WisiToken.Syntax_Trees.Valid_Node_Access;
       Params  : in     Face_Apply_Param_Array);
    --  Implements [2] wisi-face-apply-action.
 
    procedure Face_Apply_List_Action
      (Data    : in out Parse_Data_Type;
       Tree    : in     WisiToken.Syntax_Trees.Tree;
-      Nonterm : in     WisiToken.Valid_Node_Index;
-      Tokens  : in     WisiToken.Valid_Node_Index_Array;
+      Nonterm : in     WisiToken.Syntax_Trees.Valid_Node_Access;
       Params  : in     Face_Apply_Param_Array);
    --  Implements [2] wisi-face-apply-list-action.
 
@@ -181,8 +221,7 @@ package Wisi is
    procedure Face_Mark_Action
      (Data    : in out Parse_Data_Type;
       Tree    : in     WisiToken.Syntax_Trees.Tree;
-      Nonterm : in     WisiToken.Valid_Node_Index;
-      Tokens  : in     WisiToken.Valid_Node_Index_Array;
+      Nonterm : in     WisiToken.Syntax_Trees.Valid_Node_Access;
       Params  : in     Face_Mark_Param_Array);
    --  Implements [2] wisi-face-mark-action.
 
@@ -191,8 +230,7 @@ package Wisi is
    procedure Face_Remove_Action
      (Data    : in out Parse_Data_Type;
       Tree    : in     WisiToken.Syntax_Trees.Tree;
-      Nonterm : in     WisiToken.Valid_Node_Index;
-      Tokens  : in     WisiToken.Valid_Node_Index_Array;
+      Nonterm : in     WisiToken.Syntax_Trees.Valid_Node_Access;
       Params  : in     Face_Remove_Param_Array);
    --  Implements [2] wisi-face-remove-action.
 
@@ -201,17 +239,16 @@ package Wisi is
    --
    --  Indent functions are represented by the Indent_Param type.
 
-   type Simple_Indent_Param_Label is -- not hanging
+   type Simple_Indent_Param_Label is
+     --  Not hanging
      (None,
       Int,
       Anchored_0, -- [2] wisi-anchored
       Anchored_1, -- [2] wisi-anchored%
-      Anchored_2, -- [2] wisi-anchored%-
-      Anchored_3, -- [2] wisi-anchored*
-      Anchored_4, -- [2] wisi-anchored*-
+      Block,      -- [2] wisi-block
       Language    -- [2] language-specific function
      );
-   subtype Anchored_Label is Simple_Indent_Param_Label range Anchored_0 .. 
Anchored_4;
+   subtype Simple_Param_Anchored is Simple_Indent_Param_Label range Anchored_0 
.. Anchored_1;
 
    --  Arguments to language-specific functions are integers; one of
    --  delta, Token_Number, or Token_ID - the syntax does not distinguish
@@ -225,11 +262,16 @@ package Wisi is
 
    type Delta_Type (<>) is private;
 
+   type Indenting_Comment_Label is (None, Leading, Trailing);
+   --  None    : indenting code
+   --  Leading : comment indent from following token
+   --  Trailing: comment indent from preceding token
+
    type Language_Indent_Function is access function
      (Data              : in out Parse_Data_Type'Class;
       Tree              : in     WisiToken.Syntax_Trees.Tree;
-      Tree_Tokens       : in     WisiToken.Valid_Node_Index_Array;
-      Tree_Indenting    : in     WisiToken.Valid_Node_Index;
+      Nonterm           : in     WisiToken.Syntax_Trees.Valid_Node_Access;
+      Tree_Indenting    : in     WisiToken.Syntax_Trees.Valid_Node_Access;
       Indenting_Comment : in     Boolean;
       Args              : in     Indent_Arg_Arrays.Vector)
      return Delta_Type;
@@ -242,10 +284,10 @@ package Wisi is
       when None =>
          null;
 
-      when Int =>
+      when Block | Int =>
          Int_Delta : Integer;
 
-      when Anchored_Label =>
+      when Simple_Param_Anchored =>
          Anchored_Index : WisiToken.Positive_Index_Type;
          Anchored_Delta : Integer;
 
@@ -257,14 +299,15 @@ package Wisi is
 
    function Image (Item : in Simple_Indent_Param) return String;
 
+   function Add_Simple_Indent_Param (Left, Right : in Simple_Indent_Param) 
return Simple_Indent_Param;
+
    type Indent_Param_Label is
      (Simple,
       Hanging_0, -- [2] wisi-hanging
-      Hanging_1, -- [2] wisi-hanging-
-      Hanging_2, -- [2] wisi-hanging%
-      Hanging_3  -- [2] wisi-hanging%-
+      Hanging_1, -- [2] wisi-hanging%
+      Hanging_2  -- [2] wisi-hanging*
      );
-   subtype Hanging_Label is Indent_Param_Label range Hanging_0 .. Hanging_3;
+   subtype Hanging_Label is Indent_Param_Label range Hanging_0 .. Hanging_2;
 
    type Indent_Param (Label : Indent_Param_Label := Simple) is
    record
@@ -296,35 +339,36 @@ package Wisi is
 
    type Indent_Param_Array is array (WisiToken.Positive_Index_Type range <>) 
of Indent_Pair;
 
+   type Indenting is record
+      Code : WisiToken.Line_Region := WisiToken.Null_Line_Region;
+      --  Lines that need indenting; first token on these lines is contained
+      --  in this token. Includes blank and comment lines between
+      --  grammar tokens, but excludes trailing blanks and comments after the
+      --  last token, so they can be indented differently.
+
+      Comment : WisiToken.Line_Region := WisiToken.Null_Line_Region;
+      --  Trailing comment or blank lines (after the last contained grammar
+      --  token). Excludes comment following code on a line.
+   end record;
+
    procedure Indent_Action_0
      (Data    : in out Parse_Data_Type'Class;
       Tree    : in     WisiToken.Syntax_Trees.Tree;
-      Nonterm : in     WisiToken.Valid_Node_Index;
-      Tokens  : in     WisiToken.Valid_Node_Index_Array;
+      Nonterm : in     WisiToken.Syntax_Trees.Valid_Node_Access;
       Params  : in     Indent_Param_Array);
    --  Implements [2] wisi-indent-action.
 
-   procedure Indent_Action_1
-     (Data    : in out Parse_Data_Type'Class;
-      Tree    : in     WisiToken.Syntax_Trees.Tree;
-      Nonterm : in     WisiToken.Valid_Node_Index;
-      Tokens  : in     WisiToken.Valid_Node_Index_Array;
-      N       : in     WisiToken.Positive_Index_Type;
-      Params  : in     Indent_Param_Array);
-   --  Implements [2] wisi-indent-action*.
-
    function Indent_Hanging_1
      (Data              : in out Parse_Data_Type;
       Tree              : in     WisiToken.Syntax_Trees.Tree;
-      Tokens            : in     WisiToken.Valid_Node_Index_Array;
-      Tree_Indenting    : in     WisiToken.Valid_Node_Index;
+      Nonterm           : in     WisiToken.Syntax_Trees.Valid_Node_Access;
+      Indenting_Token   : in     WisiToken.Syntax_Trees.Valid_Node_Access;
       Indenting_Comment : in     Boolean;
       Delta_1           : in     Simple_Indent_Param;
       Delta_2           : in     Simple_Indent_Param;
-      Option            : in     Boolean;
-      Accumulate        : in     Boolean)
+      Label             : in     Hanging_Label)
      return Delta_Type;
-   --  Implements [2] wisi-hanging, wisi-hanging%, wisi-hanging%-.
+   --  Implements [2] wisi-hanging, wisi-hanging%, wisi-hanging*.
    --
    --  Language specific child packages may override this to implement
    --  language-specific cases.
@@ -332,137 +376,112 @@ package Wisi is
    ----------
    --  Other
 
+   type Refactor_Action is range 0 .. Integer'Last;
+
+   function Refactor_Parse  (Data : in Parse_Data_Type; Item : in String) 
return Refactor_Action;
+
+   procedure Refactor_Help (Data : in Parse_Data_Type) is null;
+
    procedure Refactor
      (Data       : in out Parse_Data_Type;
       Tree       : in out WisiToken.Syntax_Trees.Tree;
-      Action     : in     Positive;
+      Action     : in     Refactor_Action;
       Edit_Begin : in     WisiToken.Buffer_Pos) is null;
 
-   type Arg_Index_Array is array (Positive range <>) of 
WisiToken.Positive_Index_Type;
+   type Query_Label is (Node, Containing_Statement, Ancestor, Parent, Child, 
Print, Dump);
+   --  Must match wisi-parse-common.el wisi-parse-tree-queries
 
-   procedure Put_Language_Action
-     (Data    : in Parse_Data_Type;
-      Content : in String);
-   --  Send a Language_Action message to Emacs.
+   subtype Point_Query is Query_Label range Node .. Ancestor;
+   subtype Node_Query is Query_Label range Parent .. Child;
 
-   procedure Put (Data : in out Parse_Data_Type; Parser : in 
WisiToken.Parse.Base_Parser'Class);
-   --  Perform additional post-parse actions, then put result to
-   --  Ada.Text_IO.Current_Output, as encoded responses as defined in [3]
-   --  wisi-process-parse--execute.
+   type Query (Label : Query_Label) is
+   record
+      case Label is
+      when Point_Query =>
+         Char_Point : WisiToken.Buffer_Pos;
 
-   procedure Put (Lexer_Errors : in WisiToken.Lexer.Error_Lists.List);
-   procedure Put
-     (Data         : in Parse_Data_Type;
-      Lexer_Errors : in WisiToken.Lexer.Error_Lists.List;
-      Parse_Errors : in WisiToken.Parse.LR.Parse_Error_Lists.List;
-      Tree         : in WisiToken.Syntax_Trees.Tree);
-   --  Put Lexer_Errors and Parse_Errors to Ada.Text_IO.Current_Output,
-   --  as encoded error responses as defined in [3]
-   --  wisi-process-parse--execute.
+         case Label is
+         when Ancestor =>
+            IDs : WisiToken.Token_ID_Arrays.Vector;
+         when others =>
+            null;
+         end case;
 
-   procedure Put_Error (Data : in Parse_Data_Type; Line_Number : in 
WisiToken.Line_Number_Type; Message : in String);
-   --  Put an error elisp form to Ada.Text_IO.Standard_Output.
+      when Parent | Child =>
+         Node : WisiToken.Syntax_Trees.Node_Access;
+         N    : Positive;
 
-private
+      when Print =>
+         null;
 
-   type Non_Grammar_Token is new WisiToken.Base_Token with record
-      First : Boolean := False;
+      when Dump =>
+         File_Name : Ada.Strings.Unbounded.Unbounded_String;
+      end case;
    end record;
 
-   package Non_Grammar_Token_Arrays is new SAL.Gen_Unbounded_Definite_Vectors
-     (WisiToken.Token_Index, Non_Grammar_Token, Default_Element => (others => 
<>));
+   function Address_Image (Item : in WisiToken.Syntax_Trees.Valid_Node_Access) 
return String;
+   --  Hexadecimal address of Item, for Query_Tree.
 
-   type Augmented_Token is new WisiToken.Base_Token with record
-      --  Most fields are set by Lexer_To_Augmented at parse time; others
-      --  are set by Reduce for nonterminals.
+   function To_Node_Access (Item : in String) return 
WisiToken.Syntax_Trees.Valid_Node_Access;
 
-      Deleted : Boolean := False;
-      --  Set True by Parse_Data_Type.Delete_Token; Non_Grammar tokens are
-      --  moved to the previous non-deleted token.
+   function Get_Token_IDs
+     (User_Data    : in     Parse_Data_Type;
+      Command_Line : in     String;
+      Last         : in out Integer)
+     return WisiToken.Token_ID_Arrays.Vector
+   is abstract;
+   --  Read an aggregate of Token_Enum_IDs from Command_Line.
+   --
+   --  Dispatching on User_Data because Token_Enum_IDs is
+   --  language-specific.
 
-      --  The following fields are only needed for indent.
+   procedure Query_Tree
+     (Data  : in Parse_Data_Access_Constant;
+      Tree  : in WisiToken.Syntax_Trees.Tree;
+      Query : in Wisi.Query);
 
-      First : Boolean := False;
-      --  For a terminal, True if the token is first on a line.
-      --
-      --  For a nonterminal, True if some contained token's First is True.
+   type Arg_Index_Array is array (Positive range <>) of 
WisiToken.Positive_Index_Type;
 
-      Paren_State : Integer := 0;
-      --  Parenthesis nesting count, before token.
+   procedure Put_Language_Action
+     (Data    : in Parse_Data_Type;
+      Content : in String);
+   --  Send a Language_Action message to Emacs.
 
-      First_Terminals_Index : WisiToken.Base_Token_Index := 
WisiToken.Invalid_Token_Index;
-      --  For virtual tokens, Invalid_Token_Index
-      --
-      --  For terminal tokens, index of this token in Parser.Terminals.
-      --
-      --  For nonterminal tokens, index of first contained token in
-      --  Parser.Terminals.
+   procedure Put (Data : in out Parse_Data_Type; Parser : in 
WisiToken.Parse.Base_Parser'Class);
+   --  Perform additional post-parse actions, then put result to
+   --  Ada.Text_IO.Current_Output, as encoded responses as defined in [3]
+   --  wisi-process-parse--execute.
 
-      Last_Terminals_Index : WisiToken.Base_Token_Index := 
WisiToken.Base_Token_Arrays.No_Index;
-      --  For non-virtual nonterminal tokens, index of last contained
-      --  token in Parser.Terminals.
-      --
-      --  For all others, same as First_Terminals_Index.
+   procedure Put_Errors (Tree : in WisiToken.Syntax_Trees.Tree);
+   --  Put errors in Tree to Ada.Text_IO.Current_Output,
+   --  as encoded error and recover responses as defined in [3]
+   --  wisi-process-parse--execute.
 
-      First_Indent_Line : WisiToken.Line_Number_Type := 
WisiToken.Invalid_Line_Number;
-      Last_Indent_Line  : WisiToken.Line_Number_Type := 
WisiToken.Invalid_Line_Number;
-      --  Lines that need indenting; first token on these lines is contained
-      --  in this token. If First is False, these are Invalid_Line_Number.
-      --
-      --  First_, Last_Indent_Line include blank and comment lines between
-      --  grammar tokens, but exclude trailing blanks and comments after the
-      --  last token, so they can be indented differently.
+   procedure Put_Error
+     (Tree        : in WisiToken.Syntax_Trees.Tree;
+      Line_Number : in WisiToken.Line_Number_Type;
+      Message     : in String);
+   --  Put an error elisp form to Ada.Text_IO.Current_Output.
 
-      First_Trailing_Comment_Line : WisiToken.Line_Number_Type := 
WisiToken.Invalid_Line_Number;
-      Last_Trailing_Comment_Line  : WisiToken.Line_Number_Type := 
WisiToken.Invalid_Line_Number;
-      --  Trailing comment or blank lines (after the last contained grammar
-      --  token) that need indenting. Excludes comments following code on a
-      --  line. If there are no such lines, these are Invalid_Line_Number.
+   function Integer_Filled_Image is new SAL.Generic_Decimal_Image (Integer);
 
-      Non_Grammar : Non_Grammar_Token_Arrays.Vector;
-      --  For terminals, non-grammar tokens immediately following. For
-      --  nonterminals, empty.
+private
 
-      Inserted_Before : WisiToken.Valid_Node_Index_Arrays.Vector;
-      --  Tokens inserted before this token by error recovery.
+   type Augmented is new WisiToken.Syntax_Trees.Base_Augmented with
+   record
+      Cache_Version : WisiToken.Cache_Version := WisiToken.Cache_Version'First;
 
+      Indenting : Wisi.Indenting;
+      --  Computed on demand; see Compute_Indenting.
    end record;
-   type Augmented_Token_Access is access all Augmented_Token;
-   type Augmented_Token_Access_Constant is access constant Augmented_Token;
-
-   type Aug_Token_Const_Ref (Element : not null access constant 
Augmented_Token) is null record with
-     Implicit_Dereference => Element;
-
-   function To_Aug_Token_Const_Ref (Item : in 
WisiToken.Base_Token_Class_Access) return Aug_Token_Const_Ref
-     is (Element => Augmented_Token_Access_Constant (Item));
-
-   type Aug_Token_Var_Ref (Element : not null access Augmented_Token) is null 
record with
-     Implicit_Dereference => Element;
+   type Augmented_Access is access all Augmented;
 
-   function To_Aug_Token_Var_Ref (Item : in WisiToken.Base_Token_Class_Access) 
return Aug_Token_Var_Ref
-     is (Element => Augmented_Token_Access (Item));
-
-   overriding
-   function Image
-     (Item       : in Augmented_Token;
-      Descriptor : in WisiToken.Descriptor)
-     return String;
-   --  Return a string for debug/test messages
-
-   function First_Line
-     (Token             : in Augmented_Token;
-      Indenting_Comment : in Boolean)
-     return WisiToken.Line_Number_Type;
-   function Last_Line
-     (Token             : in Augmented_Token;
-      Indenting_Comment : in Boolean)
-     return WisiToken.Line_Number_Type;
-   --  Return first and last line in Token's region.
-
-   package Line_Paren_Vectors is new SAL.Gen_Unbounded_Definite_Vectors
-     (WisiToken.Line_Number_Type, Integer, Default_Element => Integer'Last);
-   package Line_Begin_Pos_Vectors is new SAL.Gen_Unbounded_Definite_Vectors
-     (WisiToken.Line_Number_Type, WisiToken.Buffer_Pos, Default_Element => 
WisiToken.Invalid_Buffer_Pos);
+   function Get_Augmented
+     (Tree : in WisiToken.Syntax_Trees.Tree'Class;
+      Node : in WisiToken.Syntax_Trees.Valid_Node_Access)
+     return Augmented_Access;
+   --  Return Node.Augmented. If that is null, set it to the default
+   --  Augmented first.
 
    type Nil_Buffer_Pos (Set : Boolean := False) is record
       case Set is
@@ -475,8 +494,14 @@ private
 
    Nil : constant Nil_Buffer_Pos := (Set => False);
 
+   function Image (Item : in Nil_Buffer_Pos) return String
+   is (if Item.Set then Item.Item'Image else " nil");
+
    type Navigate_Cache_Type is record
-      Pos            : WisiToken.Buffer_Pos; -- implicit in [1] wisi-cache
+      Pos : WisiToken.Buffer_Pos;
+      --  Implicit in [1] wisi-cache. This is a character position in the
+      --  source text; it must be on a Source_Terminal (not a virtual 
terminal).
+
       Statement_ID   : WisiToken.Token_ID;   -- [1] wisi-cache-nonterm
       ID             : WisiToken.Token_ID;   -- [1] wisi-cache-token
       Length         : Natural;              -- [1] wisi-cache-last
@@ -501,6 +526,7 @@ private
 
    package Name_Cache_Trees is new SAL.Gen_Unbounded_Definite_Red_Black_Trees
      (WisiToken.Buffer_Region, WisiToken.Buffer_Pos);
+   --  Character positions of names.
 
    type Nil_Integer (Set : Boolean := False) is record
       case Set is
@@ -521,12 +547,14 @@ private
 
    package Face_Cache_Trees is new SAL.Gen_Unbounded_Definite_Red_Black_Trees 
(Face_Cache_Type, WisiToken.Buffer_Pos);
 
-   type Indent_Label is (Not_Set, Int, Anchor_Nil, Anchor_Int, Anchored, 
Anchor_Anchored);
-
-   package Anchor_ID_Vectors is new Ada.Containers.Vectors (Natural, Positive);
+   type Indent_Label is (Not_Set, Int, Anchored);
 
    type Indent_Type (Label : Indent_Label := Not_Set) is record
       --  Indent values may be negative while indents are being computed.
+
+      Controlling_Token_Line : WisiToken.Base_Line_Number_Type := 
WisiToken.Invalid_Line_Number;
+      --  See [2] Indent actions for description of controlling token.
+
       case Label is
       when Not_Set =>
          null;
@@ -534,34 +562,18 @@ private
       when Int =>
          Int_Indent : Integer;
 
-      when Anchor_Nil =>
-         Anchor_Nil_IDs : Anchor_ID_Vectors.Vector; --  Largest ID first.
-
-      when Anchor_Int =>
-         Anchor_Int_IDs    : Anchor_ID_Vectors.Vector; --  Largest ID first.
-         Anchor_Int_Indent : Integer; --  Indent for this token.
-
       when Anchored =>
-         Anchored_ID    : Positive;
-         Anchored_Delta : Integer; -- added to Anchor_Indent of Anchor_ID
-
-      when Anchor_Anchored =>
-         Anchor_Anchored_IDs   : Anchor_ID_Vectors.Vector;
-         Anchor_Anchored_ID    : Natural;
-         Anchor_Anchored_Delta : Integer;
+         Anchor_Line  : WisiToken.Line_Number_Type;
+         Anchor_Delta : Integer;
       end case;
    end record;
-   First_Anchor_ID : constant Positive := Positive'First;
 
    package Indent_Vectors is new SAL.Gen_Unbounded_Definite_Vectors
      (WisiToken.Line_Number_Type, Indent_Type, Default_Element => (others => 
<>));
    package Navigate_Cursor_Lists is new Ada.Containers.Doubly_Linked_Lists
      (Navigate_Cache_Trees.Cursor, Navigate_Cache_Trees."=");
 
-   type Parse_Data_Type
-     (Terminals        : not null access constant 
WisiToken.Base_Token_Arrays.Vector;
-      Line_Begin_Token : not null access constant 
WisiToken.Line_Begin_Token_Vectors.Vector)
-     is new WisiToken.Syntax_Trees.User_Data_Type with
+   type Parse_Data_Type is abstract new WisiToken.Syntax_Trees.User_Data_Type 
with
    record
       --  Aux token info
       First_Comment_ID : WisiToken.Token_ID := WisiToken.Invalid_Token_ID;
@@ -569,35 +581,17 @@ private
       Left_Paren_ID    : WisiToken.Token_ID := WisiToken.Invalid_Token_ID;
       Right_Paren_ID   : WisiToken.Token_ID := WisiToken.Invalid_Token_ID;
 
-      Embedded_Quote_Escape_Doubled : Boolean := False;
-
-      --  Data from parsing
-
-      --  All Augmented_Tokens are stored in the syntax tree.
-      Last_Terminal_Node : WisiToken.Node_Index := 
WisiToken.Invalid_Node_Index;
-
-      Leading_Non_Grammar : Non_Grammar_Token_Arrays.Vector;
-      --  non-grammar tokens before first grammar token.
-
-      Line_Begin_Char_Pos : Line_Begin_Pos_Vectors.Vector;
-      --  Character position at the start of the first token on each line.
-      --  Cached from Line_Begin_Token to simplify indent computations.
-
-      Line_Paren_State : Line_Paren_Vectors.Vector;
-      --  Parenthesis nesting state at the start of each line; used by
-      --  Indent. Set by Lexer_To_Augmented on New_Line_ID, updated by
-      --  Insert_Token, Delete_Token.
-
-      Current_Paren_State : Integer;
-      --  Current parenthesis nesting state; used by Indent. Set by
-      --  Lexer_To_Augmented on Left_Paren_ID, Right_Paren_ID.
+      Statement_IDs : WisiToken.Token_ID_Arrays.Vector;
+      --  Nonterms returned by containing_statement query.
 
       --  Data for post-parse actions
 
-      Lexer             : WisiToken.Lexer.Handle;
-      Descriptor        : access constant WisiToken.Descriptor;
-      Base_Terminals    : WisiToken.Base_Token_Array_Access;
-      Post_Parse_Action : Post_Parse_Action_Type;
+      Post_Parse_Action   : Post_Parse_Action_Type;
+      Action_Region_Bytes : WisiToken.Buffer_Region := 
WisiToken.Null_Buffer_Region;
+      Action_Region_Chars : WisiToken.Buffer_Region := 
WisiToken.Null_Buffer_Region;
+      Action_Region_Lines : WisiToken.Line_Region   := 
WisiToken.Null_Line_Region;
+      --  Actions are applied to tokens that overlap this region.
+
       Navigate_Caches   : Navigate_Cache_Trees.Tree;  -- Set by Navigate.
       Name_Caches       : Name_Cache_Trees.Tree;      -- Set by Navigate.
       End_Positions     : Navigate_Cursor_Lists.List; -- Dynamic data for 
Navigate.
@@ -608,29 +602,16 @@ private
       --  Copied from language-specific parameters
       Indent_Comment_Col_0 : Boolean := False;
 
-      --  Dynamic data for Indent
-      Max_Anchor_ID : Integer;
+      Augmented_Cache_Version : WisiToken.Cache_Version := 
WisiToken.Cache_Version'First + 1;
    end record;
 
    type Simple_Delta_Labels is (None, Int, Anchored);
 
-   --  subtype Non_Anchored_Delta_Labels is Simple_Delta_Labels range None .. 
Int;
-
-   --  type Non_Anchored_Delta (Label : Non_Anchored_Delta_Labels := None) is
-   --  record
-   --     case Label is
-   --     when None =>
-   --        null;
-   --     when Int =>
-   --        Int_Delta : Integer;
-   --     end case;
-   --  end record;
-
-   --  function Image (Item : in Non_Anchored_Delta) return String;
-   --  For debugging
-
    type Simple_Delta_Type (Label : Simple_Delta_Labels := None) is
    record
+      Controlling_Token_Line : WisiToken.Base_Line_Number_Type;
+      --  If Invalid_Line_Number, delta should not be ignored.
+
       case Label is
       when None =>
          null;
@@ -639,9 +620,8 @@ private
          Int_Delta : Integer;
 
       when Anchored =>
-         Anchored_ID         : Natural;
-         Anchored_Delta      : Integer;
-         Anchored_Accumulate : Boolean;
+         Anchor_Line    : WisiToken.Line_Number_Type;
+         Anchored_Delta : Integer;
 
       end case;
    end record;
@@ -658,15 +638,17 @@ private
          Simple_Delta : Simple_Delta_Type;
 
       when Hanging =>
-         Hanging_First_Line  : WisiToken.Line_Number_Type;
-         Hanging_Paren_State : Integer;
-         Hanging_Delta_1     : Simple_Delta_Type; -- indentation of first line
-         Hanging_Delta_2     : Simple_Delta_Type; -- indentation of 
continuation lines
-         Hanging_Accumulate  : Boolean;
+         Hanging_First_Line : WisiToken.Line_Number_Type;
+
+         Hanging_Delta_1 : Simple_Delta_Type;
+         --  Indentation of first line in token; Null_Delta if first line does
+         --  not need indenting
+
+         Hanging_Delta_2 : Simple_Delta_Type; -- indentation of continuation 
lines
       end case;
    end record;
 
-   Null_Delta : constant Delta_Type := (Simple, (Label => None));
+   Null_Delta : constant Delta_Type := (Simple, (None, 
WisiToken.Invalid_Line_Number));
 
    function Image (Item : in Delta_Type) return String;
    --  For debugging
@@ -674,96 +656,86 @@ private
    ----------
    --  Utilities for language-specific child packages
 
+   Emacs_Lisp_New_Line : constant String := "\n";
+   --  For including New_Line in a text string sent to Emacs.
+
+   function Compute_Indenting
+     (Data : in Parse_Data_Type'Class;
+      Tree : in WisiToken.Syntax_Trees.Tree;
+      Node : in WisiToken.Syntax_Trees.Valid_Node_Access)
+     return Wisi.Indenting
+   with Pre => Tree.Line_Region (Node, Trailing_Non_Grammar => False) /= 
WisiToken.Null_Line_Region and
+               Tree.SOI /= Node and Tree.EOI /= Node;
+   --  Return Node.Augmented.Indenting, computing it first if needed.
+
    function Current_Indent_Offset
-     (Data         : in Parse_Data_Type;
-      Anchor_Token : in Augmented_Token'Class;
+     (Tree         : in WisiToken.Syntax_Trees.Tree'Class;
+      Anchor_Token : in WisiToken.Syntax_Trees.Valid_Node_Access;
       Offset       : in Integer)
      return Integer;
    --  Return offset from beginning of first token on line containing
    --  Anchor_Token, to beginning of Anchor_Token, plus Offset.
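(A hedged worked example of that definition, as a throwaway Emacs Lisp computation; the column numbers are made up for the illustration:)

    ;; Illustration only: the anchor token starts at column 8, the
    ;; first token on its line starts at column 3, and Offset is 2.
    (let ((line-first-col 3) (anchor-col 8) (offset 2))
      (+ (- anchor-col line-first-col) offset)) ;; => 7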
 
-   function Get_Aug_Token_Const_1
-     (Tree       : in WisiToken.Syntax_Trees.Tree'Class;
-      Tree_Index : in WisiToken.Valid_Node_Index)
-     return Aug_Token_Const_Ref;
-   --  WORKAROUND: GNAT Community 2019 can't do the overload resolution
-   --  between the two Get_Aug_Token_Const without an explicit renames,
-   --  so we add _1 to this one.
-
-   function Get_Aug_Token_Const
-     (Data  : in Parse_Data_Type;
-      Tree  : in WisiToken.Syntax_Trees.Tree'Class;
-      Token : in WisiToken.Token_Index)
-     return Aug_Token_Const_Ref;
-
-   function Get_Aug_Token_Var
-     (Tree       : in WisiToken.Syntax_Trees.Tree'Class;
-      Tree_Index : in WisiToken.Valid_Node_Index)
-     return Aug_Token_Var_Ref;
-
-   function Get_Aug_Token_Var
-     (Data  : in Parse_Data_Type;
-      Tree  : in WisiToken.Syntax_Trees.Tree'Class;
-      Token : in WisiToken.Token_Index)
-     return Aug_Token_Var_Ref;
-
-   --  function Get_First_Terminal
-   --    (Data  : in Parse_Data_Type;
-   --     Tree  : in WisiToken.Syntax_Trees.Tree'Class;
-   --     Token : in WisiToken.Token_Index)
-   --    return Aug_Token_Const_Ref;
-   --  Return Augmented for first Token.Inserted_Before, or if that is
-   --  empty, for Token.
-
    function Get_Text
      (Data       : in Parse_Data_Type;
       Tree       : in WisiToken.Syntax_Trees.Tree;
-      Tree_Index : in WisiToken.Valid_Node_Index)
+      Tree_Index : in WisiToken.Syntax_Trees.Valid_Node_Access)
      return String;
    --  Return text contained by Tree_Index token in source file
    --  (lexer.buffer).
 
-   function Elisp_Escape_Quotes (Item : in String) return String;
-   --  Prefix any '"' in Item with '\' for elisp.
-
    function Indent_Anchored_2
-     (Data        : in out Parse_Data_Type;
-      Anchor_Line : in     WisiToken.Line_Number_Type;
-      Last_Line   : in     WisiToken.Line_Number_Type;
-      Offset      : in     Integer;
-      Accumulate  : in     Boolean)
+     (Data              : in Parse_Data_Type'Class;
+      Tree              : in WisiToken.Syntax_Trees.Tree;
+      Anchor_Token      : in WisiToken.Syntax_Trees.Valid_Node_Access;
+      Indenting_Token   : in WisiToken.Syntax_Trees.Valid_Node_Access;
+      Indenting_Comment : in Boolean;
+      Offset            : in Integer)
      return Delta_Type;
+   --  If Anchor_Token.Line = Indenting_Token.Line, return Null_Delta. 
Otherwise
+   --  return an anchored delta using Anchor_Token.Line, Offset.
 
    function Indent_Compute_Delta
      (Data              : in out Parse_Data_Type'Class;
       Tree              : in     WisiToken.Syntax_Trees.Tree;
-      Tokens            : in     WisiToken.Valid_Node_Index_Array;
+      Nonterm           : in     WisiToken.Syntax_Trees.Valid_Node_Access;
       Param             : in     Indent_Param;
-      Tree_Indenting    : in     WisiToken.Valid_Node_Index;
+      Indenting_Token   : in     WisiToken.Syntax_Trees.Valid_Node_Access;
       Indenting_Comment : in     Boolean)
      return Delta_Type;
+   --  Return indent defined by Param for Indenting_Token in Nonterm.
 
    procedure Indent_Token_1
      (Data              : in out Parse_Data_Type;
       Tree              : in     WisiToken.Syntax_Trees.Tree;
-      Indenting_Token   : in     Augmented_Token'Class;
+      Line_Region       : in     WisiToken.Line_Region;
       Delta_Indent      : in     Delta_Type;
-      Indenting_Comment : in     Boolean);
+      Indenting_Comment : in     Indenting_Comment_Label;
+      Controlling_Delta : in     Delta_Type := Null_Delta);
+   --  Apply Delta_Indent to lines in Line_Region.
+   --
+   --  Controlling_Delta should be Null_Delta if Indenting_Comment is
+   --  None; it should be any existing indent for
+   --  Controlling_Token.Line_Region.[First | Last] if Indenting_Comment
+   --  is Leading | Trailing. This allows adding previously computed
+   --  indents for the token controlling a comment line to the comment
+   --  line indent.
+   --
    --  Sets Data.Indents, so caller may not be in a renames for a
    --  Data.Indents element.
 
    --  Visible for language-specific children. Must match list in
    --  [3] wisi-process-parse--execute.
-   Navigate_Cache_Code  : constant String := "1";
-   Face_Property_Code   : constant String := "2";
-   Indent_Code          : constant String := "3";
-   Lexer_Error_Code     : constant String := "4";
-   Parser_Error_Code    : constant String := "5";
-   Check_Error_Code     : constant String := "6";
-   Recover_Code         : constant String := "7 ";
-   End_Code             : constant String := "8";
-   Name_Property_Code   : constant String := "9";
-   Edit_Action_Code     : constant String := "10";
-   Language_Action_Code : constant String := "11 ";
-
+   Navigate_Cache_Code        : constant String := "1";
+   Face_Property_Code         : constant String := "2";
+   Indent_Code                : constant String := "3";
+   Lexer_Error_Code           : constant String := "4";
+   Parser_Error_Code          : constant String := "5";
+   In_Parse_Action_Error_Code : constant String := "6";
+   Recover_Code               : constant String := "7 ";
+   End_Code                   : constant String := "8";
+   Name_Property_Code         : constant String := "9";
+   Edit_Action_Code           : constant String := "10";
+   Language_Action_Code       : constant String := "11 "; -- used by 
wisitoken_grammar for Check_Parens
+   Query_Tree_Code            : constant String := "12";
 end Wisi;
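(These numeric codes tag the responses the parser process writes and the elisp side dispatches on in [3] wisi-process-parse--execute. Below is a hedged sketch of that style of dispatch; the handler bodies are placeholders, not the real wisi implementation.)

    ;; Illustration only: dispatch on the leading numeric response
    ;; code, in the style described above. The real dispatcher is
    ;; wisi-process-parse--execute.
    (defun my-wisi-handle-response (line)
      "Handle LINE, a parser response starting with a numeric code."
      (pcase (car (read-from-string line))
        (1  (message "navigate cache: %s" line))
        (3  (message "indent: %s" line))
        (12 (message "query-tree: %s" line))
        (_  (message "unhandled response: %s" line))))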
diff --git a/wisi.el b/wisi.el
index 2b4d318441..52bbf93649 100644
--- a/wisi.el
+++ b/wisi.el
@@ -1,4 +1,4 @@
-;;; wisi.el --- Utilities for implementing an indentation/navigation engine 
using a generalized LALR parser -*- lexical-binding:t -*-
+;;; wisi.el --- Utilities for implementing an indentation/navigation engine 
using a generalized LR parser -*- lexical-binding:t -*-
 ;;
 ;; Copyright (C) 2012 - 2022  Free Software Foundation, Inc.
 ;;
@@ -7,7 +7,7 @@
 ;; Keywords: parser
 ;;  indentation
 ;;  navigation
-;; Version: 3.1.8
+;; Version: 3.1.3
 ;; package-requires: ((emacs "25.3") (seq "2.20"))
 ;; URL: http://stephe-leake.org/ada/wisitoken.html
 ;;
@@ -38,33 +38,37 @@
 ;; 5.0 indentation, font-lock, and navigation, which are parser based.
 ;;
 ;; The approach to indenting a given token is to parse the buffer,
-;; computing a delta indent at each parse action.
+;; computing a delta indent for each token in a grammar production in
+;; a post-parse action.
 ;;
-;; The parser actions also cache face and navigation information
-;; as text properties on tokens in statements.
+;; Other post-parse actions cache face and navigation information as
+;; text properties on tokens.
 ;;
 ;; The three reasons to run the parser (indent, face, navigate) occur
 ;; at different times (user indent, font-lock, user navigate), so only
 ;; the relevant parser actions are run.
 ;;
 ;; Parsing can be noticeably slow in large files, so sometimes we do a
-;; partial parse, and keep a list of parsed regions.
+;; partial or incremental parse. We keep a list of regions where the
+;; post-parse actions have been run and the results are valid, to
+;; determine what needs to be parsed or updated.
 ;;
 ;; Since we have a cache (the text properties), we need to consider
 ;; when to invalidate it.  Ideally, we invalidate only when a change
-;; to the buffer would change the result of a parse that crosses that
-;; change, or starts after that change.  Changes in whitespace
-;; (indentation and newlines) do not affect an Ada parse.  Other
-;; languages are sensitive to newlines (Bash for example) or
-;; indentation (Python).  Adding comments does not change a parse,
-;; unless code is commented out.
+;; to the buffer would change the result of a parse or post-parse
+;; action that crosses that change, or starts after that change.
+;; Changes in whitespace (indentation and newlines) do not affect an
+;; Ada parse.  Other languages are sensitive to newlines (Bash for
+;; example) or indentation (Python).  Adding comments does not change
+;; a parse, unless code is commented out.
 ;;
 ;; For navigate, we expect fully accurate results, and can tolerate
-;; one initial delay, so we always parse the entire file.
+;; one initial delay, so we always parse the entire file, and then use
+;; incremental parse for updates.
 ;;
 ;; For font-lock, we only parse the portion of the file requested by
-;; font-lock, so we keep a list of regions, and edit that list when
-;; the buffer is changed..
+;; font-lock, using the list of parsed regions, and edit that list
+;; when the buffer is changed.
 ;;
 ;; For indenting, we expect fast results, and can tolerate some
 ;; inaccuracy until the editing is done, so we allow partial parse. We
@@ -87,8 +91,8 @@
 ;; - SMIE
 ;;
 ;;   We don't use this because it is designed to parse small snippets
-;;   of code. For Ada indentation, we always need to parse the entire
-;;   buffer.
+;;   of code. For Ada (and some other languages) indentation, we
+;;   always need to parse the entire buffer.
 ;;
 ;; - semantic
 ;;
@@ -99,8 +103,10 @@
 ;;   correction, and thus fails in most editing situations.
 ;;
 ;; We use the WisiToken tool wisi-bnf-generate to compile BNF or EBNF
-;; to Ada source, See ada-mode.info and wisi.info for more information
-;; on the developer tools used for ada-mode and wisi.
+;; to Ada source. WisiToken provides a generalized LR parser, with
+;; robust error correction and incremental or partial parse. See
+;; ada-mode.info and wisi.info for more information on the developer
+;; tools used for wisi.
 ;;
 ;;; Code:
 
@@ -112,6 +118,18 @@
 (require 'wisi-fringe)
 (require 'xref)
 
+(defgroup wisi nil
+  "Options for Wisi package."
+  :group 'programming)
+
+(defcustom wisi-process-time-out 5.0
+  "Time out waiting for parser response. An error occurs if there
+  is no response from the parser after waiting this amount (in
+  seconds)."
+  :type 'float
+  :safe 'numberp)
+(make-variable-buffer-local 'wisi-process-time-out)
+
 (defcustom wisi-size-threshold most-positive-fixnum
   "Max size (in characters) for using wisi parser results for anything."
   :type 'integer
@@ -134,6 +152,18 @@ Useful when debugging parser or parser actions."
   :group 'wisi
   :safe 'booleanp)
 
+(defcustom wisi-incremental-parse-enable nil
+  "If non-nil, use incremental parse when possible."
+  :type 'boolean
+  :group 'wisi
+  :safe 'booleanp)
+
+(defcustom wisi-parse-full-background t
+  "If non-nil, do initial full parse in background."
+  :type 'boolean
+  :group 'wisi
+  :safe 'booleanp)
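(As a hedged usage sketch, the two new options above could be set from an init file as shown below; whether these are the recommended settings is documented in the wisi and ada-mode manuals, not here.)

    ;; Sketch: enable incremental parse, and run the initial full
    ;; parse in the foreground instead of the background.
    (setq wisi-incremental-parse-enable t)
    (setq wisi-parse-full-background nil)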
+
 (defconst wisi-error-buffer-name "*wisi syntax errors*"
   "Name of buffer for displaying syntax errors.")
 
@@ -202,8 +232,6 @@ If PARSE-RESULT is non-nil, use it instead of calling 
`syntax-ppss'."
   (forward-char 1)
   (funcall indent-line-function))
 
-;;;; token info cache
-
 (defvar-local wisi-parse-failed nil
   "Non-nil when last parse failed - cleared when parse succeeds.")
 
@@ -215,11 +243,11 @@ If PARSE-RESULT is non-nil, use it instead of calling 
`syntax-ppss'."
   "Non-nil when parse is needed due to text change.
 Cleared when parse succeeds.")
 
-(defun wisi-parse-try (&optional parse-action)
-  (cdr (assoc (or parse-action wisi--parse-action) wisi--parse-try)))
+(defun wisi-parse-try (parse-action)
+  (cdr (assoc parse-action wisi--parse-try)))
 
-(defun wisi-set-parse-try (value &optional parse-action)
-  (setcdr (assoc (or parse-action wisi--parse-action) wisi--parse-try) value))
+(defun wisi-set-parse-try (value parse-action)
+  (setcdr (assoc parse-action wisi--parse-try) value))
 
 (defvar-local wisi--last-parse-region
   (list
@@ -228,8 +256,8 @@ Cleared when parse succeeds.")
    (cons 'indent nil))
   "Last region on which parse was requested.")
 
-(defun wisi-last-parse-region (&optional parse-action)
-  (cdr (assoc (or parse-action wisi--parse-action) wisi--last-parse-region)))
+(defun wisi-last-parse-region (parse-action)
+  (cdr (assoc parse-action wisi--last-parse-region)))
 
 (defun wisi-set-last-parse-region (begin end parse-action)
   (setcdr (assoc parse-action wisi--last-parse-region) (cons begin end)))
@@ -243,7 +271,7 @@ Cleared when parse succeeds.")
 Regions in a list are in random order.")
 
 (defun wisi--contained-region (begin end region)
-  "Non-nil if BEGIN and END (buffer positions) are both contained in REGION (a 
cons of positions)."
+  "Non-nil if BEGIN and END are both contained in REGION (a cons of 
positions)."
   ;; We assume begin < end
   (and (<= (car region) begin)
        (<= end (cdr region))))
@@ -253,9 +281,9 @@ Regions in a list are in random order.")
   (and (<= (car region) pos)
        (<= pos (cdr region))))
 
-(defun wisi-cache-covers-region (begin end &optional parse-action)
+(defun wisi-cache-covers-region (begin end parse-action)
   "Non-nil if BEGIN END is contained in a parsed region."
-  (let ((region-list (cdr (assoc (or parse-action wisi--parse-action) 
wisi--cached-regions)))
+  (let ((region-list (cdr (assoc parse-action wisi--cached-regions)))
        region)
     (while (and region-list
                (marker-buffer (caar region-list)) ;; this can fail after 
editing during ediff-regions.
@@ -289,17 +317,17 @@ Regions in a list are in random order.")
 
     result))
 
-(defun wisi-cache-set-region (region)
-  "Set the cached region list for `wisi--parse-action' to REGION."
-  (setcdr (assoc wisi--parse-action wisi--cached-regions)
+(defun wisi-cache-set-region (region parse-action)
+  "Set the cached region list for PARSE-ACTION to REGION."
+  (setcdr (assoc parse-action wisi--cached-regions)
          (list (cons (copy-marker (car region))
                      (copy-marker (cdr region))))))
 
-(defun wisi-cache-add-region (region)
-  "Add REGION to the cached region list for `wisi--parse-action'."
+(defun wisi-cache-add-region (region parse-action)
+  "Add REGION to the cached region list for PARSE-ACTION."
   (push (cons (copy-marker (car region))
              (copy-marker (cdr region)))
-       (cdr (assoc wisi--parse-action wisi--cached-regions))))
+       (cdr (assoc parse-action wisi--cached-regions))))
 
 (defun wisi-cache-delete-regions-after (parse-action pos)
   "Delete any PARSE-ACTION parsed region at or after POS.
@@ -325,8 +353,9 @@ Truncate any region that overlaps POS."
     ))
 
 (defun wisi--delete-face-cache (after)
-  (with-silent-modifications
-    (remove-text-properties after (point-max) '(font-lock-face nil)))
+  ;; We don't do remove-text-properties here; done in
+  ;; wisi-fontify-region to avoid flicker and bad fontify due to
+  ;; syntax errors.
   (if (= after (point-min))
       (setcdr (assoc 'face wisi--cached-regions) nil)
     (wisi-cache-delete-regions-after 'face after)))
@@ -369,7 +398,8 @@ Truncate any region that overlaps POS."
       (wisi--delete-face-cache after))
 
      ((eq 'navigate action)
-      (when (wisi-cache-covers-pos 'navigate after)
+      (when (and (not wisi-incremental-parse-enable)
+                (wisi-cache-covers-pos 'navigate after))
        ;; We goto statement start to ensure that motion within nested
        ;; structures is properly done (ie prev/next on ’elsif’ is not
        ;; set by wisi-motion-action if already set by a lower level
@@ -383,7 +413,9 @@ Truncate any region that overlaps POS."
          ;; call that because it would call ‘wisi-validate-cache’,
          ;; which would call ‘wisi-invalidate-cache’; infinite loop.
          ;; If this needed a navigate parse to succeed, we would not
-         ;; get here.
+         ;; get here. FIXME: not true for incremental parse. Best
+         ;; solution is to always ask the parser for the correct pos,
+         ;; not use text property caches.
          (let ((cache (or (wisi-get-cache (point))
                           (wisi-backward-cache))))
            (cond
@@ -419,23 +451,45 @@ Truncate any region that overlaps POS."
      )
     )))
 
-(defun wisi-reset-parser ()
-  "Force a parse."
-  (interactive)
+(defun wisi-force-parse ()
+  "Force a parse when `wisi-validate-cache' is next invoked.
+For debugging."
+  (setf (wisi-parser-local-lexer-errors wisi-parser-local) nil)
+  (setf (wisi-parser-local-parse-errors wisi-parser-local) nil)
   (syntax-ppss-flush-cache (point-min)) ;; necessary after edit during 
ediff-regions
+
+  (setq wisi--changes nil)
+  (setq wisi--change-beg most-positive-fixnum)
+  (setq wisi--change-end nil)
+  (setq wisi--deleted-syntax nil)
+  (setq wisi-indenting-p nil)
+
   (setq wisi--cached-regions ;; necessary instead of wisi-invalidate after 
ediff-regions
        (list
         (cons 'face nil)
         (cons 'navigate nil)
         (cons 'indent nil)))
+  (setq wisi-parse-failed nil)
   (wisi-set-parse-try t 'indent)
   (wisi-set-parse-try t 'face)
   (wisi-set-parse-try t 'navigate)
   (wisi-set-last-parse-region (point-min) (point-min) 'indent)
   (wisi-set-last-parse-region (point-min) (point-min) 'face)
   (wisi-set-last-parse-region (point-min) (point-min) 'navigate)
+  (wisi-invalidate-cache 'indent (point-min))
+  (wisi-invalidate-cache 'face (point-min))
+  (with-silent-modifications
+    (remove-text-properties (point-min) (point-max) '(font-lock-face nil 
fontified nil)))
+  (wisi-invalidate-cache 'navigate (point-min))
   (wisi-fringe-clean))
 
+(defun wisi-reset-parser ()
+  "Delete any saved parse state, force a parse."
+  (interactive)
+  (wisi-force-parse)
+  (when wisi-parser-shared
+    (wisi-parse-reset wisi-parser-shared)))
+
 ;; wisi--change-* keep track of buffer modifications.
 ;; If wisi--change-end comes before wisi--change-beg, it means there were
 ;; no modifications.
@@ -458,45 +512,59 @@ Set by `wisi-before-change', used and reset by 
`wisi--post-change'.")
   "Non-nil when `wisi-indent-region' is actively indenting.
 Used to ignore whitespace changes in before/after change hooks.")
 
-(defvar-local wisi--last-parse-action nil
-  "Value of `wisi--parse-action' when `wisi-validate-cache' was last run.")
+(defvar-local wisi--changes nil
+  "Cached list of args to wisi-after-change, for incremental parse.
+Each element is
+(INSERT-BEGIN-BYTE-POS INSERT-BEGIN-CHAR-POS
+ INSERT-END-BYTE-POS INSERT-END-CHAR-POS
+ DELETED-BYTE-COUNT DELETED-CHAR-COUNT INSERTED-TEXT)")
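(A hedged worked example of one such element, assuming an all-ASCII buffer so byte and character positions coincide; it mirrors the downcase-word case described further down in wisi-after-change, where only the "F" of "First" at position 10 is actually replaced.)

    ;; Illustration only: downcasing "First" at position 10 replaces
    ;; just the "F", so the recorded change is one deleted character
    ;; and the inserted text "f".
    (let ((change (list 10 10   ;; INSERT-BEGIN-BYTE-POS INSERT-BEGIN-CHAR-POS
                        11 11   ;; INSERT-END-BYTE-POS INSERT-END-CHAR-POS
                        1 1     ;; DELETED-BYTE-COUNT DELETED-CHAR-COUNT
                        "f")))  ;; INSERTED-TEXT
      (nth 6 change)) ;; => "f"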
+
+(defvar-local wisi--affected-text 0
+  "Cached text of range passed to `wisi-before-change',
+used by `wisi-after-change' to get byte count of actual
+deleted range.")
 
 (defun wisi-before-change (begin end)
   "For `before-change-functions'."
   ;; begin . (1- end) is range of text being deleted
-  (unless wisi-indenting-p
-    ;; We set wisi--change-beg, -end even if only inserting, so we
-    ;; don't have to do it again in wisi-after-change.
-    (setq wisi--change-beg (min wisi--change-beg begin))
-
-    ;; `buffer-base-buffer' deals with edits in indirect buffers
-    ;; created by ediff-regions-*
+  (with-demoted-errors "wisi-before-change signaled: %s"
+    (when wisi-incremental-parse-enable
+      (setq wisi--affected-text (buffer-substring-no-properties begin end)))
 
-    (cond
-     ((null wisi--change-end)
-      (setq wisi--change-end (make-marker))
-      (set-marker wisi--change-end end (or (buffer-base-buffer) 
(current-buffer))))
+    (unless wisi-indenting-p
+      ;; We set wisi--change-beg, -end even if only inserting, so we
+      ;; don't have to do it again in wisi-after-change.
+      (setq wisi--change-beg (min wisi--change-beg begin))
 
-     ((> end wisi--change-end)
-      (set-marker wisi--change-end end (or (buffer-base-buffer) 
(current-buffer))))
-     )
+      ;; `buffer-base-buffer' deals with edits in indirect buffers
+      ;; created by ediff-regions-*
 
-    (unless (= begin end)
       (cond
-       ((or (null wisi--deleted-syntax)
-           (= 0 wisi--deleted-syntax))
-       (save-excursion
-         (if (or (nth 4 (syntax-ppss begin)) ; in comment, moves point to begin
-                 (= end (skip-syntax-forward " " end)));; whitespace
-             (setq wisi--deleted-syntax 0)
-           (setq wisi--deleted-syntax 2))))
+       ((null wisi--change-end)
+       (setq wisi--change-end (make-marker))
+       (set-marker wisi--change-end end (or (buffer-base-buffer) 
(current-buffer))))
 
-       (t
-       ;; wisi--deleted-syntax is 2; no change.
-       )
-       ))))
+       ((> end wisi--change-end)
+       (set-marker wisi--change-end end (or (buffer-base-buffer) 
(current-buffer))))
+       )
+
+      (unless (= begin end)
+       (cond
+        ((or (null wisi--deleted-syntax)
+             (= 0 wisi--deleted-syntax))
+         (save-excursion
+           (if (or (nth 4 (syntax-ppss begin)) ; in comment, moves point to 
begin
+                   (= end (skip-syntax-forward " " end)));; whitespace
+               (setq wisi--deleted-syntax 0)
+             (setq wisi--deleted-syntax 2))))
+
+        (t
+         ;; wisi--deleted-syntax is 2; no change.
+         )
+        )))
+    ))
 
-(defun wisi-after-change (begin end _length)
+(defun wisi-after-change (begin end length)
   "For `after-change-functions'"
   ;; begin . end is range of text being inserted (empty if equal);
   ;; length is the size of the deleted text.
@@ -508,30 +576,37 @@ Used to ignore whitespace changes in before/after change 
hooks.")
   ;; remove fontification from the entire word, so it is all
   ;; refontified consistently.
 
-  (let (word-begin word-end)
-    (save-excursion
-      (goto-char end)
-      (skip-syntax-forward "w_")
-      (setq word-end (point))
-      (goto-char begin)
-      (skip-syntax-backward "w_")
-      (setq word-begin (point)))
-    (if (get-text-property word-begin 'font-lock-face)
-       (with-silent-modifications
-         (remove-text-properties
-          word-begin word-end
-          '(font-lock-face nil wisi-cache nil wisi-indent nil fontified nil)))
-
-      ;; No point in removing
-      ;; 'fontified here; that's already handled by jit-lock.
+  (with-demoted-errors "wisi-after-change signaled: %s"
+    (when wisi-incremental-parse-enable
+      ;; Sometimes length disagrees with (begin end) passed to
+      ;; wisi-before-change. For example, for 'downcase-word applied to
+      ;; "First", before-change (begin end) is entire word, but
+      ;; after-change length is 1, because only the F is actually
+      ;; replaced.
+      (let ((deleted (substring wisi--affected-text 0 length)))
+       (push
+        (list (position-bytes begin) begin (position-bytes end) end
+              (string-bytes deleted) length
+              (buffer-substring-no-properties begin end))
+        wisi--changes)))
+
+    (let (word-begin word-end)
+      (save-excursion
+       (goto-char end)
+       (skip-syntax-forward "w_")
+       (setq word-end (point))
+       (goto-char begin)
+       (skip-syntax-backward "w_")
+       (setq word-begin (point)))
       (with-silent-modifications
-       (remove-text-properties
-        begin end
-        '(font-lock-face nil wisi-cache nil wisi-indent nil))))
+           (remove-text-properties
+            word-begin word-end
+            '(font-lock-face nil wisi-cache nil wisi-indent nil fontified 
nil)))
+      )
     ))
 
 (defun wisi--post-change (begin end)
-  "Update wisi text properties for changes in region BEG END."
+  "Invalidate wisi text properties for changes in region BEGIN END."
   ;; (syntax-ppss-flush-cache begin) is in before-change-functions
 
   (save-excursion
@@ -633,8 +708,8 @@ Used to ignore whitespace changes in before/after change 
hooks.")
 (defun wisi-goto-error ()
   "Move point to position in last error message (if any)."
   (cond
-   ((wisi-parser-parse-errors wisi--parser)
-    (let ((data (car (wisi-parser-parse-errors wisi--parser))))
+   ((wisi-parser-local-parse-errors wisi-parser-local)
+    (let ((data (car (wisi-parser-local-parse-errors wisi-parser-local))))
       (cond
        ((wisi--parse-error-pos data)
        (push-mark)
@@ -655,23 +730,23 @@ Used to ignore whitespace changes in before/after change 
hooks.")
             ;; just stay at eob.
             nil))))
        )))
-   ((wisi-parser-lexer-errors wisi--parser)
+   ((wisi-parser-local-lexer-errors wisi-parser-local)
     (push-mark)
-    (goto-char (wisi--lexer-error-pos (car (wisi-parser-lexer-errors 
wisi--parser)))))
+    (goto-char (wisi--lexer-error-pos (car (wisi-parser-local-lexer-errors 
wisi-parser-local)))))
    ))
 
 (defun wisi-show-parse-error ()
   "Show current wisi-parse errors."
   (interactive)
   (cond
-   ((or (wisi-parser-lexer-errors wisi--parser)
-       (wisi-parser-parse-errors wisi--parser))
-    (if (and (= 1 (+ (length (wisi-parser-lexer-errors wisi--parser))
-                    (length (wisi-parser-parse-errors wisi--parser))))
-            (or (and (wisi-parser-parse-errors wisi--parser)
-                     (not (wisi--parse-error-repair (car 
(wisi-parser-parse-errors wisi--parser)))))
-                (and (wisi-parser-lexer-errors wisi--parser)
-                     (not (wisi--lexer-error-inserted (car 
(wisi-parser-lexer-errors wisi--parser)))))))
+   ((or (wisi-parser-local-lexer-errors wisi-parser-local)
+       (wisi-parser-local-parse-errors wisi-parser-local))
+    (if (and (= 1 (+ (length (wisi-parser-local-lexer-errors 
wisi-parser-local))
+                    (length (wisi-parser-local-parse-errors 
wisi-parser-local))))
+            (or (and (wisi-parser-local-parse-errors wisi-parser-local)
+                     (not (wisi--parse-error-repair (car 
(wisi-parser-local-parse-errors wisi-parser-local)))))
+                (and (wisi-parser-local-lexer-errors wisi-parser-local)
+                     (not (wisi--lexer-error-inserted (car 
(wisi-parser-local-lexer-errors wisi-parser-local)))))))
        ;; There is exactly one error; if there is error correction
        ;; information, use a ’compilation’ buffer, so
        ;; *-fix-compiler-error will call
@@ -679,18 +754,18 @@ Used to ignore whitespace changes in before/after change 
hooks.")
        ;; error.
        (progn
          (wisi-goto-error)
-         (message (or (and (wisi-parser-parse-errors wisi--parser)
-                           (wisi--parse-error-message (car 
(wisi-parser-parse-errors wisi--parser))))
-                      (and (wisi-parser-lexer-errors wisi--parser)
-                           (wisi--lexer-error-message (car 
(wisi-parser-lexer-errors wisi--parser)))))
+         (message (or (and (wisi-parser-local-parse-errors wisi-parser-local)
+                           (wisi--parse-error-message (car 
(wisi-parser-local-parse-errors wisi-parser-local))))
+                      (and (wisi-parser-local-lexer-errors wisi-parser-local)
+                           (wisi--lexer-error-message (car 
(wisi-parser-local-lexer-errors wisi-parser-local)))))
                   ))
 
       ;; else show all errors in a ’compilation’ buffer
       (setq wisi-error-buffer (get-buffer-create wisi-error-buffer-name))
 
-      (let ((lexer-errs (sort (cl-copy-seq (wisi-parser-lexer-errors 
wisi--parser))
-                             (lambda (a b) (< (wisi--parse-error-pos a) 
(wisi--parse-error-pos b)))))
-           (parse-errs (sort (cl-copy-seq (wisi-parser-parse-errors 
wisi--parser))
+      (let ((lexer-errs (sort (cl-copy-seq (wisi-parser-local-lexer-errors 
wisi-parser-local))
+                             (lambda (a b) (< (wisi--lexer-error-pos a) 
(wisi--lexer-error-pos b)))))
+           (parse-errs (sort (cl-copy-seq (wisi-parser-local-parse-errors 
wisi-parser-local))
                              (lambda (a b) (< (wisi--parse-error-pos a) 
(wisi--parse-error-pos b)))))
            (dir default-directory))
        (with-current-buffer wisi-error-buffer
@@ -733,37 +808,51 @@ Used to ignore whitespace changes in before/after change 
hooks.")
        (next-error))
       ))
 
-   ((wisi-parse-try wisi--last-parse-action)
-    (message "need parse"))
-
    (t
     (message "parse succeeded"))
    ))
 
 (defun wisi-kill-parser ()
   "Kill the background process running the parser for the current buffer.
-Usefull if the parser appears to be hung."
+Useful if the parser appears to be hung."
   (interactive)
-  (wisi-parse-kill wisi--parser)
-  ;; also force re-parse
-  (wisi-reset-parser)
-  )
+  (wisi-parse-kill wisi-parser-shared)
+  (wisi-force-parse))
 
 (defun wisi-partial-parse-p (begin end)
-  (and (wisi-process--parser-p wisi--parser)
-       (not (and (= begin (point-min))
-                (= end (point-max))))
-       (>= (point-max) wisi-partial-parse-threshold)))
-
-(defun wisi--run-parse (begin parse-end)
-  "Run the parser, on at least region BEGIN PARSE-END."
-  (unless (or (buffer-narrowed-p)
-             (= (point-min) (point-max))) ;; some parsers can’t handle an 
empty buffer.
+  (or (buffer-narrowed-p)
+      ;; If narrowed, we are in a submode of mmm-mode, or the user has
+      ;; narrowed for some other reason. We must use partial parse,
+      ;; not incremental, because we can't trust the saved
+      ;; parse_context.
+
+      (and (not wisi-incremental-parse-enable)
+          (not (and (= begin (point-min))
+                    (= end (point-max))))
+          (>= (point-max) wisi-partial-parse-threshold))))
+
+(defun wisi--run-parse (parse-action begin parse-end)
+  "Run post-parse actions on region BEGIN PARSE-END.
+Run the parser first if needed."
+  ;; The buffer might be narrowed for several reasons: the user
+  ;; narrowed to an arbitrary region, the user narrowed to a complete
+  ;; subprogram, or we are in an mmm-mode submode indenting an Ada
+  ;; subregion. In the latter two cases we don't need to widen; the
+  ;; narrowed region contains a complete production. If the user has
+  ;; narrowed to an arbitrary region, the parse will probably be incorrect.
+  (unless (or (= (point-min) (point-max)) ;; some parsers can’t handle an 
empty buffer.
+             (and (< wisi-debug 2)
+                   (eq parse-action 'face)
+                  (null font-lock-mode))) ;; disabling font-lock in a buffer 
does _not_ prevent it calling parse!
     (let* ((partial-parse-p (wisi-partial-parse-p begin parse-end))
           (msg (when (> wisi-debug 0)
-                 (format "wisi: %sparsing %s %s:%d %d %d ..."
-                         (if partial-parse-p "partial " "")
-                         wisi--parse-action
+                 (format "wisi: %s %s %s:%d %d %d ..."
+                         (cond
+                          (partial-parse-p "parse partial")
+                          (wisi-incremental-parse-enable
+                           (if wisi--changes "parse incremental" "post-parse"))
+                          (t "parse"))
+                         parse-action
                          (buffer-name)
                          begin
                          (if (markerp parse-end) (marker-position parse-end) 
parse-end)
@@ -773,10 +862,9 @@ Usefull if the parser appears to be hung."
       (when msg
        (message msg))
 
-      (setq wisi--last-parse-action wisi--parse-action)
-      (wisi-set-last-parse-region begin parse-end wisi--parse-action)
+      (wisi-set-last-parse-region begin parse-end parse-action)
 
-      (unless (eq wisi--parse-action 'face)
+      (unless (eq parse-action 'face)
        (when (buffer-live-p wisi-error-buffer)
          (with-current-buffer wisi-error-buffer
            (setq buffer-read-only nil)
@@ -785,21 +873,35 @@ Usefull if the parser appears to be hung."
            (when (get-buffer-window wisi-error-buffer)
              (delete-window (get-buffer-window wisi-error-buffer))))))
 
-      (condition-case-unless-debug err
+      (condition-case err
          (save-excursion
-           (if partial-parse-p
-             (let ((send-region (wisi-parse-expand-region wisi--parser begin 
parse-end)))
-               (setq parsed-region (wisi-parse-current wisi--parser (car 
send-region) (cdr send-region) parse-end))
-               (wisi-cache-add-region parsed-region))
-
-             ;; parse full buffer
+           (cond
+            (partial-parse-p
+             (let ((send-region (wisi-parse-expand-region wisi-parser-shared 
begin parse-end)))
+               (setq parsed-region
+                     (wisi-parse-current wisi-parser-shared parse-action (car 
send-region) (cdr send-region) parse-end))
+               (wisi-cache-add-region parsed-region parse-action)
+               (setq wisi-parse-failed nil)))
+
+            (wisi-incremental-parse-enable
+             (when wisi--changes
+               (wisi-parse-incremental wisi-parser-shared parse-action)
+               (when (> wisi-debug 1) (wisi-parse-log-message 
wisi-parser-shared  "parse succeeded"))
+               (setq wisi-parse-failed nil))
+             ;; Don't clear wisi-parse-failed if only run post-parse.
+             (wisi-post-parse wisi-parser-shared parse-action begin parse-end)
+             (setq parsed-region (cons begin parse-end))
+             (wisi-cache-add-region parsed-region parse-action))
+
+            (t ;; parse full buffer
              (setq parsed-region (cons (point-min) (point-max)))
-             (wisi-cache-set-region (wisi-parse-current wisi--parser 
(point-min) (point-max) (point-max))))
-
-           (when (> wisi-debug 0) (message "... parsed %s" parsed-region))
-           (setq wisi-parse-failed nil))
+             (wisi-cache-set-region
+              (wisi-parse-current wisi-parser-shared parse-action (point-min) 
(point-max) (point-max))
+              parse-action)
+             (setq wisi-parse-failed nil))
+            ))
        (wisi-parse-error
-        (cl-ecase wisi--parse-action
+        (cl-ecase parse-action
           (face
            ;; Caches set by failed elisp parse are ok, but some parse
            ;; failures return 'nil' in parse-region.
@@ -807,14 +909,16 @@ Usefull if the parser appears to be hung."
              (wisi--delete-face-cache (cdr parsed-region))))
 
           (navigate
-           ;; elisp parse partially resets caches
+           ;; don't trust parse result
            (wisi--delete-navigate-cache (point-min)))
 
           (indent
            ;; parse does not set caches; see `wisi-indent-region'
            nil))
+        (when (> wisi-debug 1)
+          (wisi-parse-log-message wisi-parser-shared  (format "parse failed in 
%s" (current-buffer))))
         (setq wisi-parse-failed t)
-        ;; parser should have stored this error message in parser-error-msgs
+        ;; parser should have stored an error message in parser-error-msgs
         (when (> wisi-debug 0)
           (signal (car err) (cdr err)))
         )
@@ -827,19 +931,19 @@ Usefull if the parser appears to be hung."
       (unless partial-parse-p
        (wisi-fringe-display-errors
         (append
-         (seq-map (lambda (err) (wisi--lexer-error-pos err)) 
(wisi-parser-lexer-errors wisi--parser))
-         (seq-map (lambda (err) (wisi--parse-error-pos err)) 
(wisi-parser-parse-errors wisi--parser)))))
+         (seq-map (lambda (err) (wisi--lexer-error-pos err)) 
(wisi-parser-local-lexer-errors wisi-parser-local))
+         (seq-map (lambda (err) (wisi--parse-error-pos err)) 
(wisi-parser-local-parse-errors wisi-parser-local)))))
 
-      (when (> wisi-debug 1)
-       (if (or (wisi-parser-lexer-errors wisi--parser)
-               (wisi-parser-parse-errors wisi--parser))
+      (when (> wisi-debug 2)
+       (if (or (wisi-parser-local-lexer-errors wisi-parser-local)
+               (wisi-parser-local-parse-errors wisi-parser-local))
            (progn
              (message "%s error" msg)
              (wisi-goto-error)
-             (error (or (and (wisi-parser-lexer-errors wisi--parser)
-                             (wisi--lexer-error-message (car 
(wisi-parser-lexer-errors wisi--parser))))
-                        (and (wisi-parser-parse-errors wisi--parser)
-                             (wisi--parse-error-message (car 
(wisi-parser-parse-errors wisi--parser))))
+             (error (or (and (wisi-parser-local-lexer-errors wisi-parser-local)
+                             (wisi--lexer-error-message (car 
(wisi-parser-local-lexer-errors wisi-parser-local))))
+                        (and (wisi-parser-local-parse-errors wisi-parser-local)
+                             (wisi--parse-error-message (car 
(wisi-parser-local-parse-errors wisi-parser-local))))
                         )))
 
          ;; no error
@@ -847,8 +951,7 @@ Usefull if the parser appears to be hung."
        ))))
 
 (defun wisi--check-change ()
-  "Process `wisi--change-beg', `wisi--change-end'.
-`wisi--parse-action' must be bound."
+  "Process `wisi--change-beg', `wisi--change-end'."
   (when (and wisi--change-beg
             wisi--change-end
             (or (integerp wisi--change-beg)
@@ -862,10 +965,15 @@ Usefull if the parser appears to be hung."
     ))
 
 (defun wisi-validate-cache (begin end error-on-fail parse-action)
-  "Ensure cached data for PARSE-ACTION is valid in region BEGIN END in current 
buffer."
+  "Ensure cached data for PARSE-ACTION is valid in region BEGIN END"
+
+  ;; Tolerate (point) +- size exeeding buffer limits.
+  (setq begin (max begin (point-min)))
+  (setq end (min end (point-max)))
+
   (if (and (not wisi-inhibit-parse)
           (< (point-max) wisi-size-threshold))
-      (let ((wisi--parse-action parse-action))
+      (progn
        (wisi--check-change)
 
        ;; Now we can rely on wisi-cache-covers-region.
@@ -875,11 +983,11 @@ Usefull if the parser appears to be hung."
        ;; retrying a failed parse until the text changes again.
        (cond
         ((and (not wisi-parse-failed)
-              (wisi-cache-covers-region begin end))
+              (wisi-cache-covers-region begin end parse-action))
          (when (> wisi-debug 0)
            (message "parse %s skipped: cache-covers-region %s %s.%s"
                     parse-action
-                    (wisi-cache-covers-region begin end)
+                    (wisi-cache-covers-region begin end parse-action)
                     begin end)))
 
         ((and wisi-parse-failed
@@ -889,24 +997,76 @@ Usefull if the parser appears to be hung."
            (message "parse %s skipped: parse-failed" parse-action)))
 
         (t
-         (progn
-           (wisi-set-parse-try nil)
-           (wisi--run-parse begin end))))
+         (wisi-set-parse-try nil parse-action)
+         (wisi--run-parse parse-action begin end)))
 
        ;; We want this error even if we did not try to parse; it means
        ;; the parse results are not valid.
        (when (and error-on-fail wisi-parse-failed)
          (error "parse %s failed" parse-action))
        )
+
+    ;; else
     (when (> wisi-debug 0)
       (message "parse %s skipped inihibit-parse %s wisi-size-threshold %d"
               parse-action
               wisi-inhibit-parse
               wisi-size-threshold))))
 
+(defun wisi-validate-cache-current-statement (error-on-fail parse-action)
+  "Validate PARSE-ACTION caches on at least statement containing point.
+If point is in trailing comment of a statement, validate at least
+that and the next one.  If ERROR-ON-FAIL, signal error if parse
+fails."
+  (let (parse-begin parse-end)
+    (cond
+     (wisi-incremental-parse-enable
+      (let ((pos (point))
+           query-result
+           done)
+       (while (not done)
+         (setq query-result (wisi-parse-tree-query wisi-parser-shared 
'containing-statement pos))
+         (when (null parse-begin)
+           (setq parse-begin (car (wisi-tree-node-char-region query-result))))
+         (setq parse-end   (cdr (wisi-tree-node-char-region query-result)))
+         (if (<= (point) parse-end)
+             (setq done t)
+           (setq pos
+                 ;; point is in whitespace or comment after a preceding
+                 ;; statement; ada_mode-interactive_01.adb procedure
+                 ;; Proc_1. Normally that would be included in the
+                 ;; statement trailing non_grammar, but incremental
+                 ;; parse can make that not so.
+                 (save-excursion
+                   (if (nth 8 (syntax-ppss pos))
+                       (progn
+                         ;; in comment - forward-comment must start out
+                         ;; of a comment
+                         (goto-char (nth 8 (syntax-ppss pos))) ;; start of 
comment or string
+                         (forward-comment 100)
+                         (skip-syntax-forward " >")) ;; new-line might also be 
comment end.
+                     ;; in whitespace
+                     (beginning-of-line 2))
+                   (point)))))))
+
+     (t
+      (setq parse-begin (point-min)
+           parse-end   (point-max))))
+
+    (wisi-validate-cache parse-begin parse-end error-on-fail parse-action)))
+
 (defun wisi-fontify-region (begin end)
   "For `jit-lock-functions'."
-  (wisi-validate-cache begin end nil 'face))
+  (with-silent-modifications
+    (remove-text-properties begin end '(font-lock-face nil)))
+
+  (if wisi-parse-full-active
+      ;; Record region to fontify when full parse is done.
+      (let ((region (cdr wisi-parse-full-active)))
+       (when (< begin (car region)) (setf (car region) begin))
+       (when (> end (cdr region)) (setf (cdr region) end)))
+
+    (wisi-validate-cache begin end nil 'face)))
 
 (defun wisi-get-containing-cache (cache)
   "Return cache from (wisi-cache-containing CACHE)."
@@ -969,7 +1129,8 @@ If LIMIT (a buffer position) is reached, throw an error."
 cache. Otherwise move to cache-next, or cache-end, or next cache
 if both nil.  Return cache found."
   (unless (eobp)
-    (wisi-validate-cache (point-min) (point-max) t 'navigate)
+    (wisi-validate-cache-current-statement t 'navigate)
+
     (let ((cache (wisi-get-cache (point))))
       (if (and cache
               (not (eq (wisi-cache-class cache) 'statement-end)))
@@ -986,7 +1147,7 @@ if both nil.  Return cache found."
 (defun wisi-backward-statement-keyword ()
   "If not at a cached token, move backward to prev
 cache. Otherwise move to cache-prev, or prev cache if nil."
-  (wisi-validate-cache (point-min) (point-max) t 'navigate)
+  (wisi-validate-cache-current-statement t 'navigate)
   (let ((cache (wisi-get-cache (point)))
        prev)
     (when cache
@@ -1000,7 +1161,8 @@ cache. Otherwise move to cache-prev, or prev cache if 
nil."
   ))
 
 (defun wisi-forward-sexp (&optional arg)
-  "For `forward-sexp-function'."
+  "If on paren or quote, move to matching. Otherwise to next statement keyword.
+For `forward-sexp-function'."
   (interactive "^p")
   (or arg (setq arg 1))
   (cond
@@ -1072,14 +1234,15 @@ Return start cache."
   "Move point to token at start of statement point is in or after.
 Return start cache."
   (interactive)
-  (wisi-validate-cache (point-min) (point-max) t 'navigate)
+  (wisi-validate-cache-current-statement t 'navigate)
   (wisi-goto-start (or (wisi-get-cache (point))
                       (wisi-backward-cache))))
 
 (defun wisi-goto-statement-end ()
   "Move point to token at end of statement point is in or before."
   (interactive)
-  (wisi-validate-cache (point-min) (point-max) t 'navigate)
+  (wisi-validate-cache-current-statement t 'navigate)
+
   (let ((cache (or (wisi-get-cache (point))
                   (wisi-forward-cache))))
     (when (wisi-cache-end cache)
@@ -1090,7 +1253,7 @@ Return start cache."
 (defun wisi-goto-containing-statement-start ()
   "Move point to the start of the statement containing the current statement."
   (interactive)
-  (wisi-validate-cache (point-min) (point-max) t 'navigate)
+  (wisi-validate-cache-current-statement t 'navigate)
   (let ((cache (or (wisi-get-cache (point))
                   (wisi-backward-cache))))
     (when cache
@@ -1148,28 +1311,37 @@ the comment on the previous line."
    ))
 
 (defun wisi-indent-statement ()
-  "Indent region given by `wisi-goto-start', `wisi-cache-end'."
+  "Indent statement point is in or after."
   (interactive)
-  (wisi-validate-cache (point-min) (point-max) t 'navigate)
+  (cond
+   (wisi-incremental-parse-enable
+    (let ((containing (wisi-parse-tree-query wisi-parser-shared 
'containing-statement (point))))
+      (when containing
+       (save-excursion
+         (indent-region (car (wisi-tree-node-char-region containing))
+                        (cdr (wisi-tree-node-char-region containing)))))))
 
-  (save-excursion
-    (let ((cache (or (wisi-get-cache (point))
-                    (wisi-backward-cache))))
-      (when cache
-       ;; can be nil if in header comment
-       (let ((start (progn (wisi-goto-start cache) (point)))
-             (end (if (wisi-cache-end cache)
+   (t ;; partial parse.
+    (wisi-validate-cache (point-min) (point-max) t 'navigate)
+
+    (save-excursion
+      (let ((cache (or (wisi-get-cache (point))
+                      (wisi-backward-cache))))
+       (when cache
+         ;; can be nil if in header comment
+         (let ((start (progn (wisi-goto-start cache) (point)))
+               (end (if (wisi-cache-end cache)
                         ;; nil when cache is statement-end
-                        (wisi-cache-end cache)
+                        (marker-position (wisi-cache-end cache))
                       (point))))
-         (indent-region start end)
-         ))
-      )))
+           (indent-region start end)
+           ))
+       )))))
 
 (defun wisi-indent-containing-statement ()
   "Indent region given by `wisi-goto-containing-statement-start', 
`wisi-cache-end'."
   (interactive)
-  (wisi-validate-cache (point-min) (point-max) t 'navigate)
+  (wisi-validate-cache-current-statement t 'navigate)
 
   (save-excursion
     (let ((cache (or (wisi-get-cache (point))
@@ -1179,10 +1351,14 @@ the comment on the previous line."
        (let ((start (progn
                       (setq cache (wisi-goto-containing (wisi-goto-start 
cache)))
                       (point)))
-             (end (if (wisi-cache-end cache)
-                        ;; nil when cache is statement-end
-                        (wisi-cache-end cache)
-                      (point))))
+             end)
+         (unless cache
+             (wisi-validate-cache-current-statement t 'navigate)
+             (setq cache (wisi-get-cache (point))))
+         (if cache
+             (setq end (or (wisi-cache-end cache) ;; nil when cache is 
statement-end
+                           (point)))
+           (error "containing not set; try M-x wisi-reset-parser"))
          (indent-region start end)
          ))
       )))
@@ -1194,11 +1370,12 @@ indentation column, or nil if function does not know 
how to
 indent that line. Run after parser indentation, so other lines
 are indented correctly.")
 
-(defvar-local wisi-post-indent-fail-hook
+(defvar-local wisi-post-indent-fail-hook nil
   "Function to reindent portion of buffer.
-Called from `wisi-indent-region' when a parse succeeds after
-failing; assumes user was editing code that is now syntactically
-correct. Must leave point at indentation of current line.")
+Called from `wisi-indent-region' with no args when a parse
+succeeds after failing; assumes user was editing code that is now
+syntactically correct. Must leave point at indentation of current
+line.")
 
 (defvar-local wisi-indent-failed nil
   "Non-nil when indent fails due to parse fail.
@@ -1241,20 +1418,29 @@ for parse errors. BEGIN, END is the parsed region."
   (let ((indent (get-text-property (1- (point)) 'wisi-indent)))
     (if indent
        (when (and (wisi-partial-parse-p begin end)
-                  (< 0 (length (wisi-parser-parse-errors wisi--parser))))
-         (dolist (err (wisi-parser-parse-errors wisi--parser))
+                  (< 0 (length (wisi-parser-local-parse-errors 
wisi-parser-local))))
+         (dolist (err (wisi-parser-local-parse-errors wisi-parser-local))
            (dolist (repair (wisi--parse-error-repair err))
              ;; point is at bol; error pos may be at first token on same line.
              (save-excursion
                (back-to-indentation)
                (when (>= (point) (wisi--parse-error-repair-pos repair))
-                 (setq indent (max 0 (wisi-parse-adjust-indent wisi--parser 
indent repair))))
+                 (setq indent (max 0 (wisi-parse-adjust-indent 
wisi-parser-shared indent repair))))
                ))))
-      ;; parse did not compute indent for point. Assume the error will
-      ;; go away soon as the user edits the code, so just return 0.
-      (if (= wisi-debug 0)
-         (setq indent 0)
-       (error "nil indent for line %d" (line-number-at-pos (point)))))
+
+      ;; parse did not compute indent for point.
+      (cond
+       ((= (point) (point-max))
+       ;; point-max is after the last char, so the parser does not compute 
indent
+       (setq indent 0))
+
+       ((= wisi-debug 0)
+       ;; Assume the error will go away soon as the user edits the
+       ;; code, so just return 0.
+       (setq indent 0))
+
+       (t ;; wisi-debug > 0
+       (error "error: nil indent for line %d" (line-number-at-pos (point))))))
 
     indent))
 
@@ -1268,10 +1454,10 @@ If INDENT-BLANK-LINES is non-nil, also indent blank 
lines (for use as
                 (wisi-safe-marker-pos begin)
                 (wisi-safe-marker-pos end)))
 
-    (let ((wisi--parse-action 'indent)
-         (parse-required nil)
+    (let ((parse-required nil)
          (end-mark (copy-marker end))
-         (prev-indent-failed wisi-indent-failed))
+         (prev-indent-failed wisi-indent-failed)
+         (done nil))
 
       (when (< 0 wisi-debug)
        (message "wisi-indent-region %d %d"
@@ -1286,13 +1472,13 @@ If INDENT-BLANK-LINES is non-nil, also indent blank 
lines (for use as
        (setq begin (line-beginning-position))
 
        (when (bobp) (forward-line))
-       (while (and (not parse-required)
-                   (or (and (= begin end) (= (point) end))
-                       (< (point) end))
-                   (not (eobp)))
+       (while (not done)
          (unless (get-text-property (1- (point)) 'wisi-indent)
            (setq parse-required t))
-         (forward-line))
+         (setq done (or parse-required (eobp)))
+         (unless done (forward-line))
+         (setq done (or done (>= (point) end)))
+         )
        )
 
       ;; A parse either succeeds and sets the indent cache on all
@@ -1302,16 +1488,16 @@ If INDENT-BLANK-LINES is non-nil, also indent blank 
lines (for use as
                 (or (not wisi-parse-failed)
                     (wisi-parse-try 'indent)))
 
-       (wisi-set-parse-try nil)
-       (wisi--run-parse begin end)
+       (wisi-set-parse-try t 'indent)
+       (wisi--run-parse 'indent begin end)
 
        ;; If there were errors corrected, the indentation is
        ;; potentially ambiguous; see
        ;; test/ada_mode-interactive_2.adb. Or it was a partial parse,
        ;; where errors producing bad indent are pretty much expected.
        (unless (wisi-partial-parse-p begin end)
-         (setq wisi-indent-failed (< 0 (+ (length (wisi-parser-lexer-errors 
wisi--parser))
-                                          (length (wisi-parser-parse-errors 
wisi--parser))))))
+         (setq wisi-indent-failed (< 0 (+ (length 
(wisi-parser-local-lexer-errors wisi-parser-local))
+                                          (length 
(wisi-parser-local-parse-errors wisi-parser-local))))))
        )
 
       (if wisi-parse-failed
@@ -1361,7 +1547,7 @@ If INDENT-BLANK-LINES is non-nil, also indent blank lines 
(for use as
            ;; ambiguous, this one is not.
            (goto-char end-mark)
            (when (< 0 wisi-debug)
-             (message "wisi-indent-region post-parse-fail-hook"))
+             (message "wisi-indent-region wisi-post-indent-fail-hook"))
            (run-hooks 'wisi-post-indent-fail-hook))
          ))
       )))
@@ -1374,6 +1560,11 @@ If INDENT-BLANK-LINES is non-nil, also indent blank 
lines (for use as
     (when (>= (point) savep)
       (setq to-indent t))
 
+    ;; (1+ line-end-pos) is needed to compute indent for a line. It
+    ;; can exceed (point-max); the parser must be able to handle that.
+    ;;
+    ;; IMPROVEME: change parser 'indent' action to take lines, not
+    ;; buffer positions.
     (wisi-indent-region (line-beginning-position (1+ (- 
wisi-indent-context-lines))) (1+ (line-end-position)) t)
 
     (goto-char savep)
@@ -1382,37 +1573,38 @@ If INDENT-BLANK-LINES is non-nil, also indent blank 
lines (for use as
 
 (defun wisi-repair-error-1 (data)
   "Repair error reported in DATA (a ’wisi--parse-error’ or 
’wisi--lexer-error’)"
-  (let ((wisi--parse-action 'navigate))
-    (cond
-     ((wisi--lexer-error-p data)
-      (goto-char (1+ (wisi--lexer-error-pos data)))
-      (insert (wisi--lexer-error-inserted data)))
-     ((wisi--parse-error-p data)
-      (dolist (repair (wisi--parse-error-repair data))
-       (goto-char (wisi--parse-error-repair-pos repair))
-       (when (< 0 (length (wisi--parse-error-repair-deleted repair)))
-         (delete-region (car (wisi--parse-error-repair-deleted-region repair))
-                        (cdr (wisi--parse-error-repair-deleted-region repair)))
-         (when (= ?  (char-after (point)))
-           (delete-char 1)))
-       (dolist (id (wisi--parse-error-repair-inserted repair))
-         (when (and (not (bobp))
-                    (not (= ?\( (char-before (point))))
-                    (member (syntax-class (syntax-after (1- (point)))) '(2 
3))) ;; word or symbol
-           (insert " "))
-         (insert (cdr (assoc id (wisi-parser-repair-image wisi--parser)))))
+  (cond
+   ((wisi--lexer-error-p data)
+    (goto-char (1+ (wisi--lexer-error-pos data)))
+    (insert (wisi--lexer-error-inserted data)))
+   ((wisi--parse-error-p data)
+    (dolist (repair (wisi--parse-error-repair data))
+      (goto-char (wisi--parse-error-repair-pos repair))
+      (when (< 0 (length (wisi--parse-error-repair-deleted repair)))
+       (delete-region (car (wisi--parse-error-repair-deleted-region repair))
+                      (cdr (wisi--parse-error-repair-deleted-region repair)))
+       (when (= ?  (char-after (point)))
+         (delete-char 1)))
+      (dolist (id (wisi--parse-error-repair-inserted repair))
+       (when (and (not (bobp))
+                  (member (syntax-class (syntax-after (1- (point)))) '(2 3))) 
;; word or symbol
+         (insert " "))
+       (insert (cdr (assoc id (wisi-parser-repair-image wisi-parser-shared))))
+       (when (and (not (eobp))
+                  (member (syntax-class (syntax-after (point))) '(2 3))) ;; 
word or symbol
+         (insert " "))
        ))
-     )))
+    )))
 
 (defun wisi-repair-error ()
   "Repair the current error."
   (interactive)
   (let ((wisi-inhibit-parse t)) ;; don’t let the error list change while we 
are processing it.
-    (if (= 1 (+ (length (wisi-parser-lexer-errors wisi--parser))
-               (length (wisi-parser-parse-errors wisi--parser))))
+    (if (= 1 (+ (length (wisi-parser-local-lexer-errors wisi-parser-local))
+               (length (wisi-parser-local-parse-errors wisi-parser-local))))
        (progn
-         (wisi-repair-error-1 (or (car (wisi-parser-lexer-errors wisi--parser))
-                                  (car (wisi-parser-parse-errors 
wisi--parser)))))
+         (wisi-repair-error-1 (or (car (wisi-parser-local-lexer-errors 
wisi-parser-local))
+                                  (car (wisi-parser-local-parse-errors 
wisi-parser-local)))))
       (if (buffer-live-p wisi-error-buffer)
          (let ((err
                 (with-current-buffer wisi-error-buffer
@@ -1426,7 +1618,7 @@ If INDENT-BLANK-LINES is non-nil, also indent blank lines 
(for use as
 If non-nil, only repair errors in BEG END region."
   (interactive)
   (let ((wisi-inhibit-parse t)) ;; don’t let the error list change while we 
are processing it.
-    (dolist (data (wisi-parser-lexer-errors wisi--parser))
+    (dolist (data (wisi-parser-local-lexer-errors wisi-parser-local))
       (when (or (null beg)
                (and (not (= 0 (wisi--lexer-error-inserted data)))
                     (wisi--lexer-error-pos data)
@@ -1434,7 +1626,7 @@ If non-nil, only repair errors in BEG END region."
                     (<= (wisi--lexer-error-pos data) end)))
        (wisi-repair-error-1 data)))
 
-    (dolist (data (wisi-parser-parse-errors wisi--parser))
+    (dolist (data (wisi-parser-local-parse-errors wisi-parser-local))
       (when (or (null beg)
                (and (wisi--parse-error-pos data)
                     (<= beg (wisi--parse-error-pos data))
@@ -1457,7 +1649,7 @@ If non-nil, only repair errors in BEG END region."
       ident)))
 
 (defun wisi-next-name-region ()
-  "Return the next region at or after point with text property 'wisi-name'."
+  "Return the next region at or after point with text property wisi-name."
   (let* ((begin
          (if (get-text-property (point) 'wisi-name)
              (point)
@@ -1466,7 +1658,7 @@ If non-nil, only repair errors in BEG END region."
     (cons begin end)))
 
 (defun wisi-prev-name-region ()
-  "Return the prev region at or before point with text property 'wisi-name'."
+  "Return the prev region at or before point with text property wisi-name."
   (let* ((end
          (if (get-text-property (point) 'wisi-name)
              (point)
@@ -1475,12 +1667,12 @@ If non-nil, only repair errors in BEG END region."
     (cons begin end)))
 
 (defun wisi-next-name ()
-  "Return the text at or after point with text property 'wisi-name'."
+  "Return the text at or after point with text property wisi-name."
   (let ((region (wisi-next-name-region)))
     (buffer-substring-no-properties (car region) (cdr region))))
 
 (defun wisi-prev-name ()
-  "Return the text at or before point with text property 'wisi-name'."
+  "Return the text at or before point with text property wisi-name."
   (let ((region (wisi-prev-name-region)))
     (buffer-substring-no-properties (car region) (cdr region))))
 
@@ -1488,12 +1680,12 @@ If non-nil, only repair errors in BEG END region."
   "Match line number encoded into identifier by `wisi-names'.")
 
 (defun wisi-names (append-lines alist)
-  "List of names; each is text from one 'wisi-name property in current buffer.
+  "List of names; each is text from one wisi-name property in current buffer.
 If APPEND-LINES is non-nil, each name has the line number it
 occurs on appended. If ALIST is non-nil, the result is an alist
 where the car is a list (FILE LINE COL)."
-  (when wisi--parser
-    ;; wisi--parser is nil in a non-language buffer, like Makefile
+  (when wisi-parser-shared
+    ;; wisi-parser-shared is nil in a non-language buffer, like Makefile
     (wisi-validate-cache (point-min) (point-max) t 'navigate)
     (let ((table nil)
          (pos (point-min))
@@ -1523,9 +1715,13 @@ where the car is a list (FILE LINE COL)."
 
 ;;;; debugging
 
-(defun wisi-show-region ()
+(defun wisi-show-region (&optional region)
   (interactive)
   (cond
+   (region
+    (set-mark  (car region))
+    (goto-char (cdr region)))
+
    ((use-region-p)
     (message "(%s . %s)" (region-beginning) (region-end)))
    (t
@@ -1588,64 +1784,6 @@ where the car is a list (FILE LINE COL)."
      (wisi-indent-region begin end))
     ))
 
-(defun wisi-time (func count &optional report-wait-time)
-  "call FUNC COUNT times, show total time"
-  (interactive "afunction \nncount ")
-
-  (let ((start-time (float-time))
-       (start-gcs gcs-done)
-       (cum-wait-time 0.0)
-        (i 0)
-        diff-time
-       diff-gcs)
-    (while (not (eq (1+ count) (setq i (1+ i))))
-      (save-excursion
-        (funcall func))
-      (when report-wait-time
-       (setq cum-wait-time (+ cum-wait-time 
(wisi-process--parser-total-wait-time wisi--parser)))))
-    (setq diff-time (- (float-time) start-time))
-    (setq diff-gcs (- gcs-done start-gcs))
-    (if report-wait-time
-       (progn
-         (message "Total %f seconds, %d gcs; per iteration %f seconds %d gcs 
%d responses %f wait"
-                  diff-time
-                  diff-gcs
-                  (/ diff-time count)
-                  (/ (float diff-gcs) count)
-                  (wisi-process--parser-response-count wisi--parser)
-                  (/ cum-wait-time count)))
-
-      (message "Total %f seconds, %d gcs; per iteration %f seconds %d gcs"
-              diff-time
-              diff-gcs
-              (/ diff-time count)
-              (/ (float diff-gcs) count))
-      ))
-  nil)
-
-(defun wisi-time-indent-middle-line-cold-cache (count &optional 
report-wait-time)
-  (goto-char (point-min))
-  (forward-line (1- (/ (count-lines (point-min) (point-max)) 2)))
-  (let ((cum-wait-time 0.0))
-    (wisi-time
-     (lambda ()
-       (wisi-set-parse-try t 'indent)
-       (wisi-invalidate-cache 'indent (point-min))
-       (wisi-indent-line)
-       (when (wisi-process--parser-p wisi--parser)
-        (setq cum-wait-time (+ cum-wait-time 
(wisi-process--parser-total-wait-time wisi--parser)))))
-     count
-     report-wait-time)
-    ))
-
-(defun wisi-time-indent-middle-line-warm-cache (count)
-  (wisi-set-parse-try t 'indent)
-  (wisi-invalidate-cache 'indent (point-min))
-  (goto-char (point-min))
-  (forward-line (/ (count-lines (point-min) (point-max)) 2))
-  (wisi-indent-line)
-  (wisi-time #'wisi-indent-line count))
-
 (defun wisi-show-indent ()
   "Show indent cache for current line."
   (interactive)
@@ -1674,10 +1812,8 @@ where the car is a list (FILE LINE COL)."
 
 (cl-defun wisi-setup (&key indent-calculate post-indent-fail parser)
   "Set up a buffer for parsing files with wisi."
-  (when wisi--parser
-    (wisi-kill-parser))
-
-  (setq wisi--parser parser)
+  (setq wisi-parser-shared parser)
+  (setq wisi-parser-local (make-wisi-parser-local))
   (setq wisi--cached-regions
        (list
         (cons 'face nil)
@@ -1708,6 +1844,8 @@ where the car is a list (FILE LINE COL)."
   (add-hook 'after-change-functions #'wisi-after-change nil t)
   (setq wisi--change-end (copy-marker (point-min) t))
 
+  (add-hook 'kill-buffer-hook 'wisi-parse-kill-buf 90 t)
+
   (set (make-local-variable 'comment-indent-function) 'wisi-comment-indent)
 
   (add-hook 'completion-at-point-functions #'wisi-completion-at-point -90 t)
@@ -1720,8 +1858,17 @@ where the car is a list (FILE LINE COL)."
   (remove-hook 'hack-local-variables-hook #'wisi-post-local-vars)
 
   (unless wisi-disable-face
-    (jit-lock-register #'wisi-fontify-region)))
+    (jit-lock-register #'wisi-fontify-region))
+
+  (when wisi-incremental-parse-enable
+    (when wisi-save-all-changes
+      (setf (wisi-parser-local-all-changes wisi-parser-local) nil))
 
+    ;; We don't wait for this to complete here, so users can scroll
+    ;; around while the initial parse runs. font-lock will not work
+    ;; during that time (the parser is busy, the buffer is read-only).
+    (when (< 0 wisi-debug) (message "start initial full parse in %s" 
(current-buffer)))
+    (wisi-parse-incremental wisi-parser-shared 'none :full t :nowait 
wisi-parse-full-background)))
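+
+;; Typical call from a language-specific major mode (a sketch;
+;; `my-lang-make-parser' is a hypothetical constructor, not part of
+;; wisi; the keyword arguments are those accepted by `wisi-setup'):
+;;
+;;   (wisi-setup
+;;    :indent-calculate nil
+;;    :post-indent-fail nil
+;;    :parser (my-lang-make-parser))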
 
 (provide 'wisi)
 ;;; wisi.el ends here
diff --git a/wisi.gpr.gp b/wisi.gpr.gp
index 54c92394dc..e70448a13f 100644
--- a/wisi.gpr.gp
+++ b/wisi.gpr.gp
@@ -3,7 +3,7 @@
 --  Make installed and source ELPA package wisi Ada code available for
 --  other projects.
 --
---  Copyright (C) 2017, 2019 Free Software Foundation, Inc.
+--  Copyright (C) 2017, 2019, 2021 Free Software Foundation, Inc.
 --
 --  This program is free software; you can redistribute it and/or
 --  modify it under terms of the GNU General Public License as
@@ -20,8 +20,9 @@
 with "gnatcoll";
 with "standard_common";
 #if ELPA="no"
-with "sal";
-with "wisitoken";
+with "aunit_ext";
+with "sal_devel";
+with "wisitoken_devel";
 #end if;
 project Wisi is
 
@@ -37,7 +38,12 @@ project Wisi is
       for Exec_Dir use ".";
    end case;
 
-   for Languages use ("Ada", "C"); -- C needed for wisitoken-bnf-generate; 
wisitoken_grammar_re2c.c
+   for Languages use
+   ("Ada"
+#if ELPA="yes" 
+   ,"C" -- C needed for wisitoken_grammar_re2c.c
+#end if;
+   );
 
    package Compiler is
 
@@ -46,17 +52,19 @@ project Wisi is
          for Default_Switches ("Ada") use
            Standard_Common.Compiler.Common_Switches &
            Standard_Common.Compiler.Style_Checks &
-           Standard_Common.Compiler.Debug_Switches & "-gnat2020";
-
+           Standard_Common.Compiler.Debug_Switches;
+#if ELPA="yes" 
          for Default_Switches ("C") use 
Standard_Common.Compiler.Debug_Switches_C;
+#end if;
 
       when "Normal" =>
          for Default_Switches ("Ada") use
            Standard_Common.Compiler.Common_Switches &
            Standard_Common.Compiler.Style_Checks &
-           Standard_Common.Compiler.Release_Switches & "-gnat2020";
-
+           Standard_Common.Compiler.Release_Switches;
+#if ELPA="yes" 
          for Default_Switches ("C") use 
Standard_Common.Compiler.Release_Switches_C;
+#end if;
       end case;
 
    end Compiler;
diff --git a/wisi.texi b/wisi.texi
index a439add9fe..0f2408d4cf 100644
--- a/wisi.texi
+++ b/wisi.texi
@@ -2,7 +2,7 @@
 @settitle Wisi
 
 @copying
-Copyright @copyright{} 1999 - 2022  Free Software Foundation, Inc.
+Copyright @copyright{} 1999 - 2021  Free Software Foundation, Inc.
 
 @quotation
 Permission is granted to copy, distribute and/or modify this document
@@ -25,7 +25,7 @@ developing GNU and promoting software freedom.''
 
 @titlepage
 @sp 10
-@title Wisi Version 3.1.8
+@title Wisi Version 3.1.2
 @page
 @vskip 0pt plus 1filll
 @insertcopying
@@ -37,7 +37,7 @@ developing GNU and promoting software freedom.''
 @node Top
 @top Top
 
-Wisi Version 3.1.8
+Wisi Version 3.1.2
 @end ifnottex
 
 @menu
@@ -67,20 +67,19 @@ operations useful for compilation and cross-reference.
 
 Grammar actions are specified in the grammar file, in a nonterminal
 declaration. We assume the user is familiar with parser grammars and
-grammar actions. For example, a simple ``if'' statement can be
+grammar actions. For example, an ``if'' statement can be
 declared as:
 
 @example
 if_statement
-  : IF expression THEN statements elsif_list ELSE statements END IF SEMICOLON
+  : 'if' expression 'then' statements elsif_list 'else' statements 'end' 'if' 
';'
     %((wisi-statement-action [1 statement-start 3 motion 6 motion 10 
statement-end])
       (wisi-motion-action [1 3 5 6 10])
       (wisi-indent-action [nil
-                           [(wisi-hanging% ada-indent-broken (* 2 
ada-indent-broken))
-                            ada-indent-broken]
+                           ada-indent-broken
                            nil
-                           [ada-indent ada-indent] nil nil
-                           [ada-indent ada-indent] nil nil nil]))%
+                           [(wisi-block ada-indent) ada-indent] nil nil
+                           [(wisi-block ada-indent) ada-indent] nil nil nil]))%
 @end example
 
 The item before @code{:} is the ``left hand side'', or
@@ -90,27 +89,26 @@ nonterminal (separated by @code{|}).
 
 The items enclosed in ``%()%'' are the grammar actions. They are
 specified as a list of elisp forms; an earlier version of the wisi
-package generated an elisp parser. We keep the elisp form because it
-is compact, and easier to read and write than the equivalent Ada
-code. The @code{wisi-bnf-generate} tool converts the elisp into the
-required Ada statements.
+package generated a parser in elisp; now it generates only Ada. We
+keep the elisp form for grammar actions because it is compact, and
+easier to read and write than the equivalent Ada code. The WisiToken
+tool @code{wisi-bnf-generate} converts the elisp into the required Ada
+statements.
 
 There are two classes of actions; in-parse and post-parse. WisiToken
-calls these ``semantic checks'' and ``user actions''. The in-parse
+calls these ``semantic checks'' and ``semantic actions''. The in-parse
 actions are done as parsing proceeds; they provide extra checks that
 can cause the parse to fail. Currently the only one provided is
 @code{match-names}; it is used to check that the declaration and end
 names in named Ada blocks are the same (which can aid significantly in
 error correction). In the grammar file, in-parse actions are specified
 in a second @code{%()%} block, which can be omitted if empty. In this
-document, the term ``action'' means ``post-parse action'', we use
-``in-parse action'' unless the meaning is clear from context.
+document, the term ``action'' generally means ``post-parse action'';
+we say ``in-parse action'' explicitly when the meaning is not clear
+from context.
 
 Executing the wisi grammar actions creates text properties in the
 source file; those text properties are then used by elisp code for
-various purposes.
-
-The text properties applied are:
+various purposes. The text properties created are:
 
 @table @code
 @item wisi-cache
@@ -172,17 +170,17 @@ font-lock regular expressions instead. One reason for this
 is so keywords are still highlighted when the parser fails, which
 can happen if there are severe syntax errors.
 
-Other items, like function and package names, are typically marked
-with @code{font-lock-face} by the parser.
+Other items, like function, package, and type names, are typically
+marked with @code{font-lock-face} by the parser.
 
 @item fontified
 Another standard font-lock text property; applied whenever
 @code{font-lock-face} is.
 
 @item wisi-indent
-Contains the indent (in characters) for the next line; applied to
-the newline character on the preceding line. The first line in a
-buffer is assumed to have indent 0.
+Contains the indent (in characters) for the next line; applied to the
+newline character at the end of the preceding line. The first line in
+a buffer is assumed to have indent 0.
 
 @end table
 
@@ -266,7 +264,7 @@ stored in Emacs text properties.
 
 @item wisi-name-action TOKEN
 TOKEN is a token index. Create a @code{wisi-name} text property on the
-token.
+@code{char_region} of the token.
 
 @end table
 
@@ -304,48 +302,79 @@ token, and does not apply a face if there are no such 
marks.
 @section Indent actions
 
 Indents are computed for each line in a cumulative way as the grammar
-actions are executed. Initially, each indent is set to @code{nil},
-which means ``not computed''; this is not the same as the value
-@code{0}. The grammar actions are executed in a bottom-up fashion; low
-level productions are executed before higher level ones. In general,
-the indent action for a production specifies a ``delta indent''; the
-indent is incremented by that amount. When all productions have been
-processed, the indent has been computed for all lines.
+actions are executed. Initially, the indent for each line is set to
+@code{nil}, which means ``not computed''; this is not the same as the
+value @code{0}. The grammar actions are executed in a bottom-up
+fashion; low level productions are executed before higher level
+ones. In general, the indent action for a production specifies a
+``delta indent''; the indent for a line is incremented by that
+amount. When all productions in a parse tree have been processed, the
+indent has been computed for all lines.
+
+Indent actions specify a delta indent for each token in a
+production. If the token is a nonterminal, it can contain multiple
+lines of text; the delta indent applies to the lines where the first
+token on the line is contained by the nonterminal.
+
+When a delta indent is applied to the indent for a line, it is either
+added or ignored. We call the token whose delta indent is being
+applied the ``controlling'' token. The indent for each line has a
+field storing the line number of the last controlling token that added
+to the indent. If the line number of the controlling token for the
+delta indent begin applied is the same as the stored controlling token
+line number, the indent is ignored. @ref{Indent example} for
+explanations of why this rule is needed. On the other hand, it may be
+necessary to use @code{wisi-block} or @code{wisi-anchored} to override
+this rule.
 
 Indents are often given as a function call; the arguments to the
 function can be other function calls, or integer
-expressions. @code{wisitoken-bnf-generate} supports only simple integer
-expressions; those using integers, integer-valued variables,
-parenthesis, + (plus), - (minus), and * (multiply).
+expressions. @code{wisitoken-bnf-generate} supports only simple
+integer expressions; those using integers, integer-valued variables, +
+(plus), - (minus), and * (multiply). All expressions are lisp forms;
+``ada-indent plus 1'' is written as @code{(+ ada-indent 1)}.
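+
+For example, each of the following is a valid delta indent expression
+(shown here only as illustrations):
+
+@example
+3                        ;; integer literal
+ada-indent-broken        ;; integer-valued variable
+(+ ada-indent-broken 1)
+(- ada-indent 1)
+(* 2 ada-indent-broken)
+@end example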
+
+Indents for comments are computed in the same way, except that the
+delta that applies to a comment that follows a token is given by the
+indent action entry for the next token. If a token is the last in a
+production, the comment indent is @code{nil}. These rules often give
+the wrong indent for a comment, so they can be overridden by specifying
+a comment indent for a token using @code{[CODE-INDENT
+COMMENT-INDENT]}; see below. Indent functions can also modify how
+comments are indented.
 
 @table @code
-@item wisi-indent-action [INDENT ...]
-The argument is a vector, giving an indent for each token in the
+@item wisi-indent-action [DELTA ...]
+The argument is a vector, giving a delta indent for each token in the
 production right-hand side.
 
 For terminals, the indents only have meaning, and are only computed,
-if the token is the first on a line. For nonterminals where the indent
-is not a variant of @code{wisi-hanging}, the indent is only computed
-if the first terminal token in the nonterminal is the first on a
-line. See @code{wisi-hanging} in @ref{Indent functions} for the
-remaining case.
+if the token is the first on a line. For nonterminals, the indent is
+applied to all lines where the first token on the line is contained in
+the nonterminal.
 
 An indent can have several forms.  In the descriptions below, the
-``current token'' is given by the position of the indent expression in
-the @code{wisi-indent-action} argument list.
+``current token'' is the token in the production right hand side at
+the same position as the indent expression in the
+@code{wisi-indent-action} argument list.
 
 @table @asis
 @item An integer
-This gives a delta indent; it is added to the total indent for the
-line.
+The simplest delta indent.
 
 @item A variable name
-The name is translated to an Ada identifier by replacing ``-'' with
-``_'', and applying @code{Camel_Case}. The translated name must
-identify a directly visible run-time Ada integer variable; this is
-checked at Ada compile time. It provides an integer delta indent.
-
-For example, in Ada two indent variable names are @code{ada-indent}
+An integer delta indent; the value can be changed at runtime.
+
+The variable is an elisp variable; the name is translated to an Ada
+identifier by replacing ``-'' with ``_'', and applying
+@code{Camel_Case}. The translated name must identify a directly
+visible run-time Ada integer variable; this is checked at Ada compile
+time (variables declared in a separate package can be made visible by
+placing a with clause in a @code{%code} declaration in the grammar
+file). The elisp variable value is copied to the Ada variable value at
+the start of each indent compute session.
+
+For example, in ada-mode two indent variable names are @code{ada-indent}
 and @code{ada-indent-broken}, giving the basic indent, and the
 continuation line indent. They are runtime variables so different
 projects can specify them as part of a coding standard.
@@ -353,27 +382,31 @@ projects can specify them as part of a coding standard.
 @item A function call
 A function that computes a delta indent. See @ref{Indent functions}.
 
-@item [CODE-INDENT , COMMENT-INDENT]
+@item [CODE-INDENT COMMENT-INDENT]
 A vector giving separate indents for code and comments.
 
 Normally, the indent for trailing comments (on lines with no code,
-after all code in the token) is given by the indent of the following
-token in the production. When the current token is the last, or the
-following tokens may be empty, or the indent of the following token
-would be wrong for some reason (for example, it is a block end), the
-comment indent may be specified separately. If it is not specified,
-and the indent from the next token is not available, the indent for
-the current token is used for code and comments.
+after all code in the token) is given by the indent of the first line
+of code in the following token in the production; this overrides that
+and the comment indent is given by COMMENT-INDENT applied to the
+current token.
 
-Comment lines that are not trailing are indented by CODE-INDENT.
+When the current token is the last, and a separate comment indent is
+not specified, the comment indent is nil.
+
+Comment lines that are not trailing (that is, they are between tokens
+in the nonterminal being indented) are indented by CODE-INDENT.
 
 @item (label . INDENT)
-If token labels are used in a right hand side, they must be given
-explicitly in the indent arguments, using the lisp ``cons''
-syntax. Labels are normally only used with EBNF grammars, which expand
-into multiple right hand sides, with optional tokens simply left
-out. Explicit labels on the indent arguments allow them to be left out
-as well.
+Specifies that the indent applies to the token with the same label. If
+any argument in an indent action is labeled, all must be labeled, and
+thus all tokens in the production must be labeled. This can improve
+readability in a long production; see the example after this table.
+
+When the grammar file uses EBNF meta-syntax, implicit labels are
+automatically generated for all tokens, unless they are explicitly
+labeled; this allows keeping track of which optional tokens are left
+out when the production is converted to BNF internally.
 
 @end table
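+
+As an illustration of the forms above, assume a production whose three
+tokens are labeled @code{name}, @code{value} and @code{semi} in the
+grammar file (a hypothetical sketch; the production itself is not
+shown). Its indent action could combine an integer, a variable, and a
+@code{[CODE-INDENT COMMENT-INDENT]} vector:
+
+@example
+%((wisi-indent-action [(name . 0)
+                       (value . [ada-indent-broken ada-indent-broken])
+                       (semi . 0)]))%
+@end example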
 
@@ -387,178 +420,456 @@ as well.
 @node Indent functions
 @subsection Indent functions
 @table @code
-@item wisi-anchored TOKEN OFFSET
-Sets the indent for the current token to be OFFSET (an integer
-expression) from the start of TOKEN (a token index); the
-current token is ``anchored to'' TOKEN.
+@item wisi-block DELTA
+Sets the delta indent for the current token to be DELTA, and ignores
+the controlling token line when adding delta indents. This is usually
+needed in block statements; @ref{Indent example} @code{if_statement}.
 
-@item wisi-anchored* TOKEN OFFSET
-Sets the indent for the current token to be OFFSET from the start of
-TOKEN, but only if TOKEN is the first token on a line; otherwise no indent
+DELTA can be any indent expression, except a variant of
+@code{wisi-hanging}.
 
-@item wisi-anchored*- TOKEN OFFSET
-Sets the indent for the current token to be OFFSET from the start of
-TOKEN, but only if TOKEN is the first token on a line and the indent
-for the current token accumulated so far is nil.
+@item wisi-anchored ANCHOR OFFSET
+Sets the delta indent for the current token to be OFFSET (an integer
+expression) from the start of ANCHOR (a token index). Subsequent
+higher level delta indents are ignored; the current token is
+``anchored to'' ANCHOR.
 
-@item wisi-anchored% TOKEN OFFSET
-If there is an opening parenthesis containing TOKEN in the line
-containing TOKEN, set the current indent to OFFSET from that
-parenthesis. Otherwise, OFFSET gives an indent delta.
+A trailing comment following the current token is indented the same as
+the code in the token.
 
-@item wisi-anchored%- TOKEN OFFSET
-Same as @code{wisi-anchored%}, but only if the current token
-accumulated indent is nil.
+@item wisi-anchored% ANCHOR OFFSET
+Sets the delta indent for the current token to be OFFSET (an integer
+expression) from a containing left parenthesis in the line containing
+ANCHOR (a token index), or the start of the line containing ANCHOR if
+there is no such paren; the current token is ``anchored to'' the paren
+or the start of the line. Subsequent higher level delta indents are
+ignored.
 
 @item wisi-hanging DELTA-1 DELTA-2
-The current token is assumed to be a nonterminal. If the text it
-contains spans multiple lines, use DELTA-1 for the first line, DELTA-2
-for the rest. If the current token is only on one line, use DELTA-1.
+Use DELTA-1 for the first line in the current token, DELTA-2 for the
+rest. Trailing comments use DELTA-1 if there is only one code line,
+DELTA-2 if there is more than one.
 
-DELTA-1 and DELTA-2 can be any IDENT expression, except a variant of
+DELTA-1 and DELTA-2 can be any indent expression, except a variant of
 @code{wisi-hanging}.
 
+@code{wisi-hanging} is useful when the lower level productions for the
+current token do not have indent actions.
+
 @item wisi-hanging% DELTA-1 DELTA-2
-Similar to @code{wisi-hanging}; if the first terminal token in the
-current nonterminal is the first token on the first line, use DELTA-1
-for the first line and DELTA-2 for the rest. Otherwise, use DELTA-1
-for all lines.
+If the first token in the nonterminal is first on its line, use
+DELTA-1 for the first line, DELTA-2 for the rest. Otherwise, use
+DELTA-1 for all lines.
 
-@item wisi-hanging%- DELTA-1 DELTA-2
-Same as @code{wisi-hanging%}, except applied only if the current token
-accumulated indent is nil.
+@item wisi-hanging* DELTA-1 DELTA-2
+If the first token in the nonterminal is first on its line, use
+DELTA-1 for the first line, DELTA-1 + DELTA-2 for the rest. Otherwise,
+use DELTA-2 for all lines. See the sketch after this table.
 
 @item Language-specific function
 Language-specific indent functions are specified by an
 @code{%elisp_indent} declaration in the grammar file. Each function
-specifies how many arguments it accepts; this is checked at action
-runtime, not during grammar generation. Each argument is an INDENT as
-described above, or a token ID prefixed by @code{'} (to allow
-distinguishing token IDs from variable names).
+declaration specifies how many arguments it accepts; this is checked
+at grammar generation time. Each argument is a delta indent expression
+as described above, a token index, or a token ID prefixed by @code{'}
+(to allow distinguishing token IDs from variable names).
 @end table
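+
+As a sketch (a hypothetical production, not from an actual grammar
+file), @code{wisi-hanging*} could indent a multi-line argument list:
+per the rule above, when @code{association} starts a line, lines after
+the first get one extra @code{ada-indent-broken}; otherwise all its
+lines get a single @code{ada-indent-broken}.
+
+@example
+association_list
+  : association
+  | association_list ',' association
+    %((wisi-indent-action [nil nil
+                           (wisi-hanging* ada-indent-broken
+                                          ada-indent-broken)]))%
+@end example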
 
 @node Indent example
 @subsection Indent example
+To illustrate how indents are computed, we walk through the computation
+for some example code.
 
-The example @code{if_statement} grammar nonterminal is:
+The simple grammar used for this example is:
 
 @example
 if_statement
-  : IF expression THEN statements elsif_list ELSE statements END IF SEMICOLON
+  : 'if' expression 'then' statements 'end' 'if' ';'
     %((wisi-indent-action [nil
-                           [(wisi-hanging% ada-indent-broken (* 2 
ada-indent-broken))
-                            ada-indent-broken]
+                           ada-indent-broken
                            nil
-                           [ada-indent ada-indent] nil nil
-                           [ada-indent ada-indent] nil nil nil]))%
+                           [(wisi-block ada-indent) ada-indent]
+                           nil nil nil]))%
+
+expression
+  : term
+    %((wisi-indent-action [[(wisi-hanging nil ada-indent-broken)
+                            ada-indent-broken]]))%
+
+term
+  : primary
+  | primary '+' primary
+  | primary '<' primary
+  | term 'and' term
+
+primary : integer | identifier | function_call
+
+function_call
+  : identifier formal_part
+    %((wisi-indent-action [nil ada-indent-broken]))%
+
+formal_part
+  : '(' expression_list ')'
+    %((wisi-indent-action [nil
+                           (wisi-anchored 1 1)
+                           (wisi-anchored 1 0)]))%
+
+expression_list : expression | expression_list ',' expression
+
+statements : statement | statements statement
+
+statement : function_call ';' | assignment | if_statement
+
+assign_value : ':=' expression
+
+assignment
+  : identifier assign_value ';'
+    %((wisi-indent-action [nil (wisi-hanging ada-indent-broken (* 2 
ada-indent-broken)) nil]))%
+@end example
+
+Note that we have split out @code{assign_value} from
+@code{assignment}, so we can apply the @code{wisi-hanging} indent
+function to it; see the first example below for an explanation of why
+this is needed.
+
+The indent variables have the values:
+@example
+ada-indent 3
+ada-indent-broken 2
+@end example
+
+First we consider a simple example (the line numbers and indents are
+on the left):
+@example
+ 1: nil : G
+ 2: nil :   :=
+ 3: nil :     F +
+ 4: nil :       Compute_Something
+ 5: nil :         (Arg_1,
+ 6: nil :          H +
+ 7: nil :            I);
+@end example
+The assignment statement is fully spread out on different lines, as
+might be required if the names or subexpressions are long. In this
+case, none of the delta indents are ignored when applied to a line
+(except anchored lines), which is why they all need to be present in
+the indent actions. Thus we can leave out the stored controlling token
+in the line indents for this example.
+
+Indents are computed in bottom up order; the first indent action
+computed in this code is for @code{expression} @code{H + I} lines 6
+and 7; the delta indent expression is:
+@example
+   [(wisi-hanging nil ada-indent-broken) ada-indent-broken]
 @end example
+This gives separate indents for the code and a trailing comment; there
+is no trailing comment in this example (there is one in the next
+example). This gives a delta indent of nil for line 6, and 2 for line 7.
 
-We trace how the indent is computed for this sample Ada code:
+The next indent action is for @code{formal_part} @code{(Arg_1, H + I)}
+on lines 5 .. 7. The indent action is:
+@example
+%((wisi-indent-action [nil (wisi-anchored 1 1) (wisi-anchored 1 0)]))%
+@end example
+
+For each token, the delta indent computed by this is:
+@table @code
+@item ( : nil
+Leaves line 5 at nil.
+
+@item expression_list : (wisi-anchored 1 1)
+The first @code{1} is the token index of the anchor token; the left
+parenthesis on line 5. The second @code{1} is the offset from the
+anchor; thus the delta indent is @code{Anchored, 5, 1}; anchored to
+line 5 with an offset of 1. This delta indent is applied to the lines
+whose first tokens are contained by the @code{expression_list}; that
+is lines 6 and 7. Line 6 indent is currently nil, so the indent is set
+to @code{Anchored, 5, 1}. Line 7 indent is currently 2, so that is
+added to 1, setting the indent to @code{Anchored, 5, 3}.
+
+@item ) : (wisi-anchored 1 0)
+')' is not first on a line, so this leaves line 7 indent unchanged.
+@end table
+
+Next indent action is @code{function_call} @code{Compute_Something
+(Arg_1, H + I)} on lines 4 .. 7. The indent action is
+@code{%((wisi-indent-action [nil ada-indent-broken]))%}; this applies
+a delta indent of 2 to the @code{formal_part} on lines 5 .. 7,
+leaving line 5 at 2, and 6 and 7 unchanged.
+
+Next is @code{expression} @code{F + Compute_Something (Arg_1, H + I)}
+on lines 3 .. 7.  This computes a delta indent of nil for line 3, and
+2 for lines 4 .. 7, leaving 3 at nil, 4 at 2, 5 at 4, and 6 and 7
+unchanged.
+
+The last indent action is @code{assignment} on lines 1 .. 7; the
+indent is @code{(wisi-hanging ada-indent-broken (* 2
+ada-indent-broken))}, applied to all the lines contained by
+@code{assign_value}, which is lines 2 through 7. This gives a delta
+indent of 2 for line 2, and 4 for lines 3 .. 7. This gives the
+indents:
+@example
+                   : 12345678901
+ 1: nil            : G
+ 2: 2              :   :=
+ 3: 4              :     F +
+ 4: 6              :       Compute_Something
+ 5: 8              :         (Arg_1,
+ 6: Anchored, 5, 1 :          H +
+ 7: Anchored, 5, 3 :            I);
+@end example
+The final step is to compute all the anchored lines; for line 6, we add 1
+to the indent for line 5, leaving 9; for line 7, add 3, leaving 11.
+
+Now consider a more extensive example:
 
 @example
  1: if A < B and
  2:   C < D
  3:   --  comment on expression
  4: then
- 5:    if E then
- 6:       Do_E;
- 7:       -- comment on statement
- 8:    elsif F then
- 9:       G := A + Compute_Something
-10:         (arg_1, arg_2);
-11:    end if;
-12: end if;
+ 5:    G := F + Compute_Something
+ 6:      (Arg_1,
+ 7:       Arg_2);
+ 8:    -- comment on statement
+ 9:
+10:    Do_E;
+11:
+12:    -- comment before 'end if'
+13: end if;
+@end example
+Here the assignment is on fewer lines, illustrating why we need the
+rule about ignoring some delta indents, and why we need
+@code{wisi-block} to override that rule.
+
+To understand the order in which indents are computed, we need the
+syntax tree produced by parsing this code; it is shown here, with
+node numbers on the left for reference. Note that comments are stored
+in the terminal token node preceding the comment. Node 51
+@code{formal_part} is empty; ``Do_E'' has no arguments.
+@example
+ 1  if_statement
+ 2    'if'
+ 3    expression
+ 4      term
+ 5        primary
+ 6          identifier "A"
+ 7        '<'
+ 8        primary
+ 9          identifier "B"
+10      'and'
+11      term
+12        primary
+13          identifier "C"
+14        '<'
+15        primary
+16          identifier "D", "--  comment on expression"
+17    'then'
+18    statements
+19      statements
+20        statement
+21          assignment
+22            identifier "G"
+23            assign_value
+24              ':='
+25              expression
+26                term
+27                  primary
+28                    identifier "F"
+29                  '+'
+30                  primary
+31                    function_call
+32                      identifier "Compute_Something"
+33                      formal_part
+34                        '('
+35                        expression_list
+36                          expression_list
+37                            expression
+38                              term
+39                                primary
+40                                  identifier "Arg_1"
+41                          ','
+42                          expression
+43                            term
+44                              primary
+45                                identifier "Arg_2"
+46                        ')'
+47            ';' "-- comment on statement", blank line
+48      statement
+49        function_call
+50          identifier "Do_E"
+51        formal_part
+52        ';', blank line, "-- comment before 'end if'"
+53    'end'
+54    'if'
+55    ';'
+@end example
+
+Actions are computed by traversing the tree depth first. Thus the
+first node considered is node 6; it is an @code{identifier}, which is
+a terminal token and has no indent action. The next nodes considered
+are 5, 7, 9, 8, 4, 10, 13, 12, 14, 16, 15, 11; all have no action. Next is
+node 3 @code{expression} on lines 1 and 2, which has the indent
+action:
+@example
+   [(wisi-hanging nil ada-indent-broken) ada-indent-broken]
 @end example
+This gives separate indents for the code and the trailing comment.
+The code is @code{A < B and C < D} on lines 1 and 2, with
+a trailing comment of @code{--  comment on expression} on line 3. Since the
+first token in the expression follows @code{if} on line 1, it is not first on
+the line; thus @code{wisi-hanging} gives a delta indent of 2 for line
+2, leaving line 1 at nil.
 
-First, the indent for the lower-level nonterminals (@code{expression,
-statements, elsif_list}) are computed. Assume they set the indent for
-line 10 to 2 (for the hanging expression) and leave the rest at nil.
+The comment on line 3 is given an indent of 2. Note that if the
+comment indent had not been given separately in this indent action, it
+would have been given the indent of the next token, which is nil.
 
-Next, the action for the inner @code{if_statement} is executed. Most
-of the tokens specify an indent of @code{nil}, which means the current
-accumulated indent is not changed. For the others, the action is as
-follows:
+The stored controlling token line for lines 2 and 3 is 1.
 
-@table @code
-@item expression:
-The expression @code{E} is contained on one line, and it is not the
-first token on that line, so the indent for line 5 is not changed.
-
-@item statements: [ada-indent ada-indent]
-This specifies separate indents for code and trailing comments,
-because otherwise the trailing comments would be indented with the
-following @code{THEN}; instead they are indented with the expression
-code; see the comment on line 7.
-
-Here @code{ada-indent} is 3, so the indent for lines 6 and 7 (for the
-first occurence of @code{statments}) is
-incremented from @code{nil} to @code{3}.
-
-For the second occurence of @code{statements}, line 9 is incremented
-from @code{nil} to @code{3}, and line 10 from @code{2} to @code{5}.
-@end table
+Next is node 37 @code{expression} @code{Arg_1} on line 6; it is all on
+one line and is not the first token on that line, so the indent is
+left at nil. Similarly for node 42 @code{expression} @code{Arg_2} on
+line 7.
 
-At this point, the accumulated indents are (the indent is given after
-the line number):
+At this point, the indents for all the lines are (the stored
+controlling token line and the indent are given after the line number):
 @example
- 1: nil : if A < B and
- 2: nil :   C < D
- 3: nil :   --  comment on expression
- 4: nil : then
- 5: nil :    if E then
- 6:   3 :       Do_E;
- 7:   3 :       -- comment on statement
- 8: nil :    elsif F then
- 9:   3 :       G := A + Compute_Something
-10:   5 :         (arg_1, arg_2);
-11: nil :    end if;
-12: nil : end if;
+ 1: nil nil : if A < B and
+ 2: 1   2   :   C < D
+ 3: 1   2   :   --  comment on expression
+ 4: nil nil : then
+ 5: nil nil :    G := F + Compute_Something
+ 6: nil nil :      (Arg_1,
+ 7: nil nil :       Arg_2);
+ 8: nil nil :    -- comment on statement
+ 9: nil nil :
+10: nil nil :    Do_E;
+11: nil nil :
+12: nil nil :    -- comment before 'end if'
+13: nil nil : end if;
 @end example
 
-Then the action is executed for the outer @code{if_statement}:
+Next indent action is node 33 @code{formal_part} @code{(Arg_1, Arg_2)}
+lines 6 and 7. The indent action is
+@example
+%((wisi-indent-action [nil
+                       (wisi-anchored 1 1)
+                       (wisi-anchored 1 0)]))%
+@end example
+This computes a delta indent for the @code{expression_list} of
+@code{Anchored, 6, 1}; anchored to the left parenthesis on line 6
+with an offset of 1. This delta indent is applied to the lines whose
+first tokens are contained by the @code{expression_list}; that is just
+line 7. Since the indent for line 7 is currently nil, it is set to
+@code{Anchored, 6, 1}, controlling token line 6.
+
+Next is node 31 @code{function_call} @code{Compute_Something (Arg_1,
+Arg_2)} on lines 5 .. 7. The indent action is @code{[nil
+ada-indent-broken]}, which gives a delta indent of 2 for the
+@code{formal_part}. This is applied to lines 6 and 7, leaving line 6
+at 2, stored controlling token line 5; and 7 unchanged.
+
+Next is node 25 @code{expression} @code{F + Compute_Something (Arg_1,
+Arg_2)} on lines 5 .. 7; this computes a delta indent of 2 for lines
+6 and 7. The controlling token line is 5, and the stored controlling
+token line for line 6 is also 5, so this delta indent is ignored for
+line 6. Line 7 is anchored, so the delta indent is also ignored. Thus
+the indents for lines 6 and 7 are unchanged.
+
+Next is node 21 @code{assignment} on lines 5 .. 7. The indent for
+@code{assign_value} is @code{(wisi-hanging ada-indent-broken (* 2
+ada-indent-broken))}; this computes a delta indent of 2 for lines 6
+and 7; it is ignored, as the delta indent from node 25 was.
+
+The comment and blank line on lines 8 and 9 are stored in node 47, and
+there is no following token in the production, so the delta indent for
+lines 8 and 9 is nil.
+
+The next action computed is node 49 @code{function_call} @code{Do_E}
+on line 10. The indent action is @code{%((wisi-indent-action [nil
+ada-indent-broken]))%}; since the code is all on one line this leaves
+the indent for line 10 at nil. The indents for the trailing comment and
+blank line on lines 11 and 12 are also left at nil.
+
+At this point, the indents are:
+@example
+ 1: nil nil          : if A < B and
+ 2: 1   2            :   C < D
+ 3: 1   2            :   --  comment on expression
+ 4: nil nil          : then
+ 5: nil nil          :    G := F + Compute_Something
+ 6: 5   2            :      (Arg_1,
+ 7: 6   Anchored 6 1 :       Arg_2);
+ 8: nil nil          :    -- comment on statement
+ 9: nil nil          :
+10: nil nil          :    Do_E;
+11: nil nil          :
+12: nil nil          :    -- comment before 'end if'
+13: nil nil          : end if;
+@end example
+
+The final indent action is for node 1 @code{if_statement} on lines 1
+thru 13. The indent for each token in the production is:
 
 @table @code
-@item expression: [(wisi-hanging% ada-indent-broken (* 2 ada-indent-broken)) 
ada-indent-broken]
-This specifies separate indents for code and trailing comments,
-because otherwise the trailing comments would be indented with the
-following @code{THEN}; instead they are indented with the expression
-code; see the comment on line 3.
+@item if: nil
+Leaves line 1 at nil.
+
+@item expression: ada-indent-broken
+The expression is @code{A < B and C < D} on lines 1 and 2, with a
+comment on line 3; the controlling token line is 1, the same as the
+stored controlling token line for lines 2 and 3, so this is ignored.
+
+@item then: nil
+Applies delta of nil to line 4.
 
-In this case, @code{wisi-hanging%} returns DELTA-1, which is
-@code{ada-indent-broken}, which is 2. So the indent for line 2 is
-incremented from @code{nil} to @code{2}.
+Note that specifying the indent for the comment following an
+expression in the @code{expression} indent action enforces a style of
+indenting the comment with the last line of the expression.
 
-The indent for line 3 is also incremented from @code{nil} to @code{2}.
+@item statements: [(wisi-block ada-indent) ada-indent]
+Applies a delta indent of 3 to the code and comments on lines 5 thru
+10, and 3 to the trailing comment and blank line on lines 11 and
+12. @code{wisi-block} says to ignore the stored controlling token
+line; the controlling token line here is 5, which matches the stored
+controlling token line for line 6, so without @code{wisi-block} the
+delta would be ignored for line 6; this is why we need
+@code{wisi-block} (see the comparison after this table). The delta is
+still ignored for the Anchored line 7.
 
-@item statements: [ada-indent ada-indent]
-Here there is only one statement; the nested @code{if_statement}. The
-indent for lines 5 .. 11 are each incremented by 3.
+Note that the indent for comments after statements is given here, not
+at a lower level; it would be tedious to add it to each statement.
+
+@item end if; : nil
+Leaves line 13 at nil.
 @end table
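+
+For comparison, if the @code{statements} indent had been written
+without @code{wisi-block}, as
+@example
+[ada-indent ada-indent]
+@end example
+then the delta for line 6 would be ignored (its stored controlling
+token line, 5, matches this action's controlling token line), and
+line 6 would stay at 2 instead of becoming 5.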
 
-The final result is:
+The indents so far:
 @example
- 1: nil : if A < B and
- 2:   2 :   C < D
- 3:   2 :   --  comment on expression
- 4: nil : then
- 5:   3 :    if E then
- 6:   6 :       Do_E;
- 7:   6 :       -- comment on statement
- 8:   3 :    elsif F then
- 9:   6 :       G := A + Compute_Something
-10:   8 :         (arg_1, arg_2);
-11:   6 :    end if;
-12: nil : end if;
+ 1: nil nil          : if A < B and
+ 2: 1   2            :   C < D
+ 3: 1   2            :   --  comment on expression
+ 4: nil nil          : then
+ 5: 5   3            :    G := F + Compute_Something
+ 6: 5   5            :      (Arg_1,
+ 7: 5   Anchored 6 1 :       Arg_2);
+ 8: 5   3            :    -- comment on statement
+ 9: 5   3            :
+10: 5   3            :    Do_E;
+11: 5   3            :
+12: 5   3            :    -- comment before 'end if'
+13: nil nil          : end if;
 @end example
 
+The final step is to compute the anchored lines; that sets the indent
+for line 7 to 6.
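+
+That is, line 7 resolves to the indent of its anchor line plus the
+stored offset:
+@example
+line 7 : indent (line 6) + 1 = 5 + 1 = 6
+@end example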
+
 In a full grammar, the top production should specify an indent of 0,
 not nil, for tokens that are not indented; then every line will have a
-non-nil indent. However, in normal operation a nil indent is treated
-as 0; the @code{wisi-indent} text property is not set for lines that
-have nil indent, and @code{wisi-indent-region} detects that and uses 0
-for the indent. You can set the variable @code{wisi-debug} to a value
-> 0 to signal an error for nil indents; this is useful to catch indent
-errors during grammar development.
+non-nil indent.
+
+However, in normal operation a nil indent is treated as 0; the
+@code{wisi-indent} text property is not set for lines that have nil
+indent, and @code{wisi-indent-region} detects that and uses 0 for the
+indent. You can set the variable @code{wisi-debug} to a value > 0 to
+signal an error for nil indents; this is useful to catch indent errors
+during grammar development.
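+
+For example, during grammar development you might enable this check
+with:
+@example
+(setq wisi-debug 1)
+@end example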
 
 @node In-parse actions
 @section In-parse actions
@@ -575,7 +886,7 @@ The arguments are token indices, giving a range of
 tokens. LAST-TOKEN may be omitted if it is the same as FIRST-TOKEN.
 
 Set the @code{name} component of the left-hand-side to the merger of
-the @code{name} or @code{byte-region} components of the identified tokens.
+the @code{name} or @code{byte_region} components of the identified tokens.
 
 @item wisi-match-name START-TOKEN END-TOKEN
 The arguments are token indices. Compare the text contained by the
diff --git a/wisitoken-bnf-generate.adb b/wisitoken-bnf-generate.adb
index 821ef6fe76..19b6ab37c4 100644
--- a/wisitoken-bnf-generate.adb
+++ b/wisitoken-bnf-generate.adb
@@ -3,7 +3,7 @@
 --  Parser for Wisi grammar files, producing Ada source
 --  files for a parser.
 --
---  Copyright (C) 2012 - 2015, 2017 - 2020 Free Software Foundation, Inc.
+--  Copyright (C) 2012 - 2015, 2017 - 2022 Free Software Foundation, Inc.
 --
 --  The WisiToken package is free software; you can redistribute it
 --  and/or modify it under terms of the GNU General Public License as
@@ -19,6 +19,7 @@
 
 pragma License (Modified_GPL);
 
+with Ada.Calendar;
 with Ada.Command_Line;
 with Ada.Directories;
 with Ada.Exceptions;
@@ -28,6 +29,7 @@ with Ada.Strings.Maps;
 with Ada.Strings.Unbounded;
 with Ada.Text_IO;
 with GNAT.Traceback.Symbolic;
+with System.Multiprocessors;
 with WisiToken.BNF.Generate_Utils;
 with WisiToken.BNF.Output_Ada;
 with WisiToken.BNF.Output_Ada_Common;
@@ -35,13 +37,15 @@ with WisiToken.BNF.Output_Ada_Emacs;
 with WisiToken.BNF.Output_Elisp_Common;
 with WisiToken.Generate.LR.LALR_Generate;
 with WisiToken.Generate.LR.LR1_Generate;
+with WisiToken.Generate.LR1_Items;
 with WisiToken.Generate.Packrat;
 with WisiToken.Parse.LR.Parser_No_Recover; -- for reading BNF file
 with WisiToken.Productions;
 with WisiToken.Syntax_Trees;
 with WisiToken.Text_IO_Trace;
+with WisiToken.Generate.Tree_Sitter;
+with WisiToken_Grammar_Editing;
 with WisiToken_Grammar_Runtime;
-with Wisitoken_Grammar_Actions;
 with Wisitoken_Grammar_Main;
 procedure WisiToken.BNF.Generate
 is
@@ -50,7 +54,7 @@ is
       use Ada.Text_IO;
       First : Boolean := True;
    begin
-      Put_Line (Standard_Error, "version 2.1"); -- matches release version in 
Docs/wisitoken.html
+      Put_Line (Standard_Error, "version 3.0"); -- matches release version in 
Docs/wisitoken.html
       Put_Line (Standard_Error, "wisitoken-bnf-generate [options] {wisi 
grammar file}");
       Put_Line (Standard_Error, "Generate source code implementing a parser 
for the grammar.");
       New_Line (Standard_Error);
@@ -106,33 +110,39 @@ is
       --  verbosity meaning is actually determined by output choice;
       --  they should be consistent with this description.
       Put_Line
-        (Standard_Error, "  -v <EBNF level> <Table level> <Minimal_Complete 
level>: sets verbosity (default 0):");
+        (Standard_Error, "  --verbosity <key=value ...> sets verbosity levels 
(default 0):");
       Put_Line (Standard_Error, "     0 - only error messages to standard 
error");
       Put_Line (Standard_Error, "     1 - add diagnostics to standard out");
       Put_Line (Standard_Error, "     2 - more diagnostics to standard out, 
ignore unused tokens, unknown conflicts");
       Put_Line (Standard_Error, "  --generate ...: override grammar file 
%generate directive");
-      Put_Line (Standard_Error, "  --output_bnf <file_name> : output 
translated BNF source to file_name");
+      Put_Line (Standard_Error, "  --output_bnf : output translated EBNF 
source to <grammar file name base>_bnf.wy");
       Put_Line (Standard_Error, "  --suffix <string>; appended to grammar file 
name");
       Put_Line (Standard_Error, "  --ignore_conflicts; ignore excess/unknown 
conflicts");
       Put_Line (Standard_Error,
                 "  --test_main; generate standalone main program for running 
the generated parser, modify file names");
-      Put_Line (Standard_Error, "  --time; output execution time of various 
stages");
-
+      Put_Line (Standard_Error,
+                "  --task_count n; number of tasks used to compute LR1 items; 
0 means CPU count." &
+                  " Default 1 unless %lr1_hash_table_size specified; then 0.");
+      Put_Line (Standard_Error, "  --lr1_hash_table_size n; default 113; 
bigger should be faster");
+      Put_Line (Standard_Error, "verbosity keys:");
+      Enable_Trace_Help;
    end Put_Usage;
 
-   Language_Name         : Ada.Strings.Unbounded.Unbounded_String; -- The 
language the grammar defines
-   Output_File_Name_Root : Ada.Strings.Unbounded.Unbounded_String;
-   Suffix                : Ada.Strings.Unbounded.Unbounded_String;
-   BNF_File_Name         : Ada.Strings.Unbounded.Unbounded_String;
-   Output_BNF            : Boolean := False;
-   Ignore_Conflicts      : Boolean := False;
-   Test_Main             : Boolean := False;
+   Language_Name           : Ada.Strings.Unbounded.Unbounded_String; -- The 
language the grammar defines
+   Output_File_Name_Root   : Ada.Strings.Unbounded.Unbounded_String;
+   Suffix                  : Ada.Strings.Unbounded.Unbounded_String;
+   Output_BNF              : Boolean                          := False;
+   Ignore_Conflicts        : Boolean                          := False;
+   Test_Main               : Boolean                          := False;
+   Generate_Task_Count     : System.Multiprocessors.CPU_Range := 1;
+   Generate_Task_Count_Set : Boolean                          := False;
 
    Command_Generate_Set : Generate_Set_Access; -- override grammar file 
declarations
 
-   Trace          : aliased WisiToken.Text_IO_Trace.Trace 
(Wisitoken_Grammar_Actions.Descriptor'Access);
+   Trace          : aliased WisiToken.Text_IO_Trace.Trace;
    Input_Data     : aliased WisiToken_Grammar_Runtime.User_Data_Type;
    Grammar_Parser : WisiToken.Parse.LR.Parser_No_Recover.Parser;
+   Log_File       : Ada.Text_IO.File_Type; -- not used
 
    procedure Use_Input_File (File_Name : in String)
    is
@@ -141,12 +151,12 @@ is
    begin
       Output_File_Name_Root := +Ada.Directories.Base_Name (File_Name) & Suffix;
 
-      Wisitoken_Grammar_Main.Create_Parser
-        (Parser    => Grammar_Parser,
-         Trace     => Trace'Unchecked_Access,
-         User_Data => Input_Data'Unchecked_Access);
+      WisiToken.Parse.LR.Parser_No_Recover.New_Parser
+        (Grammar_Parser, Wisitoken_Grammar_Main.Create_Lexer 
(Trace'Unchecked_Access),
+         Wisitoken_Grammar_Main.Create_Parse_Table, 
Wisitoken_Grammar_Main.Create_Productions,
+         Input_Data'Unchecked_Access);
 
-      Grammar_Parser.Lexer.Reset_With_File (File_Name);
+      Grammar_Parser.Tree.Lexer.Reset_With_File (File_Name);
 
       declare
          Language_Name_Dir   : constant Integer := Ada.Strings.Fixed.Index
@@ -175,20 +185,16 @@ begin
       loop
          exit when Argument (Arg_Next)(1) /= '-';
 
-         --   --help, -v first, then alphabetical
+         --   --help, --verbosity first, then alphabetical
 
          if Argument (Arg_Next) = "--help" then
             Put_Usage;
             return;
 
-         elsif Argument (Arg_Next) = "-v" then
-            Arg_Next  := Arg_Next + 1;
-            WisiToken.Trace_Generate_EBNF := Integer'Value (Argument 
(Arg_Next));
-            Arg_Next  := Arg_Next + 1;
-            WisiToken.Trace_Generate_Table := Integer'Value (Argument 
(Arg_Next));
-            Arg_Next  := Arg_Next + 1;
-            WisiToken.Trace_Generate_Minimal_Complete := Integer'Value 
(Argument (Arg_Next));
-            Arg_Next  := Arg_Next + 1;
+         elsif Argument (Arg_Next) = "--verbosity" then
+            Arg_Next := Arg_Next + 1;
+            WisiToken.Enable_Trace (Argument (Arg_Next));
+            Arg_Next := Arg_Next + 1;
 
          elsif Argument (Arg_Next) = "--ignore_conflicts" then
             Ignore_Conflicts := True;
@@ -202,7 +208,7 @@ begin
             begin
                begin
                   Tuple.Gen_Alg := Generate_Algorithm'Value (Argument 
(Arg_Next));
-                  Arg_Next     := Arg_Next + 1;
+                  Arg_Next      := Arg_Next + 1;
                exception
                when Constraint_Error =>
                   raise User_Error with "invalid value for 
generator_algorithm: '" & Argument (Arg_Next) & ";";
@@ -222,13 +228,11 @@ begin
                            Tuple.Text_Rep := True;
                            Arg_Next := Arg_Next + 1;
 
-                        elsif (for some I of Lexer_Image => To_Lower (Text) =  
I.all) then
+                        elsif (for some I of Lexer_Image => To_Lower (Text) = 
I.all) then
                            Tuple.Lexer := To_Lexer (Text);
                            Arg_Next := Arg_Next + 1;
 
-                        elsif (for some I in Valid_Interface =>
-                                 To_Lower (Text) = To_Lower 
(Valid_Interface'Image (I)))
-                        then
+                        elsif (for some I of Interface_Image => To_Lower 
(Text) = I.all)  then
                            Tuple.Interface_Kind := 
WisiToken.BNF.Valid_Interface'Value (Text);
                            Arg_Next := Arg_Next + 1;
 
@@ -241,25 +245,42 @@ begin
                Add (Command_Generate_Set, Tuple);
             end;
 
+         elsif Argument (Arg_Next) = "--lr1_hash_table_size" then
+            Arg_Next := Arg_Next + 1;
+
+            Input_Data.Language_Params.LR1_Hash_Table_Size := Positive'Value 
(Argument (Arg_Next));
+            if not Generate_Task_Count_Set then
+               Generate_Task_Count := 0;
+            end if;
+
+            Arg_Next := Arg_Next + 1;
+
          elsif Argument (Arg_Next) = "--output_bnf" then
-            Output_BNF    := True;
-            Arg_Next      := Arg_Next + 1;
-            BNF_File_Name := +Argument (Arg_Next);
-            Arg_Next      := Arg_Next + 1;
+            Output_BNF := True;
+            Arg_Next   := Arg_Next + 1;
 
          elsif Argument (Arg_Next) = "--suffix" then
             Arg_Next := Arg_Next + 1;
             Suffix   := +Argument (Arg_Next);
             Arg_Next := Arg_Next + 1;
 
+         elsif Argument (Arg_Next) = "--task_count" then
+            Arg_Next   := @ + 1;
+            Generate_Task_Count_Set := True;
+            declare
+               use System.Multiprocessors;
+            begin
+               Generate_Task_Count := CPU_Range'Value (Argument (Arg_Next));
+               if Generate_Task_Count = 0 then
+                  Generate_Task_Count := Number_Of_CPUs;
+               end if;
+            end;
+            Arg_Next   := @ + 1;
+
          elsif Argument (Arg_Next) = "--test_main" then
             Arg_Next  := Arg_Next + 1;
             Test_Main := True;
 
-         elsif Argument (Arg_Next) = "--time" then
-            Arg_Next := Arg_Next + 1;
-            WisiToken.Trace_Time := True;
-
          else
             raise User_Error with "invalid argument '" & Argument (Arg_Next) & 
"'";
          end if;
@@ -273,11 +294,21 @@ begin
       end if;
    end;
 
+   if Trace_Generate > Outline then
+      Trace.Put_Line ("parse grammar file");
+   end if;
+
    begin
-      Grammar_Parser.Parse;
+      Grammar_Parser.Parse (Log_File); -- Execute_Actions only does meta phase
    exception
    when WisiToken.Syntax_Error =>
-      Grammar_Parser.Put_Errors;
+      if Grammar_Parser.Tree.Parents_Set then
+         Grammar_Parser.Put_Errors;
+      elsif Grammar_Parser.Tree.Stream_Count >= 2 then
+         Grammar_Parser.Put_Errors (Grammar_Parser.Tree.Last_Parse_Stream);
+      else
+         Grammar_Parser.Put_Errors (Grammar_Parser.Tree.Shared_Stream);
+      end if;
       raise;
    when E : WisiToken.Parse_Error =>
       WisiToken.Generate.Put_Error (Ada.Exceptions.Exception_Message (E));
@@ -287,28 +318,75 @@ begin
    declare
       use all type Ada.Strings.Unbounded.Unbounded_String;
       use Ada.Text_IO;
+      use all type WisiToken_Grammar_Runtime.Meta_Syntax;
 
       Generate_Set    : Generate_Set_Access;
       Multiple_Tuples : Boolean;
 
       Lexer_Done : Lexer_Set := (others => False);
 
+      BNF_Tree : Syntax_Trees.Tree; --  Only filled when source is EBNF, alg 
is LR
+
       --  In general, all of the data in Generate_Utils.Generate_Data
       --  depends on the generate tuple parameters. However, if
       --  'If_Lexer_Present' is false, then they don't depend on the lexer,
-      --  and if 'If_Parser_Present' is false, then they don't depend on the
+      --  and if 'If_Parser_Present' is false, then they don't depend on
       --  Gen_Alg, except for the parser table. But it's not worth trying to
       --  cache results in those cases; they only happen in test grammars,
       --  which are small.
 
+      procedure Translate_To_BNF
+      is begin
+         if Trace_Generate > Outline or Trace_Generate_EBNF > Outline then
+            Ada.Text_IO.Put_Line ("Translate EBNF tree to BNF");
+         end if;
+
+         declare
+            Time_Start : constant Ada.Calendar.Time := Ada.Calendar.Clock;
+            Tree       : WisiToken.Syntax_Trees.Tree renames 
Grammar_Parser.Tree;
+         begin
+            Syntax_Trees.Copy_Tree
+              (Source      => Tree,
+               Destination => BNF_Tree,
+               User_Data   => Input_Data'Unchecked_Access);
+
+            if Trace_Generate_EBNF > Detail then
+               Ada.Text_IO.Put_Line ("EBNF tree:");
+               BNF_Tree.Print_Tree;
+            end if;
+
+            WisiToken_Grammar_Editing.Translate_EBNF_To_BNF (BNF_Tree, 
Input_Data);
+
+            if Trace_Generate_EBNF > Detail then
+               Ada.Text_IO.New_Line;
+               Ada.Text_IO.Put_Line ("BNF tree:");
+               BNF_Tree.Print_Tree;
+            end if;
+
+            if Output_BNF then
+               --  FIXME: if %if is present, it can change the bnf tree; 
output one for each tuple.
+               WisiToken_Grammar_Editing.Print_Source (-Output_File_Name_Root 
& "_bnf.wy", BNF_Tree, Input_Data);
+            end if;
+
+            if Trace_Time then
+               Ada.Text_IO.Put_Line
+                 ("translate to bnf time:" & Duration'Image (Ada.Calendar."-" 
(Ada.Calendar.Clock, Time_Start)));
+            end if;
+
+            if WisiToken.Generate.Error then
+               raise WisiToken.Grammar_Error with "errors during translating 
EBNF to BNF: aborting";
+            end if;
+         end;
+      end Translate_To_BNF;
+
       procedure Parse_Check
         (Lexer  : in Lexer_Type;
          Parser : in Generate_Algorithm;
          Phase  : in WisiToken_Grammar_Runtime.Action_Phase)
+      --  Ensure that the proper grammar file post-parse processing has been 
done.
       is
          use all type Ada.Containers.Count_Type;
          use all type WisiToken_Grammar_Runtime.Action_Phase;
-         use all type WisiToken_Grammar_Runtime.Meta_Syntax;
       begin
          Input_Data.User_Parser := Parser;
          Input_Data.User_Lexer  := Lexer;
@@ -318,53 +396,55 @@ begin
          Input_Data.Reset; -- only resets Other data
 
          Input_Data.Phase := Phase;
-         Grammar_Parser.Execute_Actions;
 
          case Phase is
          when Meta =>
+            if Trace_Generate > Outline then
+               Trace.Put_Line ("post-parse grammar file META");
+            end if;
+
+            Grammar_Parser.Execute_Actions;
+
             case Input_Data.Meta_Syntax is
             when Unknown =>
                Input_Data.Meta_Syntax := BNF_Syntax;
 
-            when BNF_Syntax =>
+            when BNF_Syntax | EBNF_Syntax =>
                null;
+            end case;
 
-            when EBNF_Syntax =>
-               declare
-                  Tree  : WisiToken.Syntax_Trees.Tree renames 
Grammar_Parser.Parsers.First_State_Ref.Tree;
-               begin
-                  if Trace_Generate_EBNF > Outline then
-                     Ada.Text_IO.Put_Line ("Translate EBNF tree to BNF");
-                  end if;
-
-                  if Trace_Generate_EBNF > Detail then
-                     Ada.Text_IO.Put_Line ("EBNF tree:");
-                     Tree.Print_Tree
-                       (Wisitoken_Grammar_Actions.Descriptor,
-                        Image_Action => 
WisiToken_Grammar_Runtime.Image_Grammar_Action'Access);
-                  end if;
+         when Other =>
+            case Valid_Generate_Algorithm'(Parser) is
+            when LR_Generate_Algorithm | Packrat_Generate_Algorithm =>
+               --  IMPROVEME: for now, Packrat requires a BNF tree; 
eventually, it
+               --  will use the EBNF tree.
 
-                  WisiToken_Grammar_Runtime.Translate_EBNF_To_BNF (Tree, 
Input_Data);
+               if Input_Data.Meta_Syntax = EBNF_Syntax and BNF_Tree.Is_Empty 
then
+                  Translate_To_BNF;
+               end if;
 
-                  if Trace_Generate_EBNF > Detail then
-                     Ada.Text_IO.New_Line;
-                     Ada.Text_IO.Put_Line ("BNF tree:");
-                     Tree.Print_Tree
-                       (Wisitoken_Grammar_Actions.Descriptor,
-                        Image_Action => 
WisiToken_Grammar_Runtime.Image_Grammar_Action'Access);
+               if BNF_Tree.Is_Empty then
+                  if Trace_Generate > Outline then
+                     Trace.Put_Line ("post-parse grammar file OTHER, main 
tree");
                   end if;
 
-                  if Output_BNF then
-                     WisiToken_Grammar_Runtime.Print_Source (-BNF_File_Name, 
Tree, Input_Data);
+                  Grammar_Parser.Execute_Actions;
+               else
+                  if Trace_Generate > Outline then
+                     Trace.Put_Line ("post-parse grammar file OTHER, bnf 
tree");
                   end if;
 
-                  if WisiToken.Generate.Error then
-                     raise WisiToken.Grammar_Error with "errors during 
translating EBNF to BNF: aborting";
-                  end if;
-               end;
+                  WisiToken.Parse.LR.Parser_No_Recover.Execute_Actions
+                    (BNF_Tree, Grammar_Parser.Productions, 
Input_Data'Unchecked_Access);
+               end if;
+
+            when External =>
+               null;
+
+            when Tree_Sitter =>
+               null;
             end case;
 
-         when Other =>
             if Input_Data.Rule_Count = 0 or Input_Data.Tokens.Rules.Length = 0 
then
                raise WisiToken.Grammar_Error with "no rules";
             end if;
@@ -376,15 +456,16 @@ begin
          raise;
       end Parse_Check;
 
+      Cached_Recursions : WisiToken.Generate.Recursions := 
WisiToken.Generate.Empty_Recursions;
    begin
-      --  Get the the input file quads, translate EBNF
+      --  Get the generate tuples
       Parse_Check (None, None, WisiToken_Grammar_Runtime.Meta);
 
       if Command_Generate_Set = null then
          if Input_Data.Generate_Set = null then
             raise User_Error with
               WisiToken.Generate.Error_Message
-                (Input_Data.Grammar_Lexer.File_Name, 1,
+                (Grammar_Parser.Tree.Lexer.File_Name, 1,
                  "generate algorithm, output_language, lexer, interface not 
specified");
          end if;
 
@@ -393,202 +474,288 @@ begin
          Generate_Set := Command_Generate_Set;
       end if;
 
-      Multiple_Tuples := Generate_Set'Length > 1;
+      Multiple_Tuples :=
+        --  Preserve output file names when overriding file %generate
+        (if Input_Data.Generate_Set = null then False else 
Input_Data.Generate_Set'Length > 1) or
+           (if Command_Generate_Set = null then False else 
Command_Generate_Set'Length > 1);
 
       for Tuple of Generate_Set.all loop
-         Parse_Check
-           (Lexer  => Tuple.Lexer,
-            Parser => Tuple.Gen_Alg,
-            Phase  => WisiToken_Grammar_Runtime.Other);
-
-         declare
-            use Ada.Real_Time;
-
-            Time_Start : Time;
-            Time_End   : Time;
-
-            Generate_Data : aliased WisiToken.BNF.Generate_Utils.Generate_Data 
:=
-              WisiToken.BNF.Generate_Utils.Initialize (Input_Data, 
Ignore_Conflicts);
-
-            Packrat_Data : WisiToken.Generate.Packrat.Data
-              (Generate_Data.Descriptor.First_Terminal, 
Generate_Data.Descriptor.First_Nonterminal,
-               Generate_Data.Descriptor.Last_Nonterminal);
-
-            Parse_Table_File_Name : constant String :=
-              (if WisiToken.Trace_Generate_Table = 0 and Tuple.Gen_Alg in LALR 
.. Packrat_Proc
-               then -Output_File_Name_Root & "_" & To_Lower 
(Generate_Algorithm'Image (Tuple.Gen_Alg)) &
-                 (if Input_Data.If_Lexer_Present
-                  then "_" & Lexer_Image (Input_Data.User_Lexer).all
-                  else "") &
-                  ".parse_table"
-               else "");
-
-            procedure Parse_Table_Append_Stats
-            is
-               Parse_Table_File : File_Type;
-            begin
-               Open (Parse_Table_File, Append_File, Parse_Table_File_Name);
-               Set_Output (Parse_Table_File);
-               Generate_Data.Parser_State_Count :=
-                 Generate_Data.LR_Parse_Table.State_Last - 
Generate_Data.LR_Parse_Table.State_First + 1;
-               WisiToken.BNF.Generate_Utils.Put_Stats (Input_Data, 
Generate_Data);
-               Set_Output (Standard_Output);
-               Close (Parse_Table_File);
-            end Parse_Table_Append_Stats;
+         if Trace_Generate > Outline then
+            Trace.New_Line;
+            Trace.Put_Line ("process tuple " & Image (Tuple));
+         end if;
 
-         begin
-            if not Lexer_Done (Input_Data.User_Lexer) then
-               Lexer_Done (Input_Data.User_Lexer) := True;
-               case Input_Data.User_Lexer is
-               when re2c_Lexer =>
-                  WisiToken.BNF.Output_Ada_Common.Create_re2c
-                    (Input_Data, Tuple, Generate_Data, -Output_File_Name_Root);
-               when others =>
-                  null;
-               end case;
+         case Tuple.Gen_Alg is
+         when None | External =>
+            if Input_Data.Meta_Syntax = EBNF_Syntax and BNF_Tree.Is_Empty then
+               --  'none' is used in unit tests to test bnf translation.
+               Translate_To_BNF;
             end if;
 
-            case Tuple.Gen_Alg is
-            when None =>
-               --  Just translate EBNF to BNF, done in Parse_Check
-               null;
 
-            when LALR =>
+         when Tree_Sitter =>
+            Parse_Check
+              (Lexer  => Tuple.Lexer,
+               Parser => Tuple.Gen_Alg,
+               Phase  => WisiToken_Grammar_Runtime.Other);
 
-               Time_Start := Clock;
+            declare
+               use WisiToken.Generate.Tree_Sitter;
 
-               if Generate_Data.Grammar 
(Generate_Data.Descriptor.Accept_ID).LHS = Invalid_Token_ID then
-                  WisiToken.Generate.Put_Error
-                    (WisiToken.Generate.Error_Message
-                       (Grammar_Parser.Lexer.File_Name, 1,
-                        "%start token not specified or not found; no LALR 
parse table generated"));
-               else
-                  Generate_Data.LR_Parse_Table := 
WisiToken.Generate.LR.LALR_Generate.Generate
-                    (Generate_Data.Grammar,
-                     Generate_Data.Descriptor.all,
-                     Generate_Utils.To_Conflicts
-                       (Generate_Data, Input_Data.Conflicts, 
Input_Data.Grammar_Lexer.File_Name),
-                     Generate_Utils.To_McKenzie_Param (Generate_Data, 
Input_Data.McKenzie_Recover),
-                     Parse_Table_File_Name,
-                     Include_Extra     => Test_Main,
-                     Ignore_Conflicts  => Ignore_Conflicts,
-                     Partial_Recursion => 
Input_Data.Language_Params.Partial_Recursion);
-
-                  if WisiToken.Trace_Time then
-                     Time_End := Clock;
-
-                     Put_Line
-                       (Standard_Error,
-                        "LALR " & Lexer_Image (Tuple.Lexer).all & " generate 
time:" &
-                          Duration'Image (To_Duration (Time_End - 
Time_Start)));
+               procedure Translate (Tree : in out Syntax_Trees.Tree)
+               is begin
+                  if Trace_Generate > Outline then
+                     Ada.Text_IO.New_Line;
+                     Ada.Text_IO.Put_Line ("output tree_sitter grammar");
                   end if;
 
-                  if Parse_Table_File_Name /= "" then
-                     Parse_Table_Append_Stats;
-                  end if;
-               end if;
+                  Eliminate_Empty_Productions (Input_Data, Tree);
 
-            when LR1 =>
-               Time_Start := Clock;
+                  Print_Tree_Sitter
+                    (Input_Data,
+                     Tree,
+                     Tree.Lexer,
+                     Output_File_Name => -Output_File_Name_Root & ".js",
+                     Language_Name    => -Language_Name);
 
-               if Generate_Data.Grammar 
(Generate_Data.Descriptor.Accept_ID).LHS = Invalid_Token_ID then
-                  WisiToken.Generate.Put_Error
-                    (WisiToken.Generate.Error_Message
-                       (Grammar_Parser.Lexer.File_Name, 1,
-                        "%start token not specified or not found; no LALR 
parse table generated"));
-               else
-                  Generate_Data.LR_Parse_Table := 
WisiToken.Generate.LR.LR1_Generate.Generate
-                    (Generate_Data.Grammar,
-                     Generate_Data.Descriptor.all,
-                     Generate_Utils.To_Conflicts
-                       (Generate_Data, Input_Data.Conflicts, 
Input_Data.Grammar_Lexer.File_Name),
-                     Generate_Utils.To_McKenzie_Param (Generate_Data, 
Input_Data.McKenzie_Recover),
-                     Parse_Table_File_Name,
-                     Include_Extra     => Test_Main,
-                     Ignore_Conflicts  => Ignore_Conflicts,
-                     Partial_Recursion => 
Input_Data.Language_Params.Partial_Recursion);
-
-                  if Trace_Time then
-                     Time_End := Clock;
-
-                     Put_Line
-                       (Standard_Error,
-                        "LR1 " & Lexer_Image (Tuple.Lexer).all & " generate 
time:" &
-                          Duration'Image (To_Duration (Time_End - 
Time_Start)));
+                  if WisiToken.Generate.Error then
+                     --  FIXME: support --warning=error
+                     raise WisiToken.Grammar_Error with "errors during 
translating grammar to tree-sitter: aborting";
                   end if;
+               end Translate;
+            begin
 
-                  if Parse_Table_File_Name /= "" then
-                     Parse_Table_Append_Stats;
-                  end if;
-               end if;
+               Translate (Grammar_Parser.Tree);
+
+               Create_Test_Main (-Output_File_Name_Root);
+            end;
 
-            when Packrat_Generate_Algorithm =>
-               --  The only significant computation done for Packrat is First, 
done
-               --  in Initialize; not worth timing.
+         when LR_Packrat_Generate_Algorithm =>
+            Parse_Check
+              (Lexer  => Tuple.Lexer,
+               Parser => Tuple.Gen_Alg,
+               Phase  => WisiToken_Grammar_Runtime.Other);
+
+            if Input_Data.Language_Params.LR1_Hash_Table_Size /=
+              WisiToken.Generate.LR1_Items.Item_Set_Trees.Default_Rows and
+              not Generate_Task_Count_Set
+            then
+               Generate_Task_Count := System.Multiprocessors.Number_Of_CPUs;
+            end if;
 
-               Packrat_Data := WisiToken.Generate.Packrat.Initialize
-                 (Input_Data.Grammar_Lexer.File_Name, Generate_Data.Grammar, 
Generate_Data.Source_Line_Map,
-                  Generate_Data.Descriptor.First_Terminal);
+            declare
+               use Ada.Real_Time;
+
+               Time_Start : Time;
+               Time_End   : Time;
+
+               --  We could use a cached Generate_Data if not
+               --  (Input_Data.If_Lexer_Present or 
Input_Data.If_Parser_Present), but
+               --  that would not save much time, and would complicate this 
logic. We
+               --  do cache Recursions.
+               Generate_Data : aliased 
WisiToken.BNF.Generate_Utils.Generate_Data :=
+                 WisiToken.BNF.Generate_Utils.Initialize
+                   (Input_Data'Unchecked_Access, 
Grammar_Parser.Tree.Lexer.File_Name, Ignore_Conflicts);
+
+               Packrat_Data : WisiToken.Generate.Packrat.Data
+                 (Generate_Data.Descriptor.First_Terminal, 
Generate_Data.Descriptor.First_Nonterminal,
+                  Generate_Data.Descriptor.Last_Nonterminal);
+
+               Parse_Table_File_Name : constant String :=
+                 (if Tuple.Gen_Alg in LALR .. Packrat_Proc
+                  then -Output_File_Name_Root & "_" & To_Lower 
(Tuple.Gen_Alg'Image) &
+                    (if Tuple.Gen_Alg = LR1 and Test_Main
+                     then "_t" & Ada.Strings.Fixed.Trim 
(Generate_Task_Count'Image, Ada.Strings.Both)
+                     else "") &
+                    (if Input_Data.If_Lexer_Present
+                     then "_" & Lexer_Image (Input_Data.User_Lexer).all
+                     else "") &
+                    ".parse_table"
+                  else "");
+
+               procedure Parse_Table_Append_Stats
+               is
+                  Parse_Table_File : File_Type;
+               begin
+                  Open (Parse_Table_File, Append_File, Parse_Table_File_Name);
+                  Set_Output (Parse_Table_File);
+                  Generate_Data.Parser_State_Count :=
+                    Generate_Data.LR_Parse_Table.State_Last - 
Generate_Data.LR_Parse_Table.State_First + 1;
+                  WisiToken.BNF.Generate_Utils.Put_Stats (Input_Data, 
Generate_Data);
+                  Set_Output (Standard_Output);
+                  Close (Parse_Table_File);
+               end Parse_Table_Append_Stats;
 
-               if Parse_Table_File_Name /= "" then
-                  declare
-                     Parse_Table_File : File_Type;
-                  begin
-                     Create (Parse_Table_File, Out_File, 
Parse_Table_File_Name);
-                     Set_Output (Parse_Table_File);
-                     Put_Line ("Tokens:");
-                     WisiToken.Put_Tokens (Generate_Data.Descriptor.all);
-                     New_Line;
-                     Put_Line ("Productions:");
-                     WisiToken.Productions.Put (Generate_Data.Grammar, 
Generate_Data.Descriptor.all);
-                     Set_Output (Standard_Output);
-                     Close (Parse_Table_File);
-                  end;
+            begin
+               if not Lexer_Done (Input_Data.User_Lexer) then
+                  Lexer_Done (Input_Data.User_Lexer) := True;
+                  case Input_Data.User_Lexer is
+                  when re2c_Lexer =>
+                     WisiToken.BNF.Output_Ada_Common.Create_re2c_File
+                       (Input_Data, Tuple, Generate_Data, 
-Output_File_Name_Root);
+                  when others =>
+                     null;
+                  end case;
                end if;
 
-               Packrat_Data.Check_All (Generate_Data.Descriptor.all);
+               case LR_Packrat_Generate_Algorithm'(Tuple.Gen_Alg) is
+               when LALR =>
+
+                  Time_Start := Clock;
+
+                  if Generate_Data.Grammar 
(Generate_Data.Descriptor.Accept_ID).LHS = Invalid_Token_ID then
+                     WisiToken.Generate.Put_Error
+                       (WisiToken.Generate.Error_Message
+                          (Grammar_Parser.Tree.Lexer.File_Name, 1,
+                           "%start token not specified or not found; no LALR 
parse table generated"));
+                  else
+                     if Trace_Generate > Outline then
+                        Trace.Put_Line ("generate LALR parse table");
+                     end if;
+
+                     Generate_Data.LR_Parse_Table := 
WisiToken.Generate.LR.LALR_Generate.Generate
+                       (Generate_Data.Grammar,
+                        Generate_Data.Descriptor.all,
+                        Grammar_Parser.Tree.Lexer.File_Name,
+                        Generate_Data.Conflicts,
+                        Generate_Utils.To_McKenzie_Param (Generate_Data, 
Input_Data.McKenzie_Recover),
+                        Input_Data.Max_Parallel,
+                        Parse_Table_File_Name,
+                        Include_Extra         => Test_Main,
+                        Ignore_Conflicts      => Ignore_Conflicts,
+                        Partial_Recursion     => 
Input_Data.Language_Params.Partial_Recursion,
+                        Use_Cached_Recursions => not 
(Input_Data.If_Lexer_Present or Input_Data.If_Parser_Present),
+                        Recursions            => Cached_Recursions);
+
+                     if WisiToken.Trace_Time then
+                        Time_End := Clock;
+
+                        Put_Line
+                          (Standard_Error,
+                           "LALR " & Lexer_Image (Tuple.Lexer).all & " 
generate time:" &
+                             Duration'Image (To_Duration (Time_End - 
Time_Start)));
+                     end if;
+
+                     if Parse_Table_File_Name /= "" then
+                        Parse_Table_Append_Stats;
+                     end if;
+                  end if;
 
-            when External =>
-               null;
-            end case;
+               when LR1 =>
+                  Time_Start := Clock;
+
+                  if Generate_Data.Grammar 
(Generate_Data.Descriptor.Accept_ID).LHS = Invalid_Token_ID then
+                     WisiToken.Generate.Put_Error
+                       (WisiToken.Generate.Error_Message
+                          (Grammar_Parser.Tree.Lexer.File_Name, 1,
+                           "%start token not specified or not found; no LALR 
parse table generated"));
+                  else
+                     if Trace_Generate > Outline then
+                        Trace.Put_Line ("generate LR1 parse table");
+                     end if;
+
+                     Generate_Data.LR_Parse_Table := 
WisiToken.Generate.LR.LR1_Generate.Generate
+                       (Generate_Data.Grammar,
+                        Generate_Data.Descriptor.all,
+                        Grammar_Parser.Tree.Lexer.File_Name,
+                        Generate_Data.Conflicts,
+                        Generate_Utils.To_McKenzie_Param (Generate_Data, 
Input_Data.McKenzie_Recover),
+                        Input_Data.Max_Parallel,
+                        Parse_Table_File_Name,
+                        Include_Extra         => Test_Main,
+                        Ignore_Conflicts      => Ignore_Conflicts,
+                        Partial_Recursion     => 
Input_Data.Language_Params.Partial_Recursion,
+                        Task_Count            => Generate_Task_Count,
+                        Hash_Table_Size       => 
Input_Data.Language_Params.LR1_Hash_Table_Size,
+                        Use_Cached_Recursions => not 
(Input_Data.If_Lexer_Present or Input_Data.If_Parser_Present),
+                        Recursions            => Cached_Recursions);
+
+                     if Trace_Time then
+                        Time_End := Clock;
+
+                        Put_Line
+                          (Standard_Error,
+                           "LR1 " & Lexer_Image (Tuple.Lexer).all & " generate 
time:" &
+                             Duration'Image (To_Duration (Time_End - 
Time_Start)));
+                     end if;
+
+                     if Parse_Table_File_Name /= "" then
+                        Parse_Table_Append_Stats;
+                     end if;
+                  end if;
 
-            if WisiToken.Generate.Error then
-               raise WisiToken.Grammar_Error with "errors: aborting";
-            end if;
+               when Packrat_Generate_Algorithm =>
+                  --  The only significant computation done for Packrat is 
First, done
+                  --  in Initialize; not worth timing.
+
+                  Packrat_Data := WisiToken.Generate.Packrat.Initialize
+                    (Grammar_Parser.Tree.Lexer.File_Name, 
Generate_Data.Grammar, Generate_Data.Source_Line_Map,
+                     Generate_Data.Descriptor.First_Terminal);
+
+                  if Parse_Table_File_Name /= "" then
+                     declare
+                        Parse_Table_File : File_Type;
+                     begin
+                        Create (Parse_Table_File, Out_File, 
Parse_Table_File_Name);
+                        Set_Output (Parse_Table_File);
+                        Put_Line ("Tokens:");
+                        WisiToken.Put_Tokens (Generate_Data.Descriptor.all);
+                        New_Line;
+                        Put_Line ("Productions:");
+                        WisiToken.Productions.Put (Generate_Data.Grammar, 
Generate_Data.Descriptor.all);
+                        Set_Output (Standard_Output);
+                        Close (Parse_Table_File);
+                     end;
+                  end if;
+
+                  Packrat_Data.Check_All (Generate_Data.Descriptor.all, 
Input_Data.Suppress);
+
+                  if WisiToken.Generate.Warning then
+                     WisiToken.Generate.Put_Warning ("warnings during packrat 
generation");
+                  end if;
+               end case;
 
-            case Tuple.Gen_Alg is
-            when LR_Generate_Algorithm =>
-               if Tuple.Text_Rep then
-                  WisiToken.Generate.LR.Put_Text_Rep
-                    (Generate_Data.LR_Parse_Table.all,
-                     -Output_File_Name_Root & "_" &
-                       To_Lower (Generate_Algorithm_Image (Tuple.Gen_Alg).all) 
&
-                       "_parse_table.txt",
-                     Generate_Data.Action_Names.all, 
Generate_Data.Check_Names.all);
+               if WisiToken.Generate.Error then
+                  raise WisiToken.Grammar_Error with "errors: aborting";
                end if;
 
-            when others =>
-               null;
-            end case;
+               case Tuple.Gen_Alg is
+               when LR_Generate_Algorithm =>
+                  if Tuple.Text_Rep then
+                     WisiToken.Generate.LR.Put_Text_Rep
+                       (Generate_Data.LR_Parse_Table.all,
+                        Text_Rep_File_Name
+                          (-Output_File_Name_Root, Tuple, Generate_Task_Count, 
Input_Data.If_Lexer_Present, Test_Main));
+                  end if;
+
+               when others =>
+                  null;
+               end case;
 
-            if Tuple.Gen_Alg /= None then
                case Tuple.Out_Lang is
                when Ada_Lang =>
+                  if Trace_Generate > Outline then
+                     Trace.Put_Line ("output Ada");
+                  end if;
+
                   WisiToken.BNF.Output_Ada
-                    (Input_Data, -Output_File_Name_Root, Generate_Data, 
Packrat_Data, Tuple, Test_Main,
-                     Multiple_Tuples);
+                    (Input_Data, Grammar_Parser.Tree.Lexer.File_Name, 
-Output_File_Name_Root, Generate_Data,
+                     Packrat_Data, Tuple, Test_Main, Multiple_Tuples, 
Generate_Task_Count);
 
                when Ada_Emacs_Lang =>
+                  if Trace_Generate > Outline then
+                     Trace.Put_Line ("output Ada for Emacs");
+                  end if;
                   WisiToken.BNF.Output_Ada_Emacs
-                    (Input_Data, -Output_File_Name_Root, Generate_Data, 
Packrat_Data, Tuple,
-                     Test_Main, Multiple_Tuples, -Language_Name);
+                    (Input_Data, Grammar_Parser.Tree.Lexer.File_Name, 
-Output_File_Name_Root, Generate_Data,
+                     Packrat_Data, Tuple, Test_Main, Multiple_Tuples, 
-Language_Name);
 
                end case;
                if WisiToken.Generate.Error then
                   raise WisiToken.Grammar_Error with "errors: aborting";
                end if;
-            end if;
-         end;
+            end;
+         end case;
       end loop;
    end;
 exception
diff --git a/wisitoken-bnf-generate_grammar.adb 
b/wisitoken-bnf-generate_grammar.adb
index cd165c4155..9477f711d3 100644
--- a/wisitoken-bnf-generate_grammar.adb
+++ b/wisitoken-bnf-generate_grammar.adb
@@ -2,7 +2,7 @@
 --
 --  Output Ada source code to recreate Grammar.
 --
---  Copyright (C) 2018 - 2019 Free Software Foundation, Inc.
+--  Copyright (C) 2018 - 2020 Free Software Foundation, Inc.
 --
 --  This library is free software;  you can redistribute it and/or modify it
 --  under terms of the  GNU General Public License  as published by the Free
@@ -72,7 +72,7 @@ begin
                end if;
             end if;
             if Action_Names (Prod.LHS) /= null and then Action_Names 
(Prod.LHS)(RHS_Index) /= null then
-               Indent_Line ("RHS.Action     := " & Action_Names 
(Prod.LHS)(RHS_Index).all & "'Access;");
+               Indent_Line ("RHS.Post_Parse_Action := " & Action_Names 
(Prod.LHS)(RHS_Index).all & "'Access;");
             end if;
             Indent_Line ("Prod.RHSs (" & Trimmed_Image (RHS_Index) & ") := 
RHS;");
             Indent := Indent - 3;
diff --git a/wisitoken-bnf-generate_packrat.adb 
b/wisitoken-bnf-generate_packrat.adb
index b4592e5022..ec0216487a 100644
--- a/wisitoken-bnf-generate_packrat.adb
+++ b/wisitoken-bnf-generate_packrat.adb
@@ -6,7 +6,7 @@
 --
 --  See wisitoken-parse-packrat.ads.
 --
---  Copyright (C) 2018, 2020 Free Software Foundation, Inc.
+--  Copyright (C) 2018, 2020 - 2022 Free Software Foundation, Inc.
 --
 --  This library is free software;  you can redistribute it and/or modify it
 --  under terms of the  GNU General Public License  as published by the Free
@@ -31,13 +31,12 @@ procedure WisiToken.BNF.Generate_Packrat
 is
    use WisiToken.Generate;
 
-   Descriptor   : WisiToken.Descriptor renames Generate_Data.Descriptor.all;
-   Action_Names : Names_Array_Array renames Generate_Data.Action_Names.all;
+   Descriptor : WisiToken.Descriptor renames Generate_Data.Descriptor.all;
 
    subtype Terminal is Token_ID range Descriptor.First_Terminal .. 
Descriptor.Last_Terminal;
 
-   --  FIXME: optimize memoizing? small productions not worth the memory cost?
-   --  or just use langkit space optimization.
+   --  FIXME packrat: optimize memoizing? small productions not worth the
+   --  memory cost? or just use langkit space optimization.
 
    function Parser_Name (Nonterm : in Token_ID) return String
    is begin
@@ -47,7 +46,8 @@ is
    procedure Put_Parser_Spec (Name : in String)
    is begin
       Indent_Line ("function " & Name);
-      Indent_Start ("  (Parser : in out Generated.Parser; Last_Pos : in 
Base_Token_Index) return Result_Type");
+      Indent_Start
+        ("  (Parser : in out Generated.Parser; Last_Pos : in 
Syntax_Trees.Stream_Index) return Result_Type");
    end Put_Parser_Spec;
 
    function Var_Suffix (I, J : in Integer) return String
@@ -68,9 +68,14 @@ is
       Indent_Line ("is");
       Indent := Indent + 3;
 
-      Indent_Line ("Descriptor : WisiToken.Descriptor renames 
Parser.Trace.Descriptor.all;");
-      Indent_Line ("Start_Pos  : constant Token_Index := Last_Pos + 1; --  
first token in current nonterm");
-      Indent_Line ("Pos        : Base_Token_Index := Last_Pos; --  last token 
parsed.");
+      Indent_Line ("Tree       : Syntax_Trees.Tree renames Parser.Tree;");
+      Indent_Line ("Descriptor : WisiToken.Descriptor renames 
Tree.Lexer.Descriptor.all;");
+      Indent_Line
+        ("Start_Pos  : constant Syntax_Trees.Stream_Index := Tree.Stream_Next 
(Tree.Shared_Stream, Last_Pos);");
+      Indent_Line ("Start_Pos_Index : constant Syntax_Trees.Node_Index :=");
+      Indent_Line ("  Tree.Get_Node_Index (Tree.Shared_Stream, Start_Pos);");
+      Indent_Line ("Pos        : Syntax_Trees.Stream_Index := Last_Pos; --  
last token parsed.");
+      Indent_Line ("Next_Pos   : Syntax_Trees.Stream_Index := Start_Pos;");
 
       for RHS_Index in Prod.RHSs.First_Index .. Prod.RHSs.Last_Index loop
          declare
@@ -78,7 +83,7 @@ is
          begin
             for Token_Index in RHS.Tokens.First_Index .. RHS.Tokens.Last_Index 
loop
                if RHS.Tokens (Token_Index) in Descriptor.First_Terminal .. 
Descriptor.Last_Terminal then
-                  Indent_Line ("Pos_" & Var_Suffix (RHS_Index, Token_Index) & 
"  : Token_Index;");
+                  Indent_Line ("Pos_" & Var_Suffix (RHS_Index, Token_Index) & 
"  : Syntax_Trees.Stream_Index;");
                else
                   Indent_Line ("Memo_" & Var_Suffix (RHS_Index, Token_Index) & 
" : Memo_Entry;");
                end if;
@@ -87,7 +92,7 @@ is
       end loop;
 
       if Data.Direct_Left_Recursive (Prod.LHS) then
-         Indent_Line ("Pos_Recurse_Last : Base_Token_Index := Last_Pos;");
+         Indent_Line ("Pos_Recurse_Last : Syntax_Trees.Stream_Index := 
Last_Pos;");
          Indent_Line ("Result_Recurse   : Memo_Entry;");
       end if;
 
@@ -95,25 +100,25 @@ is
       Indent_Line ("begin");
       Indent := Indent + 3;
 
-      Indent_Line ("if Pos = Parser.Terminals.Last_Index then");
+      Indent_Line ("if Next_Pos = Syntax_Trees.Invalid_Stream_Index then");
       Indent_Line ("   return (State => Failure);");
       Indent_Line ("end if;");
       Indent_Line ("declare");
-      Indent_Line ("   Memo : Memo_Entry renames Parser.Derivs (" & Result_ID 
& ")(Start_Pos);");
+      Indent_Line
+        ("   Memo : Memo_Entry renames Parser.Derivs (" & Result_ID & 
")(Start_Pos_Index);");
       Indent_Line ("begin");
       Indent := Indent + 3;
       Indent_Line ("case Memo.State is");
       Indent_Line ("when Success =>");
-      Indent_Line ("   return Parser.Derivs (" & Result_ID & ")(Start_Pos);");
+      Indent_Line ("   return Parser.Derivs (" & Result_ID & 
")(Start_Pos_Index);");
       Indent_Line ("when Failure =>");
 
-      --  FIXME: Could simplify this when not doing left recursion
       Indent_Line ("   goto RHS_" & Trimmed_Image (Prod.RHSs.Last_Index) & 
"_Fail;");
 
       Indent_Line ("when No_Result =>");
       Indent_Line ("   if Memo.Recursive then");
-      Indent_Start ("      raise Recursive with Image (" & Result_ID & ", 
Descriptor) &");
-      Put_Line (" Token_Index'Image (Start_Pos) & "": recursive"";");
+      Indent_Line ("      raise Recursive with Image (" & Result_ID & ", 
Descriptor) &");
+      Indent_Line ("        Start_Pos_Index'Image & "": recursive"";");
       Indent_Line ("   end if;");
       Indent_Line ("   Memo.Recursive := True;");
       Indent_Line ("end case;");
@@ -123,7 +128,7 @@ is
 
       if Data.Direct_Left_Recursive (Prod.LHS) then
          --  This is the top of the 'while' loop in [warth 2008] figure 3 
Grow-LR.
-         Indent_Line ("Parser.Derivs (" & Result_ID & ").Replace_Element 
(Start_Pos, (State => Failure));");
+         Indent_Line ("Parser.Derivs (" & Result_ID & ").Replace_Element 
(Start_Pos_Index, (State => Failure));");
          Indent_Line ("<<Recurse_Start>>");
       end if;
 
@@ -138,33 +143,29 @@ is
                   Indent := Indent + 2;
                else
                   Indent_Line ("Parser.Derivs (" & Result_ID & 
").Replace_Element");
-                  Indent_Line ("  (Start_Pos,");
+                  Indent_Line ("  (Start_Pos_Index,");
                   Indent := Indent + 3;
                end if;
-               Indent_Line ("(State              => Success,");
-               Indent_Line (" Result             => Parser.Tree.Add_Nonterm");
+               Indent_Line ("(State            => Success,");
+               Indent_Line (" Result           => Parser.Tree.Add_Nonterm");
 
                Indent := Indent + 3;
-               Indent_Line ("(Production      => (" & Result_ID & ", " & 
Trimmed_Image (RHS_Index) & "),");
-               Indent_Line
-                 (" Action          => " &
-                    (if Action_Names (Prod.LHS) = null or else Action_Names 
(Prod.LHS)(RHS_Index) = null
-                     then "null,"
-                     else Action_Names (Prod.LHS)(RHS_Index).all & 
"'Access,"));
+               Indent_Line ("(Production    => (" & Result_ID & ", " & 
Trimmed_Image (RHS_Index) & "),");
 
                if RHS.Tokens.Length = 0 then
-                  Indent_Line (" Children        => (1 .. 0 => 
Invalid_Node_Index),");
+                  Indent_Line (" Children      => (1 .. 0 => 
Syntax_Trees.Invalid_Node_Access),");
 
                elsif RHS.Tokens.Length = 1 then
-                  Indent_Start (" Children        => ");
+                  Indent_Start (" Children      => ");
                   if RHS.Tokens (RHS.Tokens.First_Index) in Terminal then
-                     Put ("(1 => Tree_Index (Pos_" & Var_Suffix (RHS_Index, 
RHS.Tokens.First_Index) & ")),");
+                     Put ("(1 => Tree.Get_Node (Tree.Shared_Stream, Pos_" &
+                            Var_Suffix (RHS_Index, RHS.Tokens.First_Index) & 
")),");
                   else
                      Put ("(1 => Memo_" & Var_Suffix (RHS_Index, 
RHS.Tokens.First_Index) & ".Result),");
                   end if;
 
                else
-                  Indent_Line (" Children        =>");
+                  Indent_Line (" Children      =>");
 
                   for Token_Index in RHS.Tokens.First_Index .. 
RHS.Tokens.Last_Index loop
                      if RHS.Tokens (Token_Index) in Terminal then
@@ -172,7 +173,7 @@ is
                           ((if Token_Index = RHS.Tokens.First_Index
                             then "  ("
                             else "   ") &
-                             "Tree_Index (Pos_" & Var_Suffix (RHS_Index, 
Token_Index) & ")");
+                             "Tree.Get_Node (Tree.Shared_Stream, Pos_" & 
Var_Suffix (RHS_Index, Token_Index) & ")");
                      else
                         Indent_Start
                           ((if Token_Index = RHS.Tokens.First_Index
@@ -188,9 +189,12 @@ is
                   end loop;
                end if;
 
-               Indent_Line (" Default_Virtual => False),");
+               Indent_Line (" Clear_Parents => True),");
+               --  We must be able to steal nodes from failed nonterms;
+               --  body_instantiation_conflict.wy.
+
                Indent := Indent - 3;
-               Indent_Start (" Last_Token      => Pos)");
+               Indent_Start (" Last_Pos         => Pos)");
 
                if Data.Direct_Left_Recursive (Prod.LHS) then
                   Put_Line (";");
@@ -206,6 +210,7 @@ is
          begin
             Indent_Wrap_Comment (Productions.Image (Prod.LHS, RHS_Index, 
RHS.Tokens, Descriptor), Ada_Comment);
             Indent_Line ("Pos := Last_Pos;");
+            Indent_Line ("Next_Pos := Tree.Stream_Next (Tree.Shared_Stream, 
Pos);");
 
             if RHS.Tokens.Length = 0 then
                Finish;
@@ -216,9 +221,10 @@ is
                      Var_Suf : constant String := Var_Suffix (RHS_Index, 
Token_Index);
                   begin
                      if RHS.Tokens (Token_Index) in Terminal then
-                        Indent_Line ("if Parser.Terminals (Pos + 1).ID = " & 
ID & " then");
+                        Indent_Line ("if Tree.ID (Tree.Shared_Stream, 
Next_Pos) = " & ID & " then");
                         Indent := Indent + 3;
-                        Indent_Line ("Pos := Pos + 1;");
+                        Indent_Line ("Pos := Next_Pos;");
+                        Indent_Line ("Next_Pos := Tree.Stream_Next 
(Tree.Shared_Stream, Pos);");
                         Indent_Line ("Pos_" & Var_Suf & " := Pos;");
                         if Token_Index = RHS.Tokens.Last_Index then
                            Finish;
@@ -235,7 +241,8 @@ is
                         Indent_Line ("case Result_States'(Memo_" & Var_Suf & 
".State) is");
                         Indent_Line ("when Success =>");
                         Indent := Indent + 3;
-                        Indent_Line ("Pos := Memo_" & Var_Suf & 
".Last_Token;");
+                        Indent_Line ("Pos := Memo_" & Var_Suf & ".Last_Pos;");
+                        Indent_Line ("Next_Pos := Tree.Stream_Next 
(Tree.Shared_Stream, Pos);");
                         if Token_Index = RHS.Tokens.Last_Index then
                            Finish;
                         end if;
@@ -257,32 +264,34 @@ is
       if Data.Direct_Left_Recursive (Prod.LHS) then
          Indent_Line ("Result_Recurse := (State => Failure);");
       else
-         Indent_Line ("Parser.Derivs (" & Result_ID & ").Replace_Element 
(Start_Pos, (State => Failure));");
-         Indent_Line ("return Parser.Derivs (" & Result_ID & ")(Start_Pos);");
+         Indent_Line ("Parser.Derivs (" & Result_ID & ").Replace_Element 
(Start_Pos_Index, (State => Failure));");
+         Indent_Line ("return Parser.Derivs (" & Result_ID & 
")(Start_Pos_Index);");
       end if;
 
       if Data.Direct_Left_Recursive (Prod.LHS) then
          Indent_Line ("<<Finish>>");
          Indent_Line ("if Result_Recurse.State = Success then");
          Indent := Indent + 3;
-         Indent_Line ("if Pos > Pos_Recurse_Last then");
+         Indent_Line ("if Tree.Get_Node_Index (Tree.Shared_Stream, Pos) >");
+         Indent_Line ("  Tree.Get_Node_Index (Tree.Shared_Stream, 
Pos_Recurse_Last)");
+         Indent_Line ("then");
          --  made progress, try again
          Indent := Indent + 3;
-         Indent_Line ("Parser.Derivs (" & Result_ID & ").Replace_Element 
(Start_Pos, Result_Recurse);");
+         Indent_Line ("Parser.Derivs (" & Result_ID & ").Replace_Element 
(Start_Pos_Index, Result_Recurse);");
          Indent_Line ("Pos_Recurse_Last := Pos;");
          Indent_Line ("if WisiToken.Trace_Parse > Detail then");
-         Indent_Line ("   Parser.Trace.Put_Line");
-         Indent_Line
-           ("     (Parser.Tree.Image (Result_Recurse.Result, Descriptor, 
Include_Children => True));");
+         Indent_Line ("   Tree.Lexer.Trace.Put_Line");
+         Indent_Line ("     (Parser.Tree.Image (Result_Recurse.Result,");
+         Indent_Line ("      Children => True, Terminal_Node_Numbers => 
True));");
          Indent_Line ("end if;");
          Indent_Line ("goto Recurse_Start;");
          Indent := Indent - 3;
          Indent_Line
            ("elsif Pos = Pos_Recurse_Last and then " &
-              "Parser.Tree.Buffer_Region_Is_Empty (Result_Recurse.Result) 
then");
+              "Parser.Tree.Is_Empty_Nonterm (Result_Recurse.Result) then");
          --  Parse succeeded producing an empty nonterm; don't try again. This
          --  special case is not in [warth 2008].
-         Indent_Line ("   Parser.Derivs (" & Result_ID & ").Replace_Element 
(Start_Pos, Result_Recurse);");
+         Indent_Line ("   Parser.Derivs (" & Result_ID & ").Replace_Element 
(Start_Pos_Index, Result_Recurse);");
          Indent_Line ("end if;");
          Indent := Indent - 3;
          Indent_Line ("end if;");
@@ -293,15 +302,15 @@ is
          Indent_Line ("<<Succeed>>");
          Indent_Line ("if WisiToken.Trace_Parse > Detail then");
          Indent := Indent + 3;
-         Indent_Line ("Parser.Trace.Put_Line");
+         Indent_Line ("Tree.Lexer.Trace.Put_Line");
          Indent_Line ("  (Parser.Tree.Image");
-         Indent_Line
-           ("    (Parser.Derivs (" & Result_ID & ")(Start_Pos).Result, 
Descriptor, Include_Children => True));");
+         Indent_Line ("    (Parser.Derivs (" & Result_ID & 
")(Start_Pos_Index).Result,");
+         Indent_Line ("     Children => True, Terminal_Node_Numbers => 
True));");
          Indent := Indent - 3;
          Indent_Line ("end if;");
       end if;
 
-      Indent_Line ("return Parser.Derivs (" & Result_ID & ")(Start_Pos);");
+      Indent_Line ("return Parser.Derivs (" & Result_ID & 
")(Start_Pos_Index);");
       Indent := Indent - 3;
       Indent_Line ("end " & Parser_Name (Prod.LHS) & ";");
       New_Line;
@@ -311,6 +320,8 @@ begin
    Indent_Line ("use WisiToken;");
    Indent_Line ("use WisiToken.Parse.Packrat;");
    Indent_Line ("use WisiToken.Parse.Packrat.Generated;");
+   Indent_Line ("use all type WisiToken.Syntax_Trees.Stream_Index;");
+   Indent_Line ("use all type WisiToken.Syntax_Trees.Node_Index;");
 
    for Prod of Data.Grammar loop
       Put_Parser_Spec (Parser_Name (Prod.LHS)); Put_Line (";");
@@ -322,9 +333,9 @@ begin
    end loop;
 
    Indent_Line ("function Parse_wisitoken_accept_1");
-   Indent_Line
-     --  WORKAROUND: using Parse.Packrat.Parser'Class here generates GNAT bug 
box with GPL 2018
-     ("  (Parser : in out WisiToken.Parse.Base_Parser'Class; Last_Pos : in 
Base_Token_Index) return Result_Type");
+   --  WORKAROUND: using Parse.Packrat.Parser'Class here generates GNAT bug 
box with GPL 2018
+   Indent_Line ("  (Parser   : in out WisiToken.Parse.Base_Parser'Class;");
+   Indent_Line ("   Last_Pos : in Syntax_Trees.Stream_Index) return 
Result_Type");
    Indent_Line ("is begin");
    Indent_Line ("   return Parse_wisitoken_accept (Generated.Parser (Parser), 
Last_Pos);");
    Indent_Line ("end Parse_wisitoken_accept_1;");
diff --git a/wisitoken-bnf-generate_utils.adb b/wisitoken-bnf-generate_utils.adb
index b5622d0558..a9eccb2132 100644
--- a/wisitoken-bnf-generate_utils.adb
+++ b/wisitoken-bnf-generate_utils.adb
@@ -2,7 +2,7 @@
 --
 --  see spec
 --
---  Copyright (C) 2014, 2015, 2017 - 2020  All Rights Reserved.
+--  Copyright (C) 2014, 2015, 2017 - 2022  All Rights Reserved.
 --
 --  This program is free software; you can redistribute it and/or
 --  modify it under terms of the GNU General Public License as
@@ -22,41 +22,73 @@ with Ada.Exceptions;
 with Ada.Text_IO;
 with WisiToken.Generate; use WisiToken.Generate;
 with WisiToken.Syntax_Trees;
+with WisiToken.Text_IO_Trace;
 with WisiToken.Wisi_Ada;
+with WisiToken_Grammar_Editing;
+with Wisitoken_Grammar_Main;
 package body WisiToken.BNF.Generate_Utils is
 
    --  For Constant_Reference
    Aliased_EOI_Name              : aliased constant 
Ada.Strings.Unbounded.Unbounded_String := +EOI_Name;
+   Aliased_SOI_Name              : aliased constant 
Ada.Strings.Unbounded.Unbounded_String := +SOI_Name;
    Aliased_WisiToken_Accept_Name : aliased constant 
Ada.Strings.Unbounded.Unbounded_String :=
      +WisiToken_Accept_Name;
 
-   --  body specs, as needed.
+   --  body specs, as needed, alphabetical.
+   function Kind (Container : in Token_Container; Cursor : in Token_Cursor) 
return String;
+
+   function Find_Token_ID (Container : in Token_Container; Token : in String) 
return Token_ID;
+   --  Token may be a %keyword name, or a string literal giving the
+   --  actual keyword.
+
+   function First
+     (Container    : in Token_Container;
+      Non_Grammar  : in Boolean := True;
+      Nonterminals : in Boolean := True)
+     return Token_Cursor;
+
+   function Name (Container : in Token_Container; Cursor : in Token_Cursor) 
return String;
+
+   procedure Next
+     (Container    : in     Token_Container;
+      Cursor       : in out Token_Cursor;
+      Nonterminals : in     Boolean := True;
+      Include_SOI  : in     Boolean := True);
+
+   function To_Conflicts
+     (Container        : in Token_Container;
+      Ignore_Conflicts : in Boolean;
+      Conflicts        : in WisiToken.BNF.Conflict_Lists.List;
+      Source_File_Name : in String)
+     return WisiToken.Generate.LR.Conflict_Lists.Tree;
+
+   function Value (Container : in Token_Container; Cursor : in Token_Cursor) 
return String;
 
    ----------
    --  Body subprograms
 
-   function Find_Kind (Data : aliased Generate_Data; Target_Kind : in String) 
return Token_ID
+   function Find_Kind (Container : in Token_Container; Target_Kind : in 
String) return Token_ID
    is begin
-      for Cursor in All_Tokens (Data).Iterate loop
-         if Kind (Cursor) = Target_Kind then
+      for Cursor in Container.Iterate loop
+         if Kind (Container, Cursor) = Target_Kind then
             return ID (Cursor);
          end if;
       end loop;
       return Invalid_Token_ID;
    end Find_Kind;
 
-   function Name_1 (Cursor : in Token_Cursor) return String
+   function Name_1 (Data : in Generate_Data; Cursor : in Token_Cursor) return 
String
    is begin
       --   This function is used to compute Descriptor.Image
       case Cursor.Kind is
       when Non_Grammar_Kind =>
-         return -Cursor.Data.Tokens.Non_Grammar (Cursor.Token_Kind).Tokens 
(Cursor.Token_Item).Name;
+         return -Data.Tokens.Non_Grammar (Cursor.Token_Kind).Tokens 
(Cursor.Token_Item).Name;
 
       when Terminals_Keywords =>
-         return -Cursor.Data.Tokens.Keywords (Cursor.Keyword).Name;
+         return -Data.Tokens.Keywords (Cursor.Keyword).Name;
 
       when Terminals_Others =>
-         return -Cursor.Data.Tokens.Tokens (Cursor.Token_Kind).Tokens 
(Cursor.Token_Item).Name;
+         return -Data.Tokens.Tokens (Cursor.Token_Kind).Tokens 
(Cursor.Token_Item).Name;
 
       when EOI =>
          return EOI_Name;
@@ -65,7 +97,10 @@ package body WisiToken.BNF.Generate_Utils is
          return WisiToken_Accept_Name;
 
       when Nonterminal =>
-         return -Cursor.Data.Tokens.Rules (Cursor.Nonterminal).Left_Hand_Side;
+         return -Data.Tokens.Rules (Cursor.Nonterminal).Left_Hand_Side;
+
+      when SOI =>
+         return SOI_Name;
 
       when Done =>
          raise SAL.Programmer_Error with "token cursor is done";
@@ -140,7 +175,7 @@ package body WisiToken.BNF.Generate_Utils is
                      end loop;
                   end if;
                   RHSs (RHS_Index) :=
-                    (Tokens => Tokens, Action => null, Check => null, 
Recursion => <>);
+                    (Tokens => Tokens, Post_Parse_Action => null, 
In_Parse_Action => null, Recursion => <>);
                   if Length (Right_Hand_Side.Action) > 0 then
                      Action_All_Empty := False;
                      Action_Names (RHS_Index) := new String'
@@ -155,7 +190,7 @@ package body WisiToken.BNF.Generate_Utils is
                   Data.Source_Line_Map (LHS).RHS_Map (RHS_Index) := 
Right_Hand_Side.Source_Line;
                exception
                when E : Not_Found =>
-                  --  From "&"
+                  --  From "Find_Token"
                   Put_Error
                     (Error_Message
                        (Source_File_Name, Right_Hand_Side.Source_Line, 
Ada.Exceptions.Exception_Message (E)));
@@ -163,7 +198,8 @@ package body WisiToken.BNF.Generate_Utils is
                RHS_Index := RHS_Index + 1;
             end loop;
 
-            Data.Grammar (LHS) := LHS <= RHSs;
+            Data.Grammar (LHS) := (LHS, Rule.Optimized_List, RHSs);
+
             if not Action_All_Empty then
                Data.Action_Names (LHS) := new Names_Array'(Action_Names);
             end if;
@@ -187,15 +223,16 @@ package body WisiToken.BNF.Generate_Utils is
    --  Public subprograms, declaration order
 
    function Initialize
-     (Input_Data       : aliased in WisiToken_Grammar_Runtime.User_Data_Type;
-      Ignore_Conflicts :         in Boolean := False)
+     (Input_Data        : in WisiToken_Grammar_Runtime.User_Data_Access;
+      Grammar_File_Name : in String;
+      Ignore_Conflicts  : in Boolean := False)
      return Generate_Data
    is
       EOI_ID : constant Token_ID := Token_ID
         (Count (Input_Data.Tokens.Non_Grammar) + Count 
(Input_Data.Tokens.Tokens)) + Token_ID
           (Input_Data.Tokens.Keywords.Length) + Token_ID'First;
    begin
-      return Result : aliased Generate_Data :=
+      return Result : Generate_Data :=
         (Tokens => Input_Data.Tokens'Access,
 
          Descriptor => new WisiToken.Descriptor
@@ -204,6 +241,8 @@ package body WisiToken.BNF.Generate_Utils is
                then Token_ID (Count (Input_Data.Tokens.Non_Grammar)) + 
Token_ID'First
                else Token_ID'First),
             Last_Terminal     => EOI_ID,
+            SOI_ID            => EOI_ID + 1 + Token_ID 
(Input_Data.Tokens.Rules.Length) + 1,
+            --  SOI after last_nonterm to preserve test results
             EOI_ID            => EOI_ID,
             Accept_ID         => EOI_ID + 1,
             First_Nonterminal => EOI_ID + 1,
@@ -211,10 +250,21 @@ package body WisiToken.BNF.Generate_Utils is
 
          others => <>)
       do
-         Result.Descriptor.Case_Insensitive := 
Input_Data.Language_Params.Case_Insensitive;
-         Result.Descriptor.New_Line_ID      := Find_Kind (Result, "new-line");
-         Result.Descriptor.String_1_ID      := Find_Kind (Result, 
"string-single");
-         Result.Descriptor.String_2_ID      := Find_Kind (Result, 
"string-double");
+         declare
+            Container : constant Token_Container :=
+              (Result.Tokens, WisiToken.Descriptor_Access_Constant 
(Result.Descriptor));
+         begin
+            Result.Descriptor.Case_Insensitive := 
Input_Data.Language_Params.Case_Insensitive;
+            Result.Descriptor.New_Line_ID      := Find_Kind (Container, 
"new-line");
+            Result.Descriptor.String_1_ID      := Find_Kind (Container, 
"string-single");
+            if Result.Descriptor.String_1_ID = Invalid_Token_ID then
+               Result.Descriptor.String_1_ID := Find_Kind (Container, 
"string-single-one-line");
+            end if;
+            Result.Descriptor.String_2_ID      := Find_Kind (Container, 
"string-double");
+            if Result.Descriptor.String_2_ID = Invalid_Token_ID then
+               Result.Descriptor.String_2_ID := Find_Kind (Container, 
"string-double-one-line");
+            end if;
+         end;
 
          --  Image set in loop below, which also updates these widths.
          Result.Descriptor.Terminal_Image_Width := 0;
@@ -222,13 +272,11 @@ package body WisiToken.BNF.Generate_Utils is
 
          Result.Descriptor.Last_Lookahead       :=
            (case (Input_Data.User_Parser) is
-            when None                                  => Invalid_Token_ID,
-            when LR1                                   => 
Result.Descriptor.Last_Terminal,
-            when LALR                                  => 
Result.Descriptor.First_Nonterminal,
-            when Packrat_Generate_Algorithm | External => Invalid_Token_ID);
+            when LALR   => Result.Descriptor.First_Nonterminal,
+            when others => Result.Descriptor.Last_Terminal);
 
          for Cursor in All_Tokens (Result).Iterate loop
-            Result.Descriptor.Image (ID (Cursor)) := new String'(Name_1 
(Cursor));
+            Result.Descriptor.Image (ID (Cursor)) := new String'(Name_1 
(Result, Cursor));
          end loop;
 
          for ID in Result.Descriptor.Image'Range loop
@@ -243,24 +291,132 @@ package body WisiToken.BNF.Generate_Utils is
             end if;
          end loop;
 
-         To_Grammar (Result, Input_Data.Grammar_Lexer.File_Name, 
-Input_Data.Language_Params.Start_Token);
+         To_Grammar (Result, Grammar_File_Name, 
-Input_Data.Language_Params.Start_Token);
          Result.Ignore_Conflicts := Ignore_Conflicts;
+
+         Result.Conflicts := To_Conflicts
+           ((Result.Tokens, Descriptor_Access_Constant (Result.Descriptor)), 
Ignore_Conflicts, Input_Data.Conflicts,
+            Grammar_File_Name);
+
+         if WisiToken.Generate.Error then
+            raise WisiToken.Grammar_Error with "errors during initializing 
grammar: aborting";
+         end if;
       end return;
    end Initialize;
 
-   function Find_Token_ID (Data : aliased in Generate_Data; Token : in String) 
return Token_ID
-   is begin
-      for Cursor in All_Tokens (Data).Iterate loop
-         if Name (Cursor) = Token then
-            return ID (Cursor);
+   procedure Parse_Grammar_File
+     (Grammar_Parser    : in out WisiToken.Parse.LR.Parser_No_Recover.Parser;
+      Grammar_File_Name : in     String)
+   is
+      Trace      : aliased WisiToken.Text_IO_Trace.Trace;
+      Input_Data : aliased WisiToken_Grammar_Runtime.User_Data_Type;
+      Log_File   : Ada.Text_IO.File_Type;
+   begin
+      WisiToken.Parse.LR.Parser_No_Recover.New_Parser
+        (Grammar_Parser, Wisitoken_Grammar_Main.Create_Lexer 
(Trace'Unchecked_Access),
+         Wisitoken_Grammar_Main.Create_Parse_Table, 
Wisitoken_Grammar_Main.Create_Productions,
+         Input_Data'Unchecked_Access);
+
+      Grammar_Parser.Tree.Lexer.Reset_With_File (Grammar_File_Name);
+
+      Grammar_Parser.Parse (Log_File);
+      Grammar_Parser.Execute_Actions; -- Meta phase.
+   exception
+   when WisiToken.Syntax_Error =>
+      Grammar_Parser.Put_Errors;
+      raise;
+   when E : WisiToken.Parse_Error =>
+      WisiToken.Generate.Put_Error (Ada.Exceptions.Exception_Message (E));
+      raise;
+   end Parse_Grammar_File;
+
+   function Parse_Grammar_File
+     (Grammar_File_Name  : in     String;
+      Input_Data         : in     WisiToken_Grammar_Runtime.User_Data_Access;
+      Generate_Algorithm : in     WisiToken.BNF.Generate_Algorithm;
+      Lexer              : in     WisiToken.BNF.Lexer_Type;
+      Trace              : in out WisiToken.Trace'Class;
+      Ignore_Conflicts   : in     Boolean)
+     return Generate_Data
+   is
+      use all type WisiToken_Grammar_Runtime.Meta_Syntax;
+      Grammar_Parser : WisiToken.Parse.LR.Parser_No_Recover.Parser;
+      Log_File       : Ada.Text_IO.File_Type;
+   begin
+      WisiToken.Parse.LR.Parser_No_Recover.New_Parser
+        (Grammar_Parser, Wisitoken_Grammar_Main.Create_Lexer 
(Trace'Unchecked_Access),
+         Wisitoken_Grammar_Main.Create_Parse_Table, 
Wisitoken_Grammar_Main.Create_Productions,
+         Syntax_Trees.User_Data_Access (Input_Data));
+
+      Grammar_Parser.Tree.Lexer.Reset_With_File (Grammar_File_Name);
+
+      Grammar_Parser.Parse (Log_File);
+      Grammar_Parser.Execute_Actions; -- Meta phase.
+
+      if Input_Data.Meta_Syntax = WisiToken_Grammar_Runtime.EBNF_Syntax then
+         WisiToken_Grammar_Editing.Translate_EBNF_To_BNF
+           (Grammar_Parser.Tree,
+            WisiToken_Grammar_Runtime.User_Data_Type (Input_Data.all));
+         if WisiToken.Generate.Error then
+            raise WisiToken.Grammar_Error with "errors during translating EBNF 
to BNF: aborting";
          end if;
-      end loop;
-      raise Not_Found with "token '" & Token & "' not found";
+      end if;
+
+      Input_Data.Reset;
+      Input_Data.Phase       := WisiToken_Grammar_Runtime.Other;
+      Input_Data.User_Parser := Generate_Algorithm;
+      Input_Data.User_Lexer  := Lexer;
+
+      Grammar_Parser.Execute_Actions; -- populates Input_Data.Tokens
+
+      if WisiToken.Generate.Error then
+         raise WisiToken.Grammar_Error with "errors during parsing grammar: 
aborting";
+      end if;
+
+      return Initialize (Input_Data, Grammar_File_Name, Ignore_Conflicts);
+   exception
+   when WisiToken.Syntax_Error =>
+      Grammar_Parser.Put_Errors;
+      raise;
+   when E : WisiToken.Parse_Error =>
+      WisiToken.Generate.Put_Error (Ada.Exceptions.Exception_Message (E));
+      raise;
+   end Parse_Grammar_File;
+
+   function Find_Token_ID (Data : in Generate_Data; Token : in String) return 
Token_ID
+   is
+      Container : Token_Container (Data.Tokens, Descriptor_Access_Constant 
(Data.Descriptor));
+   begin
+      return Find_Token_ID (Container, Token);
+   end Find_Token_ID;
+
+   function Find_Token_ID (Container : in Token_Container; Token : in String) 
return Token_ID
+   is begin
+      if Token (Token'First) = ''' then
+         --  Token is a literal; the keyword itself.
+         declare
+            Value_String : constant String := """" & Token (Token'First + 1 .. 
Token'Last - 1) & """";
+         begin
+            for Cursor in Container.Iterate loop
+               if Value (Container, Cursor) = Value_String then
+                  return ID (Cursor);
+               end if;
+            end loop;
+            raise Not_Found with "token value " & Token & " not found";
+         end;
+      else
+         for Cursor in Container.Iterate loop
+            if Name (Container, Cursor) = Token then
+               return ID (Cursor);
+            end if;
+         end loop;
+         raise Not_Found with "token '" & Token & "' not found";
+      end if;
    end Find_Token_ID;
 
    function All_Tokens (Data : aliased in Generate_Data) return Token_Container
    is begin
-      return (Data => Data'Access);
+      return (Data.Tokens, WisiToken.Descriptor_Access_Constant 
(Data.Descriptor));
    end All_Tokens;
 
    function Constant_Reference
@@ -271,13 +427,13 @@ package body WisiToken.BNF.Generate_Utils is
       case Cursor.Kind is
       when Non_Grammar_Kind =>
          return
-           (Element => Container.Data.Tokens.Non_Grammar 
(Cursor.Token_Kind).Tokens (Cursor.Token_Item).Name'Access);
+           (Element => Container.Tokens.Non_Grammar (Cursor.Token_Kind).Tokens 
(Cursor.Token_Item).Name'Access);
 
       when Terminals_Keywords =>
-         return (Element => Container.Data.Tokens.Keywords 
(Cursor.Keyword).Name'Access);
+         return (Element => Container.Tokens.Keywords 
(Cursor.Keyword).Name'Access);
 
       when Terminals_Others =>
-         return (Element => Container.Data.Tokens.Tokens 
(Cursor.Token_Kind).Tokens (Cursor.Token_Item).Name'Access);
+         return (Element => Container.Tokens.Tokens (Cursor.Token_Kind).Tokens 
(Cursor.Token_Item).Name'Access);
 
       when EOI =>
          return (Element => Aliased_EOI_Name'Access);
@@ -286,7 +442,10 @@ package body WisiToken.BNF.Generate_Utils is
          return (Element => Aliased_WisiToken_Accept_Name'Access);
 
       when Nonterminal =>
-         return (Element => Container.Data.Tokens.Rules 
(Cursor.Nonterminal).Left_Hand_Side'Access);
+         return (Element => Container.Tokens.Rules 
(Cursor.Nonterminal).Left_Hand_Side'Access);
+
+      when SOI =>
+         return (Element => Aliased_SOI_Name'Access);
 
       when Done =>
          raise SAL.Programmer_Error with "token cursor is done";
@@ -297,6 +456,7 @@ package body WisiToken.BNF.Generate_Utils is
    is new Iterator_Interfaces.Forward_Iterator with record
       Non_Grammar  : Boolean;
       Nonterminals : Boolean;
+      Include_SOI  : Boolean;
    end record;
 
    overriding function First (Object : Iterator) return Token_Cursor;
@@ -304,43 +464,45 @@ package body WisiToken.BNF.Generate_Utils is
 
    overriding function First (Object : Iterator) return Token_Cursor
    is begin
-      return First (Object.Container.Data.all, Object.Non_Grammar, 
Object.Nonterminals);
+      return First (Object.Container.all, Object.Non_Grammar, 
Object.Nonterminals);
    end First;
 
    overriding function Next (Object  : Iterator; Position : Token_Cursor) 
return Token_Cursor
    is
       Next_Position : Token_Cursor := Position;
    begin
-      Next (Next_Position, Object.Nonterminals);
+      Next (Object.Container.all, Next_Position, Object.Nonterminals, 
Object.Include_SOI);
       return Next_Position;
    end Next;
 
    function Iterate
-     (Container    : aliased    Token_Container;
+     (Container    : aliased in Token_Container;
       Non_Grammar  :         in Boolean := True;
-      Nonterminals :         in Boolean := True)
+      Nonterminals :         in Boolean := True;
+      Include_SOI  :         in Boolean := True)
      return Iterator_Interfaces.Forward_Iterator'Class
    is begin
-      return Iterator'(Container'Access, Non_Grammar, Nonterminals);
+      return Iterator'(Container'Access, Non_Grammar, Nonterminals, 
Include_SOI);
    end Iterate;
 
    function Next_Kind_Internal
-     (Cursor       : in out Token_Cursor;
-      Nonterminals : in     Boolean)
+     (Container    : in     Token_Container;
+      Cursor       : in out Token_Cursor;
+      Nonterminals : in     Boolean;
+      Include_SOI  : in     Boolean)
      return Boolean
+   --  Advance Cursor to the next kind; return True if any of that
+   --  kind exist, or kind is Done; False otherwise.
    is begin
-      --  Advance Cursor to the next kind; return True if any of that
-      --  kind exist, or kind is Done; False otherwise.
       case Cursor.Kind is
       when Non_Grammar_Kind =>
 
          Cursor :=
-           (Data        => Cursor.Data,
-            Kind        => Terminals_Keywords,
-            ID          => Cursor.Data.Descriptor.First_Terminal,
+           (Kind        => Terminals_Keywords,
+            ID          => Container.Descriptor.First_Terminal,
             Token_Kind  => WisiToken.BNF.Token_Lists.No_Element,
             Token_Item  => String_Triple_Lists.No_Element,
-            Keyword     => Cursor.Data.Tokens.Keywords.First,
+            Keyword     => Container.Tokens.Keywords.First,
             Nonterminal => Rule_Lists.No_Element);
 
          return String_Pair_Lists.Has_Element (Cursor.Keyword);
@@ -348,16 +510,15 @@ package body WisiToken.BNF.Generate_Utils is
       when Terminals_Keywords =>
 
          Cursor :=
-           (Data        => Cursor.Data,
-            Kind        => Terminals_Others,
+           (Kind        => Terminals_Others,
             ID          => Cursor.ID,
-            Token_Kind  => Cursor.Data.Tokens.Tokens.First,
+            Token_Kind  => Container.Tokens.Tokens.First,
             Token_Item  => String_Triple_Lists.No_Element,
             Keyword     => String_Pair_Lists.No_Element,
             Nonterminal => Rule_Lists.No_Element);
 
          if WisiToken.BNF.Token_Lists.Has_Element (Cursor.Token_Kind) then
-            Cursor.Token_Item := Cursor.Data.Tokens.Tokens 
(Cursor.Token_Kind).Tokens.First;
+            Cursor.Token_Item := Container.Tokens.Tokens 
(Cursor.Token_Kind).Tokens.First;
             return WisiToken.BNF.String_Triple_Lists.Has_Element 
(Cursor.Token_Item);
          else
             return False;
@@ -366,8 +527,7 @@ package body WisiToken.BNF.Generate_Utils is
       when Terminals_Others =>
 
          Cursor :=
-           (Data        => Cursor.Data,
-            Kind        => EOI,
+           (Kind        => EOI,
             ID          => Cursor.ID,
             Token_Kind  => WisiToken.BNF.Token_Lists.No_Element,
             Token_Item  => String_Triple_Lists.No_Element,
@@ -378,10 +538,9 @@ package body WisiToken.BNF.Generate_Utils is
 
       when EOI =>
          if Nonterminals then
-            if Rule_Lists.Has_Element (Cursor.Data.Tokens.Rules.First) then
+            if Rule_Lists.Has_Element (Container.Tokens.Rules.First) then
                Cursor :=
-                 (Data        => Cursor.Data,
-                  Kind        => WisiToken_Accept,
+                 (Kind        => WisiToken_Accept,
                   ID          => Cursor.ID,
                   Token_Kind  => WisiToken.BNF.Token_Lists.No_Element,
                   Token_Item  => String_Triple_Lists.No_Element,
@@ -399,18 +558,25 @@ package body WisiToken.BNF.Generate_Utils is
       when WisiToken_Accept =>
 
          Cursor :=
-           (Data        => Cursor.Data,
-            Kind        => Nonterminal,
+           (Kind        => Nonterminal,
             ID          => Cursor.ID,
             Token_Kind  => WisiToken.BNF.Token_Lists.No_Element,
             Token_Item  => String_Triple_Lists.No_Element,
             Keyword     => String_Pair_Lists.No_Element,
-            Nonterminal => Cursor.Data.Tokens.Rules.First);
+            Nonterminal => Container.Tokens.Rules.First);
 
          --  Can't get here with no rules
          return True;
 
       when Nonterminal =>
+         if Include_SOI then
+            Cursor.Kind := SOI;
+         else
+            Cursor.Kind := Done;
+         end if;
+         return True;
+
+      when SOI =>
          Cursor.Kind := Done;
          return True;
 
@@ -420,23 +586,33 @@ package body WisiToken.BNF.Generate_Utils is
    end Next_Kind_Internal;
 
    function First
-     (Data         : aliased in Generate_Data;
-      Non_Grammar  :         in Boolean;
-      Nonterminals :         in Boolean)
+     (Data         : in Generate_Data;
+      Non_Grammar  : in Boolean := True;
+      Nonterminals : in Boolean := True)
+     return Token_Cursor
+   is
+      Container : Token_Container (Data.Tokens, Descriptor_Access_Constant 
(Data.Descriptor));
+   begin
+      return First (Container, Non_Grammar, Nonterminals);
+   end First;
+
+   function First
+     (Container    : in Token_Container;
+      Non_Grammar  : in Boolean := True;
+      Nonterminals : in Boolean := True)
      return Token_Cursor
    is
       Cursor : Token_Cursor :=
-        (Data        => Data'Access,
-         Kind        => Non_Grammar_Kind,
+        (Kind        => Non_Grammar_Kind,
          ID          => Token_ID'First,
-         Token_Kind  => Data.Tokens.Non_Grammar.First,
+         Token_Kind  => Container.Tokens.Non_Grammar.First,
          Token_Item  => String_Triple_Lists.No_Element,
          Keyword     => String_Pair_Lists.No_Element,
          Nonterminal => Rule_Lists.No_Element);
    begin
       if Non_Grammar then
          if WisiToken.BNF.Token_Lists.Has_Element (Cursor.Token_Kind) then
-            Cursor.Token_Item := Cursor.Data.Tokens.Non_Grammar 
(Cursor.Token_Kind).Tokens.First;
+            Cursor.Token_Item := Container.Tokens.Non_Grammar 
(Cursor.Token_Kind).Tokens.First;
             if WisiToken.BNF.String_Triple_Lists.Has_Element 
(Cursor.Token_Item) then
                return Cursor;
             end if;
@@ -445,12 +621,27 @@ package body WisiToken.BNF.Generate_Utils is
 
       --  There are no non_grammar tokens, or Non_Grammar false
       loop
-         exit when Next_Kind_Internal (Cursor, Nonterminals);
+         exit when Next_Kind_Internal (Container, Cursor, Nonterminals, 
Include_SOI => False);
       end loop;
       return Cursor;
    end First;
 
-   procedure Next (Cursor : in out Token_Cursor; Nonterminals : in Boolean)
+   procedure Next
+     (Data         : in     Generate_Data;
+      Cursor       : in out Token_Cursor;
+      Nonterminals : in     Boolean := True;
+      Include_SOI  : in     Boolean := True)
+   is
+      Container : Token_Container (Data.Tokens, Descriptor_Access_Constant 
(Data.Descriptor));
+   begin
+      Next (Container, Cursor, Nonterminals, Include_SOI);
+   end Next;
+
+   procedure Next
+     (Container    : in     Token_Container;
+      Cursor       : in out Token_Cursor;
+      Nonterminals : in     Boolean := True;
+      Include_SOI  : in     Boolean := True)
    is begin
       Cursor.ID := Cursor.ID + 1;
 
@@ -463,7 +654,7 @@ package body WisiToken.BNF.Generate_Utils is
             WisiToken.BNF.Token_Lists.Next (Cursor.Token_Kind);
 
             if WisiToken.BNF.Token_Lists.Has_Element (Cursor.Token_Kind) then
-               Cursor.Token_Item := Cursor.Data.Tokens.Non_Grammar 
(Cursor.Token_Kind).Tokens.First;
+               Cursor.Token_Item := Container.Tokens.Non_Grammar 
(Cursor.Token_Kind).Tokens.First;
                if String_Triple_Lists.Has_Element (Cursor.Token_Item) then
                   return;
                end if;
@@ -471,7 +662,7 @@ package body WisiToken.BNF.Generate_Utils is
          end if;
 
          loop
-            exit when Next_Kind_Internal (Cursor, Nonterminals);
+            exit when Next_Kind_Internal (Container, Cursor, Nonterminals, 
Include_SOI);
          end loop;
          return;
 
@@ -484,7 +675,7 @@ package body WisiToken.BNF.Generate_Utils is
          end if;
 
          loop
-            exit when Next_Kind_Internal (Cursor, Nonterminals);
+            exit when Next_Kind_Internal (Container, Cursor, Nonterminals, 
Include_SOI);
          end loop;
          return;
 
@@ -495,7 +686,7 @@ package body WisiToken.BNF.Generate_Utils is
          else
             WisiToken.BNF.Token_Lists.Next (Cursor.Token_Kind);
             if WisiToken.BNF.Token_Lists.Has_Element (Cursor.Token_Kind) then
-               Cursor.Token_Item := Cursor.Data.Tokens.Tokens 
(Cursor.Token_Kind).Tokens.First;
+               Cursor.Token_Item := Container.Tokens.Tokens 
(Cursor.Token_Kind).Tokens.First;
                if WisiToken.BNF.String_Triple_Lists.Has_Element 
(Cursor.Token_Item) then
                   return;
                end if;
@@ -503,19 +694,19 @@ package body WisiToken.BNF.Generate_Utils is
          end if;
 
          loop
-            exit when Next_Kind_Internal (Cursor, Nonterminals);
+            exit when Next_Kind_Internal (Container, Cursor, Nonterminals, 
Include_SOI);
          end loop;
          return;
 
       when EOI =>
-         if Next_Kind_Internal (Cursor, Nonterminals) then
+         if Next_Kind_Internal (Container, Cursor, Nonterminals, Include_SOI) 
then
             return;
          else
             raise SAL.Programmer_Error;
          end if;
 
       when WisiToken_Accept =>
-         if Next_Kind_Internal (Cursor, Nonterminals) then
+         if Next_Kind_Internal (Container, Cursor, Nonterminals, Include_SOI) 
then
             return;
          else
             raise SAL.Programmer_Error;
@@ -528,10 +719,17 @@ package body WisiToken.BNF.Generate_Utils is
          end if;
 
          loop
-            exit when Next_Kind_Internal (Cursor, Nonterminals);
+            exit when Next_Kind_Internal (Container, Cursor, Nonterminals, 
Include_SOI);
          end loop;
          return;
 
+      when SOI =>
+         if Next_Kind_Internal (Container, Cursor, Nonterminals, Include_SOI) 
then
+            return;
+         else
+            raise SAL.Programmer_Error;
+         end if;
+
       when Done =>
          null;
       end case;
@@ -547,22 +745,36 @@ package body WisiToken.BNF.Generate_Utils is
       return Cursor.ID;
    end ID;
 
-   function Name (Cursor : in Token_Cursor) return String
+   function Name (Data : in Generate_Data; Cursor : in Token_Cursor) return 
String
+   is
+      Container : Token_Container (Data.Tokens, Descriptor_Access_Constant 
(Data.Descriptor));
+   begin
+      return Name (Container, Cursor);
+   end Name;
+
+   function Name (Container : in Token_Container; Cursor : in Token_Cursor) 
return String
    is begin
-      return Cursor.Data.Descriptor.Image (Cursor.ID).all;
+      return Container.Descriptor.Image (Cursor.ID).all;
    end Name;
 
-   function Kind (Cursor : in Token_Cursor) return String
+   function Kind (Data : in Generate_Data; Cursor : in Token_Cursor) return 
String
+   is
+      Container : Token_Container (Data.Tokens, Descriptor_Access_Constant 
(Data.Descriptor));
+   begin
+      return Kind (Container, Cursor);
+   end Kind;
+
+   function Kind (Container : in Token_Container; Cursor : in Token_Cursor) 
return String
    is begin
       case Cursor.Kind is
       when Non_Grammar_Kind =>
-         return -Cursor.Data.Tokens.Non_Grammar (Cursor.Token_Kind).Kind;
+         return -Container.Tokens.Non_Grammar (Cursor.Token_Kind).Kind;
 
       when Terminals_Keywords =>
          return "keyword";
 
       when Terminals_Others =>
-         return -Cursor.Data.Tokens.Tokens (Cursor.Token_Kind).Kind;
+         return -Container.Tokens.Tokens (Cursor.Token_Kind).Kind;
 
       when EOI =>
          return "EOI";
@@ -573,24 +785,34 @@ package body WisiToken.BNF.Generate_Utils is
       when Nonterminal =>
          return "nonterminal";
 
+      when SOI =>
+         return "SOI";
+
       when Done =>
          raise SAL.Programmer_Error with "token cursor is done";
       end case;
    end Kind;
 
-   function Value (Cursor : in Token_Cursor) return String
+   function Value (Data : in Generate_Data; Cursor : in Token_Cursor) return 
String
+   is
+      Container : Token_Container (Data.Tokens, Descriptor_Access_Constant 
(Data.Descriptor));
+   begin
+      return Value (Container, Cursor);
+   end Value;
+
+   function Value (Container : in Token_Container; Cursor : in Token_Cursor) 
return String
    is begin
       case Cursor.Kind is
       when Non_Grammar_Kind =>
-         return -Cursor.Data.Tokens.Non_Grammar (Cursor.Token_Kind).Tokens 
(Cursor.Token_Item).Value;
+         return -Container.Tokens.Non_Grammar (Cursor.Token_Kind).Tokens 
(Cursor.Token_Item).Value;
 
       when Terminals_Keywords =>
-         return -Cursor.Data.Tokens.Keywords (Cursor.Keyword).Value;
+         return -Container.Tokens.Keywords (Cursor.Keyword).Value;
 
       when Terminals_Others =>
-         return -Cursor.Data.Tokens.Tokens (Cursor.Token_Kind).Tokens 
(Cursor.Token_Item).Value;
+         return -Container.Tokens.Tokens (Cursor.Token_Kind).Tokens 
(Cursor.Token_Item).Value;
 
-      when EOI | WisiToken_Accept | Nonterminal =>
+      when EOI | SOI | WisiToken_Accept | Nonterminal =>
             return "";
 
       when Done =>
@@ -598,19 +820,19 @@ package body WisiToken.BNF.Generate_Utils is
       end case;
    end Value;
 
-   function Repair_Image (Cursor : in Token_Cursor) return String
+   function Repair_Image (Data : in Generate_Data; Cursor : in Token_Cursor) 
return String
    is begin
       case Cursor.Kind is
       when Non_Grammar_Kind =>
-         return -Cursor.Data.Tokens.Non_Grammar (Cursor.Token_Kind).Tokens 
(Cursor.Token_Item).Repair_Image;
+         return -Data.Tokens.Non_Grammar (Cursor.Token_Kind).Tokens 
(Cursor.Token_Item).Repair_Image;
 
       when Terminals_Keywords =>
          return "";
 
       when Terminals_Others =>
-         return -Cursor.Data.Tokens.Tokens (Cursor.Token_Kind).Tokens 
(Cursor.Token_Item).Repair_Image;
+         return -Data.Tokens.Tokens (Cursor.Token_Kind).Tokens 
(Cursor.Token_Item).Repair_Image;
 
-      when EOI | WisiToken_Accept | Nonterminal =>
+      when EOI | SOI | WisiToken_Accept | Nonterminal =>
             return "";
 
       when Done =>
@@ -619,29 +841,38 @@ package body WisiToken.BNF.Generate_Utils is
    end Repair_Image;
 
    function To_Conflicts
-     (Data             : aliased in out Generate_Data;
-      Conflicts        :         in     WisiToken.BNF.Conflict_Lists.List;
-      Source_File_Name :         in     String)
-     return WisiToken.Generate.LR.Conflict_Lists.List
+     (Container        : in Token_Container;
+      Ignore_Conflicts : in Boolean;
+      Conflicts        : in WisiToken.BNF.Conflict_Lists.List;
+      Source_File_Name : in String)
+     return WisiToken.Generate.LR.Conflict_Lists.Tree
    is
       use WisiToken.Generate.LR;
-      Result   : WisiToken.Generate.LR.Conflict_Lists.List;
-      Conflict : WisiToken.Generate.LR.Conflict;
+      Result : WisiToken.Generate.LR.Conflict_Lists.Tree;
    begin
       for Item of Conflicts loop
+         declare
+            Conflict : WisiToken.Generate.LR.Conflict;
          begin
-            Conflict :=
-              (Conflict_Parse_Actions'Value (-Item.Action_A),
-               Find_Token_ID (Data, -Item.LHS_A),
-               Conflict_Parse_Actions'Value (-Item.Action_B),
-               Find_Token_ID (Data, -Item.LHS_B),
-               -1,
-               Find_Token_ID (Data, -Item.On));
-
-            Result.Append (Conflict);
+            for I of Item.Items loop
+               Conflict.Items.Insert
+                 ((Verb => WisiToken.Parse.LR.Conflict_Parse_Actions'Value 
(-I.Name),
+                   LHS  => Find_Token_ID (Container, -I.Value)));
+            end loop;
+
+            Conflict.On := Find_Token_ID (Container, -Item.On);
+
+            if String'(-Item.Resolution)'Length = 0 then
+               Conflict.Resolution := Invalid_Token_ID;
+            else
+               Conflict.Resolution := Find_Token_ID (Container, 
-Item.Resolution);
+            end if;
+
+            Result.Insert (Conflict);
+
          exception
          when E : Not_Found =>
-            if not Data.Ignore_Conflicts then
+            if not Ignore_Conflicts then
                Put_Error
                  (Error_Message
                     (Source_File_Name, Item.Source_Line, 
Ada.Exceptions.Exception_Message (E)));
@@ -686,7 +917,7 @@ package body WisiToken.BNF.Generate_Utils is
          Fast_Forward                => Item.Fast_Forward,
          Matching_Begin              => Item.Matching_Begin,
          Ignore_Check_Fail           => Item.Ignore_Check_Fail,
-         Task_Count                  => 0,
+         Zombie_Limit                => Item.Zombie_Limit,
          Check_Limit                 => Item.Check_Limit,
          Check_Delta_Limit           => Item.Check_Delta_Limit,
          Enqueue_Limit               => Item.Enqueue_Limit);
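[Note] Before the spec: a usage sketch of the reworked token iteration, showing the
new Include_SOI control. Data is assumed to come from Initialize or
Parse_Grammar_File; the cursor order is the one documented in the spec below,
with SOI last.

    procedure List_Tokens (Data : aliased in Generate_Data) is
       Container : constant Token_Container := All_Tokens (Data);
    begin
       --  Order: non-grammar tokens, keywords, other terminals, EOI,
       --  accept, nonterminals, then SOI (placed last so existing
       --  token IDs are unchanged).
       for Cursor in Container.Iterate (Include_SOI => True) loop
          Ada.Text_IO.Put_Line
            (Token_ID'Image (ID (Cursor)) & " " &
               Name (Data, Cursor) & " (" & Kind (Data, Cursor) & ")");
       end loop;
    end List_Tokens;
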
diff --git a/wisitoken-bnf-generate_utils.ads b/wisitoken-bnf-generate_utils.ads
index a2f31a2b03..7379308d4d 100644
--- a/wisitoken-bnf-generate_utils.ads
+++ b/wisitoken-bnf-generate_utils.ads
@@ -3,7 +3,7 @@
 --  Utilities for translating input file structures to WisiToken
 --  structures needed for LALR.Generate.
 --
---  Copyright (C) 2014, 2015, 2017 - 2020 Free Software Foundation, Inc.
+--  Copyright (C) 2014, 2015, 2017 - 2022 Free Software Foundation, Inc.
 --
 --  The WisiToken package is free software; you can redistribute it
 --  and/or modify it under terms of the GNU General Public License as
@@ -22,14 +22,17 @@ pragma License (Modified_GPL);
 with Ada.Iterator_Interfaces;
 with WisiToken.Generate.LR;
 with WisiToken.Parse.LR;
+with WisiToken.Parse.LR.Parser_No_Recover;
 with WisiToken.Productions;
 with WisiToken_Grammar_Runtime;
 package WisiToken.BNF.Generate_Utils is
 
    EOI_Name : constant String := "Wisi_EOI";
-   --  EOI_Name is used for Descriptor.EOI_ID token; it must match Emacs 
ada-mode
-   --  wisi.el wisi-eoi-term. It must be a valid Ada identifier when
-   --  "_ID" is appended.
+   --  EOI_Name is used for Descriptor.EOI_ID token. It must be a valid
+   --  Ada identifier when "_ID" is appended.
+
+   SOI_Name : constant String := "Wisi_SOI";
+   --  Similar to EOI_Name
 
    WisiToken_Accept_Name : constant String := "wisitoken_accept";
 
@@ -49,28 +52,44 @@ package WisiToken.BNF.Generate_Utils is
       --  splitting them out.
 
       Ignore_Conflicts   : Boolean                       := False;
-      Conflicts          : WisiToken.Generate.LR.Conflict_Lists.List;
+      Conflicts          : WisiToken.Generate.LR.Conflict_Lists.Tree;
       LR_Parse_Table     : WisiToken.Parse.LR.Parse_Table_Ptr;
       Parser_State_Count : WisiToken.Unknown_State_Index := 0;
    end record;
 
    function Initialize
-     (Input_Data       : aliased in WisiToken_Grammar_Runtime.User_Data_Type;
-      Ignore_Conflicts :         in Boolean := False)
+     (Input_Data        : in WisiToken_Grammar_Runtime.User_Data_Access;
+      Grammar_File_Name : in String;
+      Ignore_Conflicts  : in Boolean := False)
      return Generate_Data;
 
-   function Find_Token_ID (Data : aliased in Generate_Data; Token : in String) 
return Token_ID;
+   procedure Parse_Grammar_File
+     (Grammar_Parser    : in out WisiToken.Parse.LR.Parser_No_Recover.Parser;
+      Grammar_File_Name : in     String);
+
+   function Parse_Grammar_File
+     (Grammar_File_Name  : in     String;
+      Input_Data         : in     WisiToken_Grammar_Runtime.User_Data_Access;
+      Generate_Algorithm : in     WisiToken.BNF.Generate_Algorithm;
+      Lexer              : in     WisiToken.BNF.Lexer_Type;
+      Trace              : in out WisiToken.Trace'Class;
+      Ignore_Conflicts   : in     Boolean)
+     return Generate_Data;
 
-   type Token_Container (Data : not null access constant Generate_Data) is 
tagged null record
+   type Token_Container
+     (Tokens     : not null access constant WisiToken.BNF.Tokens;
+      Descriptor : WisiToken.Descriptor_Access_Constant)
+     is tagged null record
    with
      Constant_Indexing => Constant_Reference,
      Default_Iterator  => Iterate,
      Iterator_Element  => Ada.Strings.Unbounded.Unbounded_String;
-   --  We need a container type to define an iterator; the actual data is
-   --  in Data.Tokens. The Iterator_Element is given by Token_Name below.
+   --  The Iterator_Element is given by Token_Name below.
 
    function All_Tokens (Data : aliased in Generate_Data) return 
Token_Container;
 
+   function Find_Token_ID (Data : in Generate_Data; Token : in String) return 
Token_ID;
+
    type Token_Constant_Reference_Type
      (Element : not null access constant 
Ada.Strings.Unbounded.Unbounded_String)
      is null record
@@ -85,6 +104,7 @@ package WisiToken.BNF.Generate_Utils is
    --  4. EOI
    --  5. Accept
    --  6. Nonterminals
+   --  7. SOI
    --
    --  Within each group, tokens occur in the order they were declared in
    --  the grammar file.
@@ -98,51 +118,50 @@ package WisiToken.BNF.Generate_Utils is
    function Has_Element (Cursor : in Token_Cursor) return Boolean is (not 
Is_Done (Cursor));
    package Iterator_Interfaces is new Ada.Iterator_Interfaces (Token_Cursor, 
Has_Element);
    function Iterate
-     (Container    : aliased    Token_Container;
+     (Container    : aliased in Token_Container;
       Non_Grammar  :         in Boolean := True;
-      Nonterminals :         in Boolean := True)
+      Nonterminals :         in Boolean := True;
+      Include_SOI  :         in Boolean := True)
      return Iterator_Interfaces.Forward_Iterator'Class;
 
    function First
-     (Data         : aliased in Generate_Data;
-      Non_Grammar  :         in Boolean;
-      Nonterminals :         in Boolean)
+     (Data         : in Generate_Data;
+      Non_Grammar  : in Boolean := True;
+      Nonterminals : in Boolean := True)
      return Token_Cursor;
-   procedure Next (Cursor : in out Token_Cursor; Nonterminals : in Boolean);
+
+   procedure Next
+     (Data         : in     Generate_Data;
+      Cursor       : in out Token_Cursor;
+      Nonterminals : in     Boolean := True;
+      Include_SOI  : in     Boolean := True);
 
    function ID (Cursor : in Token_Cursor) return Token_ID;
 
-   function Name (Cursor : in Token_Cursor) return String;
+   function Name (Data : in Generate_Data; Cursor : in Token_Cursor) return 
String;
    --  Return the token name from the .wy file:
    --  Keywords: Keywords (i).name
    --  Tokens  : Tokens (i).Tokens (j).name
    --  Rules   : Rules (i).Left_Hand_Side
 
-   function Kind (Cursor : in Token_Cursor) return String;
+   function Kind (Data : in Generate_Data; Cursor : in Token_Cursor) return 
String;
    --  Return the token kind from the .wy file:
    --  Keywords: "keyword"
    --  Tokens  : Tokens (i).Kind
    --  Rules   : "nonterminal"
 
-   function Value (Cursor : in Token_Cursor) return String;
+   function Value (Data : in Generate_Data; Cursor : in Token_Cursor) return 
String;
    --  Return the token value from the .wy file:
    --  Keywords: Keywords (i).value
    --  Tokens  : Tokens (i).Tokens (j).Value
    --  Rules   : empty string (they have no Value)
 
-   function Repair_Image (Cursor : in Token_Cursor) return String;
+   function Repair_Image (Data : in Generate_Data; Cursor : in Token_Cursor) 
return String;
    --  Return the token repair image from the .wy file:
    --  Keywords: empty string
    --  Tokens  : Tokens (i).Tokens (j).Repair_Image
    --  Rules   : empty string
 
-   function To_Conflicts
-     (Data             : aliased in out Generate_Data;
-      Conflicts        :         in     WisiToken.BNF.Conflict_Lists.List;
-      Source_File_Name :         in     String)
-     return WisiToken.Generate.LR.Conflict_Lists.List;
-   --  Not included in Initialize because algorithms have no conflicts.
-
    function To_Nonterminal_ID_Set
      (Data : aliased in Generate_Data;
       Item :         in String_Lists.List)
@@ -160,9 +179,9 @@ package WisiToken.BNF.Generate_Utils is
 private
 
    type Token_Cursor_Kind is
-     (Non_Grammar_Kind, Terminals_Keywords, Terminals_Others, EOI, 
WisiToken_Accept, Nonterminal, Done);
+     (Non_Grammar_Kind, Terminals_Keywords, Terminals_Others, EOI, 
WisiToken_Accept, Nonterminal, SOI, Done);
 
-   type Token_Cursor (Data : not null access constant Generate_Data) is record
+   type Token_Cursor is record
       Kind        : Token_Cursor_Kind;
       ID          : Token_ID;
       Token_Kind  : WisiToken.BNF.Token_Lists.Cursor; -- Non_Grammar or 
Tokens, depending on Kind
diff --git a/wisitoken-bnf-output_ada.adb b/wisitoken-bnf-output_ada.adb
index 3857a0f97e..0b727b1504 100644
--- a/wisitoken-bnf-output_ada.adb
+++ b/wisitoken-bnf-output_ada.adb
@@ -4,7 +4,7 @@
 --  parameters, and a parser for that grammar. The grammar parser
 --  actions must be Ada.
 --
---  Copyright (C) 2017 - 2020 Free Software Foundation, Inc.
+--  Copyright (C) 2017 - 2022 Free Software Foundation, Inc.
 --
 --  The WisiToken package is free software; you can redistribute it
 --  and/or modify it under terms of the GNU General Public License as
@@ -23,6 +23,7 @@ pragma License (Modified_GPL);
 with Ada.Strings.Fixed;
 with Ada.Text_IO; use Ada.Text_IO;
 with GNAT.Regexp;
+with System.Multiprocessors;
 with WisiToken.BNF.Generate_Packrat;
 with WisiToken.BNF.Generate_Utils;
 with WisiToken.BNF.Output_Ada_Common; use WisiToken.BNF.Output_Ada_Common;
@@ -30,19 +31,24 @@ with WisiToken.Generate.Packrat;
 with WisiToken_Grammar_Runtime;
 procedure WisiToken.BNF.Output_Ada
   (Input_Data            :         in WisiToken_Grammar_Runtime.User_Data_Type;
+   Grammar_File_Name     :         in String;
    Output_File_Name_Root :         in String;
    Generate_Data         : aliased in WisiToken.BNF.Generate_Utils.Generate_Data;
    Packrat_Data          :         in WisiToken.Generate.Packrat.Data;
    Tuple                 :         in Generate_Tuple;
    Test_Main             :         in Boolean;
-   Multiple_Tuples       :         in Boolean)
+   Multiple_Tuples       :         in Boolean;
+   Generate_Task_Count   :         in System.Multiprocessors.CPU_Range)
 is
    Common_Data : Output_Ada_Common.Common_Data := WisiToken.BNF.Output_Ada_Common.Initialize
-     (Input_Data, Tuple, Output_File_Name_Root, Check_Interface => False);
+     (Input_Data, Tuple, Grammar_File_Name, Output_File_Name_Root, Check_Interface => False);
 
    Gen_Alg_Name : constant String :=
      (if Test_Main or Multiple_Tuples
-      then "_" & Generate_Algorithm_Image (Common_Data.Generate_Algorithm).all
+      then "_" & Generate_Algorithm_Image (Common_Data.Generate_Algorithm).all 
&
+         (if Common_Data.Generate_Algorithm = LR1
+         then "_t" & Ada.Strings.Fixed.Trim (Generate_Task_Count'Image, 
Ada.Strings.Both)
+         else "")
       else "");
 
    function Symbol_Regexp (Item : in String) return String
@@ -61,7 +67,7 @@ is
       Label_Count  : in              Ada.Containers.Count_Type;
       Package_Name : in              String)
    is
-      use all type Ada.Containers.Count_Type;
+      pragma Unreferenced (Label_Count);
       use GNAT.Regexp;
       use Generate_Utils;
       use WisiToken.Generate;
@@ -71,7 +77,6 @@ is
       User_Data_Regexp : constant Regexp := Compile (Symbol_Regexp ("User_Data"), Case_Sensitive => False);
       Tree_Regexp      : constant Regexp := Compile (Symbol_Regexp ("Tree"), Case_Sensitive      => False);
       Nonterm_Regexp   : constant Regexp := Compile (Symbol_Regexp ("Nonterm"), Case_Sensitive   => False);
-      Tokens_Regexp    : constant Regexp := Compile (Symbol_Regexp ("Tokens"), Case_Sensitive    => False);
 
       Body_File : File_Type;
    begin
@@ -83,19 +88,21 @@ is
       Put_Raw_Code (Ada_Comment, Input_Data.Raw_Code (Actions_Body_Context));
       New_Line;
 
-      if Label_Count > 0 then
-         Put_Line ("with SAL;");
+      --  If labels are used in actions, we need to add 'with SAL;' here,
+      --  for Peek_Type. However, we can't just check Label_Count > 0,
+      --  because some declared labels are not actually used in actions. The
+      --  user will have to add 'with SAL;' in a code declaration.
+
+      if Input_Data.Check_Count > 0 then
+         --  For Match_Names etc
+         Indent_Line ("with  WisiToken.In_Parse_Actions; use 
WisiToken.In_Parse_Actions;");
+         New_Line;
       end if;
 
       Put_Line ("package body " & Package_Name & " is");
       Indent := Indent + 3;
       New_Line;
 
-      if Input_Data.Check_Count > 0 then
-         Indent_Line ("use WisiToken.Semantic_Checks;");
-         New_Line;
-      end if;
-
       Put_Raw_Code (Ada_Comment, Input_Data.Raw_Code (Actions_Body_Pre));
 
       --  generate Action and Check subprograms.
@@ -109,13 +116,6 @@ is
             LHS_ID    : constant WisiToken.Token_ID := Find_Token_ID (Generate_Data, -Rule.Left_Hand_Side);
             RHS_Index : Integer                     := 0;
 
-            function Is_Elisp (Action : in Unbounded_String) return Boolean
-            is begin
-               return Length (Action) >= 6 and then
-                 (Slice (Action, 1, 6) = "(progn" or
-                    Slice (Action, 1, 5) = "wisi-");
-            end Is_Elisp;
-
             procedure Put_Labels (RHS : in RHS_Type; Line : in String)
             is
                Output : array (Rule.Labels.First_Index .. Rule.Labels.Last_Index) of Boolean := (others => False);
@@ -154,7 +154,7 @@ is
 
          begin
             for RHS of Rule.Right_Hand_Sides loop
-               if Length (RHS.Action) > 0 and then not Is_Elisp (RHS.Action) then
+               if Length (RHS.Action) > 0 then
                   declare
                      Line : constant String := -RHS.Action;
                      --  Actually multiple lines; we assume the formatting is adequate.
@@ -164,7 +164,6 @@ is
                      Unref_User_Data : Boolean := True;
                      Unref_Tree      : Boolean := True;
                      Unref_Nonterm   : Boolean := True;
-                     Unref_Tokens    : Boolean := True;
                      Need_Comma      : Boolean := False;
 
                      procedure Check_Unref (Line : in String)
@@ -178,21 +177,17 @@ is
                         if Match (Line, Nonterm_Regexp) then
                            Unref_Nonterm := False;
                         end if;
-                        if Match (Line, Tokens_Regexp) then
-                           Unref_Tokens := False;
-                        end if;
                      end Check_Unref;
                   begin
                      Check_Unref (Line);
                      Indent_Line ("procedure " & Name);
                      Indent_Line (" (User_Data : in out 
WisiToken.Syntax_Trees.User_Data_Type'Class;");
                      Indent_Line ("  Tree      : in out 
WisiToken.Syntax_Trees.Tree;");
-                     Indent_Line ("  Nonterm   : in     
WisiToken.Valid_Node_Index;");
-                     Indent_Line ("  Tokens    : in     
WisiToken.Valid_Node_Index_Array)");
+                     Indent_Line ("  Nonterm   : in     
WisiToken.Syntax_Trees.Valid_Node_Access)");
                      Indent_Line ("is");
 
                      Indent := Indent + 3;
-                     if Unref_User_Data or Unref_Tree or Unref_Nonterm or Unref_Tokens then
+                     if Unref_User_Data or Unref_Tree or Unref_Nonterm then
                         Indent_Start ("pragma Unreferenced (");
 
                         if Unref_User_Data then
@@ -207,10 +202,6 @@ is
                            Put ((if Need_Comma then ", " else "") & "Nonterm");
                            Need_Comma := True;
                         end if;
-                        if Unref_Tokens then
-                           Put ((if Need_Comma then ", " else "") & "Tokens");
-                           Need_Comma := True;
-                        end if;
                         Put_Line (");");
                      end if;
 
@@ -226,31 +217,31 @@ is
                   end;
                end if;
 
-               if Length (RHS.Check) > 0 and then not Is_Elisp (RHS.Check) then
+               if Length (RHS.Check) > 0 then
                   declare
                      use Ada.Strings.Fixed;
                      Line          : constant String  := -RHS.Check;
                      Name          : constant String  := Check_Names (LHS_ID)(RHS_Index).all;
-                     Unref_Lexer   : constant Boolean := 0 = Index (Line, "Lexer");
+                     Unref_Tree    : constant Boolean := 0 = Index (Line, "Tree");
                      Unref_Nonterm : constant Boolean := 0 = Index (Line, "Nonterm");
                      Unref_Tokens  : constant Boolean := 0 = Index (Line, "Tokens");
                      Unref_Recover : constant Boolean := 0 = Index (Line, "Recover_Active");
                      Need_Comma    : Boolean          := False;
                   begin
                      Indent_Line ("function " & Name);
-                     Indent_Line (" (Lexer          : access constant 
WisiToken.Lexer.Instance'Class;");
-                     Indent_Line ("  Nonterm        : in out 
WisiToken.Recover_Token;");
-                     Indent_Line ("  Tokens         : in     
WisiToken.Recover_Token_Array;");
-                     Indent_Line ("  Recover_Active : in     Boolean)");
-                     Indent_Line (" return 
WisiToken.Semantic_Checks.Check_Status");
+                     Indent_Line ("  (Tree           : in     
WisiToken.Syntax_Trees.Tree;");
+                     Indent_Line ("   Nonterm        : in out 
WisiToken.Syntax_Trees.Recover_Token;");
+                     Indent_Line ("   Tokens         : in     
WisiToken.Syntax_Trees.Recover_Token_Array;");
+                     Indent_Line ("   Recover_Active : in     Boolean)");
+                     Indent_Line ("  return 
WisiToken.Syntax_Trees.In_Parse_Actions.Status");
                      Indent_Line ("is");
 
                      Indent := Indent + 3;
-                     if Unref_Lexer or Unref_Nonterm or Unref_Tokens or Unref_Recover then
+                     if Unref_Tree or Unref_Nonterm or Unref_Tokens or Unref_Recover then
                         Indent_Start ("pragma Unreferenced (");
 
-                        if Unref_Lexer then
-                           Put ("Lexer");
+                        if Unref_Tree then
+                           Put ("Tree");
                            Need_Comma := True;
                         end if;
                         if Unref_Nonterm then
@@ -313,24 +304,20 @@ is
       Put_Raw_Code (Ada_Comment, Input_Data.Raw_Code (Copyright_License));
       New_Line;
 
-      if (case Common_Data.Generate_Algorithm is
-          when LR_Generate_Algorithm => Input_Data.Action_Count > 0 or Input_Data.Check_Count > 0,
-          when Packrat_Generate_Algorithm | External => Input_Data.Action_Count > 0)
-      then
-         Put_Line ("with " & Actions_Package_Name & "; use " & Actions_Package_Name & ";");
-      end if;
-
       case Common_Data.Lexer is
-      when None | Elisp_Lexer =>
+      when None | Tree_Sitter_Lexer =>
          null;
 
       when re2c_Lexer =>
+         Put_Line ("with SAL;");
          Put_Line ("with WisiToken.Lexer.re2c;");
          Put_Line ("with " & re2c_Package_Name & ";");
       end case;
 
+      Put_Line ("with " & Actions_Package_Name & "; use " & 
Actions_Package_Name & ";");
+
       case Common_Data.Generate_Algorithm is
-      when LR_Generate_Algorithm =>
+      when LR_Generate_Algorithm | Tree_Sitter =>
          null;
 
       when Packrat_Gen =>
@@ -349,32 +336,32 @@ is
       New_Line;
 
       case Common_Data.Lexer is
-      when None | Elisp_Lexer =>
+      when None | Tree_Sitter_Lexer =>
          null;
 
       when re2c_Lexer =>
-         Indent_Line ("package Lexer is new WisiToken.Lexer.re2c");
-         Indent_Line ("  (" & re2c_Package_Name & ".New_Lexer,");
-         Indent_Line ("   " & re2c_Package_Name & ".Free_Lexer,");
-         Indent_Line ("   " & re2c_Package_Name & ".Reset_Lexer,");
-         Indent_Line ("   " & re2c_Package_Name & ".Next_Token);");
-         New_Line;
+         Create_re2c_Lexer (Generate_Data, Output_File_Name_Root);
       end case;
 
       case Common_Data.Generate_Algorithm is
       when LR_Generate_Algorithm =>
-         LR_Create_Create_Parser (Input_Data, Common_Data, Generate_Data);
+         LR_Create_Create_Parse_Table (Input_Data, Common_Data, Generate_Data, Actions_Package_Name);
+         Create_Create_Productions (Generate_Data);
 
       when Packrat_Gen =>
          WisiToken.BNF.Generate_Packrat (Packrat_Data, Generate_Data);
-
-         Packrat_Create_Create_Parser (Common_Data, Generate_Data, Packrat_Data);
+         Create_Create_Productions (Generate_Data);
+         Packrat_Create_Create_Parser (Actions_Package_Name, Common_Data, Generate_Data, Packrat_Data);
 
       when Packrat_Proc =>
-         Packrat_Create_Create_Parser (Common_Data, Generate_Data, Packrat_Data);
+         Create_Create_Productions (Generate_Data);
+         Packrat_Create_Create_Parser (Actions_Package_Name, Common_Data, Generate_Data, Packrat_Data);
 
       when External =>
          External_Create_Create_Grammar (Generate_Data);
+
+      when Tree_Sitter =>
+         null;
       end case;
 
       Put_Line ("end " & Main_Package_Name & ";");
@@ -382,9 +369,7 @@ is
       Set_Output (Standard_Output);
    end Create_Ada_Main_Body;
 
-   procedure Create_Ada_Test_Main
-     (Actions_Package_Name : in String;
-      Main_Package_Name    : in String)
+   procedure Create_Ada_Test_Main (Main_Package_Name : in String)
    is
       use WisiToken.Generate;
 
@@ -401,10 +386,10 @@ is
                else "Gen_LR_Parser_No_Recover_Run")),
 
          when Packrat_Generate_Algorithm => "Gen_Packrat_Parser_Run",
-         when External => raise SAL.Programmer_Error);
+         when External => raise SAL.Programmer_Error,
+         when Tree_Sitter => "Gen_Tree_Sitter_Parser_Run");
 
-      Unit_Name : constant String := File_Name_To_Ada (Output_File_Name_Root) &
-        "_" & Generate_Algorithm'Image (Common_Data.Generate_Algorithm) & 
"_Run";
+      Unit_Name : constant String := File_Name_To_Ada (Output_File_Name_Root) 
& Gen_Alg_Name & "_Run";
 
       Default_Language_Runtime_Package : constant String := "WisiToken.Parse.LR.McKenzie_Recover." & File_Name_To_Ada
         (Output_File_Name_Root);
@@ -418,13 +403,13 @@ is
       Indent := 1;
 
       Put_File_Header (Ada_Comment, Use_Tuple => True, Tuple => Tuple);
-      --  no Copyright_License; just a test file
       New_Line;
 
       Put_Line ("with " & Generic_Package_Name & ";");
-      Put_Line ("with " & Actions_Package_Name & ";");
       Put_Line ("with " & Main_Package_Name & ";");
-      if Input_Data.Language_Params.Error_Recover and
+
+      if Common_Data.Generate_Algorithm in LR_Generate_Algorithm and
+          Input_Data.Language_Params.Error_Recover and
         Input_Data.Language_Params.Use_Language_Runtime
       then
          declare
@@ -440,42 +425,44 @@ is
       end if;
 
       Put_Line ("procedure " & Unit_Name & " is new " & Generic_Package_Name);
-      Put_Line ("  (" & Actions_Package_Name & ".Descriptor,");
-      if Common_Data.Text_Rep then
-         Put_Line ("   """ & Output_File_Name_Root & "_" &
-                     To_Lower (Generate_Algorithm_Image (Tuple.Gen_Alg).all) &
-                     "_parse_table.txt"",");
-      end if;
-      if Input_Data.Language_Params.Error_Recover then
-         if Input_Data.Language_Params.Use_Language_Runtime then
-            Put_Line ("Fixes'Access, Matching_Begin_Tokens'Access, 
String_ID_Set'Access,");
-         else
-            Put_Line ("null, null, null,");
+      Put_Line ("  (");
+      case Common_Data.Generate_Algorithm is
+      when LR_Generate_Algorithm =>
+         if Common_Data.Text_Rep then
+            Put_Line
+              ("   """ &
+                 Text_Rep_File_Name
+                   (Output_File_Name_Root, Tuple, Generate_Task_Count, Input_Data.If_Lexer_Present, Test_Main) & """,");
          end if;
-      end if;
-      Put_Line (Main_Package_Name & ".Create_Parser);");
+         if Input_Data.Language_Params.Error_Recover then
+            if Input_Data.Language_Params.Use_Language_Runtime then
+               Put_Line ("   Fixes'Access, Matching_Begin_Tokens'Access, 
String_ID_Set'Access,");
+            else
+               Put_Line ("   null, null, null,");
+            end if;
+         end if;
+         Put_Line ("   " & Main_Package_Name & ".Create_Parse_Table,");
+         Put_Line ("   " & Main_Package_Name & ".Create_Productions,");
+         Put_Line ("   " & Main_Package_Name & ".Create_Lexer);");
+
+      when Packrat_Generate_Algorithm | Tree_Sitter =>
+         Put_Line ("   " & Main_Package_Name & ".Create_Parser);");
+
+      when External =>
+         raise SAL.Programmer_Error;
+      end case;
       Close (File);
       Set_Output (Standard_Output);
    end Create_Ada_Test_Main;
 
 begin
-   case Common_Data.Lexer is
-   when None | re2c_Lexer =>
-      null;
-
-   when Elisp_Lexer =>
-      raise User_Error with WisiToken.Generate.Error_Message
-        (Input_Data.Grammar_Lexer.File_Name, 1, "Ada output language does not support " & Lexer_Image
-           (Common_Data.Lexer).all & " lexer");
-   end case;
-
    case Tuple.Interface_Kind is
    when None  =>
       null;
 
    when Module | Process =>
       raise User_Error with WisiToken.Generate.Error_Message
-        (Input_Data.Grammar_Lexer.File_Name, 1, "Ada output language does not support setting Interface");
+        (Grammar_File_Name, 1, "Ada output language does not support setting Interface");
    end case;
 
    declare
@@ -500,7 +487,7 @@ begin
          Create_Ada_Main_Spec (To_Lower (Main_Package_Name) & ".ads", Main_Package_Name, Input_Data, Common_Data);
 
          if Test_Main then
-            Create_Ada_Test_Main (Actions_Package_Name, Main_Package_Name);
+            Create_Ada_Test_Main (Main_Package_Name);
          end if;
       end if;
    end;
diff --git a/wisitoken-bnf-output_ada_common.adb b/wisitoken-bnf-output_ada_common.adb
index 3a594bb2f1..ffd61d4698 100644
--- a/wisitoken-bnf-output_ada_common.adb
+++ b/wisitoken-bnf-output_ada_common.adb
@@ -2,7 +2,7 @@
 --
 --  See spec.
 --
---  Copyright (C) 2017 - 2020 Free Software Foundation, Inc.
+--  Copyright (C) 2017 - 2022 Free Software Foundation, Inc.
 --
 --  This library is free software;  you can redistribute it and/or modify it
 --  under terms of the  GNU General Public License  as published by the Free
@@ -19,13 +19,11 @@ pragma License (GPL);
 
 with Ada.Strings.Fixed;
 with Ada.Text_IO; use Ada.Text_IO;
-with System.Multiprocessors;
 with WisiToken.BNF.Generate_Grammar;
 with WisiToken.BNF.Utils;
 with WisiToken.Generate; use WisiToken.Generate;
 with WisiToken.Parse.LR;
 with WisiToken.Productions;
-with WisiToken.Syntax_Trees;
 package body WisiToken.BNF.Output_Ada_Common is
 
    --  Body subprograms, alphabetical
@@ -59,7 +57,7 @@ package body WisiToken.BNF.Output_Ada_Common is
    end Duplicate_Reduce;
 
    function Image (Item : in Boolean) return String
-     is (if Item then "True" else "False");
+   is (if Item then "True" else "False");
 
    function Symbols_Image (State : in Parse.LR.Parse_State) return String
    is
@@ -96,10 +94,10 @@ package body WisiToken.BNF.Output_Ada_Common is
    is
       use Generate_Utils;
 
-      Descriptor  : WisiToken.Descriptor renames Generate_Data.Descriptor.all;
-      Spec_File : File_Type;
-      Paren_Done  : Boolean      := False;
-      Cursor      : Token_Cursor := First (Generate_Data, Non_Grammar => True, Nonterminals => True);
+      Descriptor : WisiToken.Descriptor renames Generate_Data.Descriptor.all;
+      Spec_File  : File_Type;
+      Paren_Done : Boolean      := False;
+      Cursor     : Token_Cursor := First (Generate_Data);
    begin
       Create (Spec_File, Out_File, Output_File_Name);
       Set_Output (Spec_File);
@@ -115,13 +113,9 @@ package body WisiToken.BNF.Output_Ada_Common is
       if not (Input_Data.Action_Count > 0 or Input_Data.Check_Count > 0) then
          Put_Line ("with WisiToken;");
       end if;
-      if Input_Data.Action_Count > 0 then
+      if Input_Data.Action_Count > 0 or Input_Data.Check_Count > 0 then
          Put_Line ("with WisiToken.Syntax_Trees;");
       end if;
-      if Input_Data.Check_Count > 0 then
-         Put_Line ("with WisiToken.Lexer;");
-         Put_Line ("with WisiToken.Semantic_Checks;");
-      end if;
       Put_Raw_Code (Ada_Comment, Input_Data.Raw_Code (Actions_Spec_Context));
       Put_Line ("package " & Package_Name & " is");
       Indent := Indent + 3;
@@ -129,30 +123,31 @@ package body WisiToken.BNF.Output_Ada_Common is
 
       Put_Raw_Code (Ada_Comment, Input_Data.Raw_Code (Actions_Spec_Pre));
 
-      Indent_Line ("Descriptor : aliased WisiToken.Descriptor :=");
+      Indent_Line ("Descriptor : aliased constant WisiToken.Descriptor :=");
       Indent_Line ("  (First_Terminal    =>" & WisiToken.Token_ID'Image 
(Descriptor.First_Terminal) & ",");
       Indent := Indent + 3;
-      Indent_Line ("Last_Terminal     =>" & WisiToken.Token_ID'Image 
(Descriptor.Last_Terminal) & ",");
-      Indent_Line ("First_Nonterminal =>" & WisiToken.Token_ID'Image 
(Descriptor.First_Nonterminal) & ",");
-      Indent_Line ("Last_Nonterminal  =>" & WisiToken.Token_ID'Image 
(Descriptor.Last_Nonterminal) & ",");
-      Indent_Line ("EOI_ID            =>" & WisiToken.Token_ID'Image 
(Descriptor.EOI_ID) & ",");
-      Indent_Line ("Accept_ID         =>" & WisiToken.Token_ID'Image 
(Descriptor.Accept_ID) & ",");
-      Indent_Line ("Case_Insensitive  => " & Image 
(Input_Data.Language_Params.Case_Insensitive) & ",");
-      Indent_Line ("New_Line_ID       =>" & WisiToken.Token_ID'Image 
(Descriptor.New_Line_ID) & ",");
-      Indent_Line ("String_1_ID       =>" & WisiToken.Token_ID'Image 
(Descriptor.String_1_ID) & ",");
-      Indent_Line ("String_2_ID       =>" & WisiToken.Token_ID'Image 
(Descriptor.String_2_ID) & ",");
-      Indent_Line ("Image             =>");
+      Indent_Line ("Last_Terminal       =>" & WisiToken.Token_ID'Image 
(Descriptor.Last_Terminal) & ",");
+      Indent_Line ("First_Nonterminal   =>" & WisiToken.Token_ID'Image 
(Descriptor.First_Nonterminal) & ",");
+      Indent_Line ("Last_Nonterminal    =>" & WisiToken.Token_ID'Image 
(Descriptor.Last_Nonterminal) & ",");
+      Indent_Line ("SOI_ID              =>" & WisiToken.Token_ID'Image 
(Descriptor.SOI_ID) & ",");
+      Indent_Line ("EOI_ID              =>" & WisiToken.Token_ID'Image 
(Descriptor.EOI_ID) & ",");
+      Indent_Line ("Accept_ID           =>" & WisiToken.Token_ID'Image 
(Descriptor.Accept_ID) & ",");
+      Indent_Line ("Case_Insensitive    => " & Image 
(Input_Data.Language_Params.Case_Insensitive) & ",");
+      Indent_Line ("New_Line_ID         =>" & WisiToken.Token_ID'Image 
(Descriptor.New_Line_ID) & ",");
+      Indent_Line ("String_1_ID         =>" & WisiToken.Token_ID'Image 
(Descriptor.String_1_ID) & ",");
+      Indent_Line ("String_2_ID         =>" & WisiToken.Token_ID'Image 
(Descriptor.String_2_ID) & ",");
+      Indent_Line ("Image               =>");
       Indent_Start ("  (");
       Indent := Indent + 3;
       loop
          exit when Is_Done (Cursor);
          if Paren_Done then
-            Indent_Start ("new String'(""" & (Name (Cursor)));
+            Indent_Start ("new String'(""" & (Name (Generate_Data, Cursor)));
          else
-            Put ("new String'(""" & (Name (Cursor)));
+            Put ("new String'(""" & (Name (Generate_Data, Cursor)));
             Paren_Done := True;
          end if;
-         Next (Cursor, Nonterminals => True);
+         Next (Generate_Data, Cursor);
          if Is_Done (Cursor) then
             Put_Line (""")),");
          else
@@ -170,19 +165,19 @@ package body WisiToken.BNF.Output_Ada_Common is
       if Input_Data.Language_Params.Declare_Enums then
          Paren_Done := False;
 
-         Cursor := First (Generate_Data, Non_Grammar => True, Nonterminals => True);
+         Cursor := First (Generate_Data);
          Indent_Line ("type Token_Enum_ID is");
          Indent_Start ("  (");
          Indent := Indent + 3;
          loop
             exit when Is_Done (Cursor);
             if Paren_Done then
-               Indent_Start (To_Token_Ada_Name (Name (Cursor)));
+               Indent_Start (To_Token_Ada_Name (Name (Generate_Data, Cursor)));
             else
-               Put (To_Token_Ada_Name (Name (Cursor)));
+               Put (To_Token_Ada_Name (Name (Generate_Data, Cursor)));
                Paren_Done := True;
             end if;
-            Next (Cursor, Nonterminals => True);
+            Next (Generate_Data, Cursor);
             if Is_Done (Cursor) then
                Put_Line (");");
             else
@@ -210,10 +205,9 @@ package body WisiToken.BNF.Output_Ada_Common is
             for Name of Name_List.all loop
                if Name /= null then
                   Indent_Line ("procedure " & Name.all);
-                  Indent_Line (" (User_Data : in out 
WisiToken.Syntax_Trees.User_Data_Type'Class;");
-                  Indent_Line ("  Tree      : in out 
WisiToken.Syntax_Trees.Tree;");
-                  Indent_Line ("  Nonterm   : in     
WisiToken.Valid_Node_Index;");
-                  Indent_Line ("  Tokens    : in     
WisiToken.Valid_Node_Index_Array);");
+                  Indent_Line ("  (User_Data : in out 
WisiToken.Syntax_Trees.User_Data_Type'Class;");
+                  Indent_Line ("   Tree      : in out 
WisiToken.Syntax_Trees.Tree;");
+                  Indent_Line ("   Nonterm   : in     
WisiToken.Syntax_Trees.Valid_Node_Access);");
                end if;
             end loop;
          end if;
@@ -224,11 +218,11 @@ package body WisiToken.BNF.Output_Ada_Common is
             for Name of Name_List.all loop
                if Name /= null then
                   Indent_Line ("function " & Name.all);
-                  Indent_Line (" (Lexer          : access constant 
WisiToken.Lexer.Instance'Class;");
-                  Indent_Line ("  Nonterm        : in out 
WisiToken.Recover_Token;");
-                  Indent_Line ("  Tokens         : in     
WisiToken.Recover_Token_Array;");
+                  Indent_Line (" (Tree           : in     
WisiToken.Syntax_Trees.Tree;");
+                  Indent_Line ("  Nonterm        : in out 
WisiToken.Syntax_Trees.Recover_Token;");
+                  Indent_Line ("  Tokens         : in     
WisiToken.Syntax_Trees.Recover_Token_Array;");
                   Indent_Line ("  Recover_Active : in     Boolean)");
-                  Indent_Line (" return 
WisiToken.Semantic_Checks.Check_Status;");
+                  Indent_Line (" return 
WisiToken.Syntax_Trees.In_Parse_Actions.Status;");
                end if;
             end loop;
          end if;
@@ -254,35 +248,22 @@ package body WisiToken.BNF.Output_Ada_Common is
 
       procedure LR_Process
       is begin
-         Indent_Line ("procedure Create_Parser");
-         if Input_Data.Language_Params.Error_Recover then
-            Indent_Line ("  (Parser                         :    out 
WisiToken.Parse.LR.Parser.Parser;");
-            Indent_Line ("   Language_Fixes                 : in     
WisiToken.Parse.LR.Parser.Language_Fixes_Access;");
-            Indent_Line ("   Language_Matching_Begin_Tokens : in     " &
-                           
"WisiToken.Parse.LR.Parser.Language_Matching_Begin_Tokens_Access;");
-            Indent_Line ("   Language_String_ID_Set         : in     " &
-                           
"WisiToken.Parse.LR.Parser.Language_String_ID_Set_Access;");
-         else
-            Indent_Line ("  (Parser                       :    out 
WisiToken.Parse.LR.Parser_No_Recover.Parser;");
-            Indent_Line ("   --  no error recovery");
-         end if;
-         Indent_Line ("   Trace                        : not null access 
WisiToken.Trace'Class;");
-         Indent_Start ("   User_Data                    : in     
WisiToken.Syntax_Trees.User_Data_Access");
-
+         Indent_Line ("function Create_Parse_Table");
          if Common_Data.Text_Rep then
-            Put_Line (";");
-            Indent_Line ("   Text_Rep_File_Name : in String);");
-         else
-            Put_Line (");");
+            Indent_Line ("  (Text_Rep_File_Name : in String)");
          end if;
+         Indent_Line ("  return WisiToken.Parse.LR.Parse_Table_Ptr;");
          New_Line;
+         Indent_Line ("function Create_Productions return 
WisiToken.Syntax_Trees.Production_Info_Trees.Vector;");
+         New_Line;
+         Indent_Line ("function Create_Lexer (Trace : in 
WisiToken.Trace_Access) return WisiToken.Lexer.Handle;");
       end LR_Process;
 
       procedure Packrat_Process
       is begin
          Indent_Line ("function Create_Parser");
-         Indent_Line ("  (Trace     : not null access WisiToken.Trace'Class;");
-         Indent_Line ("   User_Data : in     
WisiToken.Syntax_Trees.User_Data_Access)");
+         Indent_Line ("  (Trace      : in WisiToken.Trace_Access;");
+         Indent_Line ("   User_Data  : in 
WisiToken.Syntax_Trees.User_Data_Access)");
          Indent_Line ("  return WisiToken.Parse.Base_Parser'Class;");
          New_Line;
       end Packrat_Process;
@@ -303,35 +284,17 @@ package body WisiToken.BNF.Output_Ada_Common is
       Put_Raw_Code (Ada_Comment, Input_Data.Raw_Code (Copyright_License));
       New_Line;
 
-      case Common_Data.Output_Language is
-      when Ada_Lang =>
-         Put_Line ("with WisiToken.Syntax_Trees;");
-
-      when Ada_Emacs_Lang =>
-         case Common_Data.Interface_Kind is
-         when Process =>
-            Put_Line ("with WisiToken.Syntax_Trees;");
-
-         when Module =>
-            Put_Line ("with Emacs_Module_Aux;");
-            Put_Line ("with emacs_module_h;");
-            Put_Line ("with Interfaces.C;");
-            Put_Line ("with WisiToken.Semantic_State;");
-         end case;
-      end case;
+      Put_Line ("with WisiToken.Syntax_Trees;");
 
       case Common_Data.Generate_Algorithm is
       when LR_Generate_Algorithm =>
-         if Input_Data.Language_Params.Error_Recover then
-            Put_Line ("with WisiToken.Parse.LR.Parser;");
-         else
-            Put_Line ("with WisiToken.Parse.LR.Parser_No_Recover;");
-         end if;
+         Put_Line ("with WisiToken.Lexer;");
+         Put_Line ("with WisiToken.Parse.LR;");
 
       when Packrat_Generate_Algorithm =>
          Put_Line ("with WisiToken.Parse;");
 
-      when External =>
+      when External | Tree_Sitter =>
          null;
       end case;
 
@@ -346,7 +309,7 @@ package body WisiToken.BNF.Output_Ada_Common is
             LR_Process;
          when Packrat_Generate_Algorithm =>
             Packrat_Process;
-         when External =>
+         when External | Tree_Sitter =>
             null;
          end case;
 
@@ -358,7 +321,7 @@ package body WisiToken.BNF.Output_Ada_Common is
                LR_Process;
             when Packrat_Generate_Algorithm =>
                Packrat_Process;
-            when External =>
+            when External | Tree_Sitter =>
                null;
             end case;
 
@@ -407,20 +370,17 @@ package body WisiToken.BNF.Output_Ada_Common is
    end Create_External_Main_Spec;
 
    procedure Create_LR_Parser_Core_1
-     (Common_Data   : in Output_Ada_Common.Common_Data;
+     (Input_Data    : in WisiToken_Grammar_Runtime.User_Data_Type;
       Generate_Data : in WisiToken.BNF.Generate_Utils.Generate_Data)
    is
       use Ada.Strings.Unbounded;
 
-      subtype Nonterminal_ID is Token_ID range
-        Generate_Data.Grammar.First_Index .. Generate_Data.Grammar.Last_Index;
-
       Table : WisiToken.Parse.LR.Parse_Table_Ptr renames Generate_Data.LR_Parse_Table;
       Line  : Unbounded_String;
 
       procedure Append (Item : in String)
       is begin
-         Line := Line & Item;
+         Append (Line, Item);
       end Append;
 
       procedure Put (Label : in String; Item : in Token_ID_Array_Natural)
@@ -444,72 +404,27 @@ package body WisiToken.BNF.Output_Ada_Common is
       end Put;
 
    begin
-      Indent_Line ("McKenzie_Param : constant McKenzie_Param_Type :=");
-      Indent_Line ("  (First_Terminal    =>" & Token_ID'Image 
(Table.McKenzie_Param.First_Terminal) & ",");
-      Indent := Indent + 3;
-      Indent_Line ("Last_Terminal     =>" & Token_ID'Image 
(Table.McKenzie_Param.Last_Terminal) & ",");
-      Indent_Line ("First_Nonterminal =>" & Token_ID'Image 
(Table.McKenzie_Param.First_Nonterminal) & ",");
-      Indent_Line ("Last_Nonterminal  =>" & Token_ID'Image 
(Table.McKenzie_Param.Last_Nonterminal) & ",");
-      Put ("Insert", Table.McKenzie_Param.Insert);
-      Put ("Delete", Table.McKenzie_Param.Delete);
-      Put ("Push_Back", Table.McKenzie_Param.Push_Back);
-      Put ("Undo_Reduce", Table.McKenzie_Param.Undo_Reduce);
-      Indent_Line
-        ("Minimal_Complete_Cost_Delta => " & Integer'Image 
(Table.McKenzie_Param.Minimal_Complete_Cost_Delta) & ",");
-      Indent_Line ("Fast_Forward => " & Integer'Image 
(Table.McKenzie_Param.Fast_Forward) & ",");
-      Indent_Line ("Matching_Begin => " & Integer'Image 
(Table.McKenzie_Param.Matching_Begin) & ",");
-      Indent_Line ("Ignore_Check_Fail  =>" & Integer'Image 
(Table.McKenzie_Param.Ignore_Check_Fail) & ",");
-      Indent_Line ("Task_Count  =>" & System.Multiprocessors.CPU_Range'Image
-                     (Table.McKenzie_Param.Task_Count) & ",");
-      Indent_Line ("Check_Limit =>" & Token_Index'Image 
(Table.McKenzie_Param.Check_Limit) & ",");
-      Indent_Line ("Check_Delta_Limit =>" & Integer'Image 
(Table.McKenzie_Param.Check_Delta_Limit) & ",");
-      Indent_Line ("Enqueue_Limit =>" & Integer'Image 
(Table.McKenzie_Param.Enqueue_Limit) & ");");
-      Indent := Indent - 3;
-      New_Line;
-
-      if Common_Data.Text_Rep then
-         Indent_Line ("function Actions return 
WisiToken.Parse.LR.Semantic_Action_Array_Arrays.Vector");
-         Indent_Line ("is begin");
-         Indent := Indent + 3;
-         Indent_Line ("return Acts : 
WisiToken.Parse.LR.Semantic_Action_Array_Arrays.Vector do");
+      if Input_Data.Language_Params.Error_Recover then
+         Indent_Line ("McKenzie_Param : constant McKenzie_Param_Type :=");
+         Indent_Line ("  (First_Terminal    =>" & Token_ID'Image 
(Table.McKenzie_Param.First_Terminal) & ",");
          Indent := Indent + 3;
+         Indent_Line ("Last_Terminal     =>" & Token_ID'Image 
(Table.McKenzie_Param.Last_Terminal) & ",");
+         Indent_Line ("First_Nonterminal =>" & Token_ID'Image 
(Table.McKenzie_Param.First_Nonterminal) & ",");
+         Indent_Line ("Last_Nonterminal  =>" & Token_ID'Image 
(Table.McKenzie_Param.Last_Nonterminal) & ",");
+         Put ("Insert", Table.McKenzie_Param.Insert);
+         Put ("Delete", Table.McKenzie_Param.Delete);
+         Put ("Push_Back", Table.McKenzie_Param.Push_Back);
+         Put ("Undo_Reduce", Table.McKenzie_Param.Undo_Reduce);
          Indent_Line
-           ("Acts.Set_First_Last (" & Trimmed_Image 
(Generate_Data.Grammar.First_Index) & ", " &
-              Trimmed_Image (Generate_Data.Grammar.Last_Index) & ");");
-
-         for I in Nonterminal_ID loop
-            declare
-               P : Productions.Instance renames Generate_Data.Grammar (I);
-            begin
-               if Generate_Data.Action_Names (P.LHS) /= null or Generate_Data.Check_Names (P.LHS) /= null then
-                  Indent_Line
-                    ("Acts (" & Trimmed_Image (P.LHS) & ").Set_First_Last (0," 
&
-                       Integer'Image (P.RHSs.Last_Index) & ");");
-
-                  for J in P.RHSs.First_Index .. P.RHSs.Last_Index loop
-                     if (Generate_Data.Action_Names (P.LHS) /= null and then
-                           Generate_Data.Action_Names (P.LHS)(J) /= null)
-                       or
-                       (Generate_Data.Check_Names (P.LHS) /= null and then
-                          Generate_Data.Check_Names (P.LHS) /= null)
-                     then
-                        Indent_Wrap
-                          ("Acts (" & Trimmed_Image (P.LHS) & ")(" & 
Trimmed_Image (J) & ") := (" &
-                             (if Generate_Data.Action_Names (P.LHS) = null 
then "null"
-                              elsif Generate_Data.Action_Names (P.LHS)(J) = 
null then "null"
-                              else Generate_Data.Action_Names (P.LHS)(J).all & 
"'Access") & ", " &
-                             (if Generate_Data.Check_Names (P.LHS) = null then 
"null"
-                              elsif Generate_Data.Check_Names (P.LHS)(J) = 
null then "null"
-                              else Generate_Data.Check_Names (P.LHS)(J).all & 
"'Access") & ");");
-                     end if;
-                  end loop;
-               end if;
-            end;
-         end loop;
-         Indent := Indent - 3;
-         Indent_Line ("end return;");
+           ("Minimal_Complete_Cost_Delta => " & Integer'Image 
(Table.McKenzie_Param.Minimal_Complete_Cost_Delta) & ",");
+         Indent_Line ("Fast_Forward => " & Integer'Image 
(Table.McKenzie_Param.Fast_Forward) & ",");
+         Indent_Line ("Matching_Begin => " & Integer'Image 
(Table.McKenzie_Param.Matching_Begin) & ",");
+         Indent_Line ("Ignore_Check_Fail  =>" & Integer'Image 
(Table.McKenzie_Param.Ignore_Check_Fail) & ",");
+         Indent_Line ("Check_Limit =>" & 
Table.McKenzie_Param.Check_Limit'Image & ",");
+         Indent_Line ("Zombie_Limit =>" & 
Table.McKenzie_Param.Zombie_Limit'Image & ",");
+         Indent_Line ("Check_Delta_Limit =>" & Integer'Image 
(Table.McKenzie_Param.Check_Delta_Limit) & ",");
+         Indent_Line ("Enqueue_Limit =>" & Integer'Image 
(Table.McKenzie_Param.Enqueue_Limit) & ");");
          Indent := Indent - 3;
-         Indent_Line ("end Actions;");
          New_Line;
       end if;
    end Create_LR_Parser_Core_1;
@@ -522,8 +437,14 @@ package body WisiToken.BNF.Output_Ada_Common is
       use WisiToken.Parse.LR;
       use Ada.Strings.Unbounded;
 
+      --  Optimize source structure for GNAT compile time; one subroutine
+      --  with thousands of "Table.States (*) := ..." takes forever to
+      --  compile (apparently depending on available memory). But hundreds
+      --  of subroutines, containing the same lines in chunks of
+      --  Lines_Per_Subr, compiles in acceptable time.
+
       Table            : WisiToken.Parse.LR.Parse_Table_Ptr renames Generate_Data.LR_Parse_Table;
-      Lines_Per_Subr   : constant := 1000;
+      Lines_Per_Subr   : constant := 500;
       Subr_Count       : Integer  := 1;
       Last_Subr_Closed : Boolean  := False;
       Line             : Unbounded_String;
@@ -533,12 +454,6 @@ package body WisiToken.BNF.Output_Ada_Common is
          Line := Line & Item;
       end Append;
    begin
-      --  Optimize source structure for GNAT compile time; one subroutine
-      --  with thousands of "Table.States (*) := ..." takes forever to
-      --  compile (apparently depending on available memory). But hundreds
-      --  of subroutines, containing the same lines in chunks of 1000,
-      --  compiles in acceptable time.
-
       Indent_Line ("declare");
       Indent := Indent + 3;
 
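The comment moved into the declarative part above describes the chunking strategy: instead of one huge statement sequence assigning every Table.States entry, the generator emits many small subprograms of at most Lines_Per_Subr lines (now 500 rather than 1000) and calls them in order, which keeps GNAT compile time and memory bounded. A rough sketch of the shape of the emitted Ada, with hypothetical state numbers and the per-state statements elided into comments:

    declare
       procedure Subr_1
       is begin
          --  first Lines_Per_Subr generated lines, e.g.
          --  "Table.States (...) := ...;" and "Add_Action (Table.States (...), ...);"
          null;
       end Subr_1;

       procedure Subr_2
       is begin
          --  next Lines_Per_Subr generated lines
          null;
       end Subr_2;
    begin
       Subr_1;
       Subr_2;
    end;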
@@ -566,26 +481,11 @@ package body WisiToken.BNF.Output_Ada_Common is
                      Action : constant Reduce_Action_Rec := Node.Actions.Item;
                   begin
                      Set_Col (Indent);
-                     Line := +"Add_Action (Table.States (" & Trimmed_Image 
(State_Index) & "), " &
-                       Symbols_Image (Table.States (State_Index)) & ", " &
-                       Image (Action.Production) & ", " &
-                       Count_Type'Image (Action.Token_Count) & ", ";
-
-                     Append
-                       ((if Generate_Data.Action_Names (Action.Production.LHS) = null then "null"
-                         elsif Generate_Data.Action_Names
-                           (Action.Production.LHS)(Action.Production.RHS) = null then "null"
-                         else Generate_Data.Action_Names
-                           (Action.Production.LHS)(Action.Production.RHS).all & "'Access"));
-                     Append (", ");
-                     Append
-                       ((if Generate_Data.Check_Names (Action.Production.LHS) = null then "null"
-                         elsif Generate_Data.Check_Names
-                           (Action.Production.LHS)(Action.Production.RHS) = null then "null"
-                         else Generate_Data.Check_Names
-                           (Action.Production.LHS)(Action.Production.RHS).all & "'Access"));
-
-                     Indent_Wrap (-Line & ");");
+                     Indent_Wrap
+                       ("Add_Action (Table.States (" & Trimmed_Image (State_Index) & "), " &
+                          Symbols_Image (Table.States (State_Index)) & ", " &
+                          Image (Action.Production) & ", " &
+                          Count_Type'Image (Action.Token_Count) & ");");
                      Line_Count := Line_Count + 1;
                      Indent     := Base_Indent;
                   end;
@@ -615,25 +515,7 @@ package body WisiToken.BNF.Output_Ada_Common is
                         end if;
                         Append (", ");
                         Append (Image (Action_Node.Item.Production) & ", ");
-                        Append (Count_Type'Image (Action_Node.Item.Token_Count) & ", ");
-                        Append
-                          ((if Generate_Data.Action_Names (Action_Node.Item.Production.LHS) = null then "null"
-                            elsif Generate_Data.Action_Names
-                              (Action_Node.Item.Production.LHS)(Action_Node.Item.Production.RHS) = null
-                            then "null"
-                            else Generate_Data.Action_Names
-                              (Action_Node.Item.Production.LHS)(Action_Node.Item.Production.RHS).all &
-                               "'Access"));
-                        Append (", ");
-                        Append
-                          ((if Generate_Data.Check_Names (Action_Node.Item.Production.LHS) = null then "null"
-                            elsif Generate_Data.Check_Names
-                              (Action_Node.Item.Production.LHS)(Action_Node.Item.Production.RHS) = null
-                            then "null"
-                            else Generate_Data.Check_Names
-                              (Action_Node.Item.Production.LHS)(Action_Node.Item.Production.RHS).all &
-                               "'Access"));
-                        Append (");");
+                        Append (Count_Type'Image (Action_Node.Item.Token_Count) & ");");
 
                      when Parse.LR.Error =>
                         raise SAL.Programmer_Error;
@@ -651,25 +533,8 @@ package body WisiToken.BNF.Output_Ada_Common is
                            Line := +"Add_Conflict (Table.States (" & 
Trimmed_Image (State_Index) & "), " &
                              Trimmed_Image (Node.Symbol) & ", ";
                            Append (Image (Action_Node.Item.Production) & ", ");
-                           Append (Count_Type'Image 
(Action_Node.Item.Token_Count) & ", ");
-                           Append
-                             ((if Generate_Data.Action_Names 
(Action_Node.Item.Production.LHS) = null then "null"
-                               elsif Generate_Data.Action_Names
-                                 
(Action_Node.Item.Production.LHS)(Action_Node.Item.Production.RHS) = null
-                               then "null"
-                               else Generate_Data.Action_Names
-                                 
(Action_Node.Item.Production.LHS)(Action_Node.Item.Production.RHS).all &
-                                  "'Access"));
-                           Append (", ");
-                           Append
-                             ((if Generate_Data.Check_Names 
(Action_Node.Item.Production.LHS) = null then "null"
-                               elsif Generate_Data.Check_Names
-                                 
(Action_Node.Item.Production.LHS)(Action_Node.Item.Production.RHS) = null
-                               then "null"
-                               else Generate_Data.Check_Names
-                                 
(Action_Node.Item.Production.LHS)(Action_Node.Item.Production.RHS).all &
-                                  "'Access"));
-                           Indent_Wrap (-Line & ");");
+                           Append (Count_Type'Image 
(Action_Node.Item.Token_Count) & ");");
+                           Indent_Wrap (-Line);
                            Line_Count := Line_Count + 1;
 
                         when others =>
@@ -744,60 +609,35 @@ package body WisiToken.BNF.Output_Ada_Common is
       Indent_Line ("end;");
    end Create_LR_Parser_Table;
 
-   procedure LR_Create_Create_Parser
-     (Input_Data    :         in     WisiToken_Grammar_Runtime.User_Data_Type;
-      Common_Data   :         in out Output_Ada_Common.Common_Data;
-      Generate_Data : aliased in     WisiToken.BNF.Generate_Utils.Generate_Data)
+   procedure LR_Create_Create_Parse_Table
+     (Input_Data           :         in     WisiToken_Grammar_Runtime.User_Data_Type;
+      Common_Data          :         in out Output_Ada_Common.Common_Data;
+      Generate_Data        : aliased in     WisiToken.BNF.Generate_Utils.Generate_Data;
+      Actions_Package_Name :         in     String)
    is
       Table : WisiToken.Parse.LR.Parse_Table_Ptr renames Generate_Data.LR_Parse_Table;
    begin
-      Indent_Line ("procedure Create_Parser");
-      case Common_Data.Interface_Kind is
-      when Process =>
-         if Input_Data.Language_Params.Error_Recover then
-            Indent_Line ("  (Parser                         :    out 
WisiToken.Parse.LR.Parser.Parser;");
-            Indent_Line ("   Language_Fixes                 : in     
WisiToken.Parse.LR.Parser.Language_Fixes_Access;");
-            Indent_Line ("   Language_Matching_Begin_Tokens : in     " &
-                           
"WisiToken.Parse.LR.Parser.Language_Matching_Begin_Tokens_Access;");
-            Indent_Line
-              ("   Language_String_ID_Set       : in     
WisiToken.Parse.LR.Parser.Language_String_ID_Set_Access;");
-         else
-            Indent_Line ("  (Parser                         :    out 
WisiToken.Parse.LR.Parser_No_Recover.Parser;");
-         end if;
-         Indent_Line ("   Trace                        : not null access 
WisiToken.Trace'Class;");
-         Indent_Start ("   User_Data                    : in     
WisiToken.Syntax_Trees.User_Data_Access");
-
-      when Module =>
-         Indent_Line ("  (Parser              :    out 
WisiToken.Parse.LR.Parser.Parser;");
-         Indent_Line ("   Env                 : in     Emacs_Env_Access;");
-         Indent_Start ("   Lexer_Elisp_Symbols : in     
Lexers.Elisp_Array_Emacs_Value");
-      end case;
+      Indent_Line ("function Create_Parse_Table");
 
       if Common_Data.Text_Rep then
-         Put_Line (";");
-         Indent_Line ("   Text_Rep_File_Name : in String)");
-      else
-         Put_Line (")");
+         Indent_Line (" (Text_Rep_File_Name : in String)");
       end if;
+      Indent_Line ("  return WisiToken.Parse.LR.Parse_Table_Ptr");
 
       Indent_Line ("is");
       Indent := Indent + 3;
 
       Indent_Line ("use WisiToken.Parse.LR;");
 
+      Create_LR_Parser_Core_1 (Input_Data, Generate_Data);
+
       if Common_Data.Text_Rep then
-         Create_LR_Parser_Core_1 (Common_Data, Generate_Data);
-         Indent_Line ("Table : constant Parse_Table_Ptr := Get_Text_Rep");
-         Indent_Line ("  (Text_Rep_File_Name, McKenzie_Param, Actions);");
+         Indent_Line ("Table : constant Parse_Table_Ptr := Get_Text_Rep 
(Text_Rep_File_Name);");
          Indent := Indent - 3;
          Indent_Line ("begin");
          Indent := Indent + 3;
 
       else
-         if Input_Data.Language_Params.Error_Recover then
-            Create_LR_Parser_Core_1 (Common_Data, Generate_Data);
-         end if;
-
          Indent_Line ("Table : constant Parse_Table_Ptr := new Parse_Table");
          Indent_Line ("  (State_First       => 0,");
          Indent := Indent + 3;
@@ -811,46 +651,34 @@ package body WisiToken.BNF.Output_Ada_Common is
          Indent := Indent - 3;
          Indent_Line ("begin");
          Indent := Indent + 3;
-         if Input_Data.Language_Params.Error_Recover then
-            Indent_Line ("Table.McKenzie_Param := McKenzie_Param;");
-         end if;
          Create_LR_Parser_Table (Input_Data, Generate_Data);
          New_Line;
       end if;
 
       if Input_Data.Language_Params.Error_Recover then
-         Indent_Line ("WisiToken.Parse.LR.Parser.New_Parser");
-      else
-         Indent_Line ("WisiToken.Parse.LR.Parser_No_Recover.New_Parser");
+         Indent_Line ("Table.Error_Recover_Enabled := True;");
+         Indent_Line ("Table.McKenzie_Param := McKenzie_Param;");
       end if;
-      Indent_Line ("  (Parser,");
-      case Common_Data.Interface_Kind is
-      when Process =>
-         Indent_Line ("   Trace,");
-         Indent_Line ("   Lexer.New_Lexer (Trace.Descriptor),");
-         Indent_Line ("   Table,");
-         if Input_Data.Language_Params.Error_Recover then
-            Indent_Line ("   Language_Fixes,");
-            Indent_Line ("   Language_Matching_Begin_Tokens,");
-            Indent_Line ("   Language_String_ID_Set,");
-         end if;
-         Indent_Line ("   User_Data,");
-         Indent_Line ("   Max_Parallel         => 15,");
-         Indent_Line ("   Terminate_Same_State => True);");
+      Indent_Line ("Table.Max_Parallel :=" & Table.Max_Parallel'Image & ";");
 
-      when Module =>
-         Indent_Line ("   Lexer.New_Lexer (Env, Lexer_Elisp_Symbols),");
-         Indent_Line ("   Table, Max_Parallel => 15, Terminate_Same_State => 
True);");
+      Indent_Line ("return Table;");
 
-      end case;
       Indent := Indent - 3;
-      Indent_Line ("end Create_Parser;");
-   end LR_Create_Create_Parser;
+      Indent_Line ("end Create_Parse_Table;");
+      New_Line;
+
+      Indent_Line ("function Create_Lexer (Trace : in WisiToken.Trace_Access) 
return WisiToken.Lexer.Handle");
+      Indent_Line ("is begin");
+      Indent_Line ("   return Lexer.New_Lexer (Trace, " & Actions_Package_Name 
& ".Descriptor'Access);");
+      Indent_Line ("end Create_Lexer;");
+      New_Line;
+   end LR_Create_Create_Parse_Table;
 
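With this refactoring the generated LR main package no longer constructs a parser itself: it exposes Create_Parse_Table, Create_Productions and Create_Lexer (declared by LR_Process above), and the gen_run_wisi_lr_parse / gen_emacs_wisi_lr_parse generics listed at the top of this patch wire the three results into a parser. A minimal sketch of what a client sees, assuming a hypothetical generated package My_Lang_Main, a non-text_rep build, and a Trace : WisiToken.Trace_Access already in hand:

    declare
       Table       : constant WisiToken.Parse.LR.Parse_Table_Ptr :=
         My_Lang_Main.Create_Parse_Table;
       Productions : constant WisiToken.Syntax_Trees.Production_Info_Trees.Vector :=
         My_Lang_Main.Create_Productions;
       Lex         : constant WisiToken.Lexer.Handle :=
         My_Lang_Main.Create_Lexer (Trace);
    begin
       --  Hand Table, Productions and Lex to a WisiToken.Parse.LR parser;
       --  the Gen_*_Run instantiations in this patch do exactly that.
       null;
    end;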
    procedure Packrat_Create_Create_Parser
-     (Common_Data   :         in out Output_Ada_Common.Common_Data;
-      Generate_Data : aliased in     WisiToken.BNF.Generate_Utils.Generate_Data;
-      Packrat_Data  :         in     WisiToken.Generate.Packrat.Data)
+     (Actions_Package_Name :         in     String;
+      Common_Data          :         in out Output_Ada_Common.Common_Data;
+      Generate_Data        : aliased in     WisiToken.BNF.Generate_Utils.Generate_Data;
+      Packrat_Data         :         in     WisiToken.Generate.Packrat.Data)
    is
       use Ada.Strings.Unbounded;
 
@@ -858,8 +686,8 @@ package body WisiToken.BNF.Output_Ada_Common is
       Need_Bar : Boolean := True;
    begin
       Indent_Line ("function Create_Parser");
-      Indent_Line ("  (Trace     : not null access WisiToken.Trace'Class;");
-      Indent_Line ("   User_Data : in     
WisiToken.Syntax_Trees.User_Data_Access)");
+      Indent_Line ("  (Trace      : in WisiToken.Trace_Access;");
+      Indent_Line ("   User_Data  : in 
WisiToken.Syntax_Trees.User_Data_Access)");
       Indent_Line ("  return WisiToken.Parse.Base_Parser'Class");
 
       case Packrat_Generate_Algorithm'(Common_Data.Generate_Algorithm) is
@@ -868,8 +696,8 @@ package body WisiToken.BNF.Output_Ada_Common is
          Indent := Indent + 3;
          Indent_Line ("return Parser : 
WisiToken.Parse.Packrat.Generated.Parser do");
          Indent := Indent + 3;
-         Indent_Line ("Parser.Trace := Trace;");
-         Indent_Line ("Parser.Lexer := Lexer.New_Lexer (Trace.Descriptor);");
+         Indent_Line ("Parser.Tree.Lexer := Lexer.New_Lexer (Trace, " & 
Actions_Package_Name & ".Descriptor'Access);");
+         Indent_Line ("Parser.Productions := Create_Productions;");
          Indent_Line ("Parser.User_Data := User_Data;");
          Indent_Line ("Parser.Parse_WisiToken_Accept := 
Parse_wisitoken_accept_1'Access;");
          Indent := Indent - 3;
@@ -912,9 +740,10 @@ package body WisiToken.BNF.Output_Ada_Common is
          WisiToken.BNF.Generate_Grammar (Generate_Data.Grammar, Generate_Data.Action_Names.all);

          Indent_Line ("return WisiToken.Parse.Packrat.Procedural.Create");
-         Indent_Line
-           ("  (Grammar, Direct_Left_Recursive, " & Trimmed_Image (Generate_Data.Descriptor.Accept_ID) &
-              ", Trace, Lexer.New_Lexer (Trace.Descriptor), User_Data);");
+         Indent_Line ("  (Grammar, Direct_Left_Recursive, " & Trimmed_Image (Generate_Data.Descriptor.Accept_ID) &
+                        ", Lexer.New_Lexer");
+         Indent_Line ("     (Trace, " & Actions_Package_Name & ".Descriptor'Access),");
+         Indent_Line ("   Create_Productions, User_Data);");
       end case;
       Indent := Indent - 3;
       Indent_Line ("end Create_Parser;");
@@ -939,7 +768,82 @@ package body WisiToken.BNF.Output_Ada_Common is
       Indent_Line ("end Create_Grammar;");
    end External_Create_Create_Grammar;
 
-   procedure Create_re2c
+   procedure Create_Create_Productions
+     (Generate_Data : in WisiToken.BNF.Generate_Utils.Generate_Data)
+   is
+      subtype Nonterminal_ID is Token_ID range Generate_Data.Grammar.First_Index .. Generate_Data.Grammar.Last_Index;
+
+      Actions_Present : Boolean := False;
+   begin
+      Indent_Line ("function Create_Productions return 
WisiToken.Syntax_Trees.Production_Info_Trees.Vector");
+      Indent_Line ("is begin");
+      Indent := Indent + 3;
+      Indent_Line ("return Result : 
WisiToken.Syntax_Trees.Production_Info_Trees.Vector do");
+      Indent := Indent + 3;
+      Indent_Line
+        ("Result.Set_First_Last (" &
+           Trimmed_Image (Generate_Data.Grammar.First_Index) & ", " &
+           Trimmed_Image (Generate_Data.Grammar.Last_Index) & ");");
+
+      for I in Nonterminal_ID loop
+         declare
+            P : Productions.Instance renames Generate_Data.Grammar (I);
+         begin
+            if P.Optimized_List then
+               Indent_Line ("Result (" & Trimmed_Image (P.LHS) & 
").Optimized_List := True;");
+               Actions_Present := True;
+            end if;
+
+            if Generate_Data.Check_Names (P.LHS) /= null or
+              Generate_Data.Action_Names (P.LHS) /= null
+            then
+               Indent_Line
+                 ("Result (" & Trimmed_Image (P.LHS) & ").RHSs.Set_First_Last 
(" &
+                    Trimmed_Image (P.RHSs.First_Index) & ", " &
+                    Trimmed_Image (P.RHSs.Last_Index) & ");");
+
+               for J in P.RHSs.First_Index .. P.RHSs.Last_Index loop
+                  if Generate_Data.Check_Names (P.LHS) = null then
+                     Indent_Line
+                       ("Result (" & Trimmed_Image (P.LHS) & ").RHSs (" & 
Trimmed_Image (J) &
+                          ").In_Parse_Action := null;");
+                  else
+                     Actions_Present := True;
+                     Indent_Line
+                       ("Result (" & Trimmed_Image (P.LHS) & ").RHSs (" & 
Trimmed_Image (J) & ").In_Parse_Action := " &
+                          (if Generate_Data.Check_Names (P.LHS)(J) = null then 
"null"
+                           else Generate_Data.Check_Names (P.LHS)(J).all & 
"'Access") &
+                          ";");
+                  end if;
+                  if Generate_Data.Action_Names (P.LHS) = null then
+                     Indent_Line
+                       ("Result (" & Trimmed_Image (P.LHS) & ").RHSs (" & 
Trimmed_Image (J) &
+                          ").Post_Parse_Action := null;");
+                  else
+                     Actions_Present := True;
+                     Indent_Line
+                       ("Result (" & Trimmed_Image (P.LHS) & ").RHSs (" & 
Trimmed_Image (J) &
+                          ").Post_Parse_Action := " &
+                          (if Generate_Data.Action_Names (P.LHS)(J) = null 
then "null"
+                           else Generate_Data.Action_Names (P.LHS)(J).all & 
"'Access") &
+                          ";");
+                  end if;
+               end loop;
+            end if;
+         end;
+      end loop;
+      if not Actions_Present then
+         Indent_Line ("null;");
+      end if;
+
+      Indent := Indent - 3;
+      Indent_Line ("end return;");
+      Indent := Indent - 3;
+      Indent_Line ("end Create_Productions;");
+      New_Line;
+   end Create_Create_Productions;
+
+   procedure Create_re2c_File
      (Input_Data            :         in WisiToken_Grammar_Runtime.User_Data_Type;
       Tuple                 :         in Generate_Tuple;
       Generate_Data         : aliased in WisiToken.BNF.Generate_Utils.Generate_Data;
@@ -954,7 +858,7 @@ package body WisiToken.BNF.Output_Ada_Common is
       Set_Output (File);
       Indent := 1;
 
-      Put_File_Header (C_Comment, " -*- mode: C -*-", Use_Tuple => True, Tuple => Tuple);
+      Put_File_Header (C_Comment, "mode: C", Use_Tuple => True, Tuple => Tuple);
       Put_Raw_Code (C_Comment, Input_Data.Raw_Code (Copyright_License));
       New_Line;
 
@@ -1012,9 +916,9 @@ package body WisiToken.BNF.Output_Ada_Common is
       Indent_Line ("result->byte_token_start  = input;");
       Indent_Line ("result->char_pos          = 1; /* match 
WisiToken.Buffer_Region */");
       Indent_Line ("result->char_token_start  = 1;");
-      Indent_Line ("result->line              = (*result->cursor == 0x0A) ? 2 
: 1;");
+      Indent_Line ("result->line              = 1;");
       Indent_Line ("result->line_token_start  = result->line;");
-      Indent_Line ("result->verbosity         = verbosity;");
+      Indent_Line ("result->verbosity         = 0;");
       Indent_Line ("return result;");
       Indent := Indent - 3;
       Indent_Line ("}");
@@ -1036,7 +940,29 @@ package body WisiToken.BNF.Output_Ada_Common is
       Indent := Indent + 3;
       Indent_Line ("lexer->cursor   = lexer->buffer;");
       Indent_Line ("lexer->char_pos = 1;");
-      Indent_Line ("lexer->line     = (*lexer->cursor == 0x0A) ? 2 : 1;");
+      Indent_Line ("lexer->line     = 1;");
+      Indent := Indent - 3;
+      Indent_Line ("}");
+      New_Line;
+
+      Indent_Line ("void");
+      Indent_Line (Output_File_Name_Root & "_set_verbosity");
+      Indent_Line ("   (wisi_lexer* lexer, int verbosity)");
+      Indent_Line ("{");
+      Indent := Indent + 3;
+      Indent_Line ("lexer->verbosity = verbosity;");
+      Indent := Indent - 3;
+      Indent_Line ("}");
+      New_Line;
+
+      Indent_Line ("void");
+      Indent_Line (Output_File_Name_Root & "_set_position");
+      Indent_Line ("   (wisi_lexer* lexer, size_t byte_position, size_t 
char_position, int line)");
+      Indent_Line ("{");
+      Indent := Indent + 3;
+      Indent_Line ("lexer->cursor   = lexer->buffer + byte_position - 1;");
+      Indent_Line ("lexer->char_pos = char_position;");
+      Indent_Line ("lexer->line     = line;");
       Indent := Indent - 3;
       Indent_Line ("}");
       New_Line;
@@ -1070,7 +996,9 @@ package body WisiToken.BNF.Output_Ada_Common is
       Indent_Line ("{");
       Indent := Indent + 3;
       Indent_Line ("if (lexer->cursor <= lexer->buffer_last)");
-      Indent_Line ("   ++lexer->cursor;");
+      Indent_Line ("{");
+      Indent := Indent + 3;
+      Indent_Line ("++lexer->cursor;");
       Indent_Line ("if (lexer->cursor <= lexer->buffer_last)");
       Indent_Line ("{");
       Indent_Line ("   /* UFT-8 encoding: 
https://en.wikipedia.org/wiki/UTF-8#Description */");
@@ -1081,8 +1009,10 @@ package body WisiToken.BNF.Output_Ada_Common is
       Indent_Line ("     {/* byte 2, 3 or 4 of multi-byte UTF-8 char */");
       Indent_Line ("     }");
       Indent_Line ("   else");
-      Indent_Line ("     ++lexer->char_pos;");
-      Indent_Line ("   if (*lexer->cursor == 0x0A) ++lexer->line;");
+      Indent_Line ("     lexer->char_pos++;");
+      Indent_Line ("} else ");
+      Indent_Line ("   lexer->char_pos++;");
+      Indent := Indent - 3;
       Indent_Line ("}");
       Indent := Indent - 3;
       Indent_Line ("}");
@@ -1099,13 +1029,22 @@ package body WisiToken.BNF.Output_Ada_Common is
                      "lexer->line = lexer->context_line");
       New_Line;
 
-      if Is_In (Input_Data.Tokens.Tokens, "delimited-text") then
+      if Is_In (Input_Data.Tokens.Tokens, "delimited-text") or
+        Is_In (Input_Data.Tokens.Non_Grammar, "delimited-text")
+      then
          Indent_Line ("static void skip_to(wisi_lexer* lexer, char* target)");
          Indent_Line ("{");
-         Indent_Line ("  int i;");
+         Indent_Line ("  int i, j;");
          New_Line;
+         Indent_Line ("  // Count all new-lines contained in the skip region. 
Caller has ");
+         Indent_Line ("  // skipped the start delimiter; if lexer->cursor is a 
new-line it");
+         Indent_Line ("  // has not yet been counted. Start and end delimiters 
do not contain new-line.");
          Indent_Line ("  while (lexer->cursor <= lexer->buffer_last)");
          Indent_Line ("    {");
+         Indent_Line ("      if (*lexer->cursor == 0x0A)");
+         Indent_Line ("      {");
+         Indent_Line ("        lexer->line++;");
+         Indent_Line ("      }");
          Indent_Line ("      if (*lexer->cursor == target[0])");
          Indent_Line ("      {");
          Indent_Line ("        i = 0;");
@@ -1117,7 +1056,7 @@ package body WisiToken.BNF.Output_Ada_Common is
          New_Line;
          Indent_Line ("        if (0 == target[i])");
          Indent_Line ("          {");
-         Indent_Line ("            for (i = 0; 0 != target[i]; i++)");
+         Indent_Line ("            for (j = 0; j < i; j++)");
          Indent_Line ("               skip(lexer);");
          Indent_Line ("            break;");
          Indent_Line ("          }");
@@ -1137,7 +1076,8 @@ package body WisiToken.BNF.Output_Ada_Common is
       Indent_Line ("   size_t* byte_length,");
       Indent_Line ("   size_t* char_position,");
       Indent_Line ("   size_t* char_length,");
-      Indent_Line ("   int*    line_start)");
+      Indent_Line ("   int*    line_start,");
+      Indent_Line ("   int*    line_length)");
       Indent_Line ("{");
       Indent := Indent + 3;
 
@@ -1148,11 +1088,14 @@ package body WisiToken.BNF.Output_Ada_Common is
       Indent_Line ("{");
       Indent := Indent + 3;
       Indent_Line ("*id            =" & WisiToken.Token_ID'Image 
(Generate_Data.Descriptor.EOI_ID) & ";");
-      Indent_Line ("*byte_position = lexer->buffer_last - lexer->buffer + 1;");
+      --  EOI position.last = last char of input, so byte_region (root) = all 
of input (in packrat parse)
+      --  EOI position.first = last + 1 => null region.
+      Indent_Line ("*byte_position = lexer->buffer_last - lexer->buffer + 2;");
       Indent_Line ("*byte_length   = 0;");
-      Indent_Line ("*char_position = lexer->char_token_start;");
+      Indent_Line ("*char_position = lexer->char_pos;");
       Indent_Line ("*char_length   = 0;");
       Indent_Line ("*line_start    = lexer->line;");
+      Indent_Line ("*line_length   = 0;");
       Indent_Line ("return status;");
       Indent := Indent - 3;
       Indent_Line ("}");
@@ -1160,10 +1103,7 @@ package body WisiToken.BNF.Output_Ada_Common is
 
       Indent_Line ("lexer->byte_token_start = lexer->cursor;");
       Indent_Line ("lexer->char_token_start = lexer->char_pos;");
-      Indent_Line ("if (*lexer->cursor == 0x0A)");
-      Indent_Line ("   lexer->line_token_start = lexer->line-1;");
-      Indent_Line ("else");
-      Indent_Line ("   lexer->line_token_start = lexer->line;");
+      Indent_Line ("lexer->line_token_start = lexer->line;");
       New_Line;
 
       Indent_Line ("while (*id == -1 && status == 0)");
@@ -1176,7 +1116,7 @@ package body WisiToken.BNF.Output_Ada_Common is
       New_Line;
 
       --  Regexps used in definitions
-      for Pair of Input_Data.Tokens.re2c_Regexps loop
+      for Pair of Input_Data.Tokens.Lexer_Regexps loop
          Indent_Line (-Pair.Name & " = " & (-Pair.Value) & ";");
       end loop;
       New_Line;
@@ -1184,25 +1124,51 @@ package body WisiToken.BNF.Output_Ada_Common is
       --  definitions
       for I in All_Tokens (Generate_Data).Iterate (Non_Grammar => True, 
Nonterminals => False) loop
 
-         if 0 /= Index (Source => Value (I), Pattern => "/") then
-            --  trailing context syntax; forbidden in definitions
+         if Kind (Generate_Data, I) = "comment-new-line" then
+            --  This must be before the check for "trailing context syntax", to
+            --  handle Java comments.
+            Indent_Line
+              (Name (Generate_Data, I) & " = " & Value (Generate_Data, I) &
+                 "[^\x0a\x04]*([\x0a]|[\x0d][\x0a]|[\x04]) ;");
+
+         elsif Kind (Generate_Data, I) = "comment-one-line" then
+            declare
+               Open  : constant String := Value (Generate_Data, I);
+               Close : constant String := Repair_Image (Generate_Data, I);
+            begin
+               --  Open and Close are both strings.
+               if Close'Length = 3 then
+                  --  Here we handle the special case of Close being a single 
character.
+                  Indent_Line
+                    (Name (Generate_Data, I) & " = " & Open & " [^\x0a\x04" & 
Close (2) & "]* " & Close & ";");
+               else
+                  raise SAL.Not_Implemented;
+                  --  IMPROVEME: similar to delimited-text, but exclude 
new-line.
+               end if;
+            end;
+
+         elsif 0 /= Index (Source => Value (Generate_Data, I), Pattern => "/") 
then
+            --  Trailing context syntax; forbidden in definitions
             null;
 
-         elsif Kind (I) = "EOI" then
-            Indent_Line (Name (I) & " = [\x04];");
+         elsif Kind (Generate_Data, I) = "EOI" then
+            Indent_Line (Name (Generate_Data, I) & " = [\x04];");
 
-         elsif Kind (I) = "delimited-text" then
-            --  not declared in definitions
+         elsif Kind (Generate_Data, I) = "delimited-text" then
+            --  Not declared in definitions
             null;
 
-         elsif Kind (I) = "keyword" and 
Input_Data.Language_Params.Case_Insensitive then
+         elsif Kind (Generate_Data, I) = "keyword" and 
Input_Data.Language_Params.Case_Insensitive then
             --  This assumes re2c regular expression syntax, where single quote
             --  means case insensitive.
-            Indent_Line (Name (I) & " = '" & Strip_Quotes (Value (I)) & "';");
+            Indent_Line (Name (Generate_Data, I) & " = '" & Strip_Quotes 
(Value (Generate_Data, I)) & "';");
+
+         elsif Kind (Generate_Data, I) = "new-line" then
+            Indent_Line (Name (Generate_Data, I) & " = [\x0a]|[\x0d][\x0a];");
 
          else
-            --  Other kinds have values that are regular expressions, in re2c 
syntax
-            Indent_Line (Name (I) & " = " & Value (I) & ";");
+            --  Other kinds have values that are regular expressions, in lexer 
syntax
+            Indent_Line (Name (Generate_Data, I) & " = " & Value 
(Generate_Data, I) & ";");
          end if;
       end loop;
       New_Line;
@@ -1210,28 +1176,41 @@ package body WisiToken.BNF.Output_Ada_Common is
       --  lexer rules
       for I in All_Tokens (Generate_Data).Iterate (Non_Grammar => True, 
Nonterminals => False) loop
          declare
-            Val : constant String := Value (I);
+            Val : constant String := Value (Generate_Data, I);
          begin
 
-            if Kind (I) = "non-reporting" then
-               Indent_Line (Name (I) & " { lexer->byte_token_start = 
lexer->cursor;");
+            if Kind (Generate_Data, I) = "non-reporting" then
+               Indent_Line (Name (Generate_Data, I) & " { 
lexer->byte_token_start = lexer->cursor;");
                Indent_Line ("    lexer->char_token_start = lexer->char_pos;");
-               Indent_Line ("    if (*lexer->cursor == 0x0A)");
-               Indent_Line ("       lexer->line_token_start = lexer->line-1;");
-               Indent_Line ("    else");
-               Indent_Line ("       lexer->line_token_start = lexer->line;");
+               Indent_Line ("    lexer->line_token_start = lexer->line;");
                Indent_Line ("    continue; }");
 
-            elsif Kind (I) = "delimited-text" then
+            elsif Kind (Generate_Data, I) = "delimited-text" then
+               Indent_Line
+                 (Val & " {*id =" & WisiToken.Token_ID'Image (ID (I)) &
+                    "; skip_to(lexer, " & Repair_Image (Generate_Data, I) & 
"); continue;}");
+
+            elsif Kind (Generate_Data, I) = "new-line"
+            then
+               Indent_Line
+                 (Name (Generate_Data, I) &
+                    " {*id =" & WisiToken.Token_ID'Image (ID (I)) & "; 
lexer->line++; continue;}");
+
+            elsif Kind (Generate_Data, I) = "comment-one-line" or
+              Kind (Generate_Data, I) = "comment-new-line"
+            then
+               --  Comments can be terminated by new_line or EOI
                Indent_Line
-                    (Val & " {*id = " & WisiToken.Token_ID'Image (ID (I)) &
-                       "; skip_to(lexer, " & Repair_Image (I) & "); 
continue;}");
+                 (Name (Generate_Data, I) &
+                    " {*id =" & WisiToken.Token_ID'Image (ID (I)) &
+                    "; if (lexer->cursor[-1] == 0x0a || (lexer->cursor[-1] == 
0x0d && lexer->cursor[-2] == 0x0a))" &
+                    " lexer->line++; continue;}");
 
             elsif 0 /= Index (Source => Val, Pattern => "/") then
-               Indent_Line (Val & " {*id = " & WisiToken.Token_ID'Image (ID 
(I)) & "; continue;}");
+               Indent_Line (Val & " {*id =" & WisiToken.Token_ID'Image (ID 
(I)) & "; continue;}");
 
             else
-               Indent_Line (Name (I) & " {*id = " & WisiToken.Token_ID'Image 
(ID (I)) & "; continue;}");
+               Indent_Line (Name (Generate_Data, I) & " {*id =" & 
WisiToken.Token_ID'Image (ID (I)) & "; continue;}");
             end if;
          end;
       end loop;
@@ -1250,6 +1229,7 @@ package body WisiToken.BNF.Output_Ada_Common is
       Indent_Line ("*char_position = lexer->char_token_start;");
       Indent_Line ("*char_length   = lexer->char_pos - 
lexer->char_token_start;");
       Indent_Line ("*line_start    = lexer->line_token_start;");
+      Indent_Line ("*line_length   = lexer->line - lexer->line_token_start;");
       Indent_Line ("return status;");
       Indent_Line ("}");
       Indent := Indent - 3;
@@ -1277,13 +1257,12 @@ package body WisiToken.BNF.Output_Ada_Common is
 
          Indent_Line ("function New_Lexer");
          Indent_Line ("  (Buffer    : in System.Address;");
-         Indent_Line ("   Length    : in Interfaces.C.size_t;");
-         Indent_Line ("   Verbosity : in Interfaces.C.int)");
+         Indent_Line ("   Length    : in Interfaces.C.size_t)");
          Indent_Line ("  return System.Address");
          Indent_Line ("with Import        => True,");
          Indent_Line ("     Convention    => C,");
          Indent_Line ("     External_Name => """ & Output_File_Name_Root & 
"_new_lexer"";");
-         Indent_Line ("--  Create the lexer object, passing it the full text 
to process.");
+         Indent_Line ("--  Create the lexer object, passing it the text 
buffer.");
          New_Line;
          Indent_Line ("procedure Free_Lexer (Lexer : in out System.Address)");
          Indent_Line ("with Import        => True,");
@@ -1298,6 +1277,23 @@ package body WisiToken.BNF.Output_Ada_Common is
          Indent_Line ("     External_Name => """ & Output_File_Name_Root & 
"_reset_lexer"";");
          New_Line;
 
+         Indent_Line ("procedure Set_Verbosity");
+         Indent_Line ("  (Lexer     : in System.Address;");
+         Indent_Line ("   Verbosity : in Interfaces.C.int)");
+         Indent_Line ("with Import        => True,");
+         Indent_Line ("     Convention    => C,");
+         Indent_Line ("     External_Name => """ & Output_File_Name_Root & 
"_set_verbosity"";");
+
+         Indent_Line ("procedure Set_Position");
+         Indent_Line ("  (Lexer         : in System.Address;");
+         Indent_Line ("   Byte_Position : in Interfaces.C.size_t;");
+         Indent_Line ("   Char_Position : in Interfaces.C.size_t;");
+         Indent_Line ("   Line          : in Interfaces.C.int)");
+         Indent_Line ("with Import        => True,");
+         Indent_Line ("     Convention    => C,");
+         Indent_Line ("     External_Name => """ & Output_File_Name_Root & 
"_set_position"";");
+         New_Line;
+
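[Hypothetical usage sketch, not part of the commit: with Verbosity no longer a New_Lexer parameter, Ada code using these imports would set it separately; the object names below are invented.]

    Lexer_Ptr := New_Lexer (Buffer'Address, Interfaces.C.size_t (Buffer'Length));
    Set_Verbosity (Lexer_Ptr, Interfaces.C.int (Trace_Level));
    Set_Position (Lexer_Ptr, Byte_Position => 1, Char_Position => 1, Line => 1);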
          Indent_Line ("function Next_Token");
          Indent_Line ("  (Lexer         : in     System.Address;");
          Indent_Line ("   ID            :    out WisiToken.Token_ID;");
@@ -1305,7 +1301,8 @@ package body WisiToken.BNF.Output_Ada_Common is
          Indent_Line ("   Byte_Length   :    out Interfaces.C.size_t;");
          Indent_Line ("   Char_Position :    out Interfaces.C.size_t;");
          Indent_Line ("   Char_Length   :    out Interfaces.C.size_t;");
-         Indent_Line ("   Line_Start    :    out Interfaces.C.int)");
+         Indent_Line ("   Line_Start    :    out Interfaces.C.int;");
+         Indent_Line ("   Line_Length   :    out Interfaces.C.int)");
          Indent_Line ("  return Interfaces.C.int");
          Indent_Line ("with Import        => True,");
          Indent_Line ("     Convention    => C,");
@@ -1317,7 +1314,664 @@ package body WisiToken.BNF.Output_Ada_Common is
          Set_Output (Standard_Output);
          Close (File);
       end;
-   end Create_re2c;
+   end Create_re2c_File;
+
+   procedure Create_re2c_Lexer
+     (Generate_Data         : aliased in 
WisiToken.BNF.Generate_Utils.Generate_Data;
+      Output_File_Name_Root :         in String)
+   is
+      use WisiToken.BNF.Generate_Utils;
+
+      New_Line_Count : Integer := 0;
+      Block_Count    : Integer := 0;
+      Need_Separator : Boolean := False;
+   begin
+      for I in All_Tokens (Generate_Data).Iterate
+        (Non_Grammar  => True,
+         Nonterminals => False,
+         Include_SOI  => False)
+      loop
+         if Kind (Generate_Data, I) = "comment-new-line" or
+           Kind (Generate_Data, I) = "comment-one-line" or
+           Kind (Generate_Data, I) = "string-double-one-line" or
+           Kind (Generate_Data, I) = "string-single-one-line"
+           --  A comment-one-line or one-line string does not always contain a
+           --  new_line, but the preconditions in WisiToken.Lexer guarantee it
+           --  does if we ask for Line_Begin_Char_Pos from one.
+         then
+            New_Line_Count := @ + 1;
+            Block_Count  := @ + 1;
+
+         elsif Kind (Generate_Data, I) = "new-line" then
+            New_Line_Count := @ + 1;
+
+         elsif Kind (Generate_Data, I) = "string-double" or
+           Kind (Generate_Data, I) = "string-single" or
+           Kind (Generate_Data, I) = "delimited-text"
+         then
+            Block_Count := @ + 1;
+         end if;
+      end loop;
+
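[Illustration, not in the commit: for a hypothetical grammar whose non-grammar tokens are NEW_LINE (kind new-line), COMMENT (comment-new-line) and STRING_LITERAL (string-double-one-line), this loop ends with New_Line_Count = 3 and Block_Count = 2, since the one-line comment and string count toward both totals.]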
+      Indent_Line ("function Is_Block_Delimited (ID : in WisiToken.Token_ID) 
return Boolean");
+      Indent_Line ("is begin");
+      Indent := @ + 3;
+      Indent_Line ("case To_Token_Enum (ID) is");
+      if Block_Count > 0 then
+         Indent_Line ("when");
+         Need_Separator := False;
+         Indent := @ + 3;
+
+         for I in All_Tokens (Generate_Data).Iterate
+           (Non_Grammar  => True,
+            Nonterminals => False,
+            Include_SOI  => False)
+         loop
+            if Kind (Generate_Data, I) = "comment-new-line" or
+              Kind (Generate_Data, I) = "comment-one-line" or
+              Kind (Generate_Data, I) = "string-double-one-line" or
+              Kind (Generate_Data, I) = "string-single-one-line" or
+              Kind (Generate_Data, I) = "string-double" or
+              Kind (Generate_Data, I) = "string-single" or
+              Kind (Generate_Data, I) = "delimited-text"
+            then
+               if Need_Separator then
+                  Put_Line (" |");
+               else
+                  Need_Separator := True;
+               end if;
+               Indent_Start (Name (Generate_Data, I) & "_ID");
+            end if;
+         end loop;
+         Put_Line (" => return True;");
+         Indent := @ - 3;
+      end if;
+
+      Indent_Line ("when others => return False;");
+      Indent_Line ("end case;");
+      Indent := @ - 3;
+      Indent_Line ("end Is_Block_Delimited;");
+      New_Line;
+
+      Indent_Line ("function Same_Block_Delimiters (ID : in 
WisiToken.Token_ID) return Boolean");
+      Indent_Line ("is begin");
+      Indent := @ + 3;
+      Indent_Line ("case To_Token_Enum (ID) is");
+      if Block_Count > 0 then
+         for I in All_Tokens (Generate_Data).Iterate
+           (Non_Grammar  => True,
+            Nonterminals => False,
+            Include_SOI  => False)
+         loop
+            if Kind (Generate_Data, I) = "string-double-one-line" or
+              Kind (Generate_Data, I) = "string-single-one-line" or
+              Kind (Generate_Data, I) = "string-double" or
+              Kind (Generate_Data, I) = "string-single"
+            then
+               Indent_Line ("when " & Name (Generate_Data, I) & "_ID => return 
True;");
+
+            elsif Kind (Generate_Data, I) = "comment-new-line" or
+              Kind (Generate_Data, I) = "comment-one-line" or
+              Kind (Generate_Data, I) = "delimited-text"
+            then
+               Indent_Line ("when " & Name (Generate_Data, I) & "_ID => return 
False;");
+            end if;
+         end loop;
+      end if;
+
+      Indent_Line ("when others => return False;");
+      Indent_Line ("end case;");
+      Indent := @ - 3;
+      Indent_Line ("end Same_Block_Delimiters;");
+      New_Line;
+
+      Indent_Line ("function Escape_Delimiter_Doubled (ID : in 
WisiToken.Token_ID) return Boolean");
+      Indent_Line ("is begin");
+      Indent := @ + 3;
+      Indent_Line ("case To_Token_Enum (ID) is");
+      if Block_Count > 0 then
+         for I in All_Tokens (Generate_Data).Iterate
+           (Non_Grammar  => True,
+            Nonterminals => False,
+            Include_SOI  => False)
+         loop
+            if Generate_Data.Tokens.Escape_Delimiter_Doubled.Contains (Name 
(Generate_Data, I)) then
+               Indent_Line ("when " & Name (Generate_Data, I) & "_ID => return 
True;");
+            end if;
+         end loop;
+      end if;
+
+      Indent_Line ("when others => return False;");
+      Indent_Line ("end case;");
+      Indent := @ - 3;
+      Indent_Line ("end Escape_Delimiter_Doubled;");
+      New_Line;
+
+      Indent_Line ("function Start_Delimiter_Length (ID : in 
WisiToken.Token_ID) return Integer");
+      Indent_Line ("is begin");
+      Indent := @ + 3;
+      Indent_Line ("case To_Token_Enum (ID) is");
+      if Block_Count > 0 then
+         for I in All_Tokens (Generate_Data).Iterate
+           (Non_Grammar  => True,
+            Nonterminals => False,
+            Include_SOI  => False)
+         loop
+            if Kind (Generate_Data, I) = "comment-new-line" or
+              Kind (Generate_Data, I) = "comment-one-line" or
+              Kind (Generate_Data, I) = "delimited-text"
+            then
+               Indent_Line
+                 ("when " & Name (Generate_Data, I) & "_ID => return" &
+                    Integer'Image (Utils.Strip_Quotes (Value (Generate_Data, 
I))'Length) & ";");
+
+            elsif Kind (Generate_Data, I) = "string-double-one-line" or
+              Kind (Generate_Data, I) = "string-single-one-line" or
+              Kind (Generate_Data, I) = "string-double" or
+              Kind (Generate_Data, I) = "string-single"
+            then
+               Indent_Line ("when " & Name (Generate_Data, I) & "_ID => return 
1;");
+
+            end if;
+         end loop;
+      end if;
+
+      Indent_Line ("when others => raise SAL.Programmer_Error; return 0;");
+      Indent_Line ("end case;");
+      Indent := @ - 3;
+      Indent_Line ("end Start_Delimiter_Length;");
+      New_Line;
+
+      Indent_Line ("function End_Delimiter_Length (ID : in WisiToken.Token_ID) 
return Integer");
+      Indent_Line ("is begin");
+      Indent := @ + 3;
+      Indent_Line ("case To_Token_Enum (ID) is");
+      if Block_Count > 0 then
+         Indent_Line ("when");
+         Need_Separator := False;
+         Indent := @ + 3;
+
+         for I in All_Tokens (Generate_Data).Iterate
+           (Non_Grammar  => True,
+            Nonterminals => False,
+            Include_SOI  => False)
+         loop
+            if Kind (Generate_Data, I) = "comment-new-line" or
+              Kind (Generate_Data, I) = "string-double-one-line" or
+              Kind (Generate_Data, I) = "string-single-one-line" or
+              Kind (Generate_Data, I) = "string-double" or
+              Kind (Generate_Data, I) = "string-single"
+            then
+               if Need_Separator then
+                  Put_Line (" |");
+               else
+                  Need_Separator := True;
+               end if;
+               Indent_Start (Name (Generate_Data, I) & "_ID");
+            end if;
+         end loop;
+         Put_Line (" => return 1;");
+         Indent := @ - 3;
+
+         for I in All_Tokens (Generate_Data).Iterate
+           (Non_Grammar  => True,
+            Nonterminals => False,
+            Include_SOI  => False)
+         loop
+            if Kind (Generate_Data, I) = "comment-one-line" or
+              Kind (Generate_Data, I) = "delimited-text"
+            then
+               Indent_Line
+                 ("when " & Name (Generate_Data, I) & "_ID => return" &
+                    Integer'Image (Utils.Strip_Quotes (Repair_Image 
(Generate_Data, I))'Length) & ";");
+            end if;
+         end loop;
+      end if;
+
+      Indent_Line ("when others => raise SAL.Programmer_Error; return 0;");
+      Indent_Line ("end case;");
+      Indent := @ - 3;
+      Indent_Line ("end End_Delimiter_Length;");
+      New_Line;
+
+      Indent_Line ("function New_Line_Is_End_Delimiter (ID : in 
WisiToken.Token_ID) return Boolean");
+      Indent_Line ("is begin");
+      Indent := @ + 3;
+      Indent_Line ("return");
+      Indent_Line ("  (case To_Token_Enum (ID) is");
+      Indent := @ + 3;
+      if Block_Count > 0 then
+         for I in All_Tokens (Generate_Data).Iterate
+           (Non_Grammar  => True,
+            Nonterminals => False,
+            Include_SOI  => False)
+         loop
+            if Kind (Generate_Data, I) = "comment-new-line" or
+              Kind (Generate_Data, I) = "comment-one-line" or
+              Kind (Generate_Data, I) = "string-double-one-line" or
+              Kind (Generate_Data, I) = "string-single-one-line"
+            then
+               Indent_Line ("when " & Name (Generate_Data, I) & "_ID => 
True,");
+
+            elsif Kind (Generate_Data, I) = "string-double" or
+              Kind (Generate_Data, I) = "string-single" or
+              Kind (Generate_Data, I) = "delimited-text"
+            then
+               Indent_Line ("when " & Name (Generate_Data, I) & "_ID => 
False,");
+            end if;
+         end loop;
+      end if;
+
+      Indent_Line ("when others => raise SAL.Programmer_Error);");
+      Indent := @ - 6;
+      Indent_Line ("end New_Line_Is_End_Delimiter;");
+      New_Line;
+
+      Indent_Line ("function Find_End_Delimiter");
+      Indent_Line ("  (Source      : in WisiToken.Lexer.Source;");
+      Indent_Line ("   ID          : in WisiToken.Token_ID;");
+      Indent_Line ("   Token_Start : in WisiToken.Buffer_Pos)");
+      Indent_Line ("  return WisiToken.Buffer_Pos");
+      if Block_Count > 0 then
+         Indent_Line ("is begin");
+      else
+         Indent_Line ("is");
+         Indent_Line ("   pragma Unreferenced (Source, Token_Start);");
+         Indent_Line ("begin");
+      end if;
+      Indent := @ + 3;
+      Indent_Line ("return");
+      Indent_Line ("  (case To_Token_Enum (ID) is");
+      Indent := @ + 3;
+      if Block_Count > 0 then
+         for I in All_Tokens (Generate_Data).Iterate
+           (Non_Grammar  => True,
+            Nonterminals => False,
+            Include_SOI  => False)
+         loop
+            if Kind (Generate_Data, I) = "comment-new-line" then
+               Indent_Line
+                 ("when " & Name (Generate_Data, I) & "_ID => 
WisiToken.Lexer.Find_New_Line (Source, Token_Start),");
+
+            elsif Kind (Generate_Data, I) = "string-double-one-line" or
+              Kind (Generate_Data, I) = "string-single-one-line"
+            then
+               Indent_Line
+                 ("when " & Name (Generate_Data, I) &
+                    "_ID => WisiToken.Lexer.Find_String_Or_New_Line (Source, 
Token_Start, """"""""),");
+
+            elsif Kind (Generate_Data, I) = "string-double" then
+               Indent_Line
+                 ("when " & Name (Generate_Data, I) &
+                    "_ID => WisiToken.Lexer.Find_String (Source, Token_Start, 
""""""""),");
+
+            elsif Kind (Generate_Data, I) = "string-single" then
+               Indent_Line
+                 ("when " & Name (Generate_Data, I) &
+                    "_ID => WisiToken.Lexer.Find_String (Source, Token_Start, 
""'""),");
+
+            elsif Kind (Generate_Data, I) = "comment-one-line" or
+              Kind (Generate_Data, I) = "delimited-text"
+            then
+               Indent_Line
+                 ("when " & Name (Generate_Data, I) &
+                    "_ID => WisiToken.Lexer.Find_String (Source, Token_Start, 
" &
+                    --  Repair_Image includes quotes.
+                    Repair_Image (Generate_Data, I) & "),");
+            end if;
+         end loop;
+      end if;
+
+      Indent_Line ("when others => raise SAL.Programmer_Error);");
+      Indent := @ - 6;
+      Indent_Line ("end Find_End_Delimiter;");
+      New_Line;
+
+      Indent_Line ("function Find_Scan_End");
+      Indent_Line ("  (Source   : in WisiToken.Lexer.Source;");
+      Indent_Line ("   ID       : in WisiToken.Token_ID;");
+      Indent_Line ("   Region   : in WisiToken.Buffer_Region;");
+      Indent_Line ("   Inserted : in Boolean;");
+      Indent_Line ("   Start    : in Boolean)");
+      Indent_Line ("  return WisiToken.Buffer_Pos");
+      declare
+         Need_Region : constant Boolean :=
+           (for some I in All_Tokens (Generate_Data).Iterate
+              (Non_Grammar  => True,
+               Nonterminals => False,
+               Include_SOI  => False)
+              => Kind (Generate_Data, I) = "comment-new-line" or
+                Kind (Generate_Data, I) = "string-double-one-line" or
+                Kind (Generate_Data, I) = "string-single-one-line" or
+                Kind (Generate_Data, I) = "comment-one-line" or
+                Kind (Generate_Data, I) = "delimited-text");
+
+         Need_Inserted : constant Boolean :=
+           (for some I in All_Tokens (Generate_Data).Iterate
+              (Non_Grammar  => True,
+               Nonterminals => False,
+               Include_SOI  => False)
+              => Kind (Generate_Data, I) = "comment-new-line" or
+                Kind (Generate_Data, I) = "comment-one-line" or
+                Kind (Generate_Data, I) = "delimited-text");
+
+         Need_Start : constant Boolean :=
+           (for some I in All_Tokens (Generate_Data).Iterate
+              (Non_Grammar  => True,
+               Nonterminals => False,
+               Include_SOI  => False)
+              => Kind (Generate_Data, I) = "comment-new-line" or
+                Kind (Generate_Data, I) = "comment-one-line" or
+                Kind (Generate_Data, I) = "delimited-text");
+      begin
+         if Block_Count > 0 then
+            Indent_Line ("is");
+            Indent_Line ("   use WisiToken;");
+            if not Need_Region then
+               Indent_Line ("   pragma Unreferenced (Region);");
+            end if;
+            if not Need_Inserted then
+               Indent_Line ("   pragma Unreferenced (Inserted);");
+            end if;
+            if not Need_Start then
+               Indent_Line ("   pragma Unreferenced (Start);");
+            end if;
+            Indent_Line ("begin");
+         else
+            Indent_Line ("is");
+            Indent_Line ("   pragma Unreferenced (Source, Region, Inserted, 
Start);");
+            Indent_Line ("begin");
+         end if;
+      end;
+      Indent := @ + 3;
+      Indent_Line ("return");
+      Indent_Line ("  (case To_Token_Enum (ID) is");
+      Indent := @ + 3;
+      if Block_Count > 0 then
+         for I in All_Tokens (Generate_Data).Iterate
+           (Non_Grammar  => True,
+            Nonterminals => False,
+            Include_SOI  => False)
+         loop
+            --  Inserted : a start or end delimiter was inserted
+            --  Start    : start delimiter
+            --  Return position where lex can end
+            --  start delimiter in Value, end delimiter in Repair_Image
+            if Kind (Generate_Data, I) = "comment-new-line" then
+               --  If Inserted, Start; a comment start was inserted in an 
existing
+               --  comment; just scan the existing comment.
+               --
+               --  If Inserted, not Start; a comment end was inserted in an 
existing
+               --  comment; scan to the previous comment end.
+               --
+               --  If not inserted, Start; a comment start was deleted; scan 
to the
+               --  previous comment end.
+               --
+               --  If not inserted, not Start; a comment end was deleted; find 
a new
+               --  comment end.
+               Indent_Line ("when " & Name (Generate_Data, I) & "_ID =>");
+               Indent_Line ("(if Inserted then Region.Last");
+               Indent_Line (" elsif Start then Region.Last");
+               Indent_Line (" else Lexer.Find_New_Line (Source, 
Region.Last)),");
+
+            elsif Kind (Generate_Data, I) = "string-double-one-line" or
+              Kind (Generate_Data, I) = "string-single-one-line"
+            then
+               --  Delimiters are the same, so all delimiters flip state; 
terminated
+               --  by new_line.
+               Indent_Line ("when " & Name (Generate_Data, I) & "_ID => 
Lexer.Find_New_Line (Source, Region.Last),");
+
+            elsif Kind (Generate_Data, I) = "string-double" or
+              Kind (Generate_Data, I) = "string-single"
+            then
+               --  Delimiters are the same, so all delimiters flip state; 
terminated
+               --  by EOI.
+               Indent_Line ("when " & Name (Generate_Data, I) & "_ID => 
Lexer.Buffer_Region_Byte (Source).Last,");
+
+            elsif Kind (Generate_Data, I) = "comment-one-line" then
+               --  Similar to comment-new-line, terminated by either end 
delimiter or new_line
+               Indent_Line ("when " & Name (Generate_Data, I) & "_ID =>");
+               Indent_Line ("(if Inserted then Region.Last");
+               Indent_Line (" elsif Start then Region.Last");
+               Indent_Line (" else Lexer.Find_String_Or_New_Line (Source, 
Region.Last, " &
+                              Value (Generate_Data, I) & ")),");
+
+            elsif Kind (Generate_Data, I) = "delimited-text"
+            then
+               --  Similar to comment-new-line, terminated by either end 
delimiter or EOI
+               Indent_Line ("when " & Name (Generate_Data, I) & "_ID =>");
+               Indent_Line ("(if Inserted then Region.Last");
+               Indent_Line (" elsif Start then Region.Last");
+               Indent_Line (" else Lexer.Find_String (Source, Region.First, " &
+                              Repair_Image (Generate_Data, I) & ")),");
+            end if;
+         end loop;
+      end if;
+
+      Indent_Line ("when others => raise SAL.Programmer_Error);");
+      Indent := @ - 6;
+      Indent_Line ("end Find_Scan_End;");
+      New_Line;
+
+      Indent_Line ("function Contains_End_Delimiter");
+      Indent_Line ("  (Source : in WisiToken.Lexer.Source;");
+      Indent_Line ("   ID     : in WisiToken.Token_ID;");
+      Indent_Line ("   Region : in WisiToken.Buffer_Region)");
+      Indent_Line ("  return WisiToken.Base_Buffer_Pos");
+      if Block_Count > 0 then
+         Indent_Line ("is");
+         Indent_Line ("   use WisiToken;");
+         Indent_Line ("begin");
+      else
+         Indent_Line ("is");
+         Indent_Line ("   use WisiToken;");
+         Indent_Line ("   pragma Unreferenced (Source, Region);");
+         Indent_Line ("begin");
+      end if;
+      Indent := @ + 3;
+      Indent_Line ("return");
+      Indent_Line ("  (case To_Token_Enum (ID) is");
+      Indent := @ + 3;
+      if Block_Count > 0 then
+         for I in All_Tokens (Generate_Data).Iterate
+           (Non_Grammar  => True,
+            Nonterminals => False,
+            Include_SOI  => False)
+         loop
+            if Kind (Generate_Data, I) = "comment-new-line" then
+               Indent_Line
+                 ("when " & Name (Generate_Data, I) &
+                    "_ID => Lexer.Find_New_Line (Source, Region),");
+
+            elsif Kind (Generate_Data, I) = "string-double-one-line" then
+               Indent_Line
+                 ("when " & Name (Generate_Data, I) &
+                    "_ID => Lexer.Find_String_Or_New_Line (Source, Region, 
""""""""),");
+
+            elsif Kind (Generate_Data, I) = "string-single-one-line" then
+               Indent_Line
+                 ("when " & Name (Generate_Data, I) &
+                    "_ID => Lexer.Find_String_Or_New_Line (Source, Region, 
""'""),");
+
+            elsif Kind (Generate_Data, I) = "string-double" then
+               Indent_Line
+                 ("when " & Name (Generate_Data, I) &
+                    "_ID => Lexer.Find_String (Source, Region, """"""""),");
+
+            elsif Kind (Generate_Data, I) = "string-single" then
+               Indent_Line
+                 ("when " & Name (Generate_Data, I) &
+                    "_ID => Lexer.Find_String (Source, Region, ""'""),");
+
+            elsif Kind (Generate_Data, I) = "comment-one-line"  or
+              Kind (Generate_Data, I) = "delimited-text"
+            then
+               Indent_Line
+                 ("when " & Name (Generate_Data, I) &
+                    "_ID => Lexer.Find_String_Or_New_Line (Source, Region, " &
+                    --  Repair_Image includes quotes.
+                    Repair_Image (Generate_Data, I) & "),");
+            end if;
+         end loop;
+      end if;
+
+      Indent_Line ("when others => raise SAL.Programmer_Error);");
+      Indent := @ - 6;
+      Indent_Line ("end Contains_End_Delimiter;");
+      New_Line;
+
+      Indent_Line ("function Line_Begin_Char_Pos");
+      Indent_Line (" (Source : in WisiToken.Lexer.Source;");
+      Indent_Line ("  Token  : in WisiToken.Lexer.Token;");
+      Indent_Line ("  Line   : in WisiToken.Line_Number_Type)");
+      Indent_Line ("return WisiToken.Buffer_Pos");
+      Indent_Line ("is");
+      if New_Line_Count + Block_Count = 0 then
+         Indent_Line ("   pragma Unreferenced (Source, Token, Line);");
+
+      elsif Block_Count = 0 then
+         Indent_Line ("   pragma Unreferenced (Source, Line);");
+      else
+         declare
+            Need_Source_Line : Boolean := False;
+         begin
+            for I in All_Tokens (Generate_Data).Iterate
+              (Non_Grammar  => True,
+               Nonterminals => False,
+               Include_SOI  => False)
+            loop
+               if Kind (Generate_Data, I) = "comment-new-line" or
+                 Kind (Generate_Data, I) = "comment-one-line" or
+                 Kind (Generate_Data, I) = "string-double-one-line" or
+                 Kind (Generate_Data, I) = "string-single-one-line" or
+                 Kind (Generate_Data, I) = "new-line"
+               then
+                  null;
+               elsif Kind (Generate_Data, I) = "delimited-text" or
+                 Kind (Generate_Data, I) = "string-double" or
+                 Kind (Generate_Data, I) = "string-single"
+               then
+                  Need_Source_Line := True;
+               end if;
+            end loop;
+            if not Need_Source_Line then
+               Indent_Line ("   pragma Unreferenced (Source, Line);");
+            end if;
+         end;
+      end if;
+
+      if New_Line_Count > 0 then
+         Indent_Line ("   use all type WisiToken.Base_Buffer_Pos;");
+      end if;
+      Indent_Line ("begin");
+      Indent := @ + 3;
+      if New_Line_Count + Block_Count = 0 then
+         Indent_Line ("return WisiToken.Invalid_Buffer_Pos;");
+
+      else
+         Indent_Line ("case To_Token_Enum (Token.ID) is");
+         for I in All_Tokens (Generate_Data).Iterate
+           (Non_Grammar  => True,
+            Nonterminals => False,
+            Include_SOI  => False)
+         loop
+            --  The preconditions on Lexer.Line_Begin_Char_Pos ensure that 
Token
+            --  contains the new-line for Line. "comment-one-line",
+            --  "string-double-one-line", "string-single-one-line" cannot 
contain
+            --  new-line.
+            if Kind (Generate_Data, I) = "comment-new-line" or
+              Kind (Generate_Data, I) = "new-line"
+            then
+               Indent_Line ("when " & Name (Generate_Data, I) & "_ID => return 
Token.Char_Region.Last + 1;");
+
+            elsif Kind (Generate_Data, I) = "delimited-text" or
+              Kind (Generate_Data, I) = "string-double" or
+              Kind (Generate_Data, I) = "string-single"
+            then
+               Indent_Line
+                 ("when " & Name (Generate_Data, I) & "_ID => return " &
+                    "WisiToken.Lexer.Line_Begin_Char_Pos (Source, Token, 
Line);");
+
+            end if;
+         end loop;
+
+         Indent_Line ("when others => raise SAL.Programmer_Error;");
+         Indent_Line ("end case;");
+      end if;
+      Indent := @ - 3;
+      Indent_Line ("end Line_Begin_Char_Pos;");
+      New_Line;
+
+      Indent_Line ("function Can_Contain_New_Line (ID : in WisiToken.Token_ID) 
return Boolean");
+      Indent_Line ("is begin");
+      Indent := @ + 3;
+      Indent_Line ("case To_Token_Enum (ID) is");
+
+      for I in All_Tokens (Generate_Data).Iterate
+        (Non_Grammar  => True,
+         Nonterminals => False,
+         Include_SOI  => False)
+      loop
+         if Kind (Generate_Data, I) = "new-line" or
+           Kind (Generate_Data, I) = "comment-new-line" or
+           Kind (Generate_Data, I) = "delimited-text"
+         then
+            Indent_Line ("when " & Name (Generate_Data, I) & "_ID => return 
True;");
+         end if;
+      end loop;
+
+      Indent_Line ("when others => return False;");
+      Indent_Line ("end case;");
+      Indent := @ - 3;
+      Indent_Line ("end Can_Contain_New_Line;");
+      New_Line;
+
+      Indent_Line ("function Terminated_By_New_Line (ID : in 
WisiToken.Token_ID) return Boolean");
+      Indent_Line ("is begin");
+      Indent := @ + 3;
+      Indent_Line ("case To_Token_Enum (ID) is");
+
+      for I in All_Tokens (Generate_Data).Iterate
+        (Non_Grammar  => True,
+         Nonterminals => False,
+         Include_SOI  => False)
+      loop
+         if Kind (Generate_Data, I) = "new-line" or
+           Kind (Generate_Data, I) = "comment-new-line" or
+           Kind (Generate_Data, I) = "string-double-one-line" or
+           Kind (Generate_Data, I) = "string-single-one-line"
+         then
+            Indent_Line ("when " & Name (Generate_Data, I) & "_ID => return 
True;");
+         end if;
+      end loop;
+
+      Indent_Line ("when others => return False;");
+      Indent_Line ("end case;");
+      Indent := @ - 3;
+      Indent_Line ("end Terminated_By_New_Line;");
+      New_Line;
+
+      Indent_Line ("package Lexer is new WisiToken.Lexer.re2c");
+      Indent_Line ("  (" & Output_File_Name_Root & "_re2c_c.New_Lexer,");
+      Indent_Line ("   " & Output_File_Name_Root & "_re2c_c.Free_Lexer,");
+      Indent_Line ("   " & Output_File_Name_Root & "_re2c_c.Reset_Lexer,");
+      Indent_Line ("   " & Output_File_Name_Root & "_re2c_c.Set_Verbosity,");
+      Indent_Line ("   " & Output_File_Name_Root & "_re2c_c.Set_Position,");
+      Indent_Line ("   " & Output_File_Name_Root & "_re2c_c.Next_Token,");
+      Indent_Line ("   Is_Block_Delimited,");
+      Indent_Line ("   Same_Block_Delimiters,");
+      Indent_Line ("   Escape_Delimiter_Doubled,");
+      Indent_Line ("   Start_Delimiter_Length,");
+      Indent_Line ("   End_Delimiter_Length,");
+      Indent_Line ("   New_Line_Is_End_Delimiter,");
+      Indent_Line ("   Find_End_Delimiter,");
+      Indent_Line ("   Contains_End_Delimiter,");
+      Indent_Line ("   Find_Scan_End,");
+      Indent_Line ("   Line_Begin_Char_Pos,");
+      Indent_Line ("   Can_Contain_New_Line,");
+      Indent_Line ("   Terminated_By_New_Line);");
+      New_Line;
+   end Create_re2c_Lexer;
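[Illustrative sketch, not part of the commit: for a hypothetical grammar whose Output_File_Name_Root is "example" and whose only block-delimited tokens are COMMENT (comment-new-line) and STRING_LITERAL (string-double-one-line), Create_re2c_Lexer would emit Ada along these lines; only the first helper and the final instantiation are shown.]

    function Is_Block_Delimited (ID : in WisiToken.Token_ID) return Boolean
    is begin
       case To_Token_Enum (ID) is
       when
          COMMENT_ID |
          STRING_LITERAL_ID => return True;
       when others => return False;
       end case;
    end Is_Block_Delimited;

    package Lexer is new WisiToken.Lexer.re2c
      (example_re2c_c.New_Lexer,
       example_re2c_c.Free_Lexer,
       example_re2c_c.Reset_Lexer,
       example_re2c_c.Set_Verbosity,
       example_re2c_c.Set_Position,
       example_re2c_c.Next_Token,
       Is_Block_Delimited,
       Same_Block_Delimiters,
       Escape_Delimiter_Doubled,
       Start_Delimiter_Length,
       End_Delimiter_Length,
       New_Line_Is_End_Delimiter,
       Find_End_Delimiter,
       Contains_End_Delimiter,
       Find_Scan_End,
       Line_Begin_Char_Pos,
       Can_Contain_New_Line,
       Terminated_By_New_Line);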
 
    function File_Name_To_Ada (File_Name : in String) return String
    is
@@ -1338,6 +1992,7 @@ package body WisiToken.BNF.Output_Ada_Common is
    function Initialize
      (Input_Data        : in WisiToken_Grammar_Runtime.User_Data_Type;
       Tuple             : in Generate_Tuple;
+      Grammar_File_Name : in String;
       Output_File_Root  : in String;
       Check_Interface   : in Boolean)
      return Common_Data
@@ -1358,9 +2013,7 @@ package body WisiToken.BNF.Output_Ada_Common is
             if Tuple.Interface_Kind in Valid_Interface then
                Data.Interface_Kind := Valid_Interface (Tuple.Interface_Kind);
             else
-               Put_Error
-                 (Error_Message
-                    (Input_Data.Grammar_Lexer.File_Name, 1, "Interface_Kind 
not set"));
+               Put_Error (Error_Message (Grammar_File_Name, 1, "Interface_Kind 
not set"));
             end if;
          else
             Data.Interface_Kind := Process;
diff --git a/wisitoken-bnf-output_ada_common.ads b/wisitoken-bnf-output_ada_common.ads
index 44d7a632c8..fd8eb284c2 100644
--- a/wisitoken-bnf-output_ada_common.ads
+++ b/wisitoken-bnf-output_ada_common.ads
@@ -2,7 +2,7 @@
 --
 --  Types and operations shared by Ada and Ada_Emacs outputs.
 --
---  Copyright (C) 2017, 2018 Free Software Foundation, Inc.
+--  Copyright (C) 2017, 2018, 2020 - 2022 Free Software Foundation, Inc.
 --
 --  This library is free software;  you can redistribute it and/or modify it
 --  under terms of the  GNU General Public License  as published by the Free
@@ -38,6 +38,7 @@ package WisiToken.BNF.Output_Ada_Common is
    function Initialize
      (Input_Data        : in WisiToken_Grammar_Runtime.User_Data_Type;
       Tuple             : in Generate_Tuple;
+      Grammar_File_Name : in String;
       Output_File_Root  : in String;
       Check_Interface   : in Boolean)
      return Common_Data;
@@ -63,23 +64,28 @@ package WisiToken.BNF.Output_Ada_Common is
       Tuple                : in Generate_Tuple;
       Input_Data           : in WisiToken_Grammar_Runtime.User_Data_Type);
 
-   procedure LR_Create_Create_Parser
-     (Input_Data    :         in     WisiToken_Grammar_Runtime.User_Data_Type;
-      Common_Data   :         in out Output_Ada_Common.Common_Data;
-      Generate_Data : aliased in     
WisiToken.BNF.Generate_Utils.Generate_Data);
+   procedure LR_Create_Create_Parse_Table
+     (Input_Data           :         in     
WisiToken_Grammar_Runtime.User_Data_Type;
+      Common_Data          :         in out Output_Ada_Common.Common_Data;
+      Generate_Data        : aliased in     
WisiToken.BNF.Generate_Utils.Generate_Data;
+      Actions_Package_Name :         in     String);
    --  If not Common_Data.Text_Rep, includes LR parse table in generated
    --  source. Otherwise, includes call to LR.Get_Text_Rep; caller must
    --  call Put_Text_Rep to create file.
 
    procedure Packrat_Create_Create_Parser
-     (Common_Data   :         in out Output_Ada_Common.Common_Data;
-      Generate_Data : aliased in     
WisiToken.BNF.Generate_Utils.Generate_Data;
-      Packrat_Data  :         in     WisiToken.Generate.Packrat.Data);
+     (Actions_Package_Name :         in     String;
+      Common_Data          :         in out Output_Ada_Common.Common_Data;
+      Generate_Data        : aliased in     
WisiToken.BNF.Generate_Utils.Generate_Data;
+      Packrat_Data         :         in     WisiToken.Generate.Packrat.Data);
 
    procedure External_Create_Create_Grammar
      (Generate_Data : in WisiToken.BNF.Generate_Utils.Generate_Data);
 
-   procedure Create_re2c
+   procedure Create_Create_Productions
+     (Generate_Data : in WisiToken.BNF.Generate_Utils.Generate_Data);
+
+   procedure Create_re2c_File
      (Input_Data            :         in 
WisiToken_Grammar_Runtime.User_Data_Type;
       Tuple                 :         in Generate_Tuple;
       Generate_Data         : aliased in 
WisiToken.BNF.Generate_Utils.Generate_Data;
@@ -87,4 +93,10 @@ package WisiToken.BNF.Output_Ada_Common is
    --  Create_re2c is called from wisitoken-bnf-generate, which does not 
declare
    --  Common_Data.
 
+   procedure Create_re2c_Lexer
+     (Generate_Data         : aliased in 
WisiToken.BNF.Generate_Utils.Generate_Data;
+      Output_File_Name_Root :         in String);
+   --  Output, to Ada.Text_IO.Current_Output, source code that
+   --  instantiates WisiToken.Lexer.re2c.
+
 end WisiToken.BNF.Output_Ada_Common;
diff --git a/wisitoken-bnf-output_ada_emacs.adb b/wisitoken-bnf-output_ada_emacs.adb
index c0e91c274a..76357dfc79 100644
--- a/wisitoken-bnf-output_ada_emacs.adb
+++ b/wisitoken-bnf-output_ada_emacs.adb
@@ -12,7 +12,7 @@
 --  If run in an Emacs dynamically loaded module, the parser actions
 --  call the elisp actions directly.
 --
---  Copyright (C) 2012 - 2015, 2017 - 2020 Free Software Foundation, Inc.
+--  Copyright (C) 2012 - 2015, 2017 - 2022 Free Software Foundation, Inc.
 --
 --  The WisiToken package is free software; you can redistribute it
 --  and/or modify it under terms of the GNU General Public License as
@@ -40,6 +40,7 @@ with WisiToken.Generate.Packrat;
 with WisiToken_Grammar_Runtime;
 procedure WisiToken.BNF.Output_Ada_Emacs
   (Input_Data            :         in WisiToken_Grammar_Runtime.User_Data_Type;
+   Grammar_File_Name     :         in String;
    Output_File_Name_Root :         in String;
    Generate_Data         : aliased in 
WisiToken.BNF.Generate_Utils.Generate_Data;
    Packrat_Data          :         in WisiToken.Generate.Packrat.Data;
@@ -56,7 +57,7 @@ is
    Numeric   : constant Ada.Strings.Maps.Character_Set := 
Ada.Strings.Maps.To_Set ("0123456789");
 
    Common_Data : Output_Ada_Common.Common_Data := 
WisiToken.BNF.Output_Ada_Common.Initialize
-     (Input_Data, Tuple, Output_File_Name_Root, Check_Interface => True);
+     (Input_Data, Tuple, Grammar_File_Name, Output_File_Name_Root, 
Check_Interface => True);
 
    Gen_Alg_Name : constant String :=
      (if Test_Main or Multiple_Tuples
@@ -86,7 +87,8 @@ is
 
       Delete_Last_Paren : Boolean := False;
    begin
-      --  Loop thru Item, copying chars to Buffer, ignoring comments, newlines.
+      --  Loop thru Item, copying chars to Buffer, replacing comments and
+      --  newlines with a single space each.
 
       if 0 /= Progn_Index then
          Item_I := Progn_Index + 6;
@@ -100,6 +102,8 @@ is
          if In_Comment then
             if Item (Item_I) in ASCII.CR | ASCII.LF then
                In_Comment := False;
+               Buffer (Buffer_J) := ' ';
+               Buffer_J := Buffer_J + 1;
             end if;
          else
             if Item (Item_I) = '(' then
@@ -133,7 +137,8 @@ is
                end if;
 
             elsif Item (Item_I) in ASCII.CR | ASCII.LF then
-               null;
+               Buffer (Buffer_J) := ' ';
+               Buffer_J := Buffer_J + 1;
 
             elsif Item (Item_I) = ';' and then Item_I < Item'Last and then 
Item (Item_I + 1) = ';' then
                In_Comment := True;
@@ -154,12 +159,13 @@ is
    end Split_Sexp;
 
    procedure Create_Ada_Action
-     (Name          : in String;
-      RHS           : in RHS_Type;
-      Prod_ID       : in WisiToken.Production_ID;
-      Unsplit_Lines : in Ada.Strings.Unbounded.Unbounded_String;
-      Labels        : in String_Arrays.Vector;
-      Check         : in Boolean)
+     (Name          : in     String;
+      RHS           : in     RHS_Type;
+      Prod_ID       : in     WisiToken.Production_ID;
+      Unsplit_Lines : in     Ada.Strings.Unbounded.Unbounded_String;
+      Labels        : in     String_Arrays.Vector;
+      Empty         :    out Boolean;
+      Check         : in     Boolean)
    is
       --  Create Action (if Check = False; Lines must be RHS.Action) or
       --  Check (if Check = True; Lines must be RHS.Check) subprogram named
@@ -171,7 +177,7 @@ is
       use WisiToken.Generate;
 
       Sexps : constant String_Lists.List := Split_Sexp
-        (-Unsplit_Lines, Input_Data.Grammar_Lexer.File_Name, RHS.Source_Line);
+        (-Unsplit_Lines, Grammar_File_Name, RHS.Source_Line);
 
       use all type Ada.Strings.Maps.Character_Set;
 
@@ -186,10 +192,44 @@ is
       Label_Needed   : array (Labels.First_Index .. Labels.Last_Index) of 
Boolean := (others => False);
       Nonterm_Needed : Boolean := False;
 
+      Last_Token_Index : Base_Identifier_Index := 0;
+      function Next_Token_Label return String
+      is begin
+         --  Only called from Indent_Params when RHS.Auto_Token_Labels is True.
+         Last_Token_Index := @ + 1;
+         return "T" & Trimmed_Image (Last_Token_Index);
+      end Next_Token_Label;
+
+      function Get_Label (Token_Param : in String; Integer : in Boolean := 
False) return String
+      is begin
+         if RHS.Auto_Token_Labels then
+            return
+              (if Integer
+               then "Integer (T" & Token_Param & ")"
+               else "T" & Token_Param);
+         else
+            return Token_Param;
+         end if;
+      end Get_Label;
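[Example, not in the commit: with RHS.Auto_Token_Labels True, Get_Label ("2") returns "T2" and Get_Label ("2", Integer => True) returns "Integer (T2)"; with it False, the parameter is returned unchanged, so explicit grammar labels pass through untouched.]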
+
+      procedure Mark_Label_Used (Label : in String)
+      is begin
+         for I in Labels.First_Index .. Labels.Last_Index loop
+            if Label = Labels (I) then
+               Label_Needed (I) := True;
+            end if;
+         end loop;
+      end Mark_Label_Used;
+
       function Label_Used (Label : in String) return Boolean
       is
          Found : Boolean := False;
       begin
+         if 0 = Index (Label, Numeric, Outside) then
+            --  Label is an integer, not a label
+            return True;
+         end if;
+
          for Tok of RHS.Tokens loop
             if -Tok.Label = Label then
                Found := True;
@@ -210,17 +250,6 @@ is
          raise SAL.Programmer_Error;
       end Label_Used;
 
-      function Count_Label_Needed return Ada.Containers.Count_Type
-      is
-         use Ada.Containers;
-         Result : Count_Type := 0;
-      begin
-         for B of Label_Needed loop
-            if B then Result := Result + 1; end if;
-         end loop;
-         return Result;
-      end Count_Label_Needed;
-
       function Find_Token_Index (I : in Base_Identifier_Index) return 
SAL.Base_Peek_Type
       is
          Rule_Label : constant String := -Labels (I);
@@ -255,9 +284,9 @@ is
             Last := Index (Params, Space_Paren_Set, Second + 1);
 
             declare
-               Label : constant String := Params (First .. Second - 1);
+               Label : constant String := Get_Label (Params (First .. Second - 
1));
             begin
-               if 0 = Index (Label, Numeric, Outside) or else Label_Used 
(Label) then
+               if Label_Used (Label) then
                   Count := Count + 1;
                   Result := Result & (if Need_Comma then ", " else "") &
                     "(" & Label & ", " &
@@ -269,7 +298,7 @@ is
             end;
          end loop;
          Nonterm_Needed := True;
-         return " (Parse_Data, Tree, Nonterm, Tokens, " &
+         return " (Parse_Data, Tree, Nonterm, " &
            (case Count is
             when 0 => "(1 .. 0 => (1, Motion)))",
             when 1 => "(1 => " & (-Result) & "))",
@@ -300,7 +329,7 @@ is
             if not (Last in Params'First .. Params'Last) then
                Put_Error
                  (Error_Message
-                    (Input_Data.Grammar_Lexer.File_Name, RHS.Source_Line,
+                    (Grammar_File_Name, RHS.Source_Line,
                      "Missing ']' or ')'"));
                exit;
             end if;
@@ -321,15 +350,17 @@ is
                when E : Not_Found =>
                   Put_Error
                     (Error_Message
-                       (Input_Data.Grammar_Lexer.File_Name, RHS.Source_Line,
+                       (Grammar_File_Name, RHS.Source_Line,
                         Ada.Exceptions.Exception_Message (E)));
                end;
 
                declare
-                  Label : constant String := Params (Index_First .. 
Index_Last);
+                  Label : constant String := Get_Label (Params (Index_First .. 
Index_Last));
                begin
-                  if 0 = Index (Label, Numeric, Outside) or else Label_Used 
(Label) then
-                     Result := Result & (if Need_Comma then " & " else "") & 
"(" &
+                  if Label_Used (Label) then
+                     --  Ada 2020 added vector aggregates, making this 
ambiguous without
+                     --  Index_ID qualification
+                     Result := Result & (if Need_Comma then " & " else "") & 
"Index_ID'(" &
                        Label & ", " & ID & ")";
                      Need_Comma := True;
                      Count  := Count + 1;
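
As background for the Index_ID qualification introduced here, a minimal compilable
sketch (type names and literal values assumed, not taken from wisitoken) of the
qualified pairs the generated motion actions now emit, so that "&" resolves to the
concatenation for the Index_ID array type:

   procedure Index_ID_Demo is
      type Token_ID is new Natural;
      Invalid_Token_ID : constant Token_ID := 0;
      type Index_ID is record
         Index : Positive;
         ID    : Token_ID;
      end record;
      type Index_ID_Array is array (Positive range <>) of Index_ID;

      --  Each pair is a qualified expression, as in the generated code, so
      --  the element type is explicit no matter which aggregate forms Ada
      --  2022 would otherwise allow here.
      Params : constant Index_ID_Array :=
        Index_ID'(2, 28) & Index_ID'(5, Invalid_Token_ID);
   begin
      pragma Assert (Params'Length = 2);
      null;
   end Index_ID_Demo;
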
@@ -338,7 +369,7 @@ is
                if Params (Last) /= ']' then
                   Put_Error
                     (Error_Message
-                       (Input_Data.Grammar_Lexer.File_Name, RHS.Source_Line,
+                       (Grammar_File_Name, RHS.Source_Line,
                         "too many token IDs in motion action"));
                   return -Result & "))";
                end if;
@@ -347,10 +378,11 @@ is
                First  := Index_Non_Blank (Params, Last);
                Last   := Index (Params, Delim, First);
                declare
-                  Label : constant String := Params (First .. Last - 1);
+                  Label : constant String := Get_Label (Params (First .. Last 
- 1));
                begin
-                  if 0 = Index (Label, Numeric, Outside) or else Label_Used 
(Label) then
-                     Result := Result & (if Need_Comma then " & " else "") & 
"(" & Label & ", Invalid_Token_ID)";
+                  if Label_Used (Label) then
+                     Result := Result & (if Need_Comma then " & " else "") &
+                       "Index_ID'(" & Label & ", Invalid_Token_ID)";
                      Need_Comma := True;
                      Count  := Count + 1;
                   end if;
@@ -362,7 +394,7 @@ is
             return "";
          else
             Nonterm_Needed := True;
-            return " (Parse_Data, Tree, Nonterm, Tokens, (" & (-Result) & "))";
+            return " (Parse_Data, Tree, Nonterm, (" & (-Result) & "))";
          end if;
       end Motion_Params;
 
@@ -375,18 +407,18 @@ is
          use Ada.Strings.Maps;
          Delim : constant Character_Set := To_Set ("]") or Blank_Set;
 
-         Last       : Integer          := Index_Non_Blank (Params); -- skip [
+         Last       : Integer := Index_Non_Blank (Params); -- skip [
          First      : Integer;
          Result     : Unbounded_String;
-         Need_Comma : Boolean          := False;
-         Count      : Integer          := 0;
+         Need_Comma : Boolean := False;
+         Count      : Integer := 0;
 
          procedure Elisp_Param (Skip : in Boolean)
          is begin
             if Params (Last) = ']' then
                Put_Error
                  (Error_Message
-                    (Input_Data.Grammar_Lexer.File_Name, RHS.Source_Line, 
"invalid wisi-face-apply argument"));
+                    (Grammar_File_Name, RHS.Source_Line, "invalid 
wisi-face-apply argument"));
                return;
             end if;
 
@@ -407,9 +439,9 @@ is
             First := Last;
             Last  := Index (Params, Delim, First);
             declare
-               Label : constant String := Params (First .. Last - 1);
+               Label : constant String := Get_Label (Params (First .. Last - 
1));
             begin
-               if 0 = Index (Label, Numeric, Outside) or else Label_Used 
(Label) then
+               if Label_Used (Label) then
                   Count  := Count + 1;
                   Result := Result & (if Need_Comma then ", (" else "(") & 
Label;
                   Need_Comma := True;
@@ -426,16 +458,16 @@ is
             return "";
          elsif Count = 1 then
                Nonterm_Needed := True;
-            return " (Parse_Data, Tree, Nonterm, Tokens, (1 => " & (-Result) & 
"))";
+            return " (Parse_Data, Tree, Nonterm, (1 => " & (-Result) & "))";
          else
                Nonterm_Needed := True;
-            return " (Parse_Data, Tree, Nonterm, Tokens, (" & (-Result) & "))";
+            return " (Parse_Data, Tree, Nonterm, (" & (-Result) & "))";
          end if;
       exception
       when E : others =>
          Put_Error
            (Error_Message
-              (Input_Data.Grammar_Lexer.File_Name, RHS.Source_Line, "invalid 
syntax: " &
+              (Grammar_File_Name, RHS.Source_Line, "invalid syntax: " &
               Ada.Exceptions.Exception_Message (E)));
          return "";
       end Face_Apply_Params;
@@ -463,9 +495,9 @@ is
             First := Last;
             Last  := Index (Params, Delim, First);
             declare
-               Label : constant String := Params (First .. Last - 1);
+               Label : constant String := Get_Label (Params (First .. Last - 
1));
             begin
-               if 0 = Index (Label, Numeric, Outside) or else Label_Used 
(Label) then
+               if Label_Used (Label) then
                   Count  := Count + 1;
                   Skip   := False;
                   Result := Result & (if Need_Comma then ", (" else "(") & 
Label;
@@ -477,7 +509,7 @@ is
             if Params (Last) = ']' then
                Put_Error
                  (Error_Message
-                    (Input_Data.Grammar_Lexer.File_Name, RHS.Source_Line, 
"invalid wisi-face-mark argument"));
+                    (Grammar_File_Name, RHS.Source_Line, "invalid 
wisi-face-mark argument"));
                exit;
             end if;
 
@@ -489,7 +521,7 @@ is
             end if;
          end loop;
          Nonterm_Needed := True;
-         return " (Parse_Data, Tree, Nonterm, Tokens, " &
+         return " (Parse_Data, Tree, Nonterm, " &
            (case Count is
             when 0 => "(1 .. 0 => (1, Prefix))",
             when 1 => "(1 => " & (-Result) & "))",
@@ -498,7 +530,7 @@ is
       when E : others =>
          Put_Error
            (Error_Message
-            (Input_Data.Grammar_Lexer.File_Name, RHS.Source_Line, "invalid 
syntax: " &
+            (Grammar_File_Name, RHS.Source_Line, "invalid syntax: " &
               Ada.Exceptions.Exception_Message (E)));
          return "";
       end Face_Mark_Params;
@@ -506,39 +538,45 @@ is
       function Face_Remove_Params (Params : in String) return String
       is
          --  Params is a vector of token numbers: [1 3 ...]
+         --  Token numbers can be labels.
          --  Result: (1, 3, ...)
          use Ada.Strings.Maps;
          Delim : constant Character_Set := To_Set ("]") or Blank_Set;
 
-         Last       : Integer          := Index_Non_Blank (Params); -- skip [
+         Last       : Integer := Index_Non_Blank (Params); -- skip [
          First      : Integer;
          Result     : Unbounded_String;
-         Need_Comma : Boolean          := False;
-         Count      : Integer          := 0;
+         Need_Comma : Boolean := False;
+         Count      : Integer := 0;
       begin
          loop
             Last := Index_Non_Blank (Params, Last + 1);
 
             exit when Params (Last) = ']' or Params (Last) = ')';
 
-            Count  := Count + 1;
             First  := Last;
             Last   := Index (Params, Delim, First);
-            Result := Result & (if Need_Comma then ", " else "") & Params 
(First .. Last - 1);
-
-            Need_Comma := True;
+            declare
+               Label : constant String := Get_Label (Params (First .. Last - 
1));
+            begin
+               if Label_Used (Label) then
+                  Count      := Count + 1;
+                  Result     := Result & (if Need_Comma then ", " else "") & 
Label;
+                  Need_Comma := True;
+               end if;
+            end;
          end loop;
          Nonterm_Needed := True;
-         if Count = 1 then
-            return " (Parse_Data, Tree, Nonterm, Tokens, (1 => " & (-Result) & 
"))";
-         else
-            return " (Parse_Data, Tree, Nonterm, Tokens, (" & (-Result) & "))";
-         end if;
+         return " (Parse_Data, Tree, Nonterm, " &
+           (case Count is
+            when 0 => "(1 .. 0 => 1)",
+            when 1 => "(1 => " & (-Result) & "))",
+            when others => "(" & (-Result) & "))");
       exception
       when E : others =>
          Put_Error
            (Error_Message
-            (Input_Data.Grammar_Lexer.File_Name, RHS.Source_Line, "invalid 
syntax: " &
+            (Grammar_File_Name, RHS.Source_Line, "invalid syntax: " &
               Ada.Exceptions.Exception_Message (E)));
          return "";
       end Face_Remove_Params;
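
These *_Params functions all walk the elisp argument text with the same
Ada.Strings idiom: Index_Non_Blank finds the start of the next item, and Index
with a delimiter Character_Set finds its end.  A standalone sketch of that loop
(invented input, in the shape Face_Remove_Params expects: a '[' vector followed
by the action's closing ')'):

   with Ada.Strings.Fixed; use Ada.Strings.Fixed;
   with Ada.Strings.Maps;  use Ada.Strings.Maps;
   with Ada.Text_IO;       use Ada.Text_IO;
   procedure Scan_Params_Demo is
      Params : constant String        := "[1 name 3])";
      Delim  : constant Character_Set := To_Set ("] ");
      Last   : Integer := Index_Non_Blank (Params); -- at '['
      First  : Integer;
   begin
      loop
         Last := Index_Non_Blank (Params, Last + 1);
         exit when Params (Last) = ']' or Params (Last) = ')';
         First := Last;
         Last  := Index (Params, Delim, First);
         Put_Line (Params (First .. Last - 1)); -- prints 1, name, 3
      end loop;
   end Scan_Params_Demo;
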
@@ -566,27 +604,30 @@ is
 
          subtype Digit is Character range '0' .. '9';
 
-         Last          : Integer         := Index_Non_Blank (Params); -- skip [
-         Prefix        : constant String := " (Parse_Data, Tree, Nonterm, 
Tokens, " & N & "(";
-         Result        : Unbounded_String;
-         Need_Comma    : Boolean         := False;
-         Param_Count   : Count_Type      := 0;            -- in Params
+         Last       : Integer         := Index_Non_Blank (Params); -- skip [
+         Prefix     : constant String := " (Parse_Data, Tree, Nonterm, " & N & 
"(";
+         Result     : Unbounded_String;
+         Need_Comma : Boolean         := False;
+
+         --  In translated EBNF, token counts vary in each RHS, but the indent
+         --  parameter list is copied from the original. So the tokens don't
+         --  match the indent params. First we build a list of (label, value)
+         --  for parameters present in Params, then we match them against
+         --  RHS.Tokens.
+         Param_List : String_Pair_Lists.List;
 
          function Indent_Function (Elisp_Name : in String) return String
          is begin
-            if    Elisp_Name = "wisi-anchored"   then return "Anchored_0";
-            elsif Elisp_Name = "wisi-anchored%"  then return "Anchored_1";
-            elsif Elisp_Name = "wisi-anchored%-" then return "Anchored_2";
-            elsif Elisp_Name = "wisi-anchored*"  then return "Anchored_3";
-            elsif Elisp_Name = "wisi-anchored*-" then return "Anchored_4";
-            elsif Elisp_Name = "wisi-hanging"    then return "Hanging_0";
-            elsif Elisp_Name = "wisi-hanging-"   then return "Hanging_1";
-            elsif Elisp_Name = "wisi-hanging%"   then return "Hanging_2";
-            elsif Elisp_Name = "wisi-hanging%-"  then return "Hanging_3";
+            if    Elisp_Name = "wisi-anchored"  then return "Anchored_0";
+            elsif Elisp_Name = "wisi-anchored%" then return "Anchored_1";
+            elsif Elisp_Name = "wisi-block"     then return "Block";
+            elsif Elisp_Name = "wisi-hanging"   then return "Hanging_0";
+            elsif Elisp_Name = "wisi-hanging%"  then return "Hanging_1";
+            elsif Elisp_Name = "wisi-hanging*"  then return "Hanging_2";
             else
                Put_Error
                  (Error_Message
-                  (Input_Data.Grammar_Lexer.File_Name, RHS.Source_Line, 
"unrecognized wisi indent function: '" &
+                  (Grammar_File_Name, RHS.Source_Line, "unrecognized wisi 
indent function: '" &
                     Elisp_Name & "'"));
                return "";
             end if;
@@ -659,7 +700,7 @@ is
 
             Function_Name : Unbounded_String;
             Args          : Unbounded_String;
-            Arg_Count     : Count_Type      := 0;
+            Arg_Count     : Count_Type := 0;
          begin
             if Params (First) in Digit or Params (First) = '-' then
                Last := Index (Params, Delim, First);
@@ -698,29 +739,86 @@ is
                   Last := Last + 1; -- get past ')'
                   return -Args;
 
-               elsif Is_Present (Input_Data.Tokens.Indents, -Function_Name) 
then
-                  --  Language-specific function call
-                  Function_Name := +Value (Input_Data.Tokens.Indents, 
-Function_Name);
-                  Arg_Count     := 0;
-                  loop
-                     exit when Params (Last) = ')';
+               elsif Input_Data.Tokens.Indents.Contains (Function_Name) then
+                  --  Language-specific indent function call
+                  declare
+                     Pair : String_Pair_Type renames 
Input_Data.Tokens.Indents.Constant_Reference (Function_Name);
+
+                     Declared_Args       : constant String := -Pair.Value;
+                     Declared_Args_First : Integer         := 
Declared_Args'First;
+                     Declared_Args_Last  : Integer         := Index 
(Declared_Args, Blank_Set);
+
+                     Declared_Arg_Count : Count_Type := 0;
+                     Next_Token_Arg     : Count_Type := 0;
+
+                     procedure Get_Next_Token_Arg
+                     is begin
+                        Declared_Args_First := Index_Non_Blank (Declared_Args, 
Declared_Args_Last + 1);
+                        if Declared_Args_First /= 0 then
+                           Declared_Args_Last := Index (Declared_Args, 
Blank_Set, Declared_Args_First);
+                           if Declared_Args_Last = 0 then
+                              Declared_Args_Last := Declared_Args'Last;
+                           end if;
+
+                           Next_Token_Arg := Count_Type'Value
+                             (Declared_Args (Declared_Args_First .. 
Declared_Args_Last));
+                        end if;
+                     end Get_Next_Token_Arg;
 
-                     First := Last + 1;
-                     if Arg_Count = 0 then
-                        Args := +Expression (First);
+                  begin
+                     if Declared_Args'Length = 0 then
+                        --  grammar file not updated to current wisitoken 
version
+                        Put_Error
+                          (Error_Message
+                             (Grammar_File_Name, RHS.Source_Line,
+                              "%elisp_indent function requires arg count, 
token index args."));
                      else
-                        Args := Args & " & " & Expression (First);
+                        if Declared_Args_Last = 0 then
+                           Declared_Args_Last := Declared_Args'Last;
+                        end if;
+
+                        Declared_Arg_Count  := Count_Type'Value
+                          (Declared_Args (Declared_Args_First .. 
Declared_Args_Last));
+
+                        Get_Next_Token_Arg;
                      end if;
-                     Arg_Count := Arg_Count + 1;
-                  end loop;
 
-                  Last := Last + 1; -- get past ')'
+                     Function_Name := Pair.Name; -- Ada name
+                     Arg_Count     := 0;
+                     loop
+                        exit when Params (Last) = ')';
+
+                        First     := Last + 1;
+                        Arg_Count := Arg_Count + 1;
 
-                  return "(Language, " & (-Function_Name) & "'Access, " &
-                    (if Arg_Count = 0 then "Null_Args"
-                     elsif Arg_Count = 1 then '+' & (-Args)
-                     else -Args)
-                    & ')';
+                        Args := @ & (if Arg_Count = 1 then "" else " & ");
+
+                        if Next_Token_Arg = Arg_Count then
+                           Args := @ & Get_Label (Expression (First), Integer 
=> True);
+                           Get_Next_Token_Arg;
+
+                        else
+                           Args := @ & Expression (First);
+                        end if;
+                     end loop;
+
+                     if Declared_Arg_Count /= Arg_Count then
+                        Put_Error
+                          (Error_Message
+                             (Grammar_File_Name,
+                              RHS.Source_Line,
+                              "declared " & (-Function_Name) & " parameter 
count" & Declared_Arg_Count'Image &
+                                " /= actual parameter count" & 
Arg_Count'Image));
+                     end if;
+
+                     Last := Last + 1; -- get past ')'
+
+                     return "(Language, " & (-Function_Name) & "'Access, " &
+                       (if Arg_Count = 0 then "Null_Args"
+                        elsif Arg_Count = 1 then '+' & (-Args)
+                        else -Args)
+                       & ')';
+                  end;
 
                else
                   --  wisi lisp function call
@@ -736,12 +834,26 @@ is
                      Args := Args & ", " & Ensure_Simple_Indent (Expression 
(Last + 1));
                      Last := Last + 1; -- get past ')'
                      return "(" & (-(Function_Name & ", " & Args)) & ")";
-                  else
-                     --  Arguments are 2 simple integer expressions
+
+                  elsif Function_Name = "Block" then
+                     --  Argument is 1 simple integer expression; delta
                      Args := +Expression (Last + 1);
+                     Last := Last + 1; -- get past ')'
+                     return "(" & (-(Function_Name & ", " & Args)) & ")";
+
+                  elsif Slice (Function_Name, 1, 4) = "Anch" then
+                     --  Arguments are 2 simple integer expressions; token 
delta
+                     Args := +Get_Label (Expression (Last + 1));
                      Args := Args & ", " & Expression (Last + 1);
                      Last := Last + 1; -- get past ')'
                      return "(" & (-(Function_Name & ", " & Args)) & ")";
+
+                  else
+                     Put_Error
+                       (Error_Message
+                          (Grammar_File_Name, RHS.Source_Line,
+                           "unimplemented wisi indent function: '" & 
(-Function_Name) & "'"));
+                     return -Function_Name;
                   end if;
                end if;
 
@@ -760,18 +872,10 @@ is
          when E : others =>
             Put_Error
               (Error_Message
-                 (Input_Data.Grammar_Lexer.File_Name, RHS.Source_Line, 
Ada.Exceptions.Exception_Message (E)));
+                 (Grammar_File_Name, RHS.Source_Line, 
Ada.Exceptions.Exception_Message (E)));
             return "";
          end Expression;
 
-         procedure Skip_Expression (Param_First : in Integer)
-         is
-            Junk : constant String := Expression (Param_First);
-            pragma Unreferenced (Junk);
-         begin
-            null;
-         end Skip_Expression;
-
          function Ensure_Indent_Param (Item : in String) return String
          is begin
             --  Return an aggregate for Indent_Param. Item can be anything
@@ -784,7 +888,7 @@ is
                return Item;
 
             elsif Item (Item'First) = '(' then
-               --  Anchored or Language
+               --  Anchored or Language-specific function
                return "(Simple, " & Item & ")";
 
             elsif Item = "nil" then
@@ -796,19 +900,20 @@ is
             end if;
          end Ensure_Indent_Param;
 
-         procedure One_Param (Prefix : in Boolean := False; Skip : in Boolean 
:= False)
+         Param_Label_Count : Ada.Containers.Count_Type := 0;
+
+         procedure One_Param (Label : in String := "")
          is
-            procedure Comma
-            is begin
-               if Need_Comma then
-                  if not Prefix then
-                     Result := Result & ", ";
-                  end if;
-               else
-                  Need_Comma := True;
-               end if;
-            end Comma;
+            Pair : String_Pair_Type;
          begin
+            if Label = "" then
+               if RHS.Auto_Token_Labels then
+                  Pair.Name := +Next_Token_Label;
+               end if;
+            else
+               Pair.Name := +Label;
+            end if;
+
             case Params (Last) is
             when '(' =>
                --  cons or function
@@ -816,61 +921,48 @@ is
                   Label_Last : constant Integer := Check_Cons;
                begin
                   if Label_Last > 0 then
+                     --  cons; manual label
+                     pragma Assert (not RHS.Auto_Token_Labels);
                      declare
                         Label : constant String := Params (Last + 1 .. 
Label_Last);
                      begin
                         Last := Index_Non_Blank (Params, Label_Last + 3);
-                        if Label_Used (Label) then
-                           Comma;
-                           Result := Result & Label & " => ";
-                           One_Param (Prefix => True);
-                        else
-                           --  This token is not present in this RHS; skip 
this param
-                           One_Param (Skip => True);
-                        end if;
-                        if Params (Last) /= ')' then
-                           Put_Error
-                             (Error_Message
-                                (Input_Data.Grammar_Lexer.File_Name,
-                                 RHS.Source_Line, "invalid indent syntax; 
missing ')'"));
-                        end if;
-                        Last := Last + 1;
+                        One_Param (Label);
                      end;
-                  else
-                     if Skip then
-                        Skip_Expression (Last);
-                     else
-                        Comma;
-                        Result := Result & "(False, " & Ensure_Indent_Param 
(Expression (Last)) & ')';
+                     Param_Label_Count := @ + 1;
+
+                     if Params (Last) /= ')' then
+                        Put_Error
+                          (Error_Message
+                             (Grammar_File_Name,
+                              RHS.Source_Line, "invalid indent syntax; missing 
')'"));
                      end if;
+                     Last := Last + 1;
+                  else
+                     --  function
+                     Pair.Value := +"(False, " & Ensure_Indent_Param 
(Expression (Last)) & ')';
+                     Param_List.Append (Pair);
                   end if;
                end;
 
             when '[' =>
                --  vector
-               if Skip then
-                  Skip_Expression (Last + 1);
-                  Skip_Expression (Last + 1);
-               else
-                  Comma;
-                  Result := Result & "(True, " & Ensure_Indent_Param 
(Expression (Last + 1));
-                  Result := Result & ", " & Ensure_Indent_Param (Expression 
(Last + 1)) & ')';
-               end if;
+               Pair.Value := +"(True, " & Ensure_Indent_Param (Expression 
(Last + 1));
+               Pair.Value := @ & ", " & Ensure_Indent_Param (Expression (Last 
+ 1)) & ')';
+
+               Param_List.Append (Pair);
+
                if Params (Last) /= ']' then
                   Put_Error
                     (Error_Message
-                       (Input_Data.Grammar_Lexer.File_Name, RHS.Source_Line, 
"indent missing ']'"));
+                       (Grammar_File_Name, RHS.Source_Line, "indent missing 
']'"));
                end if;
                Last := Last + 1;
 
             when others =>
                --  integer or symbol
-               if Skip then
-                  Skip_Expression (Last);
-               else
-                  Comma;
-                  Result := Result & "(False, " & Ensure_Indent_Param 
(Expression (Last)) & ')';
-               end if;
+               Pair.Value := +"(False, " & Ensure_Indent_Param (Expression 
(Last)) & ')';
+               Param_List.Append (Pair);
             end case;
          end One_Param;
 
@@ -879,7 +971,7 @@ is
             if Params (Last) /= ']' then
                Last := Index_Non_Blank (Params, Last + 1);
                if Last = 0 then
-                  Put_Error (Error_Message 
(Input_Data.Grammar_Lexer.File_Name, RHS.Source_Line, "indent missing ']'"));
+                  Put_Error (Error_Message (Grammar_File_Name, 
RHS.Source_Line, "indent missing ']'"));
                   return -Result;
                end if;
             end if;
@@ -887,35 +979,92 @@ is
             exit when Params (Last) = ']';
 
             One_Param;
-
-            Param_Count := Param_Count + 1;
          end loop;
 
-         --  In translated EBNF, token counts vary in each RHS; require each
-         --  parameter to be labeled if any are, both for catching errors, and
-         --  becase that would produce mixed positional and named association
-         --  in the Ada action subprogram.
-         if Param_Count /= RHS.Tokens.Length then
-            if Labels.Length = 0 then
-               Put_Error
-                 (Error_Message
-                    (Input_Data.Grammar_Lexer.File_Name, RHS.Source_Line, 
Image (Prod_ID) &
-                       ": indent parameters count of" & Count_Type'Image 
(Param_Count) &
-                       " /= production token count of" & Count_Type'Image 
(RHS.Tokens.Length)));
+         --  Now we have Param_List; match it against RHS.Tokens and create 
Result.
 
-            elsif Count_Label_Needed /= RHS.Tokens.Length then
-               Put_Error
-                 (Error_Message
-                    (Input_Data.Grammar_Lexer.File_Name, RHS.Source_Line, 
Image (Prod_ID) &
-                       ": indent parameter(s) not labeled"));
-            else
-               --  all parameters labeled
-               null;
-            end if;
+         if RHS.Auto_Token_Labels or Param_Label_Count = Param_List.Length then
+            --  All tokens are either manually or automatically labeled, and if
+            --  manual then all parameters are manually labeled, and we can 
detect
+            --  extra params in edited RHS.
+            declare
+               use String_Pair_Lists;
+               use all type SAL.Base_Peek_Type;
+
+               Token_I   : Positive_Index_Type      := RHS.Tokens.First_Index;
+               Param_Cur : String_Pair_Lists.Cursor := Param_List.First;
+               Param_I   : Positive_Index_Type      := RHS.Tokens.First_Index;
+
+               Nil_Indent : constant String := "(False, (Simple, (Label => 
None)))";
+            begin
+               loop
+                  exit when Token_I > RHS.Tokens.Last_Index or not Has_Element 
(Param_Cur);
+
+                  declare
+                     Token_Label : constant String := -RHS.Tokens 
(Token_I).Label;
+                     Param_Label : constant String := -Element 
(Param_Cur).Name;
+                  begin
+                     if Token_Label = Param_Label then
+                        Result := Result & (if Need_Comma then ", " else "") & 
Param_Label & " => " &
+                          Element (Param_Cur).Value;
+
+                        Mark_Label_Used (Token_Label);
+
+                        Need_Comma := True;
+
+                        Token_I := @ + 1;
+                        Next (Param_Cur);
+                        Param_I := @ + 1;
+
+                     elsif RHS.Auto_Token_Labels and
+                       (Token_Label'Length > 0 and then Token_Label (1) /= 
'T') and
+                       Token_I = Param_I
+                     then
+                        Result := Result & (if Need_Comma then ", " else "") & 
Token_Label & " => " & Nil_Indent;
+                        Mark_Label_Used (Token_Label);
+                        Need_Comma := True;
+
+                        Token_I := @ + 1;
+                        Next (Param_Cur);
+                        Param_I := @ + 1;
+
+                     else
+                        Next (Param_Cur);
+                        Param_I := @ + 1;
+                     end if;
+                  end;
+               end loop;
+
+               if (not RHS.Edited_Token_List or Prod_ID.RHS = 0) and then
+                 (Token_I /= RHS.Tokens.Last_Index + 1 or Has_Element 
(Param_Cur))
+               then
+                  --  We don't check 'Has_Element (Param_Cur)' when 
edited_token_list
+                  --  and RHS_Index /= 0, because we expect to have more 
params than
+                  --  tokens. RHS_Index = 0 always has all optional tokens.
+                  if RHS.Auto_Token_Labels then
+                     Put_Error
+                       (Error_Message
+                          (Grammar_File_Name, RHS.Source_Line, Image (Prod_ID, 
Generate_Data.Descriptor.all)) &
+                          (if Token_I <= RHS.Tokens.Last_Index then " missing" 
else " extra") & " indent parameters");
+                  else
+                     Put_Error
+                       (Error_Message
+                          (Grammar_File_Name, RHS.Source_Line, Image (Prod_ID, 
Generate_Data.Descriptor.all) &
+                             ": missing or extra indent parameter, or missing 
token label"));
+                  end if;
+               end if;
+            end;
+
+         else
+            --  No labels; assume Param_List is correct.
+            for Pair of Param_List loop
+               Result := Result & (if Need_Comma then ", " else "") & 
Pair.Value;
+               Need_Comma := True;
+            end loop;
          end if;
 
          Nonterm_Needed := True;
-         if Param_Count = 1 then
+         if Param_List.Length = 1 then
             Result := Prefix & "1 => " & Result;
          else
             Result := Prefix & Result;
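
The loop above matches the collected indent params against RHS.Tokens by label,
dropping params whose token is absent from this copied RHS; the real code also
fills in Nil_Indent for auto-labeled optional tokens and reports count
mismatches.  A schematic, self-contained sketch of just the matching idea, with
toy data rather than wisitoken types:

   with Ada.Text_IO; use Ada.Text_IO;
   procedure Match_Demo is
      type Pair is record
         Label, Value : String (1 .. 1);
      end record;
      --  Suppose the original EBNF RHS "A B? C" carried indent params for
      --  labels a, b, c; this copied RHS omits B, so only a and c remain.
      Tokens : constant array (1 .. 2) of String (1 .. 1) := ("a", "c");
      Params : constant array (1 .. 3) of Pair :=
        (("a", "2"), ("b", "2"), ("c", "0"));
      T : Positive := Tokens'First;
   begin
      for P of Params loop
         exit when T > Tokens'Last;
         if P.Label = Tokens (T) then
            Put_Line (P.Label & " => " & P.Value); -- named association
            T := T + 1;
         end if;
      end loop;
   end Match_Demo;
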
@@ -929,41 +1078,46 @@ is
          --  Input looks like "1 2)"
          First             : constant Integer := Index_Non_Blank (Params);
          Second            : constant Integer := Index (Params, Blank_Set, 
First);
-         Label_First       : constant String  := Params (First .. Second - 1);
-         Label_Used_First  : constant Boolean := 0 = Index (Label_First, 
Numeric, Outside) or else
-           Label_Used (Label_First);
-         Label_Second      : constant String  := Params (Second + 1 .. 
Params'Last - 1);
-         Label_Used_Second : constant Boolean := 0 = Index (Label_Second, 
Numeric, Outside) or else
-           Label_Used (Label_Second);
+         Label_First       : constant String  := Get_Label (Params (First .. 
Second - 1));
+         Label_Used_First  : constant Boolean := Label_Used (Label_First);
+         Label_Second      : constant String  := Get_Label (Params (Second + 1 
.. Params'Last - 1));
+         Label_Used_Second : constant Boolean := Label_Used (Label_Second);
       begin
          Nonterm_Needed := True;
 
          if Label_Used_First and Label_Used_Second then
-            return " (Nonterm, Tokens, " & Label_First & ", " & Label_Second & 
")";
+            return " (Tree, Nonterm, Tokens, " & Label_First & ", " & 
Label_Second & ")";
 
          elsif (not Label_Used_First) and Label_Used_Second then
             --  A copied EBNF RHS; see subprograms.wy Name
-            return " (Nonterm, Tokens, " & Label_Second & ")";
+            return " (Tree, Nonterm, Tokens, " & Label_Second & ")";
          else
             Put_Error
               (Error_Message
-                 (Input_Data.Grammar_Lexer.File_Name, RHS.Source_Line, 
"merge_names token label error"));
-            return " (Nonterm, Tokens)";
+                 (Grammar_File_Name, RHS.Source_Line, "merge_names token label 
error"));
+            return " (Tree, Nonterm, Tokens)";
          end if;
       end Merge_Names_Params;
 
       function Match_Names_Params (Params : in String) return String
       is
          --  Input looks like: 1 2)
-         First  : constant Integer := Index_Non_Blank (Params);
-         Second : constant Integer := Index (Params, Blank_Set, First);
+         First             : constant Integer := Index_Non_Blank (Params);
+         Second            : constant Integer := Index (Params, Blank_Set, 
First);
+         Label_First       : constant String  := Get_Label (Params (First .. 
Second - 1));
+         Label_Second      : constant String  := Get_Label (Params (Second + 1 
.. Params'Last - 1));
+         Label_Used_First  : constant Boolean := Label_Used (Label_First);
+         Label_Used_Second : constant Boolean := Label_Used (Label_Second);
       begin
-         return " (Lexer, Descriptor, Tokens, " &
-           Params (First .. Second - 1) & ',' &
-           Params (Second .. Params'Last - 1) & ", " &
-           (if Length (Input_Data.Language_Params.End_Names_Optional_Option) > 0
-            then -Input_Data.Language_Params.End_Names_Optional_Option
-            else "False") & ")";
+         if Label_Used_First and Label_Used_Second then
+            return " (Tree, Tokens, " &
+              Label_First & ", " & Label_Second & ", " &
+              (if Length 
(Input_Data.Language_Params.End_Names_Optional_Option) > 0
+               then -Input_Data.Language_Params.End_Names_Optional_Option
+               else "False") & ")";
+         else
+            return "";
+         end if;
       end Match_Names_Params;
 
       function Language_Action_Params (Params : in String; Action_Name : in 
String) return String
@@ -979,9 +1133,9 @@ is
             First := Index_Non_Blank (Params, Last + 1);
             Last  := Index (Params, Space_Paren_Set, First);
             declare
-               Label : constant String  := Params (First .. Last - 1);
+               Label : constant String := Get_Label (Params (First .. Last - 
1));
             begin
-               if 0 = Index (Label, Numeric, Outside) or else Label_Used 
(Label) then
+               if Label_Used (Label) then
                   Param_Count := Param_Count + 1;
                   if Need_Comma then
                      Result := Result & ", ";
@@ -994,7 +1148,7 @@ is
                if Last = Params'Last then
                   Put_Error
                     (Error_Message
-                       (Input_Data.Grammar_Lexer.File_Name, RHS.Source_Line, 
Action_Name & " missing ']'"));
+                       (Grammar_File_Name, RHS.Source_Line, Action_Name & " 
missing ']'"));
                   exit;
                end if;
             end;
@@ -1018,7 +1172,7 @@ is
             if Length (Face_Line) > 0 then
                Put_Error
                  (Error_Message
-                    (Input_Data.Grammar_Lexer.File_Name, RHS.Source_Line, 
"multiple face actions"));
+                    (Grammar_File_Name, RHS.Source_Line, "multiple face 
actions"));
             end if;
          end Assert_Face_Empty;
 
@@ -1027,7 +1181,7 @@ is
             if Length (Indent_Action_Line) > 0 then
                Put_Error
                  (Error_Message
-                    (Input_Data.Grammar_Lexer.File_Name, RHS.Source_Line, 
"multiple indent actions"));
+                    (Grammar_File_Name, RHS.Source_Line, "multiple indent 
actions"));
             end if;
          end Assert_Indent_Empty;
 
@@ -1036,7 +1190,7 @@ is
             if Length (Check_Line) > 0 then
                Put_Error
                  (Error_Message
-                    (Input_Data.Grammar_Lexer.File_Name, RHS.Source_Line, 
"multiple check actions"));
+                    (Grammar_File_Name, RHS.Source_Line, "multiple check 
actions"));
             end if;
          end Assert_Check_Empty;
 
@@ -1056,12 +1210,12 @@ is
             declare
                First : constant Integer := Index_Non_Blank (Line, Last + 1);
                Last  : constant Integer := Index (Line, Space_Paren_Set, 
First);
-               Label : constant String  := Line (First .. Last - 1);
+               Label : constant String  := Get_Label (Line (First .. Last - 
1));
             begin
-               if 0 = Index (Label, Numeric, Outside) or else Label_Used 
(Label) then
+               if Label_Used (Label) then
                   Nonterm_Needed := True;
                   Navigate_Lines.Append
-                    ("Name_Action (Parse_Data, Tree, Nonterm, Tokens, " & Line 
(First .. Line'Last) & ";");
+                    ("Name_Action (Parse_Data, Tree, Nonterm, " & Label & 
");");
                end if;
             end;
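
For a name action whose argument resolves to the label 3 (example value
assumed), the line appended to Navigate_Lines is the generated Ada statement

   Name_Action (Parse_Data, Tree, Nonterm, 3);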
 
@@ -1128,10 +1282,16 @@ is
             end;
 
          elsif Elisp_Name = "wisi-propagate-name" then
-            Assert_Check_Empty;
-            Nonterm_Needed := True;
-            Check_Line := +"return " & Elisp_Name_To_Ada (Elisp_Name, False, 
Trim => 5) &
-              " (Nonterm, Tokens, " & Line (Last + 1 .. Line'Last) & ";";
+            declare
+               Label : constant String := Get_Label (Line (Last + 1 .. 
Line'Last - 1));
+            begin
+               if Label_Used (Label) then
+                  Assert_Check_Empty;
+                  Nonterm_Needed := True;
+                  Check_Line := +"return " & Elisp_Name_To_Ada (Elisp_Name, 
False, Trim => 5) &
+                    " (Tree, Nonterm, Tokens, " & Label & ");";
+               end if;
+            end;
 
          elsif Elisp_Name = "wisi-merge-names" then
             Assert_Check_Empty;
@@ -1139,43 +1299,54 @@ is
               Merge_Names_Params (Line (Last + 1 .. Line'Last)) & ";";
 
          elsif Elisp_Name = "wisi-match-names" then
-            Assert_Check_Empty;
-            Check_Line := +"return " & Elisp_Name_To_Ada (Elisp_Name, False, 
Trim => 5) &
-              Match_Names_Params (Line (Last + 1 .. Line'Last)) & ";";
+            declare
+               Params : constant String := Match_Names_Params (Line (Last + 1 
.. Line'Last));
+            begin
+               if Params'Length > 0 then
+                  Assert_Check_Empty;
+                  Check_Line := +"return " & Elisp_Name_To_Ada (Elisp_Name, 
False, Trim => 5) &
+                    Params & ";";
+               end if;
+            end;
 
          elsif Elisp_Name = "wisi-terminate-partial-parse" then
             Assert_Check_Empty;
             Nonterm_Needed := True;
-            Check_Line := +"return Terminate_Partial_Parse 
(Partial_Parse_Active, Partial_Parse_Byte_Goal, " &
+            Check_Line := +"return Terminate_Partial_Parse (Tree, 
Partial_Parse_Active, Partial_Parse_Byte_Goal, " &
               "Recover_Active, Nonterm);";
 
-         elsif Is_Present (Input_Data.Tokens.Actions, Elisp_Name) then
-            --  Language-specific action (used in wisitoken grammar mode for
-            --  wisi-check-parens).
+         elsif Input_Data.Tokens.Actions.Contains (+Elisp_Name) then
+            --  Language-specific post-parse action (used in wisitoken grammar
+            --  mode for wisi-check-parens).
+            --
+            --  IMPROVEME: handle labels for token args. wisitoken-grammar 
declares
+            --  wisi-check-parens, but uses BNF syntax, so no token labels are
+            --  needed.
             declare
-               Item   : Elisp_Action_Type renames Input_Data.Tokens.Actions
-                 (Input_Data.Tokens.Actions.Find (+Elisp_Name));
+               Item   : String_Pair_Type renames 
Input_Data.Tokens.Actions.Constant_Reference (+Elisp_Name);
                Params : constant String := Language_Action_Params (Line (Last 
+ 1 .. Line'Last), Elisp_Name);
-               Code   : constant String := -Item.Ada_Name &
-                 " (Wisi.Parse_Data_Type'Class (User_Data), Tree, Tokens, " & 
Params & ");";
+               Code   : constant String := -Item.Value &
+                 " (Wisi.Parse_Data_Type'Class (User_Data), Tree, Nonterm, " & 
Params & ");";
             begin
+               Nonterm_Needed := True;
+
                if Params'Length > 0 then
-                  if "navigate" = -Item.Action_Label then
+                  if "navigate" = -Item.Name then
                      Navigate_Lines.Append (Code);
 
-                  elsif "face" = -Item.Action_Label then
+                  elsif "face" = -Item.Name then
                      Assert_Face_Empty;
                      Face_Line := +Code;
 
-                  elsif "indent" = -Item.Action_Label then
+                  elsif "indent" = -Item.Name then
                      Assert_Indent_Empty;
                      Indent_Action_Line := +Code;
 
                   else
                      Put_Error
                        (Error_Message
-                          (Input_Data.Grammar_Lexer.File_Name, 
RHS.Source_Line, "unrecognized action label: '" &
-                             (-Item.Action_Label) & "'"));
+                          (Grammar_File_Name, RHS.Source_Line, "unrecognized 
post-parse action: '" &
+                             (-Item.Name) & "'"));
                   end if;
 
                   --  else skip
@@ -1184,11 +1355,12 @@ is
          else
             Put_Error
               (Error_Message
-                 (Input_Data.Grammar_Lexer.File_Name, RHS.Source_Line, 
"unrecognized elisp action: '" &
+                 (Grammar_File_Name, RHS.Source_Line, "unrecognized elisp 
action: '" &
                     Elisp_Name & "'"));
          end if;
       end Translate_Sexp;
 
+      Subprogram_Started : Boolean := False;
    begin
       for Sexp of Sexps loop
          begin
@@ -1197,76 +1369,84 @@ is
          when E : Not_Found =>
             Put_Error
               (Error_Message
-                 (Input_Data.Grammar_Lexer.File_Name, RHS.Source_Line, 
Ada.Exceptions.Exception_Message (E)));
+                 (Grammar_File_Name, RHS.Source_Line, 
Ada.Exceptions.Exception_Message (E)));
          end;
       end loop;
 
       if Check then
-         --  in a check
-         Indent_Line ("function " & Name);
-         Indent_Line (" (Lexer          : access constant 
WisiToken.Lexer.Instance'Class;");
-         Indent_Line ("  Nonterm        : in out WisiToken.Recover_Token;");
-         Indent_Line ("  Tokens         : in     
WisiToken.Recover_Token_Array;");
-         Indent_Line ("  Recover_Active : in     Boolean)");
-         Indent_Line (" return WisiToken.Semantic_Checks.Check_Status");
-         declare
-            Unref_Lexer   : constant Boolean := 0 = Index (Check_Line, 
"Lexer");
-            Unref_Nonterm : constant Boolean := 0 = Index (Check_Line, 
"Nonterm");
-            Unref_Tokens  : constant Boolean := 0 = Index (Check_Line, 
"Tokens");
-            Unref_Recover : constant Boolean := 0 = Index (Check_Line, 
"Recover_Active");
-            Need_Comma    : Boolean          := False;
-         begin
-            if Unref_Lexer or Unref_Nonterm or Unref_Tokens or Unref_Recover or
-              (for some I of Label_Needed => I)
-            then
-               Indent_Line ("is");
-
-               Indent := Indent + 3;
-               if Unref_Lexer or Unref_Nonterm or Unref_Tokens or 
Unref_Recover then
-                  Indent_Start ("pragma Unreferenced (");
+         --  In an in-parse check action
+         if Length (Check_Line) = 0 then
+            Empty := True; -- don't output a spec for this.
 
-                  if Unref_Lexer then
-                     Put ((if Need_Comma then ", " else "") & "Lexer");
-                     Need_Comma := True;
-                  end if;
-                  if Unref_Nonterm then
-                     Put ((if Need_Comma then ", " else "") & "Nonterm");
-                     Need_Comma := True;
-                  end if;
-                  if Unref_Tokens then
-                     Put ((if Need_Comma then ", " else "") & "Tokens");
-                     Need_Comma := True;
-                  end if;
-                  if Unref_Recover then
-                     Put ((if Need_Comma then ", " else "") & 
"Recover_Active");
-                     Need_Comma := True;
+         else
+            Empty              := False;
+            Subprogram_Started := True;
+            Indent_Line ("function " & Name);
+            Indent_Line (" (Tree           : in     
WisiToken.Syntax_Trees.Tree;");
+            Indent_Line ("  Nonterm        : in out 
WisiToken.Syntax_Trees.Recover_Token;");
+            Indent_Line ("  Tokens         : in     
WisiToken.Syntax_Trees.Recover_Token_Array;");
+            Indent_Line ("  Recover_Active : in     Boolean)");
+            Indent_Line (" return 
WisiToken.Syntax_Trees.In_Parse_Actions.Status");
+            declare
+               Unref_Tree    : constant Boolean := 0 = Index (Check_Line, 
"Tree");
+               Unref_Nonterm : constant Boolean := 0 = Index (Check_Line, 
"Nonterm");
+               Unref_Tokens  : constant Boolean := 0 = Index (Check_Line, 
"Tokens");
+               Unref_Recover : constant Boolean := 0 = Index (Check_Line, 
"Recover_Active");
+               Need_Comma    : Boolean          := False;
+            begin
+               if Unref_Tree or Unref_Nonterm or Unref_Tokens or Unref_Recover 
or
+                 (for some I of Label_Needed => I)
+               then
+                  Indent_Line ("is");
+
+                  Indent := Indent + 3;
+                  if Unref_Tree or Unref_Nonterm or Unref_Tokens or 
Unref_Recover then
+                     Indent_Start ("pragma Unreferenced (");
+
+                     if Unref_Tree then
+                        Put ((if Need_Comma then ", " else "") & "Tree");
+                        Need_Comma := True;
+                     end if;
+                     if Unref_Nonterm then
+                        Put ((if Need_Comma then ", " else "") & "Nonterm");
+                        Need_Comma := True;
+                     end if;
+                     if Unref_Tokens then
+                        Put ((if Need_Comma then ", " else "") & "Tokens");
+                        Need_Comma := True;
+                     end if;
+                     if Unref_Recover then
+                        Put ((if Need_Comma then ", " else "") & 
"Recover_Active");
+                        Need_Comma := True;
+                     end if;
+                     Put_Line (");");
                   end if;
-                  Put_Line (");");
-               end if;
 
-               for I in Label_Needed'Range loop
-                  if Label_Needed (I) then
-                     Indent_Line
-                       (-Labels (I) & " : constant SAL.Peek_Type :=" &
-                          SAL.Peek_Type'Image (Find_Token_Index (I)) & ";");
-                  end if;
-               end loop;
-               Indent := Indent - 3;
+                  for I in Label_Needed'Range loop
+                     if Label_Needed (I) then
+                        Indent_Line
+                          (-Labels (I) & " : constant SAL.Peek_Type :=" &
+                             SAL.Peek_Type'Image (Find_Token_Index (I)) & ";");
+                     end if;
+                  end loop;
+                  Indent := Indent - 3;
 
-               Indent_Line ("begin");
-            else
-               Indent_Line ("is begin");
-            end if;
-         end;
-         Indent := Indent + 3;
-         Indent_Line (-Check_Line);
+                  Indent_Line ("begin");
+               else
+                  Indent_Line ("is begin");
+               end if;
+            end;
+            Indent := Indent + 3;
+            Indent_Line (-Check_Line);
+         end if;
       else
          --  In an action
+         Empty              := False;
+         Subprogram_Started := True;
          Indent_Line ("procedure " & Name);
-         Indent_Line (" (User_Data : in out 
WisiToken.Syntax_Trees.User_Data_Type'Class;");
-         Indent_Line ("  Tree      : in out WisiToken.Syntax_Trees.Tree;");
-         Indent_Line ("  Nonterm   : in     WisiToken.Valid_Node_Index;");
-         Indent_Line ("  Tokens    : in     
WisiToken.Valid_Node_Index_Array)");
+         Indent_Line ("  (User_Data : in out 
WisiToken.Syntax_Trees.User_Data_Type'Class;");
+         Indent_Line ("   Tree      : in out WisiToken.Syntax_Trees.Tree;");
+         Indent_Line ("   Nonterm   : in     
WisiToken.Syntax_Trees.Valid_Node_Access)");
          Indent_Line ("is");
 
          Indent := Indent + 3;
@@ -1321,9 +1501,11 @@ is
          Indent_Line ("end case;");
       end if;
 
-      Indent := Indent - 3;
-      Indent_Line ("end " & Name & ";");
-      New_Line;
+      if Subprogram_Started then
+         Indent := Indent - 3;
+         Indent_Line ("end " & Name & ";");
+         New_Line;
+      end if;
 
    end Create_Ada_Action;
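
Putting the check branch together: for an in-parse action such as
(wisi-match-names 1 5), with End_Names_Optional unset and assuming
Elisp_Name_To_Ada yields Match_Names (visible via the WisiToken.In_Parse_Actions
use clause added later in this change), Create_Ada_Action now emits roughly the
following (the subprogram name here is invented):

   function Declaration_0_check
    (Tree           : in     WisiToken.Syntax_Trees.Tree;
     Nonterm        : in out WisiToken.Syntax_Trees.Recover_Token;
     Tokens         : in     WisiToken.Syntax_Trees.Recover_Token_Array;
     Recover_Active : in     Boolean)
    return WisiToken.Syntax_Trees.In_Parse_Actions.Status
   is
      pragma Unreferenced (Nonterm, Recover_Active);
   begin
      return Match_Names (Tree, Tokens, 1, 5, False);
   end Declaration_0_check;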
 
@@ -1331,7 +1513,7 @@ is
    is begin
       for Rule of Input_Data.Tokens.Rules loop
          for RHS of Rule.Right_Hand_Sides loop
-            for Sexp of Split_Sexp (-RHS.Action, 
Input_Data.Grammar_Lexer.File_Name, RHS.Source_Line) loop
+            for Sexp of Split_Sexp (-RHS.Action, Grammar_File_Name, 
RHS.Source_Line) loop
                declare
                   Last       : constant Integer := Ada.Strings.Fixed.Index 
(Sexp, Blank_Set);
                   Elisp_Name : constant String  := Sexp (Sexp'First + 1 .. 
Last - 1);
@@ -1391,6 +1573,9 @@ is
          end;
       end if;
 
+      if Input_Data.Check_Count > 0 then
+         Indent_Line ("with WisiToken.In_Parse_Actions; use 
WisiToken.In_Parse_Actions;"); -- Match_Names etc.
+      end if;
       case Common_Data.Interface_Kind is
       when Process =>
          null;
@@ -1406,7 +1591,7 @@ is
       New_Line;
 
       if Input_Data.Check_Count > 0 then
-         Indent_Line ("use WisiToken.Semantic_Checks;");
+         Indent_Line ("use WisiToken.Syntax_Trees.In_Parse_Actions;");
       end if;
       if Motion_Actions then
          Indent_Line ("use all type Motion_Param_Array;");
@@ -1421,21 +1606,28 @@ is
          declare
             LHS_ID    : constant WisiToken.Token_ID := Find_Token_ID 
(Generate_Data, -Rule.Left_Hand_Side);
             RHS_Index : Integer                     := 0; -- Semantic_Action 
defines RHS_Index as zero-origin
+            Empty     : Boolean;
          begin
             for RHS of Rule.Right_Hand_Sides loop
                if Length (RHS.Action) > 0 then
                   declare
                      Name : constant String := Action_Names 
(LHS_ID)(RHS_Index).all;
                   begin
-                     Create_Ada_Action (Name, RHS, (LHS_ID, RHS_Index), 
RHS.Action, Rule.Labels, Check => False);
+                     Create_Ada_Action (Name, RHS, (LHS_ID, RHS_Index), 
RHS.Action, Rule.Labels, Empty, Check => False);
+                     if Empty then
+                        Action_Names (LHS_ID)(RHS_Index) := null;
+                     end if;
                   end;
                end if;
 
                if Length (RHS.Check) > 0 then
                   declare
-                     Name : constant String := Check_Names 
(LHS_ID)(RHS_Index).all;
+                     Name  : constant String := Check_Names 
(LHS_ID)(RHS_Index).all;
                   begin
-                     Create_Ada_Action (Name, RHS, (LHS_ID, RHS_Index), 
RHS.Check, Rule.Labels, Check => True);
+                     Create_Ada_Action (Name, RHS, (LHS_ID, RHS_Index), 
RHS.Check, Rule.Labels, Empty, Check => True);
+                     if Empty then
+                        Check_Names (LHS_ID)(RHS_Index) := null;
+                     end if;
                   end;
                end if;
                RHS_Index := RHS_Index + 1;
@@ -1455,6 +1647,7 @@ is
       Main_Package_Name    : in String)
    is
       use WisiToken.Generate;
+      use Generate_Utils;
 
       File_Name : constant String := To_Lower (Main_Package_Name) & ".adb";
       Body_File : File_Type;
@@ -1471,10 +1664,11 @@ is
       end if;
 
       case Common_Data.Lexer is
-      when None | Elisp_Lexer =>
+      when None | Tree_Sitter_Lexer =>
          null;
 
       when re2c_Lexer =>
+         Put_Line ("with SAL;");
          Put_Line ("with WisiToken.Lexer.re2c;");
          Put_Line ("with " & Output_File_Name_Root & "_re2c_c;");
 
@@ -1487,7 +1681,7 @@ is
       when Packrat_Generate_Algorithm =>
          Put_Line ("with WisiToken.Parse;");
 
-      when External =>
+      when External | Tree_Sitter =>
          null;
       end case;
 
@@ -1496,31 +1690,31 @@ is
       New_Line;
 
       case Common_Data.Lexer is
-      when None | Elisp_Lexer =>
+      when None | Tree_Sitter_Lexer =>
          null;
 
       when re2c_Lexer =>
-         Indent_Line ("package Lexer is new WisiToken.Lexer.re2c");
-         Indent_Line ("  (" & Output_File_Name_Root & "_re2c_c.New_Lexer,");
-         Indent_Line ("   " & Output_File_Name_Root & "_re2c_c.Free_Lexer,");
-         Indent_Line ("   " & Output_File_Name_Root & "_re2c_c.Reset_Lexer,");
-         Indent_Line ("   " & Output_File_Name_Root & "_re2c_c.Next_Token);");
-         New_Line;
+         Create_re2c_Lexer (Generate_Data, Output_File_Name_Root);
       end case;
 
       case Common_Data.Generate_Algorithm is
       when LR_Generate_Algorithm =>
-         LR_Create_Create_Parser (Input_Data, Common_Data, Generate_Data);
+         LR_Create_Create_Parse_Table (Input_Data, Common_Data, Generate_Data, 
Actions_Package_Name);
+         Create_Create_Productions (Generate_Data);
 
       when Packrat_Gen =>
          WisiToken.BNF.Generate_Packrat (Packrat_Data, Generate_Data);
-         Packrat_Create_Create_Parser (Common_Data, Generate_Data, 
Packrat_Data);
+         Create_Create_Productions (Generate_Data);
+         Packrat_Create_Create_Parser (Actions_Package_Name, Common_Data, 
Generate_Data, Packrat_Data);
 
       when Packrat_Proc =>
-         Packrat_Create_Create_Parser (Common_Data, Generate_Data, 
Packrat_Data);
+         Create_Create_Productions (Generate_Data);
+         Packrat_Create_Create_Parser (Actions_Package_Name, Common_Data, 
Generate_Data, Packrat_Data);
 
       when External =>
          External_Create_Create_Grammar (Generate_Data);
+      when Tree_Sitter =>
+         null;
       end case;
 
       case Common_Data.Interface_Kind is
@@ -1601,8 +1795,11 @@ is
       Set_Output (File);
       Indent := 1;
 
+      --  We can't use Put_File_Header here because it does not output the
+      --  file name.
       Put_Line
-        (";;; " & Output_File_Name_Root & "-process.el --- Generated parser 
support file  -*- lexical-binding:t -*-");
+        (";;; " & Output_File_Name_Root &
+           "-process.el --- Generated parser support file  -*- 
buffer-read-only:t lexical-binding:t -*-");
       Put_Command_Line (Elisp_Comment & "  ", Use_Tuple => True, Tuple => 
Tuple);
       Put_Raw_Code (Elisp_Comment, Input_Data.Raw_Code (Copyright_License));
       New_Line;
@@ -1614,10 +1811,10 @@ is
       Indent := Indent + 3;
       for Cursor in All_Tokens (Generate_Data).Iterate loop
          if Paren_1_Done then
-            Indent_Line (Name (Cursor));
+            Indent_Line (Name (Generate_Data, Cursor));
          else
             Paren_1_Done := True;
-            Put_Line (Name (Cursor));
+            Put_Line (Name (Generate_Data, Cursor));
          end if;
 
       end loop;
@@ -1659,7 +1856,7 @@ is
       Set_Output (File);
       Indent := 1;
 
-      Put_Line (";; generated by WisiToken Wisi from " & 
Input_Data.Grammar_Lexer.File_Name);
+      Put_Line (";; generated by WisiToken Wisi from " & Grammar_File_Name);
       Put_Command_Line (";; ", Use_Tuple => True, Tuple => Tuple);
       Put_Line (";;");
 
@@ -1739,7 +1936,7 @@ is
       Create (File, Out_File, Output_File_Name_Root & 
"_wisi_module_parse.gpr");
       Set_Output (File);
       Indent := 1;
-      Put_Line ("-- generated by WisiToken Wisi from " & 
Input_Data.Grammar_Lexer.File_Name);
+      Put_Line ("-- generated by WisiToken Wisi from " & Grammar_File_Name);
       Put_Command_Line ("-- ", Use_Tuple => True, Tuple => Tuple);
       Indent_Line ("with ""wisi_module_parse_common"";");
       Indent_Line ("library project " & Package_Name_Root & 
"_Wisi_Module_Parse is");
@@ -1805,7 +2002,7 @@ is
       Create (File, Out_File, Output_File_Name_Root & 
"_wisi_module_parse_agg.gpr");
       Set_Output (File);
       Indent := 1;
-      Put_Line ("-- generated by WisiToken Wisi from " & 
Input_Data.Grammar_Lexer.File_Name);
+      Put_Line ("-- generated by WisiToken Wisi from " & Grammar_File_Name);
       Put_Command_Line ("-- ", Use_Tuple => True, Tuple => Tuple);
       Indent_Line ("aggregate project " & Package_Name_Root & 
"_Wisi_Module_Parse_Agg is");
       Indent_Line ("   for Project_Path use (external (""WISI_FASTTOKEN""));");
@@ -1817,7 +2014,7 @@ is
       Create (File, Out_File, Output_File_Name_Root & 
"_wisi_module_parse_wrapper.c");
       Set_Output (File);
       Indent := 1;
-      Put_Line ("// generated by WisiToken Wisi from " & 
Input_Data.Grammar_Lexer.File_Name);
+      Put_Line ("// generated by WisiToken Wisi from " & Grammar_File_Name);
       Put_Command_Line ("// ", Use_Tuple => True, Tuple => Tuple);
       Indent_Line ("//  This file is just a wrapper around the Ada code in");
       Indent_Line ("//  *_wisi_module_parse.adb; it is needed to call 
adainit.");
@@ -1846,16 +2043,6 @@ is
    end Create_Module_Aux;
 
 begin
-   case Common_Data.Lexer is
-   when None | re2c_Lexer =>
-      null;
-
-   when Elisp_Lexer =>
-      raise User_Error with WisiToken.Generate.Error_Message
-        (Input_Data.Grammar_Lexer.File_Name, 1, "Ada_Emacs output language 
does not support " &
-           Lexer_Image (Common_Data.Lexer).all & " lexer");
-   end case;
-
    declare
       Actions_Package_Name : constant String := File_Name_To_Ada 
(Output_File_Name_Root) &
         (case Common_Data.Interface_Kind is
diff --git a/wisitoken-bnf-output_elisp_common.adb 
b/wisitoken-bnf-output_elisp_common.adb
index 2260955150..a74efd33d2 100644
--- a/wisitoken-bnf-output_elisp_common.adb
+++ b/wisitoken-bnf-output_elisp_common.adb
@@ -2,7 +2,7 @@
 --
 --  See spec
 --
---  Copyright (C) 2012, 2013, 2015, 2017 - 2019 Free Software Foundation, Inc.
+--  Copyright (C) 2012, 2013, 2015, 2017 - 2019, 2022 Free Software 
Foundation, Inc.
 --
 --  This program is free software; you can redistribute it and/or
 --  modify it under terms of the GNU General Public License as
@@ -194,7 +194,7 @@ package body WisiToken.BNF.Output_Elisp_Common is
       Indent_Line ("  '(");
       Indent := Indent + 3;
       for Pair of Tokens.Keywords loop
-         Indent_Line ("(" & (-Pair.Name) & " . " & (-Pair.Value) & ")");
+         Indent_Line ("(" & (-Pair.Name) & " . " & re2c_To_Elisp (-Pair.Value) 
& ")");
       end loop;
       for Kind of Tokens.Tokens loop
          for Token of Kind.Tokens loop
diff --git a/wisitoken-bnf.adb b/wisitoken-bnf.adb
index 29e4f60ca2..c5157ebf59 100644
--- a/wisitoken-bnf.adb
+++ b/wisitoken-bnf.adb
@@ -2,7 +2,7 @@
 --
 --  see spec
 --
---  Copyright (C) 2012 - 2015, 2017 - 2019 Free Software Foundation, Inc.
+--  Copyright (C) 2012 - 2015, 2017 - 2022 Free Software Foundation, Inc.
 --
 --  This program is free software; you can redistribute it and/or
 --  modify it under terms of the GNU General Public License as
@@ -20,10 +20,19 @@ pragma License (GPL);
 
 with Ada.Command_Line;
 with Ada.Directories;
+with Ada.Environment_Variables;
 with Ada.Text_IO;
-with Ada.Strings.Fixed;
 package body WisiToken.BNF is
 
+   function Is_Valid_Interface (Item : in String) return Boolean
+   is
+      Lower_Item : constant String := To_Lower (Item);
+   begin
+      return
+        Lower_Item = "process" or
+        Lower_Item = "module";
+   end Is_Valid_Interface;
+
    procedure Add
      (Set   : in out Generate_Set_Access;
       Tuple : in     Generate_Tuple)
@@ -39,6 +48,16 @@ package body WisiToken.BNF is
       Free (Prev);
    end Add;
 
+   function Image (Item : in Generate_Tuple) return String
+   is begin
+      return "(" & Generate_Algorithm_Image (Item.Gen_Alg).all & ", " &
+        Output_Language_Image (Item.Out_Lang).all & ", " &
+        Lexer_Image (Item.Lexer).all &
+        (if Item.Interface_Kind = None then "" else ", " & Interface_Image 
(Item.Interface_Kind).all) &
+        (if Item.Text_Rep then ", text_rep" else "") &
+        ")";
+   end Image;
+
    function To_Generate_Algorithm (Item : in String) return Generate_Algorithm
    is begin
       for I in Generate_Algorithm loop
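
The Image function added above builds a one-line description of a generate tuple, appending the interface and text_rep parts only when they carry information. The same conditional-concatenation idiom in a runnable, self-contained form; the enumeration and record types are simplified stand-ins, not WisiToken's:

with Ada.Text_IO;
procedure Tuple_Image_Demo is
   type Algorithm is (None, LALR, LR1);
   type Interface_Type is (None, Process, Module);

   type Tuple is record
      Alg            : Algorithm      := None;
      Interface_Kind : Interface_Type := None;
      Text_Rep       : Boolean        := False;
   end record;

   --  Optional parts are appended only when present, as in Image (Generate_Tuple).
   function Image (Item : in Tuple) return String
   is ("(" & Algorithm'Image (Item.Alg) &
         (if Item.Interface_Kind = None
          then ""
          else ", " & Interface_Type'Image (Item.Interface_Kind)) &
         (if Item.Text_Rep then ", text_rep" else "") &
         ")");
begin
   Ada.Text_IO.Put_Line (Image ((LALR, Process, True)));
   --  Prints: (LALR, PROCESS, text_rep)
end Tuple_Image_Demo;
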
@@ -49,6 +68,27 @@ package body WisiToken.BNF is
       raise User_Error with "invalid generate algorithm name: '" & Item & "'";
    end To_Generate_Algorithm;
 
+   function From_Generate_Env_Var return Generate_Algorithm_Set
+   is
+      Gen_String : constant String := Ada.Environment_Variables.Value 
("GENERATE", "BNF_EBNF");
+   begin
+      --  GENERATE env var defined in wisitoken_test.gpr
+      if Gen_String = "" then
+         return (Tree_Sitter => False, others => True);
+      elsif Gen_String = "BNF_EBNF_Tree_Sitter" then
+         return (others => True);
+      elsif Gen_String = "BNF_EBNF" or
+        Gen_String = "BNF" or
+        Gen_String = "EBNF"
+      then
+         return (Tree_Sitter => False, others => True);
+      elsif Gen_String = "Tree_Sitter" then
+         return (Tree_Sitter => True, others => False);
+      else
+         raise SAL.Programmer_Error with "unsupported GENERATE env var value 
'" & Gen_String & "'";
+      end if;
+   end From_Generate_Env_Var;
+
    function To_Output_Language (Item : in String) return Output_Language
    is begin
       for I in Output_Language loop
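
From_Generate_Env_Var turns the GENERATE environment variable (set by the test project files) into the set of generator algorithms to run. A runnable sketch of the same pattern, reading an environment variable with a default and filling a Boolean set indexed by an enumeration; the Algorithm type is a simplified stand-in for Generate_Algorithm, and unknown values here just fall back to the default set, whereas the real function raises Programmer_Error:

with Ada.Environment_Variables;
with Ada.Text_IO;
procedure Generate_Env_Demo is
   type Algorithm is (LALR, LR1, Packrat, Tree_Sitter);
   type Algorithm_Set is array (Algorithm) of Boolean;

   function From_Env return Algorithm_Set is
      --  Default when GENERATE is unset; as in the hunk above,
      --  "BNF_EBNF" means everything except the tree-sitter back end.
      Value : constant String :=
        Ada.Environment_Variables.Value ("GENERATE", "BNF_EBNF");
   begin
      if Value = "Tree_Sitter" then
         return (Tree_Sitter => True, others => False);
      elsif Value = "BNF_EBNF_Tree_Sitter" then
         return (others => True);
      else
         return (Tree_Sitter => False, others => True);
      end if;
   end From_Env;

   Enabled : constant Algorithm_Set := From_Env;
begin
   for Alg in Algorithm loop
      if Enabled (Alg) then
         Ada.Text_IO.Put_Line (Algorithm'Image (Alg));
      end if;
   end loop;
end Generate_Env_Demo;
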
@@ -150,14 +190,15 @@ package body WisiToken.BNF is
    end Put_Raw_Code;
 
    procedure Put_File_Header
-     (Comment_Syntax : in String_2;
-      Emacs_Mode     : in String         := "";
-      Use_Tuple      : in Boolean        := False;
-      Tuple          : in Generate_Tuple := (others => <>))
+     (Comment_Syntax   : in String_2;
+      Emacs_Local_Vars : in String         := "";
+      Use_Tuple        : in Boolean        := False;
+      Tuple            : in Generate_Tuple := (others => <>))
    is
       use Ada.Text_IO;
    begin
-      Put_Line (Comment_Syntax & "  generated parser support file." & 
Emacs_Mode);
+      Put_Line
+        (Comment_Syntax & "  generated parser support file. -*- 
buffer-read-only:t " & Emacs_Local_Vars & " -*-");
       Put_Command_Line  (Comment_Syntax & "  ", Use_Tuple, Tuple);
       Put_Line (Comment_Syntax);
    end Put_File_Header;
@@ -186,13 +227,6 @@ package body WisiToken.BNF is
       raise Not_Found;
    end Value;
 
-   function Is_Present (List : in Elisp_Action_Maps.Map; Name : in String) 
return Boolean
-   is
-      use Elisp_Action_Maps;
-   begin
-      return No_Element /= List.Find (+Name);
-   end Is_Present;
-
    function Count (Tokens : in Token_Lists.List) return Integer
    is
       Result : Integer := 0;
@@ -268,12 +302,28 @@ package body WisiToken.BNF is
             Found := True;
          end if;
       end Process;
-
    begin
       Rules.Iterate (Process'Access);
       return Found;
    end Is_Present;
 
+   function Find (Rules : in Rule_Lists.List; LHS : in String) return 
Rule_Lists.Cursor
+   is
+      use Rule_Lists;
+
+      Found : Cursor := No_Element;
+
+      procedure Process (Position : in Cursor)
+      is begin
+         if -Rules (Position).Left_Hand_Side = LHS then
+            Found := Position;
+         end if;
+      end Process;
+   begin
+      Rules.Iterate (Process'Access);
+      return Found;
+   end Find;
+
    function "+" (List : in String_Lists.List; Item : in String) return 
String_Lists.List
    is
       Result : String_Lists.List := List;
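
The new Find mirrors the existing Is_Present: it walks the rule list with Iterate and a nested Process procedure, returning the cursor of the rule whose Left_Hand_Side matches (or No_Element). The same cursor-search idiom over a standard container, runnable on its own; this version compares plain strings rather than a record field:

with Ada.Containers.Indefinite_Doubly_Linked_Lists;
with Ada.Text_IO;
procedure Find_Demo is
   package String_Lists is new
     Ada.Containers.Indefinite_Doubly_Linked_Lists (String);
   use String_Lists;

   --  Return the cursor of the first element equal to Item,
   --  No_Element if there is none.
   function Find_Name (Names : in String_Lists.List; Item : in String) return Cursor is
      Found : Cursor := No_Element;

      procedure Process (Position : in Cursor) is
      begin
         if Element (Position) = Item then
            Found := Position;
         end if;
      end Process;
   begin
      Names.Iterate (Process'Access);
      return Found;
   end Find_Name;

   Rules : String_Lists.List;
begin
   Rules.Append ("declaration");
   Rules.Append ("statement");

   Ada.Text_IO.Put_Line
     (if Has_Element (Find_Name (Rules, "statement"))
      then "found" else "not found");
end Find_Demo;
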
diff --git a/wisitoken-bnf.ads b/wisitoken-bnf.ads
index 5263008ff6..f7ccdb43f3 100644
--- a/wisitoken-bnf.ads
+++ b/wisitoken-bnf.ads
@@ -5,15 +5,12 @@
 --  The input file syntax is based on BNF syntax [1] with declarations
 --  and grammar actions.
 --
---  The Elisp and Ada_Emacs output languages are for use with the
---  Emacs wisi package.
---
 --  Reference :
 --
 --  [1] https://en.wikipedia.org/wiki/Backus%E2%80%93Naur_form
 --  [2] http://www.nongnu.org/ada-mode/wisi/wisi-user_guide.html, (info 
"(wisi-user_guide)Top")
 --
---  Copyright (C) 2012 - 2015, 2017 - 2020 Free Software Foundation, Inc.
+--  Copyright (C) 2012 - 2015, 2017 - 2022 Free Software Foundation, Inc.
 --
 --  The WisiToken package is free software; you can redistribute it
 --  and/or modify it under terms of the GNU General Public License as
@@ -34,20 +31,25 @@ with Ada.Containers.Doubly_Linked_Lists;
 with Ada.Containers.Indefinite_Doubly_Linked_Lists;
 with Ada.Containers.Ordered_Maps;
 with Ada.Containers.Vectors;
+with Ada.Strings.Fixed;
 with Ada.Strings.Unbounded;
 with Ada.Unchecked_Deallocation;
+with System.Multiprocessors;
 with WisiToken.Parse.LR;
+with WisiToken.Syntax_Trees;
 package WisiToken.BNF is
 
    --  See also WisiToken exceptions
 
    Not_Found : exception;
-   --  something not found; should be handled and converted to Syntax_ or 
Grammar_Error
+   --  Something not found; should be handled and converted to another
+   --  exception.
 
-   type Generate_Algorithm is (None, LALR, LR1, Packrat_Gen, Packrat_Proc, 
External);
+   type Generate_Algorithm is (None, LALR, LR1, Packrat_Gen, Packrat_Proc, 
External, Tree_Sitter);
    subtype Valid_Generate_Algorithm is Generate_Algorithm range LALR .. 
Generate_Algorithm'Last;
    subtype LR_Generate_Algorithm is Generate_Algorithm range LALR .. LR1;
    subtype Packrat_Generate_Algorithm is Generate_Algorithm range Packrat_Gen 
.. Packrat_Proc;
+   subtype LR_Packrat_Generate_Algorithm is Generate_Algorithm range LALR .. 
Packrat_Proc;
 
    Generate_Algorithm_Image : constant array (Generate_Algorithm) of 
String_Access_Constant :=
      (None         => new String'("None"),
@@ -55,7 +57,8 @@ package WisiToken.BNF is
       LR1          => new String'("LR1"),
       Packrat_Gen  => new String'("Packrat_Gen"),
       Packrat_Proc => new String'("Packrat_Proc"),
-      External     => new String'("External"));
+      External     => new String'("External"),
+      Tree_Sitter  => new String'("Tree_Sitter"));
    --  Suitable for Ada package names.
 
    function To_Generate_Algorithm (Item : in String) return Generate_Algorithm;
@@ -64,6 +67,8 @@ package WisiToken.BNF is
    type Generate_Algorithm_Set is array (Generate_Algorithm) of Boolean;
    type Generate_Algorithm_Set_Access is access Generate_Algorithm_Set;
 
+   function From_Generate_Env_Var return Generate_Algorithm_Set;
+
    type Output_Language is (Ada_Lang, Ada_Emacs_Lang);
    subtype Ada_Output_Language is Output_Language range Ada_Lang .. 
Ada_Emacs_Lang;
    --  _Lang to avoid colliding with the standard package Ada and
@@ -76,15 +81,15 @@ package WisiToken.BNF is
    function To_Output_Language (Item : in String) return Output_Language;
    --  Raises User_Error for invalid Item
 
-   type Lexer_Type is (None, Elisp_Lexer, re2c_Lexer);
-   subtype Valid_Lexer is Lexer_Type range Elisp_Lexer .. Lexer_Type'Last;
+   type Lexer_Type is (None, re2c_Lexer, Tree_Sitter_Lexer);
+   subtype Valid_Lexer is Lexer_Type range re2c_Lexer .. Lexer_Type'Last;
    --  We append "_Lexer" to these names to avoid colliding with the
    --  similarly-named WisiToken packages. In the grammar file, they
    --  are named by:
    Lexer_Image : constant array (Lexer_Type) of String_Access_Constant :=
-     (None        => new String'("none"),
-      Elisp_Lexer => new String'("elisp"),
-      re2c_Lexer  => new String'("re2c"));
+     (None              => new String'("none"),
+      re2c_Lexer        => new String'("re2c"),
+      Tree_Sitter_Lexer => new String'("tree_sitter"));
 
    function To_Lexer (Item : in String) return Lexer_Type;
    --  Raises User_Error for invalid Item
@@ -92,11 +97,19 @@ package WisiToken.BNF is
    type Lexer_Set is array (Lexer_Type) of Boolean;
 
    type Lexer_Generate_Algorithm_Set is array (Lexer_Type) of 
Generate_Algorithm_Set;
-   --  %if lexer change change the generated parse table
+   --  %if lexer changes the generated parse table
 
    type Interface_Type is (None, Process, Module);
    subtype Valid_Interface is Interface_Type range Process .. Module;
 
+   Interface_Image : constant array (Interface_Type) of String_Access_Constant 
:=
+     --  WORKAROUND: 'Image in GNAT Community 2020 with -gnat2020 returns 
integer
+     (None    => new String'("none"),
+      Process => new String'("process"),
+      Module  => new String'("module"));
+
+   function Is_Valid_Interface (Item : in String) return Boolean;
+
    type Generate_Tuple is record
       Gen_Alg        : Generate_Algorithm := None;
       Out_Lang       : Output_Language    := Ada_Lang;
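
Interface_Image spells out the lower-case names in an explicit constant array (the comment above notes a GNAT 'Image workaround), and Is_Valid_Interface checks candidate names case-insensitively. A small self-contained sketch of the image-array-plus-validation pattern; the access type and program names are illustrative:

with Ada.Characters.Handling;
with Ada.Text_IO;
procedure Interface_Image_Demo is
   type Interface_Type is (None, Process, Module);
   type String_Access_Constant is access constant String;

   --  Explicit lower-case images, independent of 'Image casing.
   Interface_Image : constant array (Interface_Type) of String_Access_Constant :=
     (None    => new String'("none"),
      Process => new String'("process"),
      Module  => new String'("module"));

   function Is_Valid_Interface (Item : in String) return Boolean is
      Lower : constant String := Ada.Characters.Handling.To_Lower (Item);
   begin
      return Lower = "process" or Lower = "module";
   end Is_Valid_Interface;
begin
   Ada.Text_IO.Put_Line (Interface_Image (Process).all);                  --  process
   Ada.Text_IO.Put_Line (Boolean'Image (Is_Valid_Interface ("Module")));  --  TRUE
end Interface_Image_Demo;
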
@@ -105,6 +118,8 @@ package WisiToken.BNF is
       Text_Rep       : Boolean            := False;
    end record;
 
+   function Image (Item : in Generate_Tuple) return String;
+
    type Generate_Set is array (Natural range <>) of Generate_Tuple;
    type Generate_Set_Access is access Generate_Set;
    procedure Free is new Ada.Unchecked_Deallocation (Generate_Set, 
Generate_Set_Access);
@@ -113,6 +128,23 @@ package WisiToken.BNF is
      (Set   : in out Generate_Set_Access;
       Tuple : in     Generate_Tuple);
 
+   function Text_Rep_File_Name
+     (File_Name_Root      : in String;
+      Tuple               : in Generate_Tuple;
+      Generate_Task_Count : in System.Multiprocessors.CPU_Range;
+      If_Lexer_Present    : in Boolean;
+      Test_Main           : in Boolean)
+     return String
+   is (File_Name_Root & "_" &
+         Ada.Characters.Handling.To_Lower (Generate_Algorithm_Image 
(Tuple.Gen_Alg).all) &
+         (if Tuple.Gen_Alg = LR1 and Test_Main
+          then "_t" & Ada.Strings.Fixed.Trim (Generate_Task_Count'Image, 
Ada.Strings.Both)
+          else "") &
+         (if If_Lexer_Present
+          then "_" & Lexer_Image (Tuple.Lexer).all
+          else "") &
+         "_parse_table.txt");
+
    package String_Lists is new Ada.Containers.Indefinite_Doubly_Linked_Lists 
(String);
 
    package String_Arrays is new SAL.Gen_Unbounded_Definite_Vectors
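
Text_Rep_File_Name composes the parse-table text file name from the root name, the lower-cased algorithm, an optional task-count suffix (LR1 test mains only) and an optional lexer name. A runnable sketch of the same composition as an Ada 2012 expression function with simplified parameters; with the values below it yields "ada_lr1_t4_re2c_parse_table.txt", an example of the scheme rather than a claim about actual generated names:

with Ada.Characters.Handling;
with Ada.Strings.Fixed;
with Ada.Text_IO;
procedure Table_Name_Demo is
   type Algorithm is (LALR, LR1);

   function Text_Rep_File_Name
     (Root       : in String;
      Alg        : in Algorithm;
      Task_Count : in Positive;
      Lexer      : in String;
      Test_Main  : in Boolean)
     return String
   is (Root & "_" &
         Ada.Characters.Handling.To_Lower (Algorithm'Image (Alg)) &
         (if Alg = LR1 and Test_Main
          then "_t" & Ada.Strings.Fixed.Trim (Positive'Image (Task_Count), Ada.Strings.Both)
          else "") &
         (if Lexer /= "" then "_" & Lexer else "") &
         "_parse_table.txt");
begin
   Ada.Text_IO.Put_Line
     (Text_Rep_File_Name ("ada", LR1, 4, "re2c", Test_Main => True));
   --  ada_lr1_t4_re2c_parse_table.txt
end Table_Name_Demo;
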
@@ -122,14 +154,15 @@ package WisiToken.BNF is
    type Language_Param_Type is record
       --  Set by grammar file declarations or command line options. Error
       --  recover parameters are in McKenzie_Recover_Param_Type below.
-      Case_Insensitive          : Boolean := False;
+      Case_Insensitive          : Boolean  := False;
+      Declare_Enums             : Boolean  := True;
       End_Names_Optional_Option : Ada.Strings.Unbounded.Unbounded_String;
-      Use_Language_Runtime      : Boolean := True;
+      Error_Recover             : Boolean  := False; -- True if grammar 
specifies error recover parameters.
+      LR1_Hash_Table_Size       : Positive := 113; --  Should match 
sal-gen_unbounded_definite_hash_tables.ads
       Language_Runtime_Name     : Ada.Strings.Unbounded.Unbounded_String;
-      Declare_Enums             : Boolean := True;
-      Error_Recover             : Boolean := False;
+      Partial_Recursion         : Boolean  := False;
       Start_Token               : Ada.Strings.Unbounded.Unbounded_String;
-      Partial_Recursion         : Boolean := False;
+      Use_Language_Runtime      : Boolean  := True;
    end record;
 
    type Raw_Code_Location is
@@ -172,10 +205,10 @@ package WisiToken.BNF is
    --  Otherwise output all lines.
 
    procedure Put_File_Header
-     (Comment_Syntax : in String_2;
-      Emacs_Mode     : in String         := "";
-      Use_Tuple      : in Boolean        := False;
-      Tuple          : in Generate_Tuple := (others => <>));
+     (Comment_Syntax   : in String_2;
+      Emacs_Local_Vars : in String         := "";
+      Use_Tuple        : in Boolean        := False;
+      Tuple            : in Generate_Tuple := (others => <>));
    --  Output "parser support file <emacs_mode> /n command line: " comment to 
Ada.Text_IO.Current_Output.
 
    type String_Pair_Type is record
@@ -195,42 +228,32 @@ package WisiToken.BNF is
 
    package String_Triple_Lists is new Ada.Containers.Doubly_Linked_Lists 
(String_Triple_Type);
 
-   type Elisp_Action_Type is record
-      --  Elisp name is the key
-      Action_Label : Ada.Strings.Unbounded.Unbounded_String;
-      Ada_Name     : Ada.Strings.Unbounded.Unbounded_String;
-   end record;
-
-   package Elisp_Action_Maps is new Ada.Containers.Ordered_Maps
-     (Ada.Strings.Unbounded.Unbounded_String, Elisp_Action_Type, 
Ada.Strings.Unbounded."<");
-
-   function Is_Present (List : in Elisp_Action_Maps.Map; Name : in String) 
return Boolean;
+   package String_Pair_Maps is new Ada.Containers.Ordered_Maps
+     (Ada.Strings.Unbounded.Unbounded_String, String_Pair_Type, 
Ada.Strings.Unbounded."<");
 
    type McKenzie_Recover_Param_Type is record
-      Source_Line : WisiToken.Line_Number_Type := 
WisiToken.Invalid_Line_Number;
-      --  Of the %mckenzie_cost_default declaration; we assume the others
-      --  are near.
-
-      Default_Insert                        : Natural               := 0;
-      Default_Delete_Terminal               : Natural               := 0;
-      Default_Push_Back                     : Natural               := 0; -- 
also default for undo_reduce
-      Delete                                : String_Pair_Lists.List;
-      Insert                                : String_Pair_Lists.List;
-      Push_Back                             : String_Pair_Lists.List;
-      Undo_Reduce                           : String_Pair_Lists.List;
-      Minimal_Complete_Cost_Delta           : Integer               :=
+      Default_Insert              : Natural                    := 0;
+      Default_Delete_Terminal     : Natural                    := 0;
+      Default_Push_Back           : Natural                    := 0; -- also 
default for undo_reduce
+      Delete                      : String_Pair_Lists.List;
+      Insert                      : String_Pair_Lists.List;
+      Push_Back                   : String_Pair_Lists.List;
+      Undo_Reduce                 : String_Pair_Lists.List;
+      Minimal_Complete_Cost_Delta : Integer                    :=
         WisiToken.Parse.LR.Default_McKenzie_Param.Minimal_Complete_Cost_Delta;
-      Fast_Forward                          : Integer               :=
+      Fast_Forward                : Integer                    :=
         WisiToken.Parse.LR.Default_McKenzie_Param.Fast_Forward;
-      Matching_Begin                        : Integer               :=
+      Matching_Begin              : Integer                    :=
         WisiToken.Parse.LR.Default_McKenzie_Param.Matching_Begin;
-      Ignore_Check_Fail                     : Natural               :=
+      Ignore_Check_Fail           : Natural                    :=
         WisiToken.Parse.LR.Default_McKenzie_Param.Ignore_Check_Fail;
-      Check_Limit                           : WisiToken.Token_Index :=
+      Check_Limit                 : Syntax_Trees.Sequential_Index :=
         WisiToken.Parse.LR.Default_McKenzie_Param.Check_Limit;
-      Check_Delta_Limit                     : Natural               :=
+      Zombie_Limit                : Positive :=
+        WisiToken.Parse.LR.Default_McKenzie_Param.Zombie_Limit;
+      Check_Delta_Limit           : Natural                    :=
         WisiToken.Parse.LR.Default_McKenzie_Param.Check_Delta_Limit;
-      Enqueue_Limit                         : Natural               :=
+      Enqueue_Limit               : Natural                    :=
         WisiToken.Parse.LR.Default_McKenzie_Param.Enqueue_Limit;
    end record;
 
@@ -261,11 +284,11 @@ package WisiToken.BNF is
 
    type Conflict is record
       Source_Line : WisiToken.Line_Number_Type;
-      Action_A    : Ada.Strings.Unbounded.Unbounded_String;
-      LHS_A       : Ada.Strings.Unbounded.Unbounded_String;
-      Action_B    : Ada.Strings.Unbounded.Unbounded_String;
-      LHS_B       : Ada.Strings.Unbounded.Unbounded_String;
+      Items       : String_Pair_Lists.List;
+      --  Item (I).Name = action, .Value = lhs
       On          : Ada.Strings.Unbounded.Unbounded_String;
+      Resolution  : Ada.Strings.Unbounded.Unbounded_String;
+      --  Resolution is not empty if this is from %conflict_resolution.
    end record;
 
    package Conflict_Lists is new Ada.Containers.Doubly_Linked_Lists (Conflict);
@@ -280,10 +303,16 @@ package WisiToken.BNF is
    --  in call to post parse grammar action.
 
    type RHS_Type is record
-      Tokens      : Labeled_Token_Arrays.Vector;
+      Tokens            : Labeled_Token_Arrays.Vector;
+      Auto_Token_Labels : Boolean := False;
+      --  Token labels generated by Translate_EBNF_To_BNF
+
+      Edited_Token_List : Boolean := False;
+      --  RHS modified by Translate_EBNF_To_BNF; RHS_Index 0 has all tokens.
+
       Action      : Ada.Strings.Unbounded.Unbounded_String;
       Check       : Ada.Strings.Unbounded.Unbounded_String;
-      Source_Line : WisiToken.Line_Number_Type := 
WisiToken.Invalid_Line_Number;
+      Source_Line : WisiToken.Line_Number_Type := 
WisiToken.Line_Number_Type'First;
    end record;
    package RHS_Lists is new Ada.Containers.Doubly_Linked_Lists (RHS_Type, "=");
 
@@ -291,6 +320,7 @@ package WisiToken.BNF is
       Left_Hand_Side   : aliased Ada.Strings.Unbounded.Unbounded_String;
       Right_Hand_Sides : RHS_Lists.List;
       Labels           : String_Arrays.Vector;
+      Optimized_List   : Boolean := False;
       Source_Line      : WisiToken.Line_Number_Type;
    end record;
 
@@ -298,6 +328,8 @@ package WisiToken.BNF is
 
    function Is_Present (Rules : in Rule_Lists.List; LHS : in String) return 
Boolean;
 
+   function Find (Rules : in Rule_Lists.List; LHS : in String) return 
Rule_Lists.Cursor;
+
    type Tokens is record
       Non_Grammar : Token_Lists.List;
       Keywords    : String_Pair_Lists.List;
@@ -310,14 +342,22 @@ package WisiToken.BNF is
       --  Nonterminals and terminals introduced by translating from EBNF to
       --  BNF.
 
-      --  The following are specified in grammar file declarations and used
-      --  in other declarations or actions. Faces, Indents only used if .wy
-      --  action language is elisp and output language is not elisp.
+      Lexer_Regexps : String_Pair_Lists.List; -- %lexer_regexp
+      Faces         : String_Lists.List;      -- %elisp_face
+
+      Escape_Delimiter_Doubled : String_Lists.List; -- 
%escape_delimiter_doubled
+
+      Indents : String_Pair_Maps.Map;
+      --  %elisp_indent; variables or functions used in wisi-indent-action.
+      --  Map key => elisp_name
+      --  Name    => Ada_Name
+      --  Value   => arg_count, token_index_args
 
-      re2c_Regexps : String_Pair_Lists.List; -- %re2c_regexp
-      Faces        : String_Lists.List;      -- %elisp_face
-      Indents      : String_Pair_Lists.List; -- %elisp_indent
-      Actions      : Elisp_Action_Maps.Map;  -- %elisp_action
+      Actions : String_Pair_Maps.Map;
+      --  %elisp_action custom grammar actions.
+      --  Map key => elisp name
+      --  Name    => post-parse action; navigate, face, indent.
+      --  Value   => Ada name
    end record;
 
    function "+" (Item : in String) return 
Ada.Strings.Unbounded.Unbounded_String
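
The per-grammar %elisp_indent and %elisp_action declarations now live in String_Pair_Maps keyed by the elisp name, with the pair holding the action kind (or argument info) and the Ada name, per the comments above. A standalone sketch of that shape using the standard ordered maps over Unbounded_String; the concrete key and values are invented examples:

with Ada.Containers.Ordered_Maps;
with Ada.Strings.Unbounded;
with Ada.Text_IO;
procedure Action_Map_Demo is
   use Ada.Strings.Unbounded;

   type String_Pair is record
      Name  : Unbounded_String;  --  e.g. post-parse action kind
      Value : Unbounded_String;  --  e.g. Ada subprogram name
   end record;

   package String_Pair_Maps is new Ada.Containers.Ordered_Maps
     (Unbounded_String, String_Pair);

   Actions : String_Pair_Maps.Map;
begin
   --  Map an elisp action name to its kind and the Ada implementation name.
   Actions.Insert
     (To_Unbounded_String ("wisi-face-apply"),
      (Name  => To_Unbounded_String ("face"),
       Value => To_Unbounded_String ("Face_Apply")));

   for Position in Actions.Iterate loop
      Ada.Text_IO.Put_Line
        (To_String (String_Pair_Maps.Key (Position)) & " => " &
           To_String (Actions (Position).Value));
   end loop;
end Action_Map_Demo;
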
diff --git a/wisitoken-followed_by.adb b/wisitoken-followed_by.adb
deleted file mode 100644
index e254bb6608..0000000000
--- a/wisitoken-followed_by.adb
+++ /dev/null
@@ -1,207 +0,0 @@
---  Abstract :
---
---  Show productions where a token is followed by another token
---
---  Copyright (C) 2020 Stephen Leake All Rights Reserved.
---
---  This library is free software;  you can redistribute it and/or modify it
---  under terms of the  GNU General Public License  as published by the Free
---  Software  Foundation;  either version 3,  or (at your  option) any later
---  version. This library is distributed in the hope that it will be useful,
---  but WITHOUT ANY WARRANTY;  without even the implied warranty of MERCHAN-
---  TABILITY or FITNESS FOR A PARTICULAR PURPOSE.
-
-pragma License (GPL);
-
-with Ada.Command_Line;
-with WisiToken.BNF.Generate_Utils;
-with WisiToken.Generate;
-with WisiToken.Parse.LR.Parser_No_Recover;
-with WisiToken.Productions;
-with WisiToken.Text_IO_Trace;
-with WisiToken_Grammar_Runtime;
-with Wisitoken_Grammar_Actions;
-with Wisitoken_Grammar_Main;
-procedure WisiToken.Followed_By
-is
-   use all type WisiToken_Grammar_Runtime.Meta_Syntax;
-
-   procedure Put_Usage
-   is
-      use Ada.Text_IO;
-   begin
-      Put_Line ("wisitoken-followed_by <grammar file> <token a> <token b>");
-   end Put_Usage;
-
-   function Last
-     (Grammar              : in Productions.Prod_Arrays.Vector;
-      Has_Empty_Production : in Token_ID_Set;
-      First_Terminal       : in Token_ID)
-     return Token_Array_Token_Set
-   is
-      function Last
-        (Grammar              : in WisiToken.Productions.Prod_Arrays.Vector;
-         Has_Empty_Production : in Token_ID_Set;
-         First_Terminal       : in Token_ID;
-         Non_Terminal         : in Token_ID)
-        return Token_ID_Set
-      is
-         Search_Tokens : Token_ID_Set := (Grammar.First_Index .. 
Grammar.Last_Index => False);
-      begin
-         Search_Tokens (Non_Terminal) := True;
-
-         return Result : Token_ID_Set := (First_Terminal .. Grammar.Last_Index 
=> False) do
-            while Any (Search_Tokens) loop
-               declare
-                  Added_Tokens   : Token_ID_Set := (First_Terminal .. 
Grammar.Last_Index      => False);
-                  Added_Nonterms : Token_ID_Set := (Grammar.First_Index .. 
Grammar.Last_Index => False);
-               begin
-                  for Prod of Grammar loop
-                     if Search_Tokens (Prod.LHS) then
-                        for RHS of Prod.RHSs loop
-                           for ID of reverse RHS.Tokens loop
-                              if not Result (ID) then
-                                 Added_Tokens (ID) := True;
-                                 if ID in Added_Nonterms'Range then
-                                    Added_Nonterms (ID) := True;
-                                 end if;
-                              end if;
-
-                              if ID in Has_Empty_Production'Range and then 
Has_Empty_Production (ID) then
-                                 null;
-                              else
-                                 exit;
-                              end if;
-                           end loop;
-                        end loop;
-                     end if;
-                  end loop;
-
-                  Result        := Result or Added_Tokens;
-                  Search_Tokens := Added_Nonterms;
-               end;
-            end loop;
-         end return;
-      end Last;
-
-      procedure Set_Slice (Result : in out Token_Array_Token_Set; I : 
Token_ID; Value : in Token_ID_Set)
-      is begin
-         for J in Result'Range (2) loop
-            Result (I, J) := Value (J);
-         end loop;
-      end Set_Slice;
-
-   begin
-      return Result : Token_Array_Token_Set :=
-        (Grammar.First_Index .. Grammar.Last_Index =>
-           (First_Terminal .. Grammar.Last_Index => False))
-      do
-         for I in Result'Range loop
-            Set_Slice (Result, I, Last (Grammar, Has_Empty_Production, 
First_Terminal, I));
-         end loop;
-      end return;
-   end Last;
-
-   Trace          : aliased WisiToken.Text_IO_Trace.Trace 
(Wisitoken_Grammar_Actions.Descriptor'Access);
-   Input_Data     : aliased WisiToken_Grammar_Runtime.User_Data_Type;
-   Grammar_Parser : WisiToken.Parse.LR.Parser_No_Recover.Parser;
-
-   Token_A_Name : Ada.Strings.Unbounded.Unbounded_String;
-   Token_B_Name : Ada.Strings.Unbounded.Unbounded_String;
-begin
-   Wisitoken_Grammar_Main.Create_Parser
-     (Parser    => Grammar_Parser,
-      Trace     => Trace'Unchecked_Access,
-      User_Data => Input_Data'Unchecked_Access);
-
-   declare
-      use Ada.Command_Line;
-   begin
-      if Argument_Count /= 3 then
-         Put_Usage;
-      end if;
-
-      Grammar_Parser.Lexer.Reset_With_File (Argument (1));
-
-      Token_A_Name := +Argument (2);
-      Token_B_Name := +Argument (3);
-   end;
-
-   Grammar_Parser.Parse;
-   Grammar_Parser.Execute_Actions; -- Meta phase.
-
-   if Input_Data.Meta_Syntax = WisiToken_Grammar_Runtime.EBNF_Syntax then
-      WisiToken_Grammar_Runtime.Translate_EBNF_To_BNF 
(Grammar_Parser.Parsers.First_State_Ref.Tree, Input_Data);
-      if WisiToken.Generate.Error then
-         raise WisiToken.Grammar_Error with "errors during translating EBNF to 
BNF: aborting";
-      end if;
-   end if;
-
-   Input_Data.Reset;
-   Input_Data.Phase := WisiToken_Grammar_Runtime.Other;
-   Grammar_Parser.Execute_Actions; -- populates Input_Data.Tokens
-
-   declare
-      use Ada.Text_IO;
-
-      Generate_Data : aliased WisiToken.BNF.Generate_Utils.Generate_Data :=
-        WisiToken.BNF.Generate_Utils.Initialize (Input_Data, Ignore_Conflicts 
=> True);
-      --  Builds Generate_Data.Descriptor, Generate_Data.Grammar
-
-      Nullable : constant Token_Array_Production_ID := 
WisiToken.Generate.Nullable (Generate_Data.Grammar);
-      Has_Empty_Production : constant Token_ID_Set := 
WisiToken.Generate.Has_Empty_Production (Nullable);
-
-      First_Nonterm_Set : constant Token_Array_Token_Set := 
WisiToken.Generate.First
-        (Generate_Data.Grammar, Has_Empty_Production, 
Generate_Data.Descriptor.First_Terminal);
-
-      Last_Nonterm_Set : constant Token_Array_Token_Set := Last
-        (Generate_Data.Grammar, Has_Empty_Production, 
Generate_Data.Descriptor.First_Terminal);
-
-      Token_A    : constant Token_ID := BNF.Generate_Utils.Find_Token_ID 
(Generate_Data, -Token_A_Name);
-      Token_B    : constant Token_ID := BNF.Generate_Utils.Find_Token_ID 
(Generate_Data, -Token_B_Name);
-      Need_Comma : Boolean           := False;
-
-      procedure Put (LHS : in Token_ID; RHS : in Natural)
-      is
-      begin
-         if Need_Comma then
-            Put (", ");
-         else
-            Need_Comma := True;
-         end if;
-         Put (Trimmed_Image ((LHS, RHS)));
-      end Put;
-
-   begin
-      for LHS in Generate_Data.Grammar.First_Index .. 
Generate_Data.Grammar.Last_Index loop
-         declare
-            use WisiToken.Productions;
-            Prod : Instance renames Generate_Data.Grammar (LHS);
-         begin
-            for I in Prod.RHSs.First_Index .. Prod.RHSs.Last_Index loop
-               declare
-                  Tokens : Token_ID_Arrays.Vector renames Prod.RHSs (I).Tokens;
-               begin
-                  for J in Tokens.First_Index .. Tokens.Last_Index loop
-                     if Tokens (J) = Token_A or
-                       (Tokens (J) in Last_Nonterm_Set'Range (1) and then
-                          Last_Nonterm_Set (Tokens (J), Token_A))
-                     then
-                        if J < Tokens.Last_Index then
-                           if Tokens (J + 1) in First_Nonterm_Set'Range (1) 
then
-                              if First_Nonterm_Set (Tokens (J + 1), Token_B) 
then
-                                 Put (LHS, I);
-                              end if;
-                           elsif Tokens (J + 1) = Token_B then
-                              Put (LHS, I);
-                           end if;
-                        end if;
-                     end if;
-                  end loop;
-               end;
-            end loop;
-         end;
-      end loop;
-   end;
-
-end WisiToken.Followed_By;
diff --git a/wisitoken-gen_token_enum.adb b/wisitoken-gen_token_enum.adb
deleted file mode 100644
index 0b178320d8..0000000000
--- a/wisitoken-gen_token_enum.adb
+++ /dev/null
@@ -1,133 +0,0 @@
---  Abstract :
---
---  See spec
---
---  Copyright (C) 2017, 2018 Free Software Foundation, Inc.
---
---  This library is free software;  you can redistribute it and/or modify it
---  under terms of the  GNU General Public License  as published by the Free
---  Software  Foundation;  either version 3,  or (at your  option) any later
---  version. This library is distributed in the hope that it will be useful,
---  but WITHOUT ANY WARRANTY;  without even the implied warranty of MERCHAN-
---  TABILITY or FITNESS FOR A PARTICULAR PURPOSE.
-
---  As a special exception under Section 7 of GPL version 3, you are granted
---  additional permissions described in the GCC Runtime Library Exception,
---  version 3.1, as published by the Free Software Foundation.
-
-pragma License (GPL);
-
-with Ada.Characters.Handling;
-with WisiToken.Wisi_Ada;
-package body WisiToken.Gen_Token_Enum is
-
-   function Token_Enum_Image return Token_ID_Array_String
-   is
-      use Ada.Characters.Handling;
-      Result : Token_ID_Array_String (Token_ID'First .. +Last_Nonterminal);
-   begin
-      for I in Token_Enum_ID loop
-         if I <= Last_Terminal then
-            Result (+I) := new String'(Token_Enum_ID'Image (I));
-         else
-            Result (+I) := new String'(To_Lower (Token_Enum_ID'Image (I)));
-         end if;
-      end loop;
-      return Result;
-   end Token_Enum_Image;
-
-   function To_Syntax (Item : in Enum_Syntax) return 
WisiToken.Lexer.Regexp.Syntax
-   is
-      Result : WisiToken.Lexer.Regexp.Syntax (Token_ID'First .. 
+Last_Terminal);
-   begin
-      for I in Result'Range loop
-         Result (I) := Item (-I);
-      end loop;
-      return Result;
-   end To_Syntax;
-
-   function "&" (Left, Right : in Token_Enum_ID) return Token_ID_Arrays.Vector
-   is begin
-      return Result : Token_ID_Arrays.Vector do
-         Result.Append (+Left);
-         Result.Append (+Right);
-      end return;
-   end "&";
-
-   function "&"
-     (Left  : in Token_ID_Arrays.Vector;
-      Right : in Token_Enum_ID)
-     return Token_ID_Arrays.Vector
-   is begin
-      return Result : Token_ID_Arrays.Vector := Left do
-         Result.Append (+Right);
-      end return;
-   end "&";
-
-   function "+"
-     (Left  : in Token_Enum_ID;
-      Right : in WisiToken.Syntax_Trees.Semantic_Action)
-     return WisiToken.Productions.Right_Hand_Side
-   is begin
-      return WisiToken.Wisi_Ada."+" (+Left, Right);
-   end "+";
-
-   function "<="
-     (Left  : in Token_Enum_ID;
-      Right : in WisiToken.Productions.Right_Hand_Side)
-     return WisiToken.Productions.Instance
-   is begin
-      return WisiToken.Wisi_Ada."<=" (+Left, Productions.RHS_Arrays.To_Vector 
(Right, 1));
-   end "<=";
-
-   function To_Nonterminal_Array_Token_Set
-     (Item : in Nonterminal_Array_Token_Set)
-     return WisiToken.Token_Array_Token_Set
-   is
-      Result : Token_Array_Token_Set :=
-        (LR1_Descriptor.First_Nonterminal .. LR1_Descriptor.Last_Nonterminal =>
-           (LR1_Descriptor.First_Terminal .. LR1_Descriptor.Last_Nonterminal 
=> False));
-   begin
-      for I in Item'Range (1) loop
-         for J in Item'Range (2) loop
-            Result (+I, +J) := Item (I, J);
-         end loop;
-      end loop;
-      return Result;
-   end To_Nonterminal_Array_Token_Set;
-
-   function To_Nonterminal_Array_Terminal_Set
-     (Item : in Nonterminal_Array_Terminal_Set)
-     return WisiToken.Token_Array_Token_Set
-   is
-      Result : Token_Array_Token_Set :=
-        (LR1_Descriptor.First_Nonterminal .. LR1_Descriptor.Last_Nonterminal =>
-           (LR1_Descriptor.First_Terminal .. LR1_Descriptor.Last_Terminal => 
False));
-   begin
-      for I in Item'Range (1) loop
-         for J in Item'Range (2) loop
-            Result (+I, +J) := Item (I, J);
-         end loop;
-      end loop;
-      return Result;
-   end To_Nonterminal_Array_Terminal_Set;
-
-   function "+" (Item : in Token_Array) return WisiToken.Token_ID_Set
-   is
-      Result : Token_ID_Set := (LR1_Descriptor.First_Terminal .. 
LR1_Descriptor.Last_Terminal => False);
-   begin
-      for I in Item'Range loop
-         Result (+Item (I)) := True;
-      end loop;
-      return Result;
-   end "+";
-
-   function "+" (Item : in Token_Enum_ID) return WisiToken.Token_ID_Set
-   is begin
-      return +Token_Array'(1 => Item);
-   end "+";
-
-begin
-   LR1_Descriptor.Image := Token_Enum_Image;
-   LALR_Descriptor.Image := LR1_Descriptor.Image;
-end WisiToken.Gen_Token_Enum;
diff --git a/wisitoken-gen_token_enum.ads b/wisitoken-gen_token_enum.ads
deleted file mode 100644
index 05bdb99f9b..0000000000
--- a/wisitoken-gen_token_enum.ads
+++ /dev/null
@@ -1,122 +0,0 @@
---  Abstract :
---
---  Support for an enumerated token type
---
---  Copyright (C) 2017 - 2019 Free Software Foundation, Inc.
---
---  This library is free software;  you can redistribute it and/or modify it
---  under terms of the  GNU General Public License  as published by the Free
---  Software  Foundation;  either version 3,  or (at your  option) any later
---  version. This library is distributed in the hope that it will be useful,
---  but WITHOUT ANY WARRANTY;  without even the implied warranty of MERCHAN-
---  TABILITY or FITNESS FOR A PARTICULAR PURPOSE.
-
---  As a special exception under Section 7 of GPL version 3, you are granted
---  additional permissions described in the GCC Runtime Library Exception,
---  version 3.1, as published by the Free Software Foundation.
-
-pragma License (GPL);
-
-with WisiToken.Lexer.Regexp;
-with WisiToken.Productions;
-with WisiToken.Syntax_Trees;
-generic
-   type Token_Enum_ID is (<>);
-   First_Terminal    : Token_Enum_ID;
-   Last_Terminal     : Token_Enum_ID;
-   First_Nonterminal : Token_Enum_ID;
-   Last_Nonterminal  : Token_Enum_ID;
-   EOF_ID            : Token_Enum_ID;
-   Accept_ID         : Token_Enum_ID;
-   Case_Insensitive  : Boolean;
-package WisiToken.Gen_Token_Enum is
-
-   function "+" (Item : in Token_Enum_ID) return Token_ID
-     is (Token_ID'First + Token_Enum_ID'Pos (Item));
-
-   function "-" (Item : in Token_ID) return Token_Enum_ID
-     is (Token_Enum_ID'Val (Item - Token_ID'First));
-
-   function Token_Enum_Image return Token_ID_Array_String;
-
-   subtype Terminal_Enum_ID is Token_Enum_ID range First_Terminal .. 
Last_Terminal;
-   subtype Nonterminal_Enum_ID is Token_Enum_ID range First_Nonterminal .. 
Last_Nonterminal;
-
-   LR1_Descriptor : aliased WisiToken.Descriptor :=
-     (First_Terminal       => +First_Terminal,
-      Last_Terminal        => +Last_Terminal,
-      First_Nonterminal    => +First_Nonterminal,
-      Last_Nonterminal     => +Last_Nonterminal,
-      EOI_ID               => +EOF_ID,
-      Accept_ID            => +Accept_ID,
-      Case_Insensitive     => Case_Insensitive,
-      New_Line_ID          => Invalid_Token_ID,
-      String_1_ID          => Invalid_Token_ID,
-      String_2_ID          => Invalid_Token_ID,
-      Image                => (others => null), --  set in body elaboration 
time code
-      Terminal_Image_Width => Terminal_Enum_ID'Width,
-      Image_Width          => Token_Enum_ID'Width,
-      Last_Lookahead       => +Last_Terminal);
-
-   LALR_Descriptor : aliased WisiToken.Descriptor :=
-     (First_Terminal       => +First_Terminal,
-      Last_Terminal        => +Last_Terminal,
-      First_Nonterminal    => +First_Nonterminal,
-      Last_Nonterminal     => +Last_Nonterminal,
-      EOI_ID               => +EOF_ID,
-      Accept_ID            => +Accept_ID,
-      Case_Insensitive     => Case_Insensitive,
-      New_Line_ID          => Invalid_Token_ID,
-      String_1_ID          => Invalid_Token_ID,
-      String_2_ID          => Invalid_Token_ID,
-      Image                => (others => null),
-      Terminal_Image_Width => Terminal_Enum_ID'Width,
-      Image_Width          => Token_Enum_ID'Width,
-      Last_Lookahead       => +First_Nonterminal);
-
-   type Enum_Syntax is array (Token_Enum_ID range Token_Enum_ID'First .. 
Last_Terminal) of
-     WisiToken.Lexer.Regexp.Syntax_Item;
-
-   function To_Syntax (Item : in Enum_Syntax) return 
WisiToken.Lexer.Regexp.Syntax;
-
-   function "&" (Left, Right : in Token_Enum_ID) return Token_ID_Arrays.Vector;
-
-   function "&"
-     (Left  : in Token_ID_Arrays.Vector;
-      Right : in Token_Enum_ID)
-     return Token_ID_Arrays.Vector;
-
-   function "+" (Left : in Token_Enum_ID; Right : in 
Syntax_Trees.Semantic_Action) return Productions.Right_Hand_Side;
-
-   function "<="
-     (Left  : in Token_Enum_ID;
-      Right : in WisiToken.Productions.Right_Hand_Side)
-     return WisiToken.Productions.Instance;
-
-   ----------
-   --  For unit tests
-
-   subtype Terminal_ID is Token_Enum_ID range First_Terminal .. Last_Terminal;
-   subtype Nonterminal_ID is Token_Enum_ID range First_Nonterminal .. 
Last_Nonterminal;
-   subtype Grammar_ID is Token_Enum_ID range First_Terminal .. 
Last_Nonterminal;
-
-   type Nonterminal_Array_Token_Set is array (Nonterminal_ID, Grammar_ID) of 
Boolean;
-
-   function To_Nonterminal_Array_Token_Set
-     (Item : in Nonterminal_Array_Token_Set)
-     return WisiToken.Token_Array_Token_Set;
-
-   type Nonterminal_Array_Terminal_Set is array (Nonterminal_ID, Terminal_ID) 
of Boolean;
-
-   function To_Nonterminal_Array_Terminal_Set
-     (Item : in Nonterminal_Array_Terminal_Set)
-     return WisiToken.Token_Array_Token_Set;
-
-   type Nonterminal_ID_Set is array (Nonterminal_ID) of Boolean;
-
-   type Token_Array is array (Positive range <>) of Token_Enum_ID;
-
-   function "+" (Item : in Token_Array) return WisiToken.Token_ID_Set;
-   function "+" (Item : in Token_Enum_ID) return WisiToken.Token_ID_Set;
-
-end WisiToken.Gen_Token_Enum;
diff --git a/wisitoken-generate-lr-lalr_generate.adb 
b/wisitoken-generate-lr-lalr_generate.adb
index e5440788cb..5f971dd103 100644
--- a/wisitoken-generate-lr-lalr_generate.adb
+++ b/wisitoken-generate-lr-lalr_generate.adb
@@ -2,7 +2,7 @@
 --
 --  See spec.
 --
---  Copyright (C) 2002 - 2005, 2008 - 2015, 2017 - 2020 Free Software 
Foundation, Inc.
+--  Copyright (C) 2002 - 2005, 2008 - 2015, 2017 - 2022 Free Software 
Foundation, Inc.
 --
 --  This file is part of the WisiToken package.
 --
@@ -20,6 +20,7 @@
 
 pragma License (Modified_GPL);
 
+with Ada.Calendar;
 with Ada.Containers;
 with Ada.Text_IO;
 with SAL.Gen_Definite_Doubly_Linked_Lists_Sorted;
@@ -65,16 +66,11 @@ package body WisiToken.Generate.LR.LALR_Generate is
      is (LR1_Items.Item_Lists.Variable_Ref
            (LR1_Items.Find (Prod => (ID.LHS, ID.RHS), Dot => ID.Dot, Set => 
Kernels (ID.State))));
 
-   function Propagate_Lookahead (Descriptor : in WisiToken.Descriptor) return 
Token_ID_Set_Access
+   function Propagate_Lookahead (Descriptor : in WisiToken.Descriptor) return 
LR1_Items.Lookahead
    is begin
-      return new Token_ID_Set'(LR1_Items.To_Lookahead 
(Descriptor.Last_Lookahead, Descriptor));
+      return LR1_Items.To_Lookahead (Descriptor.Last_Lookahead);
    end Propagate_Lookahead;
 
-   function Null_Lookahead (Descriptor : in WisiToken.Descriptor) return 
Token_ID_Set_Access
-   is begin
-      return new Token_ID_Set'(Descriptor.First_Terminal .. 
Descriptor.Last_Lookahead => False);
-   end Null_Lookahead;
-
    ----------
    --  Debug output
 
@@ -113,10 +109,11 @@ package body WisiToken.Generate.LR.LALR_Generate is
          if Item.Dot /= No_Index then
 
             declare
-               Dot      : constant Token_ID_Arrays.Cursor := 
Productions.Constant_Ref_RHS
-                 (Grammar, Item.Prod).Tokens.To_Cursor (Item.Dot);
-               Dot_ID   : constant Token_ID               := Element (Dot);
-               Next_Dot : constant Token_ID_Arrays.Cursor := Next (Dot);
+               Tokens      : Token_ID_Arrays.Vector renames 
Productions.Constant_Ref_RHS
+                 (Grammar, Item.Prod).Tokens;
+               Dot      : constant Token_ID_Arrays.Cursor := Tokens.To_Cursor 
(Item.Dot);
+               Dot_ID   : constant Token_ID               := Tokens (Dot);
+               Next_Dot : constant Token_ID_Arrays.Cursor := Tokens.Next (Dot);
             begin
                --  If Symbol = EOF_Token, this is the start symbol accept
                --  production; don't need a kernel with dot after EOF.
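
This hunk (and several below) renames the production's token vector once and then indexes and advances through the container ("Tokens (Dot)", "Tokens.Next (Dot)") instead of calling Element and Next on the bare cursor, so the RHS is looked up once. A small sketch of the renames-plus-indexing idiom on a standard vector; WisiToken's Token_ID_Arrays is not this package, and since standard Ada.Containers.Vectors has no container-prefixed Next, the plain Next is used here:

with Ada.Containers.Vectors;
with Ada.Text_IO;
procedure Cursor_Index_Demo is
   package Int_Vectors is new Ada.Containers.Vectors (Positive, Integer);
   use Int_Vectors;

   V : Vector;
begin
   V.Append (10);
   V.Append (20);
   V.Append (30);

   declare
      --  Rename the vector once, then index by cursor instead of
      --  calling Element (Dot).
      Tokens   : Vector renames V;
      Dot      : constant Cursor  := Tokens.To_Cursor (2);
      Dot_ID   : constant Integer := Tokens (Dot);
      Next_Dot : constant Cursor  := Next (Dot);
   begin
      Ada.Text_IO.Put_Line (Integer'Image (Dot_ID));            --  20
      Ada.Text_IO.Put_Line (Integer'Image (Tokens (Next_Dot))); --  30
   end;
end Cursor_Index_Demo;
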
@@ -127,7 +124,7 @@ package body WisiToken.Generate.LR.LALR_Generate is
                   Goto_Set.Set.Insert
                     ((Prod       => Item.Prod,
                       Dot        => To_Index (Next_Dot),
-                      Lookaheads => new Token_ID_Set'(Item.Lookaheads.all)));
+                      Lookaheads => Item.Lookaheads));
 
                   if Trace_Generate_Table > Detail then
                      Ada.Text_IO.Put_Line ("LALR_Goto_Transitions 1 " & Image 
(Symbol, Descriptor));
@@ -146,16 +143,16 @@ package body WisiToken.Generate.LR.LALR_Generate is
                            P_ID       : constant Production_ID          := 
(Prod.LHS, RHS_2_I);
                            Tokens     : Token_ID_Arrays.Vector renames 
Prod.RHSs (RHS_2_I).Tokens;
                            Dot_2      : constant Token_ID_Arrays.Cursor := 
Tokens.First;
-                           Next_Dot_2 : constant Token_ID_Arrays.Cursor := 
Next (Dot_2);
+                           Next_Dot_2 : constant Token_ID_Arrays.Cursor := 
Tokens.Next (Dot_2);
                         begin
                            if (Dot_ID = Prod.LHS or First_Nonterm_Set (Dot_ID, 
Prod.LHS)) and
-                             (Has_Element (Dot_2) and then Element (Dot_2) = 
Symbol)
+                             (Has_Element (Dot_2) and then Tokens (Dot_2) = 
Symbol)
                            then
                               if not Has_Element (Find (P_ID, To_Index 
(Next_Dot_2), Goto_Set)) then
                                  Goto_Set.Set.Insert
                                    ((Prod       => P_ID,
                                      Dot        => To_Index (Next_Dot_2),
-                                     Lookaheads => Null_Lookahead 
(Descriptor)));
+                                     Lookaheads => Null_Lookahead));
 
                                  --  else already in goto set
                               end if;
@@ -186,24 +183,23 @@ package body WisiToken.Generate.LR.LALR_Generate is
 
       First_State_Index : constant State_Index := 0;
       Kernels           : LR1_Items.Item_Set_List;
-      Kernel_Tree       : LR1_Items.Item_Set_Trees.Tree; -- for fast find
+      Kernel_Tree       : LR1_Items.Item_Set_Tree; -- for fast find
       States_To_Check   : State_Index_Queues.Queue;
       Checking_State    : State_Index;
+
+      First_Item_Set : Item_Set :=
+        (Set               => Item_Lists.To_List
+           ((Prod          => (Grammar.First_Index, 0),
+             Dot           => Grammar (Grammar.First_Index).RHSs 
(0).Tokens.First_Index (No_Index_If_Empty => True),
+             Lookaheads    => Null_Lookahead)),
+         Tree_Node         =>
+           (State          => First_State_Index,
+            others         => <>),
+         others            => <>);
    begin
       Kernels.Set_First_Last (First_State_Index, First_State_Index - 1);
 
-      Add (Grammar,
-           (Set               => Item_Lists.To_List
-              ((Prod          => (Grammar.First_Index, 0),
-                Dot           => Grammar (Grammar.First_Index).RHSs 
(0).Tokens.First_Index,
-                Lookaheads    => Null_Lookahead (Descriptor))),
-            Goto_List         => <>,
-            Dot_IDs           => <>,
-            State             => First_State_Index),
-           Kernels,
-           Kernel_Tree,
-           Descriptor,
-           Include_Lookaheads => False);
+      Add (Grammar, First_Item_Set, Kernels, Kernel_Tree, Descriptor, 
Kernel_Tree.Rows, Include_Lookaheads => False);
 
       States_To_Check.Put (First_State_Index);
       loop
@@ -223,44 +219,48 @@ package body WisiToken.Generate.LR.LALR_Generate is
             declare
                New_Item_Set : Item_Set := LALR_Goto_Transitions
                  (Kernels (Checking_State), Symbol, First_Nonterm_Set, 
Grammar, Descriptor);
-               Found_State : Unknown_State_Index;
             begin
                if New_Item_Set.Set.Length > 0 then
 
-                  Found_State := Find (New_Item_Set, Kernel_Tree, 
Match_Lookaheads => False);
-
-                  if Found_State = Unknown_State then
-                     New_Item_Set.State := Kernels.Last_Index + 1;
-
-                     States_To_Check.Put (New_Item_Set.State);
+                  New_Item_Set.Tree_Node.State := Kernels.Last_Index + 1;
 
-                     Add (Grammar, New_Item_Set, Kernels, Kernel_Tree, 
Descriptor, Include_Lookaheads => False);
+                  Compute_Key_Hash (New_Item_Set, Kernel_Tree.Rows, Grammar, 
Descriptor, Include_Lookaheads => False);
 
-                     if Trace_Generate_Table > Detail then
-                        Ada.Text_IO.Put_Line ("  adding state" & 
Unknown_State_Index'Image (Kernels.Last_Index));
+                  declare
+                     Found      : Boolean;
+                     Found_Ref  : constant 
Item_Set_Trees.Constant_Reference_Type := Kernel_Tree.Find_Or_Insert
+                       (New_Item_Set.Tree_Node, Found);
+                  begin
+                     if not Found then
+                        States_To_Check.Put (New_Item_Set.Tree_Node.State);
 
-                        Ada.Text_IO.Put_Line
-                          ("  state" & Unknown_State_Index'Image 
(Checking_State) &
-                             " adding goto on " & Image (Symbol, Descriptor) & 
" to state" &
-                             Unknown_State_Index'Image (Kernels.Last_Index));
-                     end if;
-
-                     Kernels (Checking_State).Goto_List.Insert ((Symbol, 
Kernels.Last_Index));
-                  else
+                        Kernels.Append (New_Item_Set);
 
-                     --  If there's not already a goto entry between these two 
sets, create one.
-                     if not Is_In ((Symbol, Found_State), Kernels 
(Checking_State).Goto_List) then
                         if Trace_Generate_Table > Detail then
+                           Ada.Text_IO.Put_Line ("  adding state" & 
Unknown_State_Index'Image (Kernels.Last_Index));
+
                            Ada.Text_IO.Put_Line
                              ("  state" & Unknown_State_Index'Image 
(Checking_State) &
                                 " adding goto on " & Image (Symbol, 
Descriptor) & " to state" &
-                                Unknown_State_Index'Image (Found_State));
-
+                                Unknown_State_Index'Image 
(Kernels.Last_Index));
                         end if;
 
-                        Kernels (Checking_State).Goto_List.Insert ((Symbol, 
Found_State));
+                        Kernels (Checking_State).Goto_List.Insert ((Symbol, 
Kernels.Last_Index));
+                     else
+                        --  If there's not already a goto entry between these 
two sets, create one.
+                        if not Is_In ((Symbol, Found_Ref.State), Kernels 
(Checking_State).Goto_List) then
+                           if Trace_Generate_Table > Detail then
+                              Ada.Text_IO.Put_Line
+                                ("  state" & Unknown_State_Index'Image 
(Checking_State) &
+                                   " adding goto on " & Image (Symbol, 
Descriptor) & " to state" &
+                                   Unknown_State_Index'Image 
(Found_Ref.State));
+
+                           end if;
+
+                           Kernels (Checking_State).Goto_List.Insert ((Symbol, 
Found_Ref.State));
+                        end if;
                      end if;
-                  end if;
+                  end;
                end if;
             end;
          end loop;
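
Here the kernel for a prospective new state gets its key and hash computed up front, and a single Find_Or_Insert on the kernel tree decides the rest: if the kernel is new it becomes the next state, is appended to Kernels and queued on States_To_Check, and a goto entry is added; if it already exists, only a goto to the existing state is added. A compact, self-contained sketch of that find-or-insert worklist pattern using a standard hashed map, with the item-set key reduced to a string; this is not the WisiToken data structure:

with Ada.Containers.Indefinite_Hashed_Maps;
with Ada.Containers.Indefinite_Doubly_Linked_Lists;
with Ada.Strings.Hash;
with Ada.Text_IO;
procedure Find_Or_Insert_Demo is
   use Ada.Text_IO;

   package Key_Maps is new Ada.Containers.Indefinite_Hashed_Maps
     (Key_Type        => String,   --  stands in for the item-set key
      Element_Type    => Natural,  --  the state number
      Hash            => Ada.Strings.Hash,
      Equivalent_Keys => "=");

   package Work_Lists is new Ada.Containers.Indefinite_Doubly_Linked_Lists (String);

   Known      : Key_Maps.Map;
   To_Check   : Work_Lists.List;   --  states still to expand
   Next_State : Natural := 0;

   --  Look the kernel up by its key; if it is new, assign it the next
   --  state number and queue it, otherwise reuse the existing state.
   procedure Find_Or_Insert (Key : in String; State : out Natural) is
      Position : Key_Maps.Cursor;
      Inserted : Boolean;
   begin
      Known.Insert (Key, Next_State, Position, Inserted);
      if Inserted then
         State      := Next_State;
         Next_State := Next_State + 1;
         To_Check.Append (Key);
      else
         State := Key_Maps.Element (Position);
      end if;
   end Find_Or_Insert;

   State : Natural;
begin
   Find_Or_Insert ("kernel A", State);  --  new: state 0, queued
   Find_Or_Insert ("kernel B", State);  --  new: state 1, queued
   Find_Or_Insert ("kernel A", State);  --  already known: state 0
   Put_Line ("states:" & Natural'Image (Next_State) &
               ", queued:" & Ada.Containers.Count_Type'Image (To_Check.Length));
end Find_Or_Insert_Demo;
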
@@ -331,7 +331,7 @@ package body WisiToken.Generate.LR.LALR_Generate is
 
       Spontaneous_Count : Integer := 0;
    begin
-      if Trace_Generate_Table > Outline then
+      if Trace_Generate_Table > Extra then
          Ada.Text_IO.Put_Line ("  closure_item: ");
          LR1_Items.Put (Grammar, Descriptor, Closure_Item);
          Ada.Text_IO.New_Line;
@@ -342,10 +342,11 @@ package body WisiToken.Generate.LR.LALR_Generate is
       end if;
 
       declare
-         Dot        : constant Token_ID_Arrays.Cursor := 
Productions.Constant_Ref_RHS
-           (Grammar, Closure_Item.Prod).Tokens.To_Cursor (Closure_Item.Dot);
-         ID         : constant Token_ID               := Element (Dot);
-         Next_Dot   : constant Token_ID_Arrays.Cursor := Next (Dot);
+         Tokens     : Token_ID_Arrays.Vector renames 
Productions.Constant_Ref_RHS
+           (Grammar, Closure_Item.Prod).Tokens;
+         Dot        : constant Token_ID_Arrays.Cursor := Tokens.To_Cursor 
(Closure_Item.Dot);
+         ID         : constant Token_ID               := Tokens (Dot);
+         Next_Dot   : constant Token_ID_Arrays.Cursor := Tokens.Next (Dot);
          Goto_State : constant Unknown_State_Index    := LR1_Items.Goto_State 
(Source_Set, ID);
       begin
          if Goto_State /= Unknown_State then
@@ -356,18 +357,18 @@ package body WisiToken.Generate.LR.LALR_Generate is
                if Closure_Item.Lookaheads (Descriptor.Last_Lookahead) then
                   Add_Propagation
                     (From_Item    => Source_Item,
-                     From_State   => Source_Set.State,
+                     From_State   => Source_Set.Tree_Node.State,
                      To_Item      => To_Item,
                      To_State     => Goto_State,
                      Propagations => Propagations);
                end if;
 
-               if Trace_Generate_Table > Outline then
+               if Trace_Generate_Table > Extra then
                   Spontaneous_Count := Spontaneous_Count + 1;
-                  Ada.Text_IO.Put_Line ("  spontaneous: " & Lookahead_Image 
(Closure_Item.Lookaheads.all, Descriptor));
+                  Ada.Text_IO.Put_Line ("  spontaneous: " & Lookahead_Image 
(Closure_Item.Lookaheads, Descriptor));
                end if;
 
-               LR1_Items.Include (Variable_Ref (To_Item), 
Closure_Item.Lookaheads.all, Descriptor);
+               LR1_Items.Include (Variable_Ref (To_Item), 
Closure_Item.Lookaheads, Descriptor);
             end;
          end if;
       end;
@@ -389,7 +390,7 @@ package body WisiToken.Generate.LR.LALR_Generate is
          for Map of Propagations loop
             for ID of Map.To loop
                LR1_Items.Include
-                 (Item_Ref (Kernels, ID), Item_Ref (Kernels, 
Map.From).Lookaheads.all, Added_One, Descriptor);
+                 (Item_Ref (Kernels, ID), Item_Ref (Kernels, 
Map.From).Lookaheads, Added_One, Descriptor);
 
                More_To_Check := More_To_Check or Added_One;
             end loop;
@@ -407,11 +408,11 @@ package body WisiToken.Generate.LR.LALR_Generate is
       Kernels                 : in out LR1_Items.Item_Set_List;
       Descriptor              : in     WisiToken.Descriptor)
    is
-      Closure : LR1_Items.Item_Set;
+      Closure      : LR1_Items.Item_Set;
       Propagations : Propagation_Lists.List;
    begin
       for Kernel of Kernels loop
-         if Trace_Generate_Table > Outline then
+         if Trace_Generate_Table > Detail then
             Ada.Text_IO.Put ("Adding lookaheads for ");
             LR1_Items.Put (Grammar, Descriptor, Kernel);
          end if;
@@ -422,9 +423,7 @@ package body WisiToken.Generate.LR.LALR_Generate is
                   ((Prod       => Kernel_Item.Prod,
                     Dot        => Kernel_Item.Dot,
                     Lookaheads => Propagate_Lookahead (Descriptor))),
-                Goto_List      => <>,
-                Dot_IDs        => <>,
-                State          => <>),
+                others         => <>),
                Has_Empty_Production, First_Terminal_Sequence, Grammar, 
Descriptor);
 
             for Closure_Item of Closure.Set loop
@@ -434,7 +433,7 @@ package body WisiToken.Generate.LR.LALR_Generate is
          end loop;
       end loop;
 
-      if Trace_Generate_Table > Outline then
+      if Trace_Generate_Table > Detail then
          Ada.Text_IO.New_Line;
          Ada.Text_IO.Put_Line ("Propagations:");
          Put (Propagations);
@@ -444,17 +443,19 @@ package body WisiToken.Generate.LR.LALR_Generate is
       Propagate_Lookaheads (Propagations, Kernels, Descriptor);
    end Fill_In_Lookaheads;
 
-   --  Add actions for all Kernels to Table.
    procedure Add_Actions
      (Kernels                 : in     LR1_Items.Item_Set_List;
       Grammar                 : in     
WisiToken.Productions.Prod_Arrays.Vector;
       Has_Empty_Production    : in     Token_ID_Set;
-      First_Nonterm_Set       : in     Token_Array_Token_Set;
       First_Terminal_Sequence : in     Token_Sequence_Arrays.Vector;
-      Conflict_Counts         :    out Conflict_Count_Lists.List;
-      Conflicts               :    out Conflict_Lists.List;
+      First_Nonterm_Set       : in     Token_Array_Token_Set;
       Table                   : in out Parse_Table;
-      Descriptor              : in     WisiToken.Descriptor)
+      Descriptor              : in     WisiToken.Descriptor;
+      Declared_Conflicts      : in out 
WisiToken.Generate.LR.Conflict_Lists.Tree;
+      Unknown_Conflicts       : in out 
WisiToken.Generate.LR.Conflict_Lists.Tree;
+      File_Name               : in     String;
+      Ignore_Conflicts        : in     Boolean := False)
+   --  Add actions for all Kernels to Table.
    is
       Closure : LR1_Items.Item_Set;
    begin
@@ -465,8 +466,8 @@ package body WisiToken.Generate.LR.LALR_Generate is
          Closure := LR1_Items.Closure (Kernel, Has_Empty_Production, 
First_Terminal_Sequence, Grammar, Descriptor);
 
          Add_Actions
-           (Closure, Table, Grammar, Has_Empty_Production, First_Nonterm_Set,
-            Conflict_Counts, Conflicts, Descriptor);
+           (Closure, Table, Grammar, Descriptor, Declared_Conflicts, 
Unknown_Conflicts, First_Nonterm_Set, File_Name,
+            Ignore_Conflicts);
       end loop;
 
       if Trace_Generate_Table > Detail then
@@ -477,15 +478,21 @@ package body WisiToken.Generate.LR.LALR_Generate is
    function Generate
      (Grammar               : in out WisiToken.Productions.Prod_Arrays.Vector;
       Descriptor            : in     WisiToken.Descriptor;
-      Known_Conflicts       : in     Conflict_Lists.List := 
Conflict_Lists.Empty_List;
+      Grammar_File_Name     : in     String;
+      Known_Conflicts       : in     Conflict_Lists.Tree := Conflict_Lists.Empty_Tree;
       McKenzie_Param        : in     McKenzie_Param_Type := Default_McKenzie_Param;
+      Max_Parallel          : in     SAL.Base_Peek_Type  := 15;
       Parse_Table_File_Name : in     String              := "";
       Include_Extra         : in     Boolean             := False;
       Ignore_Conflicts      : in     Boolean             := False;
-      Partial_Recursion     : in     Boolean             := True)
+      Partial_Recursion     : in     Boolean             := True;
+      Use_Cached_Recursions : in     Boolean             := False;
+      Recursions            : in out WisiToken.Generate.Recursions)
      return Parse_Table_Ptr
    is
-      use all type Ada.Containers.Count_Type;
+      Time_Start           : constant Ada.Calendar.Time := Ada.Calendar.Clock;
+      Table_Time           : Ada.Calendar.Time;
+      Minimal_Actions_Time : Ada.Calendar.Time;
 
       Ignore_Unused_Tokens     : constant Boolean := WisiToken.Trace_Generate_Table > Detail;
       Ignore_Unknown_Conflicts : constant Boolean := Ignore_Conflicts or WisiToken.Trace_Generate_Table > Detail;
@@ -496,12 +503,10 @@ package body WisiToken.Generate.LR.LALR_Generate is
       Nullable : constant Token_Array_Production_ID := WisiToken.Generate.Nullable (Grammar);
       Has_Empty_Production : constant Token_ID_Set := WisiToken.Generate.Has_Empty_Production (Nullable);
 
-      Recursions : constant WisiToken.Generate.Recursions :=
-        (if Partial_Recursion
-         then WisiToken.Generate.Compute_Partial_Recursion (Grammar, Descriptor)
-         else WisiToken.Generate.Compute_Full_Recursion (Grammar, Descriptor));
+      Recursions_Time : Ada.Calendar.Time;
+
       Minimal_Terminal_Sequences : constant Minimal_Sequence_Array :=
-        Compute_Minimal_Terminal_Sequences (Descriptor, Grammar);
+        Compute_Minimal_Terminal_Sequences (Descriptor, Grammar, Grammar_File_Name);
 
       Minimal_Terminal_First : constant Token_Array_Token_ID :=
        Compute_Minimal_Terminal_First (Descriptor, Minimal_Terminal_Sequences);
@@ -514,11 +519,25 @@ package body WisiToken.Generate.LR.LALR_Generate is
 
      Kernels : LR1_Items.Item_Set_List := LALR_Kernels (Grammar, First_Nonterm_Set, Descriptor);
 
-      Conflict_Counts      : Conflict_Count_Lists.List;
-      Unknown_Conflicts    : Conflict_Lists.List;
-      Known_Conflicts_Edit : Conflict_Lists.List := Known_Conflicts;
+      Unknown_Conflicts    : Conflict_Lists.Tree;
+      Known_Conflicts_Edit : Conflict_Lists.Tree := Known_Conflicts;
 
    begin
+      if not Use_Cached_Recursions or Recursions = Empty_Recursions then
+         Recursions :=
+           (if Partial_Recursion
+            then WisiToken.Generate.Compute_Partial_Recursion (Grammar, Descriptor)
+            else WisiToken.Generate.Compute_Full_Recursion (Grammar, Descriptor));
+      end if;
+      Set_Grammar_Recursions (Recursions, Grammar);
+      Recursions_Time := Ada.Calendar.Clock;
+
+      if Trace_Time then
+         Ada.Text_IO.Put_Line
+           (Ada.Text_IO.Standard_Error, "compute kernels, recursion time:" &
+              Duration'Image (Ada.Calendar."-" (Recursions_Time, Time_Start)));
+      end if;
+
       WisiToken.Generate.Error := False; -- necessary in unit tests; some previous test might have encountered an error.
 
       if Trace_Generate_Table + Trace_Generate_Minimal_Complete > Outline then
@@ -562,17 +581,29 @@ package body WisiToken.Generate.LR.LALR_Generate is
             Fast_Forward                => Default_McKenzie_Param.Fast_Forward,
             Matching_Begin              => Default_McKenzie_Param.Matching_Begin,
             Ignore_Check_Fail           => Default_McKenzie_Param.Ignore_Check_Fail,
-            Task_Count                  => Default_McKenzie_Param.Task_Count,
+            Zombie_Limit                => Default_McKenzie_Param.Zombie_Limit,
             Check_Limit                 => Default_McKenzie_Param.Check_Limit,
             Check_Delta_Limit           => Default_McKenzie_Param.Check_Delta_Limit,
             Enqueue_Limit               => Default_McKenzie_Param.Enqueue_Limit);
+
+         Table.Error_Recover_Enabled := False;
+
       else
-         Table.McKenzie_Param := McKenzie_Param;
+         Table.McKenzie_Param        := McKenzie_Param;
+         Table.Error_Recover_Enabled := True;
       end if;
 
+      Table.Max_Parallel := Max_Parallel;
+
       Add_Actions
-        (Kernels, Grammar, Has_Empty_Production, First_Nonterm_Set, First_Terminal_Sequence, Conflict_Counts,
-         Unknown_Conflicts, Table.all, Descriptor);
+        (Kernels, Grammar, Has_Empty_Production, First_Terminal_Sequence, First_Nonterm_Set, Table.all,
+         Descriptor, Known_Conflicts_Edit, Unknown_Conflicts, Grammar_File_Name, Ignore_Conflicts);
+
+      if Trace_Time then
+         Table_Time := Ada.Calendar.Clock;
+         Ada.Text_IO.Put_Line
+           ("compute parse table time:" & Duration'Image (Ada.Calendar."-" (Table_Time, Recursions_Time)));
+      end if;
 
       for State in Table.States'Range loop
          if Trace_Generate_Minimal_Complete > Extra then
@@ -583,27 +614,21 @@ package body WisiToken.Generate.LR.LALR_Generate is
             Minimal_Terminal_First);
       end loop;
 
-      if Parse_Table_File_Name /= "" then
-         WisiToken.Generate.LR.Put_Parse_Table
-           (Table, Parse_Table_File_Name, "LALR", Grammar, Recursions, Kernels, Conflict_Counts, Descriptor,
-            Include_Extra);
+      if Trace_Time then
+         Minimal_Actions_Time := Ada.Calendar.Clock;
+         Ada.Text_IO.Put_Line
+           ("compute minimal actions time:" & Duration'Image
+              (Ada.Calendar."-" (Minimal_Actions_Time, Table_Time)));
       end if;
 
-      Delete_Known (Unknown_Conflicts, Known_Conflicts_Edit);
-
-      if Unknown_Conflicts.Length > 0 then
-         Ada.Text_IO.Put_Line (Ada.Text_IO.Current_Error, "unknown conflicts:");
-         Put (Unknown_Conflicts, Ada.Text_IO.Current_Error, Descriptor);
-         Ada.Text_IO.New_Line (Ada.Text_IO.Current_Error);
-         WisiToken.Generate.Error := WisiToken.Generate.Error or not Ignore_Unknown_Conflicts;
+      if Parse_Table_File_Name /= "" then
+         WisiToken.Generate.LR.Put_Parse_Table
+           (Table, Parse_Table_File_Name, "LALR", Grammar, Recursions, Kernels, Known_Conflicts_Edit, Unknown_Conflicts,
+            Descriptor, Include_Extra);
       end if;
 
-      if Known_Conflicts_Edit.Length > 0 then
-         Ada.Text_IO.Put_Line (Ada.Text_IO.Current_Error, "excess known conflicts:");
-         Put (Known_Conflicts_Edit, Ada.Text_IO.Current_Error, Descriptor);
-         Ada.Text_IO.New_Line (Ada.Text_IO.Current_Error);
-         WisiToken.Generate.Error := WisiToken.Generate.Error or not Ignore_Unknown_Conflicts;
-      end if;
+      Check_Conflicts
+        ("LALR", Unknown_Conflicts, Known_Conflicts_Edit, Grammar_File_Name, Descriptor, Ignore_Unknown_Conflicts);
 
       return Table;
    end Generate;
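
The Trace_Time instrumentation added to Generate above is one repeated pattern: read Ada.Calendar.Clock before and after a phase and print the elapsed Duration when the flag is set. A minimal, self-contained sketch of that pattern (the flag and the delayed "phase" below are illustrative stand-ins, not WisiToken code):

    with Ada.Calendar; use Ada.Calendar;
    with Ada.Text_IO;  use Ada.Text_IO;
    procedure Time_Phase_Sketch is
       Trace_Time : constant Boolean := True; -- stand-in for the generator's trace flag
       Time_Start : constant Time    := Clock;
       Phase_Done : Time;
    begin
       delay 0.01; -- stand-in for a phase such as computing kernels and recursions
       Phase_Done := Clock;
       if Trace_Time then
          Put_Line ("phase time:" & Duration'Image (Phase_Done - Time_Start));
       end if;
    end Time_Phase_Sketch;
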
diff --git a/wisitoken-generate-lr-lalr_generate.ads b/wisitoken-generate-lr-lalr_generate.ads
index 7527a9cc8e..b2b3fef5a0 100644
--- a/wisitoken-generate-lr-lalr_generate.ads
+++ b/wisitoken-generate-lr-lalr_generate.ads
@@ -27,12 +27,16 @@ package WisiToken.Generate.LR.LALR_Generate is
    function Generate
      (Grammar               : in out WisiToken.Productions.Prod_Arrays.Vector;
       Descriptor            : in     WisiToken.Descriptor;
-      Known_Conflicts       : in     Conflict_Lists.List := Conflict_Lists.Empty_List;
+      Grammar_File_Name     : in     String;
+      Known_Conflicts       : in     Conflict_Lists.Tree := Conflict_Lists.Empty_Tree;
       McKenzie_Param        : in     McKenzie_Param_Type := Default_McKenzie_Param;
+      Max_Parallel          : in     SAL.Base_Peek_Type  := 15;
       Parse_Table_File_Name : in     String              := "";
       Include_Extra         : in     Boolean             := False;
       Ignore_Conflicts      : in     Boolean             := False;
-      Partial_Recursion     : in     Boolean             := True)
+      Partial_Recursion     : in     Boolean             := True;
+      Use_Cached_Recursions : in     Boolean             := False;
+      Recursions            : in out WisiToken.Generate.Recursions)
      return Parse_Table_Ptr
    with Pre =>
      Descriptor.Last_Lookahead = Descriptor.First_Nonterminal and
@@ -46,42 +50,16 @@ package WisiToken.Generate.LR.LALR_Generate is
    --
    --  Unless Ignore_Unknown_Conflicts is True, raise Grammar_Error if there
    --  are unknown conflicts.
+   --
+   --  Grammar_File_Name is used for error messages.
 
    ----------
    --  Visible for unit tests
 
-   function LALR_Goto_Transitions
-     (Kernel            : in LR1_Items.Item_Set;
-      Symbol            : in Token_ID;
-      First_Nonterm_Set : in Token_Array_Token_Set;
-      Grammar           : in WisiToken.Productions.Prod_Arrays.Vector;
-      Descriptor        : in WisiToken.Descriptor)
-     return LR1_Items.Item_Set;
-   --  Return the Item_Set that is the goto for Symbol from Kernel.
-   --  If there is no such Item_Set, Result.Set is null.
-
    function LALR_Kernels
      (Grammar           : in WisiToken.Productions.Prod_Arrays.Vector;
       First_Nonterm_Set : in Token_Array_Token_Set;
       Descriptor        : in WisiToken.Descriptor)
      return LR1_Items.Item_Set_List;
 
-   procedure Fill_In_Lookaheads
-     (Grammar                 : in     WisiToken.Productions.Prod_Arrays.Vector;
-      Has_Empty_Production    : in     Token_ID_Set;
-      First_Terminal_Sequence : in     Token_Sequence_Arrays.Vector;
-      Kernels                 : in out LR1_Items.Item_Set_List;
-      Descriptor              : in     WisiToken.Descriptor);
-
-   procedure Add_Actions
-     (Kernels                 : in     LR1_Items.Item_Set_List;
-      Grammar                 : in     WisiToken.Productions.Prod_Arrays.Vector;
-      Has_Empty_Production    : in     Token_ID_Set;
-      First_Nonterm_Set       : in     Token_Array_Token_Set;
-      First_Terminal_Sequence : in     Token_Sequence_Arrays.Vector;
-      Conflict_Counts         :    out Conflict_Count_Lists.List;
-      Conflicts               :    out Conflict_Lists.List;
-      Table                   : in out Parse_Table;
-      Descriptor              : in     WisiToken.Descriptor);
-
 end WisiToken.Generate.LR.LALR_Generate;
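
The spec change above turns Recursions into an "in out" parameter with a Use_Cached_Recursions flag, so a driver that builds several parse tables from the same grammar can compute the (expensive) recursion analysis once and reuse it. A small self-contained sketch of that compute-once shape, with hypothetical types standing in for WisiToken.Generate.Recursions:

    with Ada.Text_IO; use Ada.Text_IO;
    procedure Cached_Recursions_Sketch is
       type Recursion_Data is record
          Computed : Boolean := False;
          Value    : Integer := 0;
       end record;

       Empty_Recursions : constant Recursion_Data := (Computed => False, Value => 0);

       function Compute_Recursion return Recursion_Data is
       begin
          return (Computed => True, Value => 42); -- stand-in for Compute_*_Recursion
       end Compute_Recursion;

       procedure Generate (Use_Cached : in Boolean; Recursions : in out Recursion_Data) is
       begin
          if not Use_Cached or else Recursions = Empty_Recursions then
             Recursions := Compute_Recursion;
          end if;
          Put_Line ("recursion value:" & Integer'Image (Recursions.Value));
       end Generate;

       Shared : Recursion_Data;
    begin
       Generate (Use_Cached => True, Recursions => Shared); -- cache empty, so this computes
       Generate (Use_Cached => True, Recursions => Shared); -- reuses the cached result
    end Cached_Recursions_Sketch;
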
diff --git a/wisitoken-generate-lr-lr1_generate.adb b/wisitoken-generate-lr-lr1_generate.adb
index 8f688c91ac..bba1176feb 100644
--- a/wisitoken-generate-lr-lr1_generate.adb
+++ b/wisitoken-generate-lr-lr1_generate.adb
@@ -2,7 +2,7 @@
 --
 --  See spec.
 --
---  Copyright (C) 2017 - 2020 Free Software Foundation, Inc.
+--  Copyright (C) 2017 - 2022 Free Software Foundation, Inc.
 --
 --  This file is part of the WisiToken package.
 --
@@ -20,9 +20,11 @@
 
 pragma License (Modified_GPL);
 
+with Ada.Calendar;
 with Ada.Containers;
+with Ada.Exceptions;
 with Ada.Text_IO;
-with WisiToken.Generate;
+with System.Address_To_Access_Conversions;
 package body WisiToken.Generate.LR.LR1_Generate is
 
    function LR1_Goto_Transitions
@@ -33,20 +35,22 @@ package body WisiToken.Generate.LR.LR1_Generate is
       Grammar                 : in WisiToken.Productions.Prod_Arrays.Vector;
       Descriptor              : in WisiToken.Descriptor)
      return LR1_Items.Item_Set
+   with Pre => Set.Dot_IDs.Contains (Symbol),
+     Post => not LR1_Goto_Transitions'Result.Set.Is_Empty
+   --  'goto' from [dragon] algorithm 4.9.
    is
-      use all type Ada.Containers.Count_Type;
       use Token_ID_Arrays;
       use LR1_Items;
 
       Goto_Set : Item_Set;
    begin
       for Item of Set.Set loop
-         if Item.Dot /= No_Index then
-            declare
-               Dot : constant Token_ID_Arrays.Cursor := Productions.Constant_Ref_RHS
-                 (Grammar, Item.Prod).Tokens.To_Cursor (Item.Dot);
-            begin
-               if Element (Dot) = Symbol and
+         declare
+            Item_Tokens : Token_ID_Arrays.Vector renames Productions.Constant_Ref_RHS
+              (Grammar, Item.Prod).Tokens;
+         begin
+            if Item.Dot in Item_Tokens.First_Index .. Item_Tokens.Last_Index then
+               if Item_Tokens (Item.Dot) = Symbol and
                  --  We don't need a state with dot after EOI in the
                  --  accept production. EOI should only appear in the
                  --  accept production.
@@ -54,38 +58,33 @@ package body WisiToken.Generate.LR.LR1_Generate is
                then
                   Goto_Set.Set.Insert
                     ((Item.Prod,
-                      To_Index (Next (Dot)),
-                      new Token_ID_Set'(Item.Lookaheads.all)));
+                      (if Item.Dot = Item_Tokens.Last_Index then No_Index else Item.Dot + 1),
+                      Item.Lookaheads));
                end if;
-            end;
-         end if;
+            end if;
+         end;
       end loop;
 
-      if Goto_Set.Set.Length > 0 then
-         return Closure (Goto_Set, Has_Empty_Production, First_Terminal_Sequence, Grammar, Descriptor);
-      else
-         return Goto_Set;
-      end if;
+      return Closure (Goto_Set, Has_Empty_Production, First_Terminal_Sequence, Grammar, Descriptor);
    end LR1_Goto_Transitions;
 
-   function LR1_Item_Sets
+   function LR1_Item_Sets_Single
      (Has_Empty_Production    : in Token_ID_Set;
       First_Terminal_Sequence : in Token_Sequence_Arrays.Vector;
       Grammar                 : in WisiToken.Productions.Prod_Arrays.Vector;
-      Descriptor              : in WisiToken.Descriptor)
+      Descriptor              : in WisiToken.Descriptor;
+      Hash_Table_Size         : in Positive := LR1_Items.Item_Set_Trees.Default_Rows)
      return LR1_Items.Item_Set_List
+   --  [dragon] algorithm 4.9 pg 231; figure 4.38 pg 232; procedure
+   --  "items", with some optimizations.
    is
-      use all type Ada.Containers.Count_Type;
-
-      --  [dragon] algorithm 4.9 pg 231; figure 4.38 pg 232; procedure
-      --  "items", with some optimizations.
-
       use LR1_Items;
 
       First_State_Index : constant State_Index := 0;
+      States_Found      : Integer              := 0;
 
-      C               : LR1_Items.Item_Set_List;       -- result
-      C_Tree          : LR1_Items.Item_Set_Trees.Tree; -- for fast find
+      C               : LR1_Items.Item_Set_List; -- result
+      C_Tree          : LR1_Items.Item_Set_Tree; -- for fast find
       States_To_Check : State_Index_Queues.Queue;
       --  [dragon] specifies 'until no more items can be added', but we use
       --  a queue to avoid checking unnecessary states. Ada LR1 has over
@@ -98,26 +97,25 @@ package body WisiToken.Generate.LR.LR1_Generate is
       New_Item_Set : Item_Set := Closure
         ((Set            => Item_Lists.To_List
             ((Prod       => (Grammar.First_Index, 0),
-              Dot        => Grammar (Grammar.First_Index).RHSs (0).Tokens.First_Index,
-              Lookaheads => new Token_ID_Set'(To_Lookahead (Descriptor.EOI_ID, Descriptor)))),
-          Goto_List      => <>,
-          Dot_IDs        => <>,
-          State          => First_State_Index),
+              Dot        => Grammar (Grammar.First_Index).RHSs (0).Tokens.First_Index (No_Index_If_Empty => True),
+              Lookaheads => To_Lookahead (Descriptor.EOI_ID))),
+          Tree_Node      =>
+            (State       => First_State_Index,
+             others      => <>),
+          others         => <>),
         Has_Empty_Production, First_Terminal_Sequence, Grammar, Descriptor);
-
-      Found_State  : Unknown_State_Index;
-
    begin
       C.Set_First_Last (First_State_Index, First_State_Index - 1);
+      C_Tree.Set_Rows (Hash_Table_Size);
 
-      Add (Grammar, New_Item_Set, C, C_Tree, Descriptor, Include_Lookaheads => True);
+      Add (Grammar, New_Item_Set, C, C_Tree, Descriptor, C_Tree.Rows, Include_Lookaheads => True);
 
       States_To_Check.Put (First_State_Index);
       loop
          exit when States_To_Check.Is_Empty;
          I := States_To_Check.Get;
 
-         if Trace_Generate_Table > Outline then
+         if Trace_Generate_Table > Detail then
             Ada.Text_IO.Put ("Checking ");
             Put (Grammar, Descriptor, C (I), Show_Lookaheads => True, Show_Goto_List => True);
          end if;
@@ -130,21 +128,30 @@ package body WisiToken.Generate.LR.LR1_Generate is
            --  [dragon] has 'for each grammar symbol X', but LR1_Goto_Transitions
             --  rejects Symbol that is not in Dot_IDs, so we iterate over that.
 
+            --  The input to LR1_Goto_Transitions is always a previous output, and
+            --  always unique. The output may not be unique; we store that in C,
+            --  and a search key in C_Tree for faster searching.
+
             New_Item_Set := LR1_Goto_Transitions
              (C (I), Symbol, Has_Empty_Production, First_Terminal_Sequence, Grammar, Descriptor);
 
-            if New_Item_Set.Set.Length > 0 then -- 'goto (I, X) not empty'
+            New_Item_Set.Tree_Node.State := C.Last_Index + 1;
 
-               Found_State := Find (New_Item_Set, C_Tree, Match_Lookaheads => True); -- 'not in C'
+            Compute_Key_Hash (New_Item_Set, C_Tree.Rows, Grammar, Descriptor, True);
 
-               if Found_State = Unknown_State then
-                  New_Item_Set.State := C.Last_Index + 1;
+            declare
+               Found     : Boolean;
+               Found_Ref : constant Item_Set_Trees.Constant_Reference_Type := C_Tree.Find_Or_Insert
+                 (New_Item_Set.Tree_Node, Found);
+            begin
+               if not Found then
+                  States_To_Check.Put (New_Item_Set.Tree_Node.State);
 
-                  States_To_Check.Put (New_Item_Set.State);
+                  New_Item_Set.Dot_IDs := Get_Dot_IDs (Grammar, New_Item_Set.Set, Descriptor);
 
-                  Add (Grammar, New_Item_Set, C, C_Tree, Descriptor, Include_Lookaheads => True);
+                  C.Append (New_Item_Set);
 
-                  if Trace_Generate_Table > Outline then
+                  if Trace_Generate_Table > Detail then
                      Ada.Text_IO.Put_Line
                        ("  adding state" & Unknown_State_Index'Image 
(C.Last_Index) & ": from state" &
                           Unknown_State_Index'Image (I) & " on " & Image 
(Symbol, Descriptor));
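
In the reworked single-task loop above, each goto set is hashed and looked up with C_Tree.Find_Or_Insert: an existing state only gains a goto entry, while a new state is appended to C and queued for checking. The same find-or-insert idiom, sketched with the standard containers standing in for LR1_Items.Item_Set_Trees (names here are illustrative):

    with Ada.Containers.Indefinite_Hashed_Maps;
    with Ada.Strings.Hash;
    with Ada.Text_IO; use Ada.Text_IO;
    procedure Find_Or_Insert_Sketch is
       package State_Maps is new Ada.Containers.Indefinite_Hashed_Maps
         (Key_Type        => String,  -- stand-in for an item-set key
          Element_Type    => Natural, -- state number
          Hash            => Ada.Strings.Hash,
          Equivalent_Keys => "=");

       Map        : State_Maps.Map;
       Next_State : Natural := 0;

       procedure Lookup (Key : in String) is
          Position : State_Maps.Cursor;
          Inserted : Boolean;
       begin
          --  Insert does nothing if Key is already present; Inserted reports which case.
          Map.Insert (Key, Next_State, Position, Inserted);
          if Inserted then
             Put_Line ("new state" & Natural'Image (Next_State) & " for " & Key);
             Next_State := Next_State + 1;
          else
             Put_Line ("goto existing state" & Natural'Image (State_Maps.Element (Position)) & " for " & Key);
          end if;
       end Lookup;
    begin
       Lookup ("kernel A");
       Lookup ("kernel B");
       Lookup ("kernel A"); -- found; no new state is added
    end Find_Or_Insert_Sketch;
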
@@ -153,45 +160,673 @@ package body WisiToken.Generate.LR.LR1_Generate is
 
                   C (I).Goto_List.Insert ((Symbol, C.Last_Index));
                else
-
+                  States_Found := @ + 1;
-                  --  If there's not already a goto entry between these two sets, create one.
-                  if not Is_In ((Symbol, Found_State), Goto_List => C (I).Goto_List) then
-                     if Trace_Generate_Table > Outline then
-                        Ada.Text_IO.Put_Line
-                          ("  adding goto on " & Image (Symbol, Descriptor) & " to state" &
-                             Unknown_State_Index'Image (Found_State));
+                  if not Is_In ((Symbol, Found_Ref.State), Goto_List => C (I).Goto_List) then
+                     C (I).Goto_List.Insert ((Symbol, Found_Ref.State));
 
+                     if Trace_Generate_Table > Detail then
+                        Ada.Text_IO.Put_Line
+                          ("  adding goto on " & Image (Symbol, Descriptor) & " to state" & Found_Ref.State'Image);
                      end if;
-
-                     C (I).Goto_List.Insert ((Symbol, Found_State));
                   end if;
                end if;
-            end if;
+            end;
          end loop;
       end loop;
 
-      if Trace_Generate_Table > Outline then
+      if Trace_Time then
+         declare
+            Elements, Max_Row_Depth, Average_Row_Depth : Ada.Containers.Count_Type;
+            Rows, Empty_Rows : Integer;
+         begin
+            C_Tree.Sizes (Elements, Rows, Max_Row_Depth, Average_Row_Depth, Empty_Rows);
+
+            Ada.Text_IO.Put_Line
+              ("LR1 hash table states:" & Elements'Image &
+                 " rows:" & Rows'Image &
+                 " max_row_depth:" & Max_Row_Depth'Image &
+                 " average_row_depth:" & Average_Row_Depth'Image &
+                 " empty_rows:" & Empty_Rows'Image &
+                 " states_found:" & States_Found'Image);
+         end;
+      end if;
+
+      if Trace_Generate_Table > Detail then
          Ada.Text_IO.New_Line;
       end if;
 
       return C;
-   end LR1_Item_Sets;
+   end LR1_Item_Sets_Single;
+
+   function LR1_Item_Sets_Parallel
+     (Has_Empty_Production    : in Token_ID_Set;
+      First_Terminal_Sequence : in Token_Sequence_Arrays.Vector;
+      Grammar                 : in WisiToken.Productions.Prod_Arrays.Vector;
+      Descriptor              : in WisiToken.Descriptor;
+      Task_Count              : in System.Multiprocessors.CPU_Range;
+      Hash_Table_Size         : in Positive := LR1_Items.Item_Set_Trees.Default_Rows)
+     return LR1_Items.Item_Set_List
+   is
+      use LR1_Items;
+      use all type Ada.Containers.Count_Type;
+      use all type SAL.Base_Peek_Type;
+      use all type System.Multiprocessors.CPU_Range;
+
+      --  [dragon] algorithm 4.9 pg 231; figure 4.38 pg 232; procedure
+      --  "items", with some optimizations.
+
+      type Base_Worker_ID is range 0 .. Positive'Last;
+      subtype Worker_ID is Base_Worker_ID range 1 .. Base_Worker_ID'Last;
+
+      package Item_Set_Tree_Node_Arrays is new SAL.Gen_Unbounded_Definite_Vectors
+        (Positive, Item_Set_Tree_Node, (others => <>));
+
+      package Worker_Array_State_Index_Arrays is new SAL.Gen_Unbounded_Definite_Vectors
+        (Worker_ID, State_Index_Arrays.Vector, State_Index_Arrays.Empty_Vector);
+
+      type Worker_Data is record
+         --  All data produced by one worker from one state, except New_C,
+         --  which doesn't need to be split by supervisor state.
+         From_State          : State_Index := State_Index'Last;
+         New_States          : State_Index_Arrays.Vector;
+         Existing_Goto_Items : Goto_Item_Arrays.Vector;
+         New_Goto_Items      : Goto_Item_Arrays.Vector;
+      end record;
+
+      package State_Array_Worker_Data is new SAL.Gen_Unbounded_Definite_Vectors
+        (State_Index, Worker_Data, (others => <>));
+
+      protected Supervisor is
+
+         procedure Initialize
+           (Worker_Count   : in     LR1_Item_Sets_Parallel.Worker_ID;
+            First_Item_Set : in out Item_Set);
+
+         entry Get
+           (Worker_ID     : in     LR1_Item_Sets_Parallel.Worker_ID;
+            Sets_To_Check :    out Item_Set_List;
+            Keys_To_Store :    out Item_Set_Tree_Node_Arrays.Vector)
+         with Pre => Sets_To_Check.Is_Empty and Keys_To_Store.Is_Empty;
+         --  Set Sets_To_Check to new states to check, _not_ indexed by state;
+         --  they may be discontinuous. Available when there are states to
+         --  check, or when all states have been checked and all workers are
+         --  inactive; then Sets_To_Check is empty.
+         --
+         --  If Sets_To_Check is not empty, Keys_To_Store contains keys from
+         --  other workers to store in worker's C_Tree; increment active worker
+         --  count.
+
+         procedure Update
+           (Worker_ID   : in     LR1_Item_Sets_Parallel.Worker_ID;
+            New_C       : in out Item_Set_Arrays.Vector;
+            Worker_Data : in out State_Array_Worker_Data.Vector);
+         --  New_C: New states found by worker, indexed by worker new state
+         --  number (1 origin); add to supervisor C. States are updated to supervisor
+         --  state numbers; worker should add those to worker's C_Tree.
+         --
+         --  Worker_Data : Indexed by supervisor state number I. Contains:
+         --
+         --     New_States: Worker new state numbers for states derived from C (I);
+         --  sets are in New_C.
+         --
+         --     Existing_Goto_Items: Gotos from C (I) to some state in supervisor
+         --  C (which worker found in C_Tree); add to supervisor C (I).
+         --
+         --     New_Goto_Items: From C (I) to some state in New_C (given by
+         --  worker new state number). Add to supervisor C (I).
+         --
+         --  Decrement active worker count.
+
+         procedure Fatal_Error
+           (Exception_ID : in Ada.Exceptions.Exception_Id;
+            Message      : in String);
+         --  Worker encountered an exception; record it for Done, decrement
+         --  active worker count.
+
+         entry Done
+           (ID      : out Ada.Exceptions.Exception_Id;
+            Message : out Ada.Strings.Unbounded.Unbounded_String);
+         --  Available when all states have been checked, and all workers
+         --  inactive.
+
+         function Get_C return Item_Set_List;
+
+      private
+         C               : Item_Set_List; -- result
+         C_Tree          : Item_Set_Tree; -- for fast find
+         States_To_Check : State_Index_Queues.Queue;
+         --  [dragon] specifies 'until no more items can be added', but we use
+         --  a queue to avoid checking unnecessary states. Ada LR1 has over
+         --  100,000 states, so this is a significant gain (reduced time from
+         --  600 seconds to 40).
+
+         Worker_Count   : LR1_Item_Sets_Parallel.Worker_ID;
+         Active_Workers : Natural := 0;
+         Fatal          : Boolean := False;
+
+         New_States_For_Worker : Worker_Array_State_Index_Arrays.Vector;
+         --  Indexed by worker ID
+
+         Error_ID       : Ada.Exceptions.Exception_Id := Ada.Exceptions.Null_Id;
+         Error_Message  : Ada.Strings.Unbounded.Unbounded_String;
+
+         Min_States_Get      : SAL.Peek_Type := 10;
+
+         Net_Time            : Duration    := 0.0; -- Time spent in Get, Update.
+         Found_States        : Integer     := 0;   -- States found in Update; counts duplicate states found by workers
+         Summary_Last_Output : State_Index := 0;
+      end Supervisor;
+
+      function Image (Node_Ref : Item_Set_Trees.Variable_Reference_Type) return String
+      is
+         package Convert is new System.Address_To_Access_Conversions (Item_Set_Tree_Node);
+      begin
+         return Convert.To_Address (Convert.Object_Pointer (Node_Ref))'Image & ":" &
+           Node_Ref.Hash'Image & ":" & Node_Ref.State'Image;
+      end Image;
+
+      protected body Supervisor is
+
+         procedure Initialize
+           (Worker_Count   : in     LR1_Item_Sets_Parallel.Worker_ID;
+            First_Item_Set : in out Item_Set)
+         is
+            First_State_Index : constant State_Index       := 
First_Item_Set.Tree_Node.State;
+         begin
+            Supervisor.Worker_Count := Worker_Count;
+
+            New_States_For_Worker.Set_First_Last (1, Worker_Count);
+
+            C.Set_First_Last (First_State_Index, First_State_Index - 1);
+            C_Tree.Set_Rows (Hash_Table_Size);
+
+            First_Item_Set.Dot_IDs := Get_Dot_IDs (Grammar, 
First_Item_Set.Set, Descriptor);
+            Compute_Key_Hash (First_Item_Set, C_Tree.Rows, Grammar, 
Descriptor, True);
+
+            C.Append (First_Item_Set);
+            C_Tree.Insert (First_Item_Set.Tree_Node, Duplicate => SAL.Error);
+
+            States_To_Check.Put (First_State_Index);
+         end Initialize;
+
+         entry Get
+           (Worker_ID     : in     LR1_Item_Sets_Parallel.Worker_ID;
+            Sets_To_Check :    out Item_Set_List;
+            Keys_To_Store :    out Item_Set_Tree_Node_Arrays.Vector)
+         when Fatal or States_To_Check.Length > 0 or Active_Workers = 0
+         is
+            use all type Ada.Calendar.Time;
+
+            Time_Start : constant Ada.Calendar.Time := Ada.Calendar.Clock;
+         begin
+            if States_To_Check.Length > 0 then
+               for I in 1 ..
+                 (if States_To_Check.Length / SAL.Peek_Type (Worker_Count) < 
Min_States_Get
+                  then States_To_Check.Length
+                  else States_To_Check.Length / SAL.Peek_Type (Worker_Count))
+               loop
+                  Sets_To_Check.Append (C (States_To_Check.Get));
+               end loop;
+
+               if not New_States_For_Worker (Worker_ID).Is_Empty then
+                  Keys_To_Store.Set_Capacity
+                    (New_States_For_Worker (Worker_ID).First_Index, 
New_States_For_Worker (Worker_ID).Last_Index);
+                  for State of New_States_For_Worker (Worker_ID) loop
+                     pragma Assert (C (State).Tree_Node.State = State);
+                     Keys_To_Store.Append (C (State).Tree_Node);
+                  end loop;
+                  New_States_For_Worker (Worker_ID).Clear;
+               end if;
+
+               if Trace_Generate_Table > Detail then
+                  Ada.Text_IO.Put
+                    ("(worker" & Worker_ID'Image & ") Checking" & 
Sets_To_Check.Length'Image & " states");
+                  for Set of Sets_To_Check loop
+                     Ada.Text_IO.Put (Set.Tree_Node.State'Image);
+                  end loop;
+                  Ada.Text_IO.New_Line;
+                  if Trace_Generate_Table > Extra then
+                     for Set of Sets_To_Check loop
+                        Put (Grammar, Descriptor, Set, Show_Lookaheads => 
False, Kernel_Only => True);
+                     end loop;
+                  end if;
+
+                  Ada.Text_IO.Put
+                    ("(worker" & Worker_ID'Image & ") storing" & 
Keys_To_Store.Length'Image & " states");
+                  for Node of Keys_To_Store loop
+                     Ada.Text_IO.Put (Node.State'Image);
+                  end loop;
+                  Ada.Text_IO.New_Line;
+               end if;
+
+               Active_Workers := @ + 1;
+
+               Net_Time := @ + (Ada.Calendar.Clock - Time_Start);
+            end if;
+
+            if Trace_Time and C.Last_Index > Summary_Last_Output +
+              (if C_Tree.Rows < 15_053 then 500 else 10_000) --  500 for 
ada_lite.wy, 10_000 for ada.wy
+            then
+               Ada.Text_IO.Put_Line
+                 ("(super) time:" & Net_Time'Image &
+                    " states:" & C.Last_Index'Image &
+                    " States_To_Check:" & States_To_Check.Length'Image &
+                    " Found_States:" & Found_States'Image);
+               Summary_Last_Output := C.Last_Index;
+            end if;
+         end Get;
+
+         procedure Update
+           (Worker_ID   : in     LR1_Item_Sets_Parallel.Worker_ID;
+            New_C       : in out Item_Set_Arrays.Vector;
+            Worker_Data : in out State_Array_Worker_Data.Vector)
+         is
+            use all type Ada.Calendar.Time;
+            Time_Start : constant Ada.Calendar.Time := Ada.Calendar.Clock;
+
+            State_Map : array (New_C.First_Index .. New_C.Last_Index) of 
State_Index;
+            --  Indexed by worker new state number, contains super state number
+         begin
+            if Trace_Generate_Table > Detail then
+               Ada.Text_IO.Put ("(super) Update from worker" & Worker_ID'Image 
& "; new states:");
+               for Data of Worker_Data loop
+                  for Node of Data.New_Goto_Items loop
+                     Ada.Text_IO.Put
+                       (Data.From_State'Image & "." & Image (Node.Symbol, 
Descriptor) & "." &
+                          Trimmed_Image (Node.State));
+                  end loop;
+                  Ada.Text_IO.Put (" | ");
+               end loop;
+               Ada.Text_IO.New_Line;
+            end if;
+
+            for Worker_Data of Update.Worker_Data loop
+               for Worker_New_State of Worker_Data.New_States loop
+                  declare
+                     use Item_Set_Trees;
+
+                     Super_New_State  : constant State_Index := C.Last_Index + 
1;
+
+                     Found      : Boolean;
+                     Found_Ref  : constant 
Item_Set_Trees.Variable_Reference_Type := C_Tree.Find_Or_Insert_Var
+                       (New_C (Worker_New_State).Tree_Node, Found);
+                  begin
+                     if Found then
+                        State_Map (Worker_New_State) := Found_Ref.State;
+                        Found_States := @ + 1;
+                        New_C (Worker_New_State).Tree_Node.State := 
Found_Ref.State;
+
+                     else
+                        Found_Ref.State := Super_New_State;
+                        New_C (Worker_New_State).Tree_Node.State := 
Super_New_State;
+
+                        States_To_Check.Put (Super_New_State);
+
+                        State_Map (Worker_New_State) := Super_New_State;
+
+                        C.Append (New_C (Worker_New_State));
+                        pragma Assert (C.Last_Index = Super_New_State);
+
+                        for ID in New_States_For_Worker.First_Index .. 
New_States_For_Worker.Last_Index  loop
+                           if ID /= Worker_ID then
+                              New_States_For_Worker (ID).Append 
(Super_New_State);
+                           end if;
+                        end loop;
+
+                        if Trace_Generate_Table > Extra then
+                           Ada.Text_IO.Put_Line
+                             ("from state" & Worker_Data.From_State'Image & 
"." & Trimmed_Image (Worker_New_State));
+                           Put (Grammar, Descriptor, New_C (Worker_New_State),
+                                Show_Lookaheads => False,
+                                Kernel_Only  => True);
+                        end if;
+                     end if;
+                  end;
+               end loop;
+
+               --  Now we have State_Map, we can process the gotos.
+               declare
+                  use Goto_Item_Lists;
+                  From_State : constant State_Index := Worker_Data.From_State;
+               begin
+                  for Item of Worker_Data.Existing_Goto_Items loop
+                     if Trace_Generate_Table > Extra and then
+                       not Has_Element (C (From_State).Goto_List.Find 
(Item.Symbol))
+                     then
+                        Ada.Text_IO.Put_Line
+                          ("    state" & From_State'Image & " adding goto on " 
&
+                             Image (Item.Symbol, Descriptor) & " to existing 
state" & Item.State'Image);
+                     end if;
+
+                     C (From_State).Goto_List.Insert (Item, Duplicate => 
SAL.Ignore);
+                  end loop;
+
+                  for Item of Worker_Data.New_Goto_Items loop
+                     Item.State := State_Map (Item.State);
+
+                     if Trace_Generate_Table > Extra and then
+                       not Goto_Item_Lists.Has_Element
+                         (C (From_State).Goto_List.Find (Item.Symbol))
+                     then
+                        Ada.Text_IO.Put_Line
+                          ("    state" & From_State'Image & " adding goto on " 
&
+                             Image (Item.Symbol, Descriptor) & " to new state" 
& Item.State'Image);
+                     end if;
+
+                     C (From_State).Goto_List.Insert (Item, Duplicate => 
SAL.Ignore);
+                  end loop;
+               end;
+            end loop;
+
+            Active_Workers := @ - 1;
+
+            Net_Time := @ + (Ada.Calendar.Clock - Time_Start);
+         exception
+         when E : others =>
+
+            Active_Workers := @ - 1;
+            Fatal          := True;
+            States_To_Check.Clear; -- force an early end.
+            declare
+               use Ada.Text_IO;
+               use Ada.Exceptions;
+            begin
+               Error_ID       := Exception_Identity (E);
+               Error_Message  := +Exception_Message (E);
+               Put_Line
+                 (Standard_Error, "(super) Update exception: " & 
Exception_Name (E) & ": " & Exception_Message (E));
+            end;
+         end Update;
+
+         procedure Fatal_Error
+           (Exception_ID : in Ada.Exceptions.Exception_Id;
+            Message      : in String)
+         is begin
+            Supervisor.Error_ID      := Exception_ID;
+            Supervisor.Error_Message := +Message;
+
+            States_To_Check.Clear; -- force an early end.
+            Fatal          := True;
+            Active_Workers := @ - 1;
+         end Fatal_Error;
+
+         entry Done
+           (ID      : out Ada.Exceptions.Exception_Id;
+            Message : out Ada.Strings.Unbounded.Unbounded_String)
+         when Fatal or (Active_Workers = 0 and States_To_Check.Is_Empty)
+         is begin
+            if Trace_Time then
+               Ada.Text_IO.Put_Line
+                 ("(super) net time:" & Net_Time'Image &
+                    " states:" & C.Last_Index'Image &
+                    " States_To_Check:" & States_To_Check.Length'Image &
+                    " Found_States:" & Found_States'Image);
+            end if;
+
+            ID      := Supervisor.Error_ID;
+            Message := Supervisor.Error_Message;
+         end Done;
+
+         function Get_C return Item_Set_List
+         is begin
+            return C;
+         end Get_C;
+
+      end Supervisor;
+
+      task type Worker_Task
+      is
+         entry Start (ID : in LR1_Item_Sets_Parallel.Worker_ID);
+         --  Start states from Supervisor. Stop when Supervisor returns
+         --  Invalid_State_Index;
+      end Worker_Task;
+
+      task body Worker_Task
+      is
+         use all type Ada.Calendar.Time;
+
+         ID : LR1_Item_Sets_Parallel.Worker_ID;
+
+         Time_Start     : Ada.Calendar.Time;
+         Net_Time       : Duration := 0.0; --  Time spent outside Supervisor
+         States_Checked : Integer  := 0;
+         States_Found   : Integer  := 0;
+
+         C_Tree : Item_Set_Tree;          -- Local copy for fast find
+         C      : Item_Set_Arrays.Vector; -- Local copy of subset of C to search; from Supervisor
+
+         Local_New_State : State_Index := 1;
+
+         --  See Supervisor Get, Update for definitions of these.
+         New_C       : Item_Set_Arrays.Vector;
+         New_C_Tree  : Item_Set_Tree;
+         Worker_Data : State_Array_Worker_Data.Vector;
+
+         procedure Check_State (C_Index : in State_Index)
+         is
+            C_I : Item_Set renames C (C_Index);
+            Worker_Data : LR1_Item_Sets_Parallel.Worker_Data renames 
Worker_Task.Worker_Data (C_Index);
+         begin
+            States_Checked := @ + 1;
+            Worker_Data.From_State := C_I.Tree_Node.State;
+            Worker_Data.New_States.Clear;
+            Worker_Data.Existing_Goto_Items.Clear;
+            Worker_Data.New_Goto_Items.Clear;
+
+            for Dot_ID_I in C_I.Dot_IDs.First_Index .. C_I.Dot_IDs.Last_Index 
loop
+               --  [dragon] has 'for each grammar symbol X', but LR1_Goto_Transitions
+               --  rejects Symbol that is not in Dot_IDs, so we iterate over that.
+
+               declare
+                  Symbol       : Token_ID renames C_I.Dot_IDs (Dot_ID_I);
+                  New_Item_Set : Item_Set := LR1_Goto_Transitions
+                    (C_I, Symbol, Has_Empty_Production, 
First_Terminal_Sequence, Grammar, Descriptor);
+               begin
+                  Compute_Key_Hash (New_Item_Set, C_Tree.Rows, Grammar, 
Descriptor, True);
+                  declare
+                     use Item_Set_Trees;
+
+                     --  First search in Worker.C_Tree
+                     Found_Cur : Cursor := C_Tree.Find 
(New_Item_Set.Tree_Node);
+
+                     Found_State : constant Unknown_State_Index :=
+                       (if Has_Element (Found_Cur)
+                        then C_Tree.Constant_Ref (Found_Cur).State
+                        else Unknown_State);
+                  begin
+                     if Found_State = Unknown_State then
+                        Found_Cur := New_C_Tree.Find (New_Item_Set.Tree_Node);
+
+                        if Has_Element (Found_Cur) then
+                           --  Local_New_State was previously generated from some other state we
+                           --  are checking.
+                           Worker_Data.New_Goto_Items.Append ((Symbol, 
C_Tree.Constant_Ref (Found_Cur).State));
+
+                        else
+                           Worker_Data.New_Goto_Items.Append ((Symbol, 
Local_New_State));
+
+                           New_Item_Set.Tree_Node.State := Local_New_State;
+                           New_Item_Set.Dot_IDs := Get_Dot_IDs (Grammar, 
New_Item_Set.Set, Descriptor);
+                           New_C.Append (New_Item_Set);
+                           pragma Assert (New_C.Last_Index = Local_New_State);
+                           Worker_Data.New_States.Append (Local_New_State);
+
+                           New_C_Tree.Insert (New_Item_Set.Tree_Node, 
Duplicate => SAL.Error);
+
+                           Local_New_State := Local_New_State + 1;
+                        end if;
+                     else
+                        States_Found := @ + 1;
+                        pragma Assert (C_I.Goto_List.Count = 0);
+                        Worker_Data.Existing_Goto_Items.Append ((Symbol, 
Found_State));
+                        if Trace_Generate_Table > Extra then
+                           Ada.Text_IO.Put_Line
+                             ("(worker" & ID'Image & ")  state" & 
Worker_Data.From_State'Image & " adding goto on " &
+                                Image (Symbol, Descriptor) & " to existing 
state" & Image
+                                  (C_Tree.Variable_Ref (Found_Cur)));
+                        end if;
+                     end if;
+                  end;
+               end;
+            end loop;
+         end Check_State;
+      begin
+         select
+            accept Start (ID : in LR1_Item_Sets_Parallel.Worker_ID)
+
+            do
+               Worker_Task.ID := ID;
+            end Start;
+         or
+            terminate;
+         end select;
+
+         C_Tree.Set_Rows (Hash_Table_Size);
+         New_C_Tree.Set_Rows (Hash_Table_Size);
+
+         loop
+            declare
+               Keys_To_Store : Item_Set_Tree_Node_Arrays.Vector;
+            begin
+               Supervisor.Get (ID, C, Keys_To_Store);
+               exit when C.Length = 0;
+
+               Time_Start := Ada.Calendar.Clock;
+
+               for Set of C loop
+                  --  C are all new states to check, but they may
+                  --  have been in a previous Keys_To_Store.
+                  C_Tree.Insert (Set.Tree_Node, Duplicate => SAL.Ignore);
+               end loop;
+               for Node of Keys_To_Store loop
+                  --  States are added to Keys_To_Store when they are new in
+                  --  Supervisor.C_Tree, before they are given to any worker to check;
+                  --  they may also be in C
+                  C_Tree.Insert (Node, Duplicate => SAL.Ignore);
+               end loop;
+            end;
+
+            Local_New_State := 1;
+            New_C.Set_First_Last (First => Local_New_State, Last => 
Local_New_State - 1);
+            New_C_Tree.Clear; -- IMPROVEME: new_c_tree red_black should use vector store, not allocate each node
+
+            Worker_Data.Set_First_Last (C.First_Index, C.Last_Index);
+
+            for I in C.First_Index .. C.Last_Index loop
+               Check_State (I);
+            end loop;
+            C.Clear;
+
+            Net_Time := @ + (Ada.Calendar.Clock - Time_Start);
+            Supervisor.Update (ID, New_C, Worker_Data);
+            Time_Start := Ada.Calendar.Clock;
+
+            --  New_C.Tree_Node.State updated; insert into C_Tree.
+            for Item of New_C loop
+               C_Tree.Insert (Item.Tree_Node, Duplicate => SAL.Error);
+            end loop;
+            Net_Time := @ + (Ada.Calendar.Clock - Time_Start);
+         end loop;
+
+         if Trace_Time then
+            declare
+               Elements, Max_Row_Depth, Average_Row_Depth : 
Ada.Containers.Count_Type;
+               Rows, Empty_Rows : Integer;
+            begin
+               C_Tree.Sizes (Elements, Rows, Max_Row_Depth, Average_Row_Depth, 
Empty_Rows);
+
+               Ada.Text_IO.Put_Line
+                 ("(worker" & ID'Image & ") net time" & Net_Time'Image &
+                    " states checked:" & States_Checked'Image & " states 
found:" & States_Found'Image &
+                    " hash table states:" & Elements'Image &
+                    " rows:" & Rows'Image &
+                    " max_row_depth:" & Max_Row_Depth'Image &
+                    " average_row_depth:" & Average_Row_Depth'Image &
+                    " empty_rows:" & Empty_Rows'Image);
+            end;
+         end if;
+
+         if Trace_Generate_Table > Outline then
+            Ada.Text_IO.Put_Line ("(worker" & ID'Image & ") terminate");
+         end if;
+      exception
+      when E : others =>
+         Supervisor.Fatal_Error (Ada.Exceptions.Exception_Identity (E), 
Ada.Exceptions.Exception_Message (E));
+         if Trace_Generate_Table > Outline then
+            Ada.Text_IO.Put_Line ("(worker" & ID'Image & ") terminate on 
exception");
+         end if;
+      end Worker_Task;
+
+      Worker_Tasks : array
+        (1 .. System.Multiprocessors.CPU_Range'Min
+         (Task_Count,
+         System.Multiprocessors.CPU_Range'Max (1, 
System.Multiprocessors.Number_Of_CPUs)))
+        of Worker_Task;
+
+      First_State_Index : constant State_Index := 0;
+
+      First_Item_Set : Item_Set := Closure
+        ((Set            => Item_Lists.To_List
+            ((Prod       => (Grammar.First_Index, 0),
+              Dot        => Grammar (Grammar.First_Index).RHSs 
(0).Tokens.First_Index,
+              Lookaheads => To_Lookahead (Descriptor.EOI_ID))),
+          Tree_Node      =>
+            (State       => First_State_Index,
+             others      => <>),
+          others         => <>),
+         Has_Empty_Production, First_Terminal_Sequence, Grammar, Descriptor);
+   begin
+      Supervisor.Initialize (LR1_Item_Sets_Parallel.Worker_ID 
(Worker_Tasks'Last), First_Item_Set);
+
+      if Trace_Generate_Table > Outline then
+         Ada.Text_IO.Put_Line (Worker_Tasks'Length'Image & " lr1_items worker 
tasks");
+      end if;
+
+      for I in Worker_Tasks'Range loop
+         Worker_Tasks (I).Start (LR1_Item_Sets_Parallel.Worker_ID (I));
+      end loop;
+
+      declare
+         use Ada.Exceptions;
+         ID      : Exception_Id;
+         Message : Ada.Strings.Unbounded.Unbounded_String;
+      begin
+         Supervisor.Done (ID, Message); -- Wait for all states to be checked
+
+         if ID /= Null_Id then
+            for I in Worker_Tasks'Range loop
+               if not Worker_Tasks (I)'Terminated then
+                  abort Worker_Tasks (I);
+               end if;
+            end loop;
+            Raise_Exception (ID, -Message);
+         else
+            if Trace_Generate_Table > Outline then
+               Ada.Text_IO.Put_Line ("super reports done");
+            end if;
+         end if;
+      end;
+      return Supervisor.Get_C;
+   end LR1_Item_Sets_Parallel;
 
    procedure Add_Actions
-     (Item_Sets            : in     LR1_Items.Item_Set_List;
-      Grammar              : in     WisiToken.Productions.Prod_Arrays.Vector;
-      Has_Empty_Production : in     Token_ID_Set;
-      First_Nonterm_Set    : in     Token_Array_Token_Set;
-      Conflict_Counts      :    out Conflict_Count_Lists.List;
-      Conflicts            :    out Conflict_Lists.List;
-      Table                : in out Parse_Table;
-      Descriptor           : in     WisiToken.Descriptor)
+     (Item_Sets          : in     LR1_Items.Item_Set_List;
+      Table              : in out Parse_Table;
+      Grammar            : in     WisiToken.Productions.Prod_Arrays.Vector;
+      Descriptor         : in     WisiToken.Descriptor;
+      Declared_Conflicts : in out WisiToken.Generate.LR.Conflict_Lists.Tree;
+      Unknown_Conflicts  : in out WisiToken.Generate.LR.Conflict_Lists.Tree;
+      First_Nonterm_Set  : in     WisiToken.Token_Array_Token_Set;
+      File_Name          : in     String;
+      Ignore_Conflicts   : in     Boolean)
    is
       --  Add actions for all Item_Sets to Table.
    begin
       for Item_Set of Item_Sets loop
          Add_Actions
-           (Item_Set, Table, Grammar, Has_Empty_Production, First_Nonterm_Set, Conflict_Counts, Conflicts, Descriptor);
+           (Item_Set, Table, Grammar, Descriptor, Declared_Conflicts, Unknown_Conflicts, First_Nonterm_Set, File_Name,
+            Ignore_Conflicts);
       end loop;
 
       if Trace_Generate_Table > Outline then
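
LR1_Item_Sets_Parallel above coordinates a pool of worker tasks through the protected Supervisor object: the guarded Get entry hands out states to check (and blocks while other workers may still produce new ones), Update merges worker results back in, and the entry barriers decide when everything has been checked. A much reduced, self-contained sketch of that shape (pre-seeded integer "work items" stand in for item sets; names are illustrative):

    with Ada.Text_IO; use Ada.Text_IO;
    procedure Supervisor_Sketch is
       type Work_List is array (1 .. 3) of Integer;

       protected Supervisor is
          entry Get (Item : out Integer; Stop : out Boolean);
          procedure Done;
       private
          Items  : Work_List := (10, 20, 30); -- pre-seeded work for the sketch
          Next   : Positive  := 1;
          Active : Natural   := 0;
       end Supervisor;

       protected body Supervisor is
          --  Open when work remains, or when nothing remains and no worker is
          --  still busy (a busy worker might still report results).
          entry Get (Item : out Integer; Stop : out Boolean)
            when Next <= Items'Last or Active = 0 is
          begin
             if Next <= Items'Last then
                Item   := Items (Next);
                Next   := Next + 1;
                Active := Active + 1;
                Stop   := False;
             else
                Item := 0;
                Stop := True;
             end if;
          end Get;

          procedure Done is
          begin
             Active := Active - 1;
          end Done;
       end Supervisor;

       task type Worker_Task;
       task body Worker_Task is
          Item : Integer;
          Stop : Boolean;
       begin
          loop
             Supervisor.Get (Item, Stop);
             exit when Stop;
             Put_Line ("checked item" & Integer'Image (Item));
             Supervisor.Done; -- report back and go inactive, like Supervisor.Update above
          end loop;
       end Worker_Task;

       Workers : array (1 .. 2) of Worker_Task;
    begin
       null; -- all work happens in the worker tasks
    end Supervisor_Sketch;
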
@@ -202,15 +837,26 @@ package body WisiToken.Generate.LR.LR1_Generate is
    function Generate
      (Grammar               : in out WisiToken.Productions.Prod_Arrays.Vector;
       Descriptor            : in     WisiToken.Descriptor;
-      Known_Conflicts       : in     Conflict_Lists.List := Conflict_Lists.Empty_List;
-      McKenzie_Param        : in     McKenzie_Param_Type := Default_McKenzie_Param;
-      Parse_Table_File_Name : in     String              := "";
-      Include_Extra         : in     Boolean             := False;
-      Ignore_Conflicts      : in     Boolean             := False;
-      Partial_Recursion     : in     Boolean             := True)
+      Grammar_File_Name     : in     String;
+      Known_Conflicts       : in     Conflict_Lists.Tree              := Conflict_Lists.Empty_Tree;
+      McKenzie_Param        : in     McKenzie_Param_Type              := Default_McKenzie_Param;
+      Max_Parallel          : in     SAL.Base_Peek_Type               := 15;
+      Parse_Table_File_Name : in     String                           := "";
+      Include_Extra         : in     Boolean                          := False;
+      Ignore_Conflicts      : in     Boolean                          := False;
+      Partial_Recursion     : in     Boolean                          := True;
+      Task_Count            : in     System.Multiprocessors.CPU_Range := 1;
+      Hash_Table_Size       : in     Positive                         := LR1_Items.Item_Set_Trees.Default_Rows;
+      Use_Cached_Recursions : in     Boolean                          := False;
+      Recursions            : in out WisiToken.Generate.Recursions)
      return Parse_Table_Ptr
    is
-      use type Ada.Containers.Count_Type;
+      use all type System.Multiprocessors.CPU_Range;
+
+      Time_Start            : constant Ada.Calendar.Time := Ada.Calendar.Clock;
+      Add_Actions_Time      : Ada.Calendar.Time;
+      Minimal_Actions_Time  : Ada.Calendar.Time;
+      Collect_Conflict_Time : Ada.Calendar.Time;
 
       Ignore_Unused_Tokens     : constant Boolean := WisiToken.Trace_Generate_Table > Detail;
       Ignore_Unknown_Conflicts : constant Boolean := Ignore_Conflicts or WisiToken.Trace_Generate_Table > Detail;
@@ -221,12 +867,10 @@ package body WisiToken.Generate.LR.LR1_Generate is
       Nullable : constant Token_Array_Production_ID := WisiToken.Generate.Nullable (Grammar);
       Has_Empty_Production : constant Token_ID_Set := WisiToken.Generate.Has_Empty_Production (Nullable);
 
-      Recursions : constant WisiToken.Generate.Recursions :=
-        (if Partial_Recursion
-         then WisiToken.Generate.Compute_Partial_Recursion (Grammar, Descriptor)
-         else WisiToken.Generate.Compute_Full_Recursion (Grammar, Descriptor));
+      Recursions_Time : Ada.Calendar.Time := Ada.Calendar.Clock;
+
       Minimal_Terminal_Sequences : constant Minimal_Sequence_Array :=
-        Compute_Minimal_Terminal_Sequences (Descriptor, Grammar);
+        Compute_Minimal_Terminal_Sequences (Descriptor, Grammar, Grammar_File_Name);
 
       Minimal_Terminal_First : constant Token_Array_Token_ID :=
        Compute_Minimal_Terminal_First (Descriptor, Minimal_Terminal_Sequences);
@@ -237,20 +881,44 @@ package body WisiToken.Generate.LR.LR1_Generate is
       First_Terminal_Sequence : constant Token_Sequence_Arrays.Vector :=
        WisiToken.Generate.To_Terminal_Sequence_Array (First_Nonterm_Set, Descriptor);
 
-      Item_Sets : constant LR1_Items.Item_Set_List := LR1_Item_Sets
-        (Has_Empty_Production, First_Terminal_Sequence, Grammar, Descriptor);
+      Item_Sets : constant LR1_Items.Item_Set_List :=
+        (if Task_Count = 1
+         then LR1_Item_Sets_Single
+           (Has_Empty_Production, First_Terminal_Sequence, Grammar, Descriptor, Hash_Table_Size)
+        else LR1_Item_Sets_Parallel
+           (Has_Empty_Production, First_Terminal_Sequence, Grammar, Descriptor, Task_Count,
+            Hash_Table_Size));
+
+      Unknown_Conflicts    : Conflict_Lists.Tree;
+      Known_Conflicts_Edit : Conflict_Lists.Tree := Known_Conflicts;
+
+      Initial_Item_Sets_Time : constant Ada.Calendar.Time := Ada.Calendar.Clock;
 
-      Conflict_Counts      : Conflict_Count_Lists.List;
-      Unknown_Conflicts    : Conflict_Lists.List;
-      Known_Conflicts_Edit : Conflict_Lists.List := Known_Conflicts;
    begin
-      if Trace_Generate_Table + Trace_Generate_Minimal_Complete > Outline then
+      if not Use_Cached_Recursions or Recursions = Empty_Recursions then
+         Recursions :=
+           (if Partial_Recursion
+            then WisiToken.Generate.Compute_Partial_Recursion (Grammar, Descriptor)
+            else WisiToken.Generate.Compute_Full_Recursion (Grammar, Descriptor));
+      end if;
+      Set_Grammar_Recursions (Recursions, Grammar);
+      Recursions_Time := Ada.Calendar.Clock;
+
+      if Trace_Time then
+         Ada.Text_IO.Put_Line
+           (Ada.Text_IO.Standard_Error, "compute kernels, recursion time:" &
+              Duration'Image (Ada.Calendar."-" (Recursions_Time, Time_Start)));
+         Ada.Text_IO.Put_Line
+           ("initial item_sets time:" & Duration'Image (Ada.Calendar."-" (Recursions_Time, Initial_Item_Sets_Time)));
+      end if;
+      if Trace_Generate_Table + Trace_Generate_Minimal_Complete > Detail then
          Ada.Text_IO.New_Line;
          Ada.Text_IO.Put_Line ("LR1_Generate:");
-         if Trace_Generate_Table > Outline then
+         if Trace_Generate_Table > Detail then
             Ada.Text_IO.Put_Line ("Item_Sets:");
             LR1_Items.Put (Grammar, Descriptor, Item_Sets);
          end if;
+         Ada.Text_IO.New_Line;
       end if;
 
       Table := new Parse_Table
@@ -276,7 +944,7 @@ package body WisiToken.Generate.LR.LR1_Generate is
             Fast_Forward                => Default_McKenzie_Param.Fast_Forward,
             Matching_Begin              => Default_McKenzie_Param.Matching_Begin,
             Ignore_Check_Fail           => Default_McKenzie_Param.Ignore_Check_Fail,
-            Task_Count                  => Default_McKenzie_Param.Task_Count,
+            Zombie_Limit                => Default_McKenzie_Param.Zombie_Limit,
             Check_Limit                 => Default_McKenzie_Param.Check_Limit,
             Check_Delta_Limit           => Default_McKenzie_Param.Check_Delta_Limit,
             Enqueue_Limit               => Default_McKenzie_Param.Enqueue_Limit);
@@ -284,9 +952,17 @@ package body WisiToken.Generate.LR.LR1_Generate is
          Table.McKenzie_Param := McKenzie_Param;
       end if;
 
+      Table.Max_Parallel := Max_Parallel;
+
       Add_Actions
-        (Item_Sets, Grammar, Has_Empty_Production, First_Nonterm_Set,
-         Conflict_Counts, Unknown_Conflicts, Table.all, Descriptor);
+        (Item_Sets, Table.all, Grammar, Descriptor, Known_Conflicts_Edit, Unknown_Conflicts, First_Nonterm_Set,
+         Grammar_File_Name, Ignore_Conflicts);
+
+      if Trace_Time then
+         Add_Actions_Time := Ada.Calendar.Clock;
+         Ada.Text_IO.Put_Line
+           ("add_actions time:" & Duration'Image (Ada.Calendar."-" (Add_Actions_Time, Initial_Item_Sets_Time)));
+      end if;
 
       for State in Table.States'Range loop
          if Trace_Generate_Minimal_Complete > Extra then
@@ -298,13 +974,27 @@ package body WisiToken.Generate.LR.LR1_Generate is
             Descriptor, Grammar, Nullable, Minimal_Terminal_Sequences, Minimal_Terminal_First);
       end loop;
 
+      if Trace_Time then
+         Minimal_Actions_Time := Ada.Calendar.Clock;
+         Ada.Text_IO.Put_Line
+           ("compute minimal actions time:" & Duration'Image
+              (Ada.Calendar."-" (Minimal_Actions_Time, Add_Actions_Time)));
+      end if;
+
+      if Trace_Time then
+         Collect_Conflict_Time := Ada.Calendar.Clock;
+         Ada.Text_IO.Put_Line
+           ("compute conflicts time:" & Duration'Image
+              (Ada.Calendar."-" (Collect_Conflict_Time, 
Minimal_Actions_Time)));
+      end if;
+
       if Parse_Table_File_Name /= "" then
          WisiToken.Generate.LR.Put_Parse_Table
-           (Table, Parse_Table_File_Name, "LR1", Grammar, Recursions, 
Item_Sets, Conflict_Counts, Descriptor,
-            Include_Extra);
+           (Table, Parse_Table_File_Name, "LR1", Grammar, Recursions, 
Item_Sets, Known_Conflicts_Edit,
+            Unknown_Conflicts, Descriptor, Include_Extra);
       end if;
 
-      if Trace_Generate_Table > Outline then
+      if Trace_Generate_Table > Detail then
          Ada.Text_IO.New_Line;
          Ada.Text_IO.Put_Line ("Has_Empty_Production: " & Image 
(Has_Empty_Production, Descriptor));
 
@@ -319,21 +1009,8 @@ package body WisiToken.Generate.LR.LR1_Generate is
          end loop;
       end if;
 
-      Delete_Known (Unknown_Conflicts, Known_Conflicts_Edit);
-
-      if Unknown_Conflicts.Length > 0 then
-         Ada.Text_IO.Put_Line (Ada.Text_IO.Current_Error, "unknown 
conflicts:");
-         Put (Unknown_Conflicts, Ada.Text_IO.Current_Error, Descriptor);
-         Ada.Text_IO.New_Line (Ada.Text_IO.Current_Error);
-         WisiToken.Generate.Error := WisiToken.Generate.Error or not 
Ignore_Unknown_Conflicts;
-      end if;
-
-      if Known_Conflicts_Edit.Length > 0 then
-         Ada.Text_IO.Put_Line (Ada.Text_IO.Current_Error, "excess known 
conflicts:");
-         Put (Known_Conflicts_Edit, Ada.Text_IO.Current_Error, Descriptor);
-         Ada.Text_IO.New_Line (Ada.Text_IO.Current_Error);
-         WisiToken.Generate.Error := WisiToken.Generate.Error or not 
Ignore_Unknown_Conflicts;
-      end if;
+      Check_Conflicts
+        ("LR1", Unknown_Conflicts, Known_Conflicts_Edit, Grammar_File_Name, 
Descriptor, Ignore_Unknown_Conflicts);
 
       WisiToken.Generate.Error := WisiToken.Generate.Error or (Unused_Tokens 
and not Ignore_Unused_Tokens);
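
The Trace_Time blocks added above all follow one pattern: capture
Ada.Calendar.Clock after each phase and print Duration'Image of the
difference between consecutive timestamps. Below is a minimal standalone
sketch of that pattern; the phase names and the delay statements standing
in for real work are illustrative only, not part of WisiToken.

   with Ada.Calendar; use Ada.Calendar;
   with Ada.Text_IO;  use Ada.Text_IO;
   procedure Time_Phases is
      Time_Start, Phase_1_Time, Phase_2_Time : Time;
   begin
      Time_Start := Clock;

      delay 0.01; --  stand-in for the first phase (e.g. computing item sets)
      Phase_1_Time := Clock;

      delay 0.02; --  stand-in for the second phase (e.g. adding actions)
      Phase_2_Time := Clock;

      --  "-" on two Ada.Calendar.Time values yields a Duration.
      Put_Line ("phase 1 time:" & Duration'Image (Phase_1_Time - Time_Start));
      Put_Line ("phase 2 time:" & Duration'Image (Phase_2_Time - Phase_1_Time));
   end Time_Phases;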
 
diff --git a/wisitoken-generate-lr-lr1_generate.ads 
b/wisitoken-generate-lr-lr1_generate.ads
index d0f2d9f29c..b37388fbc2 100644
--- a/wisitoken-generate-lr-lr1_generate.ads
+++ b/wisitoken-generate-lr-lr1_generate.ads
@@ -25,6 +25,7 @@
 
 pragma License (Modified_GPL);
 
+with System.Multiprocessors;
 with WisiToken.Generate.LR1_Items;
 with WisiToken.Productions;
 package WisiToken.Generate.LR.LR1_Generate is
@@ -32,12 +33,18 @@ package WisiToken.Generate.LR.LR1_Generate is
    function Generate
      (Grammar               : in out WisiToken.Productions.Prod_Arrays.Vector;
       Descriptor            : in     WisiToken.Descriptor;
-      Known_Conflicts       : in     Conflict_Lists.List := 
Conflict_Lists.Empty_List;
-      McKenzie_Param        : in     McKenzie_Param_Type := 
Default_McKenzie_Param;
-      Parse_Table_File_Name : in     String              := "";
-      Include_Extra         : in     Boolean             := False;
-      Ignore_Conflicts      : in     Boolean             := False;
-      Partial_Recursion     : in     Boolean             := True)
+      Grammar_File_Name     : in     String;
+      Known_Conflicts       : in     Conflict_Lists.Tree              := 
Conflict_Lists.Empty_Tree;
+      McKenzie_Param        : in     McKenzie_Param_Type              := 
Default_McKenzie_Param;
+      Max_Parallel          : in     SAL.Base_Peek_Type               := 15;
+      Parse_Table_File_Name : in     String                           := "";
+      Include_Extra         : in     Boolean                          := False;
+      Ignore_Conflicts      : in     Boolean                          := False;
+      Partial_Recursion     : in     Boolean                          := True;
+      Task_Count            : in     System.Multiprocessors.CPU_Range := 1;
+      Hash_Table_Size       : in     Positive                         := 
LR1_Items.Item_Set_Trees.Default_Rows;
+      Use_Cached_Recursions : in     Boolean                          := False;
+      Recursions            : in out WisiToken.Generate.Recursions)
      return Parse_Table_Ptr
    with Pre => Descriptor.First_Nonterminal = Descriptor.Accept_ID;
    --  Generate a generalized LR1 parse table for Grammar. The
@@ -56,36 +63,30 @@ package WisiToken.Generate.LR.LR1_Generate is
    --
    --  Unless Ignore_Unknown_Conflicts is True, raise Grammar_Error if there
    --  are unknown conflicts.
+   --
+   --  Use Task_Count tasks in computing LR1 items. Default is 1 so unit
+   --  tests return repeatable results.
 
    ----------
    --  visible for unit test
 
-   function LR1_Goto_Transitions
-     (Set                     : in LR1_Items.Item_Set;
-      Symbol                  : in Token_ID;
-      Has_Empty_Production    : in Token_ID_Set;
+   function LR1_Item_Sets_Single
+     (Has_Empty_Production    : in Token_ID_Set;
       First_Terminal_Sequence : in Token_Sequence_Arrays.Vector;
       Grammar                 : in WisiToken.Productions.Prod_Arrays.Vector;
-      Descriptor              : in WisiToken.Descriptor)
-     return LR1_Items.Item_Set;
-   --  'goto' from [dragon] algorithm 4.9
+      Descriptor              : in WisiToken.Descriptor;
+      Hash_Table_Size         : in Positive := 
LR1_Items.Item_Set_Trees.Default_Rows)
+     return LR1_Items.Item_Set_List;
+   --  [dragon] algorithm 4.9 pg 231; figure 4.38 pg 232; procedure "items", 
no tasking
 
-   function LR1_Item_Sets
+   function LR1_Item_Sets_Parallel
      (Has_Empty_Production    : in Token_ID_Set;
       First_Terminal_Sequence : in Token_Sequence_Arrays.Vector;
       Grammar                 : in WisiToken.Productions.Prod_Arrays.Vector;
-      Descriptor              : in WisiToken.Descriptor)
+      Descriptor              : in WisiToken.Descriptor;
+      Task_Count              : in System.Multiprocessors.CPU_Range;
+      Hash_Table_Size         : in Positive := 
LR1_Items.Item_Set_Trees.Default_Rows)
      return LR1_Items.Item_Set_List;
-   --  [dragon] algorithm 4.9 pg 231; figure 4.38 pg 232; procedure "items"
-
-   procedure Add_Actions
-     (Item_Sets            : in     LR1_Items.Item_Set_List;
-      Grammar              : in     WisiToken.Productions.Prod_Arrays.Vector;
-      Has_Empty_Production : in     Token_ID_Set;
-      First_Nonterm_Set    : in     Token_Array_Token_Set;
-      Conflict_Counts      :    out Conflict_Count_Lists.List;
-      Conflicts            :    out Conflict_Lists.List;
-      Table                : in out Parse_Table;
-      Descriptor           : in     WisiToken.Descriptor);
+   --  With tasking; used if State_Count known.
 
 end WisiToken.Generate.LR.LR1_Generate;
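
The new Task_Count parameter (System.Multiprocessors.CPU_Range) lets
LR1_Item_Sets_Parallel spread item-set construction over several tasks; the
task bodies themselves are not shown in this file. The usual Ada shape for
such a scheme is a pool of worker tasks pulling work items from a shared
protected object, as in the small self-contained sketch below; the names,
the work items, and the use of Number_Of_CPUs are illustrative assumptions,
not taken from the WisiToken sources.

   with Ada.Text_IO;
   with System.Multiprocessors;
   procedure Parallel_Workers is
      use System.Multiprocessors;

      Task_Count : constant CPU_Range := Number_Of_CPUs;
      --  The generator defaults to 1, so unit test results are repeatable.

      protected Queue is
         procedure Next (Item : out Natural);
      private
         Last_Issued : Natural := 0;
      end Queue;

      protected body Queue is
         procedure Next (Item : out Natural) is
         begin
            Last_Issued := Last_Issued + 1;
            Item        := Last_Issued;
         end Next;
      end Queue;

      task type Worker;
      task body Worker is
         Item : Natural;
      begin
         loop
            Queue.Next (Item);
            exit when Item > 20;
            --  Stand-in for building one LR1 item set.
            Ada.Text_IO.Put_Line ("item" & Item'Image);
         end loop;
      end Worker;

      Workers : array (1 .. Integer (Task_Count)) of Worker;
   begin
      Ada.Text_IO.Put_Line ("started" & Integer'Image (Workers'Length) & " workers");
      --  Parallel_Workers returns only after every Worker has terminated.
   end Parallel_Workers;
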
diff --git a/wisitoken-generate-lr.adb b/wisitoken-generate-lr.adb
index d66746dec1..9a5aa54f4f 100644
--- a/wisitoken-generate-lr.adb
+++ b/wisitoken-generate-lr.adb
@@ -2,7 +2,7 @@
 --
 --  See spec.
 --
---  Copyright (C) 2017 - 2020 Free Software Foundation, Inc.
+--  Copyright (C) 2017 - 2022 Free Software Foundation, Inc.
 --
 --  This library is free software;  you can redistribute it and/or modify it
 --  under terms of the  GNU General Public License  as published by the Free
@@ -19,9 +19,10 @@ pragma License (GPL);
 
 with Ada.Strings.Fixed;
 with Ada.Text_IO;
-with System.Multiprocessors;
-with WisiToken.Generate;
+with WisiToken.Syntax_Trees;
+with SAL.Unix_Text_IO;
 package body WisiToken.Generate.LR is
+   use all type Conflict_Lists.Cursor;
 
    package RHS_Set is new SAL.Gen_Unbounded_Definite_Vectors (Natural, 
Boolean, Default_Element => False);
 
@@ -190,11 +191,125 @@ package body WisiToken.Generate.LR is
       end if;
    end Terminal_Sequence;
 
+   function To_Conflict (Action_Node : in Parse.LR.Action_Node) return Conflict
+   is
+      Node : Parse_Action_Node_Ptr := Action_Node.Actions;
+   begin
+      return Result : Conflict do
+         Result.On := Action_Node.Symbol;
+
+         --  We do not append to Result.States here; To_Conflict is called
+         --  multiple times for the same conflict, sometimes when the state is
+         --  unknown.
+
+         loop
+            if Node.Item.Verb = WisiToken.Parse.LR.Error then
+               raise SAL.Programmer_Error with "'Error' verb in conflict";
+            else
+               Result.Items.Insert ((Conflict_Parse_Actions'(Node.Item.Verb), 
Node.Item.Production.LHS));
+            end if;
+            Node := Node.Next;
+            exit when Node = null;
+         end loop;
+      end return;
+   end To_Conflict;
+
    ----------
    --  Public subprograms, declaration order
 
+   function Image (Conflict : in LR.Conflict; Descriptor : in 
WisiToken.Descriptor) return String
+   is
+      use Ada.Strings.Unbounded;
+      use all type Ada.Containers.Count_Type;
+
+      --  Must match wisitoken_grammar_runtime.adb Add_Declaration
+      --  "conflict"; see there for comment with format.
+
+      Result   : Unbounded_String :=
+        (if Conflict.Resolution = Invalid_Token_ID
+         then +"%conflict "
+         else +"%conflict_resolution ");
+      Need_Bar : Boolean          := False;
+
+      function Image (Item : in Conflict_Parse_Actions) return String
+      --  WORKAROUND: subtype_object'Image GNAT Community 2020 with -gnat2020
+      --  returns integer, not name.
+      is (case Item is
+          when Shift => "SHIFT",
+          when Reduce => "REDUCE",
+          when Accept_It => "ACCEPT_IT");
+   begin
+      for Item of Conflict.Items loop
+         if Need_Bar then
+            Result := Result & " | ";
+         else
+            Need_Bar := True;
+         end if;
+
+         Result := Result & Image (Item.Verb) & " " & Image (Item.LHS, 
Descriptor);
+      end loop;
+
+      Result := Result & " on token " & Image (Conflict.On, Descriptor);
+
+      Need_Bar := False;
+
+      if Conflict.States.Length > 0 then
+         Result := Result & " (";
+         for State of Conflict.States loop
+            if Need_Bar then
+               Result := Result & "," & State'Image;
+            else
+               Need_Bar := True;
+               Result := Result & Trimmed_Image (State);
+            end if;
+         end loop;
+
+         Result := Result & ")";
+      end if;
+      return -Result;
+   end Image;
+
+   function Conflict_Compare (Left, Right : in Conflict) return 
SAL.Compare_Result
+   is
+      use all type SAL.Compare_Result;
+      use all type SAL.Base_Peek_Type;
+   begin
+      if Left.On > Right.On then
+         return Greater;
+      elsif Left.On < Right.On then
+         return Less;
+      else
+         declare
+            I : SAL.Peek_Type := Left.Items.First_Index;
+            J : SAL.Peek_Type := Right.Items.First_Index;
+         begin
+            loop
+               case Conflict_Item_Compare (Left.Items (I), Right.Items (J)) is
+               when Greater =>
+                  return Greater;
+               when Less =>
+                  return Less;
+               when Equal =>
+                  I := I + 1;
+                  J := J + 1;
+
+                  if I > Left.Items.Last_Index and J > Right.Items.Last_Index 
then
+                     return Equal;
+                  elsif I > Left.Items.Last_Index then
+                     return Greater;
+                  elsif J > Right.Items.Last_Index then
+                     return Less;
+                  else
+                     null;
+                  end if;
+               end case;
+            end loop;
+         end;
+      end if;
+   end Conflict_Compare;
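
   --  Conflict_Compare above is a three-way lexicographic comparison: first
   --  on the On token, then element by element over the two sorted Items
   --  lists. The following standalone sketch shows the same shape over
   --  plain integer arrays; the names and the tie-breaking rule for arrays
   --  of unequal length are illustrative, not taken from the patch.

   with Ada.Text_IO;
   procedure Lexicographic_Demo is
      type Compare_Result is (Less, Equal, Greater);
      type Int_Array is array (Positive range <>) of Integer;

      function Compare (Left, Right : in Int_Array) return Compare_Result is
         I : Positive := Left'First;
         J : Positive := Right'First;
      begin
         loop
            if I > Left'Last and J > Right'Last then
               return Equal;
            elsif I > Left'Last then
               return Less;      --  left exhausted first
            elsif J > Right'Last then
               return Greater;   --  right exhausted first
            elsif Left (I) < Right (J) then
               return Less;
            elsif Left (I) > Right (J) then
               return Greater;
            else
               I := I + 1;
               J := J + 1;
            end if;
         end loop;
      end Compare;
   begin
      Ada.Text_IO.Put_Line (Compare_Result'Image (Compare ((1, 2), (1, 3))));    --  LESS
      Ada.Text_IO.Put_Line (Compare_Result'Image (Compare ((1, 2), (1, 2))));    --  EQUAL
      Ada.Text_IO.Put_Line (Compare_Result'Image (Compare ((2, 1), (1, 9, 9)))); --  GREATER
   end Lexicographic_Demo;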
+
    procedure Put
-     (Item       : in Conflict_Lists.List;
+     (Item       : in Conflict_Lists.Tree;
       File       : in Ada.Text_IO.File_Type;
       Descriptor : in WisiToken.Descriptor)
    is begin
@@ -203,19 +318,465 @@ package body WisiToken.Generate.LR is
       end loop;
    end Put;
 
+   function Apply_Optimized_List_Conflict
+     (Conflict          : in out Parse.LR.Action_Node;
+      Conflict_Count    : in     Integer;
+      Grammar           : in     WisiToken.Productions.Prod_Arrays.Vector;
+      Descriptor        : in     WisiToken.Descriptor;
+      First_Nonterm_Set : in     WisiToken.Token_Array_Token_Set;
+      File_Name         : in     String)
+     return Boolean
+   with Pre => Conflict_Count /= 0
+   --  If Conflict is due to an optimized_list, it is modified to
+   --  implement the appropriate conflict resolution, and the function
+   --  returns True. Otherwise, Conflict is not modified, and the
+   --  function returns False.
+   is
+      use all type Ada.Containers.Count_Type;
+
+      Temp : Parse_Action_Node_Ptr := Conflict.Actions;
+      Prev : Parse_Action_Node_Ptr := null;
+
+      procedure Report_Error (Message : in String)
+      is
+         use Ada.Text_IO;
+      begin
+         Put_Line (Current_Error, Error_Message (File_Name, 1, " " & Message & 
":"));
+         Put (Current_Error, Conflict.Actions, Descriptor);
+         New_Line (Current_Error);
+         WisiToken.Generate.Error := True;
+      end Report_Error;
+
+   begin
+      --  Also see wisitoken_grammar_runtime.adb Add_Nonterminal
+      --  Is_Optimized_List.
+
+      --  In the following examples, the parse tables are generated with
+      --  --ignore_conflicts, so the optimized_list resolutions are not
+      --  applied, and the full conflicts appear in the tables.
+      --
+      --  From optimized_list parse table (without applying conflict
+      --  resolution), the conflicts are:
+      --
+      --  State 8:
+      --       10.0:declarations <= declaration ^
+      --       10.1:declarations <= declarations declaration ^
+      --
+      --     PRAGMA           => reduce 1 tokens to declarations 10.0,
+      --                         reduce 2 tokens to declarations 10.1
+      --     IDENTIFIER       => reduce 1 tokens to declarations 10.0,
+      --                         reduce 2 tokens to declarations 10.1
+      --     Wisi_EOI         => reduce 1 tokens to declarations 10.0,
+      --                         reduce 2 tokens to declarations 10.1
+      --
+      --  In this state, we know 'declarations declaration' is on the parse
+      --  stack, and we want to use production 10.1 to reduce 2 tokens to
+      --  'declarations'.
+      --
+      --  resolution: reduce 2 tokens to declarations 10.1
+      --
+      --  State 9:
+      --        10.1:declarations <= declarations ^ declaration
+      --        10.2:declarations <= declarations declarations ^
+      --        10.2:declarations <= declarations ^ declarations
+      --
+      --     IDENTIFIER       => shift and goto state 1 9.0,
+      --                         reduce 2 tokens to declarations 10.2
+      --
+      --  In this state, 'declarations declarations' is on the stack (which
+      --  can only happen in incremental parse), so:
+      --
+      --  resolution: reduce 2 tokens to declarations 10.2
+
+      --  From optimized_list_ebnf; a list with a separator:
+      --
+      --  State 33:
+      --       17.1:term <= term ^ multiplying_operator IDENTIFIER
+      --       17.2:term <= term multiplying_operator term ^
+      --       17.2:term <= term ^ multiplying_operator term
+      --
+      --     SLASH                    => shift and goto state 18 16.1,
+      --                                 reduce 3 tokens to term 17.2
+      --
+      --  production 16.1 is: multiplying_operator <= SLASH
+      --
+      --  multiplying_operator is the second token in RHS 17.2 for term.
+      --
+      --  resolution: reduce 3 tokens to term 17.2
+
+      --  From empty_production_2_optimized_list:
+      --
+      --  State 3:
+      --        7.0:wisitoken_accept <= declarations ^ Wisi_EOI
+      --        9.0:declarations <= declarations ^ declaration
+      --
+      --     Wisi_EOI         => accept it 7.0,
+      --                         reduce 0 tokens to declaration 8.1
+      --
+      --  resolution: accept it 7.0
+
+      --  From ada_lite_ebnf; list element is a higher-level nonterm:
+      --
+      --  State 117:
+      --      135.1:statement_list <= statement_list ^ statement
+      --      135.2:statement_list <= statement_list statement_list ^
+      --      135.2:statement_list <= statement_list ^ statement_list
+      --     BEGIN     => reduce 0 tokens to block_label_opt 63.1,
+      --                  reduce 2 tokens to statement_list 135.2
+      --     CASE      => shift and goto state 1 67.0,
+      --                  reduce 2 tokens to statement_list 135.2
+      --
+      --  resolution: reduce 2 tokens to statement_list 135.2
+
+
+      --  From ada_annex_p, a conflict with three items, all from the same 
optimized_list:
+      --  State 585:
+      --      452.1:statement_statement_list <= statement_statement_list ^ 
statement
+      --      452.2:statement_statement_list <= statement_statement_list 
statement_statement_list ^
+      --      452.2:statement_statement_list <= statement_statement_list ^ 
statement_statement_list
+
+      --     PARALLEL    => shift and goto state 17 282.0,
+      --                    reduce 0 tokens to label_opt 279.1,
+      --                    reduce 2 tokens to statement_statement_list 452.2
+      --
+      --  resolution: reduce 2 tokens to statement_statement_list 452.2
+
+
+      --  From optimized_conflict_01; a conflict with one item from an 
optimized_list, the other not.
+      --
+      --  State 19:
+      --    14.0:subtype_indication <= IDENTIFIER RANGE simple_expression 
DOT_DOT simple_expression ^
+      --    15.1:simple_expression <= simple_expression ^ 
binary_adding_operator term
+      --    15.2:simple_expression <= simple_expression ^ 
binary_adding_operator simple_expression
+      --
+      --  AMPERSAND              => shift and goto state 11 18.2,
+      --                            reduce 5 tokens to subtype_indication 14.0
+      --
+      --  no resolution: keep both conflict items
+
+      --  We can distinguish optimized_list conflict items from others by
+      --  checking First_Nonterm_Set; if the LHS of an item A is in the
+      --  first nonterm set of the LHS of an optimized_list conflict item B,
+      --  they are from the same optimized list, and the resolution is to
+      --  delete item A. Alternately, A may be a list separator (as in
+      --  optimized_list_ebnf multiplying_operator above); then it is the
+      --  second token in the RHS of B, and the resolution is to delete item
+      --  A.
+
+      declare
+         Prods : Production_ID_Array (1 .. Conflict_Count) := (others => 
Invalid_Production_ID);
+
+         Opt_List_Count : Integer := 0;
+         I              : Integer := 1;
+         Opt_List_I     : Integer := 0;
+
+         Delete : array (1 .. Conflict_Count) of Boolean := (others => False);
+      begin
+         Temp := Conflict.Actions;
+         loop
+            exit when Temp = null;
+            Prods (I) := Temp.Item.Production;
+            if Grammar (Prods (I).LHS).Optimized_List then
+
+               if Opt_List_Count = 0 then
+                  Opt_List_I     := I;
+                  Opt_List_Count := @ + 1;
+               else
+                  if Prods (I) = Prods (Opt_List_I) then
+                     --  Similar to optimized_list state 8 above. Because of 
the way
+                     --  conflicts are encountered and ordered, we want 
Opt_List_I to be
+                     --  the later one. Token count is 3 if there is a 
separator in the list.
+                     pragma Assert (Temp.Item.Verb = Reduce and then 
Temp.Item.Token_Count in 2 | 3);
+                     Opt_List_I := I;
+
+                  else
+                     --  Something else is going on; a nested optimized_list? 
We report
+                     --  this as an error below.
+                     Opt_List_Count := @ + 1;
+                  end if;
+               end if;
+            end if;
+            Temp := Temp.Next;
+            I := I + 1;
+         end loop;
+
+         if Opt_List_Count = 0 then
+            --  Just a grammar conflict.
+            return False;
+
+         elsif Opt_List_Count = 1 then
+            --  Opt_List_I is the last conflict item in each of the examples
+            --  above.
+            declare
+               Opt_List_Prod : constant Production_ID := Prods (Opt_List_I);
+               Delete_Count  : Integer                := 0;
+            begin
+               for I in 1 .. Conflict_Count loop
+                  if I /= Opt_List_I then
+                     if First_Nonterm_Set (Opt_List_Prod.LHS, Prods (I).LHS) 
then
+                        --  Conflict item I is one of the other optimized_list
conflict items
+                        --  in the examples above.
+                        Delete (I) := True;
+                        Delete_Count := @ + 1;
+
+                     elsif Grammar (Opt_List_Prod.LHS).RHSs 
(Opt_List_Prod.RHS).Tokens.Length > 2 and then
+                       Prods (I).LHS = Grammar (Opt_List_Prod.LHS).RHSs 
(Opt_List_Prod.RHS).Tokens (2)
+                     then
+                        --  Prods (I).LHS is the list separator, as in
optimized_list_ebnf
+                        --  multiplying_operator above.
+                        Delete (I) := True;
+                        Delete_Count := @ + 1;
+                     else
+                        --  Conflict item I is from a grammar conflict, 
similar to the
+                        --  optimized_conflict_01 example above; keep it.
+                        null;
+                     end if;
+                  end if;
+               end loop;
+               if Delete_Count = 0 then
+                  return False;
+               elsif Delete_Count + 1 = Conflict_Count then
+                  --  Fully resolved; a pure optimized_list conflict. Do 
deletes below.
+                  null;
+               else
+                  --  Mixed optimized_list and grammar conflicts. FIXME: need 
test case.
+                  --  FIXME: also apply declared resolutions to this conflict.
+                  raise SAL.Not_Implemented with "Mixed optimized_list and 
grammar conflicts.";
+                  return False;
+               end if;
+            end;
+
+         else
+            --  Opt_List_Count > 1. There are several cases:
+            --
+            --  a) Similar to optimized_list state 8 or 9 above; all of the
+            --  conflict items have the same LHS.
+            --
+            --  b) Nested optimized_lists, as in sequence_of_statements in
+            --  optimized_list_ebnf.wy term. Look for an LHS that has the 
others
+            --  in First_Nonterm_Set.
+            --
+            --  c) Something else; report an error.
+
+            if (for all P of Prods => P.LHS = Prods (1).LHS) then
+               --  case a. Keep the production "list <= list list", which is 
the last
+               --  conflict item; auto-generated optimized_lists have that 
production
+               --  last, and %optimized_list requires it.
+               for I in 1 .. Conflict_Count - 1 loop
+                  Delete (I) := True;
+               end loop;
+            else
+               declare
+                  Candidate      : Integer  := 1;
+                  Candidate_Prod : Production_ID := Prods (Candidate);
+               begin
+                  Find_Candidate :
+                  loop
+                     loop
+                        exit Find_Candidate when Candidate > Conflict_Count;
+                        exit when Grammar (Prods 
(Candidate).LHS).Optimized_List;
+                        Candidate      := @ + 1;
+                        Candidate_Prod := Prods (Candidate);
+                     end loop;
+
+                     Valid_Candidate :
+                     for I in 1 .. Conflict_Count loop
+                        if I /= Candidate then
+                           if First_Nonterm_Set (Candidate_Prod.LHS, Prods 
(I).LHS) then
+                              Delete (I) := True;
+                           else
+                              --  Try the next candidate.
+                              Delete := (others => False);
+                              exit Valid_Candidate;
+                           end if;
+                        end if;
+                     end loop Valid_Candidate;
+                     Candidate := @ + 1;
+                  end loop Find_Candidate;
+
+                  if (for some B of Delete => B) then
+                     null; --  Do Delete below.
+                  else
+                     --  No valid candidate found
+                     Report_Error ("mixed optimized_list conflicts");
+                     return False;
+                  end if;
+               end;
+            end if;
+         end if;
+
+         Temp := Conflict.Actions;
+         for I in 1 .. Conflict_Count loop
+            if Delete (I) then
+               Parse.LR.Delete (Conflict, Prev, Temp);
+            else
+               Prev := Temp;
+               Temp := Temp.Next;
+            end if;
+         end loop;
+      end;
+
+      return True;
+   end Apply_Optimized_List_Conflict;
+
+   procedure Check_Conflicts
+     (Label            : in     String;
+      Found_Conflicts  : in out Conflict_Lists.Tree;
+      Known_Conflicts  : in out Conflict_Lists.Tree;
+      File_Name        : in     String;
+      Descriptor       : in     WisiToken.Descriptor;
+      Ignore_Conflicts : in     Boolean)
+   is
+      use Ada.Text_IO;
+      use Conflict_Lists;
+      use all type SAL.Compare_Result;
+      use all type Ada.Containers.Count_Type;
+
+      Known_Iter : constant Iterator := Known_Conflicts.Iterate;
+      Known      : Cursor            := Known_Iter.First;
+
+      Found_Iter : constant Iterator := Found_Conflicts.Iterate;
+      Found      : Cursor            := Found_Iter.First;
+
+      To_Delete : Conflict_Lists.Tree;
+   begin
+      --  First delete Known_Conflicts that are in the parse table, and
+      --  report resolutions that are not used
+      loop
+         exit when Known = No_Element;
+
+         if Known_Conflicts (Known).Resolution /= Invalid_Token_ID then
+            if not Known_Conflicts (Known).Resolution_Used then
+               New_Line (Current_Error);
+               Put_Line (Current_Error, Error_Message (File_Name, 1, Label & " 
excess conflict_resolution:"));
+               Put_Line (Current_Error, Image (Known_Conflicts 
(Known).Element.all, Descriptor));
+               WisiToken.Generate.Error := WisiToken.Generate.Error or not 
Ignore_Conflicts;
+            end if;
+            To_Delete.Insert (Element (Known));
+         elsif Known_Conflicts (Known).Conflict_Seen then
+            To_Delete.Insert (Element (Known));
+         end if;
+         Known := Known_Iter.Next (Known);
+      end loop;
+      for Conflict of To_Delete loop
+         Known_Conflicts.Delete (Conflict);
+      end loop;
+
+      Known := Known_Iter.First;
+      To_Delete.Clear;
+      loop
+         exit when Known = No_Element or Found = No_Element;
+
+         case Conflict_Compare
+           (Known_Conflicts.Constant_Ref (Known),
+            Found_Conflicts.Constant_Ref (Found))
+         is
+         when Greater =>
+            Found := Found_Iter.Next (Found);
+
+         when Less =>
+            Known := Known_Iter.Next (Known);
+
+         when Equal =>
+            To_Delete.Insert (Element (Known));
+            Known := Known_Iter.Next (Known);
+            Found := Found_Iter.Next (Found);
+         end case;
+      end loop;
+
+      for Conflict of To_Delete loop
+         Known_Conflicts.Delete (Conflict);
+         Found_Conflicts.Delete (Conflict);
+      end loop;
+
+      if Found_Conflicts.Length > 0 then
+         New_Line (Current_Error);
+         Put_Line (Current_Error, Error_Message (File_Name, 1, Label & " 
unknown conflicts:"));
+         Put (Found_Conflicts, Current_Error, Descriptor);
+         New_Line (Current_Error);
+         WisiToken.Generate.Error := WisiToken.Generate.Error or not 
Ignore_Conflicts;
+      end if;
+
+      if Known_Conflicts.Length > 0 then
+         New_Line (Current_Error);
+         Put_Line (Current_Error, Error_Message (File_Name, 1, Label & " 
excess known conflicts:"));
+         Put (Known_Conflicts, Current_Error, Descriptor);
+         New_Line (Current_Error);
+         WisiToken.Generate.Error := WisiToken.Generate.Error or not 
Ignore_Conflicts;
+      end if;
+
+   end Check_Conflicts;
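
   --  Check_Conflicts above relies on Conflict_Lists.Tree being ordered: a
   --  two-cursor sweep finds the conflicts present in both containers,
   --  collects them in To_Delete, and deletes them from both afterwards
   --  (the deletes are deferred so the trees are not modified while the
   --  sweep's cursors are still active). The same sweep over two
   --  Ada.Containers.Ordered_Sets, with illustrative data:

   with Ada.Text_IO;
   with Ada.Containers.Ordered_Sets;
   procedure Sweep_Demo is
      package Int_Sets is new Ada.Containers.Ordered_Sets (Integer);
      use Int_Sets;

      Known, Found, Common : Set;
      K, F : Cursor;
   begin
      Known.Insert (1); Known.Insert (4); Known.Insert (7);
      Found.Insert (4); Found.Insert (5); Found.Insert (7);

      K := Known.First;
      F := Found.First;
      while K /= No_Element and F /= No_Element loop
         if Element (K) < Element (F) then
            Next (K);
         elsif Element (F) < Element (K) then
            Next (F);
         else
            Common.Insert (Element (K));
            Next (K);
            Next (F);
         end if;
      end loop;

      --  Delete the common elements from both sets, as Check_Conflicts does
      --  with To_Delete.
      for E of Common loop
         Known.Delete (E);
         Found.Delete (E);
      end loop;

      Ada.Text_IO.Put_Line ("excess known :" & Known.Length'Image);  --  1
      Ada.Text_IO.Put_Line ("unknown found:" & Found.Length'Image);  --  1
   end Sweep_Demo;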
+
+   ----------
+   --  Build parse table
+
+   function Apply_Declared_Resolution
+     (Conflict           : in out Parse.LR.Action_Node;
+      Found              : in     Conflict_Lists.Cursor;
+      Conflict_Count     : in     Integer;
+      Declared_Conflicts : in out WisiToken.Generate.LR.Conflict_Lists.Tree)
+     return Boolean
+   with Pre => Conflict.Actions.Next /= null and Found /= 
Conflict_Lists.No_Element
+   --  If Conflict matches a declared %conflict_resolution, it is
+   --  modified to implement the conflict resolution, and the function
+   --  returns True. Otherwise, Conflict is not modified, and the
+   --  function returns False.
+   is
+      use Conflict_Lists;
+      Declared : WisiToken.Generate.LR.Conflict renames Declared_Conflicts 
(Found);
+      Delete   : array (1 .. Conflict_Count) of Boolean := (others => False);
+
+      Temp : Parse_Action_Node_Ptr := Conflict.Actions;
+      Prev : Parse_Action_Node_Ptr := null;
+
+      Resolution_Token_Found : Boolean := False;
+   begin
+      if Declared.Resolution = Invalid_Token_ID then
+         return False;
+      end if;
+
+      for I in 1 .. Conflict_Count loop
+         if Declared.Resolution = Temp.Item.Production.LHS then
+            Resolution_Token_Found := True;
+         else
+            Delete (I) := True;
+         end if;
+         Temp := Temp.Next;
+      end loop;
+
+      if not Resolution_Token_Found then
+         raise SAL.Programmer_Error; -- Should be checked when Conflict is 
entered into Conflicts.
+      end if;
+
+      Declared.Resolution_Used := True;
+
+      Temp := Conflict.Actions;
+      for I in 1 .. Conflict_Count loop
+         if Delete (I) then
+            Parse.LR.Delete (Conflict, Prev, Temp);
+         else
+            Prev := Temp;
+            Temp := Temp.Next;
+         end if;
+      end loop;
+
+      return True;
+   end Apply_Declared_Resolution;
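
   --  Apply_Optimized_List_Conflict and Apply_Declared_Resolution above
   --  share one shape: mark the conflict items to drop in a Boolean array,
   --  then walk the singly linked action list with a Prev/Temp pair,
   --  unlinking the marked nodes (Parse.LR.Delete, whose body is not in
   --  this patch, is assumed to do the unlink and advance Temp). The
   --  pointer manipulation involved, on a plain linked list with
   --  illustrative data; storage reclamation is omitted for brevity:

   with Ada.Text_IO;
   procedure Unlink_Demo is
      type Node;
      type Node_Ptr is access Node;
      type Node is record
         Value : Integer;
         Next  : Node_Ptr;
      end record;

      Head : Node_Ptr;
      Temp : Node_Ptr;
      Prev : Node_Ptr;
   begin
      for I in reverse 1 .. 5 loop
         Head := new Node'(Value => I, Next => Head);  --  list is 1 2 3 4 5
      end loop;

      --  Unlink the "marked" nodes (here: the even values); Prev stays on
      --  the last retained node.
      Temp := Head;
      while Temp /= null loop
         if Temp.Value mod 2 = 0 then
            if Prev = null then
               Head := Temp.Next;
               Temp := Head;
            else
               Prev.Next := Temp.Next;
               Temp      := Prev.Next;
            end if;
         else
            Prev := Temp;
            Temp := Temp.Next;
         end if;
      end loop;

      Temp := Head;
      while Temp /= null loop
         Ada.Text_IO.Put (Temp.Value'Image);  --  prints  1 3 5
         Temp := Temp.Next;
      end loop;
      Ada.Text_IO.New_Line;
   end Unlink_Demo;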
+
    procedure Add_Action
-     (Symbol               : in     Token_ID;
-      Action               : in     Parse_Action_Rec;
-      Action_List          : in out Action_Arrays.Vector;
-      Closure              : in     LR1_Items.Item_Set;
-      Grammar              : in     WisiToken.Productions.Prod_Arrays.Vector;
-      Has_Empty_Production : in     Token_ID_Set;
-      First_Nonterm_Set    : in     Token_Array_Token_Set;
-      Conflict_Counts      : in out Conflict_Count_Lists.List;
-      Conflicts            : in out Conflict_Lists.List;
-      Descriptor           : in     WisiToken.Descriptor)
+     (State              : in     State_Index;
+      Symbol             : in     Token_ID;
+      Action             : in     Parse_Action_Rec;
+      Action_List        : in out Action_Arrays.Vector;
+      Grammar            : in     WisiToken.Productions.Prod_Arrays.Vector;
+      Descriptor         : in     WisiToken.Descriptor;
+      Declared_Conflicts : in out WisiToken.Generate.LR.Conflict_Lists.Tree;
+      Unknown_Conflicts  : in out WisiToken.Generate.LR.Conflict_Lists.Tree;
+      First_Nonterm_Set  : in     WisiToken.Token_Array_Token_Set;
+      File_Name          : in     String;
+      Ignore_Conflicts   : in     Boolean)
    is
-      Matching_Action : constant Action_Arrays.Find_Reference_Type := 
Action_List.Find (Symbol);
+      Matching_Action_Node : constant Action_Arrays.Find_Reference_Type := 
Action_List.Find (Symbol);
    begin
       if Trace_Generate_Table > Detail then
          Ada.Text_IO.Put (Image (Symbol, Descriptor) & " => ");
@@ -223,102 +784,128 @@ package body WisiToken.Generate.LR is
          Ada.Text_IO.New_Line;
       end if;
 
-      if Matching_Action.Element /= null then
-         if Is_In (Action, Matching_Action.Actions) then
-            --  Action is already in the list.
+      if Matching_Action_Node.Element /= null then
+         if Is_In (Action, Matching_Action_Node.Actions) then
             if Trace_Generate_Table > Detail then
                Ada.Text_IO.Put_Line (" - already present");
             end if;
             return;
          else
-            --  There is a conflict. Report it and add it, so the
-            --  generalized parser can follow all paths
-            declare
-               --  Enforce canonical Shift/Reduce or Accept/Reduce order, to 
simplify
-               --  searching and code generation. There can be only one Shift 
in the
-               --  list of conflicting actions, so we keep it the first item 
in the
-               --  list; no order in the rest of the list.
-               Action_A : constant Parse_Action_Rec :=
-                 (if Action.Verb in Shift | Accept_It then Action else 
Matching_Action.Actions.Item);
-
-               Action_B : constant Parse_Action_Rec :=
-                 (if Action.Verb in Shift | Accept_It then 
Matching_Action.Actions.Item else Action);
-
-               New_Conflict : constant Conflict :=
-                 (Action_A    => Action_A.Verb,
-                  Action_B    => Action_B.Verb,
-                  LHS_A       => Find
-                    (Closure, Action_A, Symbol, Grammar, Has_Empty_Production, 
First_Nonterm_Set, Descriptor),
-                  LHS_B       => Find
-                    (Closure, Action_B, Symbol, Grammar, Has_Empty_Production, 
First_Nonterm_Set, Descriptor),
-                  State_Index => Closure.State,
-                  On          => Symbol);
-
-               Counts : Conflict_Count_Lists.Cursor;
-            begin
-               for Cur in Conflict_Counts.Iterate loop
-                  if Conflict_Counts (Cur).State = Closure.State then
-                     Counts := Cur;
-                     exit;
+            --  New conflict. Sort to match Conflict_Item_Compare order
+            if Action.Verb = Shift then
+               Matching_Action_Node.Actions := new Parse_Action_Node'(Action, 
Matching_Action_Node.Actions);
+            else
+               declare
+                  Node : Parse_Action_Node_Ptr := Matching_Action_Node.Actions;
+                  Prev : Parse_Action_Node_Ptr := null;
+               begin
+                  if Node.Item.Verb = Shift then
+                     Prev := Node;
+                     Node := Node.Next;
                   end if;
-               end loop;
+                  loop
+                     exit when Node = null or else Node.Item.Production.LHS > 
Action.Production.LHS;
+                     Prev := Node;
+                     Node := Node.Next;
+                  end loop;
 
-               if not Conflict_Count_Lists.Has_Element (Counts) then
-                  Conflict_Counts.Append ((Closure.State, others => 0));
-                  Counts := Conflict_Counts.Last;
-               end if;
+                  if Prev = null then
+                     Matching_Action_Node.Actions := new 
Parse_Action_Node'(Action, Matching_Action_Node.Actions);
+                  else
+                     Prev.Next := new Parse_Action_Node'(Action, Node);
+                  end if;
+               end;
+            end if;
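            --  At this point the action list for Symbol holds every
            --  conflicting action: the single Shift (if any) is kept first,
            --  and the remaining Reduce/Accept_It actions are kept in
            --  ascending Production.LHS order.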
 
+            if not Ignore_Conflicts then
+               --  We don't apply conflict resolutions when --ignore_conflicts 
is
+               --  specified; the full original conflict is shown in the parse 
table.
+               --  This helps with debugging conflict resolutions and other 
issues.
                declare
-                  use Conflict_Count_Lists;
-                  Counts_Ref : constant Reference_Type := Reference 
(Conflict_Counts, Counts);
-               begin
-                  case Action_A.Verb is
-                  when Shift =>
-                     case Action_B.Verb is
-                     when Shift | Accept_It | WisiToken.Parse.LR.Error =>
-                        raise SAL.Programmer_Error;
-                     when Reduce =>
-                        Counts_Ref.Shift_Reduce := Counts_Ref.Shift_Reduce + 1;
-                     end case;
-                  when Reduce =>
-                     case Action_B.Verb is
-                     when Shift | Accept_It | WisiToken.Parse.LR.Error =>
-                        raise SAL.Programmer_Error;
-                     when Reduce =>
-                        Counts_Ref.Reduce_Reduce := Counts_Ref.Reduce_Reduce + 
1;
-                     end case;
-                  when Accept_It =>
-                     case Action_B.Verb is
-                     when Shift | Accept_It | WisiToken.Parse.LR.Error =>
-                        raise SAL.Programmer_Error;
-                     when Reduce =>
-                        Counts_Ref.Accept_Reduce := Counts_Ref.Accept_Reduce + 
1;
-                     end case;
-                  when WisiToken.Parse.LR.Error =>
-                     raise SAL.Programmer_Error;
-                  end case;
-               end;
+                  Temp : Parse_Action_Node_Ptr := Matching_Action_Node.Actions;
+                  Conflict_Count : Integer := 0;
 
-               if not Is_Present (New_Conflict, Conflicts) then
-                  --  The same conflict may occur in a different
-                  --  item set. Only add it to conflicts once.
-                  Conflicts.Append (New_Conflict);
+                  WY_Conflict : constant LR.Conflict := To_Conflict 
(Matching_Action_Node);
+                  --  'wy' because this is what goes in the .wy file.
 
-                  if Trace_Generate_Table > Detail then
-                     Ada.Text_IO.Put_Line (" - conflict added: " & Image 
(New_Conflict, Descriptor));
-                  end if;
-               else
-                  if Trace_Generate_Table > Detail then
-                     Ada.Text_IO.Put_Line (" - conflict duplicate: " & Image 
(New_Conflict, Descriptor));
+                  Found_Declared : constant Conflict_Lists.Cursor := 
Declared_Conflicts.Find (WY_Conflict);
+               begin
+                  loop
+                     exit when Temp = null;
+                     Conflict_Count := @ + 1;
+                     Temp := Temp.Next;
+                  end loop;
+                  pragma Assert (Conflict_Count > 0);
+
+                  if Trace_Generate_Conflicts > Detail then
+                     if Trace_Generate_Conflicts > Extra or Conflict_Count > 2 
then
+                        Ada.Text_IO.Put_Line
+                          ("conflict on " & Image 
(Matching_Action_Node.Symbol, Descriptor) &
+                             ", length :" & Conflict_Count'Image);
+                        Ada.Text_IO.Put_Line (Image (WY_Conflict, Descriptor));
+                        Put (Ada.Text_IO.Current_Output, 
Matching_Action_Node.Actions, Descriptor);
+                        Ada.Text_IO.New_Line;
+                        if Found_Declared /= Conflict_Lists.No_Element then
+                           Ada.Text_IO.Put_Line ("... known");
+                        end if;
+                     end if;
                   end if;
-               end if;
 
-               if Action.Verb = Shift then
-                  Matching_Action.Actions := new Parse_Action_Node'(Action, 
Matching_Action.Actions);
-               else
-                  Matching_Action.Actions.Next := new 
Parse_Action_Node'(Action, Matching_Action.Actions.Next);
-               end if;
-            end;
+                  if Found_Declared /= Conflict_Lists.No_Element and then
+                    Apply_Declared_Resolution (Matching_Action_Node, 
Found_Declared, Conflict_Count, Declared_Conflicts)
+                  then
+                     if Trace_Generate_Conflicts > Detail then
+                        Ada.Text_IO.Put_Line ("... conflict resolution 
applied:");
+                        Put (Ada.Text_IO.Current_Output, 
Matching_Action_Node.Actions, Descriptor);
+                        Ada.Text_IO.New_Line;
+                     end if;
+
+                     --  FIXME: apply both resolutions to one conflict. Need 
test case. Must update Conflict_Count.
+                  elsif Apply_Optimized_List_Conflict
+                    (Matching_Action_Node, Conflict_Count, Grammar, 
Descriptor, First_Nonterm_Set, File_Name)
+                  then
+                     if Trace_Generate_Conflicts > Detail then
+                        Ada.Text_IO.Put_Line ("... optimized_list conflict 
resolved:");
+                        Put (Ada.Text_IO.Current_Output, 
Matching_Action_Node.Actions, Descriptor);
+                        Ada.Text_IO.New_Line;
+                     end if;
+
+                  else
+                     if Found_Declared = Conflict_Lists.No_Element then
+                        declare
+                           Found_Unknown : constant Conflict_Lists.Cursor :=
+                             Unknown_Conflicts.Iterate.Find (WY_Conflict);
+                        begin
+                           if Found_Unknown = Conflict_Lists.No_Element then
+                              Unknown_Conflicts.Insert (WY_Conflict);
+                              if Trace_Generate_Conflicts > Extra then
+                                 Ada.Text_IO.Put_Line ("... add to 
Unknown_Conflicts");
+                              end if;
+                           else
+                              if Trace_Generate_Conflicts > Extra then
+                                 Ada.Text_IO.Put_Line ("... already in 
Unknown_Conflicts");
+                              end if;
+                           end if;
+                        end;
+                     else
+                        declare
+                           Found : Conflict renames Declared_Conflicts 
(Found_Declared);
+                        begin
+                           Found.Conflict_Seen := True;
+                           if not Found.States.Contains (State) then
+                              if Trace_Generate_Conflicts > Extra then
+                                 Ada.Text_IO.Put_Line ("... in state" & 
State'Image);
+                              end if;
+                              Found.States.Append (State);
+                           end if;
+                        end;
+                        if Trace_Generate_Conflicts > Extra then
+                           Ada.Text_IO.Put_Line ("... NOT resolved");
+                        end if;
+                     end if;
+                  end if;
+               end;
+            end if;
          end if;
       else
          WisiToken.Parse.LR.Add (Action_List, Symbol, Action);
@@ -326,35 +913,35 @@ package body WisiToken.Generate.LR is
    end Add_Action;
 
    procedure Add_Actions
-     (Closure              : in     LR1_Items.Item_Set;
-      Table                : in out Parse_Table;
-      Grammar              : in     WisiToken.Productions.Prod_Arrays.Vector;
-      Has_Empty_Production : in     Token_ID_Set;
-      First_Nonterm_Set    : in     Token_Array_Token_Set;
-      Conflict_Counts      : in out Conflict_Count_Lists.List;
-      Conflicts            : in out Conflict_Lists.List;
-      Descriptor           : in     WisiToken.Descriptor)
+     (Closure            : in     LR1_Items.Item_Set;
+      Table              : in out Parse_Table;
+      Grammar            : in     WisiToken.Productions.Prod_Arrays.Vector;
+      Descriptor         : in     WisiToken.Descriptor;
+      Declared_Conflicts : in out WisiToken.Generate.LR.Conflict_Lists.Tree;
+      Unknown_Conflicts  : in out WisiToken.Generate.LR.Conflict_Lists.Tree;
+      First_Nonterm_Set  : in     WisiToken.Token_Array_Token_Set;
+      File_Name          : in     String;
+      Ignore_Conflicts   : in     Boolean)
    is
       use Token_ID_Arrays;
 
-      State : constant State_Index := Closure.State;
+      State : constant State_Index := Closure.Tree_Node.State;
    begin
       if Trace_Generate_Table > Detail then
-         Ada.Text_IO.Put_Line ("adding actions for state" & State_Index'Image 
(State));
+         Ada.Text_IO.Put_Line ("setting table actions for state" & 
State_Index'Image (State));
       end if;
 
       for Item of Closure.Set loop
          declare
-            Dot : constant Token_ID_Arrays.Cursor := 
Productions.Constant_Ref_RHS
-              (Grammar, Item.Prod).Tokens.To_Cursor (Item.Dot);
+            Item_Tokens : Token_ID_Arrays.Vector renames 
Productions.Constant_Ref_RHS
+              (Grammar, Item.Prod).Tokens;
          begin
-            if not Has_Element (Dot) then
+            if Item.Dot not in Item_Tokens.First_Index .. 
Item_Tokens.Last_Index then
                Add_Lookahead_Actions
-                 (Item, Table.States (State).Action_List, Grammar, 
Has_Empty_Production, First_Nonterm_Set,
-                  Conflict_Counts, Conflicts, Closure, Descriptor);
+                 (State, Item, Table.States (State).Action_List, Grammar, 
Descriptor, Declared_Conflicts,
+                  Unknown_Conflicts, First_Nonterm_Set, File_Name, 
Ignore_Conflicts);
 
-            elsif Element (Dot) in
-              Descriptor.First_Terminal .. Descriptor.Last_Terminal
+            elsif Item_Tokens (Item.Dot) in Descriptor.First_Terminal .. 
Descriptor.Last_Terminal
             then
                --  Dot is before a terminal token.
                declare
@@ -362,7 +949,7 @@ package body WisiToken.Generate.LR is
 
                   P_ID : constant Production_ID := Item.Prod;
 
-                  Dot_ID : constant Token_ID := Element (Dot);
+                  Dot_ID : constant Token_ID := Item_Tokens (Item.Dot);
                   --  ID of token after Item.Dot
 
                   Goto_State : constant Unknown_State_Index := 
LR1_Items.Goto_State (Closure, Dot_ID);
@@ -373,28 +960,29 @@ package body WisiToken.Generate.LR is
                         RHS  : Productions.Right_Hand_Side renames Grammar 
(P_ID.LHS).RHSs (P_ID.RHS);
                      begin
                         Add_Action
-                          (Dot_ID,
-                           (Accept_It, P_ID, RHS.Action, RHS.Check, 
RHS.Tokens.Length - 1),
+                          (State, Dot_ID,
+                           (Accept_It, P_ID, RHS.Tokens.Length - 1),
                            --  EOF is not pushed on stack in parser, because 
the action for EOF
                            --  is Accept, not Shift.
-                           Table.States (State).Action_List, Closure,
-                           Grammar, Has_Empty_Production, First_Nonterm_Set, 
Conflict_Counts, Conflicts, Descriptor);
+                           Table.States (State).Action_List,
+                           Grammar, Descriptor, Declared_Conflicts, 
Unknown_Conflicts, First_Nonterm_Set, File_Name,
+                           Ignore_Conflicts);
                      end;
                   else
                      if Goto_State /= Unknown_State then
                         Add_Action
-                          (Dot_ID,
+                          (State, Dot_ID,
                            (Shift, P_ID, Goto_State),
                            Table.States (State).Action_List,
-                           Closure, Grammar, Has_Empty_Production, 
First_Nonterm_Set,
-                           Conflict_Counts, Conflicts, Descriptor);
+                           Grammar, Descriptor, Declared_Conflicts, 
Unknown_Conflicts, First_Nonterm_Set, File_Name,
+                           Ignore_Conflicts);
                      end if;
                   end if;
                end;
             else
                --  Dot is before a non-terminal token; no action.
                if Trace_Generate_Table > Detail then
-                  Ada.Text_IO.Put_Line (Image (Element (Dot), Descriptor) & " 
=> no action");
+                  Ada.Text_IO.Put_Line (Image (Item_Tokens (Item.Dot), 
Descriptor) & " => no action");
                end if;
             end if;
          end;
@@ -406,26 +994,29 @@ package body WisiToken.Generate.LR is
 
       for Item of Closure.Goto_List loop
          if Item.Symbol in Descriptor.First_Nonterminal .. 
Descriptor.Last_Nonterminal then
-            --  FIXME: Goto_List has terminals; either don't need to add 
those, or can use that instead of above code.
-            Add_Goto (Table.States (State), Item.Symbol, Item.State); -- note 
list is already sorted.
+            --  Goto_List also has terminals, used above in Goto_State. We 
can't just
+            --  use Goto_List to create actions for terminals; they don't 
contain
+            --  enough information.
+            Add_Goto (Table.States (State), Item.Symbol, Item.State);
          end if;
       end loop;
    end Add_Actions;
 
    procedure Add_Lookahead_Actions
-     (Item                 : in     LR1_Items.Item;
-      Action_List          : in out Action_Arrays.Vector;
-      Grammar              : in     WisiToken.Productions.Prod_Arrays.Vector;
-      Has_Empty_Production : in     Token_ID_Set;
-      First_Nonterm_Set    : in     Token_Array_Token_Set;
-      Conflict_Counts      : in out Conflict_Count_Lists.List;
-      Conflicts            : in out Conflict_Lists.List;
-      Closure              : in     LR1_Items.Item_Set;
-      Descriptor           : in     WisiToken.Descriptor)
+     (State              : in     State_Index;
+      Item               : in     LR1_Items.Item;
+      Action_List        : in out Action_Arrays.Vector;
+      Grammar            : in     WisiToken.Productions.Prod_Arrays.Vector;
+      Descriptor         : in     WisiToken.Descriptor;
+      Declared_Conflicts : in out WisiToken.Generate.LR.Conflict_Lists.Tree;
+      Unknown_Conflicts  : in out WisiToken.Generate.LR.Conflict_Lists.Tree;
+      First_Nonterm_Set  : in     WisiToken.Token_Array_Token_Set;
+      File_Name          : in     String;
+      Ignore_Conflicts   : in     Boolean)
    is
       Prod   : Productions.Instance renames Grammar (Item.Prod.LHS);
       RHS    : Productions.Right_Hand_Side renames Prod.RHSs (Item.Prod.RHS);
-      Action : constant Parse_Action_Rec := (Reduce, Item.Prod, RHS.Action, 
RHS.Check, RHS.Tokens.Length);
+      Action : constant Parse_Action_Rec := (Reduce, Item.Prod, 
RHS.Tokens.Length);
    begin
       if Trace_Generate_Table > Detail then
          Ada.Text_IO.Put_Line ("processing lookaheads");
@@ -438,191 +1029,13 @@ package body WisiToken.Generate.LR is
                null;
             else
                Add_Action
-                 (Lookahead, Action, Action_List, Closure, Grammar,
-                  Has_Empty_Production, First_Nonterm_Set, Conflict_Counts, 
Conflicts, Descriptor);
+                 (State, Lookahead, Action, Action_List, Grammar, Descriptor, 
Declared_Conflicts,
+                  Unknown_Conflicts, First_Nonterm_Set, File_Name, 
Ignore_Conflicts);
             end if;
          end if;
       end loop;
    end Add_Lookahead_Actions;
 
-   procedure Delete_Known
-     (Conflicts       : in out Conflict_Lists.List;
-      Known_Conflicts : in out Conflict_Lists.List)
-   is
-      --  Delete all elements in Conflicts that match an element in
-      --  Known_Conflicts. There can be more than one Conflict that
-      --  match one Known_Conflict.
-      use Conflict_Lists;
-      Known      : Cursor  := Known_Conflicts.First;
-      Next_Known : Cursor;
-   begin
-      loop
-         exit when Known = No_Element;
-         Next_Known := Next (Known);
-         declare
-            I      : Cursor  := Conflicts.First;
-            Next_I : Cursor;
-            Used   : Boolean := False;
-         begin
-            loop
-               exit when I = No_Element;
-               Next_I := Next (I);
-               if Match (Element (Known), Conflicts.Constant_Reference (I)) 
then
-                  Delete (Conflicts, I);
-                  Used := True;
-               end if;
-               I := Next_I;
-            end loop;
-
-            if Used then
-               Delete (Known_Conflicts, Known);
-            end if;
-         end;
-         Known := Next_Known;
-      end loop;
-   end Delete_Known;
-
-   function Find
-     (Closure              : in LR1_Items.Item_Set;
-      Action               : in Parse_Action_Rec;
-      Lookahead            : in Token_ID;
-      Grammar              : in WisiToken.Productions.Prod_Arrays.Vector;
-      Has_Empty_Production : in Token_ID_Set;
-      First                : in Token_Array_Token_Set;
-      Descriptor           : in WisiToken.Descriptor)
-     return Token_ID
-   is
-      use WisiToken.Token_ID_Arrays;
-   begin
-      case Action.Verb is
-      when Reduce | Accept_It =>
-         --  If the nonterm produced by the reduce is the LHS of the state
-         --  production, use it.
-         for Item of Closure.Set loop
-            if LR1_Items.In_Kernel (Grammar, Descriptor, Item) and
-              Action.Production.LHS = Item.Prod.LHS
-            then
-               return Item.Prod.LHS;
-            end if;
-         end loop;
-
-         --  The reduce nonterm is after Dot in a state production; find which
-         --  one, use that.
-         for Item of Closure.Set loop
-            if LR1_Items.In_Kernel (Grammar, Descriptor, Item) then
-               declare
-                  Dot : Token_ID_Arrays.Cursor := Productions.Constant_Ref_RHS
-                    (Grammar, Item.Prod).Tokens.To_Cursor (Item.Dot);
-               begin
-                  loop
-                     if not Has_Element (Dot) then
-                        if Item.Lookaheads (Lookahead) then
-                           return Item.Prod.LHS;
-                        end if;
-                     else
-                        declare
-                           Dot_ID : constant Token_ID := Element (Dot);
-                        begin
-                           if Dot_ID = Lookahead or
-                             (Dot_ID in Descriptor.First_Nonterminal .. 
Descriptor.Last_Nonterminal and then
-                                First (Dot_ID, Lookahead))
-                           then
-                              return Item.Prod.LHS;
-                           end if;
-                           exit when Dot_ID in Descriptor.First_Nonterminal .. 
Descriptor.Last_Nonterminal and then
-                             not Has_Empty_Production (Dot_ID);
-                        end;
-                     end if;
-
-                     exit when not Has_Element (Dot);
-                     Next (Dot);
-                  end loop;
-               end;
-            end if;
-         end loop;
-
-      when Shift =>
-
-         for Item of Closure.Set loop
-            --  Lookahead (the token shifted) is starting a nonterm in a state
-            --  production; it is in First of that nonterm.
-            if LR1_Items.In_Kernel (Grammar, Descriptor, Item) then
-               declare
-                  Dot : Token_ID_Arrays.Cursor := Productions.Constant_Ref_RHS
-                    (Grammar, Item.Prod).Tokens.To_Cursor (Item.Dot);
-               begin
-                  loop
-                     exit when not Has_Element (Dot);
-                     declare
-                        Dot_ID : constant Token_ID := Element (Dot);
-                     begin
-                        if Dot_ID = Lookahead or
-                          (Dot_ID in Descriptor.First_Nonterminal .. 
Descriptor.Last_Nonterminal and then
-                             First (Dot_ID, Lookahead))
-                        then
-                           return Item.Prod.LHS;
-                        end if;
-
-                        exit when Dot_ID in Descriptor.First_Nonterminal .. 
Descriptor.Last_Nonterminal and then
-                          not Has_Empty_Production (Dot_ID);
-                     end;
-
-                     Next (Dot);
-                  end loop;
-               end;
-            end if;
-         end loop;
-
-      when WisiToken.Parse.LR.Error =>
-         raise SAL.Programmer_Error;
-      end case;
-
-      Ada.Text_IO.Put_Line
-        ("item for " & Image (Action, Descriptor) & " on " & Image (Lookahead, 
Descriptor) & " not found in");
-      LR1_Items.Put (Grammar, Descriptor, Closure, Kernel_Only => True);
-      raise SAL.Programmer_Error;
-   end Find;
-
-   function Image (Item : in Conflict; Descriptor : in WisiToken.Descriptor) 
return String
-   is begin
-      return
-        ("%conflict " &
-           Conflict_Parse_Actions'Image (Item.Action_A) & "/" &
-           Conflict_Parse_Actions'Image (Item.Action_B) & " in state " &
-           Image (Item.LHS_A, Descriptor) & ", " &
-           Image (Item.LHS_B, Descriptor) &
-           " on token " & Image (Item.On, Descriptor) &
-           " (" & State_Index'Image (Item.State_Index) & ")"); -- state number 
last for easier delete
-   end Image;
-
-   function Is_Present (Item : in Conflict; Conflicts : in 
Conflict_Lists.List) return Boolean
-   is
-      use Conflict_Lists;
-      I : Cursor := Conflicts.First;
-   begin
-      loop
-         exit when I = No_Element;
-         if Match (Item, Conflicts.Constant_Reference (I)) then
-            return True;
-         end if;
-         I := Next (I);
-      end loop;
-      return False;
-   end Is_Present;
-
-   function Match (Known : in Conflict; Item : in 
Conflict_Lists.Constant_Reference_Type) return Boolean
-   is begin
-      --  Ignore State_Index. Actions are in canonical order; enforced
-      --  in Add_Action above. For reduce/reduce, LHS_A, LHS_B are not
-      --  in canonical order.
-      return
-        Known.Action_A = Item.Action_A and
-        Known.Action_B = Item.Action_B and
-        ((Known.LHS_A = Item.LHS_A and Known.LHS_B = Item.LHS_B) or
-           (Known.LHS_B = Item.LHS_A and Known.LHS_A = Item.LHS_B)) and
-        Known.On = Item.On;
-   end Match;
-
    ----------
    --  Minimal terminal sequences.
 
@@ -660,8 +1073,9 @@ package body WisiToken.Generate.LR is
    end Min;
 
    function Compute_Minimal_Terminal_Sequences
-     (Descriptor : in WisiToken.Descriptor;
-      Grammar    : in WisiToken.Productions.Prod_Arrays.Vector)
+     (Descriptor        : in WisiToken.Descriptor;
+      Grammar           : in WisiToken.Productions.Prod_Arrays.Vector;
+      Grammar_File_Name : in String)
      return Minimal_Sequence_Array
    is
       --  Result (ID).Sequence.Length = 0 is a valid result (ie the
@@ -692,14 +1106,28 @@ package body WisiToken.Generate.LR is
                Terminal_Sequence (Grammar, Descriptor, Result, All_Seq_Set, 
RHS_Seq_Set, Recursing, P.LHS);
             end loop;
             This_Count := Count (All_Seq_Set);
+
             if This_Count = Last_Seq_Count then
-               Ada.Text_IO.Put_Line (Ada.Text_IO.Standard_Error, Image 
(All_Seq_Set, Descriptor, Inverted => True));
-               raise Grammar_Error with "sequences not resolved";
+               Ada.Text_IO.Put_Line
+                 (Ada.Text_IO.Current_Error,
+                  Error_Message
+                    (File_Name => Grammar_File_Name,
+                     File_Line => Line_Number_Type'First,
+                     Message   => "terminal sequences not resolved:"));
+
+               Ada.Text_IO.Put_Line
+                 (Ada.Text_IO.Current_Error,
+                  Error_Message
+                    (File_Name => Grammar_File_Name,
+                     File_Line => Line_Number_Type'First,
+                     Message   => Image (All_Seq_Set, Descriptor, Inverted => 
True)));
+               raise Parse_Error;
             end if;
             Last_Seq_Count := This_Count;
          end loop;
 
          if Trace_Generate_Minimal_Complete > Detail then
+            Ada.Text_IO.New_Line;
             Ada.Text_IO.Put_Line ("Minimal_Terminal_Sequences:");
             for LHS in Result'Range loop
                Ada.Text_IO.Put_Line (Image (LHS, Result, Descriptor));
@@ -712,9 +1140,7 @@ package body WisiToken.Generate.LR is
      (Descriptor                 : in WisiToken.Descriptor;
       Minimal_Terminal_Sequences : in Minimal_Sequence_Array)
      return Token_Array_Token_ID
-   is
-      use Token_ID_Arrays;
-   begin
+   is begin
       return Result : Token_Array_Token_ID (Descriptor.First_Nonterminal .. 
Descriptor.Last_Nonterminal) do
          for ID in Result'Range loop
             declare
@@ -724,7 +1150,7 @@ package body WisiToken.Generate.LR is
                if Min_Seq.Length = 0 then
                   Result (ID) := Invalid_Token_ID;
                else
-                  Result (ID) := Element (Min_Seq.First);
+                  Result (ID) := Min_Seq (Min_Seq.First);
                end if;
             end;
          end loop;
@@ -765,7 +1191,9 @@ package body WisiToken.Generate.LR is
                end case;
             end if;
          end loop;
-         raise SAL.Programmer_Error;
+         raise SAL.Programmer_Error with
+           "Set_Minimal_Complete_Actions: action for " & Image (ID, 
Descriptor) & " not found in state" &
+           Kernel.Tree_Node.State'Image;
       end Find_Action;
 
       function Compute_Action (ID : in Token_ID) return Minimal_Action
@@ -802,18 +1230,22 @@ package body WisiToken.Generate.LR is
          loop
             exit when not Has_Element (I);
 
-            if Element (I) in Terminals then
+            if Tokens (I) in Terminals then
                Result := Result + 1;
             else
                Result := Result + Min_Length (Minimal_Terminal_Sequences 
(Tokens (I)).Sequence);
             end if;
-            Next (I);
+            Tokens.Next (I);
          end loop;
          return Result;
       end Length_After_Dot;
 
    begin
-      if Kernel.State = 0 then
+      if Trace_Generate_Minimal_Complete > Detail then
+         Ada.Text_IO.Put_Line ("State" & Kernel.Tree_Node.State'Image);
+      end if;
+
+      if Kernel.Tree_Node.State = 0 then
          --  State 0 has dot before all tokens, which is never needed in the
          --  Minimal_Complete_Action algorithm.
          return;
@@ -863,8 +1295,7 @@ package body WisiToken.Generate.LR is
          State.Kernel.Set_First_Last (Kernel_Index'First, Kernel_Index'Last);
          for Item of Kernel.Set loop
             declare
-               RHS    : WisiToken.Productions.Right_Hand_Side renames
-                 Grammar (Item.Prod.LHS).RHSs (Item.Prod.RHS);
+               RHS    : WisiToken.Productions.Right_Hand_Side renames Grammar 
(Item.Prod.LHS).RHSs (Item.Prod.RHS);
                Dot_ID : constant Token_ID :=
                  (if Item.Dot = No_Index
                   then Invalid_Token_ID
@@ -872,14 +1303,23 @@ package body WisiToken.Generate.LR is
 
                --  Kernel components
                Length_After_Dot  : constant Count_Type := 
Set_Minimal_Complete_Actions.Length_After_Dot (Item);
+
                Reduce_Production : constant Production_ID :=
                  (if Length_After_Dot = 0
                   then (if Dot_ID in Nullable'Range then Nullable (Dot_ID) 
else Item.Prod)
                   else Invalid_Production_ID);
+
                Reduce_Count : constant Count_Type :=
                  (if Reduce_Production = Invalid_Production_ID
                   then 0
-                  else Grammar (Reduce_Production.LHS).RHSs 
(Reduce_Production.RHS).Tokens.Length);
+                  else
+                    (if Reduce_Production.LHS = Dot_ID and
+                      (Reduce_Production.LHS in Nullable'Range and then
+                       Nullable (Reduce_Production.LHS) /= 
Invalid_Production_ID)
+                     then 0
+                     else Grammar (Reduce_Production.LHS).RHSs 
(Reduce_Production.RHS).Tokens.Length));
+
+               Case_Label : Integer; --  for debugging
             begin
                --  Here we must compute Item_State (I).Label and 
.Minimal_Action,
                --  considering recursion.
@@ -891,10 +1331,10 @@ package body WisiToken.Generate.LR is
                --  The strategy in Insert_Minimal_Complete_Actions when
                --  Item.Length_After_Dot = 0 is to compute Length_After_Dot by 
doing
                --  Reduce until a Shift is encountered, and using 
Length_After_Dot
-               --  for that item.               --
+               --  for that item.
                --
                --  Consider these kernel items with possible recursion (from
-               --  ada_lite_lalr.parse_table - not listed in state order here, 
to
+               --  ada_lite_lalr_re2c_t1.parse_table - not listed in state 
order here, to
                --  group related productions). The recursion of each 
production is
                --  shown after ';', if not all None.
                --
@@ -902,7 +1342,7 @@ package body WisiToken.Generate.LR is
                --       86.0:exit_statement <= EXIT ^ identifier_opt WHEN 
expression_opt SEMICOLON
                --       86.1:exit_statement <= EXIT ^ identifier_opt SEMICOLON
                --
-               --  State 43:
+               --  State 42:
                --     103.2:name <= IDENTIFIER ^
                --
                --  State 30:
@@ -910,61 +1350,62 @@ package body WisiToken.Generate.LR is
                --
                --  State 47:
                --      103.0:name <= name ^ LEFT_PAREN range_list RIGHT_PAREN 
; ( 1 => Direct_Left,  3 => Other)
-               --      103.1:name <= name ^ actual_parameter_part ; ( 1 => 
Direct_Left,  2 => Other)
+               --      103.1:name <= name ^ actual_parameter_part ; ( 1 => 
Direct_Left,  2 => Other_Right)
                --      113.2:primary <= name ^  ; ( 1 => Other_Left)
                --      124.0:selected_component <= name ^ DOT IDENTIFIER ; ( 1 
=> Other_Left)
                --
                --  State 68:
                --       95.1:generic_instantiation <= PROCEDURE name ^ IS NEW 
name SEMICOLON
                --      103.0:name <= name ^ LEFT_PAREN range_list RIGHT_PAREN 
; ( 1 => Direct_Left,  3 => Other)
-               --      103.1:name <= name ^ actual_parameter_part ; ( 1 => 
Direct_Left,  2 => Other)
+               --      103.1:name <= name ^ actual_parameter_part ; ( 1 => 
Direct_Left,  2 => Other_Right)
                --      115.0:procedure_specification <= PROCEDURE name ^ 
parameter_profile_opt
                --      124.0:selected_component <= name ^ DOT IDENTIFIER ; ( 1 
=> Other_Left)
                --
                --  State 50:
                --       87.1:expression <= relation_and_list ^ ; ( 1 => 
Other_Left)
-               --      119.0:relation_and_list <= relation_and_list ^ AND 
relation ; ( 1 => Direct_Left,  3 => Other)
-               --
+               --      119.0:relation_and_list <= relation_and_list ^ AND 
relation;(1 => Direct_Left, 3 => Other_Right)
                --
                --  State 77:
                --       57.0:actual_parameter_part <= LEFT_PAREN ^ 
association_list RIGHT_PAREN ; ( 2 => Other)
                --      103.0:name <= name LEFT_PAREN ^ range_list RIGHT_PAREN 
; ( 1 => Direct_Left,  3 => Other)
                --
                --  State 154:
-               --      103.0:name <= name LEFT_PAREN range_list ^ RIGHT_PAREN
-               --      118.0:range_list <= range_list ^ COMMA range_g
+               --      103.0:name <= name LEFT_PAREN range_list ^ RIGHT_PAREN 
; ( 1 => Direct_Left,  3 => Other)
+               --      118.0:range_list <= range_list ^ COMMA range_g ; (1 => 
Direct_Left, 3 => Other_Right)
                --
                --  State 251:
                --      110.0:parameter_specification <= IDENTIFIER COLON 
IDENTIFIER ^ COLON_EQUAL expression_opt
                --      110.1:parameter_specification <= IDENTIFIER COLON 
IDENTIFIER ^
                --
-               --  From java_enum_ch19_lr1.parse_table:
+               --  From java_enum_ch19_lr1_t1.parse_table:
                --
                --  State 8:
-               --       9.1:EnumConstantList <= EnumConstantList COMMA ^ 
EnumConstant ; (1 => Direct_Left, 3 => Other)
+               --       9.1:EnumConstantList <= EnumConstantList COMMA ^ 
EnumConstant ; (1 => Direct_Left)
                --      11.0:EnumBody <= LEFT_CURLY_BRACKET EnumConstantList 
COMMA ^ RIGHT_CURLY_BRACKET
                --
-               --  From empty_production_2_lalar.parse_table:
+               --  From empty_production_2_lalr.parse_table:
                --
                --  State 5:
-               --        8.0:declarations <= declarations ^ declaration
-               --        9.0:body <= IS declarations ^ BEGIN SEMICOLON
+               --        8.0:declarations <= declarations ^ declaration ; (1 
=> Direct_Left, 2 => Other_Right)
+               --        9.0:body <= IS declarations ^ BEGIN SEMICOLON ; (2 => 
Other)
 
-               --  case 0: In states 43 and 30, there is only one possible 
action, so
-               --  recursion is not considered. Minimal_Action is
-               --  computed by Compute_Minimal_Action, Label is Keep_Always.
+               --  case 0: In states 42 and 30, there is only one possible 
action.
+               --  Recursion is ignored; Minimal_Action is computed by
+               --  Compute_Action, Label is Keep_Always.
                --
                --  In the following, we only consider kernels where there is 
more
                --  than one item.
                --
-               --  case 1: In state 47 production 113.2, Length_After_Dot is 
0, so
-               --  recursion is not considered. We set Label to Keep_Always, 
since
-               --  the true Length_After_Dot must be computed at runtime.
+               --  case 1: In state 47 production 113.2, Dot is after all 
tokens, so
+               --  the true Length_After_Dot must be computed at runtime. 
Recursion
+               --  is not considered, because any other McKenzie operation 
would also
+               --  need to do a reduce to the LHS here. Label is Keep_Always,
                --  Minimal_Action is Reduce_Production.
                --
-               --  Similarly in state 68 production 115.0, Length_After_Dot is 0
-               --  because parameter_profile_opt is nullable, and we set Label 
to
-               --  Keep_Always, Minimal_Action to Reduce_Production.
+               --  In state 68 production 115.0, Length_After_Dot is 0 because
+               --  parameter_profile_opt is nullable. We don't ignore 
recursion in
+               --  this case; the nullable token may be in the recursion 
cycle. So if
+               --  the production is recursive, the item is dropped.
                --
                --  case 2: In state 47, if LEFT_PAREN or First
                --  (actual_parameter_part) is inserted, a recursion cycle is 
followed
@@ -978,18 +1419,25 @@ package body WisiToken.Generate.LR is
                --  cycle; left recursion applies even when it is not just 
before the
                --  parse point. On the other hand, in ada_lite state 154, both
                --  productions are left recursive; 103.0 could be preserved. 
In the
-               --  current algorithm, both are dropped.
+               --  current algorithm, both are dropped; this avoids needing 
cycle
+               --  detection at runtime.
+               --
+               --  It is tempting to allow a minimal complete action for 
tokens in an
+               --  RHS that are not in a recursion cycle. However, with partial
+               --  recursion this is not possible because we don't have 
accurate
+               --  recursion information, and in simpler languages that allow
+               --  computing full recursion it is not very helpful. So we treat
+               --  productions with left recursion the same, independent of Dot.
                --
                --  It is possible for both case 1 and case 2 to apply; see
                --  empty_production_2_lalr.parse_table State 5 above and
-               --  ada_lite_ebnf_lalr.parse_table state 46. case 1 has 
precedence if
-               --  Dot = No_Element.
+               --  ada_lite_ebnf_lalr.parse_table state 46. case 2 has 
precedence.
                --
                --  case 3: In state 251, there is no recursion, and 
Length_After_Dot
                --  is correct; Label is set to Keep_If_Minimal, Minimal_Action 
to
-               --  Compute_Minimal_Action. In State 77, Dot_ID is 
association_list
-               --  which has Other recursion; we say "there is recursion at 
the parse
-               --  point". However, Length_After_Dot is correct; it assumes the
+               --  Compute_Action. In State 77, Dot_ID is association_list 
which has
+               --  Other recursion; we say "there is recursion at the parse 
point".
+               --  However, Length_After_Dot is correct; it assumes the
                --  recursion-breaking case for the expansion of 
association_list. So
                --  this is the same as no recursion at the parse point
                --
@@ -999,6 +1447,7 @@ package body WisiToken.Generate.LR is
 
                if Item_States'Length = 1 then
                   --  case 0
+                  Case_Label := 0;
                   Item_States (I) :=
                     (Keep_Always,
                      (if Length_After_Dot = 0
@@ -1008,9 +1457,11 @@ package body WisiToken.Generate.LR is
                elsif Length_After_Dot = 0 then
                   if Item.Dot /= No_Index and RHS.Recursion (1) in Direct_Left 
| Other_Left then
                      --  case 2
+                     Case_Label := 2;
                      Item_States (I) := (Label => Drop);
                   else
                      --  case 1
+                     Case_Label := 1;
                      Item_States (I) :=
                        (Label          => Keep_Always,
                         Minimal_Action => (Reduce, Reduce_Production, 
Reduce_Count));
@@ -1018,10 +1469,12 @@ package body WisiToken.Generate.LR is
 
                elsif RHS.Recursion (1) in Direct_Left | Other_Left then
                   --  case 2
+                  Case_Label := 2;
                   Item_States (I) := (Label => Drop);
 
                else
                   --  case 3
+                  Case_Label := 3;
                   Item_States (I) := (Keep_If_Minimal, Compute_Action 
(Dot_ID));
                end if;
 
@@ -1038,11 +1491,11 @@ package body WisiToken.Generate.LR is
                   end if;
                end if;
 
-               if Trace_Generate_Minimal_Complete > Extra then
+               if Trace_Generate_Minimal_Complete > Detail then
                   Ada.Text_IO.Put_Line
-                    ("kernel" & I'Image & " " & Strict_Image (State.Kernel 
(I)) &
-                       " ; " & Item_States (I).Label'Image &
-                       " " & State.Kernel (I).Length_After_Dot'Image);
+                    ("kernel" & I'Image & " " & Image (State.Kernel (I), 
Descriptor) &
+                       " " & Item_States (I).Label'Image & ": " & Image 
(RHS.Recursion (1)) &
+                       " case" & Case_Label'Image);
                end if;
 
                if I < Kernel_Index'Last then
@@ -1107,20 +1560,20 @@ package body WisiToken.Generate.LR is
    --  Parse table output
 
    procedure Put_Text_Rep
-     (Table        : in Parse_Table;
-      File_Name    : in String;
-      Action_Names : in Names_Array_Array;
-      Check_Names  : in Names_Array_Array)
+     (Table     : in Parse_Table;
+      File_Name : in String)
    is
       use all type SAL.Base_Peek_Type;
       use Ada.Containers;
-      use Ada.Text_IO;
+      use SAL.Unix_Text_IO;
       File : File_Type;
    begin
       --  Only space, semicolon, newline delimit object values. Bounds of
       --  arrays output before each array, unless known from discriminants.
-      --  End of lists indicated by semicolon. Action, Check subprograms are
-      --  represented by True if present, False if not.
+      --  End of lists indicated by semicolon.
+      --
+      --  We use Unix_Text_IO to enforce Unix line endings; a later dos2unix
+      --  step is very slow on very large files.
 
       Create (File, Out_File, File_Name);
 
@@ -1140,7 +1593,8 @@ package body WisiToken.Generate.LR is
                Node_J : Parse_Action_Node_Ptr := Node_I.Actions;
             begin
                loop
-                  Put (File, Node_J.Item.Verb'Image);
+                  --  WORKAROUND: subtype'Image in GNAT Community 2020 
produces integer.
+                  Put (File, All_Parse_Action_Verbs'Image 
(All_Parse_Action_Verbs (Node_J.Item.Verb)));
                   Put (File, Node_J.Item.Production.LHS'Image & 
Node_J.Item.Production.RHS'Image);
 
                   case Node_J.Item.Verb is
@@ -1148,21 +1602,6 @@ package body WisiToken.Generate.LR is
                      Put (File, State_Index'Image (Node_J.Item.State));
 
                   when Reduce | Accept_It =>
-                     if Action_Names (Node_J.Item.Production.LHS) /= null and 
then
-                       Action_Names 
(Node_J.Item.Production.LHS)(Node_J.Item.Production.RHS) /= null
-                     then
-                        Put (File, " true");
-                     else
-                        Put (File, " false");
-                     end if;
-                     if Check_Names (Node_J.Item.Production.LHS) /= null and 
then
-                       Check_Names 
(Node_J.Item.Production.LHS)(Node_J.Item.Production.RHS) /= null
-                     then
-                        Put (File, " true");
-                     else
-                        Put (File, " false");
-                     end if;
-
                      Put (File, Ada.Containers.Count_Type'Image 
(Node_J.Item.Token_Count));
 
                   when Parse.LR.Error =>
@@ -1216,7 +1655,8 @@ package body WisiToken.Generate.LR is
             Put (File, Count_Type'Image 
(State.Minimal_Complete_Actions.Last_Index));
             for Action of State.Minimal_Complete_Actions loop
                Put (File, " ");
-               Put (File, Action.Verb'Image);
+               --  WORKAROUND: subtype'Image in GNAT Community 2020 produces 
integer.
+               Put (File, All_Parse_Action_Verbs'Image (All_Parse_Action_Verbs 
(Action.Verb)));
                Put (File, Action.Production.LHS'Image & 
Action.Production.RHS'Image);
                case Action.Verb is
                when Shift =>
@@ -1295,8 +1735,7 @@ package body WisiToken.Generate.LR is
       Put_Line ("Fast_Forward      => " & Integer'Image (Item.Fast_Forward));
       Put_Line ("Matching_Begin    => " & Integer'Image (Item.Matching_Begin));
       Put_Line ("Ignore_Check_Fail =>" & Integer'Image 
(Item.Ignore_Check_Fail));
-      Put_Line ("Task_Count        =>" & 
System.Multiprocessors.CPU_Range'Image (Item.Task_Count));
-      Put_Line ("Check_Limit       =>" & Token_Index'Image (Item.Check_Limit));
+      Put_Line ("Check_Limit       =>" & Item.Check_Limit'Image);
       Put_Line ("Check_Delta_Limit =>" & Integer'Image 
(Item.Check_Delta_Limit));
       Put_Line ("Enqueue_Limit     =>" & Integer'Image (Item.Enqueue_Limit));
    end Put;
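
The two WORKAROUND comments above convert the action verb to the full type All_Parse_Action_Verbs before taking 'Image because, per those comments, Subtype'Image in GNAT Community 2020 produced an integer instead of the enumeration literal. A minimal standalone sketch of the same pattern; the type names here are assumed stand-ins, not taken from this commit:

   with Ada.Text_IO;
   procedure Verb_Image_Demo is
      --  Assumed stand-ins for All_Parse_Action_Verbs and a conflict subtype.
      type All_Verbs is (Shift, Reduce, Accept_It, Error);
      subtype Conflict_Verbs is All_Verbs range Shift .. Accept_It;

      V : constant Conflict_Verbs := Reduce;
   begin
      --  Convert to the full type, then take that type's 'Image.
      Ada.Text_IO.Put_Line (All_Verbs'Image (All_Verbs (V)));  --  prints REDUCE
   end Verb_Image_Demo;
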
@@ -1326,6 +1765,28 @@ package body WisiToken.Generate.LR is
       end case;
    end Put;
 
+   function Image
+     (Item       : in Parse_Action_Rec;
+      Descriptor : in WisiToken.Descriptor)
+      return String
+   is
+      use Ada.Containers;
+   begin
+      case Item.Verb is
+      when Shift =>
+         return "(Shift, " & Image (Item.Production, Descriptor) & "," & 
State_Index'Image (Item.State) & ")";
+
+      when Reduce =>
+         return "(Reduce, " & Image (Item.Production, Descriptor) & "," & 
Count_Type'Image (Item.Token_Count) & ")";
+
+      when Accept_It =>
+         return "(Accept_It, " & Image (Item.Production, Descriptor) & "," & 
Count_Type'Image (Item.Token_Count) & ")";
+
+      when Parse.LR.Error =>
+         return "(Error)";
+      end case;
+   end Image;
+
    procedure Put (Descriptor : in WisiToken.Descriptor; Action : in 
Parse_Action_Node_Ptr)
    is
       use Ada.Text_IO;
@@ -1341,11 +1802,40 @@ package body WisiToken.Generate.LR is
       end loop;
    end Put;
 
+   procedure Put
+     (File       : in Ada.Text_IO.File_Type;
+      Action     : in Parse_Action_Node_Ptr;
+      Descriptor : in WisiToken.Descriptor)
+   is
+      use Ada.Text_IO;
+      Ptr : Parse_Action_Node_Ptr := Action;
+   begin
+      loop
+         Put (File, Image (Ptr.Item, Descriptor));
+         Ptr := Ptr.Next;
+         exit when Ptr = null;
+         Put_Line (File, ",");
+      end loop;
+   end Put;
+
    procedure Put (Descriptor : in WisiToken.Descriptor; State : in Parse_State)
    is
       use all type Ada.Containers.Count_Type;
       use Ada.Text_IO;
       use Ada.Strings.Fixed;
+
+      procedure Put (Action : in Minimal_Action)
+      is begin
+         Put ("(");
+         case Action.Verb is
+         when Shift =>
+            Put (Image (Action.ID, Descriptor));
+         when Reduce =>
+            Put (Trimmed_Image (Action.Token_Count) & " " & Image 
(Action.Production.LHS, Descriptor));
+         end case;
+         Put (" " & Trimmed_Image (Action.Production) & ")");
+      end Put;
+
    begin
       for Action of State.Action_List loop
          Put ("   " & Image (Action.Symbol, Descriptor) &
@@ -1370,37 +1860,18 @@ package body WisiToken.Generate.LR is
       end loop;
 
       New_Line;
-      Put ("   Minimal_Complete_Action => "); --  No trailing 's' for 
compatibility with previous good parse tables.
+      Put ("   Minimal_Complete_Actions => ");
       case State.Minimal_Complete_Actions.Length is
       when 0 =>
          null;
+
       when 1 =>
-         --  No () here for compatibity with previous known good parse tables.
-         declare
-            Action : Minimal_Action renames State.Minimal_Complete_Actions 
(State.Minimal_Complete_Actions.First_Index);
-         begin
-            case Action.Verb is
-            when Shift =>
-               Put (Image (Action.ID, Descriptor));
-            when Reduce =>
-               Put (Image (Action.Production.LHS, Descriptor));
-            end case;
-            Put (" " & Trimmed_Image (Action.Production));
-         end;
+         Put (State.Minimal_Complete_Actions 
(State.Minimal_Complete_Actions.First_Index));
+
       when others =>
          Put ("(");
          for I in State.Minimal_Complete_Actions.First_Index .. 
State.Minimal_Complete_Actions.Last_Index loop
-            declare
-               Action : Minimal_Action renames State.Minimal_Complete_Actions 
(I);
-            begin
-               case Action.Verb is
-               when Shift =>
-                  Put (Image (Action.ID, Descriptor));
-               when Reduce =>
-                  Put (Image (Action.Production.LHS, Descriptor));
-               end case;
-               Put (" " & Trimmed_Image (Action.Production));
-            end;
+            Put (State.Minimal_Complete_Actions (I));
             if I < State.Minimal_Complete_Actions.Last_Index then
                Put (", ");
             end if;
@@ -1411,19 +1882,25 @@ package body WisiToken.Generate.LR is
    end Put;
 
    procedure Put_Parse_Table
-     (Table                 : in Parse_Table_Ptr;
-      Parse_Table_File_Name : in String;
-      Title                 : in String;
-      Grammar               : in WisiToken.Productions.Prod_Arrays.Vector;
-      Recursions            : in Generate.Recursions;
-      Kernels               : in LR1_Items.Item_Set_List;
-      Conflicts             : in Conflict_Count_Lists.List;
-      Descriptor            : in WisiToken.Descriptor;
-      Include_Extra         : in Boolean := False)
+     (Table                 : in     Parse_Table_Ptr;
+      Parse_Table_File_Name : in     String;
+      Title                 : in     String;
+      Grammar               : in     WisiToken.Productions.Prod_Arrays.Vector;
+      Recursions            : in     Generate.Recursions;
+      Kernels               : in     LR1_Items.Item_Set_List;
+      Declared_Conflicts    : in out WisiToken.Generate.LR.Conflict_Lists.Tree;
+      Unknown_Conflicts     : in out WisiToken.Generate.LR.Conflict_Lists.Tree;
+      Descriptor            : in     WisiToken.Descriptor;
+      Include_Extra         : in     Boolean := False)
    is
+      use all type WisiToken.Syntax_Trees.Sequential_Index;
       use all type Ada.Containers.Count_Type;
       use Ada.Text_IO;
+
       Parse_Table_File : File_Type;
+
+      Minimal_Complete_Action_States : Integer := 0;
+      Minimal_Complete_Actions       : Ada.Containers.Count_Type := 0;
    begin
       Create (Parse_Table_File, Out_File, Parse_Table_File_Name);
       Set_Output (Parse_Table_File);
@@ -1434,6 +1911,26 @@ package body WisiToken.Generate.LR is
       Put_Line ("Productions:");
       WisiToken.Productions.Put (Grammar, Descriptor);
 
+      declare
+         Count : Integer := 0;
+      begin
+         for Prod of Grammar loop
+            if Prod.Optimized_List then
+               Count := @ + 1;
+            end if;
+         end loop;
+         if Count > 0 then
+            New_Line;
+            Put_Line ("Optimized_Lists:");
+            for Prod of Grammar loop
+               if Prod.Optimized_List then
+                  Put (" " & Image (Prod.LHS, Descriptor));
+               end if;
+            end loop;
+            New_Line;
+         end if;
+      end;
+
       if Include_Extra then
          New_Line;
          Put_Line ((if Recursions.Full then "Recursions:" else "Partial 
recursions:"));
@@ -1473,36 +1970,55 @@ package body WisiToken.Generate.LR is
          if State_Index /= Table.States'Last then
             New_Line;
          end if;
+
+         Minimal_Complete_Actions := @ + Table.States 
(State_Index).Minimal_Complete_Actions.Length;
+         if Table.States (State_Index).Minimal_Complete_Actions.Length > 0 then
+            Minimal_Complete_Action_States := @ + 1;
+         end if;
       end loop;
 
-      if Conflicts.Length > 0 then
-         declare
-            use Ada.Strings.Unbounded;
-            Line          : Unbounded_String := +"States with conflicts:";
-            Accept_Reduce : Integer          := 0;
-            Shift_Reduce  : Integer          := 0;
-            Reduce_Reduce : Integer          := 0;
-         begin
-            for Count of Conflicts loop
-               Line          := Line & State_Index'Image (Count.State);
-               Accept_Reduce := Accept_Reduce + Count.Accept_Reduce;
-               Shift_Reduce  := Shift_Reduce + Count.Shift_Reduce;
-               Reduce_Reduce := Reduce_Reduce + Count.Reduce_Reduce;
+      New_Line;
+      Put_Line
+        (Trimmed_Image (Minimal_Complete_Action_States) & " states with 
minimal_complete_actions;" &
+           Minimal_Complete_Actions'Image & " total 
minimal_complete_actions.");
+
+      declare
+         use Ada.Strings.Unbounded;
+         Conflict_Present : array (Table.State_First .. Table.State_Last) of 
Boolean := (others => False);
+         Conflict_Count   : Integer := 0;
+         Line             : Unbounded_String;
+      begin
+         for Conflict of Declared_Conflicts loop
+            for State of Conflict.States loop
+               Conflict_Present (State) := True;
             end loop;
+         end loop;
 
-            New_Line;
-            Indent_Wrap (-Line);
+         for Conflict of Unknown_Conflicts loop
+            for State of Conflict.States loop
+               Conflict_Present (State) := True;
+            end loop;
+         end loop;
 
+         for I in Conflict_Present'Range loop
+            if Conflict_Present (I) then
+               Conflict_Count := @ + 1;
+               if Include_Extra then
+                  Append (Line, I'Image);
+               end if;
+            end if;
+         end loop;
+
+         if Conflict_Count > 0 then
             New_Line;
-            Put_Line
-              (Integer'Image (Accept_Reduce) & " accept/reduce conflicts," &
-                 Integer'Image (Shift_Reduce) & " shift/reduce conflicts," &
-                 Integer'Image (Reduce_Reduce) & " reduce/reduce conflicts");
-         end;
-      else
-         New_Line;
-         Put_Line (" 0 accept/reduce conflicts, 0 shift/reduce conflicts, 0 
reduce/reduce conflicts");
-      end if;
+            if Include_Extra then
+               Line := Trimmed_Image (Conflict_Count) & " states with 
conflicts:" & Line;
+               Indent_Wrap (-Line);
+            else
+               Put_Line (Trimmed_Image (Conflict_Count) & " states with 
conflicts");
+            end if;
+         end if;
+      end;
       Set_Output (Standard_Output);
       Close (Parse_Table_File);
    end Put_Parse_Table;
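
The statistics added above use the Ada 2022 target-name symbol "@" (for example "Count := @ + 1;"), where "@" stands for the target of the enclosing assignment. A small self-contained sketch, assuming a compiler mode that accepts Ada 2022 (for example GNAT's -gnat2022 switch):

   with Ada.Text_IO;
   procedure Target_Name_Demo is
      Count : Integer := 0;
   begin
      for I in 1 .. 3 loop
         Count := @ + 1;  --  "@" names the target; same as Count := Count + 1;
      end loop;
      Ada.Text_IO.Put_Line (Integer'Image (Count));  --  prints  3
   end Target_Name_Demo;
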
diff --git a/wisitoken-generate-lr.ads b/wisitoken-generate-lr.ads
index 00257fe56e..b2d34f3524 100644
--- a/wisitoken-generate-lr.ads
+++ b/wisitoken-generate-lr.ads
@@ -2,7 +2,7 @@
 --
 --  Common utilities for LR parser table generators.
 --
---  Copyright (C) 2017 - 2020 Free Software Foundation, Inc.
+--  Copyright (C) 2017 - 2022 Free Software Foundation, Inc.
 --
 --  This library is free software;  you can redistribute it and/or modify it
 --  under terms of the  GNU General Public License  as published by the Free
@@ -17,112 +17,145 @@
 
 pragma License (Modified_GPL);
 
-with Ada.Containers.Doubly_Linked_Lists;
+with SAL.Gen_Unbounded_Definite_Vectors_Sorted;
+with SAL.Gen_Unbounded_Definite_Red_Black_Trees;
 with WisiToken.Generate.LR1_Items;
 with WisiToken.Parse.LR;
 with WisiToken.Productions;
 package WisiToken.Generate.LR is
    use WisiToken.Parse.LR;
 
-   subtype Conflict_Parse_Actions is Parse_Action_Verbs range Shift .. 
Accept_It;
+   type Conflict_Item is record
+      Verb : Conflict_Parse_Actions := Conflict_Parse_Actions'First;
+      LHS  : Token_ID               := Invalid_Token_ID;
+   end record;
+
+   function Conflict_Item_Compare (Left, Right : in Conflict_Item) return 
SAL.Compare_Result
+   is (if Left.Verb > Right.Verb
+       then SAL.Greater
+       elsif Left.Verb < Right.Verb
+       then SAL.Less
+       else
+         (if Left.LHS > Right.LHS
+          then SAL.Greater
+          elsif Left.LHS < Right.LHS
+          then SAL.Less
+          else SAL.Equal));
+
+   function To_Key (Item : in Conflict_Item) return Conflict_Item
+   is (Item);
+
+   package Conflict_Item_Lists is new SAL.Gen_Unbounded_Definite_Vectors_Sorted
+     (Element_type    => Conflict_Item,
+      Key_Type        => Conflict_Item,
+      To_Key          => To_Key,
+      Key_Compare     => Conflict_Item_Compare,
+      Default_Element => (others => <>));
+
    type Conflict is record
-      --  A typical conflict is:
+      --  In the parse table, a "conflict" occurs when there are two or more
+      --  actions for one token in a state:
       --
-      --  SHIFT/REDUCE in state: 11 on token IS
+      --  RIGHT_PAREN => shift and goto state 833 222.1,
+      --                 reduce 1 tokens to explicit_actual_parameter 256.1,
+      --                 reduce 1 tokens to subtype_indication 118.3,
+      --                 reduce 1 tokens to primary 209.3
       --
-      --  State numbers change with minor changes in the grammar, so we
-      --  attempt to identify the state by the LHS of the two productions
-      --  involved; this is _not_ guarranteed to be unique, but is good
-      --  enough for our purposes. We also store the state number for
-      --  generated conflicts (not for known conflicts from the grammar
-      --  definition file), for debugging.
-      Action_A    : Conflict_Parse_Actions;
-      LHS_A       : Token_ID;
-      Action_B    : Conflict_Parse_Actions;
-      LHS_B       : Token_ID;
-      State_Index : Unknown_State_Index;
-      On          : Token_ID;
-   end record;
+      --  The same conflict can occur in multiple states.
+      --
+      --  The user must declare all known conflicts in the grammar file;
+      --  this helps them eliminate unnecessary conflicts, which can
+      --  significantly slow both normal parsing and error recovery.
+      --
+      --  We identify the conflict by the token, and the action and LHS of
+      --  all the productions involved. We also store all the states it
+      --  occurs in, for debugging.
+      On     : Token_ID := Invalid_Token_ID;
+      Items  : Conflict_Item_Lists.Vector;
+      States : State_Index_Arrays.Vector;
+
+      Resolution : Token_ID := Invalid_Token_ID;
+      --  The resolution specified by %conflict_resolution.
 
-   package Conflict_Lists is new Ada.Containers.Doubly_Linked_Lists (Conflict);
+      Resolution_Used : Boolean := False;
+      --  True if actually used when generating the parse table.
 
-   type Conflict_Count is record
-      State : State_Index;
-      Accept_Reduce : Integer             := 0;
-      Shift_Reduce  : Integer             := 0;
-      Reduce_Reduce : Integer             := 0;
+      Conflict_Seen : Boolean := False;
+      --  True if encountered in parse table.
    end record;
 
-   package Conflict_Count_Lists is new Ada.Containers.Doubly_Linked_Lists 
(Conflict_Count);
+   function Image (Conflict : in LR.Conflict; Descriptor : in 
WisiToken.Descriptor) return String;
+
+   function Conflict_Compare (Left, Right : in Conflict) return 
SAL.Compare_Result;
+   --  Sort on On, Items.
+
+   function To_Key (Item : in Conflict) return Conflict
+   is (Item);
+
+   package Conflict_Lists is new SAL.Gen_Unbounded_Definite_Red_Black_Trees
+     (Element_Type => Conflict,
+      Key_Type     => Conflict,
+      Key          => To_Key,
+      Key_Compare  => Conflict_Compare);
 
    procedure Put
-     (Item       : in Conflict_Lists.List;
+     (Item       : in Conflict_Lists.Tree;
       File       : in Ada.Text_IO.File_Type;
       Descriptor : in WisiToken.Descriptor);
 
+   procedure Check_Conflicts
+     (Label            : in     String;
+      Found_Conflicts  : in out Conflict_Lists.Tree;
+      Known_Conflicts  : in out Conflict_Lists.Tree;
+      File_Name        : in     String;
+      Descriptor       : in     WisiToken.Descriptor;
+      Ignore_Conflicts : in     Boolean);
+   --  Compare Found and Known Conflicts. If they differ, and
+   --  Ignore_Conflicts is false, output appropriate error messages.
+
+   ----------
+   --  Build parse table
+
    procedure Add_Action
-     (Symbol               : in     Token_ID;
-      Action               : in     Parse_Action_Rec;
-      Action_List          : in out Action_Arrays.Vector;
-      Closure              : in     LR1_Items.Item_Set;
-      Grammar              : in     WisiToken.Productions.Prod_Arrays.Vector;
-      Has_Empty_Production : in     Token_ID_Set;
-      First_Nonterm_Set    : in     Token_Array_Token_Set;
-      Conflict_Counts      : in out Conflict_Count_Lists.List;
-      Conflicts            : in out Conflict_Lists.List;
-      Descriptor           : in     WisiToken.Descriptor);
-   --  Add (Symbol, Action) to Action_List; check for conflicts
-   --
-   --  Closure .. Conflicts are for conflict reporting
+     (State              : in     State_Index;
+      Symbol             : in     Token_ID;
+      Action             : in     Parse_Action_Rec;
+      Action_List        : in out Action_Arrays.Vector;
+      Grammar            : in     WisiToken.Productions.Prod_Arrays.Vector;
+      Descriptor         : in     WisiToken.Descriptor;
+      Declared_Conflicts : in out WisiToken.Generate.LR.Conflict_Lists.Tree;
+      Unknown_Conflicts  : in out WisiToken.Generate.LR.Conflict_Lists.Tree;
+      First_Nonterm_Set  : in     WisiToken.Token_Array_Token_Set;
+      File_Name          : in     String;
+      Ignore_Conflicts   : in     Boolean);
+   --  Add (Symbol, Action) to Action_List. Other args are for conflict
+   --  detection, resolution, and error reporting.
 
    procedure Add_Actions
-     (Closure              : in     LR1_Items.Item_Set;
-      Table                : in out Parse_Table;
-      Grammar              : in     WisiToken.Productions.Prod_Arrays.Vector;
-      Has_Empty_Production : in     Token_ID_Set;
-      First_Nonterm_Set    : in     Token_Array_Token_Set;
-      Conflict_Counts      : in out Conflict_Count_Lists.List;
-      Conflicts            : in out Conflict_Lists.List;
-      Descriptor           : in     WisiToken.Descriptor);
-   --  Add actions for Closure to Table. Has_Empty_Production, First,
-   --  Conflicts used for conflict reporting.
+     (Closure            : in     LR1_Items.Item_Set;
+      Table              : in out Parse_Table;
+      Grammar            : in     WisiToken.Productions.Prod_Arrays.Vector;
+      Descriptor         : in     WisiToken.Descriptor;
+      Declared_Conflicts : in out WisiToken.Generate.LR.Conflict_Lists.Tree;
+      Unknown_Conflicts  : in out WisiToken.Generate.LR.Conflict_Lists.Tree;
+      First_Nonterm_Set  : in     WisiToken.Token_Array_Token_Set;
+      File_Name          : in     String;
+      Ignore_Conflicts   : in     Boolean);
+   --  Add actions in Closure to Table.
 
    procedure Add_Lookahead_Actions
-     (Item                 : in     LR1_Items.Item;
-      Action_List          : in out Action_Arrays.Vector;
-      Grammar              : in     WisiToken.Productions.Prod_Arrays.Vector;
-      Has_Empty_Production : in     Token_ID_Set;
-      First_Nonterm_Set    : in     Token_Array_Token_Set;
-      Conflict_Counts      : in out Conflict_Count_Lists.List;
-      Conflicts            : in out Conflict_Lists.List;
-      Closure              : in     LR1_Items.Item_Set;
-      Descriptor           : in     WisiToken.Descriptor);
+     (State              : in     State_Index;
+      Item               : in     LR1_Items.Item;
+      Action_List        : in out Action_Arrays.Vector;
+      Grammar            : in     WisiToken.Productions.Prod_Arrays.Vector;
+      Descriptor         : in     WisiToken.Descriptor;
+      Declared_Conflicts : in out WisiToken.Generate.LR.Conflict_Lists.Tree;
+      Unknown_Conflicts  : in out WisiToken.Generate.LR.Conflict_Lists.Tree;
+      First_Nonterm_Set  : in     WisiToken.Token_Array_Token_Set;
+      File_Name          : in     String;
+      Ignore_Conflicts   : in     Boolean);
    --  Add actions for Item.Lookaheads to Action_List
    --  Closure must be from the item set containing Item.
-   --  Has_Empty_Production .. Closure used for conflict reporting.
-
-   procedure Delete_Known
-     (Conflicts       : in out Conflict_Lists.List;
-      Known_Conflicts : in out Conflict_Lists.List);
-   --  Delete Known_Conflicts from Conflicts.
-
-   function Find
-     (Closure              : in LR1_Items.Item_Set;
-      Action               : in Parse_Action_Rec;
-      Lookahead            : in Token_ID;
-      Grammar              : in WisiToken.Productions.Prod_Arrays.Vector;
-      Has_Empty_Production : in Token_ID_Set;
-      First                : in Token_Array_Token_Set;
-      Descriptor           : in WisiToken.Descriptor)
-     return Token_ID;
-   --  Return the LHS of a production in kernel of Closure, for an Action
-   --  conflict on Lookahead; for naming a Conflict object.
-
-   function Image (Item : in Conflict; Descriptor : in WisiToken.Descriptor) 
return String;
-
-   function Is_Present (Item : in Conflict; Conflicts : in 
Conflict_Lists.List) return Boolean;
-
-   function Match (Known : in Conflict; Item : in 
Conflict_Lists.Constant_Reference_Type) return Boolean;
 
    ----------
    --  Minimal terminal sequences.
@@ -146,12 +179,17 @@ package WisiToken.Generate.LR is
    type Minimal_Sequence_Array is array (Token_ID range <>) of 
Minimal_Sequence_Item;
 
    function Compute_Minimal_Terminal_Sequences
-     (Descriptor : in WisiToken.Descriptor;
-      Grammar    : in WisiToken.Productions.Prod_Arrays.Vector)
+     (Descriptor        : in WisiToken.Descriptor;
+      Grammar           : in WisiToken.Productions.Prod_Arrays.Vector;
+      Grammar_File_Name : in String)
      return Minimal_Sequence_Array;
    --  For each production in Grammar, compute the minimal sequence of
    --  terminals that will complete it. Result is an empty sequence if
    --  the production may be empty.
+   --
+   --  If some minimal sequences cannot be computed due to bad grammar
+   --  structure, an error message using Grammar_File_Name is put to
+   --  Current_Error, and Parse_Error is raised.
 
    function Compute_Minimal_Terminal_First
      (Descriptor                 : in WisiToken.Descriptor;
@@ -177,17 +215,15 @@ package WisiToken.Generate.LR is
    --  is nothing useful to do; the accept state, or one where all
    --  productions are recursive.
    --
-   --  Also set State.Kernels; used to resolve multiple reduce actions at
+   --  Also set State.Kernel; used to resolve multiple reduce actions at
    --  runtime.
 
    ----------
    --  Parse table output
 
    procedure Put_Text_Rep
-     (Table        : in Parse_Table;
-      File_Name    : in String;
-      Action_Names : in Names_Array_Array;
-      Check_Names  : in Names_Array_Array);
+     (Table     : in Parse_Table;
+      File_Name : in String);
    --  Write machine-readable text format of Table.States to a file
    --  File_Name, to be read by the parser executable at startup, using
    --  WisiToken.Parse.LR.Get_Text_Rep.
@@ -199,16 +235,29 @@ package WisiToken.Generate.LR is
    procedure Put (Descriptor : in WisiToken.Descriptor; State : in 
Parse_State);
    --  Put Item to Ada.Text_IO.Current_Output in parse table format.
 
+   function Image
+     (Item       : in Parse_Action_Rec;
+      Descriptor : in WisiToken.Descriptor)
+      return String;
+   --  Ada aggregate format.
+
+   procedure Put
+     (File       : in Ada.Text_IO.File_Type;
+      Action     : in Parse_Action_Node_Ptr;
+      Descriptor : in WisiToken.Descriptor);
+   --  Put Action to File in error message format.
+
    procedure Put_Parse_Table
-     (Table                 : in Parse_Table_Ptr;
-      Parse_Table_File_Name : in String;
-      Title                 : in String;
-      Grammar               : in WisiToken.Productions.Prod_Arrays.Vector;
-      Recursions            : in Generate.Recursions;
-      Kernels               : in LR1_Items.Item_Set_List;
-      Conflicts             : in Conflict_Count_Lists.List;
-      Descriptor            : in WisiToken.Descriptor;
-      Include_Extra         : in Boolean := False);
+     (Table                 : in     Parse_Table_Ptr;
+      Parse_Table_File_Name : in     String;
+      Title                 : in     String;
+      Grammar               : in     WisiToken.Productions.Prod_Arrays.Vector;
+      Recursions            : in     Generate.Recursions;
+      Kernels               : in     LR1_Items.Item_Set_List;
+      Declared_Conflicts    : in out WisiToken.Generate.LR.Conflict_Lists.Tree;
+      Unknown_Conflicts     : in out WisiToken.Generate.LR.Conflict_Lists.Tree;
+      Descriptor            : in     WisiToken.Descriptor;
+      Include_Extra         : in     Boolean := False);
    --  "Extra" is recursions.
 
 end WisiToken.Generate.LR;
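
Conflict_Item_Compare above orders conflict items by Verb and then by LHS, so a Conflict is identified by its actions and token rather than by state numbers. A standalone sketch of the same two-level ordering using the standard Ada containers instead of the SAL generics; the type names and the Integer stand-in for Token_ID are assumptions, not from this commit:

   with Ada.Containers.Ordered_Sets;
   with Ada.Text_IO;
   procedure Conflict_Order_Demo is
      type Verbs is (Shift, Reduce, Accept_It);

      type Conflict_Item is record
         Verb : Verbs   := Verbs'First;
         LHS  : Integer := 0;  --  stand-in for Token_ID
      end record;

      --  Verb first, then LHS; the same ordering as Conflict_Item_Compare.
      function "<" (Left, Right : in Conflict_Item) return Boolean
      is (Left.Verb < Right.Verb
          or else (Left.Verb = Right.Verb and then Left.LHS < Right.LHS));

      package Item_Sets is new Ada.Containers.Ordered_Sets (Conflict_Item);

      Items : Item_Sets.Set;
   begin
      Items.Insert ((Reduce, 113));
      Items.Insert ((Shift, 103));
      Items.Insert ((Reduce, 87));

      --  Visits Shift items before Reduce items, lower LHS first within a verb.
      for Item of Items loop
         Ada.Text_IO.Put_Line (Verbs'Image (Item.Verb) & Integer'Image (Item.LHS));
      end loop;
   end Conflict_Order_Demo;

Keeping the actions of a conflict in one canonical order is what lets two Conflict values be compared without reference to the states they occur in.
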
diff --git a/wisitoken-generate-lr1_items.adb b/wisitoken-generate-lr1_items.adb
index 37e6f0e760..3111c5e8fc 100644
--- a/wisitoken-generate-lr1_items.adb
+++ b/wisitoken-generate-lr1_items.adb
@@ -2,7 +2,7 @@
 --
 --  See spec.
 --
---  Copyright (C) 2002, 2003, 2008, 2009, 2012 - 2015, 2017 - 2020 Free 
Software Foundation, Inc.
+--  Copyright (C) 2002, 2003, 2008, 2009, 2012 - 2015, 2017 - 2021 Free 
Software Foundation, Inc.
 --
 --  This file is part of the WisiToken package.
 --
@@ -27,38 +27,44 @@
 
 pragma License (Modified_GPL);
 
-with Ada.Text_IO;
 with Ada.Strings.Unbounded;
+with Ada.Text_IO;
+with Ada.Unchecked_Conversion;
 package body WisiToken.Generate.LR1_Items is
    use type Ada.Strings.Unbounded.Unbounded_String;
 
    ----------
    --  body subprograms
 
-   function Get_Dot_IDs
-     (Grammar    : in WisiToken.Productions.Prod_Arrays.Vector;
-      Set        : in Item_Lists.List;
-      Descriptor : in WisiToken.Descriptor)
-     return Token_ID_Arrays.Vector
+   function Merge
+     (Prod         : in     Production_ID;
+      Dot          : in     Token_ID_Arrays.Extended_Index;
+      ID           : in     Token_ID;
+      Existing_Set : in out Item_Set)
+     return Boolean
    is
+      --  Merge item into Existing_Set. Return True if Existing_Set
+      --  is modified.
+
       use Item_Lists;
-      IDs : Token_ID_Set (Descriptor.First_Terminal .. 
Descriptor.Last_Nonterminal) := (others => False);
+
+      Found    : constant Item_Lists.Cursor := Find (Prod, Dot, Existing_Set);
+      Modified : Boolean                    := False;
    begin
-      for Item of Set loop
+      if not Has_Element (Found) then
+         Existing_Set.Set.Insert ((Prod, Dot, To_Lookahead (ID)));
+         Modified := True;
+      else
          declare
-            use Token_ID_Arrays;
-            Dot : constant Token_ID_Arrays.Cursor :=
-              WisiToken.Productions.Constant_Ref_RHS (Grammar, 
Item.Prod).Tokens.To_Cursor (Item.Dot);
+            Lookaheads : Lookahead renames Variable_Ref (Found).Lookaheads;
          begin
-            if Has_Element (Dot) then
-               if Element (Dot) /= Descriptor.EOI_ID then
-                  IDs (Element (Dot)) := True;
-               end if;
-            end if;
+            Modified := not Lookaheads (ID);
+            Lookaheads (ID) := True;
          end;
-      end loop;
-      return To_Array (IDs);
-   end Get_Dot_IDs;
+      end if;
+
+      return Modified;
+   end Merge;
 
    function Merge
      (Prod         : in     Production_ID;
@@ -76,7 +82,7 @@ package body WisiToken.Generate.LR1_Items is
       Modified : Boolean                    := False;
    begin
       if not Has_Element (Found) then
-         Existing_Set.Set.Insert ((Prod, Dot, new Token_ID_Set'(Lookaheads)));
+         Existing_Set.Set.Insert ((Prod, Dot, Lookaheads));
 
          Modified := True;
       else
@@ -89,9 +95,9 @@ package body WisiToken.Generate.LR1_Items is
    ----------
    --  Public subprograms, declaration order
 
-   function To_Lookahead (Item : in Token_ID; Descriptor : in 
WisiToken.Descriptor) return Lookahead
+   function To_Lookahead (Item : in Token_ID) return Lookahead
    is begin
-      return Result : Token_ID_Set := (Descriptor.First_Terminal .. 
Descriptor.Last_Lookahead => False) do
+      return Result : Lookahead := (others => False) do
          Result (Item) := True;
       end return;
    end To_Lookahead;
@@ -101,7 +107,7 @@ package body WisiToken.Generate.LR1_Items is
       use Ada.Strings.Unbounded;
       Result : Unbounded_String := Null_Unbounded_String;
    begin
-      for I in Item'Range loop
+      for I in Descriptor.First_Terminal .. Descriptor.Last_Lookahead loop
          if Item (I) then
             if Length (Result) > 0 then
                Result := Result & "/";
@@ -125,47 +131,63 @@ package body WisiToken.Generate.LR1_Items is
      (Item  : in out LR1_Items.Item;
       Value : in     Lookahead;
       Added :    out Boolean)
-   is begin
-      Added := False;
-
-      for I in Item.Lookaheads'Range loop
-         if Value (I) then
-            Added := Added or not Item.Lookaheads (I);
-            Item.Lookaheads (I) := True;
-         end if;
-      end loop;
+   is
+      Item_Lookaheads_1 : constant Lookahead := Item.Lookaheads;
+   begin
+      Item.Lookaheads := Item.Lookaheads or Value;
+      Added           := Item.Lookaheads /= Item_Lookaheads_1;
    end Include;
 
    procedure Include
      (Item       : in out LR1_Items.Item;
       Value      : in     Lookahead;
+      Added      :    out Boolean;
       Descriptor : in     WisiToken.Descriptor)
    is
-      Added : Boolean;
-      pragma Unreferenced (Added);
+      Item_Lookaheads_1 : constant Lookahead := Item.Lookaheads;
    begin
-      Include (Item, Value, Added, Descriptor);
+      Item.Lookaheads := Item.Lookaheads or Value;
+
+      Item.Lookaheads (Descriptor.Last_Lookahead) := False;
+
+      Added := Item.Lookaheads /= Item_Lookaheads_1;
    end Include;
 
    procedure Include
      (Item       : in out LR1_Items.Item;
       Value      : in     Lookahead;
-      Added      :    out Boolean;
       Descriptor : in     WisiToken.Descriptor)
    is begin
-      Added := False;
+      Item.Lookaheads := Item.Lookaheads or Value;
 
-      for I in Item.Lookaheads'Range loop
-         if I = Descriptor.Last_Lookahead then
-            null;
-         else
-            if Value (I) then
-               Added := Added or not Item.Lookaheads (I);
-               Item.Lookaheads (I) := True;
+      Item.Lookaheads (Descriptor.Last_Lookahead) := False;
+   end Include;
+
+   function Get_Dot_IDs
+     (Grammar    : in WisiToken.Productions.Prod_Arrays.Vector;
+      Set        : in Item_Lists.List;
+      Descriptor : in WisiToken.Descriptor)
+     return Token_ID_Arrays.Vector
+   is
+      use Item_Lists;
+      IDs : Token_ID_Set (Descriptor.First_Terminal .. 
Descriptor.Last_Nonterminal) := (others => False);
+   begin
+      --  Note that this algorithm eliminates duplicate Dot_IDs
+      for Item of Set loop
+         declare
+            use Token_ID_Arrays;
+            RHS : WisiToken.Productions.Right_Hand_Side renames
+              WisiToken.Productions.Constant_Ref_RHS (Grammar, Item.Prod);
+         begin
+            if Item.Dot in RHS.Tokens.First_Index .. RHS.Tokens.Last_Index then
+               if RHS.Tokens (Item.Dot) /= Descriptor.EOI_ID then
+                  IDs (RHS.Tokens (Item.Dot)) := True;
+               end if;
             end if;
-         end if;
+         end;
       end loop;
-   end Include;
+      return To_Array (IDs);
+   end Get_Dot_IDs;
 
    function Filter
      (Set        : in     Item_Set;
@@ -178,7 +200,11 @@ package body WisiToken.Generate.LR1_Items is
         return Boolean)
      return Item_Set
    is begin
-      return Result : Item_Set := (Set => <>, Goto_List => Set.Goto_List, 
Dot_IDs => Set.Dot_IDs, State => Set.State)
+      return Result : Item_Set :=
+        (Set       => <>,
+         Goto_List => Set.Goto_List,
+         Dot_IDs   => Set.Dot_IDs,
+         Tree_Node => Set.Tree_Node)
       do
          for Item of Set.Set loop
             if Include (Grammar, Descriptor, Item) then
@@ -219,87 +245,107 @@ package body WisiToken.Generate.LR1_Items is
 
    function Find
      (Prod : in Production_ID;
-      Dot : in Token_ID_Arrays.Extended_Index;
+      Dot  : in Token_ID_Arrays.Extended_Index;
       Set  : in Item_Set)
      return Item_Lists.Cursor
    is begin
-      return Set.Set.Find ((Prod, Dot, null));
+      return Set.Set.Find ((Prod, Dot, Null_Lookahead));
    end Find;
 
-   function To_Item_Set_Tree_Key
-     (Item_Set           : in LR1_Items.Item_Set;
-      Include_Lookaheads : in Boolean)
-     return Item_Set_Tree_Key
+   procedure Compute_Key_Hash
+     (Item_Set           : in out LR1_Items.Item_Set;
+      Rows               : in     Positive;
+      Grammar            : in     WisiToken.Productions.Prod_Arrays.Vector;
+      Descriptor         : in     WisiToken.Descriptor;
+      Include_Lookaheads : in     Boolean)
    is
       use Interfaces;
-      use Item_Lists;
-      Cur : Item_Lists.Cursor := Item_Set.Set.First;
+
+      type Lookahead_Int_16 is record
+         Word_0 : Interfaces.Unsigned_16;
+         Word_1 : Interfaces.Unsigned_16;
+         Word_2 : Interfaces.Unsigned_16;
+         Word_3 : Interfaces.Unsigned_16;
+         Word_4 : Interfaces.Unsigned_16;
+         Word_5 : Interfaces.Unsigned_16;
+         Word_6 : Interfaces.Unsigned_16;
+         Word_7 : Interfaces.Unsigned_16;
+      end record;
+      for Lookahead_Int_16'Size use 128;
+
+      function To_Int_16 is new Ada.Unchecked_Conversion (Source => Lookahead, 
Target => Lookahead_Int_16);
    begin
-      return Result : Item_Set_Tree_Key do
-         Result.Append (Integer_16 (Item_Set.Set.Length));
-         --  Int_Arrays."<" compares length, but only after everything else; we
-         --  want it to compare first, since it is most likely to be different.
-
-         loop
-            exit when not Has_Element (Cur);
-            declare
-               Item_1 : Item renames Item_Set.Set (Cur);
-            begin
-               Result.Append (Integer_16 (Item_1.Prod.LHS));
-               Result.Append (Integer_16 (Item_1.Prod.RHS));
-               Result.Append (Integer_16 (Item_1.Dot));
-               if Include_Lookaheads then
-                  for ID in Item_1.Lookaheads'Range loop
-                     if Item_1.Lookaheads (ID) then
-                        Result.Append (Integer_16 (ID));
-                     end if;
-                  end loop;
-               end if;
-            end;
-            Next (Cur);
-         end loop;
-      end return;
-   end To_Item_Set_Tree_Key;
+      Item_Set.Tree_Node.Key.Append (Unsigned_16 (Item_Set.Set.Length));
+      --  Int_Arrays."<" compares length, but only after everything else; we
+      --  want it to compare first, since it is most likely to be different.
+
+      for Item of Item_Set.Set loop
+         if In_Kernel (Grammar, Descriptor, Item) then
+            Item_Set.Tree_Node.Key.Append (Unsigned_16 (Item.Prod.LHS));
+            Item_Set.Tree_Node.Key.Append (Unsigned_16 (Item.Prod.RHS));
+            Item_Set.Tree_Node.Key.Append (Unsigned_16 (Item.Dot));
+            if Include_Lookaheads then
+               declare
+                  Temp : constant Lookahead_Int_16 := To_Int_16 
(Item.Lookaheads);
+               begin
+                  --  This is faster than scanning the 128 booleans to get
+                  --  the token_ids, and the eight words are often shorter
+                  --  than that list would be.
+                  Item_Set.Tree_Node.Key.Append (Temp.Word_0);
+                  Item_Set.Tree_Node.Key.Append (Temp.Word_1);
+                  Item_Set.Tree_Node.Key.Append (Temp.Word_2);
+                  Item_Set.Tree_Node.Key.Append (Temp.Word_3);
+                  Item_Set.Tree_Node.Key.Append (Temp.Word_4);
+                  Item_Set.Tree_Node.Key.Append (Temp.Word_5);
+                  Item_Set.Tree_Node.Key.Append (Temp.Word_6);
+                  Item_Set.Tree_Node.Key.Append (Temp.Word_7);
+               end;
+            end if;
+         end if;
+      end loop;
 
-   function Find
-     (New_Item_Set     : in Item_Set;
-      Item_Set_Tree    : in Item_Set_Trees.Tree;
-      Match_Lookaheads : in Boolean)
-     return Unknown_State_Index
+      Item_Set.Tree_Node.Hash := Hash_Sum_32 (Item_Set.Tree_Node.Key, Rows);
+   end Compute_Key_Hash;
+
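To make the conversion above concrete: the following stand-alone sketch (not WisiToken code; the type names and the array-of-words layout are simplified stand-ins for Lookahead and Lookahead_Int_16) shows a packed 128-bit Boolean array reinterpreted as eight 16-bit words, which is what Compute_Key_Hash appends to the key:

with Ada.Text_IO;
with Ada.Unchecked_Conversion;
with Interfaces;
procedure Lookahead_Words_Sketch is
   use Interfaces;

   --  Simplified stand-ins; the real code indexes Lookahead by Token_ID
   --  and uses a record of eight Unsigned_16 components.
   type Lookahead is array (0 .. 127) of Boolean with Pack;
   for Lookahead'Size use 128;

   type Lookahead_Words is array (0 .. 7) of Unsigned_16;
   for Lookahead_Words'Size use 128;

   function To_Words is new Ada.Unchecked_Conversion (Lookahead, Lookahead_Words);

   Set : Lookahead := (others => False);
begin
   Set (3)  := True;
   Set (20) := True;

   --  Eight words, appended to the key instead of scanning 128 booleans.
   for Word of To_Words (Set) loop
      Ada.Text_IO.Put (Unsigned_16'Image (Word));
   end loop;
   Ada.Text_IO.New_Line;
end Lookahead_Words_Sketch;
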
+   function Hash_Sum_32 (Key : in Item_Set_Tree_Key; Rows : in Positive) 
return Positive
    is
-      use all type Item_Set_Trees.Cursor;
+      use Interfaces;
+      Accum : Unsigned_32 := 0;
+   begin
+      for I of Key loop
+         Accum := @ + Unsigned_32 (I);
+      end loop;
 
-      Tree_It    : constant Item_Set_Trees.Iterator := Item_Set_Trees.Iterate 
(Item_Set_Tree);
-      Key        : constant Item_Set_Tree_Key       := To_Item_Set_Tree_Key
-        (New_Item_Set, Include_Lookaheads => Match_Lookaheads);
-      Found_Tree : constant Item_Set_Trees.Cursor   := Tree_It.Find (Key);
+      Accum := 1 + (Accum mod Unsigned_32 (Rows));
+      return Positive (Accum);
+   end Hash_Sum_32;
+
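The row computation itself is simple; this stand-alone sketch (illustrative only, with a made-up key and row count) reproduces the Hash_Sum_32 scheme: sum the 16-bit words of the key, reduce modulo the number of rows, and shift to a 1-based row index:

with Ada.Text_IO;
with Interfaces;
procedure Hash_Sum_Sketch is
   use Interfaces;

   type Key_Array is array (Positive range <>) of Unsigned_16;

   --  Same scheme as Hash_Sum_32 above; modular arithmetic wraps on
   --  overflow, and the result is always in 1 .. Rows.
   function Bucket (Key : in Key_Array; Rows : in Positive) return Positive
   is
      Accum : Unsigned_32 := 0;
   begin
      for Word of Key loop
         Accum := Accum + Unsigned_32 (Word);
      end loop;
      return Positive (1 + (Accum mod Unsigned_32 (Rows)));
   end Bucket;

   --  Made-up key: item count, LHS, RHS, Dot, then eight lookahead words.
   Key : constant Key_Array := (1, 3, 0, 2, 16#0009#, 0, 0, 0, 0, 0, 0, 0);
begin
   Ada.Text_IO.Put_Line ("row:" & Integer'Image (Bucket (Key, Rows => 251)));
end Hash_Sum_Sketch;
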
+   function To_Item_Set_Tree_Hash (Node : in Item_Set_Tree_Node; Rows : in 
Positive) return Positive
+   is
+      pragma Unreferenced (Rows);
    begin
-      if Found_Tree = Item_Set_Trees.No_Element then
-         return Unknown_State;
-      else
-         return Item_Set_Tree (Found_Tree).State;
-      end if;
-   end Find;
+      return Node.Hash;
+   end To_Item_Set_Tree_Hash;
 
    procedure Add
      (Grammar            : in     WisiToken.Productions.Prod_Arrays.Vector;
-      New_Item_Set       : in     Item_Set;
-      Item_Set_Vector    : in out Item_Set_List;
-      Item_Set_Tree      : in out Item_Set_Trees.Tree;
+      New_Item_Set       : in out Item_Set;
+      Item_Set_List      : in out LR1_Items.Item_Set_List;
+      Item_Set_Tree      : in out LR1_Items.Item_Set_Tree;
       Descriptor         : in     WisiToken.Descriptor;
+      Hash_Table_Rows    : in     Positive;
       Include_Lookaheads : in     Boolean)
    is
       use Item_Set_Trees;
-      Key : constant Item_Set_Tree_Key := To_Item_Set_Tree_Key (New_Item_Set, 
Include_Lookaheads);
    begin
-      Item_Set_Vector.Append (New_Item_Set);
-      Item_Set_Vector (Item_Set_Vector.Last_Index).Dot_IDs := Get_Dot_IDs 
(Grammar, New_Item_Set.Set, Descriptor);
-      Item_Set_Tree.Insert ((Key, New_Item_Set.State));
+      Compute_Key_Hash (New_Item_Set, Hash_Table_Rows, Grammar, Descriptor, 
Include_Lookaheads);
+      Item_Set_List.Append (New_Item_Set);
+      Item_Set_List (Item_Set_List.Last_Index).Dot_IDs := Get_Dot_IDs 
(Grammar, New_Item_Set.Set, Descriptor);
+      Item_Set_Tree.Insert (New_Item_Set.Tree_Node, Duplicate => SAL.Error);
    end Add;
 
    function Is_In
      (Item      : in Goto_Item;
-      Goto_List : in Goto_Item_Lists.List)
+      Goto_List : in Goto_Item_List)
      return Boolean
    is begin
       for List_Item of Goto_List loop
@@ -333,8 +379,9 @@ package body WisiToken.Generate.LR1_Items is
       Descriptor              : in WisiToken.Descriptor)
      return Item_Set
    is
-      use all type Item_Lists.Cursor;
       use Token_ID_Arrays;
+      use WisiToken.Productions;
+      use all type Item_Lists.Cursor;
 
       --  [dragon] algorithm 4.9 pg 231; figure 4.38 pg 232; procedure 
"closure"
       --
@@ -349,25 +396,30 @@ package body WisiToken.Generate.LR1_Items is
       loop
          declare
             Item : LR1_Items.Item renames I.Set (Item_I);
-            Dot  : constant Token_ID_Arrays.Cursor :=
-              WisiToken.Productions.Constant_Ref_RHS (Grammar, 
Item.Prod).Tokens.To_Cursor (Item.Dot);
+            Item_Tokens : Token_ID_Arrays.Vector renames Constant_Ref_RHS 
(Grammar, Item.Prod).Tokens;
+            Item_Dot_ID : constant Token_ID :=
+              (if Item.Dot in Item_Tokens.First_Index .. Item_Tokens.Last_Index
+               then Item_Tokens (Item.Dot)
+               else Invalid_Token_ID);
          begin
             --  An item has the structure [A -> alpha Dot B Beta, a].
             --
             --  If B is a nonterminal, find its productions and place
             --  them in the set with lookaheads from FIRST(Beta a).
-            if Has_Element (Dot) and then
-              Element (Dot) in Descriptor.First_Nonterminal .. 
Descriptor.Last_Nonterminal
+
+            if Item_Dot_ID /= Invalid_Token_ID and
+              Item_Dot_ID in Descriptor.First_Nonterminal .. 
Descriptor.Last_Nonterminal
             then
                declare
-                  Prod : WisiToken.Productions.Instance renames Grammar 
(Element (Dot));
+                  Prod : WisiToken.Productions.Instance renames Grammar 
(Item_Dot_ID);
                begin
                   For_Each_RHS :
                   for J in Prod.RHSs.First_Index .. Prod.RHSs.Last_Index loop
                      declare
                         RHS  : WisiToken.Productions.Right_Hand_Side renames 
Prod.RHSs (J);
                         P_ID : constant Production_ID := (Prod.LHS, J);
-                        Beta : Token_ID_Arrays.Cursor := Next (Dot); -- tokens 
after nonterminal, possibly null
+                        Beta : Integer := (if Item.Dot = 
Item_Tokens.Last_Index then No_Index else Item.Dot + 1);
+                        --  Tokens after Item nonterminal, null if No_Index
                      begin
                         --  Compute FIRST (<tail of right hand side> a); loop
                         --  until find a terminal, a nonterminal that
@@ -376,30 +428,31 @@ package body WisiToken.Generate.LR1_Items is
 
                         First_Tail :
                         loop
-                           if not Has_Element (Beta) then
+                           if Beta = No_Index then
                               --  Use FIRST (a); a = Item.Lookaheads.
                               --  Lookaheads are all terminals, so
                               --  FIRST (a) = a.
                               Added_Item := Added_Item or
-                                Merge (P_ID, To_Index (RHS.Tokens.First), 
Item.Lookaheads.all, I);
+                                Merge (P_ID, RHS.Tokens.First_Index 
(No_Index_If_Empty => True), Item.Lookaheads, I);
                               exit First_Tail;
 
-                           elsif Element (Beta) in Descriptor.First_Terminal 
.. Descriptor.Last_Terminal then
+                           elsif Item_Tokens (Beta) in 
Descriptor.First_Terminal .. Descriptor.Last_Terminal then
                               --  FIRST (Beta) = Beta
                               Added_Item := Added_Item or Merge
-                                (P_ID, To_Index (RHS.Tokens.First), 
To_Lookahead (Element (Beta), Descriptor), I);
+                                (P_ID, RHS.Tokens.First_Index 
(No_Index_If_Empty => True), Item_Tokens (Beta), I);
                               exit First_Tail;
 
                            else
                               --  Beta is a nonterminal; use FIRST (Beta)
-                              for Terminal of First_Terminal_Sequence (Element 
(Beta)) loop
+                              for Terminal of First_Terminal_Sequence 
(Item_Tokens (Beta)) loop
                                  Added_Item := Added_Item or
-                                   Merge (P_ID, To_Index (RHS.Tokens.First), 
To_Lookahead (Terminal, Descriptor), I);
+                                   Merge (P_ID, RHS.Tokens.First_Index 
(No_Index_If_Empty => True), Terminal, I);
                               end loop;
 
-                              if Has_Empty_Production (Element (Beta)) then
+                              if Has_Empty_Production (Item_Tokens (Beta)) then
                                  --  Process the next token in the tail, or "a"
-                                 Beta := Next (Beta);
+                                 Beta := (if Beta = Item_Tokens.Last_Index 
then No_Index else Beta + 1);
+
                               else
                                  exit First_Tail;
                               end if;
@@ -416,13 +469,8 @@ package body WisiToken.Generate.LR1_Items is
 
             Item_I := I.Set.First;
             Added_Item := False;
-
-            if Trace_Generate_Table > Extra then
-               Ada.Text_IO.Put_Line ("  closure:");
-               Put (Grammar, Descriptor, I);
-            end if;
          else
-            Item_I := Item_Lists.Next (Item_I);
+            Item_Lists.Next (Item_I);
          end if;
       end loop;
 
@@ -460,8 +508,8 @@ package body WisiToken.Generate.LR1_Items is
          else
             Result := Result & " ";
          end if;
-         Result := Result & Image (Element (I), Descriptor);
-         Next (I);
+         Result := Result & Image (RHS.Tokens (I), Descriptor);
+         RHS.Tokens.Next (I);
       end loop;
 
       if Item.Dot = No_Index then
@@ -469,7 +517,7 @@ package body WisiToken.Generate.LR1_Items is
       end if;
 
       if Show_Lookaheads then
-         Result := Result & ", " & Lookahead_Image (Item.Lookaheads.all, 
Descriptor);
+         Result := Result & ", " & Lookahead_Image (Item.Lookaheads, 
Descriptor);
       end if;
 
       return Ada.Strings.Unbounded.To_String (Result);
@@ -486,7 +534,7 @@ package body WisiToken.Generate.LR1_Items is
 
    procedure Put
      (Descriptor : in WisiToken.Descriptor;
-      List       : in Goto_Item_Lists.List)
+      List       : in Goto_Item_List)
    is
       use Ada.Text_IO;
    begin
@@ -524,8 +572,8 @@ package body WisiToken.Generate.LR1_Items is
    is
       use Ada.Text_IO;
    begin
-      if Item.State /= Unknown_State then
-         Put_Line ("State" & Unknown_State_Index'Image (Item.State) & ":");
+      if Item.Tree_Node.State /= Unknown_State then
+         Put_Line ("State" & Unknown_State_Index'Image (Item.Tree_Node.State) 
& ":");
       end if;
 
       Put (Grammar, Descriptor, Item.Set, Show_Lookaheads, Kernel_Only);
diff --git a/wisitoken-generate-lr1_items.ads b/wisitoken-generate-lr1_items.ads
index 43e2939fc0..cfc12447e1 100644
--- a/wisitoken-generate-lr1_items.ads
+++ b/wisitoken-generate-lr1_items.ads
@@ -2,7 +2,7 @@
 --
 --  Types and operations for LR(1) items.
 --
---  Copyright (C) 2003, 2008, 2013 - 2015, 2017 - 2020 Free Software 
Foundation, Inc.
+--  Copyright (C) 2003, 2008, 2013 - 2015, 2017 - 2021 Free Software 
Foundation, Inc.
 --
 --  This file is part of the WisiToken package.
 --
@@ -29,14 +29,16 @@ pragma License (Modified_GPL);
 
 with Interfaces;
 with SAL.Gen_Definite_Doubly_Linked_Lists_Sorted;
+with SAL.Gen_Unbounded_Definite_Hash_Tables;
 with SAL.Gen_Unbounded_Definite_Red_Black_Trees;
 with SAL.Gen_Unbounded_Definite_Vectors.Gen_Comparable;
 with WisiToken.Productions;
 package WisiToken.Generate.LR1_Items is
+   use all type Interfaces.Unsigned_16;
 
-   use all type Interfaces.Integer_16;
-
-   subtype Lookahead is Token_ID_Set;
+   subtype Lookahead_Index_Type is Token_ID range 0 .. 127;
+   type Lookahead is array (Lookahead_Index_Type) of Boolean with Pack;
+   for Lookahead'Size use 128;
    --  Picking a type for Lookahead is not straight-forward. The
    --  operations required are (called numbers are for LR1 generate
    --  ada_lite):
@@ -74,16 +76,15 @@ package WisiToken.Generate.LR1_Items is
    --
    --  We've tried:
    --
-   --  (1) Token_ID_Set (unconstrained array of boolean, allocated directly) - 
fastest
+   --  (1) Token_ID_Set (unconstrained array of boolean, allocated directly) - 
slower than (4)
    --
-   --     Allocates more memory than (2), but everything else is fast,
-   --     and it's not enough memory to matter.
+   --      Allocates more memory than (2), but everything else is fast,
+   --      and it's not enough memory to matter.
    --
-   --     Loop over lookaheads is awkward:
-   --     for tok_id in lookaheads'range loop
+   --      Loop over lookaheads is awkward:
+   --      for tok_id in lookaheads'range loop
    --        if lookaheads (tok_id) then
    --           ...
-   --     But apparently it's fast enough.
    --
    --  (2) Instantiation of SAL.Gen_Unbounded_Definite_Vectors 
(token_id_arrays) - slower than (1).
    --
@@ -92,23 +93,22 @@ package WisiToken.Generate.LR1_Items is
    --      does sort and insert internally. Insert is inherently slow.
    --
    --  (3) Instantiation of SAL.Gen_Definite_Doubly_Linked_Lists_Sorted - 
slower than (2)
+   --
+   --  (4) Fixed length constrained array of Boolean, packed to 128 bits - 
fastest
+   --      Big enough for Ada, Java, Python. Fastest because in large
+   --      grammars the time is dominated by Include, and GNAT optimizes it
+   --      to use register compare of 64 bits at a time.
+
+   Null_Lookahead : constant Lookahead := (others => False);
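A rough illustration of why option (4) is cheap; this is a simplified sketch, not the WisiToken declarations (the real Include also excludes Propagate_ID and takes a Descriptor). The predefined "or" and "=" on a packed Boolean array compile to a handful of word-sized operations:

with Ada.Text_IO;
procedure Lookahead_Merge_Sketch is
   --  Simplified stand-in; the real Lookahead is indexed by Token_ID.
   type Lookahead is array (0 .. 127) of Boolean with Pack;
   for Lookahead'Size use 128;

   --  Merge Value into Item; report whether anything new was added.
   procedure Include (Item : in out Lookahead; Value : in Lookahead; Added : out Boolean)
   is
      Merged : constant Lookahead := Item or Value;  --  predefined "or" on Boolean arrays
   begin
      Added := Merged /= Item;
      Item  := Merged;
   end Include;

   A, B  : Lookahead := (others => False);
   Added : Boolean;
begin
   A (5) := True;
   B (5) := True;
   B (9) := True;

   Include (A, B, Added);
   Ada.Text_IO.Put_Line ("added: " & Boolean'Image (Added));  --  TRUE; 9 was new
end Lookahead_Merge_Sketch;
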
 
    type Item is record
       Prod       : Production_ID;
-      Dot        : Token_ID_Arrays.Extended_Index := Token_ID_Arrays.No_Index; 
-- token after item Dot
-      Lookaheads : Token_ID_Set_Access := null;
-      --  Programmer must remember to copy Item.Lookaheads.all, not
-      --  Item.Lookaheads. Wrapping this in Ada.Finalization.Controlled
-      --  would just slow it down.
-      --
-      --  We don't free Lookaheads; we assume the user is running
-      --  wisi-generate, and not keeping LR1_Items around.
+      Dot        : Token_ID_Arrays.Extended_Index := Token_ID_Arrays.No_Index;
+      --  Token after item Dot. If after last token, value is No_Index.
+      Lookaheads : Lookahead                      := (others => False);
    end record;
 
-   function To_Lookahead (Item : in Token_ID; Descriptor : in 
WisiToken.Descriptor) return Lookahead;
-
-   function Contains (Item : in Lookahead; ID : in Token_ID) return Boolean
-     is (Item (ID));
+   function To_Lookahead (Item : in Token_ID) return Lookahead;
 
    function Lookahead_Image (Item : in Lookahead; Descriptor : in 
WisiToken.Descriptor) return String;
    --  Returns the format used in parse table output.
@@ -133,7 +133,7 @@ package WisiToken.Generate.LR1_Items is
      (Item  : in out LR1_Items.Item;
       Value : in     Lookahead;
       Added :    out Boolean);
-   --  Add Value to Item.Lookahead, if not already present.
+   --  Add Value to Item.Lookahead.
    --
    --  Added is True if Value was not already present.
    --
@@ -142,38 +142,78 @@ package WisiToken.Generate.LR1_Items is
    procedure Include
      (Item       : in out LR1_Items.Item;
       Value      : in     Lookahead;
+      Added      :    out Boolean;
       Descriptor : in     WisiToken.Descriptor);
-   --  Add Value to Item.Lookahead. Does not check if already present.
-   --  Excludes Propagate_ID.
+   --  Add Value to Item.Lookahead, excluding Propagate_ID.
 
    procedure Include
      (Item       : in out LR1_Items.Item;
       Value      : in     Lookahead;
-      Added      :    out Boolean;
       Descriptor : in     WisiToken.Descriptor);
-   --  Add Value to Item.Lookahead.
+   --  Add Value to Item.Lookahead, excluding Propagate_ID.
 
    type Goto_Item is record
-      Symbol : Token_ID;
+      Symbol : Token_ID := Invalid_Token_ID;
       --  If Symbol is a terminal, this is a shift and goto state action.
       --  If Symbol is a non-terminal, this is a post-reduce goto state action.
-      State  : State_Index;
+      State  : State_Index := State_Index'Last;
    end record;
 
-   function Goto_Item_Compare (Left, Right : in Goto_Item) return 
SAL.Compare_Result is
-     (if Left.Symbol > Right.Symbol then SAL.Greater
-      elsif Left.Symbol < Right.Symbol then SAL.Less
+   function Symbol (Item : in Goto_Item) return Token_ID is (Item.Symbol);
+   function Token_ID_Compare (Left, Right : in Token_ID) return 
SAL.Compare_Result is
+     (if Left > Right then SAL.Greater
+      elsif Left < Right then SAL.Less
       else SAL.Equal);
    --  Sort Goto_Item_Lists in ascending order of Symbol.
 
-   package Goto_Item_Lists is new SAL.Gen_Definite_Doubly_Linked_Lists_Sorted
-     (Goto_Item, Goto_Item_Compare);
+   package Goto_Item_Arrays is new SAL.Gen_Unbounded_Definite_Vectors
+     (Positive_Index_Type, Goto_Item, (Token_ID'Last, State_Index'Last));
+   --  For temporary lists
+
+   package Goto_Item_Lists is new SAL.Gen_Unbounded_Definite_Red_Black_Trees
+     (Element_Type => Goto_Item,
+      Key_Type     => Token_ID,
+      Key          => Symbol,
+      Key_Compare  => Token_ID_Compare);
+   subtype Goto_Item_List is Goto_Item_Lists.Tree;
+   --  Goto_Item_Lists don't get very long, so red_black_trees is only
+   --  barely faster than doubly_linked_lists_sorted.
+
+   function Get_Dot_IDs
+     (Grammar    : in WisiToken.Productions.Prod_Arrays.Vector;
+      Set        : in Item_Lists.List;
+      Descriptor : in WisiToken.Descriptor)
+     return Token_ID_Arrays.Vector;
+
+   package Unsigned_16_Arrays is new SAL.Gen_Unbounded_Definite_Vectors
+     (Positive, Interfaces.Unsigned_16, Default_Element => 
Interfaces.Unsigned_16'Last);
+   function Compare_Unsigned_16 (Left, Right : in Interfaces.Unsigned_16) 
return SAL.Compare_Result is
+     (if Left > Right then SAL.Greater
+      elsif Left < Right then SAL.Less
+      else SAL.Equal);
+
+   package Unsigned_16_Arrays_Comparable is new 
Unsigned_16_Arrays.Gen_Comparable (Compare_Unsigned_16);
+
+   subtype Item_Set_Tree_Key is Unsigned_16_Arrays_Comparable.Vector;
+   --  We want a key that is fast to compare, and has enough info to
+   --  significantly speed the search for an item set. So we convert all
+   --  relevant data in an item into a string of integers. We need 16 bit
+   --  because Ada token_ids max is 332. LR1 keys include lookaheads,
+   --  LALR keys do not.
+
+   Empty_Key : Item_Set_Tree_Key renames 
Unsigned_16_Arrays_Comparable.Empty_Vector;
+
+   type Item_Set_Tree_Node is record
+      Key   : Item_Set_Tree_Key   := 
Unsigned_16_Arrays_Comparable.Empty_Vector;
+      Hash  : Positive            := 1;
+      State : Unknown_State_Index := Unknown_State;
+   end record;
 
    type Item_Set is record
       Set       : Item_Lists.List;
-      Goto_List : Goto_Item_Lists.List;
+      Goto_List : Goto_Item_List;
       Dot_IDs   : Token_ID_Arrays.Vector;
-      State     : Unknown_State_Index := Unknown_State;
+      Tree_Node : Item_Set_Tree_Node; --  Avoids building an aggregate to 
insert in the tree.
    end record;
 
    function Filter
@@ -215,68 +255,50 @@ package WisiToken.Generate.LR1_Items is
    package Item_Set_Arrays is new SAL.Gen_Unbounded_Definite_Vectors
      (State_Index, Item_Set, Default_Element => (others => <>));
    subtype Item_Set_List is Item_Set_Arrays.Vector;
+   --  Item_Set_Arrays.Vector holds state item sets indexed by state, for
+   --  iterating in state order. See also Item_Set_Trees.
 
-   package Int_Arrays is new SAL.Gen_Unbounded_Definite_Vectors
-     (Positive, Interfaces.Integer_16, Default_Element => 
Interfaces.Integer_16'Last);
-   function Compare_Integer_16 (Left, Right : in Interfaces.Integer_16) return 
SAL.Compare_Result is
-     (if Left > Right then SAL.Greater
-      elsif Left < Right then SAL.Less
-      else SAL.Equal);
-
-   package Int_Arrays_Comparable is new Int_Arrays.Gen_Comparable 
(Compare_Integer_16);
-
-   subtype Item_Set_Tree_Key is Int_Arrays_Comparable.Vector;
-   --  We want a key that is fast to compare, and has enough info to
-   --  significantly speed the search for an item set. So we convert all
-   --  relevant data in an item into a string of integers. We need 16 bit
-   --  because Ada token_ids max is 332. LR1 keys include lookaheads,
-   --  LALR keys do not.
+   function Hash_Sum_32 (Key : in Item_Set_Tree_Key; Rows : in Positive) 
return Positive
+   with Post => Hash_Sum_32'Result <= Rows;
 
-   type Item_Set_Tree_Node is record
-      Key   : Item_Set_Tree_Key;
-      State : Unknown_State_Index;
-   end record;
+   procedure Compute_Key_Hash
+     (Item_Set           : in out LR1_Items.Item_Set;
+      Rows               : in     Positive;
+      Grammar            : in     WisiToken.Productions.Prod_Arrays.Vector;
+      Descriptor         : in     WisiToken.Descriptor;
+      Include_Lookaheads : in     Boolean);
 
-   function To_Item_Set_Tree_Key
-     (Item_Set           : in LR1_Items.Item_Set;
-      Include_Lookaheads : in Boolean)
-     return Item_Set_Tree_Key;
+   function To_Item_Set_Tree_Key (Node : in Item_Set_Tree_Node) return 
Item_Set_Tree_Key
+   is (Node.Key);
 
-   function To_Item_Set_Tree_Key (Node : in Item_Set_Tree_Node) return 
Item_Set_Tree_Key is
-     (Node.Key);
+   function To_Item_Set_Tree_Hash (Node : in Item_Set_Tree_Node; Rows : in 
Positive) return Positive;
 
-   package Item_Set_Trees is new SAL.Gen_Unbounded_Definite_Red_Black_Trees
+   package Item_Set_Trees is new SAL.Gen_Unbounded_Definite_Hash_Tables
      (Element_Type => Item_Set_Tree_Node,
       Key_Type     => Item_Set_Tree_Key,
       Key          => To_Item_Set_Tree_Key,
-      Key_Compare  => Int_Arrays_Comparable.Compare);
-   --  Item_Set_Arrays.Vector holds state item sets indexed by state, for
-   --  iterating in state order. Item_Set_Trees.Tree holds lists of state
-   --  indices sorted by LR1 item info, for fast Find in LR1_Item_Sets
-   --  and LALR_Kernels.
+      Key_Compare  => Unsigned_16_Arrays_Comparable.Compare,
+      Hash         => To_Item_Set_Tree_Hash);
+   --  Item_Set_Trees holds state indices sorted by Item_Set_Tree_Key,
+   --  for fast Find in LR1_Item_Sets and LALR_Kernels. See also
+   --  Item_Set_Arrays.
 
-   function Find
-     (New_Item_Set     : in Item_Set;
-      Item_Set_Tree    : in Item_Set_Trees.Tree;
-      Match_Lookaheads : in Boolean)
-     return Unknown_State_Index;
-   --  Return the State of an element in Item_Set_Tree matching
-   --  New_Item_Set, Unknown_State if not found.
-   --
-   --  Match_Lookaheads is True in LR1_Generate.
+   subtype Item_Set_Tree is Item_Set_Trees.Hash_Table;
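The pairing of Item_Set_List and Item_Set_Tree can be pictured with the standard containers; this is an analogy only (the generated code uses the SAL packages above). A vector gives iteration in state order, while a hash map keyed by the encoded item-set key answers "does this state already exist" without scanning every state:

with Ada.Containers.Indefinite_Hashed_Maps;
with Ada.Containers.Indefinite_Vectors;
with Ada.Strings.Hash;
with Ada.Text_IO;
procedure Two_Index_Sketch is
   --  Strings stand in for encoded item-set keys.
   package State_Vectors is new Ada.Containers.Indefinite_Vectors (Natural, String);
   package Key_Maps is new Ada.Containers.Indefinite_Hashed_Maps
     (Key_Type        => String,
      Element_Type    => Natural,
      Hash            => Ada.Strings.Hash,
      Equivalent_Keys => "=");

   States : State_Vectors.Vector;  --  indexed by state, for in-order iteration
   By_Key : Key_Maps.Map;          --  key -> state, for fast Find

   procedure Add (Key : in String) is
   begin
      States.Append (Key);
      By_Key.Insert (Key, States.Last_Index);
   end Add;
begin
   Add ("kernel A");
   Add ("kernel B");
   Ada.Text_IO.Put_Line ("state of kernel B:" & Integer'Image (By_Key ("kernel B")));
end Two_Index_Sketch;
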
 
    procedure Add
      (Grammar            : in     WisiToken.Productions.Prod_Arrays.Vector;
-      New_Item_Set       : in     Item_Set;
-      Item_Set_Vector    : in out Item_Set_List;
-      Item_Set_Tree      : in out Item_Set_Trees.Tree;
+      New_Item_Set       : in out Item_Set;
+      Item_Set_List      : in out LR1_Items.Item_Set_List;
+      Item_Set_Tree      : in out LR1_Items.Item_Set_Tree;
       Descriptor         : in     WisiToken.Descriptor;
-      Include_Lookaheads : in     Boolean);
+      Hash_Table_Rows    : in     Positive;
+      Include_Lookaheads : in     Boolean)
+   with Pre => New_Item_Set.Tree_Node.State = Item_Set_List.Last_Index + 1;
+   --  Set New_Item_Set.Dot_IDs; add New_Item_Set to Item_Set_List and
+   --  Item_Set_Tree.
 
    function Is_In
      (Item      : in Goto_Item;
-      Goto_List : in Goto_Item_Lists.List)
+      Goto_List : in Goto_Item_List)
      return Boolean;
    --  Return True if a goto on Symbol to State is found in Goto_List
 
@@ -324,7 +346,7 @@ package WisiToken.Generate.LR1_Items is
 
    procedure Put
      (Descriptor : in WisiToken.Descriptor;
-      List       : in Goto_Item_Lists.List);
+      List       : in Goto_Item_List);
    procedure Put
      (Grammar         : in WisiToken.Productions.Prod_Arrays.Vector;
       Descriptor      : in WisiToken.Descriptor;
diff --git a/wisitoken-generate-packrat.adb b/wisitoken-generate-packrat.adb
index c50b1ed4c6..cd514cf8c7 100644
--- a/wisitoken-generate-packrat.adb
+++ b/wisitoken-generate-packrat.adb
@@ -2,7 +2,7 @@
 --
 --  See spec.
 --
---  Copyright (C) 2018 Free Software Foundation, Inc.
+--  Copyright (C) 2018, 2021 Free Software Foundation, Inc.
 --
 --  This library is free software;  you can redistribute it and/or modify it
 --  under terms of the  GNU General Public License  as published by the Free
@@ -99,8 +99,8 @@ package body WisiToken.Generate.Packrat is
             --  We only implement the simplest left recursion solution ([warth
             --  2008] figure 3); [tratt 2010] section 6.3 gives this condition 
for
             --  that to be valid.
-            --  FIXME: not quite? definite direct right recursive ok?
-            --  FIXME: for indirect left recursion, need potential indirect 
right recursive check?
+            --  FIXME packrat: not quite? definite direct right recursive ok?
+            --  FIXME packrat: for indirect left recursion, need potential 
indirect right recursive check?
             Put_Error
               (Error_Message
                  (-Data.Source_File_Name, Data.Source_Line_Map 
(Prod.LHS).Line, "'" & Image (Prod.LHS, Descriptor) &
@@ -118,7 +118,10 @@ package body WisiToken.Generate.Packrat is
       end loop;
    end Check_Recursion;
 
-   procedure Check_RHS_Order (Data : in Packrat.Data; Descriptor : in 
WisiToken.Descriptor)
+   procedure Check_RHS_Order
+     (Data       : in Packrat.Data;
+      Descriptor : in WisiToken.Descriptor;
+      Suppress   : in WisiToken.BNF.String_Pair_Lists.List)
    is
       use all type Ada.Containers.Count_Type;
    begin
@@ -139,19 +142,41 @@ package body WisiToken.Generate.Packrat is
             declare
                Cur : Token_ID_Arrays.Vector renames Prod.RHSs (I).Tokens;
             begin
-               --  Shared prefix; longer must be first
+               --  Shared prefix; longer must be first. Sometimes this is a 
useful
+               --  message, for example:
+               --
+               --  NAME
+               --    : IDENTIFIER
+               --    | IDENTIFIER TICK IDENTIFIER
+               --
+               --  The second will never get a chance to match. Other times it 
is
+               --  wrong, because other tokens distinguish the RHSs:
+               --
+               --  Object_Decl
+               --    : IDENTIFIER ':' IDENTIFIER ';'
+               --    | IDENTIFIER ':' IDENTIFIER := expression ';'
+               --
+               --  So we make it a warning, and provide %suppress.
+               --  See test/bnf/object_declaration for a working example.
+
                for J in Prod.RHSs.First_Index .. I - 1 loop
                   declare
                      Prev : Token_ID_Arrays.Vector renames Prod.RHSs 
(J).Tokens;
                      K    : constant Natural := Shared_Prefix (Prev, Cur);
                   begin
                      if K > 0 and Prev.Length < Cur.Length then
-                        Put_Error
-                          (Error_Message
-                             (-Data.Source_File_Name, Data.Source_Line_Map 
(Prod.LHS).RHS_Map (I),
-                              "right hand side" & Integer'Image (I) & " in " & 
Image (Prod.LHS, Descriptor) &
-                                " may never match; it shares a prefix with a 
shorter previous rhs" &
-                                Integer'Image (J) & "."));
+                        if not Suppress.Contains
+                          ((Name => +Descriptor.Image (Prod.LHS).all,
+                            Value => +"may never match; it shares a prefix"))
+                        then
+                           Put_Warning
+                             (Error_Message
+                                (-Data.Source_File_Name, Data.Source_Line_Map 
(Prod.LHS).RHS_Map (I),
+                                 "right hand side" & Integer'Image (I) & " in 
" & Image (Prod.LHS, Descriptor) &
+                                   " may never match; it shares a prefix with 
a shorter previous rhs" &
+                                   Integer'Image (J) & ".",
+                                 Warning => True));
+                        end if;
                      end if;
                   end;
                end loop;
@@ -209,10 +234,13 @@ package body WisiToken.Generate.Packrat is
       end loop;
    end Check_RHS_Order;
 
-   procedure Check_All (Data : in Packrat.Data; Descriptor : in 
WisiToken.Descriptor)
+   procedure Check_All
+     (Data       : in Packrat.Data;
+      Descriptor : in WisiToken.Descriptor;
+      Suppress   : in WisiToken.BNF.String_Pair_Lists.List)
    is begin
       Check_Recursion (Data, Descriptor);
-      Check_RHS_Order (Data, Descriptor);
+      Check_RHS_Order (Data, Descriptor, Suppress);
    end Check_All;
 
    function Potential_Direct_Left_Recursive
@@ -222,7 +250,7 @@ package body WisiToken.Generate.Packrat is
    is
       subtype Nonterminal is Token_ID range Grammar.First_Index .. 
Grammar.Last_Index;
    begin
-      --  FIXME: this duplicates the computation of First; if keep First,
+      --  FIXME packrat: this duplicates the computation of Data.First; if 
keep Data.First,
       --  change this to use it.
       return Result : Token_ID_Set (Nonterminal) := (others => False) do
          for Prod of Grammar loop
diff --git a/wisitoken-generate-packrat.ads b/wisitoken-generate-packrat.ads
index 17bf03ef32..2bb50bcc5f 100644
--- a/wisitoken-generate-packrat.ads
+++ b/wisitoken-generate-packrat.ads
@@ -10,7 +10,7 @@
 --
 --  See wisitoken-parse-packrat.ads.
 --
---  Copyright (C) 2018 Free Software Foundation, Inc.
+--  Copyright (C) 2018, 2022 Free Software Foundation, Inc.
 --
 --  This library is free software;  you can redistribute it and/or modify it
 --  under terms of the  GNU General Public License  as published by the Free
@@ -25,6 +25,7 @@
 
 pragma License (Modified_GPL);
 
+with WisiToken.BNF;
 package WisiToken.Generate.Packrat is
 
    type Data (First_Terminal, First_Nonterminal, Last_Nonterminal : Token_ID) 
is tagged
@@ -53,15 +54,23 @@ package WisiToken.Generate.Packrat is
    procedure Check_Recursion (Data : in Packrat.Data; Descriptor : in 
WisiToken.Descriptor);
    --  Check that any rule recursion present is supported.
 
-   procedure Check_RHS_Order (Data : in Packrat.Data; Descriptor : in 
WisiToken.Descriptor);
+   procedure Check_RHS_Order
+     (Data       : in Packrat.Data;
+      Descriptor : in WisiToken.Descriptor;
+      Suppress   : in WisiToken.BNF.String_Pair_Lists.List);
    --  For each production, check that right hand sides that share
    --  prefixes have the longest right hand side first, and that any
    --  empty right hand side is last.
    --
-   --  Violations output a message to Ada.Text_IO.Standard_Error, and
-   --  set WisiToken.Generate.Error True.
+   --  Violations output a message to Ada.Text_IO.Standard_Error, and set
+   --  WisiToken.Generate.Error or WisiToken.Generate.Warning True.
+   --  Suppress allows suppressing warnings; see %suppress in wisitoken
+   --  user guide.
 
-   procedure Check_All  (Data : in Packrat.Data; Descriptor : in 
WisiToken.Descriptor);
+   procedure Check_All
+     (Data       : in Packrat.Data;
+      Descriptor : in WisiToken.Descriptor;
+      Suppress   : in WisiToken.BNF.String_Pair_Lists.List);
    --  Run all the above checks.
    --
    --  Note that WisiToken.Generate.Check_Consistent is run in
diff --git a/wisitoken-generate.adb b/wisitoken-generate.adb
index c14077d228..59b744bf81 100644
--- a/wisitoken-generate.adb
+++ b/wisitoken-generate.adb
@@ -2,7 +2,7 @@
 --
 --  See spec.
 --
---  Copyright (C) 2018 - 2020 Free Software Foundation, Inc.
+--  Copyright (C) 2018 - 2022 Free Software Foundation, Inc.
 --
 --  This library is free software;  you can redistribute it and/or modify it
 --  under terms of the  GNU General Public License  as published by the Free
@@ -18,7 +18,6 @@
 pragma License (Modified_GPL);
 
 with Ada.Directories;
-with Ada.Real_Time;
 with Ada.Strings.Fixed;
 with Ada.Text_IO;
 package body WisiToken.Generate is
@@ -26,7 +25,8 @@ package body WisiToken.Generate is
    function Error_Message
      (File_Name : in String;
       File_Line : in Line_Number_Type;
-      Message   : in String)
+      Message   : in String;
+      Warning   : in Boolean := False)
      return String
    is
       use Ada.Directories;
@@ -34,7 +34,11 @@ package body WisiToken.Generate is
       use Ada.Strings;
    begin
       return Simple_Name (File_Name) & ":" &
-        Trim (Line_Number_Type'Image (File_Line), Left) & ":0: " & Message;
+        Trim (Line_Number_Type'Image (File_Line), Left) & ":1: " &
+        (if Warning then "warning: " else "error: ") &
+        Message;
+      --  Column numbers are 1-origin in GNU error messages [gnu_coding];
+      --  warning/error is not in [gnu_coding], but is consistent with gcc.
    end Error_Message;
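For illustration only (the grammar file name and message are made up), the new format yields GNU-style diagnostics such as "ada_lite.wy:12:1: warning: ..."; a stand-alone equivalent of the string assembly above, minus the Simple_Name call:

with Ada.Strings.Fixed;
with Ada.Text_IO;
procedure Gnu_Message_Sketch is
   use Ada.Strings;

   --  Mirrors Error_Message above: <file>:<line>:1: {error|warning}: <message>
   function Format
     (File_Name : in String;
      File_Line : in Positive;
      Message   : in String;
      Warning   : in Boolean := False)
     return String
   is (File_Name & ":" &
       Fixed.Trim (Positive'Image (File_Line), Left) & ":1: " &
       (if Warning then "warning: " else "error: ") &
       Message);
begin
   Ada.Text_IO.Put_Line (Format ("ada_lite.wy", 12, "rhs 2 may never match", Warning => True));
   --  ada_lite.wy:12:1: warning: rhs 2 may never match
end Gnu_Message_Sketch;
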
 
    procedure Put_Error (Message : in String)
@@ -43,6 +47,12 @@ package body WisiToken.Generate is
       Ada.Text_IO.Put_Line (Ada.Text_IO.Standard_Error, Message);
    end Put_Error;
 
+   procedure Put_Warning (Message : in String)
+   is begin
+      Warning := True;
+      Ada.Text_IO.Put_Line (Ada.Text_IO.Standard_Error, Message);
+   end Put_Warning;
+
    procedure Check_Consistent
      (Grammar          : in WisiToken.Productions.Prod_Arrays.Vector;
       Descriptor       : in WisiToken.Descriptor;
@@ -484,12 +494,10 @@ package body WisiToken.Generate is
    end Set_Grammar_Recursions;
 
    function Compute_Full_Recursion
-     (Grammar    : in out WisiToken.Productions.Prod_Arrays.Vector;
-      Descriptor : in     WisiToken.Descriptor)
+     (Grammar    : in WisiToken.Productions.Prod_Arrays.Vector;
+      Descriptor : in WisiToken.Descriptor)
      return Recursions
    is
-      Time_Start : constant Ada.Real_Time.Time := Ada.Real_Time.Clock;
-
       Graph : constant Grammar_Graphs.Graph := To_Graph (Grammar);
    begin
       return Result : Recursions :=
@@ -498,19 +506,6 @@ package body WisiToken.Generate is
       do
          Grammar_Graphs.Sort_Paths.Sort (Result.Recursions);
 
-         Set_Grammar_Recursions (Result, Grammar);
-
-         if Trace_Time then
-            declare
-               use Ada.Real_Time;
-               Time_End : constant Time := Clock;
-            begin
-               Ada.Text_IO.Put_Line
-                 (Ada.Text_IO.Standard_Error, "compute partial recursion 
time:" &
-                    Duration'Image (To_Duration (Time_End - Time_Start)));
-            end;
-         end if;
-
          if Trace_Generate_Minimal_Complete > Extra then
             Ada.Text_IO.New_Line;
             Ada.Text_IO.Put_Line ("Productions:");
@@ -525,13 +520,11 @@ package body WisiToken.Generate is
    end Compute_Full_Recursion;
 
    function Compute_Partial_Recursion
-     (Grammar    : in out WisiToken.Productions.Prod_Arrays.Vector;
-      Descriptor : in     WisiToken.Descriptor)
+     (Grammar    : in WisiToken.Productions.Prod_Arrays.Vector;
+      Descriptor : in WisiToken.Descriptor)
      return Recursions
    is
       use Grammar_Graphs;
-      Time_Start : constant Ada.Real_Time.Time := Ada.Real_Time.Clock;
-
       Graph      : constant Grammar_Graphs.Graph := To_Graph (Grammar);
       Components : constant Component_Lists.List := 
Strongly_Connected_Components
         (To_Adjancency (Graph), Non_Trivial_Only => True);
@@ -563,19 +556,6 @@ package body WisiToken.Generate is
             Result.Recursions.Append (Path);
          end;
 
-         Set_Grammar_Recursions (Result, Grammar);
-
-         if Trace_Time then
-            declare
-               use Ada.Real_Time;
-               Time_End : constant Time := Clock;
-            begin
-               Ada.Text_IO.Put_Line
-                 (Ada.Text_IO.Standard_Error, "compute full recursion time:" &
-                    Duration'Image (To_Duration (Time_End - Time_Start)));
-            end;
-         end if;
-
          if Trace_Generate_Minimal_Complete > Extra then
             Ada.Text_IO.New_Line;
             Ada.Text_IO.Put_Line ("Productions:");
diff --git a/wisitoken-generate.ads b/wisitoken-generate.ads
index a9c4e5a382..2fc5593126 100644
--- a/wisitoken-generate.ads
+++ b/wisitoken-generate.ads
@@ -12,7 +12,7 @@
 --
 --  See wisitoken.ads
 --
---  Copyright (C) 2018 - 2020 Free Software Foundation, Inc.
+--  Copyright (C) 2018 - 2020, 2022 Free Software Foundation, Inc.
 --
 --  This library is free software;  you can redistribute it and/or modify it
 --  under terms of the  GNU General Public License  as published by the Free
@@ -33,18 +33,23 @@ with SAL.Gen_Graphs;
 with WisiToken.Productions;
 package WisiToken.Generate is
 
-   Error : Boolean := False;
-   --  Set True by errors during grammar generation
+   Error   : Boolean := False;
+   Warning : Boolean := False;
+   --  Set True by errors/warnings during grammar generation
 
    function Error_Message
      (File_Name : in String;
       File_Line : in WisiToken.Line_Number_Type;
-      Message   : in String)
+      Message   : in String;
+      Warning   : in Boolean := False)
      return String;
 
    procedure Put_Error (Message : in String);
    --  Set Error True, output Message to Standard_Error
 
+   procedure Put_Warning (Message : in String);
+   --  Set Warning True, output Message to Standard_Error
+
    procedure Check_Consistent
      (Grammar          : in WisiToken.Productions.Prod_Arrays.Vector;
       Descriptor       : in WisiToken.Descriptor;
@@ -83,7 +88,7 @@ package WisiToken.Generate is
    --
    --  LALR, LR1 generate want First as both Token_Sequence_Arrays.Vector
    --  and Token_Array_Token_Set, Packrat wants Token_Array_Token_Set,
-   --  existing tests all use Token_Array_Token_Set. So for LR1 we use
+   --  existing tests all use Token_Array_Token_Set. So for LR we use
    --  To_Terminal_Sequence_Array.
 
    function To_Terminal_Sequence_Array
@@ -118,7 +123,8 @@ package WisiToken.Generate is
       Token_Index : Positive := Positive'Last;
    end record;
 
-   function Edge_Image (Edge : in Edge_Data) return String is (Trimmed_Image 
(Edge.RHS));
+   function Edge_Image (Edge : in Edge_Data) return String
+   is (Trimmed_Image (Edge.RHS) & "." & Trimmed_Image (Edge.Token_Index));
 
    type Base_Recursion_Index is range 0 .. Integer'Last;
    subtype Recursion_Index is Base_Recursion_Index range 1 .. 
Base_Recursion_Index'Last;
@@ -148,6 +154,8 @@ package WisiToken.Generate is
       --  path (I) are from path (I).
    end record;
 
+   Empty_Recursions : constant Recursions := (Full => False, Recursions => <>);
+
    package Recursion_Lists is new Ada.Containers.Doubly_Linked_Lists 
(Recursion_Index);
    function Image is new SAL.Ada_Containers.Gen_Doubly_Linked_Lists_Image
      (Recursion_Index, "=", Recursion_Lists, Trimmed_Image);
@@ -155,22 +163,23 @@ package WisiToken.Generate is
    function To_Graph (Grammar : in WisiToken.Productions.Prod_Arrays.Vector) 
return Grammar_Graphs.Graph;
 
    function Compute_Full_Recursion
-     (Grammar    : in out WisiToken.Productions.Prod_Arrays.Vector;
-      Descriptor : in     WisiToken.Descriptor)
+     (Grammar    : in WisiToken.Productions.Prod_Arrays.Vector;
+      Descriptor : in WisiToken.Descriptor)
      return Recursions;
-   --  Each element of result is a cycle in the grammar. Also sets
-   --  Recursive components in Grammar.
+   --  Each element of result is a cycle in the grammar.
 
    function Compute_Partial_Recursion
-     (Grammar    : in out WisiToken.Productions.Prod_Arrays.Vector;
-      Descriptor : in     WisiToken.Descriptor)
+     (Grammar    : in WisiToken.Productions.Prod_Arrays.Vector;
+      Descriptor : in WisiToken.Descriptor)
      return Recursions;
    --  Each element of the result contains all members of a non-trivial
    --  strongly connected component in the grammar, in arbitrary order.
    --  This is an approximation to the full recursion, when that is too
    --  hard to compute (ie for Java).
-   --
-   --  Also sets Recursive components in Grammar.
+
+   procedure Set_Grammar_Recursions
+     (Recursions : in     WisiToken.Generate.Recursions;
+      Grammar    : in out WisiToken.Productions.Prod_Arrays.Vector);
 
    ----------
    --  Indented text output. Mostly used for code generation in wisi,
diff --git a/wisitoken-in_parse_actions.adb b/wisitoken-in_parse_actions.adb
new file mode 100644
index 0000000000..e9e5d63ae3
--- /dev/null
+++ b/wisitoken-in_parse_actions.adb
@@ -0,0 +1,162 @@
+--  Abstract :
+--
+--  See spec.
+--
+--  Copyright (C) 2017 - 2022 Free Software Foundation, Inc.
+--
+--  This library is free software;  you can redistribute it and/or modify it
+--  under terms of the  GNU General Public License  as published by the Free
+--  Software  Foundation;  either version 3,  or (at your  option) any later
+--  version. This library is distributed in the hope that it will be useful,
+--  but WITHOUT ANY WARRANTY;  without even the implied warranty of MERCHAN-
+--  TABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+
+--  As a special exception under Section 7 of GPL version 3, you are granted
+--  additional permissions described in the GCC Runtime Library Exception,
+--  version 3.1, as published by the Free Software Foundation.
+
+pragma License (Modified_GPL);
+
+with Ada.Characters.Handling;
+with WisiToken.Lexer;
+package body WisiToken.In_Parse_Actions is
+
+   function Image
+     (Item       : in Syntax_Trees.In_Parse_Actions.Status;
+      Tree       : in Syntax_Trees.Tree'Class;
+      Error_Node : in Syntax_Trees.Valid_Node_Access)
+     return String
+   is
+      use WisiToken.Syntax_Trees;
+   begin
+      case Item.Label is
+      when Syntax_Trees.In_Parse_Actions.Ok =>
+         return Syntax_Trees.In_Parse_Actions.Status_Label'Image (Item.Label);
+      when Syntax_Trees.In_Parse_Actions.Error =>
+         declare
+            Begin_Node : constant Valid_Node_Access := Tree.Child (Error_Node, 
Item.Begin_Name);
+            End_Node   : constant Valid_Node_Access := Tree.Child (Error_Node, 
Item.End_Name);
+         begin
+            return '(' & Syntax_Trees.In_Parse_Actions.Status_Label'Image 
(Item.Label) & ", " &
+              Tree.Image (Begin_Node) & "'" & Tree.Lexer.Buffer_Text
+                (Tree.Byte_Region (Begin_Node, Trailing_Non_Grammar => False)) 
& "'," &
+              Tree.Image (End_Node) & "'" & Tree.Lexer.Buffer_Text
+                (Tree.Byte_Region (End_Node, Trailing_Non_Grammar => False)) & 
"')";
+         end;
+      end case;
+   end Image;
+
+   function Match_Names
+     (Tree         : in Syntax_Trees.Tree;
+      Tokens       : in Syntax_Trees.Recover_Token_Array;
+      Start_Index  : in Positive_Index_Type;
+      End_Index    : in Positive_Index_Type;
+      End_Optional : in Boolean)
+     return Syntax_Trees.In_Parse_Actions.Status
+   is
+      use Syntax_Trees;
+   begin
+      if Tree.Contains_Virtual_Terminal (Tokens (Start_Index)) or
+        Tree.Contains_Virtual_Terminal (Tokens (End_Index))
+      then
+         return (Label => Syntax_Trees.In_Parse_Actions.Ok);
+      end if;
+
+      declare
+         Start_Name_Region : constant Buffer_Region := Tree.Name (Tokens 
(Start_Index));
+         End_Name_Region   : constant Buffer_Region := Tree.Name (Tokens 
(End_Index));
+
+         function Equal return Boolean
+         is
+            use Ada.Characters.Handling;
+            Start_Name : constant String :=
+              (if Tree.Lexer.Descriptor.Case_Insensitive
+               then To_Lower (Tree.Lexer.Buffer_Text (Start_Name_Region))
+               else Tree.Lexer.Buffer_Text (Start_Name_Region));
+            End_Name  : constant String :=
+              (if Tree.Lexer.Descriptor.Case_Insensitive
+               then To_Lower (Tree.Lexer.Buffer_Text (End_Name_Region))
+               else Tree.Lexer.Buffer_Text (End_Name_Region));
+         begin
+            return Start_Name = End_Name;
+         end Equal;
+      begin
+
+         if End_Optional then
+            if End_Name_Region = Null_Buffer_Region then
+               return (Label => Syntax_Trees.In_Parse_Actions.Ok);
+
+            elsif Start_Name_Region = Null_Buffer_Region then
+               return (Syntax_Trees.In_Parse_Actions.Extra_Name_Error, 
Start_Index, End_Index);
+            else
+               if Equal then
+                  return (Label => Syntax_Trees.In_Parse_Actions.Ok);
+               else
+                  return (Syntax_Trees.In_Parse_Actions.Match_Names_Error, 
Start_Index, End_Index);
+               end if;
+            end if;
+
+         else
+            if Start_Name_Region = Null_Buffer_Region then
+               if End_Name_Region = Null_Buffer_Region then
+                  return (Label => Syntax_Trees.In_Parse_Actions.Ok);
+               else
+                  return (Syntax_Trees.In_Parse_Actions.Extra_Name_Error, 
Start_Index, End_Index);
+               end if;
+
+            elsif End_Name_Region = Null_Buffer_Region then
+               return (Syntax_Trees.In_Parse_Actions.Missing_Name_Error, 
Start_Index, End_Index);
+
+            else
+               if Equal then
+                  return (Label => Syntax_Trees.In_Parse_Actions.Ok);
+               else
+                  return (Syntax_Trees.In_Parse_Actions.Match_Names_Error, 
Start_Index, End_Index);
+               end if;
+            end if;
+         end if;
+      end;
+   end Match_Names;
+
+   function Propagate_Name
+     (Tree       : in     Syntax_Trees.Tree;
+      Nonterm    : in out Syntax_Trees.Recover_Token;
+      Tokens     : in     Syntax_Trees.Recover_Token_Array;
+      Name_Index : in     Positive_Index_Type)
+     return Syntax_Trees.In_Parse_Actions.Status
+   is begin
+      Tree.Set_Name (Nonterm, Tree.Name (Tokens (Name_Index)));
+      return (Label => Syntax_Trees.In_Parse_Actions.Ok);
+   end Propagate_Name;
+
+   function Merge_Names
+     (Tree        : in     Syntax_Trees.Tree;
+      Nonterm     : in out Syntax_Trees.Recover_Token;
+      Tokens      : in     Syntax_Trees.Recover_Token_Array;
+      First_Index : in     Positive_Index_Type;
+      Last_Index  : in     Positive_Index_Type)
+     return Syntax_Trees.In_Parse_Actions.Status
+   is begin
+      Tree.Set_Name (Nonterm, Tree.Name (Tokens (First_Index)) and Tree.Name 
(Tokens (Last_Index)));
+      return (Label => Syntax_Trees.In_Parse_Actions.Ok);
+   end Merge_Names;
+
+   function Terminate_Partial_Parse
+     (Tree                    : in Syntax_Trees.Tree;
+      Partial_Parse_Active    : in Boolean;
+      Partial_Parse_Byte_Goal : in Buffer_Pos;
+      Recover_Active          : in Boolean;
+      Nonterm                 : in Syntax_Trees.Recover_Token)
+     return Syntax_Trees.In_Parse_Actions.Status
+   is begin
+      if Partial_Parse_Active and then
+        (not Recover_Active) and then
+        Tree.Byte_Region (Nonterm).Last >= Partial_Parse_Byte_Goal
+      then
+         raise WisiToken.Partial_Parse;
+      else
+         return (Label => Syntax_Trees.In_Parse_Actions.Ok);
+      end if;
+   end Terminate_Partial_Parse;
+
+end WisiToken.In_Parse_Actions;
diff --git a/wisitoken-in_parse_actions.ads b/wisitoken-in_parse_actions.ads
new file mode 100644
index 0000000000..49428ff267
--- /dev/null
+++ b/wisitoken-in_parse_actions.ads
@@ -0,0 +1,80 @@
+--  Abstract :
+--
+--  Grammar in parse action routines.
+--
+--  Copyright (C) 2017 - 2022 Free Software Foundation, Inc.
+--
+--  This library is free software;  you can redistribute it and/or modify it
+--  under terms of the  GNU General Public License  as published by the Free
+--  Software  Foundation;  either version 3,  or (at your  option) any later
+--  version. This library is distributed in the hope that it will be useful,
+--  but WITHOUT ANY WARRANTY;  without even the implied warranty of MERCHAN-
+--  TABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+
+--  As a special exception under Section 7 of GPL version 3, you are granted
+--  additional permissions described in the GCC Runtime Library Exception,
+--  version 3.1, as published by the Free Software Foundation.
+
+pragma License (Modified_GPL);
+
+with WisiToken.Syntax_Trees;
+package WisiToken.In_Parse_Actions is
+
+   function Image
+     (Item       : in Syntax_Trees.In_Parse_Actions.Status;
+      Tree       : in Syntax_Trees.Tree'Class;
+      Error_Node : in Syntax_Trees.Valid_Node_Access)
+     return String;
+
+   function Match_Names
+     (Tree         : in Syntax_Trees.Tree;
+      Tokens       : in Syntax_Trees.Recover_Token_Array;
+      Start_Index  : in Positive_Index_Type;
+      End_Index    : in Positive_Index_Type;
+      End_Optional : in Boolean)
+     return Syntax_Trees.In_Parse_Actions.Status;
+   --  Check that buffer text at Tokens (Start_Index).Name matches buffer
+   --  text at Tokens (End_Index).Name. Comparison is controlled by
+   --  Descriptor.Case_Insensitive.
+
+   function Propagate_Name
+     (Tree       : in     Syntax_Trees.Tree;
+      Nonterm    : in out Syntax_Trees.Recover_Token;
+      Tokens     : in     Syntax_Trees.Recover_Token_Array;
+      Name_Index : in     Positive_Index_Type)
+     return Syntax_Trees.In_Parse_Actions.Status;
+   function Merge_Names
+     (Tree       : in     Syntax_Trees.Tree;
+      Nonterm    : in out Syntax_Trees.Recover_Token;
+      Tokens     : in     Syntax_Trees.Recover_Token_Array;
+      Name_Index : in     Positive_Index_Type)
+     return Syntax_Trees.In_Parse_Actions.Status
+   renames Propagate_Name;
+   --  Set Nonterm.Name to Tokens (Name_Index).Name, or .Byte_Region, if
+   --  .Name is Null_Buffer_Region. Return Ok.
+
+   function Merge_Names
+     (Tree        : in     Syntax_Trees.Tree;
+      Nonterm     : in out Syntax_Trees.Recover_Token;
+      Tokens      : in     Syntax_Trees.Recover_Token_Array;
+      First_Index : in     Positive_Index_Type;
+      Last_Index  : in     Positive_Index_Type)
+     return Syntax_Trees.In_Parse_Actions.Status;
+   --  Set Nonterm.Name to the merger of Tokens (First_Index ..
+   --  Last_Index).Name, return Ok.
+   --
+   --  If Tokens (Last_Index).Name is Null_Buffer_Region, use Tokens
+   --  (Last_Index).Byte_Region instead.
+
+   function Terminate_Partial_Parse
+     (Tree                    : in Syntax_Trees.Tree;
+      Partial_Parse_Active    : in Boolean;
+      Partial_Parse_Byte_Goal : in Buffer_Pos;
+      Recover_Active          : in Boolean;
+      Nonterm                 : in Syntax_Trees.Recover_Token)
+     return Syntax_Trees.In_Parse_Actions.Status;
+   pragma Inline (Terminate_Partial_Parse);
+   --  If partial parse is complete, raise Wisitoken.Partial_Parse;
+   --  otherwise return Ok.
+
+end WisiToken.In_Parse_Actions;
diff --git a/wisitoken-lexer-re2c.adb b/wisitoken-lexer-re2c.adb
index 46cccb6af6..c6b485f719 100644
--- a/wisitoken-lexer-re2c.adb
+++ b/wisitoken-lexer-re2c.adb
@@ -2,7 +2,7 @@
 --
 --  see spec.
 --
---  Copyright (C) 2017 - 2020 Free Software Foundation, Inc.
+--  Copyright (C) 2017 - 2022 Free Software Foundation, Inc.
 --
 --  This file is part of the WisiToken package.
 --
@@ -28,6 +28,7 @@
 pragma License (Modified_GPL);
 
 with Ada.Directories;
+with Ada.IO_Exceptions;
 with Ada.Strings.Unbounded;
 with GNATCOLL.Mmap;
 package body WisiToken.Lexer.re2c is
@@ -47,12 +48,30 @@ package body WisiToken.Lexer.re2c is
    type Instance_Access is access Instance; --  silence compiler warning
 
    function New_Lexer
-     (Descriptor  : not null access constant WisiToken.Descriptor)
+     (Trace      : in WisiToken.Trace_Access;
+      Descriptor  : in WisiToken.Descriptor_Access_Constant)
      return Handle
-   is begin
-      return Handle (Instance_Access'(new Instance (Descriptor)));
+   is
+      Result : constant Instance_Access := new Instance;
+   begin
+      Result.Trace      := Trace;
+      Result.Descriptor := Descriptor;
+      return Handle (Result);
    end New_Lexer;
 
+   overriding procedure Set_Verbosity
+     (Lexer     : in Instance;
+      Verbosity : in Integer)
+   is
+      use all type System.Address;
+   begin
+      --  Allow calling this before the lexer is configured, for
+      --  run_emacs_common_parse.adb.
+      if Lexer.Lexer /= System.Null_Address then
+         Set_Verbosity (Lexer.Lexer, Interfaces.C.int (Verbosity));
+      end if;
+   end Set_Verbosity;
+
    overriding procedure Reset_With_String
      (Lexer      : in out Instance;
       Input      : in     String;
@@ -69,12 +88,12 @@ package body WisiToken.Lexer.re2c is
          Buffer_Nominal_First_Char => Begin_Char,
          Line_Nominal_First        => Begin_Line,
          Buffer                    => new String'(Input),
+         Buffer_Last               => Input'Last,
          User_Buffer               => False);
 
       Lexer.Lexer := New_Lexer
-        (Buffer    => Lexer.Source.Buffer.all'Address,
-         Length    => Interfaces.C.size_t (Input'Length),
-         Verbosity => Interfaces.C.int (if Trace_Parse > 3 then Trace_Parse - 
3 else 0));
+        (Buffer => Lexer.Source.Buffer.all'Address,
+         Length => Interfaces.C.size_t (Input'Length));
 
       Reset (Lexer);
    end Reset_With_String;
@@ -82,28 +101,42 @@ package body WisiToken.Lexer.re2c is
    overriding procedure Reset_With_String_Access
      (Lexer      : in out Instance;
       Input      : in     Ada.Strings.Unbounded.String_Access;
+      Input_Last : in     Integer;
       File_Name  : in     Ada.Strings.Unbounded.Unbounded_String;
       Begin_Char : in     Buffer_Pos       := Buffer_Pos'First;
       Begin_Line : in     Line_Number_Type := Line_Number_Type'First)
-   is begin
+   is
+      function Short_File_Name return Ada.Strings.Unbounded.Unbounded_String
+      is
+         use Ada.Strings.Unbounded;
+      begin
+         if Length (File_Name) = 0 then
+            return +"";
+         else
+            return +Ada.Directories.Simple_Name (-File_Name);
+         end if;
+      exception
+      when Ada.IO_Exceptions.Name_Error =>
+         --  Probably an editor temp buffer name.
+         return File_Name;
+      end Short_File_Name;
+   begin
       Finalize (Lexer);
 
       --  We assume Input is in UTF-8 encoding
       Lexer.Source :=
-        (Label       => String_Label,
-         File_Name   =>
-           +(if Ada.Strings.Unbounded.Length (File_Name) = 0 then ""
-             else Ada.Directories.Simple_Name (-File_Name)),
+        (Label                     => String_Label,
+         File_Name                 => Short_File_Name,
          Buffer_Nominal_First_Byte => Base_Buffer_Pos (Input'First),
          Buffer_Nominal_First_Char => Begin_Char,
          Line_Nominal_First        => Begin_Line,
          Buffer                    => Input,
+         Buffer_Last               => Input_Last,
          User_Buffer               => True);
 
       Lexer.Lexer := New_Lexer
-        (Buffer    => Lexer.Source.Buffer.all'Address,
-         Length    => Interfaces.C.size_t (Input'Length),
-         Verbosity => Interfaces.C.int (if Trace_Parse > 3 then Trace_Parse - 
3 else 0));
+        (Buffer => Lexer.Source.Buffer.all'Address,
+         Length => Interfaces.C.size_t (Input_Last - Input'First + 1));
 
       Reset (Lexer);
    end Reset_With_String_Access;
@@ -117,7 +150,7 @@ package body WisiToken.Lexer.re2c is
       Begin_Line     : in     Line_Number_Type := Line_Number_Type'First)
    is
       use GNATCOLL.Mmap;
-      Length : Buffer_Pos;
+      Length : Base_Buffer_Pos; -- 0 in empty file
    begin
       Finalize (Lexer);
 
@@ -133,7 +166,7 @@ package body WisiToken.Lexer.re2c is
 
       if Begin_Byte_Pos = Invalid_Buffer_Pos then
          Lexer.Source.Region := Read (Lexer.Source.File);
-         Length              := Buffer_Pos (Last (Lexer.Source.Region));
+         Length              := Base_Buffer_Pos (Last (Lexer.Source.Region));
       else
          Length := End_Byte_Pos - Begin_Byte_Pos + 1;
 
@@ -149,9 +182,8 @@ package body WisiToken.Lexer.re2c is
       Lexer.Source.Buffer_Last := Last (Lexer.Source.Region);
 
       Lexer.Lexer := New_Lexer
-        (Buffer    => Data (Lexer.Source.Region).all'Address,
-         Length    => Interfaces.C.size_t (Length),
-         Verbosity => Interfaces.C.int (if Trace_Parse > 3 then Trace_Parse - 3 else 0));
+        (Buffer => Data (Lexer.Source.Region).all'Address,
+         Length => Interfaces.C.size_t (Length));
 
       Reset (Lexer);
    end Reset_With_File;
@@ -159,120 +191,110 @@ package body WisiToken.Lexer.re2c is
    overriding procedure Reset (Lexer : in out Instance)
    is begin
       Reset_Lexer (Lexer.Lexer);
-      Lexer.Line            := 1;
-      Lexer.Char_Line_Start := 1;
-      Lexer.ID :=
-        --  First token is assumed to be first on a line.
-        (if Lexer.Descriptor.New_Line_ID = Invalid_Token_ID
-         then Invalid_Token_ID
-         else Lexer.Descriptor.New_Line_ID);
-      Lexer.Prev_ID := Invalid_Token_ID;
    end Reset;
 
    overriding function Find_Next
      (Lexer : in out Instance;
-      Token :    out Base_Token)
-     return Boolean
+      Token :    out WisiToken.Lexer.Token)
+     return Natural
    is
       use Interfaces.C;
 
+      ID            : Token_ID;
+      Byte_Position : Natural;
+      Byte_Length   : Natural;
+      Char_Position : Natural;
+      Char_Length   : Natural;
+      Line_Start    : Line_Number_Type;
+      Line_Length   : Base_Line_Number_Type;
+      Error_Count   : Integer := 0;
+
       procedure Build_Token
       is begin
          Token :=
-           (ID         => Lexer.ID,
-            Tree_Index => Invalid_Node_Index,
+           (ID => ID,
 
             Byte_Region =>
-              (if Lexer.ID = Lexer.Descriptor.EOI_ID and then Lexer.Byte_Position = Integer (Base_Buffer_Pos'First)
-               then
-                  --  EOF in empty buffer
+              (if ID = Lexer.Descriptor.EOI_ID and then Byte_Position = 0 then
+                 --  EOI in empty buffer
                  (Lexer.Source.Buffer_Nominal_First_Byte,
                   Lexer.Source.Buffer_Nominal_First_Byte - 1)
                else
-                 (Base_Buffer_Pos (Lexer.Byte_Position) + Lexer.Source.Buffer_Nominal_First_Byte - Buffer_Pos'First,
-                  Base_Buffer_Pos (Lexer.Byte_Position + Lexer.Byte_Length - 1) +
+                 (Base_Buffer_Pos (Byte_Position) + Lexer.Source.Buffer_Nominal_First_Byte - Buffer_Pos'First,
+                  Base_Buffer_Pos (Byte_Position + Byte_Length - 1) +
                    Lexer.Source.Buffer_Nominal_First_Byte - Buffer_Pos'First)),
 
-            Line => Lexer.Line + Lexer.Source.Line_Nominal_First - Line_Number_Type'First,
-
-            Column =>
-              (if Lexer.ID = Lexer.Descriptor.New_Line_ID or
-                 Lexer.ID = Lexer.Descriptor.EOI_ID
-               then 0
-               else Ada.Text_IO.Count (Lexer.Char_Position - Lexer.Char_Line_Start)),
+            Line_Region =>
+              (First    => Line_Start + Lexer.Source.Line_Nominal_First - Line_Number_Type'First,
+               Last     => Line_Start + Line_Length + Lexer.Source.Line_Nominal_First - Line_Number_Type'First),
 
             Char_Region =>
-              (if Lexer.ID = Lexer.Descriptor.EOI_ID and then Lexer.Byte_Position = Integer (Base_Buffer_Pos'First)
+              (if ID = Lexer.Descriptor.EOI_ID and then Byte_Position = Integer (Base_Buffer_Pos'First)
                then
-                  --  EOF in empty buffer
+                 --  EOI in empty buffer
                  (Lexer.Source.Buffer_Nominal_First_Byte,
                   Lexer.Source.Buffer_Nominal_First_Byte - 1)
                else
-                 (To_Char_Pos (Lexer.Source, Lexer.Char_Position),
-                  To_Char_Pos (Lexer.Source, Lexer.Char_Position + Lexer.Char_Length - 1))));
+                 (To_Char_Pos (Lexer.Source, Char_Position),
+                  To_Char_Pos (Lexer.Source, Char_Position + Char_Length - 1))));
       end Build_Token;
 
    begin
-      Lexer.Prev_ID := Lexer.ID;
       loop
          declare
             Status : constant int := Next_Token
-              (Lexer.Lexer, Lexer.ID,
-               Byte_Position => Interfaces.C.size_t (Lexer.Byte_Position),
-               Byte_Length   => Interfaces.C.size_t (Lexer.Byte_Length),
-               Char_Position => Interfaces.C.size_t (Lexer.Char_Position),
-               Char_Length   => Interfaces.C.size_t (Lexer.Char_Length),
-               Line_Start    => Interfaces.C.int (Lexer.Line));
+              (Lexer.Lexer, ID,
+               Byte_Position => Interfaces.C.size_t (Byte_Position),
+               Byte_Length   => Interfaces.C.size_t (Byte_Length),
+               Char_Position => Interfaces.C.size_t (Char_Position),
+               Char_Length   => Interfaces.C.size_t (Char_Length),
+               Line_Start    => Interfaces.C.int (Line_Start),
+               Line_Length   => Interfaces.C.int (Line_Length));
          begin
             case Status is
             when 0 =>
-               if Lexer.ID = Lexer.Descriptor.New_Line_ID then
-                  Lexer.Char_Line_Start := Lexer.Char_Position + 1;
-               end if;
-
                Build_Token;
-               return False;
+               return Error_Count;
 
             when 1 =>
                --  Unrecognized character from lexer. Handle missing quotes by
                --  inserting a virtual quote at the existing quote, and telling the
                --  lexer to skip the char.
+               Error_Count := @ + 1;
                declare
+                  Buffer : constant GNATCOLL.Mmap.Str_Access := WisiToken.Lexer.Buffer (Lexer.Source);
                begin
-                  if Trace_Parse > Lexer_Debug then
+                  if Trace_Lexer > Outline then
                      --  We don't have a visible Trace object here.
-                     Ada.Text_IO.Put_Line ("lexer error char " & Buffer (Lexer.Byte_Position));
+                     Ada.Text_IO.Put_Line ("lexer error char " & Buffer (Byte_Position));
                   end if;
 
-                  if Buffer (Lexer.Byte_Position) = ''' then
+                  if Lexer.Descriptor.String_1_ID /= Invalid_Token_ID and Buffer (Byte_Position) = ''' then
                     --  Lexer has read to next new-line (or eof), then backtracked to next
                      --  char after '.
                      Lexer.Errors.Append
-                       ((To_Char_Pos (Lexer.Source, Lexer.Char_Position),
-                         Invalid_Token_Index,
+                       ((To_Char_Pos (Lexer.Source, Char_Position),
                          (1 => ''', others => ASCII.NUL)));
 
-                     Lexer.ID := Lexer.Descriptor.String_1_ID;
+                     ID := Lexer.Descriptor.String_1_ID;
                      Build_Token;
-                     return True;
+                     return Error_Count;
 
-                  elsif Buffer (Lexer.Byte_Position) = '"' then
+                  elsif Lexer.Descriptor.String_2_ID /= Invalid_Token_ID and Buffer (Byte_Position) = '"' then
                     --  Lexer has read to next new-line (or eof), then backtracked to next
                      --  char after ".
                      Lexer.Errors.Append
-                       ((To_Char_Pos (Lexer.Source, Lexer.Char_Position),
-                         Invalid_Token_Index,
-                         (1 => '"', others => ASCII.NUL)));
+                       ((Char_Pos     => To_Char_Pos (Lexer.Source, Char_Position),
+                         Recover_Char =>  (1 => '"', others => ASCII.NUL)));
 
-                     Lexer.ID := Lexer.Descriptor.String_2_ID;
+                     ID := Lexer.Descriptor.String_2_ID;
                      Build_Token;
-                     return True;
+                     return Error_Count;
 
                   else
                      --  Just skip the character; call Next_Token again.
                      Lexer.Errors.Append
-                       ((To_Char_Pos (Lexer.Source, Lexer.Char_Position), Invalid_Token_Index, (others => ASCII.NUL)));
+                       ((To_Char_Pos (Lexer.Source, Char_Position), (others => ASCII.NUL)));
                   end if;
                end;
 
@@ -283,25 +305,143 @@ package body WisiToken.Lexer.re2c is
       end loop;
    end Find_Next;
 
-   overriding function First (Lexer : in Instance) return Boolean
+   overriding procedure Set_Position
+     (Lexer         : in out Instance;
+      Byte_Position : in     Buffer_Pos;
+      Char_Position : in     Buffer_Pos;
+      Line          : in     Line_Number_Type)
    is begin
-      return Lexer.Descriptor.New_Line_ID /= Invalid_Token_ID and then
-           Lexer.Prev_ID = Lexer.Descriptor.New_Line_ID;
-   end First;
+      --  FIXME: respect partial parse lexer.source.*_Nominal_first_*. only needed if doing incremental after partial.
+      Set_Position
+        (Lexer.Lexer,
+         Byte_Position => Interfaces.C.size_t (Byte_Position),
+         Char_Position => Interfaces.C.size_t (Char_Position),
+         Line          => Interfaces.C.int (Line));
+   end Set_Position;
+
+   overriding
+   function Is_Block_Delimited
+     (Lexer : in Instance;
+      ID    : in Token_ID)
+     return Boolean
+   is begin
+      return Is_Block_Delimited (ID);
+   end Is_Block_Delimited;
 
-   overriding function Buffer_Text (Lexer : in Instance; Byte_Bounds : in Buffer_Region) return String
-   is
-      First : constant Integer := Integer
-        (Byte_Bounds.First - Lexer.Source.Buffer_Nominal_First_Byte + Buffer_Pos'First);
-      Last  : constant Integer := Integer
-        (Byte_Bounds.Last - Lexer.Source.Buffer_Nominal_First_Byte + Buffer_Pos'First);
-   begin
-      return String (Buffer (Lexer.Source) (First .. Last));
-   end Buffer_Text;
+   overriding
+   function Same_Block_Delimiters
+     (Lexer : in Instance;
+      ID    : in Token_ID)
+     return Boolean
+   is begin
+      return Same_Block_Delimiters (ID);
+   end Same_Block_Delimiters;
+
+   overriding
+   function Escape_Delimiter_Doubled
+     (Lexer : in Instance;
+      ID    : in Token_ID)
+     return Boolean
+   is begin
+      return Escape_Delimiter_Doubled (ID);
+   end Escape_Delimiter_Doubled;
+
+   overriding
+   function Start_Delimiter_Length
+     (Lexer : in Instance;
+      ID    : in Token_ID)
+     return Integer
+   is begin
+      return Start_Delimiter_Length (ID);
+   end Start_Delimiter_Length;
+
+   overriding
+   function End_Delimiter_Length
+     (Lexer : in Instance;
+      ID    : in Token_ID)
+     return Integer
+   is begin
+      return End_Delimiter_Length (ID);
+   end End_Delimiter_Length;
+
+   overriding
+   function New_Line_Is_End_Delimiter
+     (Lexer : in Instance;
+      ID    : in Token_ID)
+     return Boolean
+   is begin
+      return New_Line_Is_End_Delimiter (ID);
+   end New_Line_Is_End_Delimiter;
+
+   overriding
+   function Find_End_Delimiter
+     (Lexer       : in Instance;
+      ID          : in Token_ID;
+      Token_Start : in Buffer_Pos)
+     return Buffer_Pos
+   is begin
+      return Find_End_Delimiter (Lexer.Source, ID, Token_Start);
+   end Find_End_Delimiter;
+
+   overriding
+   function Contains_End_Delimiter
+     (Lexer         : in Instance;
+      ID            : in Token_ID;
+      Region        : in Buffer_Region)
+     return Base_Buffer_Pos
+   is begin
+      return Contains_End_Delimiter (Lexer.Source, ID, Region);
+   end Contains_End_Delimiter;
+
+   overriding
+   function Find_Scan_End
+     (Lexer       : in Instance;
+      ID          : in Token_ID;
+      Byte_Region : in Buffer_Region;
+      Inserted    : in Boolean;
+      Start       : in Boolean)
+     return Buffer_Pos
+   is begin
+      return Find_Scan_End (Lexer.Source, ID, Byte_Region, Inserted, Start);
+   end Find_Scan_End;
+
+   overriding
+   function Line_Begin_Char_Pos
+     (Lexer : in Instance;
+      Token : in WisiToken.Lexer.Token;
+      Line  : in Line_Number_Type)
+     return Base_Buffer_Pos
+   is begin
+      return Line_Begin_Char_Pos (Lexer.Source, Token, Line);
+   end Line_Begin_Char_Pos;
+
+   overriding
+   function Line_At_Byte_Pos
+     (Lexer       : in Instance;
+      Byte_Region : in Buffer_Region;
+      Byte_Pos    : in Buffer_Pos;
+      First_Line  : in Line_Number_Type)
+     return Line_Number_Type
+   is begin
+      return Line_At_Byte_Pos (Lexer.Source, Byte_Region, Byte_Pos, First_Line);
+   end Line_At_Byte_Pos;
 
-   overriding function File_Name (Lexer : in Instance) return String
+   overriding
+   function Can_Contain_New_Line
+     (Lexer : in Instance;
+      ID    : in Token_ID)
+     return Boolean
+   is begin
+      return Can_Contain_New_Line (ID);
+   end Can_Contain_New_Line;
+
+   overriding
+   function Terminated_By_New_Line
+     (Lexer : in Instance;
+      ID    : in Token_ID)
+     return Boolean
    is begin
-      return File_Name (Lexer.Source);
-   end File_Name;
+      return Terminated_By_New_Line (ID);
+   end Terminated_By_New_Line;
 
 end WisiToken.Lexer.re2c;
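
For reference, Find_Next above now returns the number of recovered lexer
errors (a Natural) instead of a Boolean, with the details appended to
Lexer.Errors. A minimal caller sketch, assuming a handle obtained from a
generated instantiation of this generic; the procedure name, parameters and
input string below are illustrative only, not part of this commit:

--  Sketch only.
with Ada.Text_IO;
with WisiToken.Lexer;
procedure Lex_Demo
  (Lex : in WisiToken.Lexer.Handle;   --  from the generated New_Lexer
   EOI : in WisiToken.Token_ID)       --  the grammar's end-of-input id
is
   use type WisiToken.Token_ID;
   Tok         : WisiToken.Lexer.Token;
   Error_Count : Natural;
begin
   Lex.Reset_With_String ("procedure Foo is begin null; end Foo;");
   loop
      Error_Count := Lex.Find_Next (Tok);
      --  Recovered errors are counted here; Char_Pos and any inserted
      --  Recover_Char are recorded in Lex.Errors.
      if Error_Count > 0 then
         Ada.Text_IO.Put_Line ("recovered lexer errors:" & Natural'Image (Error_Count));
      end if;
      exit when Tok.ID = EOI;
   end loop;
end Lex_Demo;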
diff --git a/wisitoken-lexer-re2c.ads b/wisitoken-lexer-re2c.ads
index b871e9cc20..a57d677c2c 100644
--- a/wisitoken-lexer-re2c.ads
+++ b/wisitoken-lexer-re2c.ads
@@ -6,7 +6,7 @@
 --
 --  [1] http://re2c.org/
 --
---  Copyright (C) 2017 - 2019 Free Software Foundation, Inc.
+--  Copyright (C) 2017 - 2022 Free Software Foundation, Inc.
 --
 --  This file is part of the WisiToken package.
 --
@@ -33,9 +33,8 @@ generic
    --  These subprograms are provided by generated source code.
 
    with function New_Lexer
-     (Buffer    : in System.Address;
-      Length    : in Interfaces.C.size_t;
-      Verbosity : in Interfaces.C.int)
+     (Buffer : in System.Address;
+      Length : in Interfaces.C.size_t)
      return System.Address;
    --  Create the re2c lexer object, passing it the full text to process.
    --  Length is buffer length in 8 bit bytes.
@@ -49,6 +48,16 @@ generic
    with procedure Reset_Lexer (Lexer : in System.Address);
    --  Restart lexing, with previous input buffer.
 
+   with procedure Set_Verbosity
+     (Lexer     : in System.Address;
+      Verbosity : in Interfaces.C.int);
+
+   with procedure Set_Position
+     (Lexer         : in System.Address;
+      Byte_Position : in Interfaces.C.size_t;
+      Char_Position : in Interfaces.C.size_t;
+      Line          : in Interfaces.C.int);
+
    with function Next_Token
      (Lexer         : in     System.Address;
       ID            :    out Token_ID;
@@ -56,19 +65,75 @@ generic
       Byte_Length   :    out Interfaces.C.size_t;
       Char_Position :    out Interfaces.C.size_t;
       Char_Length   :    out Interfaces.C.size_t;
-      Line_Start    :    out Interfaces.C.int)
+      Line_Start    :    out Interfaces.C.int;
+      Line_Length   :    out Interfaces.C.int)
      return Interfaces.C.int;
    --  *_Position and *_Length give the position and length in bytes and
    --  characters of the token from the start of the buffer, 0 indexed.
    --
    --  Line_Start gives the line number in the source file that the first
-   --  character of the token is in, 1 indexed.
+   --  character of the token is in, 1 indexed. Line_Length gives the
+   --  number of line ends contained in the token; 0 for a token that is all on
+   --  one line, 1 for a new_line, more for a multi-line token.
    --
    --  Result values:
    --
    --  0 - no error
    --  1 - there is an unrecognized character at Position.
 
+   with function Is_Block_Delimited (ID : in Token_ID) return Boolean;
+   --  Implements WisiToken.Lexer.Is_Block_Delimited.
+
+   with function Same_Block_Delimiters (ID : in Token_ID) return Boolean;
+   --  Implements WisiToken.Lexer.Same_Block_Delimiters.
+
+   with function Escape_Delimiter_Doubled (ID : in Token_ID) return Boolean;
+   --  Implements WisiToken.Lexer.Escape_Delimiter_Doubled.
+
+   with function Start_Delimiter_Length (ID : in Token_ID) return Integer;
+   --  Implements WisiToken.Lexer.Start_Delimiter_Length.
+
+   with function End_Delimiter_Length (ID : in Token_ID) return Integer;
+   --  Implements WisiToken.Lexer.End_Delimiter_Length.
+
+   with function New_Line_Is_End_Delimiter (ID : in Token_ID) return Boolean;
+   --  Implements WisiToken.Lexer.New_Line_Is_End_Delimiter.
+
+   with function Find_End_Delimiter
+     (Source      : in WisiToken.Lexer.Source;
+      ID          : in Token_ID;
+      Token_Start : in Buffer_Pos)
+     return Buffer_Pos;
+   --  Implements WisiToken.Lexer.Find_End_Delimiter.
+
+   with function Contains_End_Delimiter
+     (Source : in WisiToken.Lexer.Source;
+      ID     : in Token_ID;
+      Region : in Buffer_Region)
+     return Base_Buffer_Pos;
+   --  Implements WisiToken.Lexer.Contains_End_Delimiter
+
+   with function Find_Scan_End
+     (Source      : in WisiToken.Lexer.Source;
+      ID          : in Token_ID;
+      Byte_Region : in Buffer_Region;
+      Inserted    : in Boolean;
+      Start       : in Boolean)
+     return Buffer_Pos;
+
+   with function Line_Begin_Char_Pos
+     (Source : in WisiToken.Lexer.Source;
+      Token  : in WisiToken.Lexer.Token;
+      Line   : in WisiToken.Line_Number_Type)
+     return Buffer_Pos;
+   --  Implements WisiToken.Lexer.Line_Begin_Char_Pos, so that
+   --  precondition applies.
+
+   with function Can_Contain_New_Line (ID : in Token_ID) return Boolean;
+
+   with function Terminated_By_New_Line (ID : in Token_ID) return Boolean;
+   --  Implements WisiToken.Lexer.Terminated_By_New_Line;
+
 package WisiToken.Lexer.re2c is
 
    Invalid_Input : exception;
@@ -78,11 +143,16 @@ package WisiToken.Lexer.re2c is
    overriding procedure Finalize (Object : in out Instance);
 
    function New_Lexer
-     (Descriptor  : not null access constant WisiToken.Descriptor)
+     (Trace      : in WisiToken.Trace_Access;
+      Descriptor : in WisiToken.Descriptor_Access_Constant)
      return WisiToken.Lexer.Handle;
    --  If the tokens do not include a reporting New_Line token, set
    --  New_Line_ID to Invalid_Token_ID.
 
+   overriding procedure Set_Verbosity
+     (Lexer     : in Instance;
+      Verbosity : in Integer);
+
    overriding procedure Reset_With_String
      (Lexer      : in out Instance;
       Input      : in     String;
@@ -93,6 +163,7 @@ package WisiToken.Lexer.re2c is
    overriding procedure Reset_With_String_Access
      (Lexer      : in out Instance;
       Input      : in     Ada.Strings.Unbounded.String_Access;
+      Input_Last : in     Integer;
       File_Name  : in     Ada.Strings.Unbounded.Unbounded_String;
       Begin_Char : in     Buffer_Pos       := Buffer_Pos'First;
       Begin_Line : in     Line_Number_Type := Line_Number_Type'First);
@@ -110,35 +181,110 @@ package WisiToken.Lexer.re2c is
 
    overriding procedure Reset (Lexer : in out Instance);
 
-   overriding function Buffer_Text (Lexer : in Instance; Byte_Bounds : in Buffer_Region) return String;
-
-   overriding function First (Lexer : in Instance) return Boolean;
+   overriding
+   procedure Set_Position
+     (Lexer         : in out Instance;
+      Byte_Position : in     Buffer_Pos;
+      Char_Position : in     Buffer_Pos;
+      Line          : in     Line_Number_Type);
 
    overriding
    function Find_Next
      (Lexer : in out Instance;
-      Token :    out Base_Token)
+      Token :    out WisiToken.Lexer.Token)
+     return Natural;
+
+   overriding
+   function Is_Block_Delimited
+     (Lexer : in Instance;
+      ID    : in Token_ID)
+     return Boolean;
+
+   overriding
+   function Same_Block_Delimiters
+     (Lexer : in Instance;
+      ID    : in Token_ID)
      return Boolean;
 
-   overriding function File_Name (Lexer : in Instance) return String;
+   overriding
+   function Escape_Delimiter_Doubled
+     (Lexer : in Instance;
+      ID    : in Token_ID)
+     return Boolean;
+
+   overriding
+   function Start_Delimiter_Length
+     (Lexer : in Instance;
+      ID    : in Token_ID)
+     return Integer;
+
+   overriding
+   function End_Delimiter_Length
+     (Lexer : in Instance;
+      ID    : in Token_ID)
+     return Integer;
+
+   overriding
+   function New_Line_Is_End_Delimiter
+     (Lexer : in Instance;
+      ID    : in Token_ID)
+     return Boolean;
+
+   overriding
+   function Find_End_Delimiter
+     (Lexer       : in Instance;
+      ID          : in Token_ID;
+      Token_Start : in Buffer_Pos)
+     return Buffer_Pos;
+
+   overriding
+   function Contains_End_Delimiter
+     (Lexer  : in Instance;
+      ID     : in Token_ID;
+      Region : in Buffer_Region)
+     return Base_Buffer_Pos;
+
+   overriding
+   function Find_Scan_End
+     (Lexer       : in Instance;
+      ID          : in Token_ID;
+      Byte_Region : in Buffer_Region;
+      Inserted    : in Boolean;
+      Start       : in Boolean)
+     return Buffer_Pos;
+
+   overriding
+   function Line_Begin_Char_Pos
+     (Lexer : in Instance;
+      Token : in WisiToken.Lexer.Token;
+      Line  : in Line_Number_Type)
+     return Base_Buffer_Pos;
+
+   overriding
+   function Line_At_Byte_Pos
+     (Lexer       : in Instance;
+      Byte_Region : in WisiToken.Buffer_Region;
+      Byte_Pos    : in Buffer_Pos;
+      First_Line  : in Line_Number_Type)
+     return Line_Number_Type;
+
+   overriding
+   function Can_Contain_New_Line
+     (Lexer : in Instance;
+      ID    : in Token_ID)
+     return Boolean;
+
+   overriding
+   function Terminated_By_New_Line
+     (Lexer : in Instance;
+      ID    : in Token_ID)
+     return Boolean;
 
 private
 
    type Instance is new WisiToken.Lexer.Instance with
    record
-      Lexer         : System.Address := System.Null_Address;
-      Source        : WisiToken.Lexer.Source;
-      ID            : Token_ID; --  Last token read by find_next
-      Byte_Position : Natural;  --  We don't use Buffer_Pos here, because Source.Buffer is indexed by Integer
-      Byte_Length   : Natural;
-      Char_Position : Natural;
-      Char_Length   : Natural;
-      --  Position and length in bytes and characters of last token from
-      --  start of Managed.Buffer, 1 indexed.
-
-      Line            : Line_Number_Type; -- after last (or current) New_Line token
-      Char_Line_Start : Natural;          -- Character position after last New_Line token, lexer origin.
-      Prev_ID         : Token_ID;         -- previous token_id
+      Lexer  : System.Address := System.Null_Address;
    end record;
 
 end WisiToken.Lexer.re2c;
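
The new generic formal subprograms above (Is_Block_Delimited through
Terminated_By_New_Line) are supplied by the generated *_re2c packages. As a
rough illustration only, for a hypothetical grammar with a double-quoted
string token and a "--" comment token (the package name and token ids below
are invented), a few of them could look like:

--  Illustration only; a generated *_re2c package provides the real versions.
with WisiToken;
package Demo_Delimiters is
   use type WisiToken.Token_ID;

   String_ID  : constant WisiToken.Token_ID := 3;  --  "..." literal (hypothetical id)
   Comment_ID : constant WisiToken.Token_ID := 4;  --  "--" comment to end of line (hypothetical id)

   function Is_Block_Delimited (ID : in WisiToken.Token_ID) return Boolean
   is (ID = String_ID or ID = Comment_ID);

   function Same_Block_Delimiters (ID : in WisiToken.Token_ID) return Boolean
   is (ID = String_ID);  --  '"' both starts and ends the token

   function Escape_Delimiter_Doubled (ID : in WisiToken.Token_ID) return Boolean
   is (ID = String_ID);  --  "" embeds a quote

   function Start_Delimiter_Length (ID : in WisiToken.Token_ID) return Integer
   is (if ID = String_ID then 1 else 2);

   function End_Delimiter_Length (ID : in WisiToken.Token_ID) return Integer
   is (1);  --  closing '"', or the new_line ending a comment

   function New_Line_Is_End_Delimiter (ID : in WisiToken.Token_ID) return Boolean
   is (ID = Comment_ID);

end Demo_Delimiters;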
diff --git a/wisitoken-lexer-regexp.adb b/wisitoken-lexer-regexp.adb
deleted file mode 100644
index d46428953b..0000000000
--- a/wisitoken-lexer-regexp.adb
+++ /dev/null
@@ -1,259 +0,0 @@
---  Abstract:
---
---  See spec
---
---  Copyright (C) 2015, 2017 - 2020 Free Software Foundation, Inc.
---
---  This file is part of the WisiToken package.
---
---  The WisiToken package is free software; you can redistribute it
---  and/or modify it under the terms of the GNU General Public License
---  as published by the Free Software Foundation; either version 3, or
---  (at your option) any later version. The WisiToken package is
---  distributed in the hope that it will be useful, but WITHOUT ANY
---  WARRANTY; without even the implied warranty of MERCHANTABILITY or
---  FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
---  License for more details. You should have received a copy of the
---  GNU General Public License distributed with the WisiToken package;
---  see file GPL.txt. If not, write to the Free Software Foundation,
---  59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
---
---  As a special exception, if other files instantiate generics from
---  this unit, or you link this unit with other files to produce an
---  executable, this unit does not by itself cause the resulting
---  executable to be covered by the GNU General Public License. This
---  exception does not however invalidate any other reasons why the
---  executable file might be covered by the GNU Public License.
-
-pragma License (Modified_GPL);
-
-with Ada.Characters.Latin_1;
-with SAL;
-package body WisiToken.Lexer.Regexp is
-
-   function Find_Best_Match (Lexer : in out Instance) return Boolean
-   is
-      --  Find the longest matching character sequence in the buffer that
-      --  matches a token.
-      --
-      --  Return True if a token is matched, False if not.
-
-      use WisiToken.Regexp;
-
-      Current_Char         : Integer := Lexer.Buffer_Head;
-      Current_State        : Match_State;
-      Current_Match_Length : Integer := 0;
-      Best_Match_ID        : Token_ID;
-      Best_Match_Length    : Natural := 0;
-      Still_Matching       : Boolean := False;
-   begin
-      --  We only support Reset_With_String.
-
-      if Current_Char > Lexer.Source.Buffer'Last then
-         Lexer.ID := Lexer.Descriptor.EOI_ID;
-         Lexer.Lexeme_Head := Lexer.Buffer_Head;
-         Lexer.Lexeme_Tail := Lexer.Buffer_Head - 1;
-         return True;
-      end if;
-
-      for I in Lexer.Syntax'Range loop
-         Clear (Lexer.Syntax (I).Regexp);
-      end loop;
-
-      loop
-         Still_Matching := False;
-
-         for I in Lexer.Syntax'Range loop
-            if State (Lexer.Syntax (I).Regexp) /= WisiToken.Regexp.Error then
-               Current_State := Match
-                 (Lexer.Syntax (I).Regexp,
-                  Lexer.Source.Buffer (Lexer.Buffer_Head .. Lexer.Source.Buffer'Last),
-                  Current_Char);
-
-               case Current_State is
-               when Matching =>
-                  Still_Matching := True;
-
-               when Final =>
-                  Still_Matching := True;
-
-                  Current_Match_Length := Current_Char - Lexer.Buffer_Head + 1;
-
-                  if Best_Match_Length < Current_Match_Length then
-                     Best_Match_ID  := I;
-                     Best_Match_Length := Current_Match_Length;
-                  end if;
-
-               when WisiToken.Regexp.Error =>
-                  null;
-               end case;
-            end if;
-         end loop;
-
-         exit when (not Still_Matching) or else (Current_Char = Lexer.Source.Buffer'Last);
-
-         if Best_Match_Length = Lexer.Source.Buffer'Length then
-            raise SAL.Programmer_Error with
-              "token larger than buffer size of" & Integer'Image 
(Lexer.Source.Buffer'Length);
-         end if;
-
-         Current_Char := Current_Char + 1;
-      end loop;
-
-      if Best_Match_Length > 0 then
-         Lexer.Lexeme_Head := Lexer.Buffer_Head;
-         Lexer.Lexeme_Tail := Lexer.Buffer_Head + Best_Match_Length - 1;
-         Lexer.ID          := Best_Match_ID;
-
-         if Lexer.Lexeme_Head = Lexer.Source.Buffer'Last and
-           Lexer.Source.Buffer (Lexer.Lexeme_Head) = Ada.Characters.Latin_1.EOT
-         then
-            --  matched EOF; repeat that next time
-            null;
-         else
-            Lexer.Buffer_Head := Lexer.Lexeme_Tail + 1;
-         end if;
-         return True;
-
-      elsif Current_Char = Lexer.Source.Buffer'Last then
-         Lexer.ID := Lexer.Descriptor.EOI_ID;
-         Lexer.Buffer_Head := Lexer.Buffer_Head + 1;
-         return True;
-
-      else
-         return False;
-      end if;
-
-   end Find_Best_Match;
-
-   ----------
-   --  Public subprograms
-
-   function Get
-     (Regexp         : in String;
-      Case_Sensitive : in Boolean := True;
-      Report         : in Boolean := True)
-      return Syntax_Item
-   is begin
-      return (WisiToken.Regexp.Compile (Regexp, Case_Sensitive), Report);
-   end Get;
-
-   type Instance_Access is access Instance; --  silence compiler warning
-
-   function New_Lexer
-     (Descriptor : not null access constant WisiToken.Descriptor;
-      Syntax     : in              WisiToken.Lexer.Regexp.Syntax)
-     return WisiToken.Lexer.Handle
-   is
-      New_Lexer : constant Instance_Access := new Instance (Descriptor, Syntax'Last);
-   begin
-      New_Lexer.Syntax := Syntax;
-
-      return Handle (New_Lexer);
-   end New_Lexer;
-
-   overriding procedure Finalize (Object : in out Instance)
-   is begin
-      Finalize (Object.Source);
-   end Finalize;
-
-   overriding procedure Reset_With_String
-     (Lexer      : in out Instance;
-      Input      : in     String;
-      Begin_Char : in     Buffer_Pos       := Buffer_Pos'First;
-      Begin_Line : in     Line_Number_Type := Line_Number_Type'First)
-   is begin
-      Finalize (Lexer);
-
-      Lexer.Source :=
-        (Label                     => String_Label,
-         File_Name                 => +"",
-         Buffer_Nominal_First_Byte => Base_Buffer_Pos (Input'First),
-         Buffer_Nominal_First_Char => Begin_Char,
-         Line_Nominal_First        => Begin_Line,
-         Buffer                    => new String'(Input),
-         User_Buffer               => False);
-
-      Reset (Lexer);
-   end Reset_With_String;
-
-   overriding procedure Reset_With_String_Access
-     (Lexer      : in out Instance;
-      Input      : in     Ada.Strings.Unbounded.String_Access;
-      File_Name  : in     Ada.Strings.Unbounded.Unbounded_String;
-      Begin_Char : in     Buffer_Pos       := Buffer_Pos'First;
-      Begin_Line : in     Line_Number_Type := Line_Number_Type'First)
-   is begin
-      Finalize (Lexer);
-
-      Lexer.Source :=
-        (Label       => String_Label,
-         File_Name   => File_Name,
-         Buffer_Nominal_First_Byte => Base_Buffer_Pos (Input'First),
-         Buffer_Nominal_First_Char => Begin_Char,
-         Line_Nominal_First        => Begin_Line,
-         Buffer      => Input,
-         User_Buffer => True);
-
-      Reset (Lexer);
-   end Reset_With_String_Access;
-
-   overriding procedure Reset_With_File
-     (Lexer          : in out Instance;
-      File_Name      : in     String;
-      Begin_Byte_Pos : in     Buffer_Pos       := Invalid_Buffer_Pos;
-      End_Byte_Pos   : in     Buffer_Pos       := Invalid_Buffer_Pos;
-      Begin_Char     : in     Buffer_Pos       := Buffer_Pos'First;
-      Begin_Line     : in     Line_Number_Type := Line_Number_Type'First)
-   is
-      pragma Unreferenced (File_Name, Begin_Byte_Pos, End_Byte_Pos, Begin_Char, Begin_Line);
-   begin
-      Finalize (Lexer);
-
-      raise SAL.Not_Implemented;
-   end Reset_With_File;
-
-   overriding procedure Reset
-     (Lexer : in out Instance)
-   is begin
-      Lexer.Lexeme_Head := Lexer.Source.Buffer'First;
-      Lexer.Lexeme_Tail := Lexer.Source.Buffer'First - 1;
-      Lexer.ID          := Invalid_Token_ID;
-      Lexer.Buffer_Head := Lexer.Source.Buffer'First;
-   end Reset;
-
-   overriding function Find_Next
-     (Lexer : in out Instance;
-      Token :    out Base_Token)
-     return Boolean
-   is begin
-      loop
-         if not Find_Best_Match (Lexer) then
-            if Lexer.Buffer_Head > Lexer.Source.Buffer'Last then
-               raise Syntax_Error with "Unrecognized EOF";
-            else
-               raise Syntax_Error with "Unrecognized character '" & Lexer.Source.Buffer (Lexer.Buffer_Head) & "'";
-            end if;
-         end if;
-
-         exit when Lexer.Syntax (Lexer.ID).Report;
-
-      end loop;
-
-      Token :=
-        (ID          => Lexer.ID,
-         Tree_Index  => Invalid_Node_Index,
-         Byte_Region => (Buffer_Pos (Lexer.Lexeme_Head), Buffer_Pos (Lexer.Lexeme_Tail)),
-         Line        => Invalid_Line_Number,
-         Column      => Ada.Text_IO.Count (Lexer.Lexeme_Head),
-         Char_Region => (Buffer_Pos (Lexer.Lexeme_Head), Buffer_Pos (Lexer.Lexeme_Tail)));
-
-      return False;
-   end Find_Next;
-
-   overriding function Buffer_Text (Lexer : in Instance; Byte_Region : in Buffer_Region) return String
-   is begin
-      return Lexer.Source.Buffer (Integer (Byte_Region.First) .. Integer (Byte_Region.Last));
-   end Buffer_Text;
-
-end WisiToken.Lexer.Regexp;
diff --git a/wisitoken-lexer-regexp.ads b/wisitoken-lexer-regexp.ads
deleted file mode 100644
index da55448c29..0000000000
--- a/wisitoken-lexer-regexp.ads
+++ /dev/null
@@ -1,114 +0,0 @@
---  Abstract:
---
---  WisiToken lexer using compiled regular expressions interpreted at runtime.
---
---  This is slower, but easier to use, than the Aflex lexer; it is
---  used in most of the WisiToken unit tests. Since it uses regexp, it
---  is easy to convert to an Aflex lexer.
---
---  Copyright (C) 2015, 2017 - 2019 Free Software Foundation, Inc.
---
---  This file is part of the WisiToken package.
---
---  The WisiToken package is free software; you can redistribute it
---  and/or modify it under the terms of the GNU General Public License
---  as published by the Free Software Foundation; either version 3, or
---  (at your option) any later version. The WisiToken package is
---  distributed in the hope that it will be useful, but WITHOUT ANY
---  WARRANTY; without even the implied warranty of MERCHANTABILITY or
---  FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
---  License for more details. You should have received a copy of the
---  GNU General Public License distributed with the WisiToken package;
---  see file GPL.txt. If not, write to the Free Software Foundation,
---  59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
---
---  As a special exception, if other files instantiate generics from
---  this unit, or you link this unit with other files to produce an
---  executable, this unit does not by itself cause the resulting
---  executable to be covered by the GNU General Public License. This
---  exception does not however invalidate any other reasons why the
---  executable file might be covered by the GNU Public License.
-
-pragma License (Modified_GPL);
-
-with Ada.Unchecked_Deallocation;
-with WisiToken.Regexp;
-package WisiToken.Lexer.Regexp is
-
-   type Syntax_Item is record
-      Regexp : WisiToken.Regexp.Regexp;
-      Report : Boolean;
-   end record;
-
-   function Get
-     (Regexp         : in String;
-      Case_Sensitive : in Boolean := True;
-      Report         : in Boolean := True)
-     return Syntax_Item;
-   --  Compiles Regexp with Case_Sensitive.
-
-   type Syntax is array (Token_ID range <>) of Syntax_Item;
-
-   type Instance
-     (Descriptor    : not null access constant WisiToken.Descriptor;
-      Last_Terminal : Token_ID)
-     is new WisiToken.Lexer.Instance with private;
-
-   function New_Lexer
-     (Descriptor : not null access constant WisiToken.Descriptor;
-      Syntax     : in              WisiToken.Lexer.Regexp.Syntax)
-     return WisiToken.Lexer.Handle;
-
-   overriding procedure Finalize (Object : in out Instance);
-   overriding procedure Reset_With_String
-     (Lexer      : in out Instance;
-      Input      : in     String;
-      Begin_Char : in     Buffer_Pos       := Buffer_Pos'First;
-      Begin_Line : in     Line_Number_Type := Line_Number_Type'First);
-   overriding procedure Reset_With_String_Access
-     (Lexer      : in out Instance;
-      Input      : in     Ada.Strings.Unbounded.String_Access;
-      File_Name  : in     Ada.Strings.Unbounded.Unbounded_String;
-      Begin_Char : in     Buffer_Pos       := Buffer_Pos'First;
-      Begin_Line : in     Line_Number_Type := Line_Number_Type'First);
-   overriding procedure Reset_With_File
-     (Lexer          : in out Instance;
-      File_Name      : in     String;
-      Begin_Byte_Pos : in     Buffer_Pos       := Invalid_Buffer_Pos;
-      End_Byte_Pos   : in     Buffer_Pos       := Invalid_Buffer_Pos;
-      Begin_Char     : in     Buffer_Pos       := Buffer_Pos'First;
-      Begin_Line     : in     Line_Number_Type := Line_Number_Type'First);
-   overriding procedure Reset (Lexer : in out Instance);
-
-   overriding procedure Discard_Rest_Of_Input (Lexer : in out Instance) is null;
-
-   overriding function Find_Next
-     (Lexer : in out Instance;
-      Token :    out Base_Token)
-     return Boolean;
-
-   overriding function Buffer_Text (Lexer : in Instance; Byte_Region : in Buffer_Region) return String;
-
-   overriding function First (Lexer : in Instance) return Boolean is (False);
-
-   overriding function File_Name (Lexer : in Instance) return String is ("");
-
-private
-
-   type String_Access is access String;
-   procedure Free is new Ada.Unchecked_Deallocation (String, String_Access);
-
-   type Instance
-     (Descriptor    : not null access constant WisiToken.Descriptor;
-      Last_Terminal : Token_ID)
-     is new WisiToken.Lexer.Instance (Descriptor => Descriptor) with
-   record
-      ID          : Token_ID; --  last token read by find_next
-      Syntax      : WisiToken.Lexer.Regexp.Syntax (Token_ID'First .. Last_Terminal);
-      Source      : Lexer.Source;
-      Buffer_Head : Integer;
-      Lexeme_Head : Integer;
-      Lexeme_Tail : Integer;
-   end record;
-
-end WisiToken.Lexer.Regexp;
diff --git a/wisitoken-lexer.adb b/wisitoken-lexer.adb
index f73d517f45..54f559eeee 100644
--- a/wisitoken-lexer.adb
+++ b/wisitoken-lexer.adb
@@ -2,7 +2,7 @@
 --
 --  See spec.
 --
---  Copyright (C) 2017 - 2019 Free Software Foundation, Inc.
+--  Copyright (C) 2017 - 2022 Free Software Foundation, Inc.
 --
 --  This library is free software;  you can redistribute it and/or modify it
 --  under terms of the  GNU General Public License  as published by the Free
@@ -20,6 +20,364 @@ pragma License (Modified_GPL);
 with GNAT.Strings;
 package body WisiToken.Lexer is
 
+   function Image
+     (Item       : in Token;
+      Descriptor : in WisiToken.Descriptor)
+     return String
+   is
+      ID_Image : constant String := WisiToken.Image (Item.ID, Descriptor);
+   begin
+      if Item.Char_Region = Null_Buffer_Region then
+         return "(" & ID_Image & ")";
+
+      else
+         return "(" & ID_Image & ", " & Image (Item.Char_Region) &
+           (if Item.ID = Descriptor.New_Line_ID
+            then ", " & Image (Item.Line_Region)
+            else "") & ")";
+      end if;
+   end Image;
+
+   function Full_Image
+     (Item       : in Token;
+      Descriptor : in WisiToken.Descriptor)
+     return String
+   is begin
+      return "(" & Image (Item.ID, Descriptor) & ", " &
+        Image (Item.Byte_Region) & ", " &
+        Image (Item.Char_Region) & ", " &
+        Image (Item.Line_Region) & ")";
+   end Full_Image;
+
+   procedure Shift
+     (Token       : in out Lexer.Token;
+      Shift_Bytes : in     Base_Buffer_Pos;
+      Shift_Chars : in     Base_Buffer_Pos;
+      Shift_Lines : in     Base_Line_Number_Type)
+   is begin
+      Token.Byte_Region := @ + Shift_Bytes;
+      Token.Char_Region := @ + Shift_Chars;
+      Token.Line_Region := @ + Shift_Lines;
+   end Shift;
+
+   function To_String (Item : in Recover_Characters) return String
+   is begin
+      for I in Item'Range loop
+         if Item (I) = ASCII.NUL then
+            return Item (Item'First .. I - 1);
+         end if;
+      end loop;
+      return Item;
+   end To_String;
+
+   function Column (Token : in Lexer.Token; Line_Begin_Char_Pos : in Buffer_Pos) return Ada.Text_IO.Count
+   is begin
+      if Token.Line_Region.First = 1 then
+         return Ada.Text_IO.Count (Token.Char_Region.First);
+
+      elsif Line_Begin_Char_Pos = Invalid_Buffer_Pos then
+         return 0;
+
+      else
+         return Ada.Text_IO.Count (Token.Char_Region.First - Line_Begin_Char_Pos);
+      end if;
+   end Column;
+
+   procedure Begin_Pos
+     (Object     : in     Source;
+      Begin_Byte :    out Buffer_Pos;
+      Begin_Char :    out Buffer_Pos;
+      Begin_Line :    out Line_Number_Type)
+   is begin
+      Begin_Byte := Object.Buffer_Nominal_First_Byte;
+      Begin_Char := Object.Buffer_Nominal_First_Char;
+      Begin_Line := Object.Line_Nominal_First;
+   end Begin_Pos;
+
+   function Has_Source (Lexer : access constant Instance) return Boolean
+   is begin
+      return Has_Source (Lexer.Source);
+   end Has_Source;
+
+   function Buffer_Text (Lexer : in Instance; Byte_Region : in WisiToken.Buffer_Region) return String
+   is
+      First : constant Integer := Integer
+        (Byte_Region.First - Lexer.Source.Buffer_Nominal_First_Byte + Buffer_Pos'First);
+      Last  : constant Integer := Integer
+        (Byte_Region.Last - Lexer.Source.Buffer_Nominal_First_Byte + Buffer_Pos'First);
+   begin
+      return String (Buffer (Lexer.Source) (First .. Last));
+   end Buffer_Text;
+
+   function Buffer_Region_Byte (Lexer : in Instance) return WisiToken.Buffer_Region
+   is begin
+      return Buffer_Region_Byte (Lexer.Source);
+   end Buffer_Region_Byte;
+
+   function File_Name (Lexer : in Instance) return String
+   is begin
+      return File_Name (Lexer.Source);
+   end File_Name;
+
+   procedure Begin_Pos
+     (Lexer      : in     Instance;
+      Begin_Byte :    out Buffer_Pos;
+      Begin_Char :    out Buffer_Pos;
+      Begin_Line :    out Line_Number_Type)
+   is begin
+      Begin_Pos (Lexer.Source, Begin_Byte, Begin_Char, Begin_Line);
+   end Begin_Pos;
+
+   function Line_At_Byte_Pos
+     (Lexer    : in Instance;
+      Token    : in WisiToken.Lexer.Token;
+      Byte_Pos : in Buffer_Pos)
+     return Line_Number_Type
+   is begin
+      return Line_At_Byte_Pos
+        (Instance'Class (Lexer), Token.Byte_Region, Byte_Pos, First_Line => Token.Line_Region.First);
+   end Line_At_Byte_Pos;
+
+   function Find_New_Line
+     (Lexer  : in Instance;
+      Region : in Buffer_Region)
+     return Base_Buffer_Pos
+   is begin
+      return Find_New_Line (Lexer.Source, Region);
+   end Find_New_Line;
+
+   function Contains_New_Line
+     (Lexer       : in Instance;
+      Byte_Region : in Buffer_Region;
+      First       : in Boolean)
+     return Base_Buffer_Pos
+   is begin
+      return Contains_New_Line (Lexer.Source, Byte_Region, First);
+   end Contains_New_Line;
+
+   function Contains_New_Line
+     (Lexer       : in Instance;
+      ID          : in Token_ID;
+      Byte_Region : in Buffer_Region;
+      First       : in Boolean)
+     return Base_Buffer_Pos
+   is begin
+      if Instance'Class (Lexer).Can_Contain_New_Line (ID) then
+         return Contains_New_Line (Lexer.Source, Byte_Region, First);
+      else
+         return Invalid_Buffer_Pos;
+      end if;
+   end Contains_New_Line;
+
+   function New_Line_Count
+     (Lexer : in Instance;
+      Item  : in Token_Arrays.Vector)
+     return Base_Line_Number_Type
+   is
+      Result : Base_Line_Number_Type := 0;
+      Pos    : Base_Buffer_Pos;
+   begin
+      for Token of Item loop
+         Pos := Lexer.Contains_New_Line (Token.ID, Token.Byte_Region, First => True);
+         if Pos /= Invalid_Buffer_Pos then
+            Result := @ + New_Line_Count (Lexer.Source, Token.Byte_Region);
+         end if;
+      end loop;
+      return Result;
+   end New_Line_Count;
+
+   function Find_New_Line
+     (Source : in WisiToken.Lexer.Source;
+      Start  : in Buffer_Pos)
+     return Buffer_Pos
+   is begin
+      for I in To_Buffer_Index (Source, Start) .. Source.Buffer'Last loop
+         if Source.Buffer (I) = ASCII.LF then
+            return From_Buffer_Index (Source, I);
+         end if;
+      end loop;
+      return From_Buffer_Index (Source, Source.Buffer'Last);
+   end Find_New_Line;
+
+   function Find_String_Or_New_Line
+     (Source : in WisiToken.Lexer.Source;
+      Start  : in Buffer_Pos;
+      Item   : in String)
+     return Buffer_Pos
+   is begin
+      for I in To_Buffer_Index (Source, Start) .. Source.Buffer'Last loop
+         if Source.Buffer (I) = ASCII.LF or
+           ((I + Item'Length <= Source.Buffer'Last) and then
+              Source.Buffer (I .. I + Item'Length - 1) = Item)
+         then
+            return From_Buffer_Index (Source, I);
+         end if;
+      end loop;
+      return From_Buffer_Index (Source, Source.Buffer'Last); --  Implicit new_line at EOI
+   end Find_String_Or_New_Line;
+
+   function Find_String
+     (Source : in WisiToken.Lexer.Source;
+      Start  : in Buffer_Pos;
+      Item   : in String)
+     return Buffer_Pos
+   is begin
+      for I in To_Buffer_Index (Source, Start) .. Source.Buffer'Last loop
+         if (I + Item'Length <= Source.Buffer'Last) and then
+           Source.Buffer (I .. I + Item'Length - 1) = Item
+         then
+            return From_Buffer_Index (Source, I);
+         end if;
+      end loop;
+      return From_Buffer_Index (Source, Source.Buffer'Last); --  Implicit delimiter at EOI
+   end Find_String;
+
+   function Find_New_Line
+     (Source : in WisiToken.Lexer.Source;
+      Region : in Buffer_Region)
+     return Base_Buffer_Pos
+   is begin
+      for I in To_Buffer_Index (Source, Region.First) .. To_Buffer_Index (Source, Region.Last) loop
+         if Source.Buffer (I) = ASCII.LF then
+            return From_Buffer_Index (Source, I);
+         end if;
+      end loop;
+      return Invalid_Buffer_Pos;
+   end Find_New_Line;
+
+   function Find_String_Or_New_Line
+     (Source : in WisiToken.Lexer.Source;
+      Region : in Buffer_Region;
+      Item   : in String)
+     return Base_Buffer_Pos
+   is
+      Index_Last : constant Integer := To_Buffer_Index (Source, Region.Last);
+   begin
+      for I in To_Buffer_Index (Source, Region.First) .. Index_Last loop
+         if Source.Buffer (I) = ASCII.LF or
+           ((I + Item'Length - 1 <= Index_Last) and then
+              Source.Buffer (I .. I + Item'Length - 1) = Item)
+         then
+            return From_Buffer_Index (Source, I);
+         end if;
+      end loop;
+      return Invalid_Buffer_Pos;
+   end Find_String_Or_New_Line;
+
+   function Find_String
+     (Source : in WisiToken.Lexer.Source;
+      Region : in Buffer_Region;
+      Item   : in String)
+     return Base_Buffer_Pos
+   is
+      Index_Last : constant Integer := To_Buffer_Index (Source, Region.Last);
+   begin
+      for I in To_Buffer_Index (Source, Region.First) .. Index_Last loop
+         if (I + Item'Length <= Index_Last) and then
+           Source.Buffer (I .. I + Item'Length - 1) = Item
+         then
+            return From_Buffer_Index (Source, I);
+         end if;
+      end loop;
+      return Invalid_Buffer_Pos;
+   end Find_String;
+
+   function Line_Begin_Char_Pos
+     (Source : in WisiToken.Lexer.Source;
+      Token  : in WisiToken.Lexer.Token;
+      Line   : in Line_Number_Type)
+     return Base_Buffer_Pos
+   is
+      Found_Line : Base_Line_Number_Type := Token.Line_Region.First;
+   begin
+      for I in To_Buffer_Index (Source, Token.Byte_Region.First) ..
+        To_Buffer_Index (Source, Token.Byte_Region.Last)
+      loop
+         if Source.Buffer (I) = ASCII.LF then
+            Found_Line := @ + 1;
+            if Found_Line = Line then
+               return Base_Buffer_Pos (I) + 1;
+            end if;
+         end if;
+      end loop;
+      return Invalid_Buffer_Pos;
+   end Line_Begin_Char_Pos;
+
+   function Get_Byte
+     (Source : in WisiToken.Lexer.Source;
+      Pos    : in Integer)
+     return Character
+   is begin
+      return
+        (case Source.Label is
+         when String_Label => Source.Buffer (Pos),
+         when File_Label => GNATCOLL.Mmap.Data (Source.Region)(Pos));
+   end Get_Byte;
+
+   function Line_At_Byte_Pos
+     (Source      : in WisiToken.Lexer.Source;
+      Byte_Region : in WisiToken.Buffer_Region;
+      Byte_Pos    : in Buffer_Pos;
+      First_Line  : in Line_Number_Type)
+     return Line_Number_Type
+   is
+      Index_Pos : constant Integer := To_Buffer_Index (Source, Byte_Pos);
+      Found_Line : Base_Line_Number_Type := First_Line;
+   begin
+      for I in To_Buffer_Index (Source, Byte_Region.First) ..
+        To_Buffer_Index (Source, Byte_Region.Last)
+      loop
+         if Get_Byte (Source, I) = ASCII.LF then
+            Found_Line := @ + 1;
+         end if;
+
+         if I = Index_Pos then
+            return Found_Line;
+         end if;
+      end loop;
+      raise SAL.Programmer_Error; -- precondition false.
+   end Line_At_Byte_Pos;
+
+   function Contains_New_Line
+     (Source      : in WisiToken.Lexer.Source;
+      Byte_Region : in Buffer_Region;
+      First       : in Boolean)
+     return Base_Buffer_Pos
+   is
+      First_Index : constant Integer := To_Buffer_Index (Source, Byte_Region.First);
+   begin
+      if First then
+         for I in First_Index .. To_Buffer_Index (Source, Byte_Region.Last) loop
+            if Get_Byte (Source, I) = ASCII.LF then
+               return Byte_Region.First + Base_Buffer_Pos (I - First_Index);
+            end if;
+         end loop;
+      else
+         for I in reverse First_Index .. To_Buffer_Index (Source, Byte_Region.Last) loop
+            if Get_Byte (Source, I) = ASCII.LF then
+               return Byte_Region.First + Base_Buffer_Pos (I - First_Index);
+            end if;
+         end loop;
+      end if;
+      return Invalid_Buffer_Pos;
+   end Contains_New_Line;
+
+   function New_Line_Count
+     (Source      : in WisiToken.Lexer.Source;
+      Byte_Region : in Buffer_Region)
+     return Base_Line_Number_Type
+   is begin
+      return Count : Base_Line_Number_Type := 0 do
+         for I in To_Buffer_Index (Source, Byte_Region.First) ..
+           To_Buffer_Index (Source, Byte_Region.Last)
+         loop
+            if Get_Byte (Source, I) = ASCII.LF then
+               Count := @ + 1;
+            end if;
+         end loop;
+      end return;
+   end New_Line_Count;
+
    procedure Finalize (Object : in out Source)
    is begin
       case Object.Label is
@@ -32,8 +390,38 @@ package body WisiToken.Lexer is
          GNATCOLL.Mmap.Free (Object.Region);
          GNATCOLL.Mmap.Close (Object.File);
       end case;
+
+      Object.Buffer_Nominal_First_Byte := Buffer_Pos'First;
+      Object.Buffer_Nominal_First_Char := Buffer_Pos'First;
+      Object.Line_Nominal_First        := Line_Number_Type'First;
    end Finalize;
 
+   function Has_Source (Object : in Source) return Boolean
+   is
+      use all type Ada.Strings.Unbounded.String_Access;
+   begin
+      case Object.Label is
+      when String_Label =>
+         return Object.Buffer /= null;
+
+      when File_Label =>
+         --  Mmap doesn't provide "Is_Open".
+         return Object.Buffer_Nominal_First_Byte /= Invalid_Buffer_Pos;
+      end case;
+   end Has_Source;
+
+   function Buffer_Region_Byte (Object : in Source) return Buffer_Region
+   is begin
+      case Object.Label is
+      when String_Label =>
+         return (Base_Buffer_Pos (Object.Buffer'First), Base_Buffer_Pos (Object.Buffer'Last));
+
+      when File_Label =>
+         return (Object.Buffer_Nominal_First_Byte,
+                 Object.Buffer_Nominal_First_Byte + Base_Buffer_Pos (Object.Buffer_Last));
+      end case;
+   end Buffer_Region_Byte;
+
    function Buffer (Source : in Lexer.Source) return GNATCOLL.Mmap.Str_Access
    is
       use GNATCOLL.Mmap;
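
wisitoken-lexer.adb above adds Image, Shift, Column and the new-line scanning
helpers for the Lexer.Token record declared in wisitoken-lexer.ads below. A
small sketch of Shift with invented region values (in real use the token
comes from Find_Next):

--  Sketch only; field values are invented.
with Ada.Text_IO;
with WisiToken.Lexer;
procedure Shift_Demo is
   use WisiToken;
   Tok : Lexer.Token :=
     (ID          => 5,                          --  hypothetical token id
      Byte_Region => (First => 10, Last => 14),
      Char_Region => (First => 10, Last => 14),
      Line_Region => (First => 2, Last => 2));
begin
   --  Suppose an edit inserted 3 bytes/characters and 1 new_line before Tok.
   Lexer.Shift (Tok, Shift_Bytes => 3, Shift_Chars => 3, Shift_Lines => 1);
   Ada.Text_IO.Put_Line ("byte region now " & Image (Tok.Byte_Region));
   Ada.Text_IO.Put_Line ("line region now " & Image (Tok.Line_Region));
end Shift_Demo;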
diff --git a/wisitoken-lexer.ads b/wisitoken-lexer.ads
index 3de31c7896..1d5d1ecbfa 100644
--- a/wisitoken-lexer.ads
+++ b/wisitoken-lexer.ads
@@ -2,7 +2,7 @@
 --
 --  An abstract lexer interface.
 --
---  Copyright (C) 2014 - 2015, 2017 - 2019 Free Software Foundation, Inc.
+--  Copyright (C) 2014 - 2015, 2017 - 2022 Free Software Foundation, Inc.
 --
 --  This file is part of the WisiToken package.
 --
@@ -35,122 +35,450 @@ with Ada.Strings.Unbounded;
 with GNATCOLL.Mmap;
 package WisiToken.Lexer is
 
+   type Token is record
+      --  Information provided by the lexer.
+
+      ID : Token_ID := Invalid_Token_ID;
+
+      Byte_Region : Buffer_Region := Null_Buffer_Region;
+      --  Index into the Lexer buffer for the token text.
+
+      Char_Region : Buffer_Region := Null_Buffer_Region;
+      --  Character position, useful for finding the token location in Emacs
+      --  buffers.
+
+      Line_Region : WisiToken.Line_Region := Null_Line_Region;
+      --  SOI, EOI tokens have 0 length for Byte_Region and Char_Region, and
+      --  0 Line_Length_Count for Line_Region.
+      --
+      --  SOI.Byte_Region.First = first byte of first character in text
+      --  SOI.Char_Region.First = first character in text
+      --  SOI.Line_Region.First = first line in text,
+      --
+      --  SOI may not be Buffer_Pos'First and Line_Number_Type'First if parsing part of a file.
+      --
+      --  EOI.Byte_Region.First = Byte position of EOI character; if not
+      --  actually present, one after the last byte in the text.
+      --  EOI.Char_Region.First = Character position of EOI character.
+      --  EOI.Line_Region.First = last line in file (after final new_line).
+   end record;
+
+   function Column (Token : in Lexer.Token; Line_Begin_Char_Pos : in Buffer_Pos) return Ada.Text_IO.Count;
+
+   function Image
+     (Item       : in Token;
+      Descriptor : in WisiToken.Descriptor)
+     return String;
+   --  ID, Char_Region; Line_Region if New_Line
+
+   function Full_Image
+     (Item       : in Token;
+      Descriptor : in WisiToken.Descriptor)
+     return String;
+   --  All fields.
+
+   Invalid_Token : constant Token := (others => <>);
+
+   procedure Shift
+     (Token       : in out Lexer.Token;
+      Shift_Bytes : in     Base_Buffer_Pos;
+      Shift_Chars : in     Base_Buffer_Pos;
+      Shift_Lines : in     Base_Line_Number_Type);
+   --  Add Shift_* to corresponding regions.
+
+   package Token_Arrays is new SAL.Gen_Unbounded_Definite_Vectors
+     (Positive_Index_Type, Token, Default_Element => (others => <>));
+
+   function Image is new Token_Arrays.Gen_Image_Aux (WisiToken.Descriptor, Trimmed_Image, Image);
+   function Full_Image is new Token_Arrays.Gen_Image_Aux (WisiToken.Descriptor, Trimmed_Image, Full_Image);
+
+   subtype Recover_Characters is String (1 .. 4);
+
    type Error is record
       Char_Pos : Buffer_Pos := Invalid_Buffer_Pos;
       --  Character at that position is not recognized as part of a token.
 
-      Recover_Token : Base_Token_Index := Invalid_Token_Index;
-      --  If the error was corrected by inserting a missing quote, the token
-      --  (in shared parser Terminals) that was returned.
-
-      Recover_Char : String (1 .. 4) := (others => ASCII.NUL);
+      Recover_Char : Recover_Characters := (others => ASCII.NUL);
       --  If the error was corrected, the character (in UTF-8 encoding) that
       --  was inserted; unused trailing bytes set to ASCII.NUL. Otherwise,
       --  all ASCII.Nul.
    end record;
 
+   function To_String (Item : in Recover_Characters) return String;
+   --  Item must be Recover_Char from an error; delete the trailing NULs.
+
    package Error_Lists is new Ada.Containers.Doubly_Linked_Lists (Error);
 
-   type Instance (Descriptor  : not null access constant WisiToken.Descriptor)
+   type Source_Labels is (String_Label, File_Label);
+
+   type Source (Label : Source_Labels := Source_Labels'First) is private;
+
+   type Instance
    is abstract new Ada.Finalization.Limited_Controlled with record
-      Errors : Error_Lists.List;
+      Trace      : WisiToken.Trace_Access;
+      Descriptor : WisiToken.Descriptor_Access_Constant;
+      Errors     : Error_Lists.List;
+      Source     : Lexer.Source;
    end record;
 
    subtype Class is Instance'Class;
 
    type Handle is access all Class;
 
+   function Has_Source (Lexer : access constant Instance) return Boolean;
+   --  True if one of Reset_* has been called; lexer has source to process.
+
+   procedure Set_Verbosity
+     (Lexer     : in Instance;
+      Verbosity : in Integer)
+   is null;
+
    procedure Reset_With_String
      (Lexer      : in out Instance;
       Input      : in     String;
       Begin_Char : in     Buffer_Pos       := Buffer_Pos'First;
       Begin_Line : in     Line_Number_Type := Line_Number_Type'First)
-     is abstract;
+   is abstract
+   with Post'Class => Lexer.Has_Source;
    --  Reset Lexer to start a new parse, reading from Input.
 
    procedure Reset_With_String_Access
      (Lexer      : in out Instance;
       Input      : in     Ada.Strings.Unbounded.String_Access;
+      Input_Last : in     Integer;
       File_Name  : in     Ada.Strings.Unbounded.Unbounded_String;
       Begin_Char : in     Buffer_Pos       := Buffer_Pos'First;
       Begin_Line : in     Line_Number_Type := Line_Number_Type'First)
-     is abstract;
-   --  Reset Lexer to start a new parse, reading from Input. File_Name is
-   --  used for error messages.
+   is abstract
+   with Post'Class => Lexer.Has_Source;
+   --  Reset Lexer to start a new parse, reading from Input (Input'First
+   --  .. Input_Last). Input'First is Begin_Byte. File_Name is used for
+   --  error messages.
 
    procedure Reset_With_File
      (Lexer      : in out Instance;
       File_Name  : in     String;
-      Begin_Pos  : in     Buffer_Pos       := Invalid_Buffer_Pos;
-      End_Pos    : in     Buffer_Pos       := Invalid_Buffer_Pos;
+      Begin_Byte : in     Buffer_Pos       := Invalid_Buffer_Pos;
+      End_Byte   : in     Buffer_Pos       := Invalid_Buffer_Pos;
       Begin_Char : in     Buffer_Pos       := Buffer_Pos'First;
       Begin_Line : in     Line_Number_Type := Line_Number_Type'First)
-     is abstract;
+   is abstract
+   with Post'Class => Lexer.Has_Source;
    --  Reset Lexer to start a new parse, reading from File_Name. If
    --  Begin_Byte, End_Byte /= Invalid_Buffer_Pos, only parse that portion
    --  of the file.
    --
    --  Raises Ada.IO_Exceptions.Name_Error if File_Name cannot be opened.
 
-   procedure Reset (Lexer : in out Instance) is abstract;
+   procedure Reset (Lexer : in out Instance) is abstract
+   with Pre'Class => Lexer.Has_Source,
+     Post'Class => Lexer.Has_Source;
    --  Reset Lexer, read from previous source.
 
    procedure Discard_Rest_Of_Input (Lexer : in out Instance) is abstract;
    --  If reading input from a stream, abort reading (or force it to
    --  complete); Find_Next will not be called before another Reset.
 
-   function Buffer_Text (Lexer : in Instance; Byte_Region : in Buffer_Region) 
return String is abstract;
+   function Buffer_Region_Byte (Lexer : in Instance) return Buffer_Region;
+
+   function Buffer_Text (Lexer : in Instance; Byte_Region : in Buffer_Region) 
return String;
    --  Return text from internal buffer, given region in byte position.
 
-   function First (Lexer : in Instance) return Boolean is abstract;
-   --  True if most recent token is first on a line.
+   procedure Set_Position
+     (Lexer         : in out Instance;
+      Byte_Position : in     Buffer_Pos;
+      Char_Position : in     Buffer_Pos;
+      Line          : in     Line_Number_Type)
+     is abstract;
+   --  Set the current position in the source buffer; Find_Next will
+   --  start there.
 
    function Find_Next
      (Lexer : in out Instance;
-      Token :    out Base_Token)
-     return Boolean is abstract;
-   --  Set Token to the next token from the input stream.
-   --
-   --  If there is a recovered error, adds an entry to Lexer.Errors (with
-   --  Recover_Token invalid). Unrecognized characters are skipped;
-   --  missing quotes are inserted at the found quote. There can be more
-   --  than one error entry for one call to Find_Next, if several
-   --  unrecognized characters are skipped. If the recovery inserted a
-   --  missing quote, it is the last entry in Errors, the returned token
-   --  is an empty string literal, and Find_Next returns True.
+      Token :    out WisiToken.Lexer.Token)
+     return Natural is abstract;
+   --  Set Token to the next token from the input stream, return number
+   --  of lexer errors encountered.
    --
-   --  If there is a non-recoverable error, raises Fatal_Error with an
-   --  appropriate message.
-   --
-   --  Otherwise returns False.
+   --  For each lexer error, adds an entry to Lexer.Errors. Unrecognized
+   --  characters are skipped; missing quotes are inserted at the found
+   --  quote. If the recovery inserted a missing quote, it is the last
+   --  entry in Errors, the returned token is an empty string literal.
    --
    --  Token.Char_Region, Token.Byte_Region are the character and byte
-   --  position of the start and end of token, in the internal buffer,
+   --  position of the start and end of Token, in the internal buffer,
    --  1-indexed. Char_Region and Byte_Region differ when text is UTF-8
    --  or other multi-byte encoding, and when line endings are two byte.
    --
-   --  Token.Line is the line number in which recent token starts.
+   --  Token.Line_Region is the line number at the start and end of the token.
    --  If the underlying text feeder does not support the notion of
-   --  'line', returns Invalid_Line_Number.
+   --  'line', this is Null_Line_Region.
    --
-   --  Token.Column is the column number of the start of the token, 1
-   --  indexed. If the underlying text feeder does not support the notion
-   --  of 'line', returns byte position in internal buffer.
+   --  test_incremental.adb Lexer_Errors_05 has multiple lexer errors on
+   --  one token.
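+   --
+   --  A minimal usage sketch (illustrative; assumes My_Lexer is a
+   --  concrete lexer derived from Instance, and Token, Error_Count are
+   --  declared appropriately):
+   --
+   --     Error_Count := My_Lexer.Find_Next (Token);
+   --
+   --  An unrecognized character before the next token is skipped; it is
+   --  counted in the returned Error_Count and appended to
+   --  My_Lexer.Errors as an Error recording its Char_Pos.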
 
-   function File_Name (Lexer : in Instance) return String is abstract;
+   function File_Name (Lexer : in Instance) return String;
    --  Return input file name; empty string if there is no file.
 
-private
+   procedure Begin_Pos
+     (Lexer      : in     Instance;
+      Begin_Byte :    out Buffer_Pos;
+      Begin_Char :    out Buffer_Pos;
+      Begin_Line :    out Line_Number_Type)
+   with Pre => Lexer.Has_Source;
+   --  Return values from Reset*.
 
-   type Source_Labels is (String_Label, File_Label);
+   function Is_Block_Delimited
+     (Lexer : in Instance;
+      ID    : in Token_ID)
+     return Boolean
+   is abstract;
+   --  True if ID is a token that spans a region of text defined by
+   --  delimiters; a string, a comment, or some similar delimited text.
+   --
+   --  Incremental parse uses this and the following related functions to
+   --  determine how much text is affected by an edit that inserts or
+   --  deletes a delimiter.
+
+   function Same_Block_Delimiters
+     (Lexer : in Instance;
+      ID    : in Token_ID)
+     return Boolean
+   is abstract;
+   --  True if Is_Block_Delimited (ID) and the start and end delimiters
+   --  are the same (typically true for strings, false for comments).
+
+   function Escape_Delimiter_Doubled
+     (Lexer : in Instance;
+      ID    : in Token_ID)
+     return Boolean
+   is abstract
+   with Pre'Class => Lexer.Is_Block_Delimited (ID) and 
Lexer.Same_Block_Delimiters (ID);
+   --  True if a delimiter embedded in the token is escaped by doubling
+   --  it (like Ada strings).
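+   --
+   --  For an Ada-like lexer (illustrative only): a string literal is
+   --  block delimited, its start and end delimiters are the same ('"'),
+   --  and an embedded delimiter is escaped by doubling it; a "--"
+   --  comment is block delimited with different start and end
+   --  delimiters (the "--" and the new-line).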
+
+   function Start_Delimiter_Length
+     (Lexer : in Instance;
+      ID    : in Token_ID)
+     return Integer
+   is abstract
+   with Pre'Class => Lexer.Is_Block_Delimited (ID);
+   --  Return the length in bytes of the start delimiter.
+
+   function End_Delimiter_Length
+     (Lexer : in Instance;
+      ID    : in Token_ID)
+     return Integer
+   is abstract
+   with Pre'Class => Lexer.Is_Block_Delimited (ID);
+   --  Return the length in bytes of the end delimiter.
+
+   function New_Line_Is_End_Delimiter
+     (Lexer : in Instance;
+      ID    : in Token_ID)
+     return Boolean
+   is abstract
+   with Pre'Class => Is_Block_Delimited (Lexer, ID);
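+   --  True if the end delimiter for ID is a new-line (as for a comment
+   --  that ends at end of line).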
+
+   function Find_End_Delimiter
+     (Lexer       : in Instance;
+      ID          : in Token_ID;
+      Token_Start : in Buffer_Pos)
+     return Buffer_Pos
+   is abstract
+   with Pre'Class => Is_Block_Delimited (Lexer, ID);
+   --  Given the byte position of a start delimiter, return the byte
+   --  position of the corresponding end delimiter.
+   --
+   --  If no end delimiter is found, returns EOI position.
+
+   function Contains_End_Delimiter
+     (Lexer  : in Instance;
+      ID     : in Token_ID;
+      Region : in Buffer_Region)
+     return Base_Buffer_Pos
+   is abstract
+   with Pre'Class => Is_Block_Delimited (Lexer, ID);
+   --  If Region contains an end delimiter for ID, return the buffer
+   --  position of the start of that delimiter. Otherwise return
+   --  Invalid_Buffer_Pos. Does not check for matching or nested start.
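+   --
+   --  For example (illustrative): for a comment whose end delimiter is
+   --  a new-line, this returns the position of a new-line in Region, or
+   --  Invalid_Buffer_Pos if Region contains none.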
+
+   function Find_Scan_End
+     (Lexer       : in Instance;
+      ID          : in Token_ID;
+      Byte_Region : in Buffer_Region;
+      Inserted    : in Boolean;
+      Start       : in Boolean)
+     return Buffer_Pos
+   is abstract
+   with Pre'Class => Is_Block_Delimited (Lexer, ID);
+   --  If Inserted, a delimiter for ID (if Start, a start delimiter, else
+   --  an end delimiter) was inserted at Byte_Region.First, and
+   --  Byte_Region.Last is the previous end of the token, shifted to
+   --  match the current edited text.
+   --
+   --  If not Inserted, a delimiter was deleted. If Start, Byte_Region is
+   --  where in the current text to start searching for a start delimiter
+   --  (nominally the old start position). If not Start,
+   --
+   --  Text was either hidden in the new token, or exposed as code;
+   --  return the end of the buffer region that must be scanned by the
+   --  lexer.
+
+   function Find_New_Line
+     (Lexer  : in Instance;
+      Region : in Buffer_Region)
+     return Base_Buffer_Pos;
+   --  Returns Invalid_Buffer_Pos if not found in Region.
+
+   function Line_Begin_Char_Pos
+     (Lexer : in Instance;
+      Token : in WisiToken.Lexer.Token;
+      Line  : in Line_Number_Type)
+     return Base_Buffer_Pos
+   is abstract
+   with Pre'Class => Token.Line_Region.First <=  Line - 1 and 
Token.Line_Region.Last >= Line;
+   --  Return first char position on Line; Invalid_Buffer_Pos if Token
+   --  does not contain the new-line that starts Line.
+
+   function Line_At_Byte_Pos
+     (Lexer       : in Instance;
+      Byte_Region : in WisiToken.Buffer_Region;
+      Byte_Pos    : in Buffer_Pos;
+      First_Line  : in Line_Number_Type)
+     return Line_Number_Type
+   is abstract
+   with Pre'Class => Contains (Byte_Region, Byte_Pos);
+   --  Return line that contains Byte_Pos. If Byte_Pos is on a New_Line,
+   --  result is the line that the character ends. First_Line must be the
+   --  line number at Byte_Region.First.
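+   --
+   --  For example (illustrative, assuming buffer positions and line
+   --  numbers start at 1): in the text "ab" & ASCII.LF & "cd", byte 3
+   --  (the new-line) is on line 1, the line that character ends; byte 4
+   --  ('c') is on line 2.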
+
+   function Line_At_Byte_Pos
+     (Lexer    : in Instance;
+      Token    : in WisiToken.Lexer.Token;
+      Byte_Pos : in Buffer_Pos)
+     return Line_Number_Type
+   with Pre => Contains (Token.Byte_Region, Byte_Pos);
+   --  Return line that contains Byte_Pos. If Byte_Pos is on a New_Line,
+   --  result is the line that the character ends.
+
+   function Can_Contain_New_Line
+     (Lexer : in Instance;
+      ID    : in Token_ID)
+     return Boolean is abstract;
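+   --  True if tokens with this ID can contain a new-line; for example a
+   --  multi-line comment or string.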
+
+   function Contains_New_Line
+     (Lexer       : in Instance;
+      Byte_Region : in Buffer_Region;
+      First       : in Boolean)
+     return Base_Buffer_Pos;
+   --  Returns the first or last new_line in Byte_Region;
+   --  Invalid_Buffer_Pos if none.
+
+   function Contains_New_Line
+     (Lexer       : in Instance;
+      ID          : in Token_ID;
+      Byte_Region : in Buffer_Region;
+      First       : in Boolean)
+     return Base_Buffer_Pos;
+   --  If ID cannot contain a new_line, returns Invalid_Buffer_Pos.
+   --  Otherwise, finds the first or last new_line in Byte_Region (which
+   --  must be the token byte_region); Invalid_Buffer_Pos if none.
+
+   function New_Line_Count
+     (Lexer : in Instance;
+      Item  : in Token_Arrays.Vector)
+     return Base_Line_Number_Type;
+
+   function Terminated_By_New_Line
+     (Lexer : in Instance;
+      ID    : in Token_ID)
+     return Boolean is abstract;
+   --  True for tokens that can be terminated by new-line.
+   --
+   --  Returns false for comment-one-line; terminating that by a new_line
+   --  is actually an error.
+
+   function Buffer_Region_Byte (Object : in Source) return Buffer_Region;
+
+   function Find_New_Line
+     (Source : in WisiToken.Lexer.Source;
+      Start  : in Buffer_Pos)
+     return Buffer_Pos;
+
+   function Find_New_Line
+     (Source : in WisiToken.Lexer.Source;
+      Region : in Buffer_Region)
+     return Base_Buffer_Pos;
+   --  Returns Invalid_Buffer_Pos if not found in Region.
+
+   function Find_String_Or_New_Line
+     (Source : in WisiToken.Lexer.Source;
+      Start  : in Buffer_Pos;
+      Item   : in String)
+     return Buffer_Pos;
+   --  Returns last byte in Source if not found, for an implicit New_Line
+   --  at EOI.
+
+   function Find_String
+     (Source : in WisiToken.Lexer.Source;
+      Start  : in Buffer_Pos;
+      Item   : in String)
+     return Buffer_Pos;
+   --  Returns last byte in Source if not found, for an implicit delimiter
+   --  at EOI.
+
+   function Find_String_Or_New_Line
+     (Source : in WisiToken.Lexer.Source;
+      Region : in Buffer_Region;
+      Item   : in String)
+     return Base_Buffer_Pos;
+   --  Returns Invalid_Buffer_Pos if not found in Region.
+
+   function Find_String
+     (Source : in WisiToken.Lexer.Source;
+      Region : in Buffer_Region;
+      Item   : in String)
+     return Base_Buffer_Pos;
+   --  Returns Invalid_Buffer_Pos if not found in Region.
+
+   function Line_Begin_Char_Pos
+     (Source : in WisiToken.Lexer.Source;
+      Token  : in WisiToken.Lexer.Token;
+      Line   : in Line_Number_Type)
+     return Base_Buffer_Pos;
+   --  Implement Line_Begin_Char_Pos (Lexer ...)
+
+   function Line_At_Byte_Pos
+     (Source      : in WisiToken.Lexer.Source;
+      Byte_Region : in WisiToken.Buffer_Region;
+      Byte_Pos    : in Buffer_Pos;
+      First_Line  : in Line_Number_Type)
+     return Line_Number_Type;
+   --  Implement Line_At_Byte_Pos (Lexer ...)
+
+   function Contains_New_Line
+     (Source      : in WisiToken.Lexer.Source;
+      Byte_Region : in Buffer_Region;
+      First       : in Boolean)
+     return Base_Buffer_Pos;
+
+   function New_Line_Count
+     (Source      : in WisiToken.Lexer.Source;
+      Byte_Region : in Buffer_Region)
+     return Base_Line_Number_Type;
+
+private
 
    type Source (Label : Source_Labels := Source_Labels'First) is record
       File_Name : Ada.Strings.Unbounded.Unbounded_String;
       --  Not saved in Mapped_File, may be empty for String_Label
 
-      Buffer_Nominal_First_Byte : Buffer_Pos;
-      Buffer_Nominal_First_Char : Buffer_Pos;
-      Line_Nominal_First        : Line_Number_Type;
+      Buffer_Nominal_First_Byte : Buffer_Pos       := Buffer_Pos'First;
+      Buffer_Nominal_First_Char : Buffer_Pos       := Buffer_Pos'First;
+      Line_Nominal_First        : Line_Number_Type := Line_Number_Type'First;
+      Buffer_Last               : Natural          := 0; -- allow empty input 
string
 
       case Label is
       when String_Label =>
@@ -160,12 +488,12 @@ private
          --  it. Otherwise we must deallocate it.
 
          --  Buffer_Nominal_First, Line_Nominal_First are 1.
+
       when File_Label =>
 
          --  The input is memory mapped from the following, which must be 
closed:
          File        : GNATCOLL.Mmap.Mapped_File;
          Region      : GNATCOLL.Mmap.Mapped_Region;
-         Buffer_Last : Positive;
          --  Region always has first character at offset 0.
 
          --  Buffer_Nominal_First is Begin_Pos. Line_Nominal_First is
@@ -175,13 +503,35 @@ private
 
    procedure Finalize (Object : in out Source);
 
+   function Has_Source (Object : in Source) return Boolean;
+   --  True if one of Reset_* has been called; lexer has source to process.
+
    function Buffer (Source : in Lexer.Source) return GNATCOLL.Mmap.Str_Access;
-   --  The bounds on the result are not present; 'First, 'Last are not
-   --  reliable. If Source_Label is String_label, actual bounds are
-   --  Source.Buffer'First, 'Last. Otherwise, actual bounds are 1 ..
-   --  Source.Buffer_Last. Indexing is reliable.
+   --  If Source.Label is String_Label, actual bounds are
+   --  Source.Buffer'First, 'Last. Otherwise, the bounds on the result
+   --  are not present; 'First, 'Last are not reliable; actual bounds
+   --  are 1 .. Source.Buffer_Last. Indexing is reliable.
+
+   function To_Buffer_Index (Source : in WisiToken.Lexer.Source; Byte_Pos : in 
Base_Buffer_Pos) return Integer
+   is (Integer (Byte_Pos - Source.Buffer_Nominal_First_Byte) +
+                  (case Source.Label is
+                   when String_Label => Source.Buffer'First,
+                   when File_Label => Integer (Buffer_Pos'First)));
+
+   function From_Buffer_Index (Source : in WisiToken.Lexer.Source; Index : in 
Integer) return Base_Buffer_Pos
+   is (Base_Buffer_Pos (Index) + Source.Buffer_Nominal_First_Byte -
+         (case Source.Label is
+          when String_Label => Base_Buffer_Pos (Source.Buffer'First),
+          when File_Label => Buffer_Pos'First));
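+   --  For example (illustrative, a File_Label source mapped with
+   --  Buffer_Nominal_First_Byte = 101, and assuming Buffer_Pos'First =
+   --  1): To_Buffer_Index maps byte position 105 to buffer index 5, and
+   --  From_Buffer_Index maps index 5 back to byte position 105.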
 
    function File_Name (Source : in Lexer.Source) return String;
+
    function To_Char_Pos (Source : in Lexer.Source; Lexer_Char_Pos : in 
Integer) return Base_Buffer_Pos;
 
+   procedure Begin_Pos
+     (Object     : in     Source;
+      Begin_Byte :    out Buffer_Pos;
+      Begin_Char :    out Buffer_Pos;
+      Begin_Line :    out Line_Number_Type);
+
 end WisiToken.Lexer;
diff --git a/wisitoken-parse-lr-mckenzie_recover-base.adb 
b/wisitoken-parse-lr-mckenzie_recover-base.adb
index d36b3349b1..f11c1e7b1c 100644
--- a/wisitoken-parse-lr-mckenzie_recover-base.adb
+++ b/wisitoken-parse-lr-mckenzie_recover-base.adb
@@ -2,7 +2,7 @@
 --
 --  Base utilities for McKenzie_Recover
 --
---  Copyright (C) 2018 - 2021 Free Software Foundation, Inc.
+--  Copyright (C) 2018 - 2022 Free Software Foundation, Inc.
 --
 --  This library is free software;  you can redistribute it and/or modify it
 --  under terms of the  GNU General Public License  as published by the Free
@@ -17,111 +17,32 @@
 
 pragma License (Modified_GPL);
 
-with GNAT.Traceback.Symbolic;
 package body WisiToken.Parse.LR.McKenzie_Recover.Base is
 
-   function Get_Barrier
-     (Parsers                 : not null access Parser_Lists.List;
-      Parser_Status           : in              Parser_Status_Array;
-      Min_Success_Check_Count : in              Natural;
-      Total_Enqueue_Count     : in              Natural;
-      Check_Delta_Limit       : in              Natural;
-      Enqueue_Limit           : in              Natural)
-     return Boolean
+   Default_Positive_Sequential_Index : constant Syntax_Trees.Sequential_Index 
:= 10;
+   Default_Negative_Sequential_Index : constant Syntax_Trees.Sequential_Index 
:= -10;
+
+   procedure Initialize
+     (Super         : in out Supervisor;
+      Shared_Parser : in out Parser.Parser)
    is
-      Done_Count : SAL.Base_Peek_Type := 0;
-      Skip : Boolean;
+      Tree : WisiToken.Syntax_Trees.Tree renames Shared_Parser.Tree;
    begin
-      --  Return True if all parsers are done, or if any parser has a config
-      --  available to check.
-      for P_Status of Parser_Status loop
-         Skip := False;
-
-         case P_Status.Recover_State is
-         when Active | Ready =>
-            if P_Status.Parser_State.Recover.Config_Heap.Count > 0 then
-               if P_Status.Parser_State.Recover.Check_Count - 
Check_Delta_Limit >= Min_Success_Check_Count then
-                  --  fail; another parser succeeded, this one taking too long.
-                  Done_Count := Done_Count + 1;
-                  Skip := True;
-
-               elsif Total_Enqueue_Count + 
P_Status.Parser_State.Recover.Config_Full_Count >= Enqueue_Limit then
-                  --  fail
-                  Done_Count := Done_Count + 1;
-                  Skip := True;
-               end if;
-            end if;
-
-            if not Skip then
-               case P_Status.Recover_State is
-               when Active =>
-                  if P_Status.Parser_State.Recover.Config_Heap.Count > 0 then
-                     --  Still working
-                     return True;
-                  else
-                     if P_Status.Active_Workers = 0 then
-                        --  fail; no configs left to check.
-                        Done_Count := Done_Count + 1;
-                     end if;
-                  end if;
-
-               when Ready =>
-                  if P_Status.Parser_State.Recover.Config_Heap.Count > 0 and 
then
-                    P_Status.Parser_State.Recover.Config_Heap.Min_Key <= 
P_Status.Parser_State.Recover.Results.Min_Key
-                  then
-                     --  Still more to check.
-                     return True;
-
-                  elsif P_Status.Active_Workers = 0 then
-                     Done_Count := Done_Count + 1;
-                  end if;
-
-               when others =>
-                  null;
-               end case;
-            end if;
-
-         when Success | Fail =>
-            Done_Count := Done_Count + 1;
-         end case;
-      end loop;
-
-      return Done_Count = Parsers.Count;
-   end Get_Barrier;
-
-   protected body Supervisor is
-
-      procedure Initialize
-        (Parsers   : not null access Parser_Lists.List;
-         Terminals : not null access constant Base_Token_Arrays.Vector)
-      is
+      declare
          Index : SAL.Peek_Type := 1;
       begin
-         Supervisor.Parsers      := Parsers;
-         Supervisor.Terminals    := Terminals;
-         All_Parsers_Done        := False;
-         Success_Counter         := 0;
-         Min_Success_Check_Count := Natural'Last;
-         Total_Enqueue_Count     := 0;
-         Fatal_Called            := False;
-         Result                  := Recover_Status'First;
-         Error_ID                := Ada.Exceptions.Null_Id;
-
-         for I in Parsers.Iterate loop
-            if Parsers.Reference (I).Recover_Insert_Delete_Current /= 
Recover_Op_Arrays.No_Index then
-               --  Previous error recovery resume not finished; this is 
supposed to
-               --  be checked in Parser.
-               raise SAL.Programmer_Error;
-            end if;
+         for Cur in Shared_Parser.Parsers.Iterate loop
+            pragma Assert (Shared_Parser.Parsers (Cur).Current_Recover_Op = 
No_Insert_Delete);
+            --  Otherwise previous error recovery resume not finished; this is 
supposed to
+            --  be checked in Parser.
 
-            Parser_Status (Index) :=
+            Super.Parser_Status (Index) :=
               (Recover_State  => Active,
-               Parser_State   => Parser_Lists.Persistent_State_Ref (I),
-               Fail_Mode      => Success,
-               Active_Workers => 0);
+               Parser_State   => Parser_Lists.Unchecked_State_Ref (Cur),
+               Fail_Mode      => Success);
 
             declare
-               Data : McKenzie_Data renames Parsers.Reference (I).Recover;
+               Data : McKenzie_Data renames Shared_Parser.Parsers 
(Cur).Recover;
             begin
                Data.Config_Heap.Clear;
                Data.Results.Clear;
@@ -132,326 +53,550 @@ package body WisiToken.Parse.LR.McKenzie_Recover.Base is
 
             Index := Index + 1;
          end loop;
-      end Initialize;
-
-      entry Get
-        (Parser_Index : out SAL.Base_Peek_Type;
-         Config       : out Configuration;
-         Status       : out Config_Status)
-        when (Fatal_Called or All_Parsers_Done) or else Get_Barrier
-          (Parsers, Parser_Status, Min_Success_Check_Count, 
Total_Enqueue_Count, Check_Delta_Limit, Enqueue_Limit)
-      is
-         Done_Count     : SAL.Base_Peek_Type := 0;
-         Skip           : Boolean;
-         Min_Cost       : Integer            := Integer'Last;
-         Min_Cost_Index : SAL.Base_Peek_Type;
-
-         procedure Set_Outputs (I : in SAL.Peek_Type)
-         is begin
-            Parser_Index := I;
-            Config       := Parser_Status 
(I).Parser_State.Recover.Config_Heap.Remove;
-            Status       := Valid;
+      end;
+
+      --  Set Sequential_Index. Ensure that all of the error node in each
+      --  parser is indexed (ada_mode-recover_incremental_02.adb). After
+      --  that, Push_Back extends towards SOI, and everything else towards
+      --  EOI.
+      declare
+         use Syntax_Trees;
+         Streams     : Stream_ID_Array (1 .. Shared_Parser.Parsers.Count);
+         First_Nodes : Valid_Node_Access_Array (1 .. Super.Parser_Count) := 
(others => Dummy_Node);
+         Last_Nodes  : Valid_Node_Access_Array (1 .. Super.Parser_Count) := 
(others => Dummy_Node);
+
+         Min_Target : Sequential_Index := Default_Negative_Sequential_Index;
+         Max_Target : Sequential_Index := Default_Positive_Sequential_Index;
+         Min_Done   : Boolean          := False;
+         Max_Done   : Boolean          := False;
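+         --  Extend sequential indices outward from the initial targets
+         --  (+10 and -10), doubling a target each pass, until the first
+         --  and last terminals of every parser's current token are
+         --  indexed, or SOI/EOI is reached.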
+      begin
+         for I in Last_Nodes'Range loop
+            pragma Assert
+              (not Tree.Is_Empty_Nonterm (Tree.Current_Token 
(Super.Parser_Status (I).Parser_State.Stream).Node));
 
-            Parser_Status (I).Parser_State.Recover.Check_Count :=
-              Parser_Status (I).Parser_State.Recover.Check_Count + 1;
+            First_Nodes (I) := Tree.First_Terminal
+              (Tree.Current_Token (Super.Parser_Status 
(I).Parser_State.Stream).Node);
 
-            Parser_Status (I).Active_Workers := Parser_Status 
(I).Active_Workers + 1;
-         end Set_Outputs;
+            Last_Nodes (I) := Tree.Last_Terminal
+              (Tree.Current_Token (Super.Parser_Status 
(I).Parser_State.Stream).Node);
+         end loop;
 
-         procedure Set_All_Done
-         is begin
-            Parser_Index := SAL.Base_Peek_Type'First;
+         Set_Initial_Sequential_Index
+           (Shared_Parser.Parsers, Tree, Streams, 
Super.Max_Sequential_Indices, Initialize => True);
 
-            pragma Warnings (Off, "aggregate not fully initialized");
-            --  Config.Stack.Data is not initialized, but no uninitialized 
data is
-            --  ever referenced.
-            Config       := (others => <>);
-            pragma Warnings (On, "aggregate not fully initialized");
+         Super.Min_Sequential_Indices := Super.Max_Sequential_Indices;
 
-            Status       := All_Done;
-         end Set_All_Done;
+         if (for all Term of Super.Max_Sequential_Indices => Tree.ID 
(Term.Ref.Node) = Tree.Lexer.Descriptor.EOI_ID)
+         then
+            Max_Done := True;
+         end if;
 
-      begin
-         if Fatal_Called or All_Parsers_Done then
-            Set_All_Done;
-            return;
+         if (for all Term of Super.Min_Sequential_Indices => Tree.ID 
(Term.Ref.Node) = Tree.Lexer.Descriptor.SOI_ID)
+         then
+            Min_Done := True;
          end if;
 
-         --  Same logic as in Get_Barrier, but different actions.
-         --
-         --  No task_id in outline trace messages, because they may appear in
-         --  .parse_good
-         for I in Parser_Status'Range loop
-            Skip := False;
+         loop
+            if not Max_Done then
+               Extend_Sequential_Index
+                 (Tree, Streams, Super.Max_Sequential_Indices,
+                  Target   => Max_Target,
+                  Positive => True,
+                  Clear    => False);
+            end if;
 
-            declare
-               P_Status : Base.Parser_Status renames Parser_Status (I);
-            begin
-               case P_Status.Recover_State is
-               when Active | Ready =>
-                  if P_Status.Parser_State.Recover.Config_Heap.Count > 0 then
-                     if P_Status.Parser_State.Recover.Check_Count - 
Check_Delta_Limit >= Min_Success_Check_Count then
-                        if Trace_McKenzie > Outline then
-                           Put_Line
-                             (Trace.all,
-                              P_Status.Parser_State.Label, "fail; check delta 
(limit" &
-                                Integer'Image (Min_Success_Check_Count + 
Check_Delta_Limit) & ")",
-                              Task_ID => False);
-                        end if;
-                        P_Status.Recover_State := Fail;
-                        P_Status.Fail_Mode     := Fail_Check_Delta;
+            if not Min_Done then
+               Extend_Sequential_Index
+                 (Tree, Streams, Super.Min_Sequential_Indices,
+                  Target   => Min_Target,
+                  Positive => False,
+                  Clear    => False);
+            end if;
 
-                        Done_Count := Done_Count + 1;
-                        Skip := True;
+            if (for some Node of First_Nodes => Tree.Get_Sequential_Index 
(Node) = Invalid_Sequential_Index) then
+               Min_Target := 2 * @;
+            else
+               Min_Done := True;
+            end if;
 
-                     elsif Total_Enqueue_Count + 
P_Status.Parser_State.Recover.Config_Full_Count >= Enqueue_Limit then
-                        if Trace_McKenzie > Outline then
-                           Put_Line
-                             (Trace.all,
-                              P_Status.Parser_State.Label, "fail; total 
enqueue limit (" &
-                                Enqueue_Limit'Image & " cost" &
-                                
P_Status.Parser_State.Recover.Config_Heap.Min_Key'Image & ")",
-                              Task_ID => False);
-                        end if;
-                        P_Status.Recover_State := Fail;
-                        P_Status.Fail_Mode     := Fail_Enqueue_Limit;
+            if (for some Node of Last_Nodes  => Tree.Get_Sequential_Index 
(Node) = Invalid_Sequential_Index) then
+               Max_Target := 2 * @;
+            else
+               Max_Done := True;
+            end if;
 
-                        Done_Count := Done_Count + 1;
-                        Skip := True;
+            exit when Min_Done and Max_Done;
+         end loop;
+      end;
+   end Initialize;
+
+   procedure Get
+     (Super         : in out Supervisor;
+      Shared_Parser : in     Parser.Parser;
+      Parser_Index  :    out SAL.Base_Peek_Type;
+      Config        :    out Configuration)
+   is
+      Done_Count     : SAL.Base_Peek_Type := 0;
+      Skip           : Boolean;
+      Min_Cost       : Integer            := Integer'Last;
+      Min_Cost_Index : SAL.Base_Peek_Type;
+
+      procedure Set_Outputs (I : in SAL.Peek_Type)
+      is begin
+         Parser_Index := I;
+         Config       := Super.Parser_Status 
(I).Parser_State.Recover.Config_Heap.Remove;
+
+         Super.Parser_Status (I).Parser_State.Recover.Check_Count :=
+           Super.Parser_Status (I).Parser_State.Recover.Check_Count + 1;
+      end Set_Outputs;
+
+      procedure Set_All_Done
+      is begin
+         Parser_Index := SAL.Base_Peek_Type'First;
+
+         pragma Warnings (Off, "aggregate not fully initialized");
+         --  Config.Stack.Data is not initialized, but no uninitialized data is
+         --  ever referenced.
+         Config := (others => <>);
+         pragma Warnings (On, "aggregate not fully initialized");
+      end Set_All_Done;
+
+   begin
+      if Super.All_Parsers_Done then
+         Set_All_Done;
+         return;
+      end if;
+
+      --  Same logic as in Get_Barrier, but different actions.
+      --
+      --  No task_id in outline trace messages, because they may appear in
+      --  .parse_good
+      for I in Super.Parser_Status'Range loop
+         Skip := False;
+
+         declare
+            P_Status          : Base.Parser_Status renames Super.Parser_Status 
(I);
+            Check_Delta_Limit : Natural renames 
Shared_Parser.Table.McKenzie_Param.Check_Delta_Limit;
+         begin
+            case P_Status.Recover_State is
+            when Active | Ready =>
+               if P_Status.Parser_State.Recover.Config_Heap.Count > 0 then
+                  if P_Status.Parser_State.Recover.Check_Count - 
Check_Delta_Limit >= Super.Min_Success_Check_Count then
+                     if Trace_McKenzie > Outline then
+                        Put_Line
+                          (Shared_Parser.Tree,
+                           P_Status.Parser_State.Stream, "fail; check delta 
(limit" &
+                             Integer'Image (Super.Min_Success_Check_Count + 
Check_Delta_Limit) & ")");
+                     end if;
+                     P_Status.Recover_State := Fail;
+                     P_Status.Fail_Mode     := Fail_Check_Delta;
+
+                     Done_Count := Done_Count + 1;
+                     Skip := True;
+
+                  elsif Super.Total_Enqueue_Count + 
P_Status.Parser_State.Recover.Config_Full_Count >=
+                    Shared_Parser.Table.McKenzie_Param.Enqueue_Limit
+                  then
+                     if Trace_McKenzie > Outline then
+                        Put_Line
+                          (Shared_Parser.Tree,
+                           P_Status.Parser_State.Stream, "fail; total enqueue 
limit (" &
+                             
Shared_Parser.Table.McKenzie_Param.Enqueue_Limit'Image & " cost" &
+                             
P_Status.Parser_State.Recover.Config_Heap.Min_Key'Image & ")");
                      end if;
+                     P_Status.Recover_State := Fail;
+                     P_Status.Fail_Mode     := Fail_Enqueue_Limit;
+
+                     Done_Count := Done_Count + 1;
+                     Skip := True;
                   end if;
+               end if;
 
-                  if not Skip then
-                     case P_Status.Recover_State is
-                     when Active =>
-                        if P_Status.Parser_State.Recover.Config_Heap.Count > 0 
then
-                           if 
P_Status.Parser_State.Recover.Config_Heap.Min_Key < Min_Cost then
-                              Min_Cost       := 
P_Status.Parser_State.Recover.Config_Heap.Min_Key;
-                              Min_Cost_Index := I;
-                              --  not done
-                           end if;
-                        else
-                           if P_Status.Active_Workers = 0 then
-                              --  No configs left to check (rarely happens 
with real languages).
-                              if Trace_McKenzie > Outline then
-                                 Put_Line
-                                   (Trace.all, P_Status.Parser_State.Label, 
"fail; no configs left", Task_ID => False);
-                              end if;
-                              P_Status.Recover_State := Fail;
-                              P_Status.Fail_Mode     := Fail_No_Configs_Left;
-
-                              Done_Count := Done_Count + 1;
-                           end if;
+               if not Skip then
+                  case P_Status.Recover_State is
+                  when Active =>
+                     if P_Status.Parser_State.Recover.Config_Heap.Count > 0 
then
+                        if P_Status.Parser_State.Recover.Config_Heap.Min_Key < 
Min_Cost then
+                           Min_Cost       := 
P_Status.Parser_State.Recover.Config_Heap.Min_Key;
+                           Min_Cost_Index := I;
+                           --  not done
                         end if;
-
-                     when Ready =>
-                        if P_Status.Parser_State.Recover.Config_Heap.Count > 0 
and then
-                          P_Status.Parser_State.Recover.Config_Heap.Min_Key <=
-                          P_Status.Parser_State.Recover.Results.Min_Key
-                        then
-                           --  Still more to check. We don't check Min_Cost 
here so this parser
-                           --  can finish quickly.
-                           Set_Outputs (I);
-                           return;
-
-                        elsif P_Status.Active_Workers = 0 then
-                           P_Status.Recover_State := Success;
-                           Done_Count             := Done_Count + 1;
+                     else
+                        --  No configs left to check (rarely happens with real 
languages).
+                        if Trace_McKenzie > Outline then
+                           Put_Line
+                             (Shared_Parser.Tree, P_Status.Parser_State.Stream,
+                              "fail; no configs left");
                         end if;
-                     when others =>
-                        null;
-                     end case;
-                  end if;
+                        P_Status.Recover_State := Fail;
+                        P_Status.Fail_Mode     := Fail_No_Configs_Left;
 
-               when Success | Fail =>
-                  Done_Count := Done_Count + 1;
-               end case;
-            end;
-         end loop;
+                        Done_Count := Done_Count + 1;
+                     end if;
 
-         if Min_Cost /= Integer'Last then
-            Set_Outputs (Min_Cost_Index);
+                  when Ready =>
+                     if P_Status.Parser_State.Recover.Config_Heap.Count > 0 
and then
+                       P_Status.Parser_State.Recover.Config_Heap.Min_Key <=
+                       P_Status.Parser_State.Recover.Results.Min_Key
+                     then
+                        --  Still more to check. We don't check Min_Cost here 
so this parser
+                        --  can finish quickly.
+                        Set_Outputs (I);
+                        return;
+
+                     else
+                        P_Status.Recover_State := Success;
+                        Done_Count             := Done_Count + 1;
+                     end if;
+                  when others =>
+                     null;
+                  end case;
+               end if;
 
-         elsif Done_Count = Parsers.Count then
-            if Trace_McKenzie > Extra then
-               Trace.Put_Line ("Supervisor: done, " & (if Success_Counter > 0 
then "succeed" else "fail"));
-            end if;
+            when Success | Fail =>
+               Done_Count := Done_Count + 1;
+            end case;
+         end;
+      end loop;
 
-            Set_All_Done;
-            All_Parsers_Done := True;
-         else
-            raise SAL.Programmer_Error with "Get_Barrier and Get logic do not 
match";
-         end if;
-      end Get;
-
-      procedure Success
-        (Parser_Index : in     SAL.Peek_Type;
-         Config       : in     Configuration;
-         Configs      : in out Config_Heaps.Heap_Type)
-      is
-         Data : McKenzie_Data renames Parser_Status 
(Parser_Index).Parser_State.Recover;
-      begin
-         Put (Parser_Index, Configs); --  Decrements Active_Worker_Count.
+      if Min_Cost /= Integer'Last then
+         Set_Outputs (Min_Cost_Index);
 
-         if Trace_McKenzie > Detail then
-            Put
-              ("succeed: enqueue" & Integer'Image (Data.Enqueue_Count) & ", 
check " & Integer'Image (Data.Check_Count),
-               Trace.all, Parser_Status (Parser_Index).Parser_State.Label, 
Terminals.all, Config);
+      elsif Done_Count = Super.Parser_Count then
+         if Trace_McKenzie > Extra then
+            Shared_Parser.Tree.Lexer.Trace.Put_Line
+              ("Supervisor: done, " & (if Super.Success_Counter > 0 then 
"succeed" else "fail"));
          end if;
 
-         if Force_Full_Explore then
-            return;
+         Set_All_Done;
+         Super.All_Parsers_Done := True;
+      else
+         raise SAL.Programmer_Error with "Get_Barrier and Get logic do not 
match";
+      end if;
+   end Get;
+
+   procedure Success
+     (Super         : in out Supervisor;
+      Shared_Parser : in     Parser.Parser;
+      Parser_Index  : in     SAL.Peek_Type;
+      Config        : in     Configuration;
+      Configs       : in out Config_Heaps.Heap_Type)
+   is
+      Data : McKenzie_Data renames Super.Parser_Status 
(Parser_Index).Parser_State.Recover;
+   begin
+      Super.Put (Shared_Parser, Parser_Index, Configs);
+
+      if Trace_McKenzie > Detail then
+         Put
+           ("succeed: enqueue" & Integer'Image (Data.Enqueue_Count) & ", check 
" & Integer'Image (Data.Check_Count),
+            Shared_Parser.Tree, Super.Parser_Status
+              (Parser_Index).Parser_State.Stream,
+            Config);
+      end if;
+
+      Super.Success_Counter := @ + 1;
+      Data.Success          := True;
+
+      if Force_Full_Explore then
+         Data.Results.Add (Config);
+         return;
+      end if;
+
+      if Data.Check_Count < Super.Min_Success_Check_Count then
+         Super.Min_Success_Check_Count := Data.Check_Count;
+      end if;
+
+      if Force_High_Cost_Solutions then
+         Data.Results.Add (Config);
+         if Data.Results.Count > 3 then
+            Super.Parser_Status (Parser_Index).Recover_State := Ready;
          end if;
+      else
+         if Data.Results.Count = 0 then
+            Data.Results.Add (Config);
 
-         Success_Counter := Success_Counter + 1;
-         Result          := Success;
+            Super.Parser_Status (Parser_Index).Recover_State := Ready;
 
-         Data.Success := True;
+         elsif Config.Cost < Data.Results.Min_Key then
+            --  delete higher cost configs from Results
+            loop
+               Data.Results.Drop;
+               exit when Data.Results.Count = 0 or else
+                 Config.Cost >= Data.Results.Min_Key;
+            end loop;
 
-         if Data.Check_Count < Min_Success_Check_Count then
-            Min_Success_Check_Count := Data.Check_Count;
-         end if;
+            Data.Results.Add (Config);
 
-         if Force_High_Cost_Solutions then
+         elsif Config.Cost = Data.Results.Min_Key then
             Data.Results.Add (Config);
-            if Data.Results.Count > 3 then
-               Parser_Status (Parser_Index).Recover_State := Ready;
-            end if;
+
          else
-            if Data.Results.Count = 0 then
-               Data.Results.Add (Config);
+            --  Config.Cost > Results.Min_Key
+            null;
+         end if;
+      end if;
+   end Success;
 
-               Parser_Status (Parser_Index).Recover_State := Ready;
+   procedure Put
+     (Super         : in out Supervisor;
+      Shared_Parser : in     Parser.Parser;
+      Parser_Index  : in     SAL.Peek_Type;
+      Configs       : in out Config_Heaps.Heap_Type)
+   is
+      Configs_Count : constant SAL.Base_Peek_Type := Configs.Count; -- Before 
it is emptied, for Trace.
 
-            elsif Config.Cost < Data.Results.Min_Key then
-               --  delete higher cost configs from Results
-               loop
-                  Data.Results.Drop;
-                  exit when Data.Results.Count = 0 or else
-                    Config.Cost >= Data.Results.Min_Key;
-               end loop;
+      P_Status : Base.Parser_Status renames Super.Parser_Status (Parser_Index);
+      Data     : McKenzie_Data renames P_Status.Parser_State.Recover;
+   begin
+      Super.Total_Enqueue_Count := @ + Integer (Configs_Count);
+      Data.Enqueue_Count        := @ + Integer (Configs_Count);
+      loop
+         exit when Configs.Count = 0;
 
-               Data.Results.Add (Config);
+         Data.Config_Heap.Add (Configs.Remove);
+      end loop;
 
-            elsif Config.Cost = Data.Results.Min_Key then
-               Data.Results.Add (Config);
+      if Trace_McKenzie > Detail then
+         Put_Line
+           (Shared_Parser.Tree, P_Status.Parser_State.Stream,
+            "enqueue:" & SAL.Base_Peek_Type'Image (Configs_Count) &
+              "/" & SAL.Base_Peek_Type'Image (Data.Config_Heap.Count) &
+              "/" & Trimmed_Image (Super.Total_Enqueue_Count) &
+              "/" & Trimmed_Image (Data.Check_Count));
+      end if;
+   end Put;
 
-            else
-               --  Config.Cost > Results.Min_Key
-               null;
-            end if;
+   procedure Config_Full
+     (Super         : in out Supervisor;
+      Shared_Parser : in     Parser.Parser;
+      Prefix        : in     String;
+      Parser_Index  : in     SAL.Peek_Type)
+   is
+      P_Status : Base.Parser_Status renames Super.Parser_Status (Parser_Index);
+      Data     : McKenzie_Data renames P_Status.Parser_State.Recover;
+   begin
+      Data.Config_Full_Count := Data.Config_Full_Count + 1;
+      if Trace_McKenzie > Outline then
+         Put_Line
+           (Shared_Parser.Tree, Super.Stream (Parser_Index),
+            Prefix & ": config.ops is full; " & Data.Config_Full_Count'Image);
+      end if;
+   end Config_Full;
+
+   function Recover_Result (Super : in Supervisor) return Recover_Status
+   is
+      Temp : Recover_Status := Recover_Status'First;
+   begin
+      for S of Super.Parser_Status loop
+         if S.Parser_State.Recover.Success then
+            --  Can succeed while also exceeding max_enqueue_limit;
+            --  test_mckenzie.adb Missing_Name_6 LR1, or while exceeding
+            --  Check_Delta_Limit; ada_mode-recover_32.adb.
+            Temp := Success;
+         else
+            Temp := Recover_Status'Max (Temp, S.Fail_Mode);
          end if;
-      end Success;
-
-      procedure Put (Parser_Index : in SAL.Peek_Type; Configs : in out 
Config_Heaps.Heap_Type)
-      is
-         Configs_Count : constant SAL.Base_Peek_Type := Configs.Count; -- 
Before it is emptied, for Trace.
+      end loop;
+      return Temp;
+   end Recover_Result;
 
-         P_Status : Base.Parser_Status renames Parser_Status (Parser_Index);
-         Data : McKenzie_Data renames P_Status.Parser_State.Recover;
-      begin
-         P_Status.Active_Workers := P_Status.Active_Workers - 1;
+   function Done (Super : in Supervisor) return Boolean
+   is begin
+      return Super.All_Parsers_Done;
+   end Done;
 
-         Total_Enqueue_Count := Total_Enqueue_Count + Integer (Configs_Count);
-         Data.Enqueue_Count  := Data.Enqueue_Count + Integer (Configs_Count);
-         loop
-            exit when Configs.Count = 0;
+   function Min_Sequential_Index
+     (Super         : in Supervisor;
+      Shared_Parser : in Parser.Parser)
+     return Syntax_Trees.Sequential_Index
+   is
+      use WisiToken.Syntax_Trees;
+   begin
+      return Result : Sequential_Index := Sequential_Index'Last do
+         for I in 1 .. Super.Parser_Count loop
+            Result := Sequential_Index'Min
+              (@,  Shared_Parser.Tree.Get_Sequential_Index 
(Super.Min_Sequential_Indices (I).Ref.Node));
+         end loop;
+      end return;
+   end Min_Sequential_Index;
 
-            --  [1] has a check for duplicate configs here; that only happens 
with
-            --  higher costs, which take too long for our application.
-            Data.Config_Heap.Add (Configs.Remove);
+   function Max_Sequential_Index
+     (Super         : in Supervisor;
+      Shared_Parser : in Parser.Parser)
+     return Syntax_Trees.Sequential_Index
+   is
+      use Syntax_Trees;
+   begin
+      return Result : Sequential_Index := Sequential_Index'First do
+         for I in Super.Max_Sequential_Indices'Range loop
+            Result := Sequential_Index'Max
+              (@,  Shared_Parser.Tree.Get_Sequential_Index 
(Super.Max_Sequential_Indices (I).Ref.Node));
          end loop;
+      end return;
+   end Max_Sequential_Index;
 
-         if Trace_McKenzie > Detail then
-            Put_Line
-              (Trace.all, P_Status.Parser_State.Label,
-               "enqueue:" & SAL.Base_Peek_Type'Image (Configs_Count) &
-                 "/" & SAL.Base_Peek_Type'Image (Data.Config_Heap.Count) &
-                 "/" & Trimmed_Image (Total_Enqueue_Count) &
-                 "/" & Trimmed_Image (Data.Check_Count) &
-                 ", min cost:" &
-                 (if Data.Config_Heap.Count > 0
-                  then Integer'Image (Data.Config_Heap.Min_Key)
-                  else " ? ") &
-                 ", active workers:" & Integer'Image 
(P_Status.Active_Workers));
-         end if;
-      end Put;
+   procedure Finish
+     (Super         : in out Supervisor;
+      Shared_Parser : in out Parser.Parser)
+   is begin
+      --  Allow streams to be terminated.
+      Shared_Parser.Min_Sequential_Index   := Super.Min_Sequential_Index 
(Shared_Parser);
+      Shared_Parser.Max_Sequential_Index   := Super.Max_Sequential_Index 
(Shared_Parser);
+      Super.Min_Sequential_Indices := (others => 
Syntax_Trees.Invalid_Stream_Node_Parents);
+      Super.Max_Sequential_Indices := (others => 
Syntax_Trees.Invalid_Stream_Node_Parents);
+
+      if Trace_McKenzie > Detail then
+         Shared_Parser.Tree.Lexer.Trace.New_Line;
+         Shared_Parser.Tree.Lexer.Trace.Put_Line ("Supervisor: Done");
+      end if;
+   end Finish;
+
+   function Parser_State
+     (Super        : in Supervisor;
+      Parser_Index : in SAL.Peek_Type)
+     return Parser_Lists.Constant_Reference_Type
+   is begin
+      return (Element => Super.Parser_Status (Parser_Index).Parser_State);
+   end Parser_State;
 
-      procedure Config_Full (Prefix : in String; Parser_Index : in 
SAL.Peek_Type)
-      is
-         P_Status : Base.Parser_Status renames Parser_Status (Parser_Index);
-         Data : McKenzie_Data renames P_Status.Parser_State.Recover;
-      begin
-         Data.Config_Full_Count := Data.Config_Full_Count + 1;
-         if Trace_McKenzie > Outline then
-            Put_Line (Trace.all, Label (Parser_Index), Prefix & ": config.ops 
is full; " &
-                        Data.Config_Full_Count'Image);
+   function Stream
+     (Super        : in Supervisor;
+      Parser_Index : in SAL.Peek_Type)
+     return Syntax_Trees.Stream_ID
+   is begin
+      return Super.Parser_Status (Parser_Index).Parser_State.Stream;
+   end Stream;
+
+   function Min_Sequential_Index_All_SOI
+     (Super         : in Supervisor;
+      Shared_Parser : in Parser.Parser)
+     return Boolean
+   is begin
+      for I in Super.Min_Sequential_Indices'Range loop
+         if Shared_Parser.Tree.ID (Super.Min_Sequential_Indices (I).Ref.Node) 
/=
+           Shared_Parser.Tree.Lexer.Descriptor.SOI_ID
+         then
+            return False;
          end if;
-      end Config_Full;
+      end loop;
+      return True;
+   end Min_Sequential_Index_All_SOI;
 
-      function Recover_Result return Recover_Status
-      is
-         Temp : Recover_Status := Result;
-      begin
-         if Result = Success then
-            return Success;
-         else
-            for S of Parser_Status loop
-               Temp := Recover_Status'Max (Result, S.Fail_Mode);
-            end loop;
-            return Temp;
+   function Max_Sequential_Index_All_EOI
+     (Super         : in Supervisor;
+      Shared_Parser : in Parser.Parser)
+     return Boolean
+   is begin
+      for I in 1 .. Super.Parser_Count loop
+         if Shared_Parser.Tree.ID (Super.Max_Sequential_Indices (I).Ref.Node) 
/=
+           Shared_Parser.Tree.Lexer.Descriptor.EOI_ID
+         then
+            return False;
          end if;
-      end Recover_Result;
+      end loop;
+      return True;
+   end Max_Sequential_Index_All_EOI;
+
+   procedure Extend_Sequential_Index
+     (Super         : in out Base.Supervisor;
+      Shared_Parser : in out Parser.Parser;
+      Thru          : in     Syntax_Trees.Valid_Node_Access;
+      Positive      : in     Boolean)
+   is begin
+      if Shared_Parser.Tree.Get_Sequential_Index (Thru) /= 
Syntax_Trees.Invalid_Sequential_Index then
+         return;
+      end if;
+
+      declare
+         Streams : Syntax_Trees.Stream_ID_Array (1 .. 
Shared_Parser.Parsers.Count);
+
+         function Min_Target_Index return Syntax_Trees.Sequential_Index
+         is begin
+               declare
+                  Min : constant Syntax_Trees.Sequential_Index := 
Super.Min_Sequential_Index (Shared_Parser);
+                  pragma Assert (Min <= 0);
+               begin
+                  return (if Min = 0 then Default_Negative_Sequential_Index 
else 2 * Min);
+               end;
+         end Min_Target_Index;
 
-      procedure Fatal (E : in Ada.Exceptions.Exception_Occurrence)
-      is
-         use Ada.Exceptions;
       begin
-         if Trace_McKenzie > Outline then
-            Trace.Put_Line ("task " & Task_Attributes.Value'Image & " 
Supervisor: Error");
-         end if;
-         Fatal_Called   := True;
-         Error_ID       := Exception_Identity (E);
-         Error_Message  := +Exception_Message (E);
-         if Debug_Mode then
-            Trace.Put_Line (Exception_Name (E) & ": " & Exception_Message (E));
-            Trace.Put_Line (GNAT.Traceback.Symbolic.Symbolic_Traceback (E)); 
-- includes Prefix
-         end if;
-      end Fatal;
+         for I in Super.Parser_Status'Range loop
+            Streams (I) := Super.Parser_Status (I).Parser_State.Stream;
+         end loop;
 
-      entry Done (Error_ID : out Ada.Exceptions.Exception_Id; Message : out 
Ada.Strings.Unbounded.Unbounded_String)
-        when All_Parsers_Done or Fatal_Called
-      is begin
-         Error_ID := Supervisor.Error_ID;
-         Message  := Error_Message;
-         if Trace_McKenzie > Detail then
-            Trace.New_Line;
-            Trace.Put_Line ("Supervisor: Done");
-         end if;
-      end Done;
+         loop
+            exit when Shared_Parser.Tree.Get_Sequential_Index (Thru) /= 
Syntax_Trees.Invalid_Sequential_Index;
+
+            if Positive then
+               exit when Super.Max_Sequential_Index_All_EOI (Shared_Parser);
+               Extend_Sequential_Index
+                 (Shared_Parser.Tree, Streams, Super.Max_Sequential_Indices,
+                  Target   => 2 * Super.Max_Sequential_Index (Shared_Parser),
+                  Positive => Positive,
+                  Clear    => False);
+            else
+               exit when Super.Min_Sequential_Index_All_SOI (Shared_Parser);
 
-      function Parser_State (Parser_Index : in SAL.Peek_Type) return 
Parser_Lists.Constant_Reference_Type
-      is begin
-         return (Element => Parser_Status (Parser_Index).Parser_State);
-      end Parser_State;
+               Extend_Sequential_Index
+                 (Shared_Parser.Tree, Streams, Super.Min_Sequential_Indices,
+                  Target   => Min_Target_Index,
+                  Positive => Positive,
+                  Clear    => False);
+            end if;
+         end loop;
+      end;
+   end Extend_Sequential_Index;
 
-      function Label (Parser_Index : in SAL.Peek_Type) return Natural
-      is begin
-         return Parser_Status (Parser_Index).Parser_State.Label;
-      end Label;
+   procedure Extend_Sequential_Index
+     (Super         : in out Base.Supervisor;
+      Shared_Parser : in out Parser.Parser;
+      Thru          : in     Syntax_Trees.Sequential_Index)
+   is
+      use Syntax_Trees;
+   begin
+      loop
+         declare
+            Min     : constant Sequential_Index := Super.Min_Sequential_Index 
(Shared_Parser);
+            Max     : constant Sequential_Index := Super.Max_Sequential_Index 
(Shared_Parser);
+            Streams : Syntax_Trees.Stream_ID_Array (1 .. 
Shared_Parser.Parsers.Count);
+         begin
+            exit when Thru in Min .. Max;
+
+            for I in Super.Parser_Status'Range loop
+               Streams (I) := Super.Parser_Status (I).Parser_State.Stream;
+            end loop;
+
+            if Thru < Min then
+               exit when Super.Min_Sequential_Index_All_SOI (Shared_Parser);
 
-   end Supervisor;
+               pragma Assert (Min < 0);
+               Extend_Sequential_Index
+                 (Shared_Parser.Tree, Streams, Super.Min_Sequential_Indices,
+                  Target   => 2 * Min,
+                  Positive => False,
+                  Clear    => False);
+            else
+               exit when Super.Max_Sequential_Index_All_EOI (Shared_Parser);
+
+               Extend_Sequential_Index
+                 (Shared_Parser.Tree, Streams, Super.Max_Sequential_Indices,
+                  Target   => 2 * Max,
+                  Positive => True,
+                  Clear    => False);
+            end if;
+         end;
+      end loop;
+   end Extend_Sequential_Index;
 
    procedure Put
-     (Message      : in              String;
-      Super        : not null access Base.Supervisor;
-      Shared       : not null access Base.Shared;
-      Parser_Index : in              SAL.Peek_Type;
-      Config       : in              Configuration;
-      Task_ID      : in              Boolean := True)
+     (Super         : in Supervisor;
+      Shared_Parser : in Parser.Parser;
+      Message       : in String;
+      Parser_Index  : in SAL.Peek_Type;
+      Config        : in Configuration)
    is begin
-      Put (Message, Super.Trace.all, Super.Parser_State (Parser_Index).Label,
-           Shared.Terminals.all, Config, Task_ID);
+      Put (Message, Shared_Parser.Tree, Super.Parser_State 
(Parser_Index).Stream, Config);
    end Put;
 
 end WisiToken.Parse.LR.McKenzie_Recover.Base;
diff --git a/wisitoken-parse-lr-mckenzie_recover-base.ads 
b/wisitoken-parse-lr-mckenzie_recover-base.ads
index f67df90430..1d94ca1fca 100644
--- a/wisitoken-parse-lr-mckenzie_recover-base.ads
+++ b/wisitoken-parse-lr-mckenzie_recover-base.ads
@@ -2,7 +2,7 @@
 --
 --  Base utilities for McKenzie_Recover
 --
---  Copyright (C) 2018 - 2019 Free Software Foundation, Inc.
+--  Copyright (C) 2018 - 2022 Free Software Foundation, Inc.
 --
 --  This library is free software;  you can redistribute it and/or modify it
 --  under terms of the  GNU General Public License  as published by the Free
@@ -20,166 +20,119 @@ pragma License (Modified_GPL);
 with Ada.Exceptions;
 with WisiToken.Parse.LR.Parser;
 with WisiToken.Parse.LR.Parser_Lists;
-private package WisiToken.Parse.LR.McKenzie_Recover.Base is
+package WisiToken.Parse.LR.McKenzie_Recover.Base is
 
-   ----------
-   --  Protected object specs.
-   --
-   --  Tasking design requirements:
-   --
-   --  1) For each parse_state, find all solutions of the same lowest
-   --  cost.
-   --
-   --  2) use as many CPUs as available as fully as possible.
-   --
-   --  3) avoid
-   --     a) busy waits
-   --     b) race conditions
-   --     c) deadlocks.
-   --
-   --  For 2), we use worker_tasks to perform the check computations on
-   --  each configuration. We allocate N - 1 worker_tasks, where N is the
-   --  number of available CPUs, saving one CPU for Supervisor and the
-   --  foreground IDE.
-   --
-   --  For 1), worker_tasks always get the lowest cost configuration
-   --  available. However, some active worker_task may have a lower cost
-   --  configuration that it has not yet delivered to Supervisor.
-   --  Therefore we always wait until all current active worker_tasks
-   --  deliver their results before deciding we are done.
-   --
-   --  For 3a) we have one Supervisor protected object that controls
-   --  access to all Parse_States and configurations, and a Shared object
-   --  that provides appropriate access to the Shared_Parser components.
-   --
-   --  It is tempting to try to reduce contention for Supervisor by
-   --  having one protected object per parser, but that requires the
-   --  worker tasks to busy loop checking all the parsers.
-   --
-   --  There is still a race condition on Success; the solutions can be
-   --  delivered in different orders on different runs. This matters
-   --  because each solution results in a successful parse, possibly with
-   --  different actions (different indentation computed, for example).
-   --  Which solution finally succeeds depends on which are terminated
-   --  due to identical parser stacks, which in turn depends on the order
-   --  they were delivered. See ada-mode/tests/ada_mode-interactive_2.adb
-   --  for an example.
-   --
-   --  There is also a race condition on how many failed or higher cost
-   --  configurations are checked, before the final solutions are found.
-
-   type Config_Status is (Valid, All_Done);
    type Recover_State is (Active, Ready, Success, Fail);
 
    type Parser_Status is record
       Recover_State : Base.Recover_State;
       Parser_State  : Parser_Lists.State_Access;
       Fail_Mode     : Recover_Status;
-
-      Active_Workers : Natural;
-      --  Count of Worker_Tasks that have done Get but not Put or Success.
    end record;
 
    type Parser_Status_Array is array (SAL.Peek_Type range <>) of Parser_Status;
 
-   protected type Supervisor
-     (Trace             : not null access WisiToken.Trace'Class;
-      Check_Delta_Limit : Natural;
-      Enqueue_Limit     : Natural;
-      Parser_Count      : SAL.Peek_Type)
-   is
-      --  There is only one object of this type, declared in Recover.
-
-      procedure Initialize
-        (Parsers   : not null access Parser_Lists.List;
-         Terminals : not null access constant Base_Token_Arrays.Vector);
-
-      entry Get
-        (Parser_Index : out SAL.Base_Peek_Type;
-         Config       : out Configuration;
-         Status       : out Config_Status);
-      --  Get a new configuration to check. Available when there is a
-      --  configuration to get, or when all configs have been checked.
-      --
-      --  Increments active worker count.
-      --
-      --  Status values mean:
-      --
-      --  Valid - Parser_Index, Config are valid, should be checked.
-      --
-      --  All_Done - Parser_Index, Config are not valid.
-
-      procedure Success
-        (Parser_Index : in     SAL.Peek_Type;
-         Config       : in     Configuration;
-         Configs      : in out Config_Heaps.Heap_Type);
-      --  Report that Configuration succeeds for Parser_Label, and enqueue
-      --  Configs.
-      --
-      --  Decrements active worker count.
-
-      procedure Put (Parser_Index : in SAL.Peek_Type; Configs : in out Config_Heaps.Heap_Type);
-      --  Add Configs to the McKenzie_Data Config_Heap for Parser_Label
-      --
-      --  Decrements active worker count.
-
-      procedure Config_Full (Prefix : in String; Parser_Index : in SAL.Peek_Type);
-      --  Report that a config.ops was full when trying to add another op.
-      --  This is counted towards the enqueue limit.
-
-      function Recover_Result return Recover_Status;
-
-      procedure Fatal (E : in Ada.Exceptions.Exception_Occurrence);
-      --  Report a fatal error; abort all processing, make Done
-      --  available.
-
-      entry Done (Error_ID : out Ada.Exceptions.Exception_Id; Message : out Ada.Strings.Unbounded.Unbounded_String);
-      --  Available when all parsers have failed or succeeded, or an error
-      --  occured.
-      --
-      --  If Error_ID is not Null_Id, an error occured.
-
-      function Parser_State (Parser_Index : in SAL.Peek_Type) return Parser_Lists.Constant_Reference_Type;
-      function Label (Parser_Index : in SAL.Peek_Type) return Natural;
-
-   private
-      Parsers   : access Parser_Lists.List;
-      Terminals : access constant Base_Token_Arrays.Vector;
-
-      All_Parsers_Done        : Boolean;
-      Success_Counter         : Natural;
-      Min_Success_Check_Count : Natural;
-      Total_Enqueue_Count     : Natural;
-      Fatal_Called            : Boolean;
-      Result                  : Recover_Status;
-      Error_ID                : Ada.Exceptions.Exception_Id;
-      Error_Message           : Ada.Strings.Unbounded.Unbounded_String;
-      Parser_Status           : Parser_Status_Array (1 .. Parser_Count);
-   end Supervisor;
-
-   type Shared
-     (Trace                          : not null access WisiToken.Trace'Class;
-      Lexer                          : not null access constant WisiToken.Lexer.Instance'Class;
-      Table                          : not null access constant Parse_Table;
-      Language_Fixes                 : WisiToken.Parse.LR.Parser.Language_Fixes_Access;
-      Language_Matching_Begin_Tokens : WisiToken.Parse.LR.Parser.Language_Matching_Begin_Tokens_Access;
-      Language_String_ID_Set         : WisiToken.Parse.LR.Parser.Language_String_ID_Set_Access;
-      Terminals                      : not null access constant Base_Token_Arrays.Vector;
-      Line_Begin_Token               : not null access constant Line_Begin_Token_Vectors.Vector)
-     is null record;
-   --  There is only one object of this type, declared in Recover. It
-   --  provides appropriate access to Shared_Parser components.
+   type Supervisor (Parser_Count : SAL.Peek_Type) is tagged limited private;
+
+   procedure Initialize
+     (Super         : in out Supervisor;
+      Shared_Parser : in out WisiToken.Parse.LR.Parser.Parser);
+
+   procedure Get
+     (Super         : in out Supervisor;
+      Shared_Parser : in     Parser.Parser;
+      Parser_Index  :    out SAL.Base_Peek_Type;
+      Config        :    out Configuration);
+   --  Get a new configuration to check. If Parser_Index =
+   --  SAL.Base_Peek_Type'First, Config is invalid; there are no
+   --  configurations left to check.
+
+   procedure Success
+     (Super         : in out Supervisor;
+      Shared_Parser : in     Parser.Parser;
+      Parser_Index  : in     SAL.Peek_Type;
+      Config        : in     Configuration;
+      Configs       : in out Config_Heaps.Heap_Type);
+   --  Report that Configuration succeeds for Parser_Label, and enqueue
+   --  Configs.
+
+   procedure Put
+     (Super         : in out Supervisor;
+      Shared_Parser : in     Parser.Parser;
+      Parser_Index  : in     SAL.Peek_Type;
+      Configs       : in out Config_Heaps.Heap_Type);
+   --  Add Configs to the McKenzie_Data Config_Heap for Parser_Label
+
+   procedure Config_Full
+     (Super         : in out Supervisor;
+      Shared_Parser : in     Parser.Parser;
+      Prefix        : in     String;
+      Parser_Index  : in     SAL.Peek_Type);
+   --  Report that a config.ops was full when trying to add another op.
+   --  This is counted towards the enqueue limit.
+
+   function Recover_Result (Super : in Supervisor) return Recover_Status;
+
+   function Done (Super : in Supervisor) return Boolean;
+   --  True when all parsers have failed or succeeded.
+
+   procedure Finish
+     (Super         : in out Supervisor;
+      Shared_Parser : in out Parser.Parser);
+
+   function Parser_State
+     (Super        : in Supervisor;
+      Parser_Index : in SAL.Peek_Type)
+     return Parser_Lists.Constant_Reference_Type;
+
+   function Stream (Super : in Supervisor; Parser_Index : in SAL.Peek_Type) return Syntax_Trees.Stream_ID;
+
+   procedure Extend_Sequential_Index
+     (Super         : in out Supervisor;
+      Shared_Parser : in out Parser.Parser;
+      Thru          : in     Syntax_Trees.Valid_Node_Access;
+      Positive      : in     Boolean)
+   with Pre => Shared_Parser.Tree.Is_Terminal (Thru),
+     Post => Shared_Parser.Tree.Get_Sequential_Index (Thru) /= Syntax_Trees.Invalid_Sequential_Index;
+   --  If Thru.Node has valid Sequential_Index, return.
    --
-   --  Since all the accessible objects are read-only (except Trace),
-   --  there are no protected operations, and this is not a protected
-   --  type.
+   --  Else extend Sequential_Index range thru Thru; if Positive, towards
+   --  EOI, else towards SOI.
+
+   procedure Extend_Sequential_Index
+     (Super         : in out Supervisor;
+      Shared_Parser : in out Parser.Parser;
+      Thru          : in     Syntax_Trees.Sequential_Index);
+   --  Ensure Sequential_Index range includes Thru, or SOI/EOI.
 
    procedure Put
-     (Message      : in              String;
-      Super        : not null access Base.Supervisor;
-      Shared       : not null access Base.Shared;
-      Parser_Index : in              SAL.Peek_Type;
-      Config       : in              Configuration;
-      Task_ID      : in              Boolean := True);
+     (Super         : in Supervisor;
+      Shared_Parser : in Parser.Parser;
+      Message       : in String;
+      Parser_Index  : in SAL.Peek_Type;
+      Config        : in Configuration);
+
+private
+
+   type Supervisor (Parser_Count : SAL.Peek_Type) is tagged limited
+   record
+      All_Parsers_Done        : Boolean                     := False;
+      Success_Counter         : Natural                     := 0;
+      Min_Success_Check_Count : Natural                     := Natural'Last;
+      Total_Enqueue_Count     : Natural                     := 0;
+      Fatal_Called            : Boolean                     := False;
+      Error_ID                : Ada.Exceptions.Exception_Id := Ada.Exceptions.Null_Id;
+      Error_Message           : Ada.Strings.Unbounded.Unbounded_String;
+      Parser_Status           : Parser_Status_Array (1 .. Parser_Count);
+
+      Min_Sequential_Indices : Syntax_Trees.Stream_Node_Parents_Array (1 .. Parser_Count);
+      Max_Sequential_Indices : Syntax_Trees.Stream_Node_Parents_Array (1 .. Parser_Count);
+   end record;
+
+   function Min_Sequential_Index (Super : in Supervisor) return Syntax_Trees.Stream_Node_Parents_Array
+   is (Super.Min_Sequential_Indices);
+
+   function Max_Sequential_Index (Super : in Supervisor) return Syntax_Trees.Stream_Node_Parents_Array
+   is (Super.Max_Sequential_Indices);
 
 end WisiToken.Parse.LR.McKenzie_Recover.Base;
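
With the protected object and worker tasks gone, Supervisor above is a plain tagged limited type with default-initialized components, driven by one loop in the recover code. A minimal standalone sketch of that single-task calling pattern (illustrative names and a toy Check_One only, not the API above):

with Ada.Text_IO;
procedure Supervisor_Demo is
   type Supervisor (Parser_Count : Positive) is tagged limited record
      Done_Count    : Natural := 0;  --  defaults replace the old Initialize body
      Enqueue_Count : Natural := 0;
   end record;

   function Done (Super : in Supervisor) return Boolean
   is (Super.Done_Count = Super.Parser_Count);

   procedure Check_One (Super : in out Supervisor) is
   begin
      --  Stand-in for Get / check a config / Put or Success.
      Super.Enqueue_Count := Super.Enqueue_Count + 1;
      if Super.Enqueue_Count mod 3 = 0 then
         Super.Done_Count := Super.Done_Count + 1;
      end if;
   end Check_One;

   Super : Supervisor (Parser_Count => 2);
begin
   loop
      exit when Done (Super);
      Check_One (Super);
   end loop;
   Ada.Text_IO.Put_Line ("checks:" & Natural'Image (Super.Enqueue_Count));
end Supervisor_Demo;
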
diff --git a/wisitoken-parse-lr-mckenzie_recover-explore.adb b/wisitoken-parse-lr-mckenzie_recover-explore.adb
index 2741dff0a2..e5f116bfa0 100644
--- a/wisitoken-parse-lr-mckenzie_recover-explore.adb
+++ b/wisitoken-parse-lr-mckenzie_recover-explore.adb
@@ -2,7 +2,7 @@
 --
 --  See spec.
 --
---  Copyright (C) 2018 - 2020 Free Software Foundation, Inc.
+--  Copyright (C) 2018 - 2022 Free Software Foundation, Inc.
 --
 --  This library is free software;  you can redistribute it and/or modify it
 --  under terms of the  GNU General Public License  as published by the Free
@@ -18,32 +18,35 @@
 pragma License (Modified_GPL);
 
 with Ada.Exceptions;
+with GNAT.Traceback.Symbolic;
 with SAL.Gen_Bounded_Definite_Queues;
 with WisiToken.Parse.LR.McKenzie_Recover.Parse;
 with WisiToken.Parse.LR.Parser;
 package body WisiToken.Parse.LR.McKenzie_Recover.Explore is
 
    procedure Do_Shift
-     (Label             : in              String;
-      Super             : not null access Base.Supervisor;
-      Shared            : not null access Base.Shared;
-      Parser_Index      : in              SAL.Peek_Type;
-      Local_Config_Heap : in out          Config_Heaps.Heap_Type;
-      Config            : in out          Configuration;
-      State             : in              State_Index;
-      ID                : in              Token_ID;
-      Cost_Delta        : in              Integer;
-      Strategy          : in              Strategies)
+     (Label             : in     String;
+      Super             : in out Base.Supervisor;
+      Shared            : in out Parser.Parser;
+      Parser_Index      : in     SAL.Peek_Type;
+      Local_Config_Heap : in out Config_Heaps.Heap_Type;
+      Config            : in out Configuration;
+      State             : in     State_Index;
+      ID                : in     Token_ID;
+      Cost_Delta        : in     Integer;
+      Strategy          : in     Strategies)
    is
-      use Config_Op_Arrays;
+      use Recover_Op_Arrays;
+
       McKenzie_Param : McKenzie_Param_Type renames Shared.Table.McKenzie_Param;
 
-      Op : constant Config_Op := (Insert, ID, Config.Current_Shared_Token);
+      Term : constant Syntax_Trees.Node_Access := Parse.Peek_Current_First_Sequential_Terminal (Super, Shared, Config);
+      Op   : constant Recover_Op               := (Insert, ID, Shared.Tree.Get_Sequential_Index (Term));
    begin
       Config.Strategy_Counts (Strategy) := Config.Strategy_Counts (Strategy) + 
1;
 
       if Is_Full (Config.Ops) then
-         Super.Config_Full ("do_shift ops", Parser_Index);
+         Super.Config_Full (Shared, "do_shift ops", Parser_Index);
          raise Bad_Config;
       else
          Append (Config.Ops, Op);
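
Do_Shift above now records the insertion as a Recover_Op keyed by the sequential index of the next terminal, and reports Config_Full when the bounded ops array has no room. A standalone sketch of that bounded append-or-fail pattern, with toy types in place of Recover_Op_Arrays and a plain exception standing in for Super.Config_Full:

with Ada.Containers.Bounded_Vectors;
procedure Recover_Op_Demo is
   type Sequential_Index is new Integer;
   type Token_ID is new Positive;

   type Recover_Op is record
      ID         : Token_ID;
      Ins_Before : Sequential_Index;  --  insert ID before this terminal
   end record;

   package Op_Arrays is new Ada.Containers.Bounded_Vectors
     (Index_Type => Positive, Element_Type => Recover_Op);

   Bad_Config : exception;

   procedure Append_Or_Fail (Ops : in out Op_Arrays.Vector; Op : in Recover_Op) is
      use type Ada.Containers.Count_Type;
   begin
      if Ops.Length = Ops.Capacity then
         --  Stand-in for reporting Config_Full; counted toward the enqueue limit.
         raise Bad_Config;
      end if;
      Ops.Append (Op);
   end Append_Or_Fail;

   Ops : Op_Arrays.Vector (Capacity => 4);
begin
   Append_Or_Fail (Ops, (ID => 7, Ins_Before => 42));
   pragma Assert (Positive (Ops.Length) = 1);
end Recover_Op_Demo;
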
@@ -62,70 +65,73 @@ package body WisiToken.Parse.LR.McKenzie_Recover.Explore is
          Config.Cost := Integer'Max (1, Config.Cost + McKenzie_Param.Insert 
(ID) + Cost_Delta);
       end if;
 
-      Config.Error_Token.ID := Invalid_Token_ID;
-      Config.Check_Status   := (Label => WisiToken.Semantic_Checks.Ok);
+      Config.Error_Token            := Syntax_Trees.Invalid_Recover_Token;
+      Config.In_Parse_Action_Status := (Label => Syntax_Trees.In_Parse_Actions.Ok);
 
       if Config.Stack.Is_Full then
-         Super.Config_Full ("do_shift stack", Parser_Index);
+         Super.Config_Full (Shared, "do_shift stack", Parser_Index);
          raise Bad_Config;
       else
-         Config.Stack.Push ((State, Invalid_Node_Index, (ID, Virtual => True, others => <>)));
+         Config.Stack.Push ((State, (Virtual => True, ID => ID, others => <>)));
       end if;
       if Trace_McKenzie > Detail then
-         Base.Put
-           ((if Label'Length > 0 then Label & ": " else "") & "insert " & Image (ID, Super.Trace.Descriptor.all),
-            Super, Shared, Parser_Index, Config);
+         Super.Put
+           (Shared,
+            (if Label'Length > 0 then Label & ": " else "") & "insert " & Image (ID, Shared.Tree.Lexer.Descriptor.all),
+            Parser_Index, Config);
       end if;
 
       Local_Config_Heap.Add (Config);
    end Do_Shift;
 
    procedure Do_Reduce_1
-     (Label             : in              String;
-      Super             : not null access Base.Supervisor;
-      Shared            : not null access Base.Shared;
-      Parser_Index      : in              SAL.Peek_Type;
-      Local_Config_Heap : in out          Config_Heaps.Heap_Type;
-      Config            : in out          Configuration;
-      Action            : in              Reduce_Action_Rec;
-      Do_Language_Fixes : in              Boolean := True)
+     (Label             : in     String;
+      Super             : in out Base.Supervisor;
+      Shared            : in out Parser.Parser;
+      Parser_Index      : in     SAL.Peek_Type;
+      Local_Config_Heap : in out Config_Heaps.Heap_Type;
+      Config            : in out Configuration;
+      Action            : in     Reduce_Action_Rec;
+      Do_Language_Fixes : in     Boolean := True)
    is
-      use all type Semantic_Checks.Check_Status_Label;
+      use all type Syntax_Trees.In_Parse_Actions.Status_Label;
       use all type WisiToken.Parse.LR.Parser.Language_Fixes_Access;
 
       Prev_State : constant Unknown_State_Index := Config.Stack.Peek.State;
 
-      Descriptor : WisiToken.Descriptor renames Super.Trace.Descriptor.all;
+      Descriptor : WisiToken.Descriptor renames Shared.Tree.Lexer.Descriptor.all;
       Table      : Parse_Table renames Shared.Table.all;
-      Nonterm    : Recover_Token;
+      Nonterm    : Syntax_Trees.Recover_Token;
       New_State  : Unknown_State_Index;
+
+      Status : constant Syntax_Trees.In_Parse_Actions.Status := Parse.Reduce_Stack
+        (Shared, Config.Stack, Action, Nonterm);
    begin
-      Config.Check_Status := Parse.Reduce_Stack (Shared, Config.Stack, Action, 
Nonterm, Default_Virtual => True);
-      case Config.Check_Status.Label is
+      Config.In_Parse_Action_Status := Status;
+      case Config.In_Parse_Action_Status.Label is
       when Ok =>
          null;
 
-      when Semantic_Checks.Error =>
-         Config.Error_Token       := Nonterm;
-         Config.Check_Token_Count := Action.Token_Count;
+      when Syntax_Trees.In_Parse_Actions.Error =>
+         Config.Error_Token                 := Nonterm;
+         Config.In_Parse_Action_Token_Count := SAL.Base_Peek_Type (Action.Token_Count);
 
          if Do_Language_Fixes then
             if Shared.Language_Fixes /= null then
-               Shared.Language_Fixes
-                 (Super.Trace.all, Shared.Lexer, Super.Label (Parser_Index), Shared.Table.all, Shared.Terminals.all,
-                  Super.Parser_State (Parser_Index).Tree, Local_Config_Heap,
-                  Config);
+               Shared.Language_Fixes (Super, Shared, Parser_Index, Local_Config_Heap, Config);
             end if;
          end if;
 
          --  Finish the reduce; ignore the check fail.
-         if Config.Stack.Depth < SAL.Base_Peek_Type (Config.Check_Token_Count) 
then
-            raise SAL.Programmer_Error;
+         Config.Cost := @ + Table.McKenzie_Param.Ignore_Check_Fail;
+
+         if Config.Stack.Depth < Config.In_Parse_Action_Token_Count then
+            raise Bad_Config;
          else
-            Config.Stack.Pop (SAL.Base_Peek_Type (Config.Check_Token_Count));
+            Config.Stack.Pop (Config.In_Parse_Action_Token_Count);
          end if;
-         Config.Error_Token.ID := Invalid_Token_ID;
-         Config.Check_Status   := (Label => Ok);
+         Config.Error_Token            := Syntax_Trees.Invalid_Recover_Token;
+         Config.In_Parse_Action_Status := (Label => Ok);
       end case;
 
       if Config.Stack.Depth = 0 or else Config.Stack.Peek.State = Unknown_State then
@@ -135,100 +141,80 @@ package body WisiToken.Parse.LR.McKenzie_Recover.Explore is
       New_State := Goto_For (Table, Config.Stack.Peek.State, Action.Production.LHS);
 
       if New_State = Unknown_State then
-         if Trace_McKenzie > Extra then
-            Put_Line
-              (Super.Trace.all, Super.Label (Parser_Index), Label &
-                 ": Do_Reduce_1: unknown_State " & 
Config.Stack.Peek.State'Image & " " &
-                 Image (Action.Production.LHS, Descriptor));
-         end if;
-         raise Bad_Config;
+         --  Bug in LALR parser generator; use LR1
+         raise Invalid_Case;
       end if;
 
-      Config.Stack.Push ((New_State, Invalid_Node_Index, Nonterm));
+      Config.Stack.Push ((New_State, Nonterm));
 
       if Trace_McKenzie > Extra and Label'Length > 0 then
          Put_Line
-           (Super.Trace.all, Super.Label (Parser_Index), Label &
+           (Shared.Tree, Super.Stream (Parser_Index), Label &
               ": state" & State_Index'Image (Prev_State) & " reduce" &
               Ada.Containers.Count_Type'Image (Action.Token_Count) & " to " &
               Image (Action.Production.LHS, Descriptor) & ", goto" &
-              State_Index'Image (New_State) & " via" & State_Index'Image (Config.Stack.Peek (2).State));
+              State_Index'Image (New_State) & " via" & State_Index'Image (Config.Stack.Peek (2).State) &
+              (case Status.Label is
+               when Ok => "",
+               when Syntax_Trees.In_Parse_Actions.Error => " " & Status.Label'Image));
       end if;
    end Do_Reduce_1;
 
    procedure Do_Reduce_2
-     (Label             : in              String;
-      Super             : not null access Base.Supervisor;
-      Shared            : not null access Base.Shared;
-      Parser_Index      : in              SAL.Peek_Type;
-      Local_Config_Heap : in out          Config_Heaps.Heap_Type;
-      Config            : in out          Configuration;
-      Inserted_ID       : in              Token_ID;
-      Cost_Delta        : in              Integer;
-      Strategy          : in              Strategies)
+     (Label             : in     String;
+      Super             : in out Base.Supervisor;
+      Shared            : in out Parser.Parser;
+      Parser_Index      : in     SAL.Peek_Type;
+      Local_Config_Heap : in out Config_Heaps.Heap_Type;
+      Config            : in out Configuration;
+      Inserted_ID       : in     Token_ID;
+      Cost_Delta        : in     Integer;
+      Strategy          : in     Strategies)
+   --  Perform reduce actions until shift Inserted_ID, add the final
+   --  configuration to the heap. If a conflict is encountered, process the
+   --  other actions the same way. If a user in-parse action fails, enqueue
+   --  possible solutions. For parse table error or accept actions, or
+   --  exception Bad_Config, do nothing.
    is
-      --  Perform reduce actions until shift Inserted_ID; if all succeed,
-      --  add the final configuration to the heap, return True. If a conflict is
-      --  encountered, process the other action the same way. If a semantic
-      --  check fails, enqueue possible solutions. For parse table error
-      --  actions, or exception Bad_Config, return False.
-
       Orig_Config : Configuration;
       Table       : Parse_Table renames Shared.Table.all;
       Next_Action : Parse_Action_Node_Ptr := Action_For (Table, Config.Stack.Peek.State, Inserted_ID);
+
+      procedure Do_One (Config : in out Configuration; Action : in Parse_Action_Rec)
+      is begin
+         case Action.Verb is
+         when Shift =>
+            Do_Shift
+              (Label, Super, Shared, Parser_Index, Local_Config_Heap, Config, Action.State, Inserted_ID,
+               Cost_Delta, Strategy);
+
+         when Reduce =>
+            Do_Reduce_1 (Label, Super, Shared, Parser_Index, Local_Config_Heap, Config, Action);
+            Do_Reduce_2
+              (Label, Super, Shared, Parser_Index, Local_Config_Heap, Config, Inserted_ID, Cost_Delta, Strategy);
+
+         when Accept_It | Error =>
+            --  Most likely a Minimal_Complete that doesn't work;
+            --  test_mckenzie_recover.adb Empty_Comments.
+            raise Invalid_Case;
+         end case;
+      end Do_One;
    begin
       if Next_Action.Next /= null then
          Orig_Config := Config;
       end if;
 
-      case Next_Action.Item.Verb is
-      when Shift =>
-         Do_Shift
-           (Label, Super, Shared, Parser_Index, Local_Config_Heap, Config, 
Next_Action.Item.State, Inserted_ID,
-            Cost_Delta, Strategy);
-
-      when Reduce =>
-         Do_Reduce_1 (Label, Super, Shared, Parser_Index, Local_Config_Heap, 
Config, Next_Action.Item);
-         Do_Reduce_2
-           (Label, Super, Shared, Parser_Index, Local_Config_Heap, Config, 
Inserted_ID, Cost_Delta, Strategy);
+      Do_One (Config, Next_Action.Item);
 
-      when Accept_It =>
-         raise SAL.Programmer_Error with "found test case for Do_Reduce 
Accept_It";
-
-      when Error =>
-         if Trace_McKenzie > Extra and Label'Length > 0 then
-            Put_Line
-              (Super.Trace.all, Super.Label (Parser_Index), Label & ": error 
on " &
-                 Image (Inserted_ID, Super.Trace.Descriptor.all) &
-                 " in state" & State_Index'Image (Config.Stack.Peek.State));
-         end if;
-      end case;
+      Next_Action := Next_Action.Next;
 
       loop
-         exit when Next_Action.Next = null;
+         exit when Next_Action = null;
          --  There is a conflict; create a new config to shift or reduce.
          declare
             New_Config : Configuration := Orig_Config;
-            Action     : Parse_Action_Rec renames Next_Action.Next.Item;
          begin
-            case Action.Verb is
-            when Shift =>
-               Do_Shift
-                 (Label, Super, Shared, Parser_Index, Local_Config_Heap, 
New_Config, Action.State, Inserted_ID,
-                  Cost_Delta, Strategy);
-
-            when Reduce =>
-               Do_Reduce_1 (Label, Super, Shared, Parser_Index, 
Local_Config_Heap, New_Config, Action);
-               Do_Reduce_2
-                 (Label, Super, Shared, Parser_Index, Local_Config_Heap, 
New_Config, Inserted_ID,
-                  Cost_Delta, Strategy);
-
-            when Accept_It =>
-               raise SAL.Programmer_Error with "found test case for Do_Reduce 
Accept_It conflict";
-
-            when Error =>
-               null;
-            end case;
+            Do_One (New_Config, Next_Action.Item);
          end;
 
          Next_Action := Next_Action.Next;
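
Do_Reduce_2 above now factors the shift/reduce dispatch into the local Do_One and re-runs it on a copy of the original configuration for every conflicting action in the chain. A standalone sketch of that copy-per-conflict loop (toy Config and Action types; here every action gets a copy, whereas the real code handles the first action on Config itself):

procedure Conflict_Demo is
   type Config is record
      Cost : Natural := 0;
   end record;

   type Action;
   type Action_Ptr is access Action;
   type Action is record
      Cost_Delta : Natural;
      Next       : Action_Ptr;
   end record;

   Results : Natural := 0;

   procedure Do_One (C : in out Config; A : in Action) is
   begin
      C.Cost  := C.Cost + A.Cost_Delta;  --  stand-in for the shift/reduce work
      Results := Results + 1;
   end Do_One;

   Orig : constant Config := (Cost => 1);

   Head : constant Action_Ptr :=
     new Action'(Cost_Delta => 2,
                 Next => new Action'(Cost_Delta => 5, Next => null));
begin
   declare
      Next_Action : Action_Ptr := Head;
   begin
      while Next_Action /= null loop
         declare
            New_Config : Config := Orig;  --  each conflicting action gets its own copy
         begin
            Do_One (New_Config, Next_Action.all);
         end;
         Next_Action := Next_Action.Next;
      end loop;
   end;
   pragma Assert (Results = 2);
end Conflict_Demo;
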
@@ -240,46 +226,53 @@ package body WisiToken.Parse.LR.McKenzie_Recover.Explore is
       end if;
    end Do_Reduce_2;
 
-   function Edit_Point_Matches_Ops (Config : in Configuration) return Boolean
+   function Edit_Point_Matches_Ops
+     (Super  : in out Base.Supervisor;
+      Shared : in out Parser.Parser;
+      Config : in     Configuration)
+     return Boolean
    is
-      use Config_Op_Arrays, Config_Op_Array_Refs;
+      use Recover_Op_Arrays, Recover_Op_Array_Refs;
       pragma Assert (Length (Config.Ops) > 0);
-      Op : Config_Op renames Constant_Ref (Config.Ops, Last_Index (Config.Ops));
+      Op : Recover_Op renames Constant_Ref (Config.Ops, Last_Index (Config.Ops));
+      Term : constant Syntax_Trees.Node_Access := Parse.Peek_Current_First_Sequential_Terminal (Super, Shared, Config);
    begin
-      return Config.Current_Shared_Token =
+      return Shared.Tree.Get_Sequential_Index (Term) =
         (case Op.Op is
-         when Fast_Forward => Op.FF_Token_Index,
-         when Undo_Reduce  => Invalid_Token_Index, -- ie, "we don't know", so return False.
+         when Fast_Forward => Op.FF_Next_Index,
+         when Undo_Reduce  => Op.UR_Token_Index,
          when Push_Back    => Op.PB_Token_Index,
-         when Insert       => Op.Ins_Token_Index,
+         when Insert       => Op.Ins_Before,
          when Delete       => Op.Del_Token_Index + 1);
    end Edit_Point_Matches_Ops;
 
    procedure Fast_Forward
-     (Super             : not null access Base.Supervisor;
-      Shared            : not null access Base.Shared;
-      Parser_Index      : in              SAL.Base_Peek_Type;
-      Local_Config_Heap : in out          Config_Heaps.Heap_Type;
-      Config            : in              Configuration)
+     (Super             : in out Base.Supervisor;
+      Shared            : in out Parser.Parser;
+      Parser_Index      : in     SAL.Base_Peek_Type;
+      Local_Config_Heap : in out Config_Heaps.Heap_Type;
+      Config            : in     Configuration)
+   --  Apply the ops in Config.Insert_Delete; they were inserted by some
+   --  fix. Leaves Config.Error_Token, Config.In_Parse_Action_Status set.
+   --  If there are conflicts, all are parsed. All succeeding configs are
+   --  enqueued in Local_Config_Heap.
    is
-      --  Apply the ops in Config; they were inserted by some fix.
-      --  Leaves Config.Error_Token, Config.Check_Status set.
-      --  If there are conflicts, all are parsed; if more than one succeed.
-      --  All configs are enqueued in Local_Config_Heap.
-
       use Parse.Parse_Item_Arrays;
-      use Config_Op_Arrays;
+      use Recover_Op_Arrays;
 
       Parse_Items : aliased Parse.Parse_Item_Arrays.Vector;
 
+      First_Node : constant Syntax_Trees.Valid_Node_Access := Parse.Peek_Current_First_Sequential_Terminal
+        (Super, Shared, Config);
+
       Dummy : Boolean := Parse.Parse
         (Super, Shared, Parser_Index, Parse_Items, Config,
-         Shared_Token_Goal => Invalid_Token_Index,
+         Shared_Token_Goal => Syntax_Trees.Invalid_Sequential_Index,
          All_Conflicts     => True,
          Trace_Prefix      => "fast_forward");
    begin
-      --  This solution is from Language_Fixes (see gate on call site
-      --  below); any cost increase is done there.
+      --  This solution is from Language_Fixes (see gate on call below); any
+      --  cost increase is done there.
       --
       --  We used to handle the Parse_Items.Length = 1 case specially, and
       --  return Continue. Maintaining that requires too much code
@@ -290,15 +283,27 @@ package body WisiToken.Parse.LR.McKenzie_Recover.Explore is
             Item : Parse.Parse_Item renames Parse.Parse_Item_Array_Refs.Variable_Ref (Parse_Items, I);
          begin
             if Item.Parsed and Item.Config.Current_Insert_Delete = No_Insert_Delete then
+               --  Parse processed all Config.Insert_Delete without error;
                --  Item.Config.Error_Token.ID, Check_Status are correct.
 
-               if not Edit_Point_Matches_Ops (Item.Config) then
+               if not Edit_Point_Matches_Ops (Super, Shared, Item.Config) then
 
                   if Is_Full (Item.Config.Ops) then
-                     Super.Config_Full ("fast_forward 1", Parser_Index);
+                     Super.Config_Full (Shared, "fast_forward 1", Parser_Index);
                      raise Bad_Config;
                   else
-                     Append (Item.Config.Ops, (Fast_Forward, Item.Config.Current_Shared_Token));
+                     declare
+                        Next_Node : constant Syntax_Trees.Valid_Node_Access :=
+                          Parse.Peek_Current_First_Sequential_Terminal (Super, Shared, Item.Config);
+                     begin
+                        Super.Extend_Sequential_Index (Shared, Thru => Next_Node, Positive => True);
+
+                        Append
+                          (Item.Config.Ops,
+                           (Fast_Forward,
+                            FF_First_Index => Shared.Tree.Get_Sequential_Index (First_Node),
+                            FF_Next_Index  => Shared.Tree.Get_Sequential_Index (Next_Node)));
+                     end;
                   end if;
                end if;
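
The Fast_Forward op now carries the sequential index range that was skipped (FF_First_Index .. FF_Next_Index); Fast_Forward above appends one, and Enqueue in Check below either extends the trailing op or appends a new one. A standalone sketch of that update-or-append pattern (toy types; the merge test here is deliberately simplified to index contiguity):

with Ada.Containers.Vectors;
procedure Fast_Forward_Demo is
   type Sequential_Index is new Integer;

   type FF_Op is record
      First_Index : Sequential_Index;
      Next_Index  : Sequential_Index;
   end record;

   package Op_Vectors is new Ada.Containers.Vectors (Positive, FF_Op);
   use Op_Vectors;

   procedure Note_Fast_Forward
     (Ops : in out Vector; First, Next : in Sequential_Index)
   is
   begin
      if not Ops.Is_Empty and then Ops.Last_Element.Next_Index = First then
         --  Extend the trailing op instead of stacking a second one.
         declare
            Op : FF_Op := Ops.Last_Element;
         begin
            Op.Next_Index := Next;
            Ops.Replace_Element (Ops.Last_Index, Op);
         end;
      else
         Ops.Append ((First_Index => First, Next_Index => Next));
      end if;
   end Note_Fast_Forward;

   Ops : Vector;
begin
   Note_Fast_Forward (Ops, 3, 7);
   Note_Fast_Forward (Ops, 7, 12);  --  merges with the previous op
   pragma Assert (Positive (Ops.Length) = 1 and then Ops.Last_Element.Next_Index = 12);
end Fast_Forward_Demo;
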
 
@@ -307,34 +312,80 @@ package body WisiToken.Parse.LR.McKenzie_Recover.Explore is
                Local_Config_Heap.Add (Item.Config);
 
                if Trace_McKenzie > Detail then
-                  Base.Put ("fast forward enqueue", Super, Shared, Parser_Index, Item.Config);
+                  Super.Put (Shared, "fast forward enqueue", Parser_Index, Item.Config);
                end if;
             end if;
          exception
          when Bad_Config =>
-            null;
+            if Debug_Mode then
+               raise;
+            else
+               --  Process other Parse_Items.
+               null;
+            end if;
          end;
       end loop;
    end Fast_Forward;
 
    function Check
-     (Super             : not null access Base.Supervisor;
-      Shared            : not null access Base.Shared;
-      Parser_Index      : in              SAL.Base_Peek_Type;
-      Config            : in out          Configuration;
-      Local_Config_Heap : in out          Config_Heaps.Heap_Type)
+     (Super             : in out Base.Supervisor;
+      Shared            : in out Parser.Parser;
+      Parser_Index      : in     SAL.Base_Peek_Type;
+      Config            : in out Configuration;
+      Local_Config_Heap : in out Config_Heaps.Heap_Type)
      return Check_Status
    is
-      use Config_Op_Arrays, Config_Op_Array_Refs;
+      use Recover_Op_Arrays, Recover_Op_Array_Refs;
       use Parse.Parse_Item_Arrays;
-      use all type Semantic_Checks.Check_Status_Label;
+      use all type Syntax_Trees.In_Parse_Actions.Status_Label;
+
+      First_Node : constant Syntax_Trees.Valid_Node_Access := Parse.Peek_Current_First_Sequential_Terminal
+        (Super, Shared, Config);
 
       Parse_Items : aliased Parse.Parse_Item_Arrays.Vector;
-      Result      : Check_Status := Continue;
+
+      procedure Enqueue (Item : in out Parse.Parse_Item)
+      is begin
+         --  Append or update a Fast_Forward to indicate the changed edit
+         --  point.
+         Item.Config.Minimal_Complete_State := None;
+         Item.Config.Matching_Begin_Done    := False;
+
+         if Last_Index (Item.Config.Ops) /= Recover_Op_Arrays.No_Index and then
+           Constant_Ref (Item.Config.Ops, Last_Index (Item.Config.Ops)).Op = Fast_Forward
+         then
+            --  Update the trailing Fast_Forward.
+            Variable_Ref (Item.Config.Ops, Last_Index (Item.Config.Ops)).FF_Next_Index :=
+              Shared.Tree.Get_Sequential_Index
+                (Parse.Peek_Current_First_Sequential_Terminal (Super, Shared, Item.Config));
+         else
+            if Is_Full (Item.Config.Ops) then
+               Super.Config_Full (Shared, "check 1", Parser_Index);
+               raise Bad_Config;
+            else
+               declare
+                  Next_Node : constant Syntax_Trees.Node_Access := Parse.Peek_Current_First_Sequential_Terminal
+                    (Super, Shared, Item.Config);
+               begin
+                  Append
+                    (Item.Config.Ops,
+                     (Fast_Forward,
+                      FF_First_Index => Shared.Tree.Get_Sequential_Index (First_Node),
+                      FF_Next_Index  => Shared.Tree.Get_Sequential_Index (Next_Node)));
+               end;
+            end if;
+         end if;
+         Local_Config_Heap.Add (Item.Config);
+         if Trace_McKenzie > Detail then
+            Base.Put (Super, Shared, "new error point ", Parser_Index, Item.Config);
+         end if;
+      end Enqueue;
+
+      Abandon_If_Fail : Boolean := False;
    begin
       if Length (Config.Ops) > 0 then
          declare
-            Op : Config_Op renames Constant_Ref (Config.Ops, Last_Index (Config.Ops));
+            Op : Recover_Op renames Constant_Ref (Config.Ops, Last_Index (Config.Ops));
          begin
             case Op.Op is
             when Push_Back =>
@@ -344,16 +395,18 @@ package body WisiToken.Parse.LR.McKenzie_Recover.Explore is
                return Continue;
 
             when Undo_Reduce =>
-               if Config.Check_Status.Label /= Ok then
+               if Config.In_Parse_Action_Status.Label /= Ok then
-                  --  This is the "ignore error" solution for a check fail; check it.
-                  Config.Check_Status   := (Label => Ok);
-                  Config.Error_Token.ID := Invalid_Token_ID;
+                  Config.In_Parse_Action_Status := (Label => Ok);
+                  Config.Error_Token            := Syntax_Trees.Invalid_Recover_Token;
 
                else
-                  --  Check would undo the Undo_Reduce, leading to
-                  --  duplicate results.
-                  return Continue;
+                  --  Check might just undo the Undo_Reduce, but sometimes it's the last
+                  --  op required to succeed after Delete; test_mckenzie_recover.adb
+                  --  Error_2, Extra_Begin_1.
+                  Abandon_If_Fail := True;
                end if;
+
             when others =>
                --  Check it
                null;
@@ -366,96 +419,96 @@ package body WisiToken.Parse.LR.McKenzie_Recover.Explore is
          All_Conflicts => False,
          Trace_Prefix  => "check")
       then
-         Config.Error_Token.ID := Invalid_Token_ID;
-         --  FIXME: if there were conflicts, enqueue them; they might yield a
-         --  cheaper or same cost solution?
+         Config.Error_Token    := Syntax_Trees.Invalid_Recover_Token;
          if Trace_McKenzie > Extra then
-            Put_Line (Super.Trace.all, Super.Label (Parser_Index), "check result: SUCCESS");
+            Put_Line (Shared.Tree, Super.Stream (Parser_Index), "check result: SUCCESS");
          end if;
          return Success;
       end if;
 
-      --  Set Config.error to reflect failure, if it is at current token, so
-      --  Use_Minimal_Complete_Actions can see it.
-      declare
-         Item : Parse.Parse_Item renames 
Parse.Parse_Item_Array_Refs.Constant_Ref
-           (Parse_Items, First_Index (Parse_Items));
-      begin
-         if Item.Config.Check_Status.Label /= Ok then
-            Config.Check_Status := Item.Config.Check_Status;
-            Config.Error_Token  := Item.Config.Error_Token;
+      if Abandon_If_Fail then
+         return Abandon;
+      end if;
 
-            --  Explore cannot fix a check fail; only Language_Fixes can. The
-            --  "ignore error" case is handled immediately on return from
-            --  Language_Fixes in Process_One, below.
-            Result := Abandon;
+      if Parse.Parse_Item_Arrays.Length (Parse_Items) = 1 then
+         --  Return Abandon or Continue.
+         declare
+            Item : Parse.Parse_Item renames Parse.Parse_Item_Array_Refs.Variable_Ref
+              (Parse_Items, First_Index (Parse_Items));
+         begin
+            if Item.Config.In_Parse_Action_Status.Label /= Ok then
+               Config.In_Parse_Action_Status := Item.Config.In_Parse_Action_Status;
+               Config.Error_Token            := Item.Config.Error_Token;
+
+               if Item.Shift_Count > 0 or Item.Reduce_Count > 0 then
+                  --  Progress was made, so let Language_Fixes try again on the new
+                  --  Config. Checking Reduce_Count > 0 is required for
+                  --  test_mckenzie_recover.adb Missing_Name_6.
+                  Enqueue (Item);
+               end if;
 
-         elsif Item.Config.Error_Token.ID /= Invalid_Token_ID then
+               --  Explore cannot fix an In_Parse_Action fail; only Language_Fixes.
+               --  The "ignore error" case is handled immediately on return from
+               --  Language_Fixes in Process_One, below.
+               return Abandon;
 
-            if Item.Shift_Count = 0 then
-               Config.Error_Token  := Item.Config.Error_Token;
-               Config.Check_Status := (Label => Ok);
             else
-               --  Error is not at current token, but Explore might find something
-               --  that will help (see test_mckenzie_recover.adb Extra_Begin). On the
-               --  other hand, this can lead to lots of bogus configs (see
-               --  If_In_Handler).
-               Config.Error_Token.ID := Invalid_Token_ID;
-               Config.Check_Status   := (Label => Ok);
+               if Item.Shift_Count = 0 then
+                  --  Parse did not process any Deletes from Insert_Delete; Fast_Forward
+                  --  did that. So the very first token caused an error, and Config is
+                  --  unchanged. Just set the error.
+                  Config.Error_Token  := Item.Config.Error_Token;
+                  Config.In_Parse_Action_Status := (Label => Ok);
+                  return Continue;
+               else
+                  --  Item.Config differs from Config, so enqueue it.
+                  Enqueue (Item);
+
+                  --  Also Continue
+                  --  Config; Explore might find something that will help (see
+                  --  test_mckenzie_recover.adb Extra_Begin). On the other hand, this
+                  --  can lead to lots of bogus configs (see If_In_Handler).
+                  Config.Error_Token    := Syntax_Trees.Invalid_Recover_Token;
+                  Config.In_Parse_Action_Status   := (Label => Ok);
+
+                  return Continue;
+               end if;
             end if;
-         end if;
-      end;
+         end;
+      end if;
 
-      --  All Parse_Items either failed or were not parsed; if they failed
-      --  and made progress, enqueue them.
+      --  More than one Parse_Item, all failed, all made progress,
+      --  so enqueue them.
+      --
+      --  We know they all made progress because not doing so means the
+      --  first token encountered an error, there is no chance to encounter
+      --  a conflict, and there can be only one Parse_Item, which is handled
+      --  above.
       for I in First_Index (Parse_Items) .. Last_Index (Parse_Items) loop
          declare
             Item : Parse.Parse_Item renames Parse.Parse_Item_Array_Refs.Variable_Ref (Parse_Items, I);
          begin
-            --  When Parse starts above, Config.Current_Shared_Token matches
-            --  Config.Ops. So if Item.Config.Current_Shared_Token >
-            --  Config.Current_Shared_Token, it made some progress. Append or
-            --  update a Fast_Forward to indicate the changed edit point.
-            if Item.Config.Error_Token.ID /= Invalid_Token_ID and
-              Item.Config.Current_Shared_Token > Config.Current_Shared_Token
-            then
-               Item.Config.Minimal_Complete_State := None;
-               Item.Config.Matching_Begin_Done    := False;
+            pragma Assert (Item.Parsed and Shared.Tree.Element_ID (Item.Config.Error_Token) /= Invalid_Token_ID);
 
-               if Constant_Ref (Item.Config.Ops, Last_Index 
(Item.Config.Ops)).Op = Fast_Forward then
-                  --  Update the trailing Fast_Forward.
-                  Variable_Ref (Item.Config.Ops, Last_Index 
(Item.Config.Ops)).FF_Token_Index :=
-                    Item.Config.Current_Shared_Token;
-               else
-                  if Is_Full (Item.Config.Ops) then
-                     Super.Config_Full ("check 1", Parser_Index);
-                     raise Bad_Config;
-                  else
-                     Append (Item.Config.Ops, (Fast_Forward, 
Item.Config.Current_Shared_Token));
-                  end if;
-               end if;
-               Local_Config_Heap.Add (Item.Config);
-               if Trace_McKenzie > Detail then
-                  Base.Put ("new error point ", Super, Shared, Parser_Index, 
Item.Config);
-               end if;
-            end if;
+            Enqueue (Item);
          end;
       end loop;
+      return Abandon;
 
-      if Trace_McKenzie > Extra then
-         Put_Line (Super.Trace.all, Super.Label (Parser_Index), "check result: 
" & Result'Image);
-      end if;
-      return Result;
    exception
    when Bad_Config =>
-      return Abandon;
+      if Debug_Mode then
+         raise;
+      else
+         return Abandon;
+      end if;
    end Check;
 
    function Check_Reduce_To_Start
-     (Super        : not null access Base.Supervisor;
-      Shared       : not null access Base.Shared;
-      Parser_Index : in              SAL.Base_Peek_Type;
-      Orig_Config  : in              Configuration)
+     (Super        : in out Base.Supervisor;
+      Shared       : in out Parser.Parser;
+      Parser_Index : in     SAL.Base_Peek_Type;
+      Orig_Config  : in     Configuration)
      return Boolean
       --  Returns True if Config reduces to the start nonterm.
    is
@@ -463,7 +516,7 @@ package body WisiToken.Parse.LR.McKenzie_Recover.Explore is
 
       function To_Reduce_Action (Item : in Minimal_Action) return Reduce_Action_Rec
       is begin
-         return (Reduce, Item.Production, null, null, Item.Token_Count);
+         return (Reduce, Item.Production, Item.Token_Count);
       end To_Reduce_Action;
 
       Local_Config_Heap : Config_Heaps.Heap_Type; -- never used, because Do_Language_Fixes is False.
@@ -475,7 +528,7 @@ package body WisiToken.Parse.LR.McKenzie_Recover.Explore is
          case Actions.Length is
          when 0 =>
          if (for some Item of Table.States (Config.Stack.Peek.State).Kernel =>
-                  Item.Production.LHS = Super.Trace.Descriptor.Accept_ID)
+                  Item.Production.LHS = Shared.Tree.Lexer.Descriptor.Accept_ID)
             then
                return True;
             else
@@ -505,78 +558,67 @@ package body WisiToken.Parse.LR.McKenzie_Recover.Explore is
    exception
    when Bad_Config =>
       --  From Do_Reduce_1
-      return False;
+      if Debug_Mode then
+         raise;
+      else
+         return False;
+      end if;
    end Check_Reduce_To_Start;
 
    procedure Try_Push_Back
-     (Super             : not null access Base.Supervisor;
-      Shared            : not null access Base.Shared;
+     (Super             : in out Base.Supervisor;
+      Shared            : in Parser.Parser;
       Parser_Index      : in              SAL.Base_Peek_Type;
       Config            : in              Configuration;
       Local_Config_Heap : in out          Config_Heaps.Heap_Type)
+   --  Try pushing back the stack top, to allow operations at that point.
+   --  We assume the caller used Push_Back_Valid.
    is
-      Trace          : WisiToken.Trace'Class renames Super.Trace.all;
-      McKenzie_Param : McKenzie_Param_Type renames Shared.Table.McKenzie_Param;
-      Prev_Recover   : constant WisiToken.Base_Token_Index := 
Super.Parser_State (Parser_Index).Resume_Token_Goal;
+      use Recover_Op_Arrays;
+      use Syntax_Trees;
 
       Token : constant Recover_Token := Config.Stack.Peek.Token;
+
+      New_Config : Configuration := Config;
    begin
-      --  Try pushing back the stack top, to allow insert and other
-      --  operations at that point.
-      --
       --  Since we are not actually changing the source text, it is tempting
       --  to give this operation zero cost. But then we keep doing push_back
       --  forever, making no progress. So we give it a cost.
 
-      if Token.Min_Terminal_Index /= Invalid_Token_Index and
-        --  No point in pushing back an empty nonterm; that leads to duplicate
-        --  solutions with Undo_Reduce; see test_mckenzie_recover.adb Error_2.
-
-        (Prev_Recover = Invalid_Token_Index or else Prev_Recover < 
Token.Min_Terminal_Index)
-        --  Don't push back past previous error recover (that would require
-        --  keeping track of previous inserts/deletes, and would not be useful
-        --  in most cases).
-      then
-         declare
-            use Config_Op_Arrays;
-            New_Config : Configuration := Config;
-         begin
-            New_Config.Error_Token.ID := Invalid_Token_ID;
-            New_Config.Check_Status   := (Label => 
WisiToken.Semantic_Checks.Ok);
+      New_Config.Error_Token            := Syntax_Trees.Invalid_Recover_Token;
+      New_Config.In_Parse_Action_Status := (Label => 
Syntax_Trees.In_Parse_Actions.Ok);
 
-            New_Config.Stack.Pop;
+      if Is_Full (New_Config.Ops) then
+         Super.Config_Full (Shared, "push_back 1", Parser_Index);
+         raise Bad_Config;
+      end if;
 
-            if Is_Full (New_Config.Ops) then
-               Super.Config_Full ("push_back 1", Parser_Index);
-               raise Bad_Config;
-            else
-               if Token.Min_Terminal_Index = Invalid_Token_Index then
-                  --  Token is empty; Config.current_shared_token does not 
change, no
-                  --  cost increase.
-                  Append (New_Config.Ops, (Push_Back, Token.ID, 
New_Config.Current_Shared_Token));
-               else
-                  New_Config.Cost := New_Config.Cost + 
McKenzie_Param.Push_Back (Token.ID);
-                  Append (New_Config.Ops, (Push_Back, Token.ID, 
Token.Min_Terminal_Index));
-                  New_Config.Current_Shared_Token := Token.Min_Terminal_Index;
-               end if;
-            end if;
-            New_Config.Strategy_Counts (Push_Back) := 
New_Config.Strategy_Counts (Push_Back) + 1;
+      Do_Push_Back (Shared.Tree, New_Config);
+      New_Config.Cost := @ + Shared.Table.McKenzie_Param.Push_Back (Shared.Tree.Element_ID (Token));
+      New_Config.Strategy_Counts (Push_Back) := New_Config.Strategy_Counts (Push_Back) + 1;
 
-            Local_Config_Heap.Add (New_Config);
+      Local_Config_Heap.Add (New_Config);
 
-            if Trace_McKenzie > Detail then
-               Base.Put ("push_back " & Image (Token.ID, 
Trace.Descriptor.all), Super, Shared,
-                         Parser_Index, New_Config);
-            end if;
-         end;
+      if Trace_McKenzie > Detail then
+         Super.Put
+           (Shared, "push_back " & Image (Shared.Tree.Element_ID (Token), Shared.Tree.Lexer.Descriptor.all),
+            Parser_Index, New_Config);
       end if;
    end Try_Push_Back;
 
-   function Just_Pushed_Back_Or_Deleted (Config : in Configuration; ID : in Token_ID) return Boolean
+   function Just_Pushed_Back_Or_Deleted
+     (Super  : in out Base.Supervisor;
+      Shared : in out Parser.Parser;
+      Config : in     Configuration;
+      ID     : in     Token_ID)
+     return Boolean
    is
-      use Config_Op_Arrays, Config_Op_Array_Refs;
-      Last_Token_Index : WisiToken.Token_Index := Config.Current_Shared_Token;
-      --  Index of token in last op checked.
+      use Recover_Op_Arrays, Recover_Op_Array_Refs;
+      use Syntax_Trees;
+
+      Target_Token_Index : Sequential_Index :=
+        Shared.Tree.Get_Sequential_Index (Parse.Peek_Current_First_Sequential_Terminal (Super, Shared, Config));
+      --  Next token; ID might be inserted before it (see Do_Shift).
    begin
       --  This function is called when considering whether to insert ID before
       --  Config.Current_Shared_Token.
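
Just_Pushed_Back_Or_Deleted walks Config.Ops backwards from the current edit point so Explore does not immediately re-insert a token it just pushed back or deleted. A standalone sketch of that reverse walk, with toy op types in place of Recover_Op and sequential indices:

with Ada.Containers.Vectors;
procedure Pushed_Back_Demo is
   type Token_ID is new Positive;
   type Token_Index is new Integer;

   type Op_Kind is (Push_Back, Delete, Other);
   type Op is record
      Kind  : Op_Kind;
      ID    : Token_ID;
      Index : Token_Index;  --  edit point recorded with the op
   end record;

   package Op_Vectors is new Ada.Containers.Vectors (Positive, Op);

   function Just_Pushed_Back_Or_Deleted
     (Ops : Op_Vectors.Vector; ID : Token_ID; Current : Token_Index) return Boolean
   is
      Target : Token_Index := Current;
   begin
      for I in reverse Ops.First_Index .. Ops.Last_Index loop
         declare
            O : constant Op := Ops (I);
         begin
            case O.Kind is
               when Push_Back =>
                  if O.Index = Target then
                     if O.ID = ID then
                        return True;
                     end if;
                     Target := O.Index - 1;  --  keep walking left
                  else
                     return False;           --  op is at a different edit point
                  end if;
               when Delete =>
                  if O.Index = Target - 1 then
                     if O.ID = ID then
                        return True;
                     end if;
                     Target := O.Index;
                  else
                     return False;
                  end if;
               when Other =>
                  return False;
            end case;
         end;
      end loop;
      return False;
   end Just_Pushed_Back_Or_Deleted;

   Ops : Op_Vectors.Vector;
begin
   Ops.Append ((Push_Back, ID => 4, Index => 2));  --  pushed back token 4 at index 2
   pragma Assert (Just_Pushed_Back_Or_Deleted (Ops, ID => 4, Current => 2));
   pragma Assert (not Just_Pushed_Back_Or_Deleted (Ops, ID => 9, Current => 2));
end Pushed_Back_Demo;
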
@@ -588,22 +630,18 @@ package body WisiToken.Parse.LR.McKenzie_Recover.Explore is
       --
       for I in reverse First_Index (Config.Ops) .. Last_Index (Config.Ops) loop
          declare
-            Op : Config_Op renames Constant_Ref (Config.Ops, I);
+            Op : Recover_Op renames Constant_Ref (Config.Ops, I);
          begin
             case Op.Op is
             when Push_Back =>
               --  The case we are preventing for Push_Back is typically one of:
               --  (PUSH_BACK, Identifier, 2), (INSERT, Identifier, 2)
               --  (PUSH_BACK, Identifier, 2), (PUSH_BACK, END, 3), (INSERT, Identifier, 3), (INSERT, END, 3),
-               if Op.PB_Token_Index = Last_Token_Index then
+               if Op.PB_Token_Index = Target_Token_Index then
                   if Op.PB_ID = ID then
                      return True;
                   else
-                     if Op.PB_Token_Index = WisiToken.Token_Index'First then
-                        return False;
-                     else
-                        Last_Token_Index := Op.PB_Token_Index - 1;
-                     end if;
+                     Target_Token_Index := Op.PB_Token_Index - 1;
                   end if;
                else
                   --  Op is at a different edit point.
@@ -611,11 +649,11 @@ package body WisiToken.Parse.LR.McKenzie_Recover.Explore is
                end if;
 
             when Delete =>
-               if Op.Del_Token_Index = Last_Token_Index - 1 then
+               if Op.Del_Token_Index = Target_Token_Index - 1 then
                   if Op.Del_ID = ID then
                      return True;
                   else
-                     Last_Token_Index := Op.Del_Token_Index;
+                     Target_Token_Index := Op.Del_Token_Index;
                   end if;
                else
                   --  Op is at a different edit point.
@@ -631,60 +669,56 @@ package body WisiToken.Parse.LR.McKenzie_Recover.Explore is
    end Just_Pushed_Back_Or_Deleted;
 
    procedure Try_Undo_Reduce
-     (Super             : not null access Base.Supervisor;
-      Shared            : not null access Base.Shared;
-      Parser_Index      : in              SAL.Base_Peek_Type;
-      Config            : in              Configuration;
-      Local_Config_Heap : in out          Config_Heaps.Heap_Type)
+     (Super             : in out Base.Supervisor;
+      Shared            : in out Parser.Parser;
+      Parser_Index      : in     SAL.Base_Peek_Type;
+      Config            : in     Configuration;
+      Local_Config_Heap : in out Config_Heaps.Heap_Type)
    is
-      use Config_Op_Arrays;
+      use Recover_Op_Arrays;
 
-      Trace          : WisiToken.Trace'Class renames Super.Trace.all;
       McKenzie_Param : McKenzie_Param_Type renames Shared.Table.McKenzie_Param;
-      Token          : constant Recover_Token := Config.Stack.Peek.Token;
-      New_Config     : Configuration          := Config;
-      Token_Count    : Ada.Containers.Count_Type;
+      Token          : constant Syntax_Trees.Recover_Token := Config.Stack.Peek.Token;
+      New_Config     : Configuration                       := Config;
    begin
-      --  Try expanding the nonterm on the stack top, to allow pushing_back
-      --  its components, or insert and other operations at that point.
+      pragma Assert (not Token.Virtual); -- We assume caller used Undo_Reduce_Valid.
 
-      New_Config.Error_Token.ID := Invalid_Token_ID;
-      New_Config.Check_Status   := (Label => WisiToken.Semantic_Checks.Ok);
+      New_Config.Error_Token    := Syntax_Trees.Invalid_Recover_Token;
+      New_Config.In_Parse_Action_Status   := (Label => Syntax_Trees.In_Parse_Actions.Ok);
 
-      Token_Count := Undo_Reduce (New_Config.Stack, Super.Parser_State 
(Parser_Index).Tree);
-
-      if Token.Min_Terminal_Index /= Invalid_Token_Index  then
-         --  If Token is empty no cost increase.
-         New_Config.Cost := New_Config.Cost + McKenzie_Param.Undo_Reduce (Token.ID);
+      if not Shared.Tree.Is_Empty_Nonterm (Token.Element_Node) then
+         --  Token is not empty.
+         New_Config.Cost := New_Config.Cost + McKenzie_Param.Undo_Reduce (Shared.Tree.Element_ID (Token));
       end if;
 
       if Is_Full (New_Config.Ops) then
-         Super.Config_Full ("undo_reduce 1", Parser_Index);
+         Super.Config_Full (Shared, "undo_reduce 1", Parser_Index);
          raise Bad_Config;
-      else
-         Append (New_Config.Ops, (Undo_Reduce, Token.ID, Token_Count));
       end if;
-      New_Config.Strategy_Counts (Undo_Reduce) := New_Config.Strategy_Counts 
(Undo_Reduce) + 1;
+
+      Unchecked_Undo_Reduce (Super, Shared, New_Config);
+
+      New_Config.Strategy_Counts (Undo_Reduce) := @ + 1;
 
       Local_Config_Heap.Add (New_Config);
 
       if Trace_McKenzie > Detail then
-         Base.Put ("undo_reduce " & Image (Token.ID, Trace.Descriptor.all), Super, Shared,
+         Super.Put (Shared, "undo_reduce " & Image (Shared.Tree.Element_ID (Token), Shared.Tree.Lexer.Descriptor.all),
                    Parser_Index, New_Config);
       end if;
    end Try_Undo_Reduce;
 
    procedure Insert_From_Action_List
-     (Super             : not null access Base.Supervisor;
-      Shared            : not null access Base.Shared;
-      Parser_Index      : in              SAL.Base_Peek_Type;
-      Config            : in              Configuration;
-      Minimal_Insert    : in              Token_ID_Arrays.Vector;
-      Local_Config_Heap : in out          Config_Heaps.Heap_Type)
+     (Super             : in out Base.Supervisor;
+      Shared            : in out Parser.Parser;
+      Parser_Index      : in     SAL.Base_Peek_Type;
+      Config            : in     Configuration;
+      Minimal_Insert    : in     Token_ID_Arrays.Vector;
+      Local_Config_Heap : in out Config_Heaps.Heap_Type)
    is
       Table      : Parse_Table renames Shared.Table.all;
-      EOF_ID     : Token_ID renames Super.Trace.Descriptor.EOI_ID;
-      Descriptor : WisiToken.Descriptor renames Super.Trace.Descriptor.all;
+      EOF_ID     : Token_ID renames Shared.Tree.Lexer.Descriptor.EOI_ID;
+      Descriptor : WisiToken.Descriptor renames Shared.Tree.Lexer.Descriptor.all;
 
       --  Find terminal insertions from the current state's action_list to try.
       --
@@ -693,8 +727,8 @@ package body WisiToken.Parse.LR.McKenzie_Recover.Explore is
       --  conflicts or semantic check fails encountered, they create other
       --  configs to enqueue.
 
-      Current_Token : constant Token_ID := Current_Token_ID_Peek
-        (Shared.Terminals.all, Config.Current_Shared_Token, 
Config.Insert_Delete, Config.Current_Insert_Delete);
+      Current_First_Terminal_ID : constant Token_ID := Shared.Tree.ID
+        (Parse.Peek_Current_First_Terminal (Shared.Tree, Config));
 
       Cached_Config : Configuration;
       Cached_Action : Reduce_Action_Rec;
@@ -715,20 +749,20 @@ package body WisiToken.Parse.LR.McKenzie_Recover.Explore is
                if ID /= EOF_ID and then -- can't insert eof
                  ID /= Invalid_Token_ID -- invalid when Verb = Error
                then
-                  if Just_Pushed_Back_Or_Deleted (Config, ID) then
+                  if Just_Pushed_Back_Or_Deleted (Super, Shared, Config, ID) 
then
                      if Trace_McKenzie > Extra then
                         Put_Line
-                          (Super.Trace.all, Super.Label (Parser_Index), 
"Insert: abandon " & Image (ID, Descriptor) &
-                             ": undo push_back");
+                          (Shared.Tree, Super.Stream (Parser_Index),
+                           "Insert: abandon " & Image (ID, Descriptor) & ": 
undo push_back");
                      end if;
-                  elsif ID = Current_Token then
+                  elsif ID = Current_First_Terminal_ID then
                      --  This is needed because we allow explore when the 
error is not at
                      --  the explore point; it prevents inserting useless 
tokens (ie
                      --  'identifier ;' in ada_lite).
                      if Trace_McKenzie > Extra then
                         Put_Line
-                          (Super.Trace.all, Super.Label (Parser_Index), 
"Insert: abandon " & Image (ID, Descriptor) &
-                             ": current token");
+                          (Shared.Tree, Super.Stream (Parser_Index),
+                           "Insert: abandon " & Image (ID, Descriptor) & ": 
current token");
                      end if;
 
                   elsif (for some Minimal of Minimal_Insert => ID = Minimal) 
then
@@ -752,16 +786,18 @@ package body WisiToken.Parse.LR.McKenzie_Recover.Explore is
                            declare
                               New_Config : Configuration := Config;
                            begin
-                              New_Config.Error_Token.ID := Invalid_Token_ID;
-                              New_Config.Check_Status   := (Label => 
WisiToken.Semantic_Checks.Ok);
+                              New_Config.Error_Token            := 
Syntax_Trees.Invalid_Recover_Token;
+                              New_Config.In_Parse_Action_Status := (Label => 
Syntax_Trees.In_Parse_Actions.Ok);
 
                               Do_Reduce_1
-                                ("Insert", Super, Shared, Parser_Index, 
Local_Config_Heap, New_Config, Action);
+                                ("Insert " & Image (ID, Descriptor), Super, 
Shared, Parser_Index, Local_Config_Heap,
+                                 New_Config, Action);
                               Cached_Config := New_Config;
                               Cached_Action := Action;
 
                               Do_Reduce_2
-                                ("Insert", Super, Shared, Parser_Index, 
Local_Config_Heap, New_Config, ID,
+                                ("Insert " & Image (ID, Descriptor), Super, 
Shared, Parser_Index, Local_Config_Heap,
+                                 New_Config, ID,
                                  Cost_Delta => 0,
                                  Strategy   => Insert);
                            end;
@@ -771,7 +807,8 @@ package body WisiToken.Parse.LR.McKenzie_Recover.Explore is
                               New_Config : Configuration := Cached_Config;
                            begin
                               Do_Reduce_2
-                                ("Insert", Super, Shared, Parser_Index, 
Local_Config_Heap, New_Config, ID,
+                                ("Insert " & Image (ID, Descriptor) & " 
(cached reduce)", Super, Shared, Parser_Index,
+                                 Local_Config_Heap, New_Config, ID,
                                  Cost_Delta => 0,
                                  Strategy   => Insert);
                            end;
@@ -785,6 +822,10 @@ package body WisiToken.Parse.LR.McKenzie_Recover.Explore is
                      end case;
                   end if;
                end if;
+            exception
+            when Invalid_Case =>
+               --  Try other actions
+               null;
             end;
             I := I.Next;
          end loop;
@@ -792,18 +833,18 @@ package body WisiToken.Parse.LR.McKenzie_Recover.Explore is
    end Insert_From_Action_List;
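
The handler added inside the action loop above ("when Invalid_Case => ... null") lets exploration abandon a single candidate token that turns out not to apply and continue with the remaining actions, instead of aborting the whole scan. A minimal self-contained sketch of that pattern; Invalid_Case, Try_Candidate and the loop bounds here are illustrative, not the package's own:

   declare
      Invalid_Case : exception;

      procedure Try_Candidate (ID : in Positive) is
      begin
         if ID mod 2 = 0 then
            raise Invalid_Case;  --  stand-in for "this candidate cannot apply"
         end if;
      end Try_Candidate;
   begin
      for ID in 1 .. 5 loop
         begin
            Try_Candidate (ID);
         exception
         when Invalid_Case =>
            null;  --  abandon this candidate, keep scanning the rest
         end;
      end loop;
   end;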
 
    function Insert_Minimal_Complete_Actions
-     (Super             : not null access Base.Supervisor;
-      Shared            : not null access Base.Shared;
-      Parser_Index      : in              SAL.Base_Peek_Type;
-      Orig_Config       : in out          Configuration;
-      Local_Config_Heap : in out          Config_Heaps.Heap_Type)
+     (Super             : in out Base.Supervisor;
+      Shared            : in out Parser.Parser;
+      Parser_Index      : in     SAL.Base_Peek_Type;
+      Orig_Config       : in out Configuration;
+      Local_Config_Heap : in out Config_Heaps.Heap_Type)
      return Token_ID_Arrays.Vector
       --  Return tokens inserted (empty if none).
    is
       use Ada.Containers;
 
       Table         : Parse_Table renames Shared.Table.all;
-      Descriptor    : WisiToken.Descriptor renames Super.Trace.Descriptor.all;
+      Descriptor    : WisiToken.Descriptor renames 
Shared.Tree.Lexer.Descriptor.all;
       Inserted      : Token_ID_Array (1 .. 10) := (others => Invalid_Token_ID);
       Inserted_Last : Integer                  := Inserted'First - 1;
 
@@ -824,7 +865,7 @@ package body WisiToken.Parse.LR.McKenzie_Recover.Explore is
       procedure Safe_Add_Work (Label : in String; Item : in Work_Item)
       is begin
          if Is_Full (Work) then
-            Super.Config_Full ("Minimal_Complete_Actions " & Label, 
Parser_Index);
+            Super.Config_Full (Shared, "Minimal_Complete_Actions " & Label, 
Parser_Index);
             raise Bad_Config;
          else
             Add (Work, Item);
@@ -832,27 +873,27 @@ package body WisiToken.Parse.LR.McKenzie_Recover.Explore is
       end Safe_Add_Work;
 
       function To_Reduce_Action (Action : in Minimal_Action) return 
Reduce_Action_Rec
-        is (Reduce, Action.Production, null, null, Action.Token_Count);
+        is (Reduce, Action.Production, Action.Token_Count);
 
       procedure Minimal_Do_Shift
         (Action     : in     Minimal_Action;
          Cost_Delta : in     Integer;
          Config     : in out Configuration)
       is begin
-         if Just_Pushed_Back_Or_Deleted (Config, Action.ID) then
+         if Just_Pushed_Back_Or_Deleted (Super, Shared, Config, Action.ID) then
             if Trace_McKenzie > Extra then
                Put_Line
-                 (Super.Trace.all, Super.Label (Parser_Index),
+                 (Shared.Tree, Super.Stream (Parser_Index),
                   "Minimal_Complete_Actions: abandon " & Image (Action.ID, 
Descriptor) & ": undo push back");
             end if;
          else
-            Config.Check_Status           := (Label => 
WisiToken.Semantic_Checks.Ok);
+            Config.In_Parse_Action_Status           := (Label => 
Syntax_Trees.In_Parse_Actions.Ok);
             Config.Minimal_Complete_State := Active;
             Inserted_Last                 := Inserted_Last + 1;
             if Inserted_Last <= Inserted'Last then
                Inserted (Inserted_Last)      := Action.ID;
             else
-               Super.Config_Full ("minimal_do_shift Inserted", Parser_Index);
+               Super.Config_Full (Shared, "minimal_do_shift Inserted", 
Parser_Index);
                raise Bad_Config;
             end if;
 
@@ -864,8 +905,7 @@ package body WisiToken.Parse.LR.McKenzie_Recover.Explore is
       end Minimal_Do_Shift;
 
       procedure Enqueue_Min_Actions
-        (Label   : in String;
-         Actions : in Minimal_Action_Arrays.Vector;
+        (Actions : in Minimal_Action_Arrays.Vector;
          Config  : in Configuration)
       is
          use SAL;
@@ -875,7 +915,7 @@ package body WisiToken.Parse.LR.McKenzie_Recover.Explore is
       begin
          if Trace_McKenzie > Extra then
             Put_Line
-              (Super.Trace.all, Super.Label (Parser_Index), 
"Minimal_Complete_Actions: " & Label &
+              (Shared.Tree, Super.Stream (Parser_Index), 
"Minimal_Complete_Actions: " &
                  Image (Actions, Descriptor));
          end if;
 
@@ -887,8 +927,7 @@ package body WisiToken.Parse.LR.McKenzie_Recover.Explore is
             return;
          end if;
 
-         --  More than one minimal action in State; try to use next states and
-         --  recursion to pick one.
+         --  More than one minimal action in State; try to use next states to 
pick one.
          Actions_Loop :
          for I in Actions.First_Index .. Actions.Last_Index loop
             declare
@@ -917,7 +956,7 @@ package body WisiToken.Parse.LR.McKenzie_Recover.Explore is
                   case Action.Verb is
                   when Shift =>
                      New_Stack.Push
-                       ((Action.State, Invalid_Node_Index, (ID => Action.ID, 
others => <>)));
+                       ((Action.State, (ID => Action.ID, others => <>)));
                      Next_State := Action.State;
                      Match_ID   := Action.ID;
 
@@ -926,16 +965,16 @@ package body WisiToken.Parse.LR.McKenzie_Recover.Explore is
                      Next_State := Goto_For (Shared.Table.all, 
New_Stack.Peek.State, Action.Production.LHS);
                      if Next_State = Unknown_State then
                         --  We get here when Insert_From_Action_Table started 
us down a bad path
-                        raise Bad_Config;
+                        raise Invalid_Case;
                      end if;
 
                      New_Stack.Push
-                       ((Next_State, Invalid_Node_Index, (ID => 
Action.Production.LHS, others => <>)));
+                       ((Next_State, (ID => Action.Production.LHS, others => 
<>)));
                      Match_ID   := Action.Production.LHS;
                   end case;
 
                   if Trace_McKenzie > Extra then
-                     Super.Trace.Put (Next_State'Image & " " & Trimmed_Image 
(Item.Production));
+                     Shared.Tree.Lexer.Trace.Put (Next_State'Image & " " & 
Trimmed_Image (Item.Production));
                   end if;
 
                   for Item of Shared.Table.States (Next_State).Kernel loop
@@ -965,10 +1004,9 @@ package body WisiToken.Parse.LR.McKenzie_Recover.Explore is
                      Action.Production.LHS));
             begin
                if Trace_McKenzie > Extra then
-                  Super.Trace.Put
-                    ("task" & Task_Attributes.Value'Image &
-                       Super.Label (Parser_Index)'Image & ": 
Minimal_Complete_Actions: " &
-                       Image (Action, Descriptor));
+                  Put_Line
+                    (Shared.Tree, Super.Stream (Parser_Index),
+                     "Minimal_Complete_Actions: " & Image (Action, 
Descriptor));
                end if;
 
                for Item of Shared.Table.States (Next_State).Kernel loop
@@ -982,14 +1020,14 @@ package body WisiToken.Parse.LR.McKenzie_Recover.Explore is
                      elsif Item.Length_After_Dot < Length (I) then
                         if Trace_McKenzie > Extra then
                            --  Length_After_Dot outputs this in other branch
-                           Super.Trace.Put (Next_State'Image & " " & 
Trimmed_Image (Item.Production));
+                           Shared.Tree.Lexer.Trace.Put (Next_State'Image & " " 
& Trimmed_Image (Item.Production));
                         end if;
                         Length (I) := Item.Length_After_Dot;
 
                      end if;
 
                      if Trace_McKenzie > Extra then
-                        Super.Trace.Put (" length" & Length (I)'Image);
+                        Shared.Tree.Lexer.Trace.Put (" length" & Length 
(I)'Image);
                      end if;
                      if Length (I) < Min_Length then
                         Min_Length := Length (I);
@@ -997,7 +1035,7 @@ package body WisiToken.Parse.LR.McKenzie_Recover.Explore is
                   end if;
                end loop;
                if Trace_McKenzie > Extra then
-                  Super.Trace.New_Line;
+                  Shared.Tree.Lexer.Trace.New_Line;
                end if;
             end;
          end loop Actions_Loop;
@@ -1008,8 +1046,8 @@ package body WisiToken.Parse.LR.McKenzie_Recover.Explore is
 
             elsif Trace_McKenzie > Extra then
                Put_Line
-                 (Super.Trace.all, Super.Label (Parser_Index), 
"Minimal_Complete_Actions: drop " &
-                    Image (Actions (I), Descriptor) & " not minimal or 
recursive");
+                 (Shared.Tree, Super.Stream (Parser_Index), 
"Minimal_Complete_Actions: drop " &
+                    Image (Actions (I), Descriptor) & " not minimal");
             end if;
          end loop;
       end Enqueue_Min_Actions;
@@ -1022,13 +1060,12 @@ package body WisiToken.Parse.LR.McKenzie_Recover.Explore is
 
       elsif Orig_Config.Minimal_Complete_State = Done then
          if Trace_McKenzie > Extra then
-            Put_Line
-              (Super.Trace.all, Super.Label (Parser_Index), 
"Minimal_Complete_Actions: done");
+            Put_Line (Shared.Tree, Super.Stream (Parser_Index), 
"Minimal_Complete_Actions: done");
          end if;
          return Token_ID_Arrays.Empty_Vector;
       end if;
 
-      Enqueue_Min_Actions ("", Table.States 
(Orig_Config.Stack.Peek.State).Minimal_Complete_Actions, Orig_Config);
+      Enqueue_Min_Actions (Table.States 
(Orig_Config.Stack.Peek.State).Minimal_Complete_Actions, Orig_Config);
 
       loop
          exit when Is_Empty (Work);
@@ -1038,7 +1075,8 @@ package body WisiToken.Parse.LR.McKenzie_Recover.Explore is
          begin
             if Trace_McKenzie > Extra then
                Put_Line
-                 (Super.Trace.all, Super.Label (Parser_Index), 
"Minimal_Complete_Actions: dequeue work item " &
+                 (Shared.Tree, Super.Stream (Parser_Index),
+                  "Minimal_Complete_Actions: dequeue work item " &
                     Image (Item.Action, Descriptor));
             end if;
 
@@ -1060,10 +1098,11 @@ package body WisiToken.Parse.LR.McKenzie_Recover.Explore is
 
                      case Actions.Length is
                      when 0 =>
-                        if Trace_McKenzie > Extra then
+                        if Trace_McKenzie > Detail then
                            Put_Line
-                             (Super.Trace.all, Super.Label (Parser_Index),
-                              "Minimal_Complete_Actions abandoned: no 
actions");
+                             (Shared.Tree, Super.Stream (Parser_Index),
+                              "Minimal_Complete_Actions state" & 
Item.Config.Stack.Peek.State'Image &
+                                " abandoned: no actions");
                         end if;
                         exit;
                      when 1 =>
@@ -1076,7 +1115,7 @@ package body WisiToken.Parse.LR.McKenzie_Recover.Explore is
                         end case;
 
                      when others =>
-                        Enqueue_Min_Actions ("multiple actions ", Actions, 
Item.Config);
+                        Enqueue_Min_Actions (Actions, Item.Config);
                         exit;
                      end case;
                   end loop;
@@ -1085,6 +1124,9 @@ package body WisiToken.Parse.LR.McKenzie_Recover.Explore is
             when Shift =>
                Minimal_Do_Shift (Item.Action, Item.Cost_Delta, Item.Config);
             end case;
+         exception
+         when Invalid_Case =>
+            null;
          end;
       end loop;
 
@@ -1098,44 +1140,52 @@ package body WisiToken.Parse.LR.McKenzie_Recover.Explore is
       return To_Vector (Inserted (1 .. Inserted_Last));
    exception
    when Bad_Config =>
-      return Token_ID_Arrays.Empty_Vector;
+      if Debug_Mode then
+         raise;
+      else
+         return Token_ID_Arrays.Empty_Vector;
+      end if;
    end Insert_Minimal_Complete_Actions;
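
Throughout these hunks the recover subprograms trade their "not null access Base.Supervisor" and "not null access Base.Shared" parameters for plain "in out" parameters ("in out Base.Supervisor", "in out Parser.Parser"); since Ada 2012 this is legal for functions such as Insert_Minimal_Complete_Actions as well as for procedures, and it lets callers pass the objects directly instead of access values. A before/after sketch of the two profiles, using a hypothetical Recover_Sketch package and Supervisor type rather than the real declarations:

   package Recover_Sketch is
      type Supervisor is record
         Count : Natural := 0;
      end record;

      --  Old style: callers pass an access value.
      procedure Explore_Old (Super : not null access Supervisor);

      --  New style: callers pass the object directly.
      procedure Explore_New (Super : in out Supervisor);
   end Recover_Sketch;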
 
    procedure Insert_Matching_Begin
-     (Super                 : not null access Base.Supervisor;
-      Shared                : not null access Base.Shared;
-      Parser_Index          : in              SAL.Base_Peek_Type;
-      Config                : in              Configuration;
-      Local_Config_Heap     : in out          Config_Heaps.Heap_Type;
-      Matching_Begin_Tokens : in              Token_ID_Arrays.Vector)
+     (Super                 : in out Base.Supervisor;
+      Shared                : in out Parser.Parser;
+      Parser_Index          : in     SAL.Base_Peek_Type;
+      Config                : in     Configuration;
+      Local_Config_Heap     : in out Config_Heaps.Heap_Type;
+      Matching_Begin_Tokens : in     Token_ID_Arrays.Vector)
    is
       Table      : Parse_Table renames Shared.Table.all;
-      Descriptor : WisiToken.Descriptor renames Super.Trace.Descriptor.all;
+      Descriptor : WisiToken.Descriptor renames 
Shared.Tree.Lexer.Descriptor.all;
    begin
-      --  We don't check for insert = current token; that's either ok or a
-      --  severe bug in Shared.Language_Matching_Begin_Tokens.
-
       if Config.Matching_Begin_Done then
          if Trace_McKenzie > Extra then
-            Put_Line (Super.Trace.all, Super.Label (Parser_Index), 
"Matching_Begin abandoned: done");
+            Put_Line (Shared.Tree, Super.Stream (Parser_Index), 
"Matching_Begin abandoned: done");
          end if;
          return;
       end if;
 
-      if Just_Pushed_Back_Or_Deleted (Config, Matching_Begin_Tokens 
(Matching_Begin_Tokens.First_Index)) then
+      if Just_Pushed_Back_Or_Deleted (Super, Shared, Config, 
Matching_Begin_Tokens (Matching_Begin_Tokens.First_Index))
+      then
          if Trace_McKenzie > Extra then
             Put_Line
-              (Super.Trace.all, Super.Label (Parser_Index), "Matching_Begin 
abandoned " &
+              (Shared.Tree, Super.Stream (Parser_Index), "Matching_Begin 
abandoned " &
                  Image (Matching_Begin_Tokens 
(Matching_Begin_Tokens.First_Index), Descriptor) & ": undo push_back");
          end if;
          return;
       end if;
 
       declare
-         New_Config  : Configuration := Config;
+         New_Config : Configuration := Config;
       begin
+         if Undo_Reduce_Valid (Super, Shared, New_Config) then
+            --  We may need Undo_Reduce to shift the matching token; see
+            --  ada_mode-recover_40.adb
+            Unchecked_Undo_Reduce (Super, Shared, New_Config);
+         end if;
+
          for ID of Matching_Begin_Tokens loop
-            Insert (New_Config, ID);
+            Insert (Super, Shared, New_Config, ID);
          end loop;
 
          declare
@@ -1143,7 +1193,7 @@ package body WisiToken.Parse.LR.McKenzie_Recover.Explore is
             Parse_Items : aliased Parse.Parse_Item_Arrays.Vector;
             Dummy : constant Boolean :=  Parse.Parse
               (Super, Shared, Parser_Index, Parse_Items, New_Config,
-               Shared_Token_Goal => Invalid_Token_Index,
+               Shared_Token_Goal => Syntax_Trees.Invalid_Sequential_Index,
                All_Conflicts     => True,
                Trace_Prefix      => "parse Matching_Begin");
          begin
@@ -1155,38 +1205,35 @@ package body WisiToken.Parse.LR.McKenzie_Recover.Explore is
                      Item.Config.Matching_Begin_Done := True;
                      Item.Config.Cost := Item.Config.Cost + 
Table.McKenzie_Param.Matching_Begin;
                      Item.Config.Strategy_Counts (Matching_Begin) := 
Item.Config.Strategy_Counts (Matching_Begin) + 1;
-                     Item.Config.Error_Token.ID := Invalid_Token_ID;
-                     Item.Config.Check_Status := (Label => 
WisiToken.Semantic_Checks.Ok);
+                     Item.Config.Error_Token    := 
Syntax_Trees.Invalid_Recover_Token;
+                     Item.Config.In_Parse_Action_Status := (Label => 
Syntax_Trees.In_Parse_Actions.Ok);
 
                      if Trace_McKenzie > Detail then
-                        Base.Put
-                          ("Matching_Begin: insert " & Image 
(Matching_Begin_Tokens, Descriptor),
-                           Super, Shared, Parser_Index, Item.Config);
+                        Super.Put
+                          (Shared, "Matching_Begin: insert " & Image 
(Matching_Begin_Tokens, Descriptor),
+                           Parser_Index, Item.Config);
                      end if;
                      Local_Config_Heap.Add (Item.Config);
                   else
                      if Trace_McKenzie > Detail then
-                        Base.Put
-                          ("Matching_Begin: abandon " & Image 
(Matching_Begin_Tokens, Descriptor) & ": parse fail",
-                           Super, Shared, Parser_Index, Item.Config);
+                        Super.Put
+                          (Shared,
+                           "Matching_Begin: abandon " & Image 
(Matching_Begin_Tokens, Descriptor) & ": parse fail",
+                           Parser_Index, Item.Config);
                      end if;
                   end if;
                end;
             end loop;
          end;
       end;
-   exception
-   when SAL.Container_Full =>
-      --  From config_ops_sorted
-      Super.Config_Full ("Minimal_Complete_Actions 3", Parser_Index);
    end Insert_Matching_Begin;
 
    procedure Try_Insert_Terminal
-     (Super             : not null access Base.Supervisor;
-      Shared            : not null access Base.Shared;
-      Parser_Index      : in              SAL.Base_Peek_Type;
-      Config            : in out          Configuration;
-      Local_Config_Heap : in out          Config_Heaps.Heap_Type)
+     (Super             :         in out Base.Supervisor;
+      Shared            :         in out Parser.Parser;
+      Parser_Index      :         in     SAL.Base_Peek_Type;
+      Config            : aliased in out Configuration;
+      Local_Config_Heap :         in out Config_Heaps.Heap_Type)
    is
       use all type 
WisiToken.Parse.LR.Parser.Language_Matching_Begin_Tokens_Access;
       Tokens                : Token_ID_Array_1_3;
@@ -1196,11 +1243,12 @@ package body WisiToken.Parse.LR.McKenzie_Recover.Explore is
       Minimal_Inserted : Token_ID_Arrays.Vector;
    begin
       if Shared.Language_Matching_Begin_Tokens /= null then
-         Current_Token_ID_Peek_3
-           (Shared.Terminals.all, Config.Current_Shared_Token, 
Config.Insert_Delete, Config.Current_Insert_Delete,
-            Tokens);
+         Parse.Current_Token_ID_Peek_3 (Super, Shared, Config, Tokens);
 
-         Shared.Language_Matching_Begin_Tokens (Tokens, Config, 
Matching_Begin_Tokens, Forbid_Minimal_Insert);
+         if Tokens (1) /= Invalid_Token_ID then
+            Shared.Language_Matching_Begin_Tokens
+              (Super, Shared, Tokens, Config, Matching_Begin_Tokens, 
Forbid_Minimal_Insert);
+         end if;
       end if;
 
       if not Forbid_Minimal_Insert then
@@ -1231,40 +1279,30 @@ package body WisiToken.Parse.LR.McKenzie_Recover.Explore is
 
    exception
    when Bad_Config =>
-      null;
+      if Debug_Mode then
+         raise;
+      else
+         null;
+      end if;
    end Try_Insert_Terminal;
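
Both Insert_Minimal_Complete_Actions and Try_Insert_Terminal now gate their Bad_Config handlers on Debug_Mode: while debugging the exception propagates so the failure is visible, otherwise the strategy is quietly abandoned and other recover strategies are tried. A small sketch of that shape, with a hypothetical Run_Strategy and a Debug_Mode parameter standing in for the package-level flag:

   procedure Run_Strategy (Debug_Mode : in Boolean) is
      Bad_Config : exception;
   begin
      raise Bad_Config;  --  stand-in for a recover attempt that fails
   exception
   when Bad_Config =>
      if Debug_Mode then
         raise;  --  surface the failure during testing
      else
         null;   --  give up on this strategy; others may still succeed
      end if;
   end Run_Strategy;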
 
-   procedure Try_Insert_Quote
-     (Super             : not null access Base.Supervisor;
-      Shared            : not null access Base.Shared;
-      Parser_Index      : in              SAL.Base_Peek_Type;
-      Config            : in out          Configuration;
-      Local_Config_Heap : in out          Config_Heaps.Heap_Type)
+   procedure Try_Insert_Quote_1
+     (Super             : in out Base.Supervisor;
+      Shared            : in out Parser.Parser;
+      Parser_Index      : in     SAL.Base_Peek_Type;
+      Current_Line      : in     Line_Number_Type;
+      Lexer_Error_Node  : in     Syntax_Trees.Valid_Node_Access;
+      Config            : in out Configuration;
+      Local_Config_Heap : in out Config_Heaps.Heap_Type)
    is
-      use Config_Op_Arrays;
+      use Recover_Op_Arrays;
       use all type Parser.Language_String_ID_Set_Access;
+      use WisiToken.Syntax_Trees;
+      use Bounded_Streams;
 
-      Descriptor  : WisiToken.Descriptor renames Shared.Trace.Descriptor.all;
-      Check_Limit : WisiToken.Token_Index renames 
Shared.Table.McKenzie_Param.Check_Limit;
-
-      Current_Line            : constant Line_Number_Type := 
Shared.Terminals.all (Config.Current_Shared_Token).Line;
-      Lexer_Error_Token       : Base_Token;
-
-      function Recovered_Lexer_Error (Line : in Line_Number_Type) return 
Base_Token_Index
-      is begin
-         --  We are assuming the list of lexer errors is short, so binary
-         --  search would not be significantly faster.
-         for Err of reverse Shared.Lexer.Errors loop
-            if Err.Recover_Token /= Invalid_Token_Index and then
-              Shared.Terminals.all (Err.Recover_Token).Line = Line
-            then
-               return Err.Recover_Token;
-            end if;
-         end loop;
-         return Invalid_Token_Index;
-      end Recovered_Lexer_Error;
-
-      Lexer_Error_Token_Index : constant Base_Token_Index := 
Recovered_Lexer_Error (Current_Line);
+      Tree        : Syntax_Trees.Tree renames Shared.Tree;
+      Descriptor  : WisiToken.Descriptor renames 
Shared.Tree.Lexer.Descriptor.all;
+      Check_Limit : Syntax_Trees.Sequential_Index renames 
Shared.Table.McKenzie_Param.Check_Limit;
 
       function String_ID_Set (String_ID : in Token_ID) return Token_ID_Set
       is begin
@@ -1275,171 +1313,394 @@ package body WisiToken.Parse.LR.McKenzie_Recover.Explore is
          end if;
       end String_ID_Set;
 
+      procedure Delete_All_Pushed_Back
+        (Label     : in     String;
+         Config    : in out Configuration;
+         Max_Index :    out Sequential_Index)
+      with Post => Max_Index /= Sequential_Index'Last
+      --  Delete all tokens from Config.Input_Stream.
+      --  Max_Index is the last node_index deleted.
+      is
+         Stream    : Bounded_Streams.List renames Config.Input_Stream;
+         To_Delete : Bounded_Streams.Cursor;
+      begin
+         Max_Index := Sequential_Index'Last; --  should be overwritten
+         loop
+            exit when Length (Stream) = 0;
+
+            if Tree.Child_Count (Stream (First (Stream))) > 0 then
+               Parse.Left_Breakdown (Tree, Config.Input_Stream);
+
+            else
+               To_Delete := First (Stream);
+
+               declare
+                  Node  : constant Node_Access           := Stream (To_Delete);
+                  Index : constant Base_Sequential_Index := 
Tree.Get_Sequential_Index (Node);
+               begin
+                  if Index /= Invalid_Sequential_Index then
+                     --  Node is not from Shared_Stream, so we don't need to 
tell main
+                     --  Parser to delete it.
+                     if Is_Full (Config.Ops) then
+                        Super.Config_Full (Shared, "insert quote 2 a " & 
Label, Parser_Index);
+                        raise Bad_Config;
+                     end if;
+
+                     Append (Config.Ops, (Delete, Shared.Tree.ID (Node), 
Index));
+
+                     Max_Index := Index;
+                  end if;
+               end;
+
+               --  non_grammar are moved during "apply ops".
+               Delete (Config.Input_Stream, To_Delete);
+            end if;
+         end loop;
+      end Delete_All_Pushed_Back;
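
Delete_All_Pushed_Back initializes Max_Index to Sequential_Index'Last ("should be overwritten") and backs that up with a Post aspect, so an assertion fires if the loop never assigns a real index. A standalone sketch of the same sentinel-plus-postcondition pattern; Index_Array, Find_Min and the test data are illustrative names only:

   declare
      type Index_Array is array (Positive range <>) of Natural;
      Result : Natural;

      procedure Find_Min (A : in Index_Array; Min : out Natural)
      with Post => Min /= Natural'Last
      --  Assumes A is non-empty, as the caller guarantees here.
      is
      begin
         Min := Natural'Last;  --  sentinel; should be overwritten below
         for V of A loop
            if V < Min then
               Min := V;
            end if;
         end loop;
      end Find_Min;
   begin
      Find_Min ((3, 9, 4), Result);
      pragma Assert (Result = 3);
   end;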
+
+      procedure Delete_Pushed_Back
+        (Label      : in     String;
+         Config     : in out Configuration;
+         Target_Ref : in out Config_Stream_Parents;
+         Max_Index  :    out Base_Sequential_Index)
+      with Pre => Length (Config.Input_Stream) > 0 and Tree.Is_Terminal 
(Target_Ref.Node)
+      --  Delete terminals in Config.Input_Stream, first terminal to
+      --  Prev_Terminal (First_Terminal (Target_Ref)). Max_Index is the last
+      --  Sequential_Index deleted; Invalid_Sequential_Index if none (ie
+      --  Target_Ref.Node is first terminal in Input_Stream). Target_Ref is
+      --  updated if Input_Stream is broken down to expose tokens.
+      is
+         Stream : Bounded_Streams.List renames Config.Input_Stream;
+
+         procedure Delete_First
+         is
+            To_Delete : Bounded_Streams.Cursor := Stream.First;
+         begin
+            if Is_Full (Config.Ops) then
+               Super.Config_Full (Shared, "insert quote 2 b " & Label, 
Parser_Index);
+               raise Bad_Config;
+            end if;
+
+            pragma Assert (Is_Terminal (Shared.Tree.ID (Stream (To_Delete)), 
Shared.Tree.Lexer.Descriptor.all));
+            pragma Assert (Tree.Get_Sequential_Index (Stream (To_Delete)) /= 
Invalid_Sequential_Index);
+            Append
+              (Config.Ops,
+               (Delete,
+                Shared.Tree.ID (Stream (To_Delete)),
+                Tree.Get_Sequential_Index (Stream (To_Delete))));
+
+            Max_Index := Tree.Get_Sequential_Index (Stream (To_Delete));
+
+            --  non_grammar are moved during "apply ops".
+            Delete (Stream, To_Delete);
+         end Delete_First;
+      begin
+         Max_Index := Invalid_Sequential_Index;
+         loop
+            exit when Target_Ref.Element = Stream.First and Target_Ref.Node = 
Stream (Stream.First);
+
+            if Tree.Label (Stream (Stream.First)) = Nonterm then
+               Parse.Left_Breakdown (Tree, Stream);
+
+               exit when Target_Ref.Node = Stream (Stream.First);
+
+               --  Find new Target_Ref.Element
+               Target_Ref.Element := Stream.First;
+               loop
+                  exit when
+                    (for some I in 1 .. Target_Ref.Parents.Depth =>
+                       Stream (Target_Ref.Element) = Target_Ref.Parents.Peek 
(I));
+
+                  Target_Ref.Element := Stream.Next (Target_Ref.Element);
+               end loop;
+
+               exit when Target_Ref.Element = Stream.First;
+               Delete_First;
+            else
+               Delete_First;
+            end if;
+         end loop;
+      end Delete_Pushed_Back;
+
+      procedure Delete_Pushed_Back
+        (Label        : in     String;
+         Config       : in out Configuration;
+         Target_Index : in     Sequential_Index;
+         Max_Index    :    out Base_Sequential_Index)
+      --  Delete tokens from Config.Input_Stream to Target_Index or
+      --  end of Input_Stream. Max_Index is the last node
+      --  deleted; Invalid_Sequential_Index if none.
+      is
+         Stream         : Bounded_Streams.List renames Config.Input_Stream;
+         Target_Element : constant Bounded_Streams.Cursor := First (Stream);
+      begin
+         if not Has_Element (Target_Element) then
+            Max_Index := Invalid_Sequential_Index;
+            return;
+         end if;
+
+         --  Find Target_Index in Config.Input_Stream
+         declare
+            Target_Ref : Config_Stream_Parents (Config.Input_Stream'Access);
+         begin
+            Parse.First_Sequential_Terminal (Super, Shared, Target_Ref);
+            loop
+               exit when Tree.Get_Sequential_Index (Target_Ref.Node) = 
Target_Index;
+
+               Parse.Next_Sequential_Terminal (Tree, Target_Ref);
+               exit when Target_Ref.Element = No_Element; --  Target_Index not 
in Input_Stream
+            end loop;
+
+            if Target_Ref.Element = Bounded_Streams.No_Element then
+               Delete_All_Pushed_Back (Label, Config, Max_Index);
+            else
+               Delete_Pushed_Back (Label, Config, Target_Ref, Max_Index);
+            end if;
+         end;
+      end Delete_Pushed_Back;
+
       procedure String_Literal_In_Stack
         (Label             : in     String;
-         New_Config        : in out Configuration;
+         Config            : in out Configuration;
          Matching          : in     SAL.Peek_Type;
          String_Literal_ID : in     Token_ID)
+      --  Matching is the peek index of a token in Config.Stack containing a
+      --  string literal (possibly more than one). Push back thru that
+      --  token, then delete all tokens after the string literal to Config
+      --  current token.
       is
-         Saved_Shared_Token : constant WisiToken.Token_Index := 
New_Config.Current_Shared_Token;
+         String_Literal : Config_Stream_Parents (Config.Input_Stream'Access);
 
-         Tok         : Recover_Token;
-         J           : WisiToken.Token_Index;
-         Parse_Items : aliased Parse.Parse_Item_Arrays.Vector;
+         Max_Deleted : Base_Sequential_Index;
       begin
-         --  Matching is the index of a token on New_Config.Stack containing a 
string
-         --  literal. Push back thru that token, then delete all tokens after
-         --  the string literal to Saved_Shared_Token.
-         if not Has_Space (New_Config.Ops, Ada.Containers.Count_Type 
(Matching)) then
-            Super.Config_Full ("insert quote 1 " & Label, Parser_Index);
+         --  Mark the current start of Config.Input_Stream, so we can search
+         --  new pushed_back tokens below. test_mckenzie_recover.adb
+         --  String_Quote_2.
+         Parse.First_Terminal (Tree, String_Literal);
+
+         if not Has_Space (Config.Ops, Ada.Containers.Count_Type (Matching)) 
then
+            Super.Config_Full (Shared, "insert quote 1 " & Label, 
Parser_Index);
             raise Bad_Config;
          end if;
          for I in 1 .. Matching loop
-            if Push_Back_Valid (New_Config) then
-               Tok := New_Config.Stack.Pop.Token;
-               Append (New_Config.Ops, (Push_Back, Tok.ID, 
Tok.Min_Terminal_Index));
-            else
+            if not Push_Back_Valid (Super, Shared, Config, 
Push_Back_Undo_Reduce => False) then
                --  Probably pushing back thru a previously inserted token
-               raise Bad_Config;
+               raise Invalid_Case;
             end if;
+            Do_Push_Back (Tree, Config);
          end loop;
 
-         New_Config.Current_Shared_Token := Tok.Min_Terminal_Index;
-
-         --  Find last string literal in pushed back terminals.
-         J := Saved_Shared_Token - 1;
+         --  Search the pushed_back tokens for the last string literal.
+         if String_Literal.Element = No_Element then
+            Parse.Last_Sequential_Terminal (Super, Shared, String_Literal);
+         end if;
          loop
-            exit when Shared.Terminals.all (J).ID = String_Literal_ID;
-            J := J - 1;
+            exit when Shared.Tree.ID (String_Literal.Node) = String_Literal_ID;
+            Parse.Prev_Sequential_Terminal (Tree, String_Literal);
          end loop;
 
+         --  Delete pushed_back tokens before the string literal.
+         Delete_Pushed_Back (Label, Config, String_Literal, Max_Deleted);
+
+         --  Process the deletes so Config matches Ops. Also parse the string
+         --  literal.
+         declare
+            First_Node  : constant Valid_Node_Access := 
Parse.Peek_Current_First_Sequential_Terminal
+              (Super, Shared, Config);
+            Parse_Items : aliased Parse.Parse_Item_Arrays.Vector;
          begin
             if Parse.Parse
-              (Super, Shared, Parser_Index, Parse_Items, New_Config,
-               Shared_Token_Goal => J,
+              (Super, Shared, Parser_Index, Parse_Items, Config,
+               Shared_Token_Goal => Tree.Get_Sequential_Index 
(String_Literal.Node),
                All_Conflicts     => False,
                Trace_Prefix      => "insert quote parse pushback " & Label)
             then
-               --  The non-deleted tokens parsed without error. We don't care 
if any
-               --  conflicts were encountered; we are not using the parse 
result.
-               New_Config := Parse.Parse_Item_Array_Refs.Constant_Ref 
(Parse_Items, 1).Config;
-               Append (New_Config.Ops, (Fast_Forward, 
New_Config.Current_Shared_Token));
+               --  The tokens parsed without error. We don't care if any 
conflicts
+               --  were encountered; they were enqueued the first time this was
+               --  parsed.
+               Config := Parse.Parse_Item_Array_Refs.Constant_Ref 
(Parse_Items, 1).Config;
+               Append
+                 (Config.Ops,
+                  (Fast_Forward,
+                   FF_First_Index   => Tree.Get_Sequential_Index (First_Node),
+                   FF_Next_Index    => Tree.Get_Sequential_Index
+                     (Parse.Peek_Current_First_Sequential_Terminal (Super, 
Shared, Config))));
             else
                raise SAL.Programmer_Error;
             end if;
-         exception
-         when Bad_Config =>
-            raise SAL.Programmer_Error;
          end;
-
-         if New_Config.Current_Shared_Token < Saved_Shared_Token - 1 and then
-           (not Has_Space
-              (New_Config.Ops, Ada.Containers.Count_Type (Saved_Shared_Token - 
1 - New_Config.Current_Shared_Token)))
-         then
-            Super.Config_Full ("insert quote 2 " & Label, Parser_Index);
-            raise Bad_Config;
-         end if;
-
-         for J in New_Config.Current_Shared_Token .. Saved_Shared_Token - 1 
loop
-            Append (New_Config.Ops, (Delete, Shared.Terminals.all (J).ID, J));
-         end loop;
-
-         New_Config.Current_Shared_Token := Saved_Shared_Token;
-
       end String_Literal_In_Stack;
 
       procedure Push_Back_Tokens
         (Full_Label            : in     String;
-         New_Config            : in out Configuration;
-         Min_Pushed_Back_Index :    out Base_Token_Index)
+         Config                : in out Configuration;
+         Min_Pushed_Back_Index :    out Syntax_Trees.Sequential_Index)
+      with Post => Min_Pushed_Back_Index /= Sequential_Index'Last
+      --  Push back stack top; if it is empty, push back the next stack token.
+      --
+      --  Min_Pushed_Back_Index is sequential_index (first_sequential_terminal 
(pushed back token)).
       is
-         Item : Recover_Stack_Item;
+         Item  : Recover_Stack_Item;
+         First : Node_Access;
       begin
+         Min_Pushed_Back_Index := Sequential_Index'Last; -- Should be 
overwritten
          loop
-            Item := New_Config.Stack.Peek;
-            if Item.Token.Virtual then
-               --  Don't push back an inserted token
-               exit;
+            if not Push_Back_Valid (Super, Shared, Config, 
Push_Back_Undo_Reduce => False) then
+               --  Probably pushing back thru a previously inserted token
+               raise Invalid_Case;
+            end if;
 
-            elsif Item.Token.Byte_Region = Null_Buffer_Region then
-               --  Don't need push_back for an empty token
-               New_Config.Stack.Pop;
+            if Is_Full (Config.Ops) then
+               Super.Config_Full (Shared, Full_Label, Parser_Index);
+               raise Bad_Config;
+            end if;
 
-            elsif Item.Tree_Index = Invalid_Node_Index then
-               --  Item was pushed on stack during recovery, and we do not know
-               --  its Line. To avoid crossing a line boundary, we stop 
push_backs
-               --  here.
-               exit;
+            Item  := Config.Stack.Peek;
+            First := Tree.First_Terminal (Item.Token);
+            if First /= Invalid_Node_Access then
+               First := Tree.First_Sequential_Terminal (First);
+            end if;
 
-            else
-               if Shared.Terminals.all
-                 (Super.Parser_State (Parser_Index).Tree.First_Shared_Terminal 
(Item.Tree_Index)).Line = Current_Line
-                 --  Don't let push_back cross a line boundary.
-               then
-                  if Is_Full (New_Config.Ops) then
-                     Super.Config_Full (Full_Label, Parser_Index);
-                     raise Bad_Config;
-                  else
-                     New_Config.Stack.Pop;
-                     Append (New_Config.Ops, (Push_Back, Item.Token.ID, 
Item.Token.Min_Terminal_Index));
-                  end if;
-               end if;
+            Do_Push_Back (Tree, Config);
 
+            if First /= Invalid_Node_Access then
+               Min_Pushed_Back_Index := Tree.Get_Sequential_Index (First);
                exit;
             end if;
          end loop;
-         Min_Pushed_Back_Index := Item.Token.Min_Terminal_Index;
       end Push_Back_Tokens;
 
-      procedure Finish
+      procedure Delete_Shared_Stream
         (Label       : in     String;
-         New_Config  : in out Configuration;
-         First, Last : in     Base_Token_Index)
+         Config      : in out Configuration;
+         First, Last : in     Syntax_Trees.Sequential_Index)
+      --  Delete tokens First .. Last from Tree Shared_Stream; the caller has
+      --  already done any deletes in Config.Input_Stream.
+      --  Config.Current_Shared_Token must be in First .. Last + 1. Leave
+      --  Current_Shared_Token at Last + 1.
       is
-         Adj_First : constant Base_Token_Index := (if First = 
Invalid_Token_Index then Last else First);
-         Adj_Last  : constant Base_Token_Index := (if Last = 
Invalid_Token_Index then First else Last);
-      begin
-         --  Delete tokens First .. Last; either First - 1 or Last + 1 should
-         --  be a String_Literal. Leave Current_Shared_Token at Last + 1.
+         Ref : Stream_Node_Parents := Tree.To_Stream_Node_Parents 
(Config.Current_Shared_Token);
+
+         procedure Find_First
+         is begin
+            if First = Tree.Get_Sequential_Index (Ref.Ref.Node) then
+               return;
+
+            elsif First < Tree.Get_Sequential_Index (Ref.Ref.Node) then
+               loop
+                  exit when Tree.Get_Sequential_Index (Ref.Ref.Node) = First;
+                  Tree.Prev_Sequential_Terminal (Ref, Parse_Stream => 
Invalid_Stream_ID, Preceding => True);
+               end loop;
+
+            else
+               raise Bad_Config;
+            end if;
+         end Find_First;
 
-         if Adj_Last = Invalid_Token_Index or Adj_First = Invalid_Token_Index 
then
-            pragma Assert (False);
+      begin
+         if not Has_Space (Config.Ops, Ada.Containers.Count_Type (Last - First 
+ 1)) then
+            Super.Config_Full (Shared, "insert quote 3 " & Label, 
Parser_Index);
             raise Bad_Config;
          end if;
 
-         New_Config.Error_Token.ID := Invalid_Token_ID;
-         New_Config.Check_Status   := (Label => WisiToken.Semantic_Checks.Ok);
+         Find_First;
 
-         --  This is a guess, so we give it a nominal cost
-         New_Config.Cost := New_Config.Cost + 1;
+         for I in First .. Last loop
+            if not (Tree.Label (Ref.Ref.Node) in Terminal_Label) then
+               --  It is exceedingly unlikely that the words in a real user 
string
+               --  will match a grammar production (unless we are writing a 
code
+               --  generator like WisiToken.Output_Ada, sigh). So we just 
abandon
+               --  this.
+               raise Invalid_Case;
+            end if;
+
+            Append
+              (Config.Ops,
+               (Delete, Shared.Tree.ID (Ref.Ref.Node),
+                Tree.Get_Sequential_Index (Ref.Ref.Node)));
+
+            Tree.Next_Sequential_Terminal (Ref, Following => True);
+         end loop;
+         Config.Current_Shared_Token := Ref.Ref;
+      end Delete_Shared_Stream;
+
+      procedure Finish
+        (Label       : in     String;
+         Config      : in out Configuration;
+         First, Last : in     Base_Sequential_Index)
+      --  Delete tokens First .. Last from Config.Input_Stream and/or Tree Shared_Stream.
+      --  Either First - 1 or Last + 1 should be a String_Literal.
+      --  Config.Current_Shared_Token must be in First .. Last + 1. Leave
+      --  Current_Shared_Token at Last + 1.
+      is
+         Adj_First : constant Sequential_Index :=
+           (if First = Invalid_Sequential_Index
+            then (if Last = Invalid_Sequential_Index
+                  then raise Bad_Config
+                  else Last)
+            else First);
+
+         Adj_Last  : constant Sequential_Index := (if Last = 
Invalid_Sequential_Index then First else Last);
 
-         if not Has_Space (New_Config.Ops, Ada.Containers.Count_Type (Last - 
First)) then
-            Super.Config_Full ("insert quote 3 " & Label, Parser_Index);
+         Last_Deleted : Base_Sequential_Index := Invalid_Sequential_Index;
+      begin
+         if Adj_Last < Adj_First then
             raise Bad_Config;
          end if;
 
-         for I in Adj_First .. Adj_Last loop
-            Append (New_Config.Ops, (Delete, Shared.Terminals.all (I).ID, I));
-         end loop;
-         New_Config.Current_Shared_Token := Last + 1;
+         if Length (Config.Input_Stream) > 0 then
+            Delete_Pushed_Back (Label, Config, Target_Index => Adj_Last + 1, 
Max_Index => Last_Deleted);
+         end if;
+
+         if Last_Deleted = Adj_Last then
+            --  First .. Last deleted from input_stream.
+            null;
+         else
+            Delete_Shared_Stream
+              (Label, Config,
+               First =>
+                 (if Last_Deleted = Invalid_Sequential_Index
+                  then Adj_First
+                  else Last_Deleted + 1),
+               Last => Adj_Last);
+         end if;
+
+         Config.Error_Token  := Syntax_Trees.Invalid_Recover_Token;
+         Config.In_Parse_Action_Status := (Label => 
Syntax_Trees.In_Parse_Actions.Ok);
+
+         --  This is a guess, so we give it a nominal cost
+         Config.Cost := Config.Cost + 1;
 
          --  Let explore do insert after these deletes.
-         Append (New_Config.Ops, (Fast_Forward, 
New_Config.Current_Shared_Token));
+         declare
+            Target_Node : constant Valid_Node_Access := 
Parse.Peek_Current_First_Sequential_Terminal
+              (Super, Shared, Config);
+         begin
+            Append
+              (Config.Ops, (Fast_Forward, FF_First_Index | FF_Next_Index => 
Tree.Get_Sequential_Index (Target_Node)));
 
-         if New_Config.Resume_Token_Goal - Check_Limit < 
New_Config.Current_Shared_Token then
-            New_Config.Resume_Token_Goal := New_Config.Current_Shared_Token + 
Check_Limit;
-            if Trace_McKenzie > Extra then
-               Put_Line
-                 (Super.Trace.all, Super.Label (Parser_Index), 
"resume_token_goal:" & WisiToken.Token_Index'Image
-                    (New_Config.Resume_Token_Goal));
+            if Config.Resume_Token_Goal - Check_Limit < 
Tree.Get_Sequential_Index (Target_Node)
+            then
+               Config.Resume_Token_Goal := Tree.Get_Sequential_Index 
(Target_Node) + Check_Limit;
+               Super.Extend_Sequential_Index (Shared, Thru => 
Config.Resume_Token_Goal);
+
+               if Trace_McKenzie > Extra then
+                  Put_Line (Tree, Super.Stream (Parser_Index), 
"resume_token_goal:" & Config.Resume_Token_Goal'Image);
+               end if;
             end if;
-         end if;
+         end;
 
-         New_Config.Strategy_Counts (String_Quote) := 
New_Config.Strategy_Counts (String_Quote) + 1;
+         Config.Strategy_Counts (String_Quote) := Config.Strategy_Counts 
(String_Quote) + 1;
 
          if Trace_McKenzie > Detail then
-            Base.Put ("insert quote " & Label & " ", Super, Shared, 
Parser_Index, New_Config);
+            Super.Put (Shared, "insert quote " & Label & " ", Parser_Index, 
Config);
          end if;
+      exception
+      when Bad_Config =>
+         if Trace_McKenzie > Detail then
+            Put_Line (Tree, Super.Stream (Parser_Index), "insert quote 
Bad_Config " & Label);
+         end if;
+         raise;
       end Finish;
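
Finish computes Adj_First with a conditional expression that contains a raise expression: if both First and Last are Invalid_Sequential_Index there is nothing sensible to delete, so the whole configuration is rejected via Bad_Config. A standalone sketch of that idiom with illustrative names (Adjust, Invalid, Oops are not from this commit):

   declare
      Invalid : constant Integer := -1;
      Oops    : exception;

      function Adjust (First, Last : Integer) return Integer is
        (if First = Invalid
         then (if Last = Invalid then raise Oops else Last)
         else First);
   begin
      pragma Assert (Adjust (5, Invalid) = 5);
      pragma Assert (Adjust (Invalid, 7) = 7);
   end;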
 
    begin
@@ -1450,12 +1711,12 @@ package body WisiToken.Parse.LR.McKenzie_Recover.Explore is
       --  before, at, or after that string literal.
       --
       --  Here we assume the parse error in Config.Error_Token is due to
-      --  putting the balancing quote in the wrong place (although we do
-      --  check that; see test_mckenzie_recover.adb String_Quote_6), and
-      --  attempt to find a better place to put the balancing quote. Then
-      --  all tokens from the balancing quote to the unbalanced quote are
-      --  now part of a string literal, so delete them, leaving just the
-      --  string literal created by Lexer error recovery.
+      --  putting the balancing quote in the wrong place (although we also
+      --  check that solution; see test_mckenzie_recover.adb
+      --  String_Quote_6), and attempt to find a better place to put the
+      --  balancing quote. Then all tokens from the balancing quote to the
+      --  unbalanced quote are now part of a string literal, so delete them,
+      --  leaving just the string literal created by Lexer error recovery.
 
       --  First we check to see if there is an unbalanced quote in the
       --  current line; if not, just return. Some lexer errors are for other
@@ -1464,254 +1725,424 @@ package body WisiToken.Parse.LR.McKenzie_Recover.Explore is
       --  An alternate strategy is to treat the lexer error as a parse error
       --  immediately, but that complicates the parse logic.
 
-      Config.String_Quote_Checked := Current_Line;
-
-      if Lexer_Error_Token_Index = Invalid_Token_Index then
-         return;
-      end if;
-
-      Lexer_Error_Token := Shared.Terminals.all (Lexer_Error_Token_Index);
-
       --  It is not possible to tell where the best place to put the
       --  balancing quote is, so we always try all reasonable places.
+      declare
+         Next_Line_Begin_Token : constant Valid_Node_Access := 
Tree.Line_Begin_Token
+           (Current_Line + 1, Super.Stream (Parser_Index), 
Following_Source_Terminal => True);
+         --  EOI if Current_Line is last line in source.
+      begin
+         Super.Extend_Sequential_Index (Shared, Next_Line_Begin_Token, 
Positive => True);
 
-      if Lexer_Error_Token.Byte_Region.First = 
Config.Error_Token.Byte_Region.First then
-         --  The parse error token is the string literal at the lexer error.
-         --
-         --  case a: Insert the balancing quote somewhere before the error
-         --  point. There is no way to tell how far back to put the balancing
-         --  quote, so we just do one non-empty token. See
-         --  test_mckenzie_recover.adb String_Quote_0. So far we have not found
-         --  a test case for more than one token.
-         declare
-            New_Config            : Configuration := Config;
-            Min_Pushed_Back_Index : Base_Token_Index;
-         begin
-            Push_Back_Tokens ("insert quote 4 a", New_Config, 
Min_Pushed_Back_Index);
-            Finish ("a", New_Config, Min_Pushed_Back_Index, 
Config.Current_Shared_Token - 1);
-            Local_Config_Heap.Add (New_Config);
-         end;
+         if Tree.Byte_Region (Lexer_Error_Node, Trailing_Non_Grammar => 
False).First =
+           Tree.Byte_Region (Config.Error_Token).First
+         then
+            --  The parse error token is the string literal at the lexer error.
+            --
+            --  case a: Insert the balancing quote somewhere before the error
+            --  point. There is no way to tell how far back to put the 
balancing
+            --  quote, so we just do one non-empty token. See
+            --  test_mckenzie_recover.adb String_Quote_0. So far we have not 
found
+            --  a test case for more than one token.
+            declare
+               New_Config            : Configuration := Config;
+               Min_Pushed_Back_Index : Syntax_Trees.Sequential_Index;
+            begin
+               Push_Back_Tokens ("insert quote 4 a", New_Config, 
Min_Pushed_Back_Index);
 
-         --  Note that it is not reasonable to insert a quote after the error
-         --  in this case. If that were the right solution, the parser error
-         --  token would not be the lexer repaired string literal, since a
-         --  string literal would be legal here.
+               pragma Assert (not Config.Error_Token.Virtual);
+               --  Error_Token can be a nonterm. 
ada_mode-recover_partial_09.adb
+               Finish
+                 ("a", New_Config,
+                  First => Min_Pushed_Back_Index,
+                  Last  => Tree.Get_Sequential_Index (Tree.First_Terminal 
(Config.Error_Token)) - 1);
+               Local_Config_Heap.Add (New_Config);
+            end;
 
-      elsif Lexer_Error_Token.Byte_Region.First < 
Config.Error_Token.Byte_Region.First then
-         --  The unbalanced quote is before the parse error token; see
-         --  test_mckenzie_recover.adb String_Quote_2.
-         --
-         --  The missing quote belongs after the parse error token, before or
-         --  at the end of the current line; try inserting it at the end of
-         --  the current line.
-         --
-         --  The lexer repaired string literal may be in a reduced token on the
-         --  stack.
+            --  Note that it is not reasonable to insert a quote after the 
error
+            --  in this case. If that were the right solution, the parser error
+            --  token would not be the lexer repaired string literal, since a
+            --  string literal would be legal here.
 
-         declare
-            Matching : SAL.Peek_Type := 1;
-         begin
-            Find_Descendant_ID
-              (Super.Parser_State (Parser_Index).Tree, Config, 
Lexer_Error_Token.ID,
-               String_ID_Set (Lexer_Error_Token.ID), Matching);
+         elsif Tree.Element_ID (Config.Error_Token) = Invalid_Token_ID or else
+           (Tree.Byte_Region (Lexer_Error_Node, Trailing_Non_Grammar => 
False).First <
+              Tree.Byte_Region (Config.Error_Token).First and
+              Tree.Get_Sequential_Index (Next_Line_Begin_Token) /= 
Invalid_Sequential_Index)
+         then
+            --  case b: the unbalanced quote is before the parse error token; 
see
+            --  test_mckenzie_recover.adb String_Quote_2, String_Quote_5.
+            --
+            --  The missing quote belongs after the parse error token, before 
or
+            --  at the end of the current line; try inserting it at the end of
+            --  the current line.
+            --
+            --  The lexer repaired string literal may be in a reduced token on 
the
+            --  stack.
 
-            if Matching = Config.Stack.Depth then
-               --  String literal is in a virtual nonterm; give up. So far 
this only
-               --  happens in a high cost non critical config.
-               if Trace_McKenzie > Detail then
-                  Put_Line
-                    (Super.Trace.all, Super.Label (Parser_Index), "insert 
quote b abandon; string literal in virtual");
+            declare
+               Matching : SAL.Peek_Type := 1;
+            begin
+               Find_Descendant_ID
+                 (Tree, Config, Shared.Tree.ID (Lexer_Error_Node),
+                  String_ID_Set (Shared.Tree.ID (Lexer_Error_Node)), Matching);
+
+               if Matching = Config.Stack.Depth then
+                  --  String literal is in a virtual nonterm; it is not from 
the lexer
+                  --  error, so abandon this.
+                  if Trace_McKenzie > Detail then
+                     Put_Line (Tree, Super.Stream (Parser_Index), "insert 
quote b abandon; string literal in virtual");
+                  end if;
+                  return;
                end if;
-               return;
-            end if;
 
+               declare
+                  New_Config : Configuration := Config;
+               begin
+                  String_Literal_In_Stack ("b", New_Config, Matching, 
Shared.Tree.ID (Lexer_Error_Node));
+
+                  Finish
+                    ("b", New_Config,
+                     First => Shared.Tree.Get_Sequential_Index 
(Shared.Tree.First_Terminal (Config.Error_Token)),
+                     Last  => Shared.Tree.Get_Sequential_Index 
(Next_Line_Begin_Token) - 1);
+                  Local_Config_Heap.Add (New_Config);
+               end;
+            end;
+
+         else
+            --  The unbalanced quote is after the parse error token.
+
+            --  case c: Assume a missing quote belongs immediately before the
+            --  current token. See test_mckenzie_recover.adb String_Quote_3.
             declare
                New_Config : Configuration := Config;
             begin
-               String_Literal_In_Stack ("b", New_Config, Matching, 
Lexer_Error_Token.ID);
+               Finish
+                 ("c", New_Config,
+                  First => Shared.Tree.Get_Sequential_Index
+                    (Parse.Peek_Current_First_Sequential_Terminal (Super, Shared, New_Config)),
+                  Last  => Shared.Tree.Get_Sequential_Index (Lexer_Error_Node) - 1);
+               Local_Config_Heap.Add (New_Config);
+            end;
+
+            --  case d: Assume a missing quote belongs somewhere farther before
+            --  the current token; try one non-empty (as in case a above). See
+            --  test_mckenzie_recover.adb String_Quote_4, String_Quote_6,
+            --  test/ada_mode-recover_string_quote_1.adb.
+            declare
+               New_Config            : Configuration := Config;
+               Min_Pushed_Back_Index : Syntax_Trees.Sequential_Index;
+            begin
+               Push_Back_Tokens ("insert quote 5 d", New_Config, Min_Pushed_Back_Index);
 
                Finish
-                 ("b", New_Config, Config.Current_Shared_Token, 
Shared.Line_Begin_Token.all (Current_Line + 1) - 1);
+                 ("d", New_Config,
+                  First => Min_Pushed_Back_Index,
+                  Last  => Tree.Get_Sequential_Index (Lexer_Error_Node) - 1);
                Local_Config_Heap.Add (New_Config);
+            exception
+            when SAL.Container_Empty =>
+               --  From Stack.Pop
+               raise Bad_Config;
             end;
-         end;
 
+            --  case e: Assume the actual error is an extra quote that terminates
+            --  an intended string literal early, in which case there is a token
+            --  on the stack containing the string literal that should be extended
+            --  to the found quote. See test_mckenzie_recover.adb String_Quote_1.
+            declare
+               Matching : SAL.Peek_Type := 1;
+            begin
+               --  Lexer_Error_Node is a string literal; find a matching one.
+               Find_Descendant_ID
+                 (Tree, Config, Shared.Tree.ID (Lexer_Error_Node),
+                  String_ID_Set (Shared.Tree.ID (Lexer_Error_Node)), Matching);
+
+               if Matching = Config.Stack.Depth then
+                  --  No matching string literal, so this case does not apply.
+                  null;
+               else
+                  declare
+                     New_Config : Configuration := Config;
+                  begin
+                     String_Literal_In_Stack ("e", New_Config, Matching, 
Shared.Tree.ID (Lexer_Error_Node));
+
+                     Finish
+                       ("e", New_Config,
+                        First => Shared.Tree.Get_Sequential_Index (Config.Current_Shared_Token.Node),
+                        Last  => Shared.Tree.Get_Sequential_Index (Lexer_Error_Node));
+                     Local_Config_Heap.Add (New_Config);
+                  end;
+               end if;
+            end;
+         end if;
+      end;
+   exception
+   when Invalid_Case =>
+      null;
+
+   when Bad_Config =>
+      if Debug_Mode then
+         raise;
       else
-         --  The unbalanced quote is after the parse error token.
+         null;
+      end if;
+   end Try_Insert_Quote_1;
 
-         --  case c: Assume a missing quote belongs immediately before the current token.
-         --  See test_mckenzie_recover.adb String_Quote_3.
+   procedure Try_Insert_Quote
+     (Super             : in out Base.Supervisor;
+      Shared            : in out Parser.Parser;
+      Parser_Index      : in     SAL.Base_Peek_Type;
+      Config            : in out Configuration;
+      Local_Config_Heap : in out Config_Heaps.Heap_Type)
+   is
+      Tree : Syntax_Trees.Tree renames Shared.Tree;
+      Current_Byte_Pos : constant Base_Buffer_Pos := Tree.Byte_Region (Config.Error_Token).Last;
+   begin
+      if Config.String_Quote_Checked_Byte_Pos /= Invalid_Buffer_Pos and then
+        Current_Byte_Pos <= Config.String_Quote_Checked_Byte_Pos
+      then
+         return;
+      else
          declare
-            New_Config : Configuration := Config;
-         begin
-            Finish ("c", New_Config, Config.Current_Shared_Token, 
Lexer_Error_Token_Index - 1);
-            Local_Config_Heap.Add (New_Config);
-         exception
-         when Bad_Config =>
-            null;
-         end;
+            use Bounded_Streams;
+            use Syntax_Trees;
 
-         --  case d: Assume a missing quote belongs somewhere farther before
-         --  the current token; try one non-empty (as in case a above). See
-         --  test_mckenzie_recover.adb String_Quote_4, String_Quote_6.
-         declare
-            New_Config            : Configuration := Config;
-            Min_Pushed_Back_Index : Base_Token_Index;
-         begin
-            Push_Back_Tokens ("insert quote 5 d", New_Config, Min_Pushed_Back_Index);
-            Finish ("d", New_Config, Min_Pushed_Back_Index, Lexer_Error_Token_Index - 1);
-            Local_Config_Heap.Add (New_Config);
-         exception
-         when SAL.Container_Empty =>
-            --  From Stack.Pop
-            null;
-         when Bad_Config =>
-            null;
-         end;
+            Current_Line : constant Base_Line_Number_Type :=
+              (if Config.Input_Stream.First = No_Element
+               then
+                 (if Config.Current_Shared_Token.Node /= Invalid_Node_Access
+                  then Tree.Line_Region (Config.Current_Shared_Token, Trailing_Non_Grammar => True).First
+                  else Invalid_Line_Number)
+               elsif not Config.Error_Token.Virtual and then
+                 Config.Error_Token.Node = Config.Input_Stream (Config.Input_Stream.First)
+               then Tree.Line_Region (Super.Stream (Parser_Index), Config.Error_Token).First
+               --  Null_Line_Region if unknown.
+               else Invalid_Line_Number);
 
-         --  case e: Assume the actual error is an extra quote that terminates
-         --  an intended string literal early, in which case there is a token
-         --  on the stack containing the string literal that should be extended
-         --  to the found quote. See test_mckenzie_recover.adb String_Quote_1.
-         declare
-            Matching : SAL.Peek_Type := 1;
          begin
-            --  Lexer_Error_Token is a string literal; find a matching one.
-            Find_Descendant_ID
-              (Super.Parser_State (Parser_Index).Tree, Config, Lexer_Error_Token.ID, String_ID_Set
-                 (Lexer_Error_Token.ID), Matching);
+            if Current_Line = Invalid_Line_Number or Current_Line = Null_Line_Region.First or not
+              (Config.String_Quote_Checked_Line = Invalid_Line_Number or else
+                 Config.String_Quote_Checked_Line < Current_Line)
+            then
+               return;
+            elsif Config.Current_Shared_Token = Invalid_Stream_Node_Ref then
+               --  Current token is in Config.Input_Stream, shared is past EOI. ada_mode-recover_partial_15.adb
+               return;
+            end if;
 
-            if Matching = Config.Stack.Depth then
-               --  No matching string literal, so this case does not apply.
-               null;
-            else
-               declare
-                  New_Config : Configuration := Config;
-               begin
-                  String_Literal_In_Stack ("e", New_Config, Matching, 
Lexer_Error_Token.ID);
+            Config.String_Quote_Checked_Line     := Current_Line;
+            Config.String_Quote_Checked_Byte_Pos := Current_Byte_Pos;
 
-                  Finish ("e", New_Config, Config.Current_Shared_Token, 
Lexer_Error_Token_Index);
-                  Local_Config_Heap.Add (New_Config);
-               end;
-            end if;
+            --  Find a recovered string quote lexer_error on the same line.
+            declare
+               Term  : Stream_Node_Parents := Tree.To_Stream_Node_Parents (Config.Current_Shared_Token);
+               Found : Boolean             := False;
+
+               procedure Search (Forward : in Boolean)
+               is begin
+                  loop
+                     exit when Term.Ref = Invalid_Stream_Node_Ref;
+                     --  Invalid when EOI or SOI has an error; test_incremental.adb Preserve_Parse_Errors_1
+
+                     for Err of Tree.Error_List (Term.Ref.Node) loop
+                        if Err in Lexer_Error then
+                           declare
+                              Lex_Err : WisiToken.Lexer.Error renames Lexer_Error (Err).Error;
+                           begin
+                              if Lex_Err.Recover_Char (1) in ''' | '"' and Lex_Err.Recover_Char (2) = ASCII.NUL then
+                                 Found := True;
+                                 return;
+                              end if;
+                           end;
+                        end if;
+                     end loop;
+
+                     exit when Tree.ID (Term.Ref.Node) =
+                       (if Forward
+                        then Tree.Lexer.Descriptor.EOI_ID
+                        else Tree.Lexer.Descriptor.SOI_ID);
+
+                     exit when Tree.Line_Region (Term, Super.Stream (Parser_Index)).First /= Current_Line;
+
+                     if Forward then
+                        Tree.Next_Terminal (Term, Following => True);
+                     else
+                        Tree.Prev_Terminal (Term, Super.Stream (Parser_Index), Preceding => True);
+                     end if;
+                  end loop;
+               end Search;
+
+            begin
+               if Term.Ref.Node = Invalid_Node_Access then
+                  --  Invalid when Current_Shared_Token is an empty nonterm.
+                  Tree.Next_Terminal (Term, Following => True);
+               end if;
+
+               Search (Forward => True);
+               if not Found then
+                  Term := Tree.To_Stream_Node_Parents (Config.Current_Shared_Token);
+                  Tree.Prev_Terminal (Term, Super.Stream (Parser_Index), Preceding => True);
+                  Search (Forward => False);
+               end if;
+
+               if not Found then
+                  return;
+               end if;
+
+               if Tree.ID (Term.Ref.Node) not in Tree.Lexer.Descriptor.String_1_ID |
+                 Tree.Lexer.Descriptor.String_2_ID
+               then
+                  return;
+               end if;
+
+               Try_Insert_Quote_1 (Super, Shared, Parser_Index, Current_Line, Term.Ref.Node, Config, Local_Config_Heap);
+            end;
          end;
       end if;
-   exception
-   when Bad_Config =>
-      null;
    end Try_Insert_Quote;
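
   --  Illustrative sketch (not part of this change; plain Integer positions
   --  stand in for buffer positions and token indices, names are invented)
   --  of how the cases above pick the token range handed to Finish for
   --  deletion: when the unbalanced quote precedes the parse error token
   --  (case b), delete from the error token to the end of the line; when it
   --  follows (case c), delete from the current token up to the quote.
   procedure Quote_Delete_Range_Sketch
     (Quote_Pos, Error_Pos, Current_Pos, Line_End_Pos : in     Integer;
      First, Last                                     :    out Integer)
   is begin
      if Quote_Pos < Error_Pos then
         --  case b: quote is before the error token
         First := Error_Pos;
         Last  := Line_End_Pos;
      else
         --  case c: quote is after the error token
         First := Current_Pos;
         Last  := Quote_Pos - 1;
      end if;
   end Quote_Delete_Range_Sketch;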
 
    procedure Try_Delete_Input
-     (Super             : not null access Base.Supervisor;
-      Shared            : not null access Base.Shared;
-      Parser_Index      : in              SAL.Base_Peek_Type;
-      Config            : in              Configuration;
-      Local_Config_Heap : in out          Config_Heaps.Heap_Type)
+     (Super             : in out Base.Supervisor;
+      Shared            : in out Parser.Parser;
+      Parser_Index      : in     SAL.Base_Peek_Type;
+      Config            : in     Configuration;
+      Local_Config_Heap : in out Config_Heaps.Heap_Type)
    is
       --  Try deleting (= skipping) the current shared input token.
 
-      use Config_Op_Arrays, Config_Op_Array_Refs;
-      Trace       : WisiToken.Trace'Class renames Super.Trace.all;
-      EOF_ID      : Token_ID renames Trace.Descriptor.EOI_ID;
-      Check_Limit : WisiToken.Token_Index renames Shared.Table.McKenzie_Param.Check_Limit;
+      use Recover_Op_Arrays, Recover_Op_Array_Refs;
+
+      Check_Limit : constant Syntax_Trees.Sequential_Index := Shared.Table.McKenzie_Param.Check_Limit;
 
       McKenzie_Param : McKenzie_Param_Type renames Shared.Table.McKenzie_Param;
 
-      ID : constant Token_ID := Shared.Terminals.all (Config.Current_Shared_Token).ID;
+      Next_Element_Node : constant Syntax_Trees.Node_Access := Parse.Peek_Current_Element_Node (Shared.Tree, Config);
+
+      Next_Node : constant Syntax_Trees.Node_Access := Shared.Tree.First_Terminal (Next_Element_Node);
+
+      Next_Index : constant Syntax_Trees.Sequential_Index :=
+        (if Next_Node = Syntax_Trees.Invalid_Node_Access
+         then Syntax_Trees.Sequential_Index'Last
+         else Shared.Tree.Get_Sequential_Index (Next_Node));
+
+      Next_ID : constant Token_ID :=
+        (if Next_Node = Syntax_Trees.Invalid_Node_Access
+         then Invalid_Token_ID
+         else Shared.Tree.ID (Next_Node));
    begin
-      if ID /= EOF_ID and then
-         --  can't delete EOF
-         (Length (Config.Ops) = 0 or else
-           --  Don't delete an ID we just inserted; waste of time
-           (not Equal (Constant_Ref (Config.Ops, Last_Index (Config.Ops)),
-                       (Insert, ID, Config.Current_Shared_Token))))
+      if  Next_Node = Syntax_Trees.Invalid_Node_Access then
+         --  Current token is an empty nonterm; we don't delete that here. It
+         --  can be deleted by Parse, if it can't be shifted.
+         return;
+
+      elsif Next_ID in Shared.Tree.Lexer.Descriptor.EOI_ID | Invalid_Token_ID then
+         --  can't delete EOI
+         return;
+
+      elsif Length (Config.Ops) > 0 and then
+        (Equal (Constant_Ref (Config.Ops, Last_Index (Config.Ops)), (Insert, Next_ID, Next_Index)) or
+           --  Don't delete an ID we just inserted; waste of time, leads to
+           --  infinite loop.
+
+           Constant_Ref (Config.Ops, Last_Index (Config.Ops)).Op = Undo_Reduce
+           --  Only need Undo_Reduce to Push_Back part of it or allow Insert;
+           --  allowing delete gives redundant configs.
+           --  ada_mode-recover_extra_end_loop.adb with incremental parse.
+        )
       then
-         declare
-            New_Config : Configuration := Config;
+         return;
+      end if;
 
-            function Matching_Push_Back return Boolean
-            is begin
-               for I in reverse First_Index (New_Config.Ops) .. Last_Index (New_Config.Ops) loop
-                  declare
-                     Op : Config_Op renames Config_Op_Array_Refs.Variable_Ref (New_Config.Ops, I).Element.all;
-                  begin
-                     exit when not (Op.Op in Undo_Reduce | Push_Back | Delete);
-                     if Op = (Push_Back, ID, New_Config.Current_Shared_Token) then
-                        return True;
-                     end if;
-                  end;
-               end loop;
-               return False;
-            end Matching_Push_Back;
-         begin
-            New_Config.Error_Token.ID := Invalid_Token_ID;
-            New_Config.Check_Status   := (Label => WisiToken.Semantic_Checks.Ok);
+      declare
+         New_Config : Configuration := Config;
 
-            New_Config.Cost := New_Config.Cost + McKenzie_Param.Delete (ID);
-            New_Config.Strategy_Counts (Delete) := Config.Strategy_Counts (Delete) + 1;
+         function Matching_Push_Back return Boolean
+         is begin
+            for I in reverse First_Index (New_Config.Ops) .. Last_Index (New_Config.Ops) loop
+               declare
+                  Op : Recover_Op renames Recover_Op_Array_Refs.Variable_Ref (New_Config.Ops, I).Element.all;
+               begin
+                  exit when not (Op.Op in Undo_Reduce | Push_Back | Delete);
+                  if Op = (Push_Back, Next_ID, Next_Index) then
+                     return True;
+                  end if;
+               end;
+            end loop;
+            return False;
+         end Matching_Push_Back;
+      begin
+         New_Config.Cost := New_Config.Cost + McKenzie_Param.Delete (Next_ID);
 
-            if Matching_Push_Back then
-               --  We are deleting a push_back; cancel the push_back cost, to make
-               --  this the same as plain deleting.
-               New_Config.Cost := Natural'Max (Natural'First, New_Config.Cost - McKenzie_Param.Push_Back (ID));
-            end if;
+         New_Config.Error_Token            := Syntax_Trees.Invalid_Recover_Token;
+         New_Config.In_Parse_Action_Status := (Label => Syntax_Trees.In_Parse_Actions.Ok);
 
-            if Is_Full (New_Config.Ops) then
-               Super.Config_Full ("delete", Parser_Index);
-               raise Bad_Config;
-            else
-               Append (New_Config.Ops, (Delete, ID, Config.Current_Shared_Token));
-            end if;
-            New_Config.Current_Shared_Token := New_Config.Current_Shared_Token + 1;
+         New_Config.Strategy_Counts (Delete) := Config.Strategy_Counts (Delete) + 1;
 
-            if New_Config.Resume_Token_Goal - Check_Limit < New_Config.Current_Shared_Token then
-               New_Config.Resume_Token_Goal := New_Config.Current_Shared_Token + Check_Limit;
-            end if;
+         if Matching_Push_Back then
+            --  We are deleting a push_back; cancel the push_back cost, to make
+            --  this the same as plain deleting.
+            New_Config.Cost := Natural'Max (Natural'First, New_Config.Cost - McKenzie_Param.Push_Back (Next_ID));
+         end if;
 
-            Local_Config_Heap.Add (New_Config);
+         if Is_Full (New_Config.Ops) then
+            Super.Config_Full (Shared, "delete", Parser_Index);
+            raise Bad_Config;
+         else
+            Append (New_Config.Ops, (Delete, Next_ID, Next_Index));
+         end if;
+
+         Parse.Do_Delete (Shared.Tree, New_Config);
 
-            if Trace_McKenzie > Detail then
-               Base.Put
-                 ("delete " & Image (ID, Trace.Descriptor.all), Super, Shared, 
Parser_Index, New_Config);
+         declare
+            Node : constant Syntax_Trees.Valid_Node_Access := Parse.Peek_Current_First_Sequential_Terminal
+              (Super, Shared, New_Config);
+            New_Next_Index : Syntax_Trees.Sequential_Index;
+         begin
+            New_Next_Index := Shared.Tree.Get_Sequential_Index (Node);
+            if New_Config.Resume_Token_Goal - Check_Limit < New_Next_Index then
+               New_Config.Resume_Token_Goal := New_Next_Index + Check_Limit;
+               Super.Extend_Sequential_Index (Shared, New_Config.Resume_Token_Goal);
             end if;
          end;
-      end if;
+
+         Local_Config_Heap.Add (New_Config);
+
+         if Trace_McKenzie > Detail then
+            Super.Put
+              (Shared, "delete " & Image (Next_ID, 
Shared.Tree.Lexer.Descriptor.all), Parser_Index, New_Config);
+         end if;
+      end;
    end Try_Delete_Input;
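
   --  Illustrative sketch of the cost rule above, standalone: deleting a
   --  token that an earlier op pushed back costs the same as deleting it
   --  without the Push_Back, so the Push_Back cost is cancelled (clamped at
   --  zero). Parameter names here are illustrative only.
   function Delete_Cost_Sketch
     (Base_Cost, Delete_Cost, Push_Back_Cost : in Natural;
      Was_Pushed_Back                        : in Boolean)
     return Natural
   is
      Result : Natural := Base_Cost + Delete_Cost;
   begin
      if Was_Pushed_Back then
         Result := Natural'Max (Natural'First, Result - Push_Back_Cost);
      end if;
      return Result;
   end Delete_Cost_Sketch;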
 
    procedure Process_One
-     (Super         : not null access Base.Supervisor;
-      Shared        : not null access Base.Shared;
-      Config_Status : out             Base.Config_Status)
+     (Super  : in out Base.Supervisor;
+      Shared : in out Parser.Parser)
    is
       --  Get one config from Super, check to see if it is a viable
       --  solution. If not, enqueue variations to check.
 
-      use all type Base.Config_Status;
       use all type Parser.Language_Fixes_Access;
-      use all type Semantic_Checks.Check_Status_Label;
+      use all type Syntax_Trees.In_Parse_Actions.Status_Label;
 
-      Trace      : WisiToken.Trace'Class renames Super.Trace.all;
-      Descriptor : WisiToken.Descriptor renames Super.Trace.Descriptor.all;
+      Descriptor : WisiToken.Descriptor renames Shared.Tree.Lexer.Descriptor.all;
       Table      : Parse_Table renames Shared.Table.all;
 
       Parser_Index : SAL.Base_Peek_Type;
-      Config       : Configuration;
+      Config       : aliased Configuration;
 
       Local_Config_Heap : Config_Heaps.Heap_Type;
       --  We collect all the variants to enqueue, then deliver them all at
       --  once to Super, to minimizes task interactions.
    begin
-      Super.Get (Parser_Index, Config, Config_Status);
+      Super.Get (Shared, Parser_Index, Config);
 
-      if Config_Status = All_Done then
+      if Parser_Index = SAL.Base_Peek_Type'First then
+         --  No more configs.
          return;
       end if;
 
       if Trace_McKenzie > Detail then
-         Base.Put ("dequeue", Super, Shared, Parser_Index, Config);
-         if Trace_McKenzie > Extra then
-            Put_Line (Trace, Super.Label (Parser_Index), "stack: " & Image (Config.Stack, Trace.Descriptor.all));
-         end if;
+         Super.Put (Shared, "dequeue", Parser_Index, Config);
       end if;
 
       --  Fast_Forward; parse Insert, Delete in Config.Ops that have not
@@ -1723,27 +2154,31 @@ package body WisiToken.Parse.LR.McKenzie_Recover.Explore is
          --  Config is from Language_Fixes.
 
          Fast_Forward (Super, Shared, Parser_Index, Local_Config_Heap, Config);
-         Super.Put (Parser_Index, Local_Config_Heap);
+         Super.Put (Shared, Parser_Index, Local_Config_Heap);
          return;
       end if;
 
       pragma Assert (Config.Current_Insert_Delete = 0);
-      --  Config.Current_Insert_Delete > 1 is a programming error.
+      --  Config.Current_Insert_Delete > 0 is a programming error.
 
-      if Config.Error_Token.ID /= Invalid_Token_ID then
+      if Shared.Tree.Element_ID (Config.Error_Token) /= Invalid_Token_ID then
          if Shared.Language_Fixes = null then
             null;
          else
-            Shared.Language_Fixes
-              (Trace, Shared.Lexer, Super.Label (Parser_Index), Shared.Table.all,
-               Shared.Terminals.all, Super.Parser_State (Parser_Index).Tree, Local_Config_Heap,
-               Config);
+            begin
+               Shared.Language_Fixes (Super, Shared, Parser_Index, Local_Config_Heap, Config);
+            exception
+            when Invalid_Case =>
+               if Debug_Mode then
+                  raise SAL.Programmer_Error with "Language_Fixes raised Invalid_Case; should handle that locally";
+               end if;
+            end;
 
             --  The solutions enqueued by Language_Fixes should be lower cost than
             --  others (typically 0), so they will be checked first.
          end if;
 
-         if Config.Check_Status.Label = Ok then
+         if Config.In_Parse_Action_Status.Label = Ok then
             --  Parse table Error action.
             --
             --  We don't clear Config.Error_Token here, because
@@ -1753,8 +2188,9 @@ package body WisiToken.Parse.LR.McKenzie_Recover.Explore is
             null;
 
          else
-            --  Assume "ignore check error" is a viable solution. But give it a
-            --  cost, so a solution provided by Language_Fixes is preferred.
+            --  Assume "ignore in_parse_action error" is a viable solution. But
+            --  give it a cost, so a solution provided by Language_Fixes is
+            --  preferred.
 
             declare
                New_State : Unknown_State_Index;
@@ -1762,29 +2198,46 @@ package body WisiToken.Parse.LR.McKenzie_Recover.Explore is
                Config.Cost := Config.Cost + Table.McKenzie_Param.Ignore_Check_Fail;
                Config.Strategy_Counts (Ignore_Error) := Config.Strategy_Counts (Ignore_Error) + 1;
 
+               declare
+                  use Recover_Op_Arrays, Recover_Op_Array_Refs;
+                  Last : constant SAL.Base_Peek_Type := Last_Index (Config.Ops);
+               begin
+                  if Last /= SAL.Invalid_Peek_Index and then
+                    Constant_Ref (Config.Ops, Last).Op = Undo_Reduce and then
+                    Constant_Ref (Config.Ops, Last).Nonterm = Shared.Tree.Element_ID (Config.Error_Token)
+                  then
+                     --  We are ignoring this undo_reduce.
+                     Delete_Last (Config.Ops);
+                  end if;
+               end;
+
                --  finish reduce.
-               Config.Stack.Pop (SAL.Base_Peek_Type (Config.Check_Token_Count));
+               Config.Stack.Pop (Config.In_Parse_Action_Token_Count);
 
-               New_State := Goto_For (Table, Config.Stack.Peek.State, Config.Error_Token.ID);
+               New_State := Goto_For (Table, Config.Stack.Peek.State, Shared.Tree.Element_ID (Config.Error_Token));
 
                if New_State = Unknown_State then
                   if Config.Stack.Depth = 1 then
                      --  Stack is empty, and we did not get Accept; really bad syntax got
                      --  us here; abandon this config. See ada_mode-recover_bad_char.adb.
-                     Super.Put (Parser_Index, Local_Config_Heap);
+                     Super.Put (Shared, Parser_Index, Local_Config_Heap);
                      return;
                   else
                      raise SAL.Programmer_Error with
                        "process_one found test case for new_state = Unknown; 
old state " &
                        Trimmed_Image (Config.Stack.Peek.State) & " nonterm " & 
Image
-                         (Config.Error_Token.ID, Trace.Descriptor.all);
+                         (Shared.Tree.Element_ID (Config.Error_Token), Descriptor);
                   end if;
                end if;
 
-               Config.Stack.Push ((New_State, Invalid_Node_Index, Config.Error_Token));
+               Config.Stack.Push ((New_State, Config.Error_Token));
 
                --  We _don't_ clear Check_Status and Error_Token here; Check needs
                --  them, and sets them as appropriate.
+
+               if Trace_McKenzie > Detail then
+                  Super.Put (Shared, "ignore in_parse_action error and 
continue", Parser_Index, Config);
+               end if;
             end;
          end if;
       end if;
@@ -1792,11 +2245,11 @@ package body WisiToken.Parse.LR.McKenzie_Recover.Explore is
       --  Call Check to see if this config succeeds.
       case Check (Super, Shared, Parser_Index, Config, Local_Config_Heap) is
       when Success =>
-         Super.Success (Parser_Index, Config, Local_Config_Heap);
+         Super.Success (Shared, Parser_Index, Config, Local_Config_Heap);
          return;
 
       when Abandon =>
-         Super.Put (Parser_Index, Local_Config_Heap);
+         Super.Put (Shared, Parser_Index, Local_Config_Heap);
          return;
 
       when Continue =>
@@ -1805,9 +2258,9 @@ package body WisiToken.Parse.LR.McKenzie_Recover.Explore is
       end case;
 
       if Trace_McKenzie > Detail then
-         Base.Put ("continuing", Super, Shared, Parser_Index, Config);
+         Super.Put (Shared, "continuing", Parser_Index, Config);
          if Trace_McKenzie > Extra then
-            Put_Line (Trace, Super.Label (Parser_Index), "stack: " & Image (Config.Stack, Trace.Descriptor.all));
+            Put_Line (Shared.Tree, Super.Stream (Parser_Index), "stack: " & LR.Image (Config.Stack, Shared.Tree));
          end if;
       end if;
 
@@ -1824,49 +2277,67 @@ package body WisiToken.Parse.LR.McKenzie_Recover.Explore is
 
       Try_Insert_Terminal (Super, Shared, Parser_Index, Config, Local_Config_Heap);
 
-      if Push_Back_Valid (Config) and then
-        (not Check_Reduce_To_Start (Super, Shared, Parser_Index, Config))
-        --  If Config reduces to the start nonterm, there's no point in Push_Back or Undo_Reduce.
-      then
-         Try_Push_Back (Super, Shared, Parser_Index, Config, Local_Config_Heap);
-
-         if Undo_Reduce_Valid (Config.Stack, Super.Parser_State (Parser_Index).Tree) then
-            Try_Undo_Reduce (Super, Shared, Parser_Index, Config, Local_Config_Heap);
+      declare
+         Valid : constant Boolean := Push_Back_Valid (Super, Shared, Config, Push_Back_Undo_Reduce => False);
+         Reduce_To_Start : constant Boolean := Check_Reduce_To_Start (Super, Shared, Parser_Index, Config);
+      begin
+         if Valid and then
+           (not Shared.Tree.Is_Empty_Nonterm (Config.Stack.Peek.Token) and
+              --  We only allow Push_Back of empty nonterm from Language_Fixes;
+              --  otherwise it is usually redundant with Undo_Reduce.
+              not Reduce_To_Start)
+              --  If Config reduces to the start nonterm, there's no point in Push_Back or Undo_Reduce.
+         then
+            Try_Push_Back (Super, Shared, Parser_Index, Config, Local_Config_Heap);
          end if;
+      end;
+
+      if Undo_Reduce_Valid (Super, Shared, Config) then
+         Try_Undo_Reduce (Super, Shared, Parser_Index, Config, Local_Config_Heap);
       end if;
 
       if None_Since_FF (Config.Ops, Insert) then
          Try_Delete_Input (Super, Shared, Parser_Index, Config, Local_Config_Heap);
       end if;
 
+      --  See if there is a mismatched quote. The solution is to delete
+      --  tokens, nominally replacing them with an expanded string literal.
+      --  So we try this when it is ok to try delete.
+      --
       --  This is run once per input line, independent of what other ops
-      --  have been done.
-      if Config.Check_Status.Label = Ok and
+      --  have been done. Compare to Config.String_Quote_Checked is done in
+      --  Try_Insert_Quote.
+      if Config.In_Parse_Action_Status.Label = Ok and
         (Descriptor.String_1_ID /= Invalid_Token_ID or Descriptor.String_2_ID /= Invalid_Token_ID) and
-        (Config.String_Quote_Checked = Invalid_Line_Number or else
-           Config.String_Quote_Checked < Shared.Terminals.all 
(Config.Current_Shared_Token).Line)
+        None_Since_FF (Config.Ops, Insert)
       then
-         --  See if there is a mismatched quote. The solution is to delete
-         --  tokens, nominally replacing them with an expanded string literal.
-         --  So we try this when it is ok to try delete.
-         if None_Since_FF (Config.Ops, Insert) then
-            Try_Insert_Quote (Super, Shared, Parser_Index, Config, Local_Config_Heap);
-         end if;
+         Try_Insert_Quote (Super, Shared, Parser_Index, Config, Local_Config_Heap);
       end if;
 
-      Super.Put (Parser_Index, Local_Config_Heap);
+      Super.Put (Shared, Parser_Index, Local_Config_Heap);
    exception
-   when Bad_Config =>
+   when Invalid_Case =>
       --  Just abandon this config; tell Super we are done.
-      Super.Put (Parser_Index, Local_Config_Heap);
+      Super.Put (Shared, Parser_Index, Local_Config_Heap);
+
+   when E : Bad_Config =>
+      if Debug_Mode then
+         --  Tell the developer about this bug.
+         Shared.Tree.Lexer.Trace.Put_Line ("Process_One: Bad_Config: " & 
Standard.Ada.Exceptions.Exception_Message (E));
+         Shared.Tree.Lexer.Trace.Put_Line 
(GNAT.Traceback.Symbolic.Symbolic_Traceback (E));
+         raise;
+      else
+         --  Just abandon this config; tell Super we are done.
+         Super.Put (Shared, Parser_Index, Local_Config_Heap);
+      end if;
 
    when E : others =>
-      Super.Put (Parser_Index, Local_Config_Heap);
+      Super.Put (Shared, Parser_Index, Local_Config_Heap);
       if Debug_Mode then
          raise;
       elsif Trace_McKenzie > Outline then
          Put_Line
-           (Super.Trace.all, Super.Label (Parser_Index),
+           (Shared.Tree, Super.Stream (Parser_Index),
             "Process_One: unhandled exception " & 
Ada.Exceptions.Exception_Name (E) & ": " &
               Ada.Exceptions.Exception_Message (E));
       end if;
diff --git a/wisitoken-parse-lr-mckenzie_recover-explore.ads b/wisitoken-parse-lr-mckenzie_recover-explore.ads
index b80124a70d..d4324c98f3 100644
--- a/wisitoken-parse-lr-mckenzie_recover-explore.ads
+++ b/wisitoken-parse-lr-mckenzie_recover-explore.ads
@@ -2,7 +2,7 @@
 --
 --  Code to explore parse table, enqueuing new configs to check.
 --
---  Copyright (C) 2018 Free Software Foundation, Inc.
+--  Copyright (C) 2018, 2021, 2022 Free Software Foundation, Inc.
 --
 --  This library is free software;  you can redistribute it and/or modify it
 --  under terms of the  GNU General Public License  as published by the Free
@@ -21,8 +21,7 @@ with WisiToken.Parse.LR.McKenzie_Recover.Base;
 private package WisiToken.Parse.LR.McKenzie_Recover.Explore is
 
    procedure Process_One
-     (Super         : not null access Base.Supervisor;
-      Shared        : not null access Base.Shared;
-      Config_Status : out             Base.Config_Status);
+     (Super  : in out Base.Supervisor;
+      Shared : in out Parser.Parser);
 
 end WisiToken.Parse.LR.McKenzie_Recover.Explore;
diff --git a/wisitoken-parse-lr-mckenzie_recover-parse.adb b/wisitoken-parse-lr-mckenzie_recover-parse.adb
index b56bba666f..51066b2cea 100644
--- a/wisitoken-parse-lr-mckenzie_recover-parse.adb
+++ b/wisitoken-parse-lr-mckenzie_recover-parse.adb
@@ -2,7 +2,7 @@
 --
 --  See spec
 --
---  Copyright (C) 2018 - 2020 Free Software Foundation, Inc.
+--  Copyright (C) 2018 - 2022 Free Software Foundation, Inc.
 --
 --  This library is free software;  you can redistribute it and/or modify it
 --  under terms of the  GNU General Public License  as published by the Free
@@ -20,242 +20,863 @@ pragma License (Modified_GPL);
 package body WisiToken.Parse.LR.McKenzie_Recover.Parse is
 
    procedure Compute_Nonterm
-     (ID              : in     Token_ID;
-      Stack           : in     Recover_Stacks.Stack;
-      Tokens          : in out Recover_Token_Array;
-      Nonterm         :    out Recover_Token;
-      Default_Virtual : in     Boolean)
+     (Tree    : in     Syntax_Trees.Tree;
+      ID      : in     Token_ID;
+      Stack   : in     Recover_Stacks.Stack;
+      Tokens  : in out Syntax_Trees.Recover_Token_Array;
+      Nonterm :    out Syntax_Trees.Virtual_Recover_Token)
    is
-      Min_Terminal_Index_Set : Boolean := False;
+      use Syntax_Trees;
+
+      First_Terminal_Set : Boolean := False;
    begin
       Nonterm :=
-        (ID      => ID,
-         Virtual => (if Tokens'Length = 0 then Default_Virtual else False),
-         others  => <>);
+        (Virtual                   => True,
+         ID                        => ID,
+         Contains_Virtual_Terminal => False,
+         others                    => <>);
 
       for I in Tokens'Range loop
          Tokens (I) := Stack.Peek (Tokens'Last - I + 1).Token;
       end loop;
 
       for T of Tokens loop
-         Nonterm.Virtual := Nonterm.Virtual or T.Virtual;
-
-         if Nonterm.Byte_Region.First > T.Byte_Region.First then
-            Nonterm.Byte_Region.First := T.Byte_Region.First;
-         end if;
+         Nonterm.Contains_Virtual_Terminal := @ or Tree.Contains_Virtual_Terminal (T);
 
-         if Nonterm.Byte_Region.Last < T.Byte_Region.Last then
-            Nonterm.Byte_Region.Last := T.Byte_Region.Last;
+         if not First_Terminal_Set then
+            Nonterm.First_Terminal := Tree.First_Terminal (T);
+            if Nonterm.First_Terminal /= Syntax_Trees.Invalid_Node_Access then
+               First_Terminal_Set := True;
+            end if;
          end if;
+      end loop;
 
-         if not Min_Terminal_Index_Set then
-            if T.Min_Terminal_Index /= Invalid_Token_Index then
-               Min_Terminal_Index_Set     := True;
-               Nonterm.Min_Terminal_Index := T.Min_Terminal_Index;
-            end if;
+      for T of reverse Tokens loop
+         Nonterm.Last_Terminal := Tree.Last_Terminal (T);
+         if Nonterm.Last_Terminal /= Syntax_Trees.Invalid_Node_Access then
+            exit;
          end if;
       end loop;
    end Compute_Nonterm;
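
   --  Illustrative sketch of the two scans above: the virtual nonterm's
   --  first terminal is the first child (left to right) that contains one,
   --  and its last terminal is the first child found right to left. Here 0
   --  stands in for "no terminal" (an empty child); the type and names are
   --  illustrative only, not the WisiToken API.
   type Child_Terminals is array (Positive range <>) of Natural;

   procedure First_Last_Terminal_Sketch
     (Children    : in     Child_Terminals;
      First, Last :    out Natural)
   is begin
      First := 0;
      Last  := 0;
      for C of Children loop
         if C /= 0 then
            First := C;
            exit;
         end if;
      end loop;
      for C of reverse Children loop
         if C /= 0 then
            Last := C;
            exit;
         end if;
      end loop;
   end First_Last_Terminal_Sketch;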
 
    function Reduce_Stack
-     (Shared          : not null access Base.Shared;
-      Stack           : in out          Recover_Stacks.Stack;
-      Action          : in              Reduce_Action_Rec;
-      Nonterm         :    out          Recover_Token;
-      Default_Virtual : in              Boolean)
-     return Semantic_Checks.Check_Status
+     (Shared_Parser : in out LR.Parser.Parser;
+      Stack         : in out Recover_Stacks.Stack;
+      Action        : in     Reduce_Action_Rec;
+      Nonterm       :    out Syntax_Trees.Recover_Token)
+     return Syntax_Trees.In_Parse_Actions.Status
    is
-      use all type Semantic_Checks.Semantic_Check;
-      use all type Semantic_Checks.Check_Status_Label;
+      use all type Syntax_Trees.In_Parse_Actions.In_Parse_Action;
+      use all type Syntax_Trees.In_Parse_Actions.Status_Label;
 
       Last   : constant SAL.Base_Peek_Type := SAL.Base_Peek_Type (Action.Token_Count);
-      Tokens : Recover_Token_Array (1 .. Last);
+      Tokens : Syntax_Trees.Recover_Token_Array (1 .. Last);
+
+      In_Parse_Action : constant Syntax_Trees.In_Parse_Actions.In_Parse_Action := Shared_Parser.Get_In_Parse_Action
+        (Action.Production);
    begin
-      pragma Assert (Stack.Depth > Last);
-      Compute_Nonterm (Action.Production.LHS, Stack, Tokens, Nonterm, 
Default_Virtual);
+      if Stack.Depth <= Last then
+         raise Bad_Config;
+      end if;
+
+      Compute_Nonterm (Shared_Parser.Tree, Action.Production.LHS, Stack, Tokens, Nonterm);
 
-      if Action.Check = null then
+      if In_Parse_Action = null then
          --  Now we can pop the stack.
          Stack.Pop (SAL.Base_Peek_Type (Action.Token_Count));
          return (Label => Ok);
       else
-         return Status : constant Semantic_Checks.Check_Status :=
-           Action.Check (Shared.Lexer, Nonterm, Tokens, Recover_Active => True)
+         return Status : constant Syntax_Trees.In_Parse_Actions.Status :=
+           In_Parse_Action (Shared_Parser.Tree, Nonterm, Tokens, Recover_Active => True)
          do
             if Status.Label = Ok then
                Stack.Pop (SAL.Base_Peek_Type (Action.Token_Count));
+
+               --  We don't pop the stack for error, so Language_Fixes and other
+               --  recover ops can access the child tokens.
             end if;
          end return;
       end if;
    end Reduce_Stack;
 
-   function Parse_One_Item
-     (Super             :         not null access Base.Supervisor;
-      Shared            :         not null access Base.Shared;
-      Parser_Index      :         in              SAL.Peek_Type;
-      Parse_Items       : aliased in out          Parse_Item_Arrays.Vector;
-      Parse_Item_Index  :         in              Positive;
-      Shared_Token_Goal :         in              Base_Token_Index;
-      Trace_Prefix      :         in              String)
-     return Boolean
+   procedure Left_Breakdown
+     (Tree   : in     Syntax_Trees.Tree;
+      Stream : in out Bounded_Streams.List)
    is
-      --  Perform parse actions on Parse_Items (Parse_Item_Index), until one
-      --  fails (return False) or Shared_Token_Goal is shifted (return
-      --  True).
+      use Bounded_Streams;
+      use Syntax_Trees;
+
+      --  Derived from Syntax_Trees.Left_Breakdown. We do not delete virtual
+      --  terminals, to allow insert before, delete.
+
+      Cur          : Cursor            := Stream.First;
+      To_Delete    : Cursor            := Cur;
+      Node         : Valid_Node_Access := Stream (Cur);
+      Next_I       : Positive_Index_Type;
+      Next_Sibling : Cursor            := No_Element;
+   begin
+      loop
+         Next_I := Positive_Index_Type'Last;
+
+         for I in reverse 2 .. Tree.Child_Count (Node) loop
+            declare
+               Child : constant Valid_Node_Access := Tree.Child (Node, I);
+            begin
+               if Tree.Child_Count (Child) > 0 or Tree.Label (Child) in Terminal_Label then
+                  Next_I := I;
+               end if;
+
+               Cur := Stream.Insert (Element => Child, Before => Cur);
+
+               Next_Sibling := Cur;
+
+               --  We don't do Tree.Clear_Parent (Child) here, because we are not
+               --  editing the syntax tree. If this config succeeds,
+               --  Tree.Left_Breakdown will be called.
+            end;
+         end loop;
+
+         if Tree.Child_Count (Tree.Child (Node, 1)) > 0 or Tree.Label (Tree.Child (Node, 1)) in Terminal_Label then
+            Next_I := 1;
+         else
+            --  Tree.Child (Node, 1) is an empty nonterm.
+            if Next_I = Positive_Index_Type'Last then
+               --  Node is an empty nonterm; move to first sibling below.
+               null;
+
+            elsif Next_I > 2 then
+               --  First non_empty is in Node.Children (Next_I); delete other empty
+               --  nonterms that were added to the stream.
+               for I in 2 .. Next_I - 1 loop
+                  declare
+                     To_Delete_2 : Cursor := Cur;
+                  begin
+                     Stream.Next (Cur);
+                     Stream.Delete (To_Delete_2);
+                  end;
+               end loop;
+               pragma Assert (Stream.Element (Cur) = Tree.Child (Node, Next_I));
+
+               --  Delete the nonterm that we were breaking down, and record the one
+               --  we are now breaking down for deletion.
+               raise SAL.Not_Implemented with "found test case for Next_I > 2";
+               --  Stream.Delete (To_Delete);
+               --  To_Delete := Cur;
+            end if;
+         end if;
+
+         if Next_I = Positive_Index_Type'Last then
+            --  Node is an empty nonterm; move to next sibling, which was pushed
+            --  on Stream in an earlier loop. test_incremental.adb Recover_04.
+            pragma Assert (Next_Sibling /= No_Element);
+            Cur  := Next_Sibling;
+            Node := Stream (Cur);
+            if Tree.Label (Node) in Terminal_Label or Cur = To_Delete then
+               null;
+            else
+               --  Delete the nonterm that we were breaking down, and record the one
+               --  we are now breaking down for deletion.
+               Stream.Delete (To_Delete);
+               To_Delete := Cur;
+            end if;
+         else
+            Node := Tree.Child (Node, Next_I);
+         end if;
+
+         if Tree.Label (Node) in Terminal_Label then
+            if Next_I = 1 then
+               Stream.Insert (Element => Node, Before => Cur);
+            else
+               --  already inserted above
+               null;
+            end if;
+
+            Stream.Delete (To_Delete);
+            exit;
+         end if;
+      end loop;
+
+   exception
+   when SAL.Container_Full =>
+      --  From Stream.Insert. We don't raise Bad_Config here, because the
+      --  only problem is the subtree is too large.
+      raise Invalid_Case;
+   end Left_Breakdown;
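
   --  Illustrative sketch of the same idea on a self-contained toy tree
   --  type: repeatedly replace the first stream element by its children
   --  until the first element is a leaf, keeping right siblings in order.
   --  It assumes every subtree eventually reaches a leaf; the empty-nonterm
   --  handling above is omitted. All names here are illustrative, not the
   --  WisiToken API.
   with Ada.Containers.Doubly_Linked_Lists;
   procedure Left_Breakdown_Sketch is
      type Node;
      type Node_Access is access Node;
      type Node_Array is array (Positive range <>) of Node_Access;
      type Node (Child_Count : Natural) is record
         Name     : Character;
         Children : Node_Array (1 .. Child_Count);
      end record;

      package Node_Lists is new Ada.Containers.Doubly_Linked_Lists (Node_Access);

      --  Toy tree A -> (B -> (x, y)); x and y are terminals.
      X : constant Node_Access := new Node'(0, 'x', (1 .. 0 => null));
      Y : constant Node_Access := new Node'(0, 'y', (1 .. 0 => null));
      B : constant Node_Access := new Node'(2, 'B', (X, Y));
      A : constant Node_Access := new Node'(1, 'A', (1 => B));

      Stream : Node_Lists.List;

      procedure Left_Breakdown (Stream : in out Node_Lists.List) is
      begin
         --  Expand the first element until it is a leaf (Child_Count = 0).
         while Stream.First_Element.Child_Count > 0 loop
            declare
               Parent : constant Node_Access := Stream.First_Element;
            begin
               Stream.Delete_First;
               declare
                  Pos : constant Node_Lists.Cursor := Stream.First;
               begin
                  --  Insert the children, in order, where the parent was.
                  for Child of Parent.Children loop
                     Stream.Insert (Before => Pos, New_Item => Child);
                  end loop;
               end;
            end;
         end loop;
      end Left_Breakdown;
   begin
      Stream.Append (A);
      Left_Breakdown (Stream);
      --  Stream now holds x, y; x is the first terminal of the old subtree.
   end Left_Breakdown_Sketch;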
+
+   procedure Current_Token_ID_Peek_3
+     (Super         :         in out Base.Supervisor;
+      Shared_Parser :         in out LR.Parser.Parser;
+      Config        : aliased in     Configuration;
+      Tokens        :            out Token_ID_Array_1_3)
+   --  Return the current token from Config in Tokens (1). Return the two
+   --  following tokens in Tokens (2 .. 3).
+   is
+      use Recover_Op_Arrays;
+      use Recover_Op_Array_Refs;
+      use Syntax_Trees;
+
+      --  We can't use Parse.Get_Current_Token, Parse.Next_Token, because we
+      --  are not allowed to modify Config. In particular, we cannot apply
+      --  Breakdown to Config.Input_Stream on the second and third tokens,
+      --  which is required if Delete applies to those tokens.
       --
-      --  We return Boolean, not Check_Status, because Abandon and Continue
-      --  are up to the caller.
+      --  We can extract the first three IDs without modifying anything.
       --
-      --  If any actions have conflicts, append the conflict configs and actions to
-      --  Parse_Items.
+      --  Fast_Forward applies the Delete to the current token, so we should
+      --  not have to here, but rather than rely on that and special case it
+      --  here, we handle all three in the same way.
+
+      Tokens_Last           : Integer               := 0;
+      Current_Insert_Delete : SAL.Base_Peek_Type    := Config.Current_Insert_Delete;
+      Peek_State            : Peek_Sequential_State := Peek_Sequential_Start (Super, Shared_Parser, Config);
+      Inc_Shared_Token      : Boolean               := True;
+   begin
+      loop -- three tokens, skip Op = Delete
+         declare
+            Next_Node : constant Valid_Node_Access := Peek_Sequential_Terminal (Peek_State);
+            Index     : constant Sequential_Index  := Shared_Parser.Tree.Get_Sequential_Index (Next_Node);
+         begin
+            --  Sequential_Index may have gaps, so we must extend on each token.
+            Super.Extend_Sequential_Index (Shared_Parser, Index + 1);
+
+            if Current_Insert_Delete /= No_Insert_Delete and then
+              Token_Index (Constant_Ref (Config.Insert_Delete, Current_Insert_Delete)) =
+              Shared_Parser.Tree.Get_Sequential_Index (Next_Node)
+            then
+               Inc_Shared_Token := False;
+               declare
+                  Op : Insert_Delete_Op renames Constant_Ref (Config.Insert_Delete, Current_Insert_Delete);
+               begin
+                  case Insert_Delete_Op_Label (Op.Op) is
+                  when Insert =>
+                     Tokens_Last          := @ + 1;
+                     Tokens (Tokens_Last) := ID (Op);
 
-      use Parse_Item_Arrays;
-      use Config_Op_Arrays;
-      use all type Semantic_Checks.Check_Status_Label;
+                  when Delete =>
+                     Peek_Next_Sequential_Terminal (Shared_Parser.Tree, Peek_State);
+                  end case;
+
+                  Current_Insert_Delete := @ + 1;
+
+                  if Current_Insert_Delete > Last_Index (Config.Insert_Delete) then
+                     Current_Insert_Delete := No_Insert_Delete;
+                  end if;
+               end;
+            else
+               Inc_Shared_Token     := True;
+               Tokens_Last          := @ + 1;
+               Tokens (Tokens_Last) := Shared_Parser.Tree.ID (Next_Node);
+            end if;
+         end;
+
+         exit when Tokens (Tokens_Last) = Shared_Parser.Tree.Lexer.Descriptor.EOI_ID or Tokens_Last = 3;
+
+         if Inc_Shared_Token then
+            Peek_Next_Sequential_Terminal (Shared_Parser.Tree, Peek_State);
+         end if;
+      end loop;
+
+      for I in Tokens_Last + 1 .. 3 loop
+         Tokens (I) := Invalid_Token_ID;
+      end loop;
+   end Current_Token_ID_Peek_3;
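
   --  Illustrative sketch of the peek logic above using plain arrays: walk
   --  the input IDs, applying a pending, index-sorted insert/delete op list
   --  without modifying it, until three effective IDs (or the end of input)
   --  have been collected. Types and names are illustrative only.
   type Op_Kind is (Insert, Delete);
   type Op is record
      Kind  : Op_Kind;
      ID    : Integer;   --  meaningful for Insert only
      Index : Positive;  --  input index the op applies at
   end record;
   type Op_Array is array (Positive range <>) of Op;
   type ID_Array is array (Positive range <>) of Integer;

   procedure Peek_3_Sketch
     (Input  : in     ID_Array;
      Ops    : in     Op_Array;
      Tokens :    out ID_Array)
   is
      I       : Positive := Input'First;
      Next_Op : Positive := Ops'First;
      Last    : Natural  := Tokens'First - 1;
   begin
      Tokens := (others => 0);
      while Last < Tokens'Last and then I <= Input'Last loop
         if Next_Op <= Ops'Last and then Ops (Next_Op).Index = I then
            case Ops (Next_Op).Kind is
            when Insert =>
               Last          := Last + 1;
               Tokens (Last) := Ops (Next_Op).ID;
            when Delete =>
               I := I + 1;  --  skip the deleted input token
            end case;
            Next_Op := Next_Op + 1;
         else
            Last          := Last + 1;
            Tokens (Last) := Input (I);
            I             := I + 1;
         end if;
      end loop;
   end Peek_3_Sketch;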
+
+   function Peek_Current_Element_Node
+     (Tree   : in Syntax_Trees.Tree;
+      Config : in Configuration)
+     return Syntax_Trees.Valid_Node_Access
+   is
+      use all type Bounded_Streams.Cursor;
+   begin
+      return
+        (if Config.Input_Stream.First = Bounded_Streams.No_Element
+         then Tree.Get_Node (Tree.Shared_Stream, Config.Current_Shared_Token.Element)
+         else Config.Input_Stream (Config.Input_Stream.First));
+   end Peek_Current_Element_Node;
+
+   function Peek_Current_First_Terminal
+     (Tree   : in Syntax_Trees.Tree;
+      Config : in Configuration)
+     return Syntax_Trees.Valid_Node_Access
+   is
+      use Bounded_Streams;
+      use Syntax_Trees;
+   begin
+      if Config.Input_Stream.First /= No_Element then
+         declare
+            Result : constant Node_Access := First_Terminal (Tree, Config.Input_Stream);
+         begin
+            if Result /= Invalid_Node_Access then
+               return Result;
+            end if;
+         end;
+      end if;
+
+      return Tree.First_Terminal (Config.Current_Shared_Token).Node;
+   end Peek_Current_First_Terminal;
+
+   function Peek_Current_First_Sequential_Terminal
+     (Super         : in out Base.Supervisor;
+      Shared_Parser : in out LR.Parser.Parser;
+      Config        : in     Configuration)
+     return Syntax_Trees.Valid_Node_Access
+   is
+      use Syntax_Trees;
+      Result : constant Valid_Node_Access := Peek_Current_First_Terminal (Shared_Parser.Tree, Config);
+   begin
+      Super.Extend_Sequential_Index (Shared_Parser, Thru => Result, Positive => True);
+      return Result;
+   end Peek_Current_First_Sequential_Terminal;
+
+   procedure First_Sequential_Terminal
+     (Super         : in out Base.Supervisor;
+      Shared_Parser : in out LR.Parser.Parser;
+      Ref           :    out Config_Stream_Parents)
+   is
+      use Bounded_Streams;
+      use Syntax_Trees;
+      Tree       : Syntax_Trees.Tree renames Shared_Parser.Tree;
+      First_Term : Node_Access;
+   begin
+      Ref.Element := Ref.Stream.First;
+      Ref.Node    := Invalid_Node_Access;
+
+      loop
+         exit when not Has_Element (Ref.Element);
+         First_Term := Tree.First_Terminal (Ref.Stream.Element (Ref.Element));
+         if First_Term = Invalid_Node_Access then
+            --  Ref.Element is an empty nonterm; skip it
+            null;
+         else
+            Base.Extend_Sequential_Index
+              (Super, Shared_Parser,
+               Thru     => First_Term,
+               Positive => True);
+            Ref.Node := Tree.First_Sequential_Terminal (Ref.Stream.Element (Ref.Element), Ref.Parents);
+            exit when Ref.Node /= Invalid_Node_Access;
+         end if;
+
+         Ref.Stream.Next (Ref.Element);
+      end loop;
+   end First_Sequential_Terminal;
+
+   function First_Terminal
+     (Tree   : in Syntax_Trees.Tree;
+      Stream : in Bounded_Streams.List)
+     return Syntax_Trees.Node_Access
+   is
+      use Bounded_Streams;
+      use Syntax_Trees;
+      Cur  : Cursor      := Stream.First;
+      Node : Node_Access := Invalid_Node_Access;
+   begin
+      loop
+         exit when not Has_Element (Cur);
+
+         Node := Tree.First_Terminal (Stream (Cur));
+         exit when Node /= Invalid_Node_Access;
+
+         Stream.Next (Cur);
+      end loop;
+      return Node;
+   end First_Terminal;
+
+   procedure First_Terminal
+     (Tree : in     Syntax_Trees.Tree;
+      Ref  : in out Config_Stream_Parents)
+   is
+      use Bounded_Streams;
+      use Syntax_Trees;
+   begin
+      Ref.Element := Ref.Stream.First;
+      loop
+         exit when not Has_Element (Ref.Element);
+
+         Ref.Node := Tree.First_Terminal (Ref.Stream.all (Ref.Element));
+         exit when Ref.Node /= Invalid_Node_Access;
+
+         Ref.Stream.Next (Ref.Element);
+      end loop;
+   end First_Terminal;
+
+   procedure Last_Sequential_Terminal
+     (Super         : in out Base.Supervisor;
+      Shared_Parser : in out LR.Parser.Parser;
+      Ref           : in out Config_Stream_Parents)
+   is
+      use Bounded_Streams;
+      use Syntax_Trees;
+      Tree  : Syntax_Trees.Tree renames Shared_Parser.Tree;
+
+   begin
+      Ref.Element := Ref.Stream.Last;
+      Ref.Node    := Invalid_Node_Access;
+
+      loop
+         exit when not Has_Element (Ref.Element);
+
+         Base.Extend_Sequential_Index
+           (Super, Shared_Parser,
+            Thru     => Tree.Last_Terminal (Ref.Stream.Element (Ref.Element)),
+            Positive => True);
+         Ref.Node := Tree.Last_Sequential_Terminal (Ref.Stream.all (Ref.Element), Ref.Parents);
+         exit when Ref.Node /= Invalid_Node_Access;
+
+         Ref.Stream.Previous (Ref.Element);
+      end loop;
+   end Last_Sequential_Terminal;
+
+   procedure Next_Sequential_Terminal
+     (Tree : in     Syntax_Trees.Tree;
+      Ref  : in out Config_Stream_Parents)
+   is
+      use Bounded_Streams;
+   begin
+      Tree.Next_Sequential_Terminal (Ref.Node, Ref.Parents);
+
+      loop
+         exit when Ref.Node /= Syntax_Trees.Invalid_Node_Access;
+         Ref.Element := Ref.Stream.Next (Ref.Element);
+         if Ref.Element = No_Element then
+            Ref.Node := Syntax_Trees.Invalid_Node_Access;
+            exit;
+         end if;
+         Ref.Node := Tree.First_Sequential_Terminal (Ref.Stream.all (Ref.Element), Ref.Parents);
+      end loop;
+   end Next_Sequential_Terminal;
+
+   procedure Prev_Sequential_Terminal
+     (Tree : in     Syntax_Trees.Tree;
+      Ref  : in out Config_Stream_Parents)
+   is
+      use Bounded_Streams;
+   begin
+      Tree.Prev_Sequential_Terminal (Ref.Node, Ref.Parents);
+
+      loop
+         exit when Ref.Node /= Syntax_Trees.Invalid_Node_Access;
+         Ref.Element := Ref.Stream.Previous (Ref.Element);
+         if Ref.Element = No_Element then
+            Ref.Node := Syntax_Trees.Invalid_Node_Access;
+            exit;
+         end if;
+         Ref.Node := Tree.Last_Sequential_Terminal (Ref.Stream.all (Ref.Element), Ref.Parents);
+      end loop;
+   end Prev_Sequential_Terminal;
+
+   procedure Do_Delete
+     (Tree   : in     Syntax_Trees.Tree;
+      Config : in out Configuration)
+   is
+      use Syntax_Trees;
+      use all type Bounded_Streams.Cursor;
+   begin
+      --  Handle skip empty nonterms, breakdown of non-empty nonterms.
+      --  Config.Input_Stream can end in an empty nonterm;
+      --  ada_mode-interactive_02.adb.
+      loop
+         if Config.Input_Stream.First = Bounded_Streams.No_Element then
+            if Tree.Label (Config.Current_Shared_Token.Element) in Terminal_Label then
+               Tree.Stream_Next (Config.Current_Shared_Token, Rooted => False);
+               return;
+            else
+               --  Current_Shared_Token needs Breakdown; move it to Config.Input_Stream.
+               Config.Input_Stream.Append
+                 (Tree.Get_Node (Config.Current_Shared_Token.Stream, Config.Current_Shared_Token.Element));
+               Tree.Stream_Next (Config.Current_Shared_Token, Rooted => False);
+            end if;
+
+         else
+            declare
+               Next_Node : constant Valid_Node_Access := Config.Input_Stream (Config.Input_Stream.First);
+            begin
+               if Tree.Label (Next_Node) in Terminal_Label then
+                  Config.Input_Stream.Delete_First;
+                  return;
+
+               elsif Tree.Is_Empty_Nonterm (Next_Node) then
+                  Config.Input_Stream.Delete_First;
+
+               else
+                  Left_Breakdown (Tree, Config.Input_Stream);
+               end if;
+            end;
+         end if;
+      end loop;
+   end Do_Delete;
+
+   function Get_Current_Token
+     (Super                   : in out Base.Supervisor;
+      Shared_Parser           : in out LR.Parser.Parser;
+      Config                  : in out Configuration;
+      Inc_Shared_Stream_Token :    out Boolean;
+      Inc_Input_Stream_Token  :    out Boolean)
+     return Syntax_Trees.Recover_Token
+   --  Return the current token from Config. If a Delete op applies,
+   --  Config is updated to reflect the delete. Otherwise Config is not
+   --  changed; calling Get_Current_Token again on (a copy of) Config
+   --  will return the same token as this call.
+   --
+   --  Use Peek_Current_Token_ID if Config may not change at all.
+   --
+   --  Inc_*_Token are for Next_Token.
+   is
+      use Recover_Op_Arrays;
+      use Recover_Op_Array_Refs;
+      use Syntax_Trees;
+      use all type Bounded_Streams.Cursor;
+      Tree : Syntax_Trees.Tree renames Shared_Parser.Tree;
+   begin
+      loop -- Op = Delete requires loop
+         if Config.Current_Insert_Delete /= No_Insert_Delete and then
+           (declare
+               Current_Node : constant Syntax_Trees.Valid_Node_Access := Peek_Current_First_Sequential_Terminal
+                 (Super, Shared_Parser, Config);
+            begin
+               Token_Index (Constant_Ref (Config.Insert_Delete, Config.Current_Insert_Delete)) =
+                 Tree.Get_Sequential_Index (Current_Node))
+         then
+            declare
+               Op : Insert_Delete_Op renames Constant_Ref (Config.Insert_Delete, Config.Current_Insert_Delete);
+            begin
+               case Insert_Delete_Op_Label (Op.Op) is
+               when Insert =>
+                  Inc_Shared_Stream_Token := False;
+                  Inc_Input_Stream_Token  := False;
+                  return (Virtual                   => True,
+                          ID                        => ID (Op),
+                          Contains_Virtual_Terminal => True,
+                          others                    => <>);
+
+               when Delete =>
+                  pragma Assert (Is_Terminal (Op.Del_ID, Tree.Lexer.Descriptor.all));
+
+                  Do_Delete (Tree, Config);
+
+                  Config.Current_Insert_Delete := @ + 1;
+
+                  if Config.Current_Insert_Delete > Last_Index (Config.Insert_Delete) then
+                     Config.Current_Insert_Delete := No_Insert_Delete;
+                     Clear (Config.Insert_Delete);
+                  end if;
+               end case;
+            end;
+
+         elsif Config.Input_Stream.First /= Bounded_Streams.No_Element then
+            Inc_Shared_Stream_Token := False;
+            Inc_Input_Stream_Token  := True;
+            return Tree.Get_Recover_Token (Config.Input_Stream (Config.Input_Stream.First));
+
+         else
+            Inc_Shared_Stream_Token := True;
+            return Tree.Get_Recover_Token (Config.Current_Shared_Token);
+         end if;
+      end loop;
+   end Get_Current_Token;
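
To make the precedence among token sources concrete, here is a minimal standalone sketch of the same idea: a pending Insert op supplies a virtual token, a pending Delete op consumes one real token and loops, and otherwise the config-local input stream is consulted before the shared stream. All types and names below are illustrative only, not the WisiToken API.

--  Simplified sketch (not the WisiToken API): pick the "current token"
--  for a recover config from three possible sources, in priority order.
with Ada.Containers.Doubly_Linked_Lists;
with Ada.Text_IO; use Ada.Text_IO;

procedure Current_Token_Sketch is
   type Token_ID is new Integer;

   package Token_Lists is new Ada.Containers.Doubly_Linked_Lists (Token_ID);

   type Op_Kind is (None, Insert, Delete);

   type Config is record
      Pending_Op    : Op_Kind  := None;
      Pending_ID    : Token_ID := 0;
      Input_Stream  : Token_Lists.List;   --  tokens pushed back locally
      Shared_Stream : Token_Lists.List;   --  unmodified shared input
   end record;

   function Current_Token (C : in out Config) return Token_ID is
   begin
      loop
         if C.Pending_Op = Insert then
            --  An Insert op supplies a virtual token; nothing is consumed.
            return C.Pending_ID;

         elsif C.Pending_Op = Delete then
            --  A Delete op consumes the next real token, then we try again.
            if not C.Input_Stream.Is_Empty then
               C.Input_Stream.Delete_First;
            else
               C.Shared_Stream.Delete_First;
            end if;
            C.Pending_Op := None;

         elsif not C.Input_Stream.Is_Empty then
            return C.Input_Stream.First_Element;

         else
            return C.Shared_Stream.First_Element;
         end if;
      end loop;
   end Current_Token;

   C : Config;
begin
   C.Shared_Stream.Append (1);
   C.Shared_Stream.Append (2);
   C.Pending_Op := Delete;                           --  delete token 1
   Put_Line (Token_ID'Image (Current_Token (C)));    --  prints  2
end Current_Token_Sketch;
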
+
+   procedure Next_Token
+     (Super                   :         in out Base.Supervisor;
+      Shared_Parser           :         in out LR.Parser.Parser;
+      Config                  : aliased in out Configuration;
+      Inc_Shared_Stream_Token :         in     Boolean;
+      Inc_Input_Stream_Token  :         in     Boolean)
+   --  Increment the appropriate "current token" index in Config.
+   --  Inc_*_Token are from Get_Current_Token.
+   is
+      use Recover_Op_Arrays, Recover_Op_Array_Refs;
+      use all type Bounded_Streams.Cursor;
+      Tree : Syntax_Trees.Tree renames Shared_Parser.Tree;
+   begin
+      if Last_Index (Config.Insert_Delete) > 0 and then
+        Config.Current_Insert_Delete = Last_Index (Config.Insert_Delete)
+      then
+         Config.Current_Insert_Delete := No_Insert_Delete;
+         Clear (Config.Insert_Delete);
+      else
+         declare
+            Current_Node : constant Syntax_Trees.Valid_Node_Access := Peek_Current_First_Sequential_Terminal
+              (Super, Shared_Parser, Config);
+         begin
+            if Config.Current_Insert_Delete /= No_Insert_Delete and then
+              Token_Index (Constant_Ref (Config.Insert_Delete, Config.Current_Insert_Delete + 1)) =
+              Tree.Get_Sequential_Index (Current_Node)
+            then
+               Config.Current_Insert_Delete := @ + 1;
+
+            else
+               if Config.Input_Stream.First = Bounded_Streams.No_Element then
+                  if Inc_Shared_Stream_Token then
+                     Tree.Stream_Next (Config.Current_Shared_Token, Rooted => False);
+                  end if;
+               else
+                  if Inc_Input_Stream_Token then
+                     Config.Input_Stream.Delete_First;
+                  end if;
+               end if;
+            end if;
+         end;
+      end if;
+   end Next_Token;
 
-      Trace      : WisiToken.Trace'Class renames Super.Trace.all;
-      Descriptor : WisiToken.Descriptor renames Super.Trace.Descriptor.all;
-      Table      : Parse_Table renames Shared.Table.all;
+   function Parse_One_Item
+     (Super             :         in out Base.Supervisor;
+      Shared_Parser     :         in out LR.Parser.Parser;
+      Parser_Index      :         in     SAL.Peek_Type;
+      Parse_Items       : aliased in out Parse_Item_Arrays.Vector;
+      Parse_Item_Index  :         in     Positive;
+      Shared_Token_Goal :         in     Syntax_Trees.Base_Sequential_Index;
+      Trace_Prefix      :         in     String)
+     return Boolean
+   --  Perform parse actions on Parse_Items (Parse_Item_Index) until an
+   --  error is encountered (return False) or Shared_Token_Goal is
+   --  shifted (return True); if Shared_Token_Goal is
+   --  Invalid_Sequential_Index, parse until Item.Config.Insert_Delete is
+   --  empty.
+   --
+   --  We return Boolean, not Status, because Abandon and Continue
+   --  are up to the caller.
+   --
+   --  If any actions have conflicts, append the conflict configs and
+   --  actions to Parse_Items.
+
+   is
+      use Parse_Item_Arrays;
+      use Recover_Op_Arrays;
+      use all type Syntax_Trees.In_Parse_Actions.Status_Label;
 
-      Item   : Parse_Item renames Parse_Item_Array_Refs.Variable_Ref 
(Parse_Items, Parse_Item_Index).Element.all;
-      Config : Configuration renames Item.Config;
-      Action : Parse_Action_Node_Ptr renames Item.Action;
+      Tree       : WisiToken.Syntax_Trees.Tree renames Shared_Parser.Tree;
+      Trace      : WisiToken.Trace'Class renames Tree.Lexer.Trace.all;
+      Descriptor : WisiToken.Descriptor renames Shared_Parser.Tree.Lexer.Descriptor.all;
+      Table      : Parse_Table renames Shared_Parser.Table.all;
 
-      Conflict : Parse_Action_Node_Ptr;
+      Item        : Parse_Item renames Parse_Item_Array_Refs.Variable_Ref
+        (Parse_Items, Parse_Item_Index).Element.all;
+      Config      : Configuration renames Item.Config;
+      Action_Cur  : Parse_Action_Node_Ptr renames Item.Action;
+      Action      : Parse_Action_Rec;
 
-      Restore_Terminals_Current : Base_Token_Index;
-      Current_Token             : Base_Token := McKenzie_Recover.Current_Token
-        (Terminals                 => Shared.Terminals.all,
-         Terminals_Current         => Config.Current_Shared_Token,
-         Restore_Terminals_Current => Restore_Terminals_Current,
-         Insert_Delete             => Config.Insert_Delete,
-         Current_Insert_Delete     => Config.Current_Insert_Delete);
+      Inc_Shared_Stream_Token : Boolean;
+      Inc_Input_Stream_Token  : Boolean;
+      Current_Token           : Syntax_Trees.Recover_Token := Get_Current_Token
+        (Super, Shared_Parser, Config, Inc_Shared_Stream_Token, Inc_Input_Stream_Token);
 
       New_State : Unknown_State_Index;
       Success   : Boolean := True;
 
+      procedure Get_Action
+      is
+         --  We use the incremental parse algorithm even if the main parse is
+         --  full parse, because Push_Back places whole nonterms on
+         --  Config.Input_Stream.
+         --
+         --  Same logic as in Parser.Get_Action, but this
+         --  operates on Config.
+      begin
+         loop
+            declare
+               Current_State : constant State_Index := Config.Stack.Peek.State;
+            begin
+               if Is_Terminal (Tree.Element_ID (Current_Token), Descriptor) then
+                  Action_Cur := Action_For (Table, Current_State, Tree.Element_ID (Current_Token));
+                  Action     := Action_Cur.Item;
+                  return;
+               else
+                  --  nonterminal.
+                  declare
+                     New_State : constant Unknown_State_Index := Goto_For
+                       (Table, Current_State, Tree.Element_ID (Current_Token));
+
+                     Dummy : Ada.Containers.Count_Type;
+                     pragma Unreferenced (Dummy);
+                  begin
+                     if New_State /= Unknown_State then
+                        Action_Cur := null;
+                        Action     :=
+                          (Verb       => Shift,
+                           Production => Invalid_Production_ID,
+                           State      => New_State);
+                        return;
+                     else
+                        declare
+                           First_In_Current : constant Syntax_Trees.Node_Access := Shared_Parser.Tree.First_Terminal
+                             (Current_Token);
+                        begin
+                           if First_In_Current = Syntax_Trees.Invalid_Node_Access then
+                              --  Current_Token is an empty nonterm; skip it.
+                              Next_Token
+                                (Super, Shared_Parser, Config, Inc_Shared_Stream_Token, Inc_Input_Stream_Token);
+
+                              Current_Token := Get_Current_Token
+                                (Super, Shared_Parser, Config, Inc_Shared_Stream_Token, Inc_Input_Stream_Token);
+                           else
+                              Action_Cur := Action_For (Table, Current_State, Tree.ID (First_In_Current));
+                              Action     := Action_Cur.Item;
+
+                              case Action.Verb is
+                              when Shift =>
+                                 if Config.Input_Stream.Length = 0 then
+                                    --  Current_Token is from Shared_Stream. We can't do Breakdown in
+                                    --  Shared_Stream; that might invalidate other Config.Current_Token.
+                                    --  So add token to Config.Input_Stream, then breakdown.
+                                    Config.Input_Stream.Append (Current_Token.Element_Node);
+                                    Tree.Stream_Next (Config.Current_Shared_Token, Rooted => False);
+                                 end if;
+
+                                 Left_Breakdown (Tree, Config.Input_Stream);
+
+                                 Current_Token := Get_Current_Token
+                                   (Super, Shared_Parser, Config, Inc_Shared_Stream_Token, Inc_Input_Stream_Token);
+
+                                 if Trace_McKenzie > Extra then
+                                    Trace.Put_Line
+                                      (Trace_Prefix & ": breakdown; input_stream: " & LR.Image
+                                         (Config.Input_Stream, Tree));
+                                    Trace.Put_Line (" ... current_token: " & Shared_Parser.Tree.Image (Current_Token));
+                                 end if;
+                                 return;
+
+                              when Accept_It | Reduce =>
+                                 return;
+
+                              when Error =>
+                              --  We don't do Undo_Reduce here; Explore will do that with an appropriate cost.
+                                 return;
+                              end case;
+                           end if;
+                        end;
+                     end if;
+                  end;
+               end if;
+            end;
+         end loop;
+      end Get_Action;
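
The dispatch in Get_Action (a terminal goes through the action table, a nonterm through the goto table, falling back to the nonterm's first terminal when there is no goto) can be illustrated with a toy grammar. The table contents and names below are invented for illustration; they are not WisiToken's tables.

--  Simplified sketch (not the WisiToken API) of the action lookup above.
with Ada.Text_IO; use Ada.Text_IO;

procedure Action_Lookup_Sketch is
   type Symbol is (T_Id, T_Plus, N_Expr);          --  two terminals, one nonterm
   subtype Terminal is Symbol range T_Id .. T_Plus;

   type Verb is (Shift, Reduce, Error);
   type State is range 0 .. 3;

   --  Hand-written fragments of an LR table, invented for this example.
   Action_Table : constant array (State, Terminal) of Verb :=
     (0 => (T_Id => Shift, T_Plus => Error),
      1 => (T_Id => Error, T_Plus => Shift),
      2 => (T_Id => Shift, T_Plus => Reduce),
      3 => (T_Id => Error, T_Plus => Error));

   Unknown : constant Integer := -1;
   Goto_Table : constant array (State) of Integer :=
     (0 => 1, 1 => Unknown, 2 => 3, 3 => Unknown);  --  goto on N_Expr

   function First_Terminal (S : Symbol) return Terminal is
     (if S in Terminal then S else T_Id);  --  pretend N_Expr derives T_Id first

   function Lookup (Current : State; Tok : Symbol) return Verb is
   begin
      if Tok in Terminal then
         return Action_Table (Current, Tok);
      elsif Goto_Table (Current) /= Unknown then
         return Shift;                       --  "shift" the whole nonterm
      else
         --  No goto: break the nonterm down and use its first terminal.
         return Action_Table (Current, First_Terminal (Tok));
      end if;
   end Lookup;
begin
   --  No goto on N_Expr in state 1, so its first terminal decides: Error.
   Put_Line (Verb'Image (Lookup (1, N_Expr)));
end Action_Lookup_Sketch;
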
+
    begin
       if Trace_McKenzie > Detail then
          if Trace_McKenzie > Extra then
+            Put_Line (Trace, Trace_Prefix & ": stack: " & LR.Image (Config.Stack, Tree));
             if Config.Current_Insert_Delete /= No_Insert_Delete then
-               Put_Line (Trace, Super.Label (Parser_Index), Trace_Prefix & ": 
Insert_Delete: " &
-                           Image (Config.Insert_Delete, Trace.Descriptor.all));
+               Put_Line (Tree, Super.Stream (Parser_Index), Trace_Prefix & ": Insert_Delete: " &
+                           Image (Config.Insert_Delete, Descriptor));
+            end if;
+            if Config.Input_Stream.Length > 0 then
+               Put_Line (Tree, Super.Stream (Parser_Index), Trace_Prefix & ": input_stream: " &
+                           LR.Image (Config.Input_Stream, Tree));
             end if;
          end if;
 
-         Base.Put (Trace_Prefix & ": " & Image (Current_Token, Descriptor), 
Super, Shared, Parser_Index, Config);
-         if Shared_Token_Goal /= Invalid_Token_Index then
-            Put_Line (Trace, Super.Label (Parser_Index), Trace_Prefix & ": 
Shared_Token_Goal :" &
-                        WisiToken.Token_Index'Image (Shared_Token_Goal));
+         if Shared_Token_Goal /= Syntax_Trees.Invalid_Sequential_Index then
+            Put_Line (Tree, Super.Stream (Parser_Index), Trace_Prefix & ": Shared_Token_Goal :" &
+                        Shared_Token_Goal'Image);
          end if;
       end if;
 
       Item.Parsed := True;
 
-      if Action = null then
-         Action := Action_For (Table, Config.Stack.Peek.State, 
Current_Token.ID);
+      if Action_Cur = null then
+         --  Item is original Config; else Item is from a conflict
+         Get_Action;
+      else
+         Action := Action_Cur.Item;
       end if;
 
       loop
-         Conflict := Action.Next;
-         loop
-            exit when Conflict = null;
-            if Is_Full (Parse_Items) then
-               if Trace_McKenzie > Outline then
-                  Put_Line (Trace, Super.Label (Parser_Index), Trace_Prefix & 
": too many conflicts; abandoning");
-               end if;
-            else
-               declare
-                  New_Config : Configuration := Config;
-               begin
-                  New_Config.Current_Shared_Token := Restore_Terminals_Current;
-
+         declare
+            Conflict : constant Parse_Action_Node_Ptr := (if Action_Cur = null then null else Action_Cur.Next);
+         begin
+            --  We don't loop on Conflict here; if Conflict.Next is non-null, it
+            --  will be enqueued when Conflict is parsed.
+            if Conflict /= null then
+               if Is_Full (Parse_Items) then
+                  if Trace_McKenzie > Outline then
+                     Put_Line (Tree, Super.Stream (Parser_Index),
+                               Trace_Prefix & ": too many conflicts; abandoning");
+                  end if;
+                  raise Bad_Config;
+               else
                   if Trace_McKenzie > Detail then
                      Put_Line
-                       (Trace, Super.Label (Parser_Index), Trace_Prefix & ":" 
& State_Index'Image
-                          (New_Config.Stack.Peek.State) & ": add conflict " &
+                       (Tree, Super.Stream (Parser_Index), Trace_Prefix & ":" & State_Index'Image
+                          (Config.Stack.Peek.State) & ": add conflict " &
                           Image (Conflict.Item, Descriptor));
                   end if;
 
-                  Append (Parse_Items, (New_Config, Conflict, Parsed => False, 
Shift_Count => Item.Shift_Count));
-               end;
+                  Append
+                    (Parse_Items,
+                     (Config, Conflict,
+                      Parsed       => False,
+                      Shift_Count  => Item.Shift_Count,
+                      Reduce_Count => Item.Reduce_Count));
+               end if;
             end if;
-            Conflict := Conflict.Next;
-         end loop;
+         end;
 
          if Trace_McKenzie > Extra then
             Put_Line
-              (Trace, Super.Label (Parser_Index), Trace_Prefix & ":" & 
State_Index'Image (Config.Stack.Peek.State) &
-                 " :" & WisiToken.Token_Index'Image 
(Config.Current_Shared_Token) &
-                 ":" & Image (Current_Token, Descriptor) &
-                 " : " & Image (Action.Item, Descriptor) &
-                 (if Action.Item.Verb = Reduce
-                  then " via" & Config.Stack.Peek (SAL.Peek_Type 
(Action.Item.Token_Count + 1)).State'Image
+              (Tree, Super.Stream (Parser_Index), Trace_Prefix & ":" &
+                 Config.Stack.Peek.State'Image &
+                 ":" & Syntax_Trees.Image (Tree, Current_Token) &
+                 " : " & Image (Action, Descriptor) &
+                 (if Action.Verb = Reduce
+                  then " via" & Config.Stack.Peek (SAL.Peek_Type (Action.Token_Count + 1)).State'Image
                   else ""));
          end if;
 
-         case Action.Item.Verb is
+         case Action.Verb is
          when Shift =>
             Item.Shift_Count := Item.Shift_Count + 1;
 
-            Config.Stack.Push
-              ((Action.Item.State,
-                Invalid_Node_Index,
-                (Current_Token.ID,
-                 Byte_Region        => Current_Token.Byte_Region,
-                 Min_Terminal_Index =>
-                   (if Config.Current_Insert_Delete = No_Insert_Delete
-                    then Config.Current_Shared_Token
-                    else Invalid_Token_Index),
-                 Name              => Null_Buffer_Region,
-                 Virtual           => Config.Current_Insert_Delete /= 
No_Insert_Delete)));
-
-            Current_Token := Next_Token
-              (Terminals                 => Shared.Terminals.all,
-               Terminals_Current         => Config.Current_Shared_Token,
-               Restore_Terminals_Current => Restore_Terminals_Current,
-               Insert_Delete             => Config.Insert_Delete,
-               Current_Insert_Delete     => Config.Current_Insert_Delete);
+            Config.Stack.Push ((Action.State, Syntax_Trees.Make_Rooted (Current_Token)));
+
+            Next_Token (Super, Shared_Parser, Config, Inc_Shared_Stream_Token, Inc_Input_Stream_Token);
+            Current_Token := Get_Current_Token
+              (Super, Shared_Parser, Config, Inc_Shared_Stream_Token, Inc_Input_Stream_Token);
 
          when Reduce =>
+            Item.Reduce_Count := @ + 1;
             declare
-               Nonterm : Recover_Token;
+               Nonterm : Syntax_Trees.Recover_Token;
             begin
-               Config.Check_Status := Reduce_Stack
-                 (Shared, Config.Stack, Action.Item, Nonterm,
-                  Default_Virtual => Config.Current_Insert_Delete /= 
No_Insert_Delete);
+               Config.In_Parse_Action_Status := Reduce_Stack (Shared_Parser, Config.Stack, Action, Nonterm);
 
-               case Config.Check_Status.Label is
+               case Config.In_Parse_Action_Status.Label is
                when Ok =>
                   New_State := Config.Stack.Peek.State;
-                  New_State := Goto_For (Table, New_State, 
Action.Item.Production.LHS);
+                  New_State := Goto_For (Table, New_State, Action.Production.LHS);
 
                   if New_State = Unknown_State then
-                     --  Most likely from an inappropriate language fix.
                      if Trace_McKenzie > Outline then
-                        Base.Put (Trace_Prefix & ": Unknown_State: ", Super, 
Shared, Parser_Index, Config);
-                        Put_Line (Trace, Trace_Prefix & ": stack: " & Image 
(Config.Stack, Descriptor));
+                        Base.Put (Super, Shared_Parser, Trace_Prefix & ": Unknown_State: ", Parser_Index, Config);
+                        Put_Line
+                          (Tree, Super.Stream (Parser_Index), Trace_Prefix & ": stack: " &
+                             LR.Image (Config.Stack, Tree));
                      end if;
 
                      --  We can't just return False here; user must abandon this config.
-                     raise Bad_Config;
+                     --  This is not always an error; it could be from an inappropriate
+                     --  language fix or it could be the wrong branch of a conflict.
+                     --  ada_mode-recover_partial_15.adb.
+                     raise Invalid_Case;
                   end if;
 
-                  Config.Stack.Push ((New_State, Invalid_Node_Index, Nonterm));
+                  Config.Stack.Push ((New_State, Nonterm));
+
+               when Syntax_Trees.In_Parse_Actions.Error =>
+                  Config.Error_Token                 := Nonterm;
+                  Config.In_Parse_Action_Token_Count := SAL.Base_Peek_Type (Action.Token_Count);
+                  Success                            := False;
 
-               when Semantic_Checks.Error =>
-                  Config.Error_Token       := Nonterm;
-                  Config.Check_Token_Count := Action.Item.Token_Count;
-                  Success                  := False;
+                  if Trace_McKenzie > Extra then
+                     Put_Line
+                       (Tree, Super.Stream (Parser_Index), Trace_Prefix & ": in_parse_action fail " &
+                          Config.In_Parse_Action_Status.Label'Image);
+                  end if;
                end case;
             end;
 
          when Error =>
 
-            Config.Error_Token :=
-              (ID          => Current_Token.ID,
-               Byte_Region => Current_Token.Byte_Region,
-               others      => <>);
+            Config.Error_Token := Current_Token;
             Success            := False;
 
          when Accept_It =>
@@ -263,53 +884,52 @@ package body WisiToken.Parse.LR.McKenzie_Recover.Parse is
          end case;
 
          exit when not Success or
-           Action.Item.Verb = Accept_It or
-           (if Shared_Token_Goal = Invalid_Token_Index
+           Action.Verb = Accept_It or
+           (if Shared_Token_Goal = Syntax_Trees.Invalid_Sequential_Index
             then Length (Config.Insert_Delete) = 0
-            else Config.Current_Shared_Token > Shared_Token_Goal);
+            else
+              (declare Term : constant Syntax_Trees.Node_Access := Peek_Current_First_Sequential_Terminal
+                 (Super, Shared_Parser, Config);
+               begin Shared_Parser.Tree.Get_Sequential_Index (Term) > Shared_Token_Goal));
 
-         Action := Action_For (Table, Config.Stack.Peek.State, 
Current_Token.ID);
+         Get_Action;
       end loop;
 
-      Config.Current_Shared_Token := Restore_Terminals_Current;
-
       return Success;
    end Parse_One_Item;
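
The conflict handling above amounts to a work list: the current config is copied and queued together with the alternative action, and the caller's loop parses queued items until none remain. Below is a minimal standalone sketch of that pattern; the types and names are invented for illustration and are not the WisiToken API.

--  Simplified sketch (not the WisiToken API): fork a configuration when
--  an action has a conflict, and drain the resulting work list.
with Ada.Containers.Vectors;
with Ada.Text_IO; use Ada.Text_IO;

procedure Conflict_Worklist_Sketch is
   type Config is record
      State : Integer := 0;
   end record;

   type Item is record
      Cfg    : Config;
      Choice : Positive := 1;    --  which action of the conflict to follow
      Parsed : Boolean  := False;
   end record;

   package Item_Vectors is new Ada.Containers.Vectors (Positive, Item);
   use Item_Vectors;

   Items : Vector;

   procedure Parse_One (Index : Positive) is
      It : Item := Items (Index);
   begin
      It.Parsed := True;
      if It.Choice = 1 then
         --  Pretend choice 1 has a second, conflicting action: enqueue a
         --  copy of the config so the alternative is tried later.
         Items.Append ((Cfg => It.Cfg, Choice => 2, Parsed => False));
      end if;
      Items (Index) := It;
   end Parse_One;

   Current : Positive := 1;
begin
   Items.Append ((Cfg => (State => 0), Choice => 1, Parsed => False));
   loop
      Parse_One (Current);
      exit when Current = Items.Last_Index;
      Current := Current + 1;
   end loop;
   Put_Line ("items parsed:" & Ada.Containers.Count_Type'Image (Items.Length));  --  prints 2
end Conflict_Worklist_Sketch;
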
 
    function Parse
-     (Super             :         not null access Base.Supervisor;
-      Shared            :         not null access Base.Shared;
-      Parser_Index      :         in              SAL.Peek_Type;
-      Parse_Items       : aliased    out          Parse_Item_Arrays.Vector;
-      Config            :         in              Configuration;
-      Shared_Token_Goal :         in              Base_Token_Index;
-      All_Conflicts     :         in              Boolean;
-      Trace_Prefix      :         in              String)
+     (Super             :         in out Base.Supervisor;
+      Shared_Parser     :         in out LR.Parser.Parser;
+      Parser_Index      :         in     SAL.Peek_Type;
+      Parse_Items       : aliased    out Parse_Item_Arrays.Vector;
+      Config            :         in     Configuration;
+      Shared_Token_Goal :         in     Syntax_Trees.Base_Sequential_Index;
+      All_Conflicts     :         in     Boolean;
+      Trace_Prefix      :         in     String)
      return Boolean
    is
       use Parse_Item_Arrays;
-      Trace : WisiToken.Trace'Class renames Super.Trace.all;
-
       Last_Parsed : Natural;
       Success     : Boolean;
    begin
       Clear (Parse_Items);
-      Append (Parse_Items, (Config, Action => null, Parsed => False, 
Shift_Count => 0));
+      Append (Parse_Items, (Config, Action => null, Parsed => False, Shift_Count => 0, Reduce_Count => 0));
 
       --  Clear any errors; so they reflect the parse result.
       declare
          Config : Configuration renames Parse_Item_Array_Refs.Variable_Ref
            (Parse_Items, First_Index (Parse_Items)).Config;
       begin
-         Config.Error_Token.ID := Invalid_Token_ID;
-         Config.Check_Status   := (Label => Semantic_Checks.Ok);
+         Config.Error_Token        := Syntax_Trees.Invalid_Recover_Token;
+         Config.In_Parse_Action_Status := (Label => Syntax_Trees.In_Parse_Actions.Ok);
       end;
 
       Last_Parsed := First_Index (Parse_Items);
       loop
          --  Loop over initial config and any conflicts.
          Success := Parse_One_Item
-           (Super, Shared, Parser_Index, Parse_Items, Last_Parsed, 
Shared_Token_Goal, Trace_Prefix);
+           (Super, Shared_Parser, Parser_Index, Parse_Items, Last_Parsed, Shared_Token_Goal, Trace_Prefix);
 
          exit when Last_Index (Parse_Items) = Last_Parsed;
 
@@ -317,7 +937,7 @@ package body WisiToken.Parse.LR.McKenzie_Recover.Parse is
 
          Last_Parsed := Last_Parsed + 1;
          if Trace_McKenzie > Detail then
-            Put_Line (Trace, Super.Label (Parser_Index), Trace_Prefix & ": 
parse conflict");
+            Put_Line (Shared_Parser.Tree, Super.Stream (Parser_Index), Trace_Prefix & ": parse conflict");
          end if;
       end loop;
 
diff --git a/wisitoken-parse-lr-mckenzie_recover-parse.ads b/wisitoken-parse-lr-mckenzie_recover-parse.ads
index 99fcee905b..743fbd8ece 100644
--- a/wisitoken-parse-lr-mckenzie_recover-parse.ads
+++ b/wisitoken-parse-lr-mckenzie_recover-parse.ads
@@ -2,7 +2,7 @@
 --
 --  Config parsing subprograms.
 --
---  Copyright (C) 2018 - 2019, 2021 Free Software Foundation, Inc.
+--  Copyright (C) 2018 - 2022 Free Software Foundation, Inc.
 --
 --  This library is free software;  you can redistribute it and/or modify it
 --  under terms of the  GNU General Public License  as published by the Free
@@ -20,35 +20,123 @@ pragma License (Modified_GPL);
 with SAL.Gen_Bounded_Definite_Vectors.Gen_Refs;
 with WisiToken.Parse.LR.McKenzie_Recover.Base;
 private package WisiToken.Parse.LR.McKenzie_Recover.Parse is
+   use all type WisiToken.Syntax_Trees.Node_Label;
 
    function Reduce_Stack
-     (Shared          : not null access Base.Shared;
-      Stack           : in out          Recover_Stacks.Stack;
-      Action          : in              Reduce_Action_Rec;
-      Nonterm         :    out          Recover_Token;
-      Default_Virtual : in              Boolean)
-     return Semantic_Checks.Check_Status;
-   --  Reduce Stack according to Action, setting Nonterm. If
-   --  Action.Token_Count = 0, set Nonterm.Virtual := Default_Virtual.
+     (Shared_Parser : in out LR.Parser.Parser;
+      Stack         : in out Recover_Stacks.Stack;
+      Action        : in     Reduce_Action_Rec;
+      Nonterm       :    out Syntax_Trees.Recover_Token)
+     return Syntax_Trees.In_Parse_Actions.Status;
+   --  Reduce Stack according to Action, setting Nonterm.
+
+   procedure Current_Token_ID_Peek_3
+     (Super         :         in out Base.Supervisor;
+      Shared_Parser :         in out LR.Parser.Parser;
+      Config        : aliased in     Configuration;
+      Tokens        :            out Token_ID_Array_1_3)
+   with Post =>
+     (for all Tok of Tokens => Tok = Invalid_Token_ID or else
+        Is_Terminal (Tok, Shared_Parser.Tree.Lexer.Descriptor.all));
+   --  Return the current terminal token from Config in Tokens (1).
+   --  Return the two following terminal tokens in Tokens (2 .. 3). In
+   --  incremental parse, they may be virtual.
+   --
+   --  In Parse because it has similar code to Current_Token, Next_Token.
+
+   function Peek_Current_Element_Node
+     (Tree   : in Syntax_Trees.Tree;
+      Config : in Configuration)
+     return Syntax_Trees.Valid_Node_Access;
+   --  Stream element from Config.Shared_Token or Config.Input_Stream.
+
+   function Peek_Current_First_Terminal
+     (Tree   : in Syntax_Trees.Tree;
+      Config : in Configuration)
+     return Syntax_Trees.Valid_Node_Access;
+   --  First_Terminal from Shared_Stream starting at Config.Shared_Token, or Config.Input_Stream.
+
+   function Peek_Current_First_Sequential_Terminal
+     (Super         : in out Base.Supervisor;
+      Shared_Parser : in out LR.Parser.Parser;
+      Config        : in     Configuration)
+     return Syntax_Trees.Valid_Node_Access;
+   --  Calls Peek_Current_First_Terminal, then
+   --  Super.Extend_Sequential_Index on the result.
+
+   function First_Terminal
+     (Tree   : in Syntax_Trees.Tree;
+      Stream : in Bounded_Streams.List)
+     return Syntax_Trees.Node_Access;
+   --  Return first terminal in Stream; Invalid_Node_Access if none.
+
+   procedure First_Terminal
+     (Tree : in     Syntax_Trees.Tree;
+      Ref  : in out Config_Stream_Parents);
+
+   procedure First_Sequential_Terminal
+     (Super         : in out Base.Supervisor;
+      Shared_Parser : in out LR.Parser.Parser;
+      Ref           :    out Config_Stream_Parents);
+   --  Calls Extend_Sequential_Terminal as needed.
+
+   procedure Last_Sequential_Terminal
+     (Super         : in out Base.Supervisor;
+      Shared_Parser : in out LR.Parser.Parser;
+      Ref           : in out Config_Stream_Parents);
+   --  Calls Extend_Sequential_Terminal as needed.
+
+   procedure Next_Sequential_Terminal
+     (Tree : in     Syntax_Trees.Tree;
+      Ref  : in out Config_Stream_Parents)
+   with Pre => Bounded_Streams.Has_Element (Ref.Element) and Ref.Node /= Syntax_Trees.Invalid_Node_Access;
+   --  Can step past EOI.
+   --  Calls Extend_Sequential_Terminal as needed.
+
+   procedure Prev_Sequential_Terminal
+     (Tree : in     Syntax_Trees.Tree;
+      Ref  : in out Config_Stream_Parents)
+   with Pre => Bounded_Streams.Has_Element (Ref.Element) and Ref.Node /= Syntax_Trees.Invalid_Node_Access;
+
+   procedure Left_Breakdown
+     (Tree   : in     Syntax_Trees.Tree;
+      Stream : in out Bounded_Streams.List)
+   with Pre => Stream.Length > 0 and then
+               (declare Node : constant Syntax_Trees.Node_Access := Stream (Stream.First);
+                begin Node /= Syntax_Trees.Invalid_Node_Access and then
+                   (Tree.Label (Node) = Syntax_Trees.Nonterm and
+                      Tree.First_Terminal (Node) /= Syntax_Trees.Invalid_Node_Access)),
+     Post =>
+       (declare Node : constant Syntax_Trees.Node_Access := Stream (Stream.First);
+        begin Node /= Syntax_Trees.Invalid_Node_Access and then
+           (Tree.Label (Node) in Syntax_Trees.Terminal_Label));
+   --  Bring the first terminal in Stream (which cannot be empty) to the
+   --  front of Stream; delete any preceding empty nonterms.
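
A minimal standalone sketch of this contract, using a toy node type rather than WisiToken's syntax tree: the leading nonterm is repeatedly replaced by its children (empty nonterms simply disappear) until a terminal is at the front of the list. All names are illustrative.

--  Simplified sketch (not the WisiToken API) of left breakdown.
with Ada.Containers.Doubly_Linked_Lists;
with Ada.Text_IO; use Ada.Text_IO;

procedure Left_Breakdown_Sketch is
   type Node;
   type Node_Access is access Node;
   type Node_List is array (1 .. 2) of Node_Access;   --  at most two children, for brevity

   type Node is record
      Terminal : Character := ' ';                    --  ' ' means nonterm
      Children : Node_List := (others => null);
   end record;

   package Streams is new Ada.Containers.Doubly_Linked_Lists (Node_Access);

   function Is_Terminal (N : Node_Access) return Boolean is (N.Terminal /= ' ');

   procedure Left_Breakdown (Stream : in out Streams.List) is
   begin
      --  Precondition (as above): the first element is a nonterm that
      --  contains at least one terminal, so this loop terminates.
      while not Is_Terminal (Stream.First_Element) loop
         declare
            First : constant Node_Access := Stream.First_Element;
         begin
            Stream.Delete_First;
            --  Prepend children right-to-left so they end up in order;
            --  an empty nonterm simply disappears.
            for I in reverse First.Children'Range loop
               if First.Children (I) /= null then
                  Stream.Prepend (First.Children (I));
               end if;
            end loop;
         end;
      end loop;
   end Left_Breakdown;

   A : constant Node_Access := new Node'(Terminal => 'a', Children => (others => null));
   B : constant Node_Access := new Node'(Terminal => 'b', Children => (others => null));
   Nonterm : constant Node_Access := new Node'(Terminal => ' ', Children => (A, B));

   S : Streams.List;
begin
   S.Append (Nonterm);
   Left_Breakdown (S);
   Put_Line ("front: " & S.First_Element.Terminal);   --  prints "front: a"
end Left_Breakdown_Sketch;
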
+
+   procedure Do_Delete
+     (Tree   : in     Syntax_Trees.Tree;
+      Config : in out Configuration);
+   --  Delete the current token from Config. Does not append the op to Config.Ops.
 
    type Parse_Item is record
-      Config      : Configuration;
-      Action      : Parse_Action_Node_Ptr;
-      Parsed      : Boolean := False;
-      Shift_Count : Natural := 0;
+      Config       : aliased Configuration;
+      Action       : Parse_Action_Node_Ptr := null;
+      Parsed       : Boolean               := False;
+      Shift_Count  : Natural               := 0;
+      Reduce_Count : Natural               := 0;
 
       --  On return from Parse, if Parsed = False, this item was queued by a
       --  conflict, but not parsed; it should be ignored.
       --
       --  Otherwise, if Config.Error_Token.ID = Invalid_Token_ID and
-      --  Config.Check_Status.Label = Ok, Config was parsed successfully to
-      --  the goal.
+      --  Config.In_Parse_Action_Status.Label = Ok, Config was parsed
+      --  successfully to the goal.
       --
       --  Otherwise, the parser failed a semantic check, or encountered an
       --  Error action. Action gives the last action processed. Shift_Count
-      --  gives the number of shifts performed. If Check_Status.Label is
-      --  Error, Action.Item.Verb must be Reduce, and Config is in the
-      --  pre-reduce state.
+      --  gives the number of shifts performed. If
+      --  In_Parse_Action_Status.Label is Error, Action.Item.Verb must be
+      --  Reduce, and Config is in the pre-reduce state.
    end record;
 
    package Parse_Item_Arrays is new SAL.Gen_Bounded_Definite_Vectors
@@ -58,20 +146,21 @@ private package WisiToken.Parse.LR.McKenzie_Recover.Parse is
    package Parse_Item_Array_Refs is new Parse_Item_Arrays.Gen_Refs;
 
    function Parse
-     (Super             :         not null access Base.Supervisor;
-      Shared            :         not null access Base.Shared;
-      Parser_Index      :         in              SAL.Peek_Type;
-      Parse_Items       : aliased    out          Parse_Item_Arrays.Vector;
-      Config            :         in              Configuration;
-      Shared_Token_Goal :         in              Base_Token_Index;
-      All_Conflicts     :         in              Boolean;
-      Trace_Prefix      :         in              String)
+     (Super             :         in out Base.Supervisor;
+      Shared_Parser     :         in out WisiToken.Parse.LR.Parser.Parser;
+      Parser_Index      :         in     SAL.Peek_Type;
+      Parse_Items       : aliased    out Parse_Item_Arrays.Vector;
+      Config            :         in     Configuration;
+      Shared_Token_Goal :         in     Syntax_Trees.Base_Sequential_Index;
+      All_Conflicts     :         in     Boolean;
+      Trace_Prefix      :         in     String)
      return Boolean;
-   --  Attempt to parse Config and any conflict configs. If not
-   --  All_Conflicts, return when Config.Insert_Delete is all processed,
-   --  and either Shared_Token_Goal = Invalid_Token_Index or
-   --  Shared_Token_Goal is shifted. If All_Conflicts, return when all
-   --  conflict configs have been parsed.
+   --  Attempt to parse Config and any conflict configs. A config is
+   --  parsed when Config.Insert_Delete is all processed, and either
+   --  Shared_Token_Goal = Invalid_Sequential_Index, or Shared_Token_Goal is
+   --  shifted or an error is encountered. If All_Conflicts, return when
+   --  all conflict configs have been parsed; if not All_Conflicts,
+   --  return when one config is parsed without error.
    --
    --  Parsed configs are in Parse_Items; there is more than one if a
    --  conflict is encountered. Parse returns True if at least one
diff --git a/wisitoken-parse-lr-mckenzie_recover.adb b/wisitoken-parse-lr-mckenzie_recover.adb
index 3287fb42b2..12d6f97fb9 100644
--- a/wisitoken-parse-lr-mckenzie_recover.adb
+++ b/wisitoken-parse-lr-mckenzie_recover.adb
@@ -2,7 +2,7 @@
 --
 --  See spec
 --
---  Copyright (C) 2017 - 2020 Free Software Foundation, Inc.
+--  Copyright (C) 2017 - 2022 Free Software Foundation, Inc.
 --
 --  This library is free software;  you can redistribute it and/or modify it
 --  under terms of the  GNU General Public License  as published by the Free
@@ -19,158 +19,146 @@ pragma License (Modified_GPL);
 
 with Ada.Characters.Handling;
 with Ada.Exceptions;
-with Ada.Unchecked_Deallocation;
-with System.Multiprocessors;
+with GNAT.Traceback.Symbolic;
+with GNATCOLL.Memory;
+with WisiToken.Lexer;
 with WisiToken.Parse.LR.McKenzie_Recover.Base;
 with WisiToken.Parse.LR.McKenzie_Recover.Explore;
-with WisiToken.Parse.LR.Parser_Lists;
+with WisiToken.Parse.LR.McKenzie_Recover.Parse;
 package body WisiToken.Parse.LR.McKenzie_Recover is
-   use all type System.Multiprocessors.CPU_Range;
-
-   type Supervisor_Access is access all Base.Supervisor;
-   type Shared_Access is access all Base.Shared;
-
-   task type Worker_Task is
-      entry Start
-        (ID     : in Integer;
-         Super  : in Supervisor_Access;
-         Shared : in Shared_Access);
-      --  Start getting parser/configs to check from Config_Store. Stop when
-      --  Super reports All_Done;
-
-      entry Done;
-      --  Available after Super has reported All_Done.
-   end Worker_Task;
-
-   type Worker_Access is access Worker_Task;
-   procedure Free is new Ada.Unchecked_Deallocation (Worker_Task, 
Worker_Access);
-
-   task body Worker_Task
-   is
-      use all type Base.Config_Status;
-      Super  : Supervisor_Access;
-      Shared : Shared_Access;
-
-      Status : Base.Config_Status := Valid;
-   begin
-      loop
-         select
-            accept Start
-              (ID     : in Integer;
-               Super  : in Supervisor_Access;
-               Shared : in Shared_Access)
-
-            do
-               Task_Attributes.Set_Value (ID);
-               Worker_Task.Super  := Super;
-               Worker_Task.Shared := Shared;
-            end Start;
-         or
-            terminate;
-         end select;
-
-         loop
-            Explore.Process_One (Super, Shared, Status);
-            exit when Status = All_Done;
-         end loop;
-
-         accept Done;
-
-         Super  := null;
-         Shared := null;
-      end loop;
-
-   exception
-   when E : others =>
-      Super.Fatal (E);
-   end Worker_Task;
-
-   Worker_Tasks : array (1 .. System.Multiprocessors.CPU_Range'Max (1, 
System.Multiprocessors.Number_Of_CPUs - 1)) of
-     Worker_Access;
-   --  Declaring an array of tasks directly causes a circular elaboration
-   --  problem, and would mean a task that terminates due to an exception
-   --  is never restarted.
 
    procedure To_Recover
-     (Parser_Stack : in     Parser_Lists.Parser_Stacks.Stack;
+     (Parser_Stack : in     Syntax_Trees.Stream_ID;
       Tree         : in     Syntax_Trees.Tree;
-      Stack        : in out Recover_Stacks.Stack)
+      Stack        : in out Recover_Stacks.Stack;
+      Input_Stream : in out Bounded_Streams.List)
    is
-      Depth : constant SAL.Peek_Type := Parser_Stack.Depth;
+      Parser_Stack_Depth : constant SAL.Peek_Type := Tree.Stack_Depth (Parser_Stack);
    begin
       pragma Assert (Stack.Depth = 0);
-      if Stack.Size < Depth then
-         raise SAL.Programmer_Error with "recover stack needs more space;" & 
Depth'Image;
+      if Stack.Size < Parser_Stack_Depth then
+         raise SAL.Programmer_Error with "recover stack needs more space;" & Parser_Stack_Depth'Image;
       end if;
-      for I in reverse 1 .. Depth loop
+      for I in reverse 1 .. Parser_Stack_Depth loop
          declare
-            Item  : Parser_Lists.Parser_Stack_Item renames Parser_Stack (I);
-            Token : constant Recover_Token := (if I = Depth then (others => 
<>) else Tree.Recover_Token (Item.Token));
+            Element : constant Syntax_Trees.Stream_Index  := Tree.Peek (Parser_Stack, I);
+            Node    : constant Syntax_Trees.Node_Access   := Tree.Get_Node (Parser_Stack, Element);
+            Token   : constant Syntax_Trees.Recover_Token :=
+              (if I = Parser_Stack_Depth
+               then (others => <>)
+               else Tree.Get_Recover_Token ((Parser_Stack, Element, Node)));
          begin
-            Stack.Push ((Item.State, Item.Token, Token));
+            Stack.Push ((Tree.State (Parser_Stack, Element), Token));
          end;
       end loop;
+
+      if Tree.Stream_Input_Length (Parser_Stack) > 0 then
+         --  Parse stream input has tokens from breakdown of a nonterm in
+         --  Shared_Stream, or an error token.
+         declare
+            use Syntax_Trees;
+            Index : Stream_Index := Tree.Stream_Next (Parser_Stack, Tree.Stack_Top (Parser_Stack));
+         begin
+            loop
+               exit when Index = Invalid_Stream_Index;
+               Input_Stream.Append (Tree.Get_Node (Parser_Stack, Index));
+               Index := Tree.Stream_Next (Parser_Stack, Index);
+            end loop;
+         end;
+      end if;
    end To_Recover;
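
The shape of To_Recover is simple: walk the parser stack from its deepest element to its top and push (state, token) pairs onto a fixed-capacity recover stack, failing loudly if the destination is too small. A minimal standalone sketch with illustrative types (not the WisiToken API):

--  Simplified sketch: copy an unbounded parser stack into a bounded
--  recover stack, deepest element first.
with Ada.Containers.Vectors;
with Ada.Text_IO; use Ada.Text_IO;

procedure To_Recover_Sketch is
   type Stack_Item is record
      State : Natural   := 0;
      Token : Character := ' ';
   end record;

   package Item_Vectors is new Ada.Containers.Vectors (Positive, Stack_Item);

   Max_Recover_Depth : constant := 8;
   type Item_Array is array (1 .. Max_Recover_Depth) of Stack_Item;

   type Recover_Stack is record
      Depth : Natural := 0;
      Items : Item_Array;
   end record;

   procedure To_Recover (Parser_Stack : Item_Vectors.Vector; Result : out Recover_Stack) is
   begin
      if Natural (Parser_Stack.Length) > Max_Recover_Depth then
         raise Constraint_Error with "recover stack needs more space";
      end if;
      Result.Depth := 0;
      --  Index 1 is the bottom (deepest) element, so this copies deepest first.
      for Item of Parser_Stack loop
         Result.Depth := Result.Depth + 1;
         Result.Items (Result.Depth) := Item;
      end loop;
   end To_Recover;

   Parser_Stack : Item_Vectors.Vector;
   Recovered    : Recover_Stack;
begin
   Parser_Stack.Append ((State => 0, Token => ' '));   --  bottom / start state
   Parser_Stack.Append ((State => 3, Token => 'x'));
   To_Recover (Parser_Stack, Recovered);
   Put_Line ("recover depth:" & Natural'Image (Recovered.Depth));  --  prints 2
end To_Recover_Sketch;
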
 
    procedure Recover_Init
-     (Shared_Parser : in out LR.Parser.Parser;
+     (Super         : in out Base.Supervisor;
+      Shared_Parser : in out Parser.Parser;
       Parser_State  : in out Parser_Lists.Parser_State)
    is
+      use Recover_Op_Arrays;
       use all type WisiToken.Parse.LR.Parser.Language_Fixes_Access;
 
-      Trace  : WisiToken.Trace'Class renames Shared_Parser.Trace.all;
-      Config : Configuration;
-      Error  : Parse_Error renames Parser_State.Errors 
(Parser_State.Errors.Last);
+      Tree       : Syntax_Trees.Tree renames Shared_Parser.Tree;
+      Trace      : WisiToken.Trace'Class renames Tree.Lexer.Trace.all;
+      Config     : Configuration;
+      Error_Ref  : constant Syntax_Trees.Stream_Error_Ref  := Parser_State.Current_Error_Ref (Tree);
+      Error      : constant Syntax_Trees.Error_Data'Class  := Syntax_Trees.Error (Error_Ref);
+      Error_Node : constant Syntax_Trees.Valid_Node_Access := Syntax_Trees.Error_Node (Error_Ref);
    begin
-      Parser_State.Recover.Enqueue_Count := Parser_State.Recover.Enqueue_Count 
+ 1;
-
-      Config.Resume_Token_Goal := Parser_State.Shared_Token + 
Shared_Parser.Table.McKenzie_Param.Check_Limit;
+      Parser_State.Recover.Enqueue_Count := @ + 1;
 
       if Trace_McKenzie > Outline then
          Trace.New_Line;
          Trace.Put_Line
-           ("parser" & Integer'Image (Parser_State.Label) &
-              ": State" & State_Index'Image (Parser_State.Stack (1).State) &
-              " Current_Token " & Parser_State.Tree.Image 
(Parser_State.Current_Token, Trace.Descriptor.all) &
-              " Resume_Token_Goal" & WisiToken.Token_Index'Image 
(Config.Resume_Token_Goal));
+           ("parser " & Tree.Trimmed_Image (Parser_State.Stream) &
+              ": State" & Tree.State (Parser_State.Stream)'Image &
+              " Current_Token " & Tree.Image
+                (Tree.Current_Token (Parser_State.Stream), Terminal_Node_Numbers => True));
          Trace.Put_Line
-           ((case Error.Label is
-             when Action => "Action",
-             when Check => "Check, " & Semantic_Checks.Image 
(Error.Check_Status, Trace.Descriptor.all),
-             when Message => raise SAL.Programmer_Error));
+           (if Error in Parse_Error
+            then "Parser_Action"
+            elsif Error in In_Parse_Action_Error
+            then "In_Parse_Action, " &
+              Tree.Image (Tree.Stack_Top (Parser_State.Stream)) & " " &
+              Error.Image (Tree, Error_Node)
+            else raise SAL.Programmer_Error);
+         if Trace_McKenzie > Detail then
+            Trace.Put_Line ("parse stream:");
+            Trace.Put_Line
+              (Tree.Image
+                 (Parser_State.Stream,
+                  Children    => Trace_McKenzie > Extra,
+                  Shared      => True,
+                  Non_Grammar => True));
+         end if;
          if Trace_McKenzie > Extra then
-            Put_Line
-              (Trace, Parser_State.Label, "stack: " & Parser_Lists.Image
-                 (Parser_State.Stack, Trace.Descriptor.all, 
Parser_State.Tree));
+            Trace.New_Line;
          end if;
       end if;
 
+      declare
+         use Syntax_Trees;
+         First_Current : constant Node_Access := Tree.First_Terminal
+           (Tree.Current_Token (Parser_State.Stream).Node);
+      begin
+         if Tree.Shared_Token (Parser_State.Stream) = Invalid_Stream_Node_Ref then
+            --  test_incremental.adb Preserve_Parse_Errors_1; EOI has error
+            pragma Assert (First_Current /= Invalid_Node_Access);
+            Config.Current_Shared_Token := Invalid_Stream_Node_Ref;
+         else
+            Config.Current_Shared_Token := Tree.First_Terminal_In_Node
+              (Tree.Shared_Token (Parser_State.Stream));
+         end if;
+
+         declare
+            Seq : constant Base_Sequential_Index := Tree.Get_Sequential_Index
+              (if First_Current /= Invalid_Node_Access
+               then First_Current
+               else Config.Current_Shared_Token.Node);
+         begin
+            Config.Resume_Token_Goal :=
+              (if Seq = Invalid_Sequential_Index then 0 else Seq) + -- Invalid on empty source text.
+              Shared_Parser.Table.McKenzie_Param.Check_Limit;
+         end;
+      end;
+
       --  Additional initialization of Parser_State.Recover is done in
       --  Supervisor.Initialize.
 
-      To_Recover (Parser_State.Stack, Parser_State.Tree, Config.Stack);
+      Config.Input_Stream.Initialize;
+      To_Recover (Parser_State.Stream, Tree, Config.Stack, Config.Input_Stream);
 
-      --  Parser_State.Recover_Insert_Delete must be empty (else we would not 
get
-      --  here). Therefore Parser_State current token is in
-      --  Shared_Parser.Shared_Token.
+      if Error in Parse_Error then
+         Config.Error_Token := Tree.Get_Recover_Token (Error_Node);
 
-      Config.Current_Shared_Token := Parser_State.Shared_Token;
-
-      case Error.Label is
-      when Action =>
-         Config.Error_Token := Parser_State.Tree.Recover_Token 
(Error.Error_Token);
          if Trace_McKenzie > Detail then
-            Put ("enqueue", Trace, Parser_State.Label, 
Shared_Parser.Terminals, Config,
-                 Task_ID => False);
+            Put ("enqueue", Tree, Parser_State.Stream, Config);
          end if;
 
-      when Check =>
+      elsif Error in In_Parse_Action_Error then
          if Shared_Parser.Language_Fixes = null then
             --  The only fix is to ignore the error.
             if Trace_McKenzie > Detail then
                Config.Strategy_Counts (Ignore_Error) := 1;
-               Put ("enqueue", Trace, Parser_State.Label, 
Shared_Parser.Terminals, Config,
-                    Task_ID => False);
+               Put ("enqueue", Tree, Parser_State.Stream, Config);
             end if;
 
          else
@@ -179,58 +167,48 @@ package body WisiToken.Parse.LR.McKenzie_Recover is
             --  the root config. Later logic will enqueue the 'ignore error'
             --  solution; see McKenzie_Recover.Explore Process_One.
 
-            Config.Check_Status      := Error.Check_Status;
-            Config.Error_Token       := Config.Stack.Peek.Token;
-            Config.Check_Token_Count := Undo_Reduce (Config.Stack, 
Parser_State.Tree);
+            --  Undo_Reduce can be invalid here; see ada-mode/test/ada_mode-recover_27.adb
+            if Undo_Reduce_Valid (Super, Shared_Parser, Config) then
+               Config.In_Parse_Action_Status := In_Parse_Action_Error (Error).Status;
+               Config.Error_Token            := Config.Stack.Peek.Token;
 
-            Config_Op_Arrays.Append (Config.Ops, (Undo_Reduce, 
Config.Error_Token.ID, Config.Check_Token_Count));
+               Unchecked_Undo_Reduce (Super, Shared_Parser, Config);
 
-            if Trace_McKenzie > Detail then
-               Put ("undo_reduce " & Image
-                      (Config.Error_Token.ID, Trace.Descriptor.all), Trace, 
Parser_State.Label,
-                    Shared_Parser.Terminals, Config, Task_ID => False);
+               Config.In_Parse_Action_Token_Count := Element (Config.Ops, Last_Index (Config.Ops)).Token_Count;
+
+               if Trace_McKenzie > Detail then
+                  Put
+                    ("undo_reduce " & Image
+                       (Tree.Element_ID (Config.Error_Token), Tree.Lexer.Descriptor.all),
+                       Tree, Parser_State.Stream, Config);
+               end if;
+            else
+               --  Ignore error
+               if Trace_McKenzie > Detail then
+                  Config.Strategy_Counts (Ignore_Error) := 1;
+                  Put ("enqueue", Tree, Parser_State.Stream, Config);
+               end if;
             end if;
          end if;
 
-      when Message =>
-         --  Last error entry should be the failure that caused us to enter
-         --  recovery.
+      else
          raise SAL.Programmer_Error;
-      end case;
+      end if;
 
       Parser_State.Recover.Config_Heap.Add (Config);
    end Recover_Init;
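
Resume_Token_Goal above is just the sequential index of the current token (falling back to 0 when the index is invalid, e.g. on empty source text) plus McKenzie_Param.Check_Limit. A minimal sketch of that computation, with illustrative types:

--  Simplified sketch (not the WisiToken API) of the Resume_Token_Goal computation.
with Ada.Text_IO; use Ada.Text_IO;

procedure Resume_Goal_Sketch is
   Invalid_Sequential_Index : constant Integer := Integer'First;

   function Resume_Token_Goal
     (Current_Index : Integer;     --  sequential index of the current token
      Check_Limit   : Natural)     --  tokens that must be shifted past the error
     return Integer
   is ((if Current_Index = Invalid_Sequential_Index then 0 else Current_Index) + Check_Limit);

   Goal : constant Integer := Resume_Token_Goal (Current_Index => 42, Check_Limit => 3);
begin
   Put_Line ("resume goal:" & Integer'Image (Goal));   --  prints 45
end Resume_Goal_Sketch;
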
 
    function Recover (Shared_Parser : in out LR.Parser.Parser) return 
Recover_Status
    is
-      use all type Parser.Post_Recover_Access;
-      Trace : WisiToken.Trace'Class renames Shared_Parser.Trace.all;
-
+      Tree    : Syntax_Trees.Tree renames Shared_Parser.Tree;
+      Trace   : WisiToken.Trace'Class renames Tree.Lexer.Trace.all;
       Parsers : Parser_Lists.List renames Shared_Parser.Parsers;
 
+      Initial_Memory_Use : constant GNATCOLL.Memory.Watermark_Info := GNATCOLL.Memory.Get_Ada_Allocations;
+
       Skip_Next : Boolean := False;
 
-      Super : aliased Base.Supervisor
-        (Trace'Access,
-         Check_Delta_Limit => 
Shared_Parser.Table.McKenzie_Param.Check_Delta_Limit,
-         Enqueue_Limit     => Shared_Parser.Table.McKenzie_Param.Enqueue_Limit,
-         Parser_Count      => Parsers.Count);
-
-      Shared : aliased Base.Shared
-        (Shared_Parser.Trace,
-         Shared_Parser.Lexer.all'Access,
-         Shared_Parser.Table,
-         Shared_Parser.Language_Fixes,
-         Shared_Parser.Language_Matching_Begin_Tokens,
-         Shared_Parser.Language_String_ID_Set,
-         Shared_Parser.Terminals'Access,
-         Shared_Parser.Line_Begin_Token'Access);
-
-      Task_Count : constant System.Multiprocessors.CPU_Range :=
-        (if Shared_Parser.Table.McKenzie_Param.Task_Count = 0
-         then Worker_Tasks'Last
-         --  Keep one CPU free for this main task, and the user.
-         else Shared_Parser.Table.McKenzie_Param.Task_Count);
+      Super : Base.Supervisor (Parser_Count => Shared_Parser.Parsers.Count);
 
    begin
       if Trace_McKenzie > Outline then
@@ -238,55 +216,29 @@ package body WisiToken.Parse.LR.McKenzie_Recover is
          Trace.Put_Line (" McKenzie error recovery");
       end if;
 
-      Super.Initialize (Parsers'Unrestricted_Access, 
Shared_Parser.Terminals'Unrestricted_Access);
+      Super.Initialize (Shared_Parser);
 
       for Parser_State of Parsers loop
-         Recover_Init (Shared_Parser, Parser_State);
+         Recover_Init (Super, Shared_Parser, Parser_State);
       end loop;
 
-      if Trace_McKenzie > Outline then
-         Trace.New_Line;
-         Trace.Put_Line (Task_Count'Image & " parallel tasks");
-      end if;
-
-      for I in Worker_Tasks'First .. Task_Count loop
-         if Worker_Tasks (I) = null then
-            Worker_Tasks (I) := new Worker_Task;
-            if Debug_Mode then
-               Trace.Put_Line ("new Worker_Task" & 
System.Multiprocessors.CPU_Range'Image (I));
-            end if;
-
-         elsif Worker_Tasks (I)'Terminated then
-            Free (Worker_Tasks (I));
-            Worker_Tasks (I) := new Worker_Task;
-            if Debug_Mode then
-               Trace.Put_Line ("recreated Worker_Task" & 
System.Multiprocessors.CPU_Range'Image (I));
-            end if;
-         end if;
-
-         Worker_Tasks (I).Start (Integer (I), Super'Unchecked_Access, 
Shared'Unchecked_Access);
+      loop
+         exit when Super.Done;
+         Explore.Process_One (Super, Shared_Parser);
       end loop;
 
-      declare
-         use Ada.Exceptions;
-         ID      : Exception_Id;
-         Message : Ada.Strings.Unbounded.Unbounded_String;
-      begin
-         Super.Done (ID, Message); -- Wait for all parsers to fail or succeed
-
-         --  Ensure all worker tasks stop getting configs before proceeding;
-         --  otherwise local variables disappear while the task is still trying
-         --  to access them.
-         for I in Worker_Tasks'First .. Task_Count loop
-            if not Worker_Tasks (I)'Terminated then
-               Worker_Tasks (I).Done;
-            end if;
-         end loop;
+      Super.Finish (Shared_Parser);
 
-         if ID /= Null_Id then
-            Raise_Exception (ID, -Message);
-         end if;
-      end;
+      if Trace_Memory > Outline then
+         declare
+            use GNATCOLL.Memory;
+            Memory_Use : constant Watermark_Info := Get_Ada_Allocations;
+         begin
+            Trace.Put_Line
+              ("recover memory use: high" & Byte_Count'Image (Memory_Use.High - Initial_Memory_Use.High) &
+                 " current" & Byte_Count'Image (Memory_Use.Current - Initial_Memory_Use.Current));
+         end;
+      end if;
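
The memory report relies on GNATCOLL.Memory allocation watermarks. Below is a minimal sketch of the same measurement, assuming the program is built so that Ada allocations are routed through GNATCOLL.Memory and the monitor is activated (see the GNATCOLL documentation); outside that setup the counters simply stay at zero.

--  Minimal sketch of watermark-based memory reporting with GNATCOLL.Memory.
with Ada.Text_IO; use Ada.Text_IO;
with GNATCOLL.Memory;

procedure Memory_Watermark_Sketch is
   use GNATCOLL.Memory;
   Before, After : Watermark_Info;
   P : access Integer;
begin
   Configure (Activate_Monitor => True);

   Before := Get_Ada_Allocations;
   P := new Integer'(42);                 --  some Ada heap allocation to measure
   After := Get_Ada_Allocations;

   Put_Line
     ("high" & Byte_Count'Image (After.High - Before.High) &
        " current" & Byte_Count'Image (After.Current - Before.Current));
end Memory_Watermark_Sketch;
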
 
       --  Spawn new parsers for multiple solutions.
       --
@@ -305,7 +257,7 @@ package body WisiToken.Parse.LR.McKenzie_Recover is
 
          Cur         : Cursor             := Parsers.First;
          Solutions   : SAL.Base_Peek_Type := 0;
-         Spawn_Limit : SAL.Base_Peek_Type := Shared_Parser.Max_Parallel; -- 
per parser
+         Spawn_Limit : SAL.Base_Peek_Type := Shared_Parser.Table.Max_Parallel;
       begin
          for Parser of Parsers loop
             if Parser.Recover.Success then
@@ -313,19 +265,19 @@ package body WisiToken.Parse.LR.McKenzie_Recover is
             end if;
          end loop;
 
-         if Solutions > Shared_Parser.Max_Parallel and Trace_McKenzie > 
Outline then
+         if Solutions > Shared_Parser.Table.Max_Parallel and Trace_McKenzie > Outline then
             Trace.Put_Line ("too many parallel parsers required in recover; dropping some solutions");
-            Spawn_Limit := Shared_Parser.Max_Parallel / Parsers.Count;
+            Spawn_Limit := Shared_Parser.Table.Max_Parallel / Parsers.Count;
          end if;
 
          loop
             declare
-               Data : McKenzie_Data renames State_Ref (Cur).Recover;
+               Data : McKenzie_Data renames Cur.State_Ref.Recover;
             begin
                if Data.Success then
                   if Trace_McKenzie > Outline then
                      Trace.Put_Line
-                       (Integer'Image (Label (Cur)) &
+                       (" " & Tree.Trimmed_Image (Cur.Stream) &
                           ": succeed" & SAL.Base_Peek_Type'Image 
(Data.Results.Count) &
                           ", enqueue" & Integer'Image (Data.Enqueue_Count) &
                           ", check " & Integer'Image (Data.Check_Count) &
@@ -334,14 +286,17 @@ package body WisiToken.Parse.LR.McKenzie_Recover is
 
                   if Data.Results.Count > 1 then
                      for I in 1 .. SAL.Base_Peek_Type'Min (Spawn_Limit, 
Data.Results.Count - 1) loop
-                        Parsers.Prepend_Copy (Cur); --  does not copy recover
+                        Parsers.Prepend_Copy
+                          (Cur, Tree, Syntax_Trees.User_Data_Access_Constant (Shared_Parser.User_Data), Trace);
+                        --  Does not copy recover.
+
                         if Trace_McKenzie > Outline or Trace_Parse > Outline 
then
                            Trace.Put_Line
-                             ("spawn parser" & Integer'Image 
(Parsers.First.Label) & " from " &
-                                Trimmed_Image (Cur.Label) & " (" & 
Trimmed_Image (Integer (Parsers.Count)) &
+                             ("spawn " & Tree.Trimmed_Image 
(Parsers.First.Stream) & " from " &
+                                Tree.Trimmed_Image (Cur.Stream) & " (" &
+                                Trimmed_Image (Integer (Parsers.Count)) &
                                 " active)");
-                           Put ("", Trace, Parsers.First.Label, 
Shared_Parser.Terminals,
-                                Data.Results.Peek, Task_ID => False, Strategy 
=> True);
+                           Put ("", Tree, Parsers.First.Stream, Data.Results.Peek, Strategy => True);
                         end if;
 
                         State_Ref (Parsers.First).Recover.Results.Add 
(Data.Results.Remove);
@@ -350,20 +305,19 @@ package body WisiToken.Parse.LR.McKenzie_Recover is
                   end if;
 
                   if Trace_McKenzie > Outline or Trace_Parse > Outline then
-                     Put ("", Trace, Cur.State_Ref.Label, 
Shared_Parser.Terminals, Data.Results.Peek,
-                          Task_ID => False, Strategy => True);
+                     Put ("", Tree, Cur.Stream, Data.Results.Peek, Strategy => True);
                   end if;
                else
                   if Trace_McKenzie > Outline then
                      Trace.Put_Line
-                       (Integer'Image (Cur.Label) &
+                       (" " & Tree.Trimmed_Image (Cur.Stream) &
                           ": fail, enqueue" & Integer'Image 
(Data.Enqueue_Count) &
                           (if Data.Config_Full_Count > 0 then ", config_full" 
& Data.Config_Full_Count'Image else "") &
-                          ", check " & Integer'Image (Data.Check_Count) &
-                          ", max shared_token " & WisiToken.Token_Index'Image 
(Shared_Parser.Terminals.Last_Index));
+                          ", check " & Integer'Image (Data.Check_Count));
                   end if;
                end if;
 
+               Data.Config_Heap.Clear;
             end;
             Next (Cur);
             exit when Is_Done (Cur);
@@ -385,42 +339,65 @@ package body WisiToken.Parse.LR.McKenzie_Recover is
                   --  Can't have active 'renames State_Ref' when terminate a 
parser
                   declare
                      use Parser_Lists;
-                     use Config_Op_Arrays, Config_Op_Array_Refs;
+                     use Recover_Op_Arrays, Recover_Op_Array_Refs;
+                     use Syntax_Trees;
 
                      Parser_State : Parser_Lists.Parser_State renames 
Current_Parser.State_Ref;
 
-                     Descriptor : WisiToken.Descriptor renames 
Shared_Parser.Trace.Descriptor.all;
-                     Stack      : Parser_Stacks.Stack renames 
Parser_State.Stack;
-                     Tree       : Syntax_Trees.Tree renames Parser_State.Tree;
-                     Data       : McKenzie_Data renames Parser_State.Recover;
-                     Result     : Configuration renames Data.Results.Peek;
+                     Stack  : Stream_ID renames Parser_State.Stream;
+                     Result : Configuration renames Parser_State.Recover.Results.Peek;
+
+                     --  We have to use Tree.Update_Error to set components of the current
+                     --  error, because different parsers can set different data in the
+                     --  same error. We can't save a copy of the current error, because
+                     --  Undo_Reduce changes an In_Parse_Action_Error to a Message_Error.
+                     --  We keep a local recover_op_nodes here to accumulate ops until
+                     --  we've applied all of Result.Ops, to avoid copying the error node
+                     --  for each op.
+                     Error_Recover_Ops : Recover_Op_Nodes_Arrays.Vector := To_Recover_Op_Nodes (Result.Ops);
+                     --  WORKAROUND: GNAT Community 2021 is confused about this being constant
+                     pragma Warnings (Off, Error_Recover_Ops);
+
+                     Op_Index : SAL.Base_Peek_Type := No_Insert_Delete;
+                     --  Current op in Error_Recover_Ops, for setting Ins_Node, Del_Node.
+
+                     Insert_Delete_Matches_Ops : Boolean := True;
+                     --  True if all insert/delete ops are performed here;
+                     --  Parser_State.Recover_Insert_Delete_Current is left at No_Element.
+
+                     Stack_Matches_Ops : Boolean := True;
+                     First_Insert      : Boolean := True;
+
+                     Last_Recover_Node_Index : Sequential_Index := Sequential_Index'First;
+
+                     Pre_FF_Index : Base_Sequential_Index := Invalid_Sequential_Index;
+                     --  The current token sequential_index before a fast_forward, when
+                     --  Stack_Matches_Ops is true before the Fast_Forward.
 
-                     Stack_Matches_Ops     : Boolean := True;
-                     Shared_Token_Changed  : Boolean := False;
-                     Current_Token_Virtual : Boolean := False;
-                     First_Insert          : Boolean := True;
                   begin
                      --  The verb will be reset by the main parser; just indicate the
                      --  parser recovered from the error.
                      Parser_State.Set_Verb (Shift);
 
-                     Parser_State.Errors (Parser_State.Errors.Last).Recover := Result;
+                     Parser_State.Set_Current_Error_Features (Tree);
+
+                     pragma Assert (Parser_State.Current_Recover_Op = No_Insert_Delete);
+
+                     Parser_State.Total_Recover_Cost := @ + Result.Cost;
+                     Parser_State.Max_Recover_Ops_Length := Ada.Containers.Count_Type'Max
+                       (@, Length (Result.Ops));
 
                      Parser_State.Resume_Token_Goal := Result.Resume_Token_Goal;
 
                      if Trace_McKenzie > Extra then
-                        Put_Line (Trace, Parser_State.Label, "before Ops applied:", Task_ID => False);
-                        Put_Line
-                          (Trace, Parser_State.Label, "stack " & Image (Stack, Descriptor, Tree),
-                           Task_ID => False);
-                        Put_Line
-                          (Trace, Parser_State.Label, "Shared_Token  " & Image
-                             (Parser_State.Shared_Token, Shared_Parser.Terminals, Descriptor),
-                           Task_ID => False);
-                        Put_Line
-                          (Trace, Parser_State.Label, "Current_Token " & Parser_State.Tree.Image
-                             (Parser_State.Current_Token, Descriptor),
-                           Task_ID => False);
+                        Put_Line (Tree, Parser_State.Stream, "before Ops applied:");
+                        Trace.Put_Line
+                          ("   stack/stream:" & ASCII.LF & Tree.Image
+                             (Parser_State.Stream, Stack => True, Input => True, Shared => True, Children => True));
+                        Trace.Put_Line
+                          ("   Shared_Token  " & Tree.Image (Tree.Shared_Token (Parser_State.Stream)));
+                        Trace.Put_Line
+                          ("   Current_Token " & Tree.Image (Tree.Current_Token (Parser_State.Stream)));
                      end if;
 
                      --  We don't apply all Ops to the parser stack here, because there can
@@ -428,108 +405,173 @@ package body WisiToken.Parse.LR.McKenzie_Recover is
                      --  can be conflicts; we let the main parser handle that. We can apply
                      --  all ops up to the first insert.
                      --
-                     --  Other than Add_Terminal, there's no need to modify
-                     --  Parser_State.Tree. Any tree nodes created by the failed parse that
-                     --  are pushed back are useful for error repair, and will just be
-                     --  ignored in future parsing. This also avoids enlarging a
-                     --  non-flushed branched tree, which saves time and space.
+                     --  Other than Add_Terminal, there's no need to modify Tree. Any tree
+                     --  nodes created by the failed parse that are pushed back are useful
+                     --  for error repair, and will just be ignored in future parsing. This
+                     --  also avoids enlarging a non-flushed branched tree, which saves
+                     --  time and space.
                      --
                      --  Language_Fixes may abuse the rules about adding Ops, so we check
-                     --  that as much as is reasonable here. We use Assert to get an
-                     --  immediate error in a debug build, and raise Bad_Config to avoid
-                     --  further corruption in a release build.
+                     --  that as much as is reasonable here.
 
                      for I in First_Index (Result.Ops) .. Last_Index (Result.Ops) loop
                         declare
-                           Op : Config_Op renames Constant_Ref (Result.Ops, I);
+                           Op : Recover_Op renames Constant_Ref (Result.Ops, I);
+
+                           --  We don't declare Current_Token here, because Delete may need to
+                           --  delete it.
+
+                           procedure Raise_Bad_Config (Message : in String)
+                           is begin
+                              if Debug_Mode then
+                                 raise SAL.Programmer_Error with Message;
+                              end if;
+
+                              if Trace_McKenzie > Outline then
+                                 Put_Line (Tree, Parser_State.Stream, Message);
+                              end if;
+                              raise Bad_Config;
+                           end Raise_Bad_Config;
                         begin
                            case Op.Op is
                            when Fast_Forward =>
-                              --  The parser would do shifts and reduces for the tokens we are
-                              --  skipping here
-                              Stack_Matches_Ops := False;
+                              declare
+                                 Current_Token_Node : constant Node_Access := 
Tree.Current_Token
+                                   (Parser_State.Stream).Node;
+                              begin
+                                 if Stack_Matches_Ops then
+                                    if Op.FF_Next_Index = 
Tree.Get_Sequential_Index
+                                      (Tree.First_Sequential_Terminal 
(Current_Token_Node))
+                                    then
+                                       --  Fast_Forward is a noop. test_mckenzie_recover String_Quote_5.
+                                       null;
+
+                                    else
+                                       if Tree.Label (Current_Token_Node) = 
Nonterm then
+                                          declare
+                                             Target : Stream_Node_Parents;
+
+                                             procedure Find_FF_Target (Ref : 
in Stream_Node_Ref)
+                                             is begin
+                                                Target := 
Tree.To_Stream_Node_Parents (Ref);
+                                                Tree.First_Sequential_Terminal 
(Target, Following => True);
+                                                loop
+                                                   exit when 
Tree.Get_Sequential_Index (Target.Ref.Node) =
+                                                     Op.FF_Next_Index;
+                                                   
Tree.Next_Sequential_Terminal (Target, Following => True);
+                                                end loop;
+                                             end Find_FF_Target;
+
+                                          begin
+                                             Find_FF_Target 
(Tree.Current_Token (Parser_State.Stream));
+
+                                             if Tree.First_Terminal
+                                               (Tree.Get_Node 
(Target.Ref.Stream, Target.Ref.Element)) /=
+                                               Target.Ref.Node
+                                             then
+                                                --  Target is a nonterm that should not be shifted as a whole
+                                                --  (otherwise FF index would be after Target), so break it down.
+                                                --  ada_mode-recover_bad_char.adb
+                                                if Target.Ref.Stream = Tree.Shared_Stream then
+                                                   --  First we need to move all tokens Shared_Token .. Target
+                                                   --  to the input stream. ada_mode-recover_10.adb
+                                                   pragma Assert
+                                                     (Tree.Shared_Token 
(Parser_State.Stream) =
+                                                        Tree.Current_Token 
(Parser_State.Stream));
+
+                                                   Tree.Move_Shared_To_Input
+                                                     (First  => 
Tree.Current_Token (Parser_State.Stream),
+                                                      Last   => Target.Ref,
+                                                      Stream => 
Parser_State.Stream);
+
+                                                   Find_FF_Target 
(Tree.First_Input (Parser_State.Stream));
+                                                end if;
+                                                Tree.Breakdown
+                                                  (Target, 
Shared_Parser.Productions,
+                                                   
Syntax_Trees.User_Data_Access_Constant (Shared_Parser.User_Data),
+                                                   First_Terminal => True);
+                                             end if;
+                                          end;
+                                       end if;
+
+                                       --  The parser would do shifts and reduces for the tokens we are
+                                       --  skipping here
+                                       Stack_Matches_Ops := False;
+
+                                       Pre_FF_Index := 
Tree.Get_Sequential_Index
+                                         (Tree.First_Sequential_Terminal 
(Current_Token_Node));
+                                    end if;
+
+                                 else
+                                    Pre_FF_Index := Invalid_Sequential_Index;
+                                 end if;
+                              end;
 
                            when Undo_Reduce =>
                              --  If Stack_Matches_Ops, we must do the Stack.Pop and Pushes, and we
                              --  can use Stack.Peek to check if the Undo_Reduce is valid.
                              --
-                              --  If not Stack_Matches_Ops, we have to assume Undo_Reduce is valid.
+                              --  If not Stack_Matches_Ops, we have to assume Op.UR_Token_Index is correct.
                              --
                              --  See test_mckenzie_recover.adb Extra_Begin for an example of Undo_Reduce
                              --  after other ops.
+                              --
+                              --  We can't use McKenzie_Recover.Undo_Reduce_Valid here; that takes a
+                              --  Config stack, not a parser stack. So we duplicate part of it.
                               if Stack_Matches_Ops then
-                                 if not (Tree.Is_Nonterm (Stack.Peek.Token) and
-                                           (I = First_Index (Result.Ops) or 
else
-                                              Push_Back_Valid
-                                                (Tree.First_Shared_Terminal 
(Stack.Peek.Token), Result.Ops, I - 1)))
+                                 if not (Nonterm = Tree.Label (Tree.Peek 
(Stack)) and
+                                           Op.Nonterm = Tree.ID 
(Parser_State.Stream, Tree.Peek (Stack)))
                                  then
-                                    pragma Assert (False);
-                                    if Trace_McKenzie > Outline then
-                                       Put_Line
-                                         (Trace, Parser_State.Label, "invalid 
Undo_Reduce in apply config",
-                                          Task_ID => False);
-                                    end if;
-                                    raise Bad_Config;
+                                    Raise_Bad_Config ("Undo_Reduce does not 
match stack top in apply config");
                                  end if;
+                              end if;
 
-                                 for C of Tree.Children (Stack.Pop.Token) loop
-                                    Stack.Push ((Tree.State (C), C));
-                                 end loop;
+                              if Stack_Matches_Ops then
+                                 Parser_State.Undo_Reduce
+                                   (Tree, Shared_Parser.Table.all,
+                                    Syntax_Trees.User_Data_Access_Constant 
(Shared_Parser.User_Data));
                               end if;
 
                            when Push_Back =>
-                              --  If Stack_Matches_Ops, we must do the Stack.Pop, and can use that
-                              --  to check if the Push_Back is valid.
-                              --
-                              --  If not Stack_Matches_Ops, we have to assume Op.PB_Token_Index is
-                              --  correct, and we do not do Stack.Pop. We can still check the target
-                              --  token index against the previous ops.
-                              --
-                              --  See test_mckenzie_recover.adb Erorr_2 for an example of Push_Back
-                              --  after other ops.
-                              if not
-                                (I = First_Index (Result.Ops) or else
-                                   Push_Back_Valid
-                                     (Target_Token_Index =>
-                                        (if Stack_Matches_Ops
-                                         then Tree.First_Shared_Terminal 
(Stack.Peek.Token)
-                                         else Op.PB_Token_Index),
-                                      Ops     => Result.Ops,
-                                      Prev_Op => I - 1))
-                              then
-                                 if Trace_McKenzie > Outline then
-                                    Put_Line
-                                      (Trace, Parser_State.Label, "invalid 
Push_Back in apply config op" & I'Image,
-                                       Task_ID => False);
-                                 end if;
-                                 pragma Assert (False);
-                                 raise Bad_Config;
-                              end if;
+                              --  If Stack_Matches_Ops, we must do the Tree.Push_Back.
 
-                              if Stack_Matches_Ops then
-                                 Stack.Pop;
+                              --  If not Stack_Matches_Ops, we assume Push_Back_Valid ensures that
+                              --  the Push_Back is indeed valid here, so the main parser will not
+                              --  encounter an error; test_mckenzie_recover.adb Error_3.
 
-                                 if Op.PB_Token_Index /= Invalid_Token_Index 
then
-                                    --  Pushing back an empty nonterm has no effect on the input stream.
-                                    Parser_State.Shared_Token := 
Op.PB_Token_Index;
-                                    Shared_Token_Changed      := True;
+                              if Stack_Matches_Ops then
+                                 if not (Op.PB_ID = Tree.ID 
(Parser_State.Stream, Tree.Peek (Stack))) then
+                                    Raise_Bad_Config
+                                      ("Push_Back does not match stack top in 
apply config: " &
+                                         Image (Op, 
Tree.Lexer.Descriptor.all));
                                  end if;
+                                 Tree.Push_Back (Parser_State.Stream);
+                              else
+                                 pragma Assert (I > 1); --  else stack_matches_ops is true.
+                                 declare
+                                    Prev_Op : Recover_Op renames Constant_Ref 
(Result.Ops, I - 1);
+                                 begin
+                                    if (Prev_Op.Op = Fast_Forward and 
Op.PB_Token_Index /= Invalid_Sequential_Index)
+                                      and then Pre_FF_Index = Op.PB_Token_Index
+                                    then
+                                       --  This Push_Back exactly cancels the previous Fast_Forward, so we
+                                       --  must apply following insert/delete. test_mckenzie_recover.adb
+                                       --  Push_Back_2.
+                                       Stack_Matches_Ops := True;
+                                    end if;
+                                 end;
+                                 Pre_FF_Index := Invalid_Sequential_Index;
                               end if;
 
                            when Insert =>
-                              Recover_Op_Arrays.Append
-                                (Parser_State.Recover_Insert_Delete,
-                                 (Op              => Insert,
-                                  Ins_ID          => Op.Ins_ID,
-                                  Ins_Token_Index => Op.Ins_Token_Index,
-                                  Ins_Tree_Node   => Invalid_Node_Index));
-
-                              if Parser_State.Recover_Insert_Delete_Current = 
No_Index then
-                                 Parser_State.Recover_Insert_Delete_Current :=
-                                   Recover_Op_Arrays.Last_Index 
(Parser_State.Recover_Insert_Delete);
+                              Op_Index := @ + 1;
+                              if Parser_State.Current_Recover_Op = 
No_Insert_Delete then
+                                 Parser_State.First_Recover_Op;
                               end if;
 
-                              if First_Insert and Op.Ins_Token_Index = 
Parser_State.Shared_Token then
+                              if First_Insert and Op.Ins_Before = 
Tree.Get_Sequential_Index
+                                (Tree.First_Sequential_Terminal 
(Tree.Current_Token (Parser_State.Stream)).Node)
+                              then
                                 --  We need First_Insert here, not just Stack_Matches_Ops, when the
                                 --  first insert is preceded only by Push_Back and Undo_Reduce, with
                                 --  at least one Undo_Reduce (so Stack_Matches_Ops is False when we
@@ -537,101 +579,138 @@ package body WisiToken.Parse.LR.McKenzie_Recover is
 
                                  First_Insert := False;
 
+                                 Error_Recover_Ops (Op_Index).Ins_Node :=
+                                   Tree.Insert_Virtual_Terminal 
(Parser_State.Stream, Op.Ins_ID).Node;
+                                 --  Modifies Tree.Current_Token
+
+                                 Parser_State.Next_Recover_Op (Tree);
+
                                  --  Normally Insert is completed by Stack.Push; we let the main parser
                                  --  do that.
                                  Stack_Matches_Ops := False;
 
-                                 --  Add_Terminal is normally done in the lexer phase, so we do this here.
-                                 Parser_State.Current_Token := 
Parser_State.Tree.Add_Terminal
-                                   (Op.Ins_ID, Op.Ins_Token_Index);
-                                 Recover_Op_Array_Refs.Variable_Ref
-                                   (Parser_State.Recover_Insert_Delete,
-                                    Recover_Op_Arrays.Last_Index 
(Parser_State.Recover_Insert_Delete)).Ins_Tree_Node :=
-                                      Parser_State.Current_Token;
-
-                                 Current_Token_Virtual                      := 
True;
-                                 Parser_State.Recover_Insert_Delete_Current := 
No_Index;
                               else
-                                 --  Let main parser handle it
-                                 null;
+                                 Insert_Delete_Matches_Ops := False;
                               end if;
 
                            when Delete =>
-                              Recover_Op_Arrays.Append
-                                (Parser_State.Recover_Insert_Delete,
-                                 (Op              => Delete,
-                                  Del_ID          => Op.Del_ID,
-                                  Del_Token_Index => Op.Del_Token_Index));
-
-                              if Stack_Matches_Ops and Op.Del_Token_Index = 
Parser_State.Shared_Token then
-                                 --  Delete has no effect on Stack, so we can 
apply multiple deletes.
-                                 Parser_State.Shared_Token := 
Op.Del_Token_Index + 1;
-                                 Shared_Token_Changed      := True;
-
-                                 Parser_State.Recover_Insert_Delete_Current := 
No_Index;
-                              else
-                                 if Parser_State.Recover_Insert_Delete_Current 
= No_Index then
-                                    Parser_State.Recover_Insert_Delete_Current 
:=
-                                      Recover_Op_Arrays.Last_Index 
(Parser_State.Recover_Insert_Delete);
-                                 end if;
-
+                              Op_Index := @ + 1;
+                              if Parser_State.Current_Recover_Op = 
No_Insert_Delete then
+                                 Parser_State.First_Recover_Op;
                               end if;
 
+                              if Op.Del_Token_Index < Last_Recover_Node_Index then
+                                 Raise_Bad_Config ("Delete is out of order");
+                              end if;
+                              Last_Recover_Node_Index := Op.Del_Token_Index;
+
+                              --  We have to apply more than one delete here if they are
+                              --  consecutive (for example, ada_mode-recover_extra_end_loop.adb
+                              --  deletes "end loop ;"), because the main parser expects
+                              --  Current_Token to be correct before checking for
+                              --  Delete on return from Recover.
+                              declare
+                                 Deleted_Node : constant Valid_Node_Access := 
Tree.First_Terminal
+                                   (Tree.Current_Token 
(Parser_State.Stream)).Node;
+                              begin
+                                 if Stack_Matches_Ops and 
Insert_Delete_Matches_Ops and
+                                   Op.Del_Token_Index = 
Tree.Get_Sequential_Index (Deleted_Node)
+                                 then
+                                    Parser_State.Next_Recover_Op (Tree);
+
+                                    declare
+                                       --  WORKAROUND: GNAT Community 2021 reports "'Op' must be a variable"
+                                       --  if we use this expression for the Op parameter.
+                                       Op : Recover_Op_Nodes renames 
Error_Recover_Ops (Op_Index);
+                                    begin
+                                       Parser_State.Do_Delete
+                                         (Tree, Op,
+                                          User_Data_Access_Constant 
(Shared_Parser.User_Data));
+                                    end;
+
+                                 else
+                                    Insert_Delete_Matches_Ops := False;
+                                 end if;
+                              end;
                            end case;
                         end;
                      end loop;
 
-                     --  If not Shared_Token_Changed, Shared_Token is the error token,
-                     --  which is the next token to read. If Shared_Token_Changed, we have
-                     --  set Shared_Token consistent with that; it is the next token to
-                     --  read. If Current_Token_Virtual, then after all the virtual tokens
-                     --  are inserted, the main parser would normally increment
-                     --  Parser_State.Shared_Token to get the next token, but we don't want
-                     --  that now. We could set Shared_Token to 1 less, but this way the
-                     --  debug messages all show the expected Shared_Terminal.
-
-                     Parser_State.Inc_Shared_Token := not Current_Token_Virtual;
-
-                     --  The main parser always sets Current_Token to be the syntax tree
-                     --  node containing Shared_Token; ensure that is true here (virtual
-                     --  tokens where handled above).
+                     declare
+                        Err : Error_Data'Class := Syntax_Trees.Error (Parser_State.Current_Error_Ref (Tree));
+                     begin
+                        Recover_Op_Array_Var_Ref (Err) := Error_Recover_Ops;
+
+                        if Test_McKenzie_Recover then
+                           Recover_Test_Var_Ref (Err) := new Recover_Test_Info'
+                             (Ops           => Result.Ops,
+                              Cost          => Result.Cost,
+                              Enqueue_Count => Parser_State.Recover.Enqueue_Count,
+                              Check_Count   => Parser_State.Recover.Check_Count);
+                        end if;
 
-                     if (not Current_Token_Virtual) and Shared_Token_Changed then
-                        Parser_State.Current_Token := Shared_Parser.Terminals
-                          (Parser_State.Shared_Token).Tree_Index;
-                     end if;
+                        Parser_State.Update_Error (Tree, Err, User_Data_Access_Constant (Shared_Parser.User_Data));
+                     end;
 
                      if Trace_McKenzie > Extra then
-                        Put_Line (Trace, Parser_State.Label, "after Ops 
applied:", Task_ID => False);
-                        Put_Line
-                          (Trace, Parser_State.Label, "stack " & 
Parser_Lists.Image
-                             (Stack, Descriptor, Tree),
-                           Task_ID => False);
-                        Put_Line
-                          (Trace, Parser_State.Label, "Shared_Token  " & Image
-                             (Parser_State.Shared_Token, 
Shared_Parser.Terminals, Descriptor), Task_ID => False);
-                        Put_Line
-                          (Trace, Parser_State.Label, "Current_Token " & 
Parser_State.Tree.Image
-                             (Parser_State.Current_Token, Descriptor), Task_ID 
=> False);
-                        Put_Line
-                          (Trace, Parser_State.Label, "recover_insert_delete" &
-                             Parser_State.Recover_Insert_Delete_Current'Image 
& ":" &
-                             Image (Parser_State.Recover_Insert_Delete, 
Descriptor), Task_ID => False);
-                        Put_Line
-                          (Trace, Parser_State.Label, "inc_shared_token " &
-                             Boolean'Image (Parser_State.Inc_Shared_Token),
-                           Task_ID => False);
+                        Put_Line (Tree, Parser_State.Stream, "after Ops 
applied:");
+                        Trace.Put_Line
+                          ("   stack/stream:" & ASCII.LF & Tree.Image
+                             (Parser_State.Stream, Stack => True, Input => 
True, Shared => True, Children => True));
+                        Trace.Put_Line
+                          ("   Shared_Token  " & Tree.Image (Tree.Shared_Token 
(Parser_State.Stream)));
+                        Trace.Put_Line
+                          ("   Current_Token " & Tree.Image 
(Tree.Current_Token (Parser_State.Stream)));
+                        Trace.Put_Line
+                          ("   remaining recover_insert_delete " & 
Parser_State.Recover_Image
+                             (Tree, Current_Only => True));
+                        Trace.Put_Line ("   resume_token_goal" & 
Parser_State.Resume_Token_Goal'Image);
                      end if;
+
+                     Parser_State.Total_Recover_Cost := @ + Parser_State.Recover.Results.Min_Key;
+                     Parser_State.Recover.Results.Clear;
                   end;
                exception
-               when Bad_Config =>
-                  if Parsers.Count = 1 then
-                     --  Oops. just give up
+               when Invalid_Case =>
+                  Parsers.Terminate_Parser (Current_Parser, Tree, "invalid config in recover", Trace);
+                  --  Terminate advances Current_Parser
+                  Skip_Next := True;
+
+                  if Parsers.Count = 0 then
+                     --  Oops. Just give up.
                      return Fail_Programmer_Error;
                   end if;
-                  Parsers.Terminate_Parser (Current_Parser, "bad config in recover", Trace, Shared_Parser.Terminals);
+
+               when E : Bad_Config =>
+                  if Debug_Mode then
+                     Trace.Put_Line (GNAT.Traceback.Symbolic.Symbolic_Traceback (E)); -- includes Prefix
+                  end if;
+
+                  Parsers.Terminate_Parser (Current_Parser, Tree, "bad config in recover", Trace);
                   --  Terminate advances Current_Parser
                   Skip_Next := True;
+
+                  if Parsers.Count = 0 then
+                     --  Oops. Just give up.
+                     return Fail_Programmer_Error;
+                  end if;
+               end;
+
+            else
+               --  Recover failed for this parser. Clear any previous recover
+               --  information on the error that triggered error recovery.
+               declare
+                  use Syntax_Trees;
+                  Parser_State : Parser_Lists.Parser_State renames Current_Parser.State_Ref;
+                  Error_Ref    : constant Stream_Error_Ref := Parser_State.Current_Error_Ref (Tree);
+                  Error        : Error_Data'Class := Syntax_Trees.Error (Error_Ref);
+                  Recover_Ops  : Recover_Op_Nodes_Arrays.Vector renames Recover_Op_Array_Var_Ref (Error);
+               begin
+                  Parser_State.Recover.Results.Clear;
+                  Recover_Ops := Recover_Op_Nodes_Arrays.Empty_Vector;
+                  Tree.Update_Error
+                    (Parser_State.Stream, Error_Ref, Error,
+                     User_Data_Access_Constant (Shared_Parser.User_Data));
                end;
             end if;
             if Skip_Next then
@@ -641,238 +720,531 @@ package body WisiToken.Parse.LR.McKenzie_Recover is
             end if;
          end loop;
       end;
-      if Shared_Parser.Post_Recover /= null then
-         Shared_Parser.Post_Recover.all;
-      end if;
 
       return Super.Recover_Result;
 
    exception
    when E : others =>
+      Clear_Sequential_Index (Shared_Parser);
       if Debug_Mode then
-         Trace.Put (Ada.Exceptions.Exception_Name (E) & ": " & Ada.Exceptions.Exception_Message (E), Prefix => True);
-         Trace.New_Line;
-         raise;
+         Trace.Put_Line (Ada.Exceptions.Exception_Name (E) & ": " & Ada.Exceptions.Exception_Message (E));
+         Trace.Put_Line (GNAT.Traceback.Symbolic.Symbolic_Traceback (E)); -- includes Prefix
+         raise SAL.Programmer_Error;
       else
          return Fail_Programmer_Error;
       end if;
    end Recover;
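
A minimal stand-alone sketch of the apply-ops control flow described in the comments above: ops that only reshape the stack are applied while the parser stack still matches the config, and everything from the first Insert on is left to the main parser. The types are toy stand-ins, not the WisiToken Recover_Op machinery.

   with Ada.Text_IO; use Ada.Text_IO;
   procedure Apply_Ops_Sketch is
      type Op_Kind is (Fast_Forward, Undo_Reduce, Push_Back, Insert, Delete);
      type Op_List is array (Positive range <>) of Op_Kind;

      Ops : constant Op_List := (Push_Back, Insert, Delete);

      Stack_Matches_Ops : Boolean := True;
   begin
      for Op of Ops loop
         case Op is
         when Fast_Forward =>
            --  The main parser will redo the skipped shifts and reduces.
            Stack_Matches_Ops := False;

         when Undo_Reduce | Push_Back =>
            if Stack_Matches_Ops then
               Put_Line ("apply " & Op_Kind'Image (Op) & " to the parser stack");
            end if;

         when Insert =>
            --  Only the first insert is done here; the rest is left to the
            --  main parser, so stop mirroring the stack.
            if Stack_Matches_Ops then
               Put_Line ("insert virtual terminal");
            end if;
            Stack_Matches_Ops := False;

         when Delete =>
            if Stack_Matches_Ops then
               Put_Line ("delete current token");
            end if;
         end case;
      end loop;
   end Apply_Ops_Sketch;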
 
    ----------
-   --  Spec private subprograms; for language-specific
-   --  child packages.
+   --  Spec private subprograms; for child packages. Declaration order
+
+   function Peek_Sequential_Start
+     (Super         :         in out Base.Supervisor;
+      Shared_Parser :         in out LR.Parser.Parser;
+      Config        : aliased in     Configuration)
+     return Peek_Sequential_State
+   is
+      use all type WisiToken.Syntax_Trees.Stream_Node_Ref;
+      Tree : Syntax_Trees.Tree renames Shared_Parser.Tree;
+   begin
+      return State : Peek_Sequential_State (Config.Input_Stream'Access) do
+         Parse.First_Sequential_Terminal (Super, Shared_Parser, 
State.Input_Terminal);
+
+         if Config.Current_Shared_Token = Syntax_Trees.Invalid_Stream_Node_Ref 
then
+            --  test_incremental.adb Preserve_Parse_Errors_1; EOI has error
+            State.Sequential_Terminal := 
Syntax_Trees.Invalid_Stream_Node_Parents;
+            --  This should be the only time we set State.Sequential_Terminal
+            --  Invalid.
+         else
+            State.Sequential_Terminal := Tree.To_Stream_Node_Parents 
(Config.Current_Shared_Token);
+            if Syntax_Trees.Rooted (State.Sequential_Terminal.Ref) or
+              State.Sequential_Terminal.Ref.Node = 
Syntax_Trees.Invalid_Node_Access
+              --  Ref is an empty nonterm. ada_mode-interactive_03.adb
+            then
+               Tree.First_Terminal (State.Sequential_Terminal, Following => 
True);
+               Super.Extend_Sequential_Index
+                 (Shared_Parser, Thru => State.Sequential_Terminal.Ref.Node, 
Positive => True);
+               Tree.First_Sequential_Terminal (State.Sequential_Terminal, 
Following => True);
+               pragma Assert (State.Sequential_Terminal.Ref /= 
Syntax_Trees.Invalid_Stream_Node_Ref);
+            end if;
+         end if;
+      end return;
+   end Peek_Sequential_Start;
 
-   procedure Check (ID : Token_ID; Expected_ID : in Token_ID)
+   function Peek_Sequential_Terminal (State : in Peek_Sequential_State) return Syntax_Trees.Node_Access
    is begin
-      pragma Assert (ID = Expected_ID, Token_ID'Image (ID) & " /=" & Token_ID'Image (Expected_ID));
-   end Check;
+      if State.Input_Terminal.Node = Syntax_Trees.Invalid_Node_Access then
+         return State.Sequential_Terminal.Ref.Node;
+
+      else
+         return State.Input_Terminal.Node;
+      end if;
+   end Peek_Sequential_Terminal;
 
-   function Current_Token
-     (Terminals                 :         in     Base_Token_Arrays.Vector;
-      Terminals_Current         :         in out WisiToken.Base_Token_Index;
-      Restore_Terminals_Current :            out WisiToken.Base_Token_Index;
-      Insert_Delete             : aliased in out Config_Op_Arrays.Vector;
-      Current_Insert_Delete     :         in out SAL.Base_Peek_Type)
-     return Base_Token
+   procedure Peek_Next_Sequential_Terminal
+     (Tree   : in     Syntax_Trees.Tree;
+      State  : in out Peek_Sequential_State)
    is
-      use Config_Op_Arrays;
-      use Config_Op_Array_Refs;
+      use Syntax_Trees;
+   begin
+      if State.Input_Terminal.Node = Invalid_Node_Access then
+         Tree.Next_Sequential_Terminal (State.Sequential_Terminal, Following 
=> True);
 
-      procedure Inc_I_D
-      is begin
-         Current_Insert_Delete := Current_Insert_Delete + 1;
-         if Current_Insert_Delete > Last_Index (Insert_Delete) then
-            Current_Insert_Delete := No_Insert_Delete;
-            Clear (Insert_Delete);
-         end if;
-      end Inc_I_D;
+      else
+         Parse.Next_Sequential_Terminal (Tree, State.Input_Terminal);
 
-   begin
-      if Terminals_Current = Invalid_Token_Index then
-         --  Happens with really bad syntax; see test_mckenzie_recover.adb 
Error_4.
-         raise Bad_Config;
+         --  if State.Input_Terminal.Node = Invalid_Node_Access then
+         --  State.Sequential_Terminal is correct.
       end if;
+   end Peek_Next_Sequential_Terminal;
 
-      loop
-         if Current_Insert_Delete = No_Insert_Delete then
-            Restore_Terminals_Current := Terminals_Current;
-            return Terminals (Terminals_Current);
+   procedure Check (ID : Token_ID; Expected_ID : in Token_ID; Descriptor : in WisiToken.Descriptor)
+   is begin
+      if ID /= Expected_ID then
+         raise Bad_Config with "expected " & Image (Expected_ID, Descriptor) & " found " & Image (ID, Descriptor);
+      end if;
+   end Check;
 
-         elsif Token_Index (Constant_Ref (Insert_Delete, 
Current_Insert_Delete)) = Terminals_Current then
-            declare
-               Op : Insert_Delete_Op renames Constant_Ref (Insert_Delete, 
Current_Insert_Delete);
-            begin
-               case Insert_Delete_Op_Label (Op.Op) is
-               when Insert =>
-                  --  Decrement Terminals_Current so Next_Token knows it 
should always
-                  --  increment it. Save the initial value, to restore in case 
of error.
-                  Restore_Terminals_Current := Terminals_Current;
-                  Terminals_Current         := Terminals_Current - 1;
-                  return (ID => ID (Op), others => <>);
-
-               when Delete =>
-                  Terminals_Current         := Terminals_Current + 1;
-                  Restore_Terminals_Current := Terminals_Current;
-                  Inc_I_D;
-               end case;
-            end;
-         else
-            return Terminals (Terminals_Current);
+   procedure Delete_Check
+     (Super         : in out Base.Supervisor;
+      Shared_Parser : in out LR.Parser.Parser;
+      Config        : in out Configuration;
+      Node          : in     Syntax_Trees.Valid_Node_Access;
+      Expected_ID   : in     Token_ID)
+   is begin
+      Super.Extend_Sequential_Index (Shared_Parser, Thru => Node, Positive => True);
+      declare
+         use Recover_Op_Arrays;
+         Op : constant Recover_Op :=
+           (Delete,
+            (if Expected_ID = Invalid_Token_ID
+             then Shared_Parser.Tree.ID (Node)
+             else Expected_ID),
+            Shared_Parser.Tree.Get_Sequential_Index (Node));
+      begin
+         if Expected_ID /= Invalid_Token_ID then
+            Check (Shared_Parser.Tree.ID (Node), Expected_ID, Shared_Parser.Tree.Lexer.Descriptor.all);
          end if;
-      end loop;
-   end Current_Token;
-
-   function Current_Token_ID_Peek
-     (Terminals             :         in Base_Token_Arrays.Vector;
-      Terminals_Current     :         in Base_Token_Index;
-      Insert_Delete         : aliased in Config_Op_Arrays.Vector;
-      Current_Insert_Delete :         in SAL.Base_Peek_Type)
-     return Token_ID
-   is
-      use Config_Op_Array_Refs;
+         if Is_Full (Config.Ops) or Is_Full (Config.Insert_Delete) then
+            raise Bad_Config;
+         end if;
+         Append (Config.Ops, Op);
+         Append (Config.Insert_Delete, Op);
+         Config.Current_Insert_Delete := 1;
+      end;
+   end Delete_Check;
 
-      Result : Token_ID;
+   procedure Delete_Check
+     (Super         : in out Base.Supervisor;
+      Shared_Parser : in out LR.Parser.Parser;
+      Config        : in out Configuration;
+      ID            : in     Token_ID)
+   is
+      Node : constant Syntax_Trees.Valid_Node_Access := Parse.Peek_Current_First_Terminal (Shared_Parser.Tree, Config);
    begin
-      if Terminals_Current = Base_Token_Index'First then
-         --  Happens with really bad syntax.
-         raise Bad_Config;
-      end if;
-
-      --  First set Result from Terminals; may be overridden by
-      --  Insert_Delete below.
-      Result := Terminals (Terminals_Current).ID;
+      Delete_Check (Super, Shared_Parser, Config, Node, ID);
+   end Delete_Check;
 
-      if Current_Insert_Delete = No_Insert_Delete then
-         null;
+   procedure Delete_Check
+     (Super         :         in out Base.Supervisor;
+      Shared_Parser :         in out LR.Parser.Parser;
+      Config        : aliased in out Configuration;
+      IDs           :         in     Token_ID_Array)
+   is
+      State : Peek_Sequential_State := Peek_Sequential_Start (Super, Shared_Parser, Config);
+   begin
+      for ID of IDs loop
+         Delete_Check (Super, Shared_Parser, Config, Peek_Sequential_Terminal (State), ID);
+         Peek_Next_Sequential_Terminal (Shared_Parser.Tree, State);
+      end loop;
+   end Delete_Check;
 
-      elsif Token_Index (Constant_Ref (Insert_Delete, Current_Insert_Delete)) 
= Terminals_Current then
-         declare
-            Op : Insert_Delete_Op renames Constant_Ref (Insert_Delete, 
Current_Insert_Delete);
-         begin
-            case Insert_Delete_Op_Label (Op.Op) is
-            when Insert =>
-               Result := Op.Ins_ID;
+   procedure Delete_Check
+     (Super         : in out Base.Supervisor;
+      Shared_Parser : in out LR.Parser.Parser;
+      Config        : in out Configuration;
+      Peek_State    : in out Peek_Sequential_State;
+      ID            : in     Token_ID)
+   is begin
+      Delete_Check (Super, Shared_Parser, Config, Peek_Sequential_Terminal (Peek_State), ID);
+      Peek_Next_Sequential_Terminal (Shared_Parser.Tree, Peek_State);
+   end Delete_Check;
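
The Delete_Check family above is the check-then-delete primitive used by language-specific fixes; the comment earlier in this patch cites ada_mode-recover_extra_end_loop.adb deleting "end loop ;". A self-contained toy sketch of that pattern, using a local enumeration rather than the real grammar token IDs:

   with Ada.Text_IO; use Ada.Text_IO;
   procedure Delete_Check_Sketch is
      Bad_Config : exception;

      type Token_ID is (END_ID, LOOP_ID, SEMICOLON_ID, IDENT_ID);
      type ID_Array is array (Positive range <>) of Token_ID;

      Input    : constant ID_Array := (END_ID, LOOP_ID, SEMICOLON_ID, IDENT_ID);
      Expected : constant ID_Array := (END_ID, LOOP_ID, SEMICOLON_ID);

      Next : Positive := Input'First;  --  plays the role of the peek state
   begin
      for ID of Expected loop
         if Input (Next) /= ID then
            --  Mirrors Check: reject the whole config when a token does not match.
            raise Bad_Config with
              "expected " & Token_ID'Image (ID) & " found " & Token_ID'Image (Input (Next));
         end if;
         Put_Line ("delete " & Token_ID'Image (Input (Next)));
         Next := Next + 1;
      end loop;
   end Delete_Check_Sketch;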
 
-            when Delete =>
-               --  This should have been handled in Check
+   procedure Do_Push_Back
+     (Tree   : in     Syntax_Trees.Tree;
+      Config : in out Configuration)
+   is
+      use Syntax_Trees;
+      Token : constant Recover_Token := Config.Stack.Pop.Token;
+   begin
+      Recover_Op_Arrays.Append
+        (Config.Ops, (Push_Back, Tree.Element_ID (Token), Tree.Get_Sequential_Index (Tree.First_Terminal (Token))));
+
+      if Token.Virtual then
+         if Token.First_Terminal = Invalid_Node_Access then
+            --  Token is an empty nonterm. Doing nothing is ok; the empty nonterm
+            --  will be created by the parse process.
+            null;
+         else
+            case Tree.Label (Token.First_Terminal) is
+            when Terminal_Label =>
+               Config.Input_Stream.Prepend (Token.First_Terminal);
+            when Nonterm =>
                raise SAL.Programmer_Error;
             end case;
-         end;
+         end if;
+      else
+         Config.Input_Stream.Prepend (Token.Element_Node);
       end if;
-      return Result;
-   end Current_Token_ID_Peek;
-
-   procedure Current_Token_ID_Peek_3
-     (Terminals             :         in     Base_Token_Arrays.Vector;
-      Terminals_Current     :         in     Base_Token_Index;
-      Insert_Delete         : aliased in     Config_Op_Arrays.Vector;
-      Current_Insert_Delete :         in     SAL.Base_Peek_Type;
-      Tokens                :            out Token_ID_Array_1_3)
+   end Do_Push_Back;
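
Do_Push_Back above records the op, pops the config stack, and prepends the popped token (or its first terminal) to the config's pending input. A minimal stand-alone sketch of that stack-to-input motion on toy data; the container choices here are arbitrary, not the WisiToken types:

   with Ada.Containers.Doubly_Linked_Lists;
   with Ada.Containers.Vectors;
   with Ada.Text_IO; use Ada.Text_IO;
   procedure Push_Back_Sketch is
      package Token_Lists  is new Ada.Containers.Doubly_Linked_Lists (Character);
      package Token_Stacks is new Ada.Containers.Vectors (Positive, Character);

      Stack : Token_Stacks.Vector;  --  parse stack; top is the last element
      Input : Token_Lists.List;     --  pending input; front is the next token

      procedure Do_Push_Back is
         Token : constant Character := Stack.Last_Element;
      begin
         Stack.Delete_Last;
         Input.Prepend (Token);  --  the token will be parsed again later
      end Do_Push_Back;
   begin
      Stack.Append ('a');
      Stack.Append ('b');
      Input.Prepend ('c');

      Do_Push_Back;  --  'b' moves from the stack top to the front of the input

      Put_Line ("next input: " & Input.First_Element);  --  prints "next input: b"
   end Push_Back_Sketch;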
+
+   procedure Set_Initial_Sequential_Index
+     (Parsers    : in out Parser_Lists.List;
+      Tree       : in     Syntax_Trees.Tree;
+      Streams    : in out     Syntax_Trees.Stream_ID_Array;
+      Terminals  : in out Syntax_Trees.Stream_Node_Parents_Array;
+      Initialize : in     Boolean)
    is
-      Terminals_Next : WisiToken.Token_Index := Terminals_Current + 1;
-   begin
-      if Terminals_Current = Base_Token_Index'First then
-         --  Happens with really bad syntax; see test_mckenzie_recover.adb 
Error_4.
-         raise Bad_Config;
-      end if;
+      use Syntax_Trees;
 
-      --  First set Tokens from Terminals; may be overridden by
-      --  Insert_Delete below.
-      Tokens (1) := Terminals (Terminals_Current).ID;
-      if Terminals_Next <= Terminals.Last_Index then
-         Tokens (2) := Terminals (Terminals_Next).ID;
-         Terminals_Next := Terminals_Next + 1;
-         if Terminals_Next <= Terminals.Last_Index then
-            Tokens (3) := Terminals (Terminals_Next).ID;
-         else
-            Tokens (3) := Invalid_Token_ID;
+      --  The parsers may have different error points, and different parse
+      --  stream input after the error point; we arbitrarily pick the last
+      --  terminal in the first parser stack top as the origin for
+      --  Sequential_Index. Because most terminal nodes are shared, we must
+      --  set Sequential_Index consistently for all parsers, including in
+      --  terminal tokens copied from Shared_Stream (for Set_Error or
+      --  Add_Deleted). So we walk prev/next terminal for each parser.
+
+      --  If not Initialize, we are clearing sequential_index. A node may
+      --  have been copied from shared_stream into a parse stream after
+      --  sequential_index was initialized, to set Deleted_After due to a
+      --  Delete op (test_mckenzie_recover.adb Error_2). So we need to
+      --  traverse the shared stream as well as the parse streams.
+
+      Seq_Index : constant Base_Sequential_Index := (if Initialize then 1 else Invalid_Sequential_Index);
+      I : Positive_Index_Type := 1; --  first parse stream
+   begin
+      --  First set starting point.
+      for Parser_State of Parsers loop
+         Streams (I) := Parser_State.Stream;
+
+         --  Start with the stack top; it is the error token for
+         --  In_Parse_Actions, is one stream element before the error token for
+         --  Parse_Actions, is in the parse stream, and is SOI for an empty
+         --  buffer. It may be a nonterm, possibly empty.
+         Terminals (I) := Tree.To_Stream_Node_Parents
+           (Tree.To_Rooted_Ref (Parser_State.Stream, Tree.Peek 
(Parser_State.Stream)));
+
+         if not Initialize and I = 1 then
+            --  Set Terminals (Terminals'Last), which must be in the shared stream.
+            --  Current_Token is not invalid here; error recover succeeded.
+            declare
+               Current_Token : Rooted_Ref renames Tree.Current_Token 
(Parser_State.Stream);
+            begin
+               if Tree.ID (Current_Token.Node) = Tree.Lexer.Descriptor.EOI_ID 
then
+                  --  test_mckenzie_recover.adb Empty_Comments
+                  Terminals (Terminals'Last) := Tree.To_Stream_Node_Parents
+                    (Tree.To_Rooted_Ref (Tree.Shared_Stream, Tree.Stream_Last 
(Tree.Shared_Stream, Skip_EOI => False)));
+               else
+                  if Current_Token.Stream /= Tree.Shared_Stream then
+                     --  Current_Token is the error token, so it was copied to the parse
+                     --  stream input. test_mckenzie_recover.adb Error_4.
+                     Terminals (Terminals'Last) := Tree.To_Stream_Node_Parents
+                       (Tree.To_Rooted_Ref
+                          (Parser_State.Stream, Tree.Stream_Last 
(Parser_State.Stream, Skip_EOI => False)));
+                     Tree.Next_Terminal (Terminals (Terminals'Last), Following 
=> True);
+                     if Terminals (Terminals'Last).Ref = 
Invalid_Stream_Node_Ref then
+                        --  EOI was in stream input
+                        Terminals (Terminals'Last) := 
Tree.To_Stream_Node_Parents
+                          (Tree.To_Rooted_Ref
+                             (Tree.Shared_Stream, Tree.Stream_Last 
(Tree.Shared_Stream, Skip_EOI => False)));
+                     end if;
+                     pragma Assert
+                       (Terminals (Terminals'Last).Ref.Stream = 
Tree.Shared_Stream and
+                          (Terminals (Terminals'Last).Ref.Node /= 
Invalid_Node_Access and then
+                             Tree.Label (Terminals (Terminals'Last).Ref.Node) 
in Terminal_Label));
+
+                  else
+                     Terminals (Terminals'Last) := Tree.To_Stream_Node_Parents 
(Current_Token);
+                     Tree.Last_Terminal (Terminals (Terminals'Last), Streams 
(Terminals'Last), Preceding => True);
+                  end if;
+               end if;
+            end;
          end if;
-      else
-         Tokens (2) := Invalid_Token_ID;
-         Tokens (3) := Invalid_Token_ID;
+
+         Tree.Last_Terminal (Terminals (I), Streams (I), Preceding => True);
+         I := @ + 1;
+      end loop;
+
+      if not Initialize then
+         Streams (Streams'Last) := Tree.Shared_Stream;
       end if;
 
-      if Current_Insert_Delete = No_Insert_Delete then
-         null;
-      else
-         for I in Tokens'Range loop
+      --  Get all Terminals to the same node. Terminals (1) is the
+      --  "reference" terminal.
+      for I in Terminals'First + 1 .. Terminals'Last loop
+         if Terminals (I).Ref.Node /= Terminals (1).Ref.Node then
+            --  There are several cases:
+            --
+            --  1. I node is copied
+            --  2. Reference node is before or after I node.
+            --  3. Reference node is deleted in I.
+            --
+            --  In case 3, the I node does not need Sequential_Index.
+            --
+            --  Note that the reference node cannot be inserted or deleted in the
+            --  reference parser, because we start with stack_top,
+            --  which is after any deleted tokens.
+
             declare
-               use Config_Op_Arrays, Config_Op_Array_Refs;
-               J : constant SAL.Base_Peek_Type := Current_Insert_Delete + 
SAL.Peek_Type (I) - 1;
+               Ref_Byte_Pos : constant Buffer_Pos := Tree.Byte_Region
+                 (Terminals (1), Parse_Stream => Streams (1), 
Trailing_Non_Grammar => True).First;
             begin
-               if (J in First_Index (Insert_Delete) .. Last_Index 
(Insert_Delete)) and then
-                 Token_Index (Constant_Ref (Insert_Delete, J)) = 
Terminals_Current
-               then
+               loop
                   declare
-                     Op : Insert_Delete_Op renames Constant_Ref 
(Insert_Delete, J);
+                     Byte_Pos : Buffer_Pos := Tree.Byte_Region
+                       (Terminals (I), Streams (I), Trailing_Non_Grammar => 
True).First;
                   begin
-                     case Insert_Delete_Op_Label (Op.Op) is
-                     when Insert =>
-                        Tokens (I) := Op.Ins_ID;
-
-                     when Delete =>
-                        --  This should have been handled in Check
-                        raise SAL.Programmer_Error;
-                     end case;
+                     if Ref_Byte_Pos = Byte_Pos then
+                        --  case 1.
+                        Tree.Set_Sequential_Index (Terminals (I).Ref.Node, 
Seq_Index);
+                        exit;
+
+                     elsif Ref_Byte_Pos < Byte_Pos then
+                        if Tree.ID (Terminals (I).Ref.Node) = 
Tree.Lexer.Descriptor.SOI_ID then
+                           Tree.Set_Sequential_Index (Terminals (I).Ref.Node, 
Seq_Index);
+                           exit;
+                        else
+                           Tree.Prev_Terminal (Terminals (I), Streams (I), 
Preceding => True);
+
+                           Byte_Pos := Tree.Byte_Region
+                             (Terminals (I), Parse_Stream => Streams (I), 
Trailing_Non_Grammar => True).First;
+
+                           exit when Ref_Byte_Pos > Byte_Pos; -- case 3.
+                        end if;
+                     else
+                        if Tree.ID (Terminals (I).Ref.Node) = 
Tree.Lexer.Descriptor.EOI_ID then
+                           Tree.Set_Sequential_Index (Terminals (I).Ref.Node, 
Seq_Index);
+                           exit;
+                        else
+                           Tree.Next_Terminal (Terminals (I), Following => 
True);
+                           Byte_Pos := Tree.Byte_Region
+                             (Terminals (I), Parse_Stream => Streams (I), 
Trailing_Non_Grammar => True).First;
+                           exit when Ref_Byte_Pos < Byte_Pos; -- case 3.
+                        end if;
+                     end if;
                   end;
-               end if;
+               end loop;
             end;
-         end loop;
-      end if;
-   end Current_Token_ID_Peek_3;
+         end if;
+      end loop;
 
-   procedure Delete_Check
-     (Terminals : in     Base_Token_Arrays.Vector;
-      Config    : in out Configuration;
-      ID        : in     Token_ID)
+      for I in Terminals'Range loop
+         Tree.Set_Sequential_Index (Terminals (I).Ref.Node, Seq_Index);
+      end loop;
+   end Set_Initial_Sequential_Index;
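
Set_Initial_Sequential_Index and Extend_Sequential_Index keep the per-parser token streams aligned by comparing byte positions and stepping whichever side is behind, as the comments above describe. A self-contained toy sketch of that merge-style walk on plain position arrays (not the tree iterators):

   with Ada.Text_IO; use Ada.Text_IO;
   procedure Sequential_Index_Sketch is
      type Pos_Array is array (Positive range <>) of Natural;

      A : constant Pos_Array := (10, 20, 30, 40);  --  terminal byte positions, parser 1
      B : constant Pos_Array := (10, 30, 35, 40);  --  parser 2: 20 deleted, 35 copied

      I     : Positive := A'First;
      J     : Positive := B'First;
      Index : Natural  := 0;
   begin
      while I <= A'Last and J <= B'Last loop
         Index := Index + 1;
         if A (I) = B (J) then
            Put_Line ("index" & Natural'Image (Index) & " at pos" & Natural'Image (A (I)));
            I := I + 1;
            J := J + 1;
         elsif A (I) < B (J) then
            --  This terminal exists only in parser 1; step it alone.
            Put_Line ("index" & Natural'Image (Index) & " only in parser 1");
            I := I + 1;
         else
            --  This terminal exists only in parser 2; step it alone.
            Put_Line ("index" & Natural'Image (Index) & " only in parser 2");
            J := J + 1;
         end if;
      end loop;
   end Sequential_Index_Sketch;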
+
+   procedure Extend_Sequential_Index
+     (Tree      : in     Syntax_Trees.Tree;
+      Streams   : in     Syntax_Trees.Stream_ID_Array;
+      Terminals : in out Syntax_Trees.Stream_Node_Parents_Array;
+      Target    : in     Syntax_Trees.Base_Sequential_Index;
+      Positive  : in     Boolean;
+      Clear     : in     Boolean)
    is
-      use Config_Op_Arrays;
-      Op : constant Config_Op := (Delete, ID, Config.Current_Shared_Token);
+      use Syntax_Trees;
+      Index : Base_Sequential_Index :=
+        (if Clear
+         then Invalid_Sequential_Index
+         elsif Tree.Get_Sequential_Index (Terminals (1).Ref.Node) /= 
Invalid_Sequential_Index
+         then Tree.Get_Sequential_Index (Terminals (1).Ref.Node)
+         else 1);
+
+      Skip_Step_Reference : Boolean := False;
+      Skip_Step_Terminals : array (2 .. Terminals'Last) of Boolean := (others 
=> False);
+      Target_Seen : array (1 .. Terminals'Last) of Boolean := (others => 
False);
    begin
-      Check (Terminals (Config.Current_Shared_Token).ID, ID);
-      if Is_Full (Config.Ops) or Is_Full (Config.Insert_Delete) then
-         raise Bad_Config;
-      end if;
-      Append (Config.Ops, Op);
-      Append (Config.Insert_Delete, Op);
-      Config.Current_Insert_Delete := 1;
-   end Delete_Check;
+      loop
+         if Skip_Step_Reference then
+            Skip_Step_Reference := False;
+         else
+            if Positive then
+               if Tree.ID (Terminals (1).Ref.Node) = 
Tree.Lexer.Descriptor.EOI_ID then
+                  Target_Seen (1) := True;
+               else
+                  Tree.Next_Terminal (Terminals (1), Following => True);
+                  if Tree.Get_Sequential_Index (Terminals (1).Ref.Node) /= 
Invalid_Sequential_Index and
+                    Tree.Get_Sequential_Index (Terminals (1).Ref.Node) >= 
Target
+                  then
+                     Target_Seen (1) := True;
+                  end if;
+               end if;
+            else
+               if Tree.ID (Terminals (1).Ref.Node) = 
Tree.Lexer.Descriptor.SOI_ID then
+                  Target_Seen (1) := True;
+               else
+                  Tree.Prev_Terminal (Terminals (1), Streams (1), Preceding => 
True);
+                  if Tree.Get_Sequential_Index (Terminals (1).Ref.Node) /= 
Invalid_Sequential_Index and
+                    Tree.Get_Sequential_Index (Terminals (1).Ref.Node) <= 
Target
+                  then
+                     Target_Seen (1) := True;
+                  end if;
+               end if;
+            end if;
+         end if;
 
-   procedure Delete_Check
-     (Terminals : in     Base_Token_Arrays.Vector;
-      Config    : in out Configuration;
-      Index     : in out WisiToken.Token_Index;
-      ID        : in     Token_ID)
-   is begin
-      Check (Terminals (Index).ID, ID);
-      Delete (Terminals, Config, Index);
-   end Delete_Check;
+         if not Clear  then
+            if Positive then
+               Index := @ + 1;
+            else
+               Index := @ - 1;
+            end if;
+         end if;
 
-   procedure Delete
-     (Terminals : in     Base_Token_Arrays.Vector;
-      Config    : in out Configuration;
-      Index     : in out WisiToken.Token_Index)
+         declare
+            Ref_Byte_Pos : constant Buffer_Pos := Tree.Byte_Region
+              (Terminals (1), Streams (1), Trailing_Non_Grammar => True).First;
+         begin
+            for I in 2 .. Terminals'Last loop
+               if Skip_Step_Terminals (I) then
+                  Skip_Step_Terminals (I) := False;
+               else
+                  if Positive then
+                     if Tree.ID (Terminals (I).Ref.Node) = Tree.Lexer.Descriptor.EOI_ID then
+                        Target_Seen (I) := True;
+                     else
+                        Tree.Next_Terminal (Terminals (I), Following => True);
+                        if Tree.Get_Sequential_Index (Terminals (I).Ref.Node) >= Target then
+                           Target_Seen (I) := True;
+                        end if;
+                     end if;
+                  else
+                     if Tree.ID (Terminals (I).Ref.Node) = Tree.Lexer.Descriptor.SOI_ID then
+                        Target_Seen (I) := True;
+                     else
+                        Tree.Prev_Terminal (Terminals (I), Streams (I), Preceding => True);
+                        if Tree.Get_Sequential_Index (Terminals (I).Ref.Node) <= Target then
+                           Target_Seen (I) := True;
+                        end if;
+                     end if;
+                  end if;
+               end if;
+
+               declare
+                  Byte_Pos : constant Buffer_Pos := Tree.Byte_Region
+                    (Terminals (I), Streams (1), Trailing_Non_Grammar => True).First;
+               begin
+                  if Terminals (I).Ref.Node = Terminals (1).Ref.Node then
+                     --  Don't set this here; that will confuse the exit criteria for not
+                     --  Initialize. It will be set via Terminals (1) below.
+                     null;
+
+                  elsif Ref_Byte_Pos = Byte_Pos then
+                     Tree.Set_Sequential_Index (Terminals (I).Ref.Node, Index);
+
+                  else
+                     if Positive then
+                        if Ref_Byte_Pos > Byte_Pos then
+                           --  Parser node is deleted in reference; wait for Parser to catch
+                           --  up.
+                           Skip_Step_Reference := True;
+                           Tree.Set_Sequential_Index (Terminals (I).Ref.Node, Index);
+
+                        else
+                           --  Ref_Byte_Pos < Byte_Pos
+                           --  Reference node is deleted in Parser; wait for reference to catch up.
+                           Skip_Step_Terminals (I) := True;
+                        end if;
+
+                     else -- Positive = False
+                        if Ref_Byte_Pos < Byte_Pos then
+                           --  Parser node is deleted in reference; wait for reference to catch
+                           --  up.
+                           Skip_Step_Reference := True;
+                           Tree.Set_Sequential_Index (Terminals (I).Ref.Node, Index);
+
+                        else
+                           --  Ref_Byte_Pos > Byte_Pos
+                           --  Reference node is deleted in Parser; wait for reference to catch up.
+                           Skip_Step_Terminals (I) := True;
+                        end if;
+                     end if;
+                  end if;
+               end;
+            end loop;
+         end;
+
+         if Clear then
+            if (for all Seen of Target_Seen => Seen) and then
+              (for all Term of Terminals =>
+                 Tree.Label (Term.Ref.Node) = Source_Terminal and then
+                   Tree.Get_Sequential_Index (Term.Ref.Node) = Invalid_Sequential_Index)
+            then
+               exit;
+            end if;
+
+            if not Skip_Step_Reference then
+               Tree.Set_Sequential_Index (Terminals (1).Ref.Node, Index);
+            end if;
+
+         else
+            if not Skip_Step_Reference then
+               Tree.Set_Sequential_Index (Terminals (1).Ref.Node, Index);
+            end if;
+
+            exit when
+              (if Positive
+               then Index >= Target
+               else Index <= Target) and
+              (for all Term of Terminals => Tree.Label (Term.Ref.Node) = Source_Terminal and then
+                 Tree.Get_Sequential_Index (Term.Ref.Node) /= Invalid_Sequential_Index);
+
+            exit when
+              (for all Term of Terminals =>
+                 Tree.ID (Term.Ref.Node) =
+                   (if Positive
+                    then Tree.Lexer.Descriptor.EOI_ID
+                    else Tree.Lexer.Descriptor.SOI_ID));
+         end if;
+
+      end loop;
+   end Extend_Sequential_Index;
+
+   procedure Clear_Sequential_Index (Shared_Parser : in out WisiToken.Parse.LR.Parser.Parser)
    is
-      use Config_Op_Arrays;
-      Op : constant Config_Op := (Delete, Terminals (Index).ID, Index);
+      Streams       : Syntax_Trees.Stream_ID_Array (1 .. Shared_Parser.Parsers.Count + 1);
+      Min_Terminals : Syntax_Trees.Stream_Node_Parents_Array (1 .. Shared_Parser.Parsers.Count + 1);
+      Max_Terminals : Syntax_Trees.Stream_Node_Parents_Array (1 .. Shared_Parser.Parsers.Count + 1);
    begin
-      if Is_Full (Config.Ops) or Is_Full (Config.Insert_Delete) then
-         raise Bad_Config;
+      if Shared_Parser.Parsers.Count = 0 then
+         --  We get here when recover fails by terminating all parsers.
+         return;
+      end if;
+
+      Set_Initial_Sequential_Index
+        (Shared_Parser.Parsers, Shared_Parser.Tree, Streams, Max_Terminals, Initialize => False);
+      Min_Terminals := Max_Terminals;
+      Extend_Sequential_Index
+        (Shared_Parser.Tree, Streams, Max_Terminals, Positive => True,
+         Target => Shared_Parser.Max_Sequential_Index, Clear => True);
+      Extend_Sequential_Index
+        (Shared_Parser.Tree, Streams, Min_Terminals, Positive => False,
+         Target => Shared_Parser.Min_Sequential_Index, Clear => True);
+
+      if Debug_Mode then
+         Shared_Parser.Tree.Sequential_Index_Cleared;
       end if;
-      Append (Config.Ops, Op);
-      Append (Config.Insert_Delete, Op);
-      Config.Current_Insert_Delete := 1;
-      Index := Index + 1;
-   end Delete;
+   end Clear_Sequential_Index;
 
    function Find_ID
-     (Config         : in     Configuration;
-      ID             : in     Token_ID)
+     (Tree   : in Syntax_Trees.Tree;
+      Config : in Configuration;
+      ID     : in Token_ID)
      return Boolean
    is begin
       for I in 1 .. Config.Stack.Depth - 1 loop
          --  Depth has Invalid_Token_ID
-         if ID = Config.Stack.Peek (I).Token.ID then
+         if ID = Tree.Element_ID (Config.Stack.Peek (I).Token) then
             return True;
          end if;
       end loop;
@@ -880,14 +1252,15 @@ package body WisiToken.Parse.LR.McKenzie_Recover is
    end Find_ID;
 
    procedure Find_ID
-     (Config         : in     Configuration;
+     (Tree           : in     Syntax_Trees.Tree;
+      Config         : in     Configuration;
       ID             : in     Token_ID;
       Matching_Index : in out SAL.Peek_Type)
    is begin
       loop
          exit when Matching_Index = Config.Stack.Depth; -- Depth has Invalid_Token_ID
          declare
-            Stack_ID : Token_ID renames Config.Stack.Peek (Matching_Index).Token.ID;
+            Stack_ID : Token_ID renames Tree.Element_ID (Config.Stack.Peek (Matching_Index).Token);
          begin
             exit when Stack_ID = ID;
          end;
@@ -896,14 +1269,15 @@ package body WisiToken.Parse.LR.McKenzie_Recover is
    end Find_ID;
 
    procedure Find_ID
-     (Config         : in     Configuration;
+     (Tree           : in     Syntax_Trees.Tree;
+      Config         : in     Configuration;
       IDs            : in     Token_ID_Set;
       Matching_Index : in out SAL.Peek_Type)
    is begin
       loop
          exit when Matching_Index >= Config.Stack.Depth; -- Depth has Invalid_Token_ID
          declare
-            ID : Token_ID renames Config.Stack.Peek (Matching_Index).Token.ID;
+            ID : Token_ID renames Tree.Element_ID (Config.Stack.Peek (Matching_Index).Token);
          begin
             exit when ID in IDs'First .. IDs'Last and then IDs (ID);
          end;
@@ -918,14 +1292,22 @@ package body WisiToken.Parse.LR.McKenzie_Recover is
       ID_Set         : in     Token_ID_Set;
       Matching_Index : in out SAL.Peek_Type)
    is
-      use Syntax_Trees;
+      function Found return Boolean
+      is
+         use Syntax_Trees;
+         Token renames Config.Stack.Peek (Matching_Index).Token;
+      begin
+         return
+           Tree.Element_ID (Token) in ID_Set'Range and then
+           (ID_Set (Tree.Element_ID (Token)) and
+              (not Token.Virtual and then
+                 Tree.Find_Descendant (Token.Element_Node, ID) /=
+                 Invalid_Node_Access));
+      end Found;
    begin
       loop
          exit when Matching_Index >= Config.Stack.Depth; -- Depth has Invalid_Token_ID
-         exit when Config.Stack.Peek (Matching_Index).Token.ID in ID_Set'Range and then
-           (ID_Set (Config.Stack.Peek (Matching_Index).Token.ID) and
-              (Config.Stack.Peek (Matching_Index).Tree_Index /= Invalid_Node_Index and then
-                 Tree.Find_Descendant (Config.Stack.Peek (Matching_Index).Tree_Index, ID) /= Invalid_Node_Index));
+         exit when Found;
 
          Matching_Index := Matching_Index + 1;
       end loop;
@@ -933,7 +1315,7 @@ package body WisiToken.Parse.LR.McKenzie_Recover is
 
    procedure Find_Matching_Name
      (Config              : in     Configuration;
-      Lexer               : access constant WisiToken.Lexer.Instance'Class;
+      Tree                : in     Syntax_Trees.Tree;
       Name                : in     String;
       Matching_Name_Index : in out SAL.Peek_Type;
       Case_Insensitive    : in     Boolean)
@@ -944,17 +1326,14 @@ package body WisiToken.Parse.LR.McKenzie_Recover is
       loop
          exit when Matching_Name_Index >= Config.Stack.Depth; -- Depth has Invalid_Token_ID
          declare
-            Token       : Recover_Token renames Config.Stack.Peek (Matching_Name_Index).Token;
-            Name_Region : constant Buffer_Region :=
-              (if Token.Name = Null_Buffer_Region
-               then Token.Byte_Region
-               else Token.Name);
+            Token       : Syntax_Trees.Recover_Token renames Config.Stack.Peek (Matching_Name_Index).Token;
+            Name_Region : constant Buffer_Region := Tree.Name (Token);
          begin
             exit when Name_Region /= Null_Buffer_Region and then
               Match_Name =
               (if Case_Insensitive
-               then To_Lower (Lexer.Buffer_Text (Name_Region))
-               else Lexer.Buffer_Text (Name_Region));
+               then To_Lower (Tree.Lexer.Buffer_Text (Name_Region))
+               else Tree.Lexer.Buffer_Text (Name_Region));
 
             Matching_Name_Index := Matching_Name_Index + 1;
          end;
@@ -963,7 +1342,7 @@ package body WisiToken.Parse.LR.McKenzie_Recover is
 
    procedure Find_Matching_Name
      (Config              : in     Configuration;
-      Lexer               : access constant WisiToken.Lexer.Instance'Class;
+      Tree                : in     Syntax_Trees.Tree;
       Name                : in     String;
       Matching_Name_Index : in out SAL.Peek_Type;
       Other_ID            : in     Token_ID;
@@ -978,19 +1357,16 @@ package body WisiToken.Parse.LR.McKenzie_Recover is
       loop
          exit when Matching_Name_Index >= Config.Stack.Depth; -- Depth has Invalid_Token_ID
          declare
-            Token       : Recover_Token renames Config.Stack.Peek (Matching_Name_Index).Token;
-            Name_Region : constant Buffer_Region :=
-              (if Token.Name = Null_Buffer_Region
-               then Token.Byte_Region
-               else Token.Name);
+            Token       : Syntax_Trees.Recover_Token renames Config.Stack.Peek (Matching_Name_Index).Token;
+            Name_Region : constant Buffer_Region := Tree.Name (Token);
          begin
             exit when Name_Region /= Null_Buffer_Region and then
               Match_Name =
               (if Case_Insensitive
-               then To_Lower (Lexer.Buffer_Text (Name_Region))
-               else Lexer.Buffer_Text (Name_Region));
+               then To_Lower (Tree.Lexer.Buffer_Text (Name_Region))
+               else Tree.Lexer.Buffer_Text (Name_Region));
 
-            if Other_ID = Token.ID then
+            if Other_ID = Tree.Element_ID (Token) then
                Other_Count := Other_Count + 1;
             end if;
 
@@ -999,226 +1375,350 @@ package body WisiToken.Parse.LR.McKenzie_Recover is
       end loop;
    end Find_Matching_Name;
 
-   procedure Insert (Config : in out Configuration; ID : in Token_ID)
+   procedure Insert
+     (Super         : in out Base.Supervisor;
+      Shared_Parser : in out LR.Parser.Parser;
+      Config        : in out Configuration;
+      ID            : in     Token_ID)
    is begin
-      Insert (Config, Config.Current_Shared_Token, ID);
+      Insert
+        (Super, Shared_Parser, Config, Parse.Peek_Current_First_Sequential_Terminal (Super, Shared_Parser, Config), ID);
    end Insert;
 
-   procedure Insert (Config : in out Configuration; IDs : in Token_ID_Array)
+   procedure Insert
+     (Super         : in out Base.Supervisor;
+      Shared_Parser : in out LR.Parser.Parser;
+      Config        : in out Configuration;
+      IDs           : in     Token_ID_Array)
    is begin
       for ID of IDs loop
-         Insert (Config, ID);
+         Insert (Super, Shared_Parser, Config, ID);
       end loop;
    end Insert;
 
-   procedure Insert (Config : in out Configuration; Index : in WisiToken.Token_Index; ID : in Token_ID)
-   is
-      use Config_Op_Arrays;
-      Op : constant Config_Op := (Insert, ID, Index);
-   begin
-      if Is_Full (Config.Ops) or Is_Full (Config.Insert_Delete) then
-         raise Bad_Config;
-      end if;
-      Append (Config.Ops, Op);
-      Append (Config.Insert_Delete, Op);
-      Config.Current_Insert_Delete := 1;
+   procedure Insert
+     (Super         : in out Base.Supervisor;
+      Shared_Parser : in out LR.Parser.Parser;
+      Config        : in out Configuration;
+      Before        : in     Syntax_Trees.Valid_Node_Access;
+      ID            : in     Token_ID)
+   is begin
+      Super.Extend_Sequential_Index (Shared_Parser, Thru => Before, Positive => True);
+      declare
+         use Recover_Op_Arrays;
+         Op : constant Recover_Op := (Insert, ID, Shared_Parser.Tree.Get_Sequential_Index (Before));
+      begin
+         if Is_Full (Config.Ops) or Is_Full (Config.Insert_Delete) then
+            raise Bad_Config;
+         end if;
+         Append (Config.Ops, Op);
+         Append (Config.Insert_Delete, Op);
+         Config.Current_Insert_Delete := 1;
+      end;
    end Insert;
 
-   function Next_Token
-     (Terminals                 :         in     Base_Token_Arrays.Vector;
-      Terminals_Current         :         in out Base_Token_Index;
-      Restore_Terminals_Current :         in out WisiToken.Base_Token_Index;
-      Insert_Delete             : aliased in out Config_Op_Arrays.Vector;
-      Current_Insert_Delete     :         in out SAL.Base_Peek_Type)
-     return Base_Token
+   function Undo_Reduce_Op_Order_Valid (Ops : in Recover_Op_Arrays.Vector) return Boolean
+   --  Subset of checks in Push_Back_Undo_Reduce_Valid, when the target nonterm is empty.
    is
-      use Config_Op_Arrays, Config_Op_Array_Refs;
-
-      function Next_Terminal return Base_Token
-      is begin
-         Terminals_Current         := Terminals_Current + 1;
-         Restore_Terminals_Current := Terminals_Current;
-         return Terminals (Terminals_Current);
-      end Next_Terminal;
-
+      use Recover_Op_Arrays;
    begin
-      loop
-         if Last_Index (Insert_Delete) > 0 and then Current_Insert_Delete = Last_Index (Insert_Delete) then
-            Current_Insert_Delete := No_Insert_Delete;
-            Clear (Insert_Delete);
-            return Next_Terminal;
-
-         elsif Current_Insert_Delete = No_Insert_Delete then
-            return Next_Terminal;
+      declare
+         Op : Recover_Op renames Element (Ops, Last_Index (Ops));
+      begin
+         case Op.Op is
+         when Fast_Forward =>
+            --  Normally any Undo_Reduce must be done before Insert and after
+            --  Delete, to eliminate duplicate results from push_back/reduce
+            --  before and after delete (see test_mckenzie_recover.adb
+            --  Extra_Begin, ada_mode-recover_extra_end_loop.adb with incremental
+            --  parse). Fast_Forward resets that.
+            return True;
 
-         elsif Token_Index (Constant_Ref (Insert_Delete, Current_Insert_Delete + 1)) = Terminals_Current + 1 then
-            Current_Insert_Delete := Current_Insert_Delete + 1;
-            declare
-               Op : Insert_Delete_Op renames Constant_Ref (Insert_Delete, Current_Insert_Delete);
-            begin
-               case Insert_Delete_Op_Label'(Op.Op) is
-               when Insert =>
-                  return (ID => Op.Ins_ID, others => <>);
-
-               when Delete =>
-                  Terminals_Current         := Terminals_Current + 1;
-                  Restore_Terminals_Current := Terminals_Current;
-               end case;
-            end;
+         when Undo_Reduce | Push_Back =>
+            return True;
 
-         else
-            return Next_Terminal;
-         end if;
-      end loop;
-   end Next_Token;
+         when Insert =>
+            return False;
 
-   function Push_Back_Valid
-     (Target_Token_Index : in WisiToken.Base_Token_Index;
-      Ops             : in Config_Op_Arrays.Vector;
-      Prev_Op         : in Positive_Index_Type)
+         when Delete =>
+            return True;
+         end case;
+      end;
+   end Undo_Reduce_Op_Order_Valid;
+
+   function Push_Back_Undo_Reduce_Valid
+     (Super                 : in out Base.Supervisor;
+      Shared_Parser :         in out LR.Parser.Parser;
+      Target_Op             : in     Recover_Op_Label;
+      Target_Node           : in     Syntax_Trees.Node_Access;
+      Ops                   : in     Recover_Op_Arrays.Vector;
+      Last_Op_Index         : in     Positive_Index_Type;
+      Push_Back_Undo_Reduce : in     Boolean)
      return Boolean
+   --  Target_Node is the first terminal in a token that is the object of
+   --  Target_Op; a Push_Back or Undo_Reduce that will be the next Op
+   --  after Last_Op. Return True if that respects restrictions on Op
+   --  order.
+   --
+   --  Language_Fixes may set Push_Back_Undo_Reduce True; other callers
+   --  must set it False.
    is
-      use Config_Op_Arrays;
-      Fast_Forward_Seen : Boolean := False;
+      use Syntax_Trees;
+      use Recover_Op_Arrays;
+
+      Tree : Syntax_Trees.Tree  renames Shared_Parser.Tree;
+
+      Target_Index : Base_Sequential_Index :=
+        (if Target_Node = Invalid_Node_Access
+         then Invalid_Sequential_Index
+         else Tree.Get_Sequential_Index (Target_Node));
+
+      Fast_Forward_Seen        : Boolean               := False;
+      Fast_Forward_First_Index : Base_Sequential_Index := Invalid_Sequential_Index;
+
+      function Check_Insert_Delete (Op : in Insert_Delete_Op) return Boolean
+      is begin
+         return Fast_Forward_Seen and then
+           (Target_Index = Invalid_Sequential_Index or
+              (if Push_Back_Undo_Reduce
+               then
+                 (case Insert_Delete_Op_Label'(Op.Op) is
+                  when Insert => Target_Index >= Op.Ins_Before,
+                  when Delete => Target_Index > Op.Del_Token_Index)
+               --  allow Language_Fixes (which sets Push_Back_Undo_Reduce) to insert
+               --  more ops at a previous error location, but not cross Insert or
+               --  Delete; that would cause out-of-order ops. ada_mode-recover_37.adb
+               else Target_Index > Fast_Forward_First_Index
+               --  test_mckenzie_recover.adb Push_Back_2, String_Quote_7
+              ));
+      end Check_Insert_Delete;
+
    begin
-      --  We require a Fast_Forward after Insert or Delete, to eliminate
-      --  duplicate results from push_back before and after a
-      --  delete (see test_mckenzie_recover.adb Extra_Begin).
-      --
-      --  If Target_Token_Index is greater than the new current terminal
-      --  implied by Prev_Op, the Push_Back is valid. Otherwise, it is
-      --  invalid (it should have been done first); we only need to look at
-      --  one op other than Fast_Forward.
-      for I in reverse First_Index (Ops) .. Prev_Op loop
+      if Target_Index = Invalid_Sequential_Index and Target_Node /= Invalid_Node_Access then
+         Super.Extend_Sequential_Index (Shared_Parser, Target_Node, Positive => False);
+         Target_Index := Tree.Get_Sequential_Index (Target_Node);
+      end if;
+
+      for I in reverse First_Index (Ops) .. Last_Op_Index loop
          declare
-            Op : Config_Op renames Element (Ops, I);
+            Op : Recover_Op renames Element (Ops, I);
          begin
             case Op.Op is
             when Fast_Forward =>
-               --  We need to see the op before the Fast_Forward to tell if Push_Back
-               --  to Target_Token_Index is ok.
-               Fast_Forward_Seen := True;
+               --  Normally any Push_Back must be done before any Insert or Delete,
+               --  to eliminate duplicate results from push_back/reduce before and
+               --  after delete (see test_mckenzie_recover.adb Extra_Begin).
+               --  Fast_Forward resets that.
+               --
+               --  Push_Back/Undo_Reduce into a Fast_Forward region is ok, but not
+               --  all of a Fast_Forward; that would just repeat the same ops.
+               Fast_Forward_Seen        := True;
+               Fast_Forward_First_Index := Op.FF_First_Index;
 
             when Undo_Reduce =>
-               --  We don't know what the new terminal is from this op. We'll just
-               --  have to trust the programmers.
-               return True;
+               --  We allow mixing push_back and undo_reduce in any order, so we can
+               --  get to an arbitrary point inside a nonterm to do insert/delete.
+
+               if Op.UR_Token_Index = Invalid_Sequential_Index then
+                  --  Undo_Reduced token was empty; need to see the next one.
+                  null;
+               else
+                  if Target_Index = Invalid_Sequential_Index then
+                     --  Target token is empty; it does not cross anything.
+                     return True;
+                  end if;
+
+                  --  No point in checking Fast_Forward_Seen here; we don't have the
+                  --  last terminal index of Op. So this allows Push_Back/Undo_Reduce of the
+                  --  entire fast_forward.
+
+                  case Target_Op is
+                  when Undo_Reduce =>
+                     --  Undo_Reduce after Undo_Reduce; must undo part or all of the original
+                     --  nonterm; see test_mckenzie_recover.adb Missing_Name_2.
+                     return Target_Index >= Op.UR_Token_Index;
+
+                  when Push_Back =>
+                     --  Push_Back after Undo_Reduce; must push back only part of the
+                     --  unreduced nonterm, unless overridden by Language_Fixes.
+                     --  test/ada_mode-recover_block_name_mismatch.adb
+                     return
+                       (if Push_Back_Undo_Reduce
+                        then True
+                        else Target_Index > Op.UR_Token_Index);
+
+                  when others =>
+                     raise SAL.Programmer_Error;
+                  end case;
+               end if;
 
             when Push_Back =>
-               --  If neither the proposed Push_Back nor Op is for an empty token,
-               --  successive Push_Backs have decreasing targets; see
-               --  test_mckenzie_recover.adb Missing_Name_0.
-               --
-               --  However, if there is a Fast_Forward between two Push_Backs,
-               --  Target_Token_Index must be >= Op.PB_Token_Index. See
-               --  ada-mode-recover_27.adb.
-               --
-               --  If called from Undo_Reduce_Valid where the Undo_Reduce token is
-               --  empty, we get Target_Token_Index = Op.PB_Token_Index.
-               return Target_Token_Index = Invalid_Token_Index or else
-                 Op.PB_Token_Index = Invalid_Token_Index or else
-                 (if Fast_Forward_Seen
-                  then Target_Token_Index > Op.PB_Token_Index
-                  else Target_Token_Index <= Op.PB_Token_Index);
+               if Op.PB_Token_Index = Invalid_Sequential_Index then
+                  --  Pushed_Back token was empty; need to see the next one.
+                  null;
+               else
+                  if Target_Index = Invalid_Sequential_Index then
+                     --  Target token is empty; it does not cross anything.
+                     return True;
+                  end if;
+
+                  case Target_Op is
+                  when Undo_Reduce =>
+                     if Fast_Forward_Seen then
+                        --  Unreducing a token somewhere in the push_back.
+                        return Target_Index >= Op.PB_Token_Index;
+                     else
+                        --  Need to keep going to see if we cross a fast_forward
+                        null;
+                     end if;
+
+                  when Push_Back =>
+                     --  Between Fast_Forwards, successive non-empty Push_Back
+                     --  have decreasing targets; see test_mckenzie_recover.adb
+                     --  Missing_Name_0.
+                     --
+                     --  If the target push_back crosses a Fast_Forward, it must not cross
+                     --  a preceding op; Target_Index must be >= Op.PB_Token_Index. See
+                     --  ada-mode-recover_27.adb.
+                     if Fast_Forward_Seen then
+                        --  Target push_back/undo_reduce does not cross the previous
+                        --  push_back/undo_reduce.
+                        return Target_Index > Op.PB_Token_Index;
+
+                     else
+                        --  Need to keep going to see if we cross a fast_forward
+                        null;
+                     end if;
+
+                  when others =>
+                     raise SAL.Programmer_Error;
+                  end case;
+               end if;
 
             when Insert =>
-               --  If Target_Token_Index = Op.Ins_Token_Index, we want the edit
-               --  point to be at the same token as before; that's ok.
-               --
-               --  If Target_Token_Index > Ins_Token_Index, the Push_Back is partway
-               --  into a Fast_Forward.
-               return Fast_Forward_Seen and
-                 (Target_Token_Index = Invalid_Token_Index or else
-                    Target_Token_Index >= Op.Ins_Token_Index);
+               return Check_Insert_Delete (Op);
 
             when Delete =>
-               --  As for Insert
-               return Fast_Forward_Seen and
-                 (Target_Token_Index = Invalid_Token_Index or else
-                    Target_Token_Index >= Op.Del_Token_Index);
+               return Check_Insert_Delete (Op);
             end case;
          end;
       end loop;
-      --  We can only get here if the only ops in Ops are Fast_Forward,
-      --  which is a programming error.
-      pragma Assert (False);
-      raise Bad_Config;
-   end Push_Back_Valid;
 
-   procedure Push_Back (Config : in out Configuration)
+      --  We get here if we are looking for the next Push_Back or
+      --  Undo_Reduce. In effect, Op.*_Token_Index is now 0, which means any
+      --  Push_Back or Undo_Reduce is ok.
+      return True;
+   end Push_Back_Undo_Reduce_Valid;
+
+   function Push_Back_Valid
+     (Super                 : in out Base.Supervisor;
+      Shared_Parser :         in out LR.Parser.Parser;
+      Config                : in     Configuration;
+      Push_Back_Undo_Reduce : in     Boolean)
+     return Boolean
    is
-      use Config_Op_Arrays, Config_Op_Array_Refs;
-
-      Item        : constant Recover_Stack_Item := Config.Stack.Pop;
-      Token_Index : constant Base_Token_Index   := Item.Token.Min_Terminal_Index;
-
-      function Compare (Left : in Base_Token_Index; Right : in Config_Op) return Boolean
-        is (case Right.Op is
-            when Fast_Forward    => False,
-            when Undo_Reduce     => False,
-            when Push_Back       => False,
-            when Insert => Left < Right.Ins_Token_Index,
-            when Delete => Left < Right.Del_Token_Index);
-      --  If Left = Right.Token_Index, we assume the Right ops go _after_
-      --  the Left, so the Left do not need to be repeated.
+      Tree : Syntax_Trees.Tree renames Shared_Parser.Tree;
    begin
-      if Token_Index /= Invalid_Token_Index then
-         Config.Current_Shared_Token := Token_Index;
-         for I in First_Index (Config.Ops) .. Last_Index (Config.Ops) loop
-            if Compare (Token_Index, Constant_Ref (Config.Ops, I)) then
-               if Is_Full (Config.Insert_Delete) then
-                  raise Bad_Config;
-               end if;
-               Append (Config.Insert_Delete, Constant_Ref (Config.Ops, I));
-            end if;
-         end loop;
+      if Config.Stack.Depth <= 1 then
+         return False;
       end if;
 
-      if Is_Full (Config.Ops) then
-         raise Bad_Config;
+      declare
+         use Syntax_Trees;
+         Token : Recover_Token renames Config.Stack.Peek.Token;
+         First_Terminal : constant Node_Access := Tree.First_Terminal (Token);
+      begin
+         return
+           --  Push_Back needs a terminal node or an empty nonterm.
+           --  ada_mode-recover_38.adb partial parse,
+           --  ada_mode-recover_indent_3.adb partial parse.
+           (not Token.Virtual or else
+              (Token.Virtual and then
+                 ((Is_Terminal (Token.ID, Tree.Lexer.Descriptor.all) and
+                    Token.First_Terminal /= Invalid_Node_Access) or
+                    Is_Nonterminal (Token.ID, Tree.Lexer.Descriptor.all))))
+
+           and then
+           (Push_Back_Undo_Reduce or not Tree.Contains_Virtual_Terminal (Token)) and then
+           --  Normally, if Contains_Virtual_Terminal, Token was inserted earlier
+           --  in this or a previous recover session; no point in recomputing it.
+           --  However, Language_Fixes can push back a virtual nonterm in order
+           --  to insert something before it; ada_mode-interactive_01.adb
+
+           --  We allow both Push_Back and Undo_Reduce of empty nonterms
+           --  (First_Terminal = Invalid_Node_Access); Push_Back is easier to use
+           --  in Language_Fixes, Undo_Reduce is required to change the stack
+           --  state to allow completing a production with a non-empty nonterm.
+           (Recover_Op_Arrays.Length (Config.Ops) = 0 or else
+              Push_Back_Undo_Reduce_Valid
+                (Super, Shared_Parser,
+                 Push_Back,
+                 First_Terminal,
+                 Config.Ops,
+                 Recover_Op_Arrays.Last_Index (Config.Ops),
+                 Push_Back_Undo_Reduce));
+      end;
+   end Push_Back_Valid;
+
+   procedure Push_Back
+     (Super                 : in out Base.Supervisor;
+      Shared_Parser         : in out LR.Parser.Parser;
+      Config                : in out Configuration;
+      Push_Back_Undo_Reduce : in     Boolean)
+   is begin
+      --  We relax the "don't push back into previous recover" restriction
+      --  for Language_Fixes; see test_mckenzie_recover.adb Missing_Name_5.
+      if not Push_Back_Valid (Super, Shared_Parser, Config, Push_Back_Undo_Reduce => Push_Back_Undo_Reduce) then
+         raise Invalid_Case;
       end if;
-      Append (Config.Ops, (Push_Back, Item.Token.ID, Config.Current_Shared_Token));
+
+      Do_Push_Back (Shared_Parser.Tree, Config);
    end Push_Back;
 
-   procedure Push_Back_Check (Config : in out Configuration; Expected_ID : in Token_ID)
+   procedure Push_Back_Check
+     (Super                 : in out Base.Supervisor;
+      Shared_Parser         : in out LR.Parser.Parser;
+      Config                : in out Configuration;
+      Expected_ID           : in     Token_ID;
+      Push_Back_Undo_Reduce : in     Boolean)
    is begin
-      Check (Config.Stack.Peek (1).Token.ID, Expected_ID);
-      Push_Back (Config);
+      Check
+        (Shared_Parser.Tree.Element_ID (Config.Stack.Peek (1).Token),
+         Expected_ID,
+         Shared_Parser.Tree.Lexer.Descriptor.all);
+      Push_Back (Super, Shared_Parser, Config, Push_Back_Undo_Reduce);
    end Push_Back_Check;
 
-   procedure Push_Back_Check (Config : in out Configuration; Expected : in Token_ID_Array)
+   procedure Push_Back_Check
+     (Super                 : in out Base.Supervisor;
+      Shared_Parser         : in out LR.Parser.Parser;
+      Config                : in out Configuration;
+      Expected              : in     Token_ID_Array;
+      Push_Back_Undo_Reduce : in     Boolean)
    is begin
       for ID of Expected loop
-         if Push_Back_Valid (Config) then
-            Push_Back_Check (Config, ID);
-         else
-            raise Bad_Config;
-         end if;
+         Push_Back_Check (Super, Shared_Parser, Config, ID, Push_Back_Undo_Reduce);
       end loop;
    end Push_Back_Check;
 
    procedure Put
      (Message      : in     String;
-      Trace        : in out WisiToken.Trace'Class;
-      Parser_Label : in     Natural;
-      Terminals    : in     Base_Token_Arrays.Vector;
+      Tree         : in     Syntax_Trees.Tree;
+      Parser_Label : in     Syntax_Trees.Stream_ID;
       Config       : in     Configuration;
-      Task_ID      : in     Boolean := True;
       Strategy     : in     Boolean := False)
+   --  For debugging output
    is
-      --  For debugging output
-
-      --  Build a string, call trace.put_line once, so output from multiple
-      --  tasks is not interleaved (mostly).
-      use Config_Op_Array_Refs;
+      use Recover_Op_Array_Refs;
       use all type Ada.Strings.Unbounded.Unbounded_String;
-      use all type WisiToken.Semantic_Checks.Check_Status_Label;
+      use all type Bounded_Streams.Cursor;
+      use all type WisiToken.Syntax_Trees.In_Parse_Actions.Status_Label;
+      use all type WisiToken.Syntax_Trees.Recover_Token;
 
-      Descriptor : WisiToken.Descriptor renames Trace.Descriptor.all;
+      Descriptor : WisiToken.Descriptor renames Tree.Lexer.Descriptor.all;
 
       Result : Ada.Strings.Unbounded.Unbounded_String :=
-        (if Task_ID then +"task" & Task_Attributes.Value'Image else +"") &
-        Integer'Image (Parser_Label) & ": " &
+        +" " & Tree.Trimmed_Image (Parser_Label) & ": " & --  leading space 
for consistency with existing tests.
         (if Message'Length > 0 then Message & ":" else "");
    begin
       Result := Result & Natural'Image (Config.Cost);
@@ -1231,75 +1731,145 @@ package body WisiToken.Parse.LR.McKenzie_Recover is
       else
          Result := Result & ", ";
       end if;
-      if Config.Check_Status.Label /= Ok then
-         Result := Result & Semantic_Checks.Check_Status_Label'Image (Config.Check_Status.Label) & " ";
-      elsif Config.Error_Token.ID /= Invalid_Token_ID then
-         Result := Result & "Error " & Image (Config.Error_Token, Descriptor) & " ";
+      if Config.In_Parse_Action_Status.Label /= Ok then
+         Result := Result & Config.In_Parse_Action_Status.Label'Image & " ";
+      elsif Config.Error_Token /= Syntax_Trees.Invalid_Recover_Token then
+         Result := Result & "Error " & Syntax_Trees.Image (Tree, Config.Error_Token) & " ";
       end if;
-      Result := Result & Image (Config.Stack, Descriptor, Depth => 1);
+      Result := Result & Image (Config.Stack, Tree, Depth => 1);
 
-      if Config.Current_Insert_Delete = No_Insert_Delete then
-         Result := Result & "|" & Image (Config.Current_Shared_Token, 
Terminals, Descriptor) & "|";
-      else
+      if Config.Current_Insert_Delete /= No_Insert_Delete then
          Result := Result & "/" & Trimmed_Image (Config.Current_Insert_Delete) 
& ":" &
            Image (Constant_Ref (Config.Insert_Delete, 
Config.Current_Insert_Delete), Descriptor) & "/";
+
+      elsif Config.Input_Stream.First /= Bounded_Streams.No_Element then
+         Result := Result & "\" & Tree.Image
+           (Config.Input_Stream (Config.Input_Stream.First), Node_Numbers => 
True) & "\";
+
+      else
+         Result := Result & "|" & Tree.Image (Config.Current_Shared_Token, 
Terminal_Node_Numbers => True) & "|";
       end if;
 
       Result := Result & Image (Config.Ops, Descriptor);
-      if Config.Minimal_Complete_State /= None then
-         Result := Result & " minimal_complete " & 
Config.Minimal_Complete_State'Image;
-      end if;
-      Trace.Put_Line (-Result);
+      Tree.Lexer.Trace.Put_Line (-Result);
    end Put;
 
    procedure Put_Line
-     (Trace        : in out WisiToken.Trace'Class;
-      Parser_Label : in     Natural;
-      Message      : in     String;
-      Task_ID      : in     Boolean := True)
+     (Tree         : in     Syntax_Trees.Tree;
+      Parser_Label : in     Syntax_Trees.Stream_ID;
+      Message      : in     String)
    is begin
-      Trace.Put_Line
-        ((if Task_ID then "task" & Task_Attributes.Value'Image else "") &
-           Integer'Image (Parser_Label) & ": " & Message);
+      Tree.Lexer.Trace.Put_Line (Tree.Trimmed_Image (Parser_Label) & ": " & Message);
    end Put_Line;
 
-   function Undo_Reduce
-     (Stack : in out Recover_Stacks.Stack;
-      Tree  : in     Syntax_Trees.Tree)
-     return Ada.Containers.Count_Type
+   function Undo_Reduce_Valid
+     (Super         : in out Base.Supervisor;
+      Shared_Parser : in out LR.Parser.Parser;
+      Config        : in out Configuration)
+     return Boolean
    is
-      Nonterm_Item : constant Recover_Stack_Item := Recover_Stacks.Pop (Stack);
+      Tree : Syntax_Trees.Tree renames Shared_Parser.Tree;
    begin
+      if Config.Stack.Depth = 0 then
+         return False;
+      end if;
+
       declare
-         Children : constant Valid_Node_Index_Array := Tree.Children (Nonterm_Item.Tree_Index);
+         use Recover_Op_Arrays;
+
+         Token : Syntax_Trees.Recover_Token renames Config.Stack.Peek.Token;
       begin
-         for C of Children loop
-            Stack.Push ((Tree.State (C), C, Tree.Recover_Token (C)));
-         end loop;
-         return Children'Length;
+         if Token.Virtual or else not Tree.Is_Nonterm (Token.Element_Node) then
+            return False;
+
+         elsif Length (Config.Ops) = 0 then
+            return True;
+
+         else
+            --  Undo_Reduce needs to know what tokens the nonterm contains, to
+            --  push them on the stack. Thus we need a valid Tree first terminal
+            --  node, or an empty nonterm.
+            return
+              (Tree.Child_Count (Token.Node) = 0 and then
+                 Undo_Reduce_Op_Order_Valid (Config.Ops))
+              or else
+              (Push_Back_Undo_Reduce_Valid
+                 (Super, Shared_Parser, Undo_Reduce,  
Tree.First_Sequential_Terminal (Token.Node), Config.Ops,
+                  Last_Index (Config.Ops),
+                  Push_Back_Undo_Reduce => False));
+         end if;
       end;
-   end Undo_Reduce;
+   end Undo_Reduce_Valid;
+
+   procedure Unchecked_Undo_Reduce
+     (Super         : in out Base.Supervisor;
+      Shared_Parser : in out LR.Parser.Parser;
+      Config        : in out Configuration)
+   is
+      Table        : Parse_Table renames Shared_Parser.Table.all;
+      Tree         : Syntax_Trees.Tree renames Shared_Parser.Tree;
+      Stack        : Recover_Stacks.Stack renames Config.Stack;
+      Nonterm_Item : constant Recover_Stack_Item := Recover_Stacks.Pop (Stack);
+
+      First_Terminal : constant Syntax_Trees.Node_Access := Tree.First_Source_Terminal
+        (Nonterm_Item.Token.Element_Node, Trailing_Non_Grammar => False, Following => False);
+      --  If First_Terminal (element) is virtual, it might be from current
+      --  error recovery, not the shared_stream, so extend_sequential_index
+      --  would not give it an index.
+
+      Prev_State : State_Index                             := Stack.Peek.State;
+      Children   : constant Syntax_Trees.Node_Access_Array := Tree.Children (Nonterm_Item.Token.Element_Node);
+   begin
+      --  We don't move an In_Parse_Action from Nonterm to First_Terminal
+      --  here, since we are not updating the tree; that's done in Recover
+      --  when the recover actions are applied to the parser state.
+      for C of Children loop
+         if Is_Terminal (Tree.ID (C), Tree.Lexer.Descriptor.all) then
+            Prev_State := Shift_State (Action_For (Table, Prev_State, Tree.ID (C)));
+         else
+            Prev_State := Goto_For (Table, Prev_State, Tree.ID (C));
+         end if;
+         if Stack.Is_Full then
+            raise Bad_Config;
+         end if;
+         Stack.Push ((Prev_State, Tree.Get_Recover_Token (C)));
+      end loop;
+
+      if First_Terminal /= Syntax_Trees.Invalid_Node_Access and then
+        Tree.ID (First_Terminal) /= Tree.Lexer.Descriptor.SOI_ID
+      then
+         Super.Extend_Sequential_Index (Shared_Parser, First_Terminal, Positive => False);
+      end if;
+
+      Recover_Op_Arrays.Append
+        (Config.Ops,
+         (Undo_Reduce, Tree.ID (Nonterm_Item.Token.Element_Node), Children'Length,
+         Tree.Get_Sequential_Index (First_Terminal)));
+   end Unchecked_Undo_Reduce;
 
    procedure Undo_Reduce_Check
-     (Config   : in out Configuration;
-      Tree     : in     Syntax_Trees.Tree;
-      Expected : in     Token_ID)
+     (Super         : in out Base.Supervisor;
+      Shared_Parser : in out LR.Parser.Parser;
+      Config        : in out Configuration;
+      Expected      : in     Token_ID)
    is begin
-      pragma Assert (Config.Stack.Depth > 1);
-      Check (Config.Stack.Peek (1).Token.ID, Expected);
-      Config_Op_Arrays.Append (Config.Ops, (Undo_Reduce, Expected, Undo_Reduce (Config.Stack, Tree)));
-   exception
-   when SAL.Container_Full =>
-      raise Bad_Config;
+      if not Undo_Reduce_Valid (Super, Shared_Parser, Config) then
+         raise Invalid_Case;
+      end if;
+      Check
+        (Shared_Parser.Tree.Element_ID (Config.Stack.Peek (1).Token), Expected,
+         Shared_Parser.Tree.Lexer.Descriptor.all);
+      Unchecked_Undo_Reduce (Super, Shared_Parser, Config);
    end Undo_Reduce_Check;
 
    procedure Undo_Reduce_Check
-     (Config   : in out Configuration;
-      Tree     : in     Syntax_Trees.Tree;
-      Expected : in     Token_ID_Array)
+     (Super         : in out Base.Supervisor;
+      Shared_Parser : in out LR.Parser.Parser;
+      Config        : in out Configuration;
+      Expected      : in     Token_ID_Array)
    is begin
       for ID of Expected loop
-         Undo_Reduce_Check (Config, Tree, ID);
+         Undo_Reduce_Check (Super, Shared_Parser, Config, ID);
       end loop;
    end Undo_Reduce_Check;
 
diff --git a/wisitoken-parse-lr-mckenzie_recover.ads b/wisitoken-parse-lr-mckenzie_recover.ads
index 2f33cd23fd..219c87da00 100644
--- a/wisitoken-parse-lr-mckenzie_recover.ads
+++ b/wisitoken-parse-lr-mckenzie_recover.ads
@@ -11,7 +11,7 @@
 --  [Grune 2008] Parsing Techniques, A Practical Guide, Second
 --  Edition. Dick Grune, Ceriel J.H. Jacobs.
 --
---  Copyright (C) 2017 - 2020 Free Software Foundation, Inc.
+--  Copyright (C) 2017 - 2022 Free Software Foundation, Inc.
 --
 --  This library is free software;  you can redistribute it and/or modify it
 --  under terms of the  GNU General Public License  as published by the Free
@@ -26,15 +26,23 @@
 
 pragma License (Modified_GPL);
 
-with Ada.Task_Attributes;
 with WisiToken.Parse.LR.Parser;
-with WisiToken.Lexer;
+with WisiToken.Parse.LR.Parser_Lists;
+limited with WisiToken.Parse.LR.McKenzie_Recover.Base;
 package WisiToken.Parse.LR.McKenzie_Recover is
+   use all type WisiToken.Syntax_Trees.Stream_Index;
    use all type Ada.Containers.Count_Type;
 
    Bad_Config : exception;
    --  Raised when a config is determined to violate some programming
-   --  convention; abandon it.
+   --  convention; abandon it. In Debug_Mode, report it, so it can be
+   --  fixed. We don't use SAL.Programmer_Error for this, because the
+   --  programming bug can easily be ignored by abandoning the config.
+
+   Invalid_Case : exception;
+   --  Raised to abandon error recover cases that don't apply, when they
+   --  are not easily abandoned by 'if' or 'case'. We don't use
+   --  Bad_Config for that, because it is not a programmer error.
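   --  A minimal usage sketch of the two exceptions, as it might appear in
   --  the body of a language-specific child package; Try_Fix_Sketch,
   --  END_ID and SEMICOLON_ID are hypothetical, only Push_Back_Check,
   --  Insert, Invalid_Case and Bad_Config come from this package:
   --
   --     procedure Try_Fix_Sketch
   --       (Super         : in out Base.Supervisor;
   --        Shared_Parser : in out LR.Parser.Parser;
   --        Config        : in     Configuration)
   --     is
   --        New_Config : Configuration := Config;
   --     begin
   --        --  Each edit raises Invalid_Case if this case does not apply,
   --        --  Bad_Config on a violated convention; both abandon New_Config.
   --        Push_Back_Check
   --          (Super, Shared_Parser, New_Config, END_ID,
   --           Push_Back_Undo_Reduce => False);
   --        Insert (Super, Shared_Parser, New_Config, SEMICOLON_ID);
   --        --  ... then enqueue New_Config for the parser to try.
   --     exception
   --     when Invalid_Case | Bad_Config =>
   --        null;
   --     end Try_Fix_Sketch;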
 
    type Recover_Status is (Fail_Check_Delta, Fail_Enqueue_Limit, Fail_No_Configs_Left, Fail_Programmer_Error, Success);
 
@@ -51,86 +59,98 @@ package WisiToken.Parse.LR.McKenzie_Recover is
    --  Similarly, setting this true keeps all solutions that are found,
    --  and forces at least three.
 
+   procedure Clear_Sequential_Index (Shared_Parser : in out WisiToken.Parse.LR.Parser.Parser);
+   --  Reset nodes set by Set_Sequential_Index.
+
 private
 
    ----------
-   --  Visible for language-specific child packages. Alphabetical.
-
-   procedure Check (ID : Token_ID; Expected_ID : in Token_ID)
-   with Inline => True;
-   --  Check that ID = Expected_ID; raise Assertion_Error if not.
-   --  Implemented using 'pragma Assert'.
-
-   function Current_Token
-     (Terminals                 :         in     Base_Token_Arrays.Vector;
-      Terminals_Current         :         in out Base_Token_Index;
-      Restore_Terminals_Current :            out WisiToken.Base_Token_Index;
-      Insert_Delete             : aliased in out Config_Op_Arrays.Vector;
-      Current_Insert_Delete     :         in out SAL.Base_Peek_Type)
-     return Base_Token;
-   --  Return the current token, from either Terminals or Insert_Delete;
-   --  set up for Next_Token.
+   --  Visible for child packages. Alphabetical.
    --
-   --  See Next_Token for more info.
-
-   function Current_Token_ID_Peek
-     (Terminals             :         in Base_Token_Arrays.Vector;
-      Terminals_Current     :         in Base_Token_Index;
-      Insert_Delete         : aliased in Config_Op_Arrays.Vector;
-      Current_Insert_Delete :         in SAL.Base_Peek_Type)
-     return Token_ID;
-   --  Return the current token from either Terminals or
-   --  Insert_Delete, without setting up for Next_Token.
-
-   procedure Current_Token_ID_Peek_3
-     (Terminals             :         in     Base_Token_Arrays.Vector;
-      Terminals_Current     :         in     Base_Token_Index;
-      Insert_Delete         : aliased in     Config_Op_Arrays.Vector;
-      Current_Insert_Delete :         in     SAL.Base_Peek_Type;
-      Tokens                :            out Token_ID_Array_1_3);
-   --  Return the current token (in Tokens (1)) from either Terminals or
-   --  Insert_Delete, without setting up for Next_Token. Return the two
-   --  following tokens in Tokens (2 .. 3).
+   --  The various Check subprograms raise Bad_Config for check fail, and
+   --  there are no preconditions, so the checks are always performed.
+
+   type Config_Stream_Parents (Stream : access constant Bounded_Streams.List) is
+   record
+      --  Like Syntax_Trees.Stream_Node_Parents, but using a Configuration
+      --  input stream; does not continue into Tree streams.
+      Element : Bounded_Streams.Cursor;
+      Node    : Syntax_Trees.Node_Access;
+      Parents : Syntax_Trees.Node_Stacks.Stack;
+   end record;
+
+   type Peek_Sequential_State (Stream : access constant Bounded_Streams.List) is
+   record
+      --  Like Syntax_Trees.Stream_Node_Parents, but using a Configuration
+      --  input stream; continues forward into Tree shared stream. There is
+      --  no Prev operation; cannot continue backward into config stack.
+      Input_Terminal      : Config_Stream_Parents (Stream);   -- in Config.Input_Stream
+      Sequential_Terminal : Syntax_Trees.Stream_Node_Parents; -- in Tree.Shared_Stream
+   end record;
+
+   procedure Check
+     (ID          : in Token_ID;
+      Expected_ID : in Token_ID;
+      Descriptor  : in WisiToken.Descriptor)
+   with Inline => True;
+   --  Check that ID = Expected_ID; raise Bad_Config if not.
 
    procedure Delete_Check
-     (Terminals : in     Base_Token_Arrays.Vector;
-      Config    : in out Configuration;
-      ID        : in     Token_ID);
-   --  Check that Terminals (Config.Current_Shared_Token) = ID. Append a
-   --  Delete op to Config.Ops, and insert it in Config.Insert_Delete in
-   --  token_index order.
+     (Super         : in out Base.Supervisor;
+      Shared_Parser : in out LR.Parser.Parser;
+      Config        : in out Configuration;
+      ID            : in     Token_ID);
+   --  Check that the next input token in Config has ID. Append a Delete op
+   --  to Config.Ops, and append it to Config.Insert_Delete.
+   --
+   --  ID = Invalid_Token_ID skips the check.
    --
    --  This or the next routine must be used instead of Config.Ops.Append
    --  (Delete...) unless the code also takes care of changing
-   --  Config.Current_Shared_Token. Note that this routine does _not_
-   --  increment Config.Current_Shared_Token, so it can only be used to
-   --  delete one token.
+   --  Config.Current_Shared_Token or Config.Input_Stream. Note that this
+   --  routine does _not_ increment Config.Current_Shared_Token or
+   --  Config.Input_Stream, so it can only be used to delete one token.
 
    procedure Delete_Check
-     (Terminals : in     Base_Token_Arrays.Vector;
-      Config    : in out Configuration;
-      Index     : in out     WisiToken.Token_Index;
-      ID        : in     Token_ID);
-   --  Check that Terminals (Index) = ID. Append a Delete op to
-   --  Config.Ops, and insert it in Config.Insert_Delete in token_index
-   --  order. Increments Index, for convenience when deleting several
-   --  tokens.
-
-   procedure Delete
-     (Terminals : in     Base_Token_Arrays.Vector;
-      Config    : in out Configuration;
-      Index     : in out WisiToken.Token_Index);
-   --  Same as Delete_Check, without the check.
+     (Super         :         in out Base.Supervisor;
+      Shared_Parser :         in out LR.Parser.Parser;
+      Config        : aliased in out Configuration;
+      IDs           :         in     Token_ID_Array);
+   --  Call Delete_Check for each ID in IDs, incrementing to the next
+   --  token for each.
+
+   procedure Delete_Check
+     (Super         : in out Base.Supervisor;
+      Shared_Parser : in out LR.Parser.Parser;
+      Config        : in out Configuration;
+      Peek_State    : in out Peek_Sequential_State;
+      ID            : in     Token_ID);
+   --  If ID is not Invalid_Token_ID, check that
+   --  Parse.Peek_Sequential_Terminal (Peek_State) has ID. Append a Delete op
+   --  to Config.Ops, and append it to Config.Insert_Delete. Then
+   --  increment Peek_State to the next shared terminal.
+   --
+   --  Peek_State is initialized by Peek_Sequential_Start.
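   --  A minimal usage sketch of the Token_ID_Array form of Delete_Check,
   --  assuming hypothetical token IDs END_ID and SEMICOLON_ID; it deletes
   --  an "end ;" starting at the current input point of New_Config, which
   --  must be aliased for this form:
   --
   --     declare
   --        New_Config : aliased Configuration := Config;
   --     begin
   --        Delete_Check
   --          (Super, Shared_Parser, New_Config, (END_ID, SEMICOLON_ID));
   --     end;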
+
+   procedure Do_Push_Back
+     (Tree   : in     Syntax_Trees.Tree;
+      Config : in out Configuration)
+   with Pre => not Recover_Op_Arrays.Is_Full (Config.Ops);
+   --  Push back Config.Stack top to Config.Input_Stream. Appends to
+   --  Config.Ops. Nonterms are not broken down. We assume caller has
+   --  checked Push_Back_Valid.
 
    function Find_ID
-     (Config         : in     Configuration;
-      ID             : in     Token_ID)
+     (Tree   : in Syntax_Trees.Tree;
+      Config : in Configuration;
+      ID     : in Token_ID)
      return Boolean;
    --  Search Config.Stack for a token with ID, starting at
    --  stack top. Return True if found, False if not.
 
    procedure Find_ID
-     (Config         : in     Configuration;
+     (Tree           : in     Syntax_Trees.Tree;
+      Config         : in     Configuration;
       ID             : in     Token_ID;
       Matching_Index : in out SAL.Peek_Type);
    --  Search Config.Stack for a token with ID, starting at
@@ -138,7 +158,8 @@ private
    --  If not found, Matching_Index = Config.Stack.Depth.
 
    procedure Find_ID
-     (Config         : in     Configuration;
+     (Tree           : in     Syntax_Trees.Tree;
+      Config         : in     Configuration;
       IDs            : in     Token_ID_Set;
       Matching_Index : in out SAL.Peek_Type);
    --  Search Config.Stack for a token with ID in IDs, starting at
@@ -158,7 +179,7 @@ private
 
    procedure Find_Matching_Name
      (Config              : in     Configuration;
-      Lexer               : access constant WisiToken.Lexer.Instance'Class;
+      Tree                : in     Syntax_Trees.Tree;
       Name                : in     String;
       Matching_Name_Index : in out SAL.Peek_Type;
       Case_Insensitive    : in     Boolean);
@@ -168,7 +189,7 @@ private
 
    procedure Find_Matching_Name
      (Config              : in     Configuration;
-      Lexer               : access constant WisiToken.Lexer.Instance'Class;
+      Tree                : in     Syntax_Trees.Tree;
       Name                : in     String;
       Matching_Name_Index : in out SAL.Peek_Type;
       Other_ID            : in     Token_ID;
@@ -180,131 +201,188 @@ private
    --
    --  Also count tokens with ID = Other_ID.
 
-   procedure Insert (Config : in out Configuration; ID : in Token_ID);
-   --  Append an Insert op at Config.Current_Shared_Token, to Config.Ops,
-   --  and insert it in Config.Insert_Deleted in token_index order.
-
-   procedure Insert (Config : in out Configuration; IDs : in Token_ID_Array);
+   procedure Insert
+     (Super         : in out Base.Supervisor;
+      Shared_Parser : in out LR.Parser.Parser;
+      Config        : in out Configuration;
+      ID            : in     Token_ID);
+   --  Append an Insert op (before Config.Current_Shared_Token or
+   --  Config.Input_Stream.First) to Config.Ops, and append it to
+   --  Config.Insert_Delete.
+
+   procedure Insert
+     (Super         : in out Base.Supervisor;
+      Shared_Parser : in out LR.Parser.Parser;
+      Config        : in out Configuration;
+      IDs           : in     Token_ID_Array);
    --  Call Insert for each item in IDs.
 
-   procedure Insert (Config : in out Configuration; Index : in WisiToken.Token_Index; ID : in Token_ID);
-   --  Same as Insert, but at Index, not Config.Current_Shared_Token.
-
-   function Next_Token
-     (Terminals                 :         in     Base_Token_Arrays.Vector;
-      Terminals_Current         :         in out Base_Token_Index;
-      Restore_Terminals_Current :         in out Base_Token_Index;
-      Insert_Delete             : aliased in out Config_Op_Arrays.Vector;
-      Current_Insert_Delete     :         in out SAL.Base_Peek_Type)
-     return Base_Token;
-   --  Return the next token, from either Terminals or Insert_Delete;
-   --  update Terminals_Current or Current_Insert_Delete.
+   procedure Insert
+     (Super         : in out Base.Supervisor;
+      Shared_Parser : in out LR.Parser.Parser;
+      Config        : in out Configuration;
+      Before        : in     Syntax_Trees.Valid_Node_Access;
+      ID            : in     Token_ID);
+   --  Same as Insert, but before Before.
+
+   function Peek_Sequential_Start
+     (Super         :         in out Base.Supervisor;
+      Shared_Parser :         in out LR.Parser.Parser;
+      Config        : aliased in     Configuration)
+     return Peek_Sequential_State;
+
+   function Peek_Sequential_Terminal (State : in Peek_Sequential_State) return Syntax_Trees.Node_Access;
+   --  Return State current sequential terminal; set by
+   --  Peek_Sequential_Start and Peek_Next_Sequential_Terminal.
    --
-   --  If result is Insert_Delete.Last_Index, Current_Insert_Delete =
-   --  Last_Index; Insert_Delete is cleared and Current_Insert_Delete
-   --  reset on next call.
+   --  Returns Invalid_Node_Access when the current sequential terminal
+   --  is past EOI, possibly because shared EOI had an error, and was
+   --  found in the config input stream.
+
+   procedure Peek_Next_Sequential_Terminal
+     (Tree  : in     Syntax_Trees.Tree;
+      State : in out Peek_Sequential_State);
+   --  Step State to next sequential terminal. Can step past EOI.
+
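Taken together, Peek_Sequential_Start, Peek_Sequential_Terminal and
Peek_Next_Sequential_Terminal support a simple scan over the sequential terminals seen by
a config. A minimal sketch, assuming Super, Shared_Parser and Config are the formals of an
enclosing recover subprogram (illustration only, not part of this patch):

   declare
      State : Peek_Sequential_State := Peek_Sequential_Start (Super, Shared_Parser, Config);
   begin
      loop
         exit when Peek_Sequential_Terminal (State) = Syntax_Trees.Invalid_Node_Access;
         --  ... examine Shared_Parser.Tree.ID (Peek_Sequential_Terminal (State)) here ...
         Peek_Next_Sequential_Terminal (Shared_Parser.Tree, State);
      end loop;
   end;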
+   procedure Set_Initial_Sequential_Index
+     (Parsers    : in out WisiToken.Parse.LR.Parser_Lists.List;
+      Tree       : in     Syntax_Trees.Tree;
+      Streams    : in out Syntax_Trees.Stream_ID_Array;
+      Terminals  : in out Syntax_Trees.Stream_Node_Parents_Array;
+      Initialize : in     Boolean)
+   with Pre => Terminals'First = 1 and Terminals'Last = (if Initialize then Parsers.Count else Parsers.Count + 1) and
+               Streams'First = Terminals'First and Streams'Last = Terminals'Last,
+     Post => (for all Term of Terminals =>
+                (if Initialize
+                 then Tree.Get_Sequential_Index (Term.Ref.Node) /= Syntax_Trees.Invalid_Sequential_Index
+                 else Tree.Get_Sequential_Index (Term.Ref.Node) = Syntax_Trees.Invalid_Sequential_Index));
+   --  If Initialize, prepare for setting sequential_index in the parse
+   --  streams for error recover. If not Initialize, prepare for clearing
+   --  sequential_index after recover is done. Terminals'Last is the
+   --  shared stream (see body for rationale).
    --
-   --  When done parsing, caller must reset actual Terminals_Current to
-   --  Restore_Terminals_Current.
-   --
-   --  Insert_Delete contains only Insert and Delete ops, in token_index
-   --  order. Those ops are applied when Terminals_Current =
-   --  op.token_index.
+   --  Set Terminals to a common starting point for
+   --  Extend_Sequential_Index, nominally parser Current_Token for
+   --  Initialize, stack top for not Initialize. If Initialize, set
+   --  Sequential_Index in all Terminals nodes to 1; if not Initialize,
+   --  set to Invalid_Sequential_Index.
+
+   procedure Extend_Sequential_Index
+     (Tree      : in     Syntax_Trees.Tree;
+      Streams   : in     Syntax_Trees.Stream_ID_Array;
+      Terminals : in out Syntax_Trees.Stream_Node_Parents_Array;
+      Target    : in     Syntax_Trees.Base_Sequential_Index;
+      Positive  : in     Boolean;
+      Clear     : in     Boolean)
+   with Pre =>
+     (if Clear
+      then (for all Term of Terminals =>
+              Tree.Get_Sequential_Index (Term.Ref.Node) = Syntax_Trees.Invalid_Sequential_Index)
+      else
+        (for all Term of Terminals =>
+           Tree.Get_Sequential_Index (Term.Ref.Node) /= Syntax_Trees.Invalid_Sequential_Index)
+           and then
+          (for some Term of Terminals =>
+             Tree.ID (Term.Ref.Node) /=
+             (if Positive
+              then Tree.Lexer.Descriptor.EOI_ID
+              else Tree.Lexer.Descriptor.SOI_ID) and
+             (if Positive
+              then Tree.Get_Sequential_Index (Term.Ref.Node) < Target
+              else Tree.Get_Sequential_Index (Term.Ref.Node) > Target))),
+     Post =>
+       (for all Term of Terminals =>
+          (if Clear
+           then Tree.Get_Sequential_Index (Term.Ref.Node) = Syntax_Trees.Invalid_Sequential_Index
+           else Tree.Get_Sequential_Index (Term.Ref.Node) /= Syntax_Trees.Invalid_Sequential_Index));
+   --  If Clear, clear all Sequential_Index,
+   --  starting at Terminals, and moving in Positive direction.
+   --  Otherwise, set Sequential_Index in Tree nodes before/after
+   --  Terminals, thru Target.
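The intended calling protocol, inferred from the two contracts above (an outline, not code
from this patch; Parsers, Tree, Streams, Terminals and Target stand for the obvious
actuals in the recover driver):

   --  Before recover starts:
   Set_Initial_Sequential_Index (Parsers, Tree, Streams, Terminals, Initialize => True);
   --  Whenever recover needs indices beyond the currently numbered range:
   Extend_Sequential_Index (Tree, Streams, Terminals, Target, Positive => True, Clear => False);
   --  After recover completes, undo the numbering:
   Set_Initial_Sequential_Index (Parsers, Tree, Streams, Terminals, Initialize => False);
   Extend_Sequential_Index (Tree, Streams, Terminals, Target, Positive => True, Clear => True);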
 
    function Push_Back_Valid
-     (Target_Token_Index : in WisiToken.Base_Token_Index;
-      Ops             : in Config_Op_Arrays.Vector;
-      Prev_Op         : in Positive_Index_Type)
+     (Super                 : in out Base.Supervisor;
+      Shared_Parser :         in out LR.Parser.Parser;
+      Config                : in     Configuration;
+      Push_Back_Undo_Reduce : in     Boolean)
      return Boolean;
-
-   function Push_Back_Valid (Config : in Configuration) return Boolean
-     is (Config.Stack.Depth > 1 and then
-           (not Config.Stack.Peek.Token.Virtual and
-              --  If Virtual, this is from earlier in this recover session; no point
-              --  in trying to redo it.
-              (Config_Op_Arrays.Length (Config.Ops) = 0 or else
-                 Push_Back_Valid
-                   (Config.Stack.Peek.Token.Min_Terminal_Index,
-                    Config.Ops,
-                    Config_Op_Arrays.Last_Index (Config.Ops)))));
-
-   procedure Push_Back (Config : in out Configuration)
-   with Pre => Push_Back_Valid (Config);
-   --  Pop the top Config.Stack item, set Config.Current_Shared_Token to
-   --  the first terminal in that item. If the item is empty,
-   --  Config.Current_Shared_Token is unchanged.
-
-   procedure Push_Back_Check (Config : in out Configuration; Expected_ID : in Token_ID)
-   with Pre => Push_Back_Valid (Config);
-   --  In effect, call Check and Push_Back.
-
-   procedure Push_Back_Check (Config : in out Configuration; Expected : in Token_ID_Array);
-   --  Call Push_Back_Check for each item in Expected.
+   --  True if Push_Back is a valid op for Config.
    --
-   --  Raises Bad_Config if any of the push_backs is invalid.
+   --  Normally Push_Back_Valid forbids push_back of an entire
+   --  Undo_Reduce; Language_Fixes may override that by setting
+   --  Push_Back_Undo_Reduce True.
+
+   procedure Push_Back
+     (Super                 : in out Base.Supervisor;
+      Shared_Parser         : in out LR.Parser.Parser;
+      Config                : in out Configuration;
+      Push_Back_Undo_Reduce : in     Boolean);
+   --  If not Push_Back_Valid, raise Invalid_Case. Otherwise do
+   --  Push_Back.
+   --
+   --  Normally Push_Back_Valid forbids push_back of an entire
+   --  Undo_Reduce; Language_Fixes may override that by setting
+   --  Push_Back_Undo_Reduce True.
+
+   procedure Push_Back_Check
+     (Super                 : in out Base.Supervisor;
+      Shared_Parser         : in out LR.Parser.Parser;
+      Config                : in out Configuration;
+      Expected_ID           : in     Token_ID;
+      Push_Back_Undo_Reduce : in     Boolean);
+   --  Check that Config.Stack top has Expected_ID; raise Bad_Config if
+   --  not. Then call Push_Back.
+
+   procedure Push_Back_Check
+     (Super                 : in out Base.Supervisor;
+      Shared_Parser         : in out LR.Parser.Parser;
+      Config                : in out Configuration;
+      Expected              : in     Token_ID_Array;
+      Push_Back_Undo_Reduce : in     Boolean);
+   --  Call Push_Back_Check for each item in Expected.
 
    procedure Put
      (Message      : in     String;
-      Trace        : in out WisiToken.Trace'Class;
-      Parser_Label : in     Natural;
-      Terminals    : in     Base_Token_Arrays.Vector;
+      Tree         : in     Syntax_Trees.Tree;
+      Parser_Label : in     Syntax_Trees.Stream_ID;
       Config       : in     Configuration;
-      Task_ID      : in     Boolean := True;
       Strategy     : in     Boolean := False);
-   --  Put Message and an image of Config to Trace.
+   --  Put Message and an image of Config to Tree.Lexer.Trace.
 
    procedure Put_Line
-     (Trace        : in out WisiToken.Trace'Class;
-      Parser_Label : in     Natural;
-      Message      : in     String;
-      Task_ID      : in     Boolean := True);
-   --  Put message to Trace, with parser and task info.
+     (Tree         : in     Syntax_Trees.Tree;
+      Parser_Label : in     Syntax_Trees.Stream_ID;
+      Message      : in     String);
+   --  Put message to Tree.Lexer.Trace, with parser and task info.
 
    function Undo_Reduce_Valid
-     (Stack   : in Recover_Stacks.Stack;
-      Tree    : in Syntax_Trees.Tree)
-     return Boolean
-     --  Check if Undo_Reduce is valid when there is no previous Config_Op.
-     --
-     --  Undo_Reduce needs to know what tokens the nonterm contains, to
-     --  push them on the stack. Thus we need a valid Tree index. It is
-     --  tempting to also allow an empty nonterm when Tree_Index is
-     --  invalid, but that fails when the real Undo_Reduce results in
-     --  another empty nonterm on the stack; see test_mckenzie_recover.adb
-     --  Error_During_Resume_3.
-     is (Stack.Depth > 1 and then
-           Stack.Peek.Tree_Index /= Invalid_Node_Index and then
-               Tree.Is_Nonterm (Stack.Peek.Tree_Index));
+     (Super         : in out Base.Supervisor;
+      Shared_Parser : in out LR.Parser.Parser;
+      Config        : in out Configuration)
+     return Boolean;
+   --  True if Undo_Reduce is valid for Config.
 
-   function Undo_Reduce_Valid
-     (Stack   : in Recover_Stacks.Stack;
-      Tree    : in Syntax_Trees.Tree;
-      Ops     : in Config_Op_Arrays.Vector;
-      Prev_Op : in Positive_Index_Type)
-     return Boolean
-   is (Undo_Reduce_Valid (Stack, Tree) and then Push_Back_Valid (Stack.Peek.Token.Min_Terminal_Index, Ops, Prev_Op));
-
-   function Undo_Reduce
-     (Stack : in out Recover_Stacks.Stack;
-      Tree  : in     Syntax_Trees.Tree)
-     return Ada.Containers.Count_Type
-   with Pre => Undo_Reduce_Valid (Stack, Tree);
-   --  Undo the reduction that produced the top stack item, return the
-   --  token count for that reduction.
+   procedure Unchecked_Undo_Reduce
+     (Super         : in out Base.Supervisor;
+      Shared_Parser : in out LR.Parser.Parser;
+      Config        : in out Configuration);
+   --  Undo the reduction that produced the top stack item, append op.
 
    procedure Undo_Reduce_Check
-     (Config   : in out Configuration;
-      Tree     : in     Syntax_Trees.Tree;
-      Expected : in     Token_ID)
+     (Super         : in out Base.Supervisor;
+      Shared_Parser : in out LR.Parser.Parser;
+      Config        : in out Configuration;
+      Expected      : in     Token_ID)
    with Inline => True;
-   --  Call Check, Undo_Reduce.
+   --  If not Undo_Reduce_Valid, raise Invalid_Case. Else call Check,
+   --  Unchecked_Undo_Reduce. Caller should check for space in
+   --  Config.Ops.
 
    procedure Undo_Reduce_Check
-     (Config   : in out Configuration;
-      Tree     : in     Syntax_Trees.Tree;
-      Expected : in     Token_ID_Array);
+     (Super         : in out Base.Supervisor;
+      Shared_Parser : in out LR.Parser.Parser;
+      Config        : in out Configuration;
+      Expected      : in     Token_ID_Array);
    --  Call Undo_Reduce_Check for each item in Expected.
 
-   package Task_Attributes is new Ada.Task_Attributes (Integer, 0);
-
 end WisiToken.Parse.LR.McKenzie_Recover;
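To make the changed profiles concrete: a language-specific fix that previously called
Push_Back_Check (Config, ...) would now read roughly as below. This is a hypothetical
fragment; the token IDs come from a generated *_actions package and Local_Config_Heap
from the enclosing Language_Fixes context, none of which are part of this patch.

   declare
      New_Config : Configuration := Config;
   begin
      Push_Back_Check
        (Super, Shared_Parser, New_Config, (+END_ID, +SEMICOLON_ID),
         Push_Back_Undo_Reduce => False);
      Undo_Reduce_Check (Super, Shared_Parser, New_Config, +BLOCK_STATEMENT_ID);
      Insert (Super, Shared_Parser, New_Config, +IDENTIFIER_ID);
      Local_Config_Heap.Add (New_Config);
   end;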
diff --git a/wisitoken-parse-lr-parser-parse.adb b/wisitoken-parse-lr-parser-parse.adb
new file mode 100644
index 0000000000..187b74ad28
--- /dev/null
+++ b/wisitoken-parse-lr-parser-parse.adb
@@ -0,0 +1,659 @@
+--  Abstract :
+--
+--  see spec.
+--
+--  Copyright (C) 2002 - 2005, 2008 - 2015, 2017 - 2022 Free Software Foundation, Inc.
+--
+--  This library is free software;  you can redistribute it and/or modify it
+--  under terms of the  GNU General Public License  as published by the Free
+--  Software  Foundation;  either version 3,  or (at your  option) any later
+--  version. This library is distributed in the hope that it will be useful,
+--  but WITHOUT ANY WARRANTY;  without even the implied warranty of MERCHAN-
+--  TABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+
+pragma License (Modified_GPL);
+
+separate (WisiToken.Parse.LR.Parser)
+overriding procedure Parse
+  (Shared_Parser    : in out LR.Parser.Parser;
+   Recover_Log_File : in     Ada.Text_IO.File_Type;
+   Edits            : in     KMN_Lists.List := KMN_Lists.Empty_List;
+   Pre_Edited       : in     Boolean        := False)
+is
+   use Syntax_Trees;
+   use all type KMN_Lists.List;
+   use all type Ada.Containers.Count_Type;
+
+   Trace : WisiToken.Trace'Class renames Shared_Parser.Tree.Lexer.Trace.all;
+
+   Current_Verb : All_Parse_Action_Verbs;
+   Zombie_Count : SAL.Base_Peek_Type;
+
+begin
+   if Trace_Time then
+      Trace.Put_Clock ("start");
+   end if;
+
+   if Shared_Parser.User_Data /= null then
+      Shared_Parser.User_Data.Reset;
+   end if;
+
+   Shared_Parser.Tree.Lexer.Errors.Clear;
+
+   Shared_Parser.String_Quote_Checked := Invalid_Line_Number;
+
+   if Edits /= KMN_Lists.Empty_List then
+      if not Shared_Parser.Tree.Editable then
+         --  previous parse failed, left tree in uncertain state
+         raise WisiToken.Parse_Error with "previous parse failed, can't edit tree";
+      end if;
+
+      if Trace_Parse > Detail or Trace_Incremental_Parse > Outline then
+         Trace.New_Line;
+         Trace.Put_Line ("pre edit tree:");
+         Shared_Parser.Tree.Print_Tree (Line_Numbers => True, Non_Grammar => True);
+         Trace.New_Line;
+      end if;
+
+      Edit_Tree (Shared_Parser, Edits);
+
+      if Trace_Time then
+         Trace.Put_Clock ("post edit tree");
+      end if;
+
+      if Trace_Memory > Detail then
+         Trace.Put_Line ("post edit tree");
+         Report_Memory (Trace, Prefix => True);
+      end if;
+      if Trace_Parse > Outline or Trace_Incremental_Parse > Outline then
+         Trace.New_Line;
+         --  Parents not set, can't get Line_Numbers
+         Trace.Put_Line ("edited stream:");
+         Trace.Put_Line
+           (Shared_Parser.Tree.Image
+              (Shared_Parser.Tree.Shared_Stream,
+               Children    => Trace_Parse > Extra or Trace_Incremental_Parse > Extra,
+               Non_Grammar => Trace_Parse > Extra or Trace_Incremental_Parse > Extra));
+         Trace.New_Line;
+      end if;
+
+      if Shared_Parser.Tree.Stream_Length (Shared_Parser.Tree.Shared_Stream) = 3 and then Shared_Parser.Tree.ID
+        (Shared_Parser.Tree.Stream_First (Shared_Parser.Tree.Shared_Stream, Skip_SOI => True).Node) =
+        Shared_Parser.Tree.Lexer.Descriptor.Accept_ID
+      then
+         if Trace_Parse > Outline then
+            Trace.Put_Line ("edited tree does not need parse; no or only non_grammar changes");
+         end if;
+         Shared_Parser.Tree.Clear_Parse_Streams;
+         Shared_Parser.Parsers.Clear;
+         return;
+      end if;
+
+   elsif Pre_Edited then
+      --  Unit test providing an edited stream; see test_syntax_trees.adb
+      --  Breakdown_Optimized_List_01. We assume this is the same as a tree
+      --  resulting from Edit_Tree.
+      null;
+
+   else
+      --  Normal initial parse
+      Shared_Parser.Tree.Clear;
+      Shared_Parser.Lex_All;
+      if Trace_Memory > Detail then
+         Trace.Put_Line ("post lex");
+         Report_Memory (Trace, Prefix => True);
+      end if;
+   end if;
+
+   Shared_Parser.Parsers := Parser_Lists.New_List (Shared_Parser.Tree);
+
+   Shared_Parser.Tree.Start_Parse (Shared_Parser.Parsers.First.State_Ref.Stream, Shared_Parser.Table.State_First);
+
+   Main_Loop :
+   loop
+      --  exit on Accept_It action or syntax error.
+
+      Parse_Verb (Shared_Parser, Current_Verb, Zombie_Count);
+
+      if Trace_Parse > Extra then
+         Trace.Put_Line ("cycle start; current_verb: " & Image (Current_Verb));
+      end if;
+
+      case Current_Verb is
+      when Pause =>
+         null;
+
+      when Shift =>
+         --  We just shifted a token; get the next token from
+         --  Tree.Shared_Stream, Tree parse stream input, or Parser
+         --  Insert_Delete.
+
+         for Parser_State of Shared_Parser.Parsers loop
+            if Parser_State.Verb = Shift then
+               --  Handle inserting from Parser_State.Recover_Insert_Delete;
+               --  otherwise, Tree.Current_Token is correct.
+
+               declare
+                  Tree : Syntax_Trees.Tree renames Shared_Parser.Tree;
+               begin
+                  if Parser_State.Current_Recover_Op /= No_Insert_Delete then
+                     declare
+                        Error_Ref : constant Syntax_Trees.Stream_Error_Ref := Parser_State.Current_Error_Ref (Tree);
+                        Err : Error_Data'Class := Syntax_Trees.Error (Error_Ref);
+                        Op  : Recover_Op_Nodes renames Recover_Op_Array_Var_Ref (Err)(Parser_State.Current_Recover_Op);
+                     begin
+                        if Op.Op = Insert then
+                           declare
+                              Next_Sequential_Terminal : constant Syntax_Trees.Terminal_Ref :=
+                                Tree.First_Sequential_Terminal (Tree.Current_Token (Parser_State.Stream));
+                           begin
+                              if Op.Ins_Before = Tree.Get_Sequential_Index (Next_Sequential_Terminal.Node) then
+                                 --  We know Next_Sequential_Terminal is the first terminal of
+                                 --  Current_Token, and therefore we can insert before it. If it was
+                                 --  embedded in a nonterm, that nonterm would have been broken down in
+                                 --  order to shift the previous terminals.
+                                 Op.Ins_Node := Tree.Insert_Virtual_Terminal (Parser_State.Stream, Op.Ins_ID).Node;
+
+                                 Parser_State.Next_Recover_Op (Tree);
+
+                                 Parser_State.Update_Error
+                                   (Tree, Err,
+                                    Syntax_Trees.User_Data_Access_Constant (Shared_Parser.User_Data));
+                              end if;
+                           end;
+                        end if;
+                     end;
+                  end if;
+
+                  if Trace_Parse > Extra then
+                     Trace.Put_Line
+                       (" " & Tree.Trimmed_Image (Parser_State.Stream) &
+                          ": current_token " & Tree.Image
+                            (Shared_Parser.Tree.Current_Token (Parser_State.Stream), First_Terminal => True));
+                     Trace.Put_Line
+                       ("    shared_token " & Tree.Image (Shared_Parser.Tree.Shared_Token (Parser_State.Stream)));
+                     if Tree.Has_Input (Parser_State.Stream) then
+                        Trace.Put_Line
+                          ("    stream input " & Tree.Image (Parser_State.Stream, Stack => False, Input => True));
+                     end if;
+                     if Parser_State.Current_Recover_Op /= No_Insert_Delete then
+                        Trace.Put_Line
+                          ("    recover_insert_delete:" & Parser_State.Recover_Image (Tree, Current_Only => True));
+                     end if;
+                  end if;
+               end;
+            end if;
+         end loop;
+
+      when Accept_It =>
+         --  All parsers accepted or are zombies.
+         declare
+            Count : constant SAL.Base_Peek_Type := Shared_Parser.Parsers.Count;
+            Current_Parser : Parser_Lists.Cursor := Shared_Parser.Parsers.First;
+         begin
+            if Count = 1 then
+               --  Nothing more to do
+               exit Main_Loop;
+
+            elsif Zombie_Count + 1 = Count then
+               --  All but one are zombies
+               loop
+                  if Current_Parser.Verb = Accept_It then
+                     Current_Parser.Next;
+                  else
+                     declare
+                        Temp  : Parser_Lists.Cursor := Current_Parser;
+                     begin
+                        Current_Parser.Next;
+                        Shared_Parser.Parsers.Terminate_Parser
+                          (Temp, Shared_Parser.Tree, "zombie", Trace);
+                     end;
+                  end if;
+                  exit when Current_Parser.Is_Done;
+               end loop;
+
+               exit Main_Loop;
+
+            else
+               --  More than one parser is active.
+               declare
+                  use all type Parser_Lists.Cursor;
+                  Error_Parser_Count : Integer := (if Shared_Parser.Tree.Lexer.Errors.Length > 0 then 1 else 0);
+
+                  Recover_Cost           : Integer;
+                  Min_Recover_Cost       : Integer                   := Integer'Last;
+                  Recover_Ops_Length     : Ada.Containers.Count_Type;
+                  Min_Recover_Ops_Length : Ada.Containers.Count_Type := Ada.Containers.Count_Type'Last;
+                  Recover_Cur            : Parser_Lists.Cursor       := Current_Parser;
+               begin
+                  Current_Parser := Shared_Parser.Parsers.First;
+                  loop
+                     if Current_Parser.Verb = Accept_It then
+                        if Current_Parser.State_Ref.Error_Count > 0 then
+                           Error_Parser_Count := Error_Parser_Count + 1;
+                        end if;
+                        Current_Parser.Next;
+                     else
+                        declare
+                           Temp  : Parser_Lists.Cursor := Current_Parser;
+                        begin
+                           Current_Parser.Next;
+                           Shared_Parser.Parsers.Terminate_Parser
+                             (Temp, Shared_Parser.Tree, "zombie", Trace);
+                        end;
+                     end if;
+                     exit when Current_Parser.Is_Done;
+                  end loop;
+
+                  if Error_Parser_Count > 0 then
+                     --  There was at least one error. We assume that caused the ambiguous
+                     --  parse, and we pick the parser with the minimum cost and minimum
+                     --  recover ops length (consistent with Duplicate_State) to allow the
+                     --  parse to succeed. We terminate the other parsers so the remaining
+                     --  parser can do Execute_Actions.
+                     --
+                     --  If there are multiple errors, this metric is not very meaningful.
+                     --
+                     --  Note all surviving parsers must have the same error count.
+                     Current_Parser := Shared_Parser.Parsers.First;
+                     loop
+                        Recover_Cost := Current_Parser.State_Ref.Total_Recover_Cost;
+                        if Recover_Cost < Min_Recover_Cost then
+                           Min_Recover_Cost       := Recover_Cost;
+                           Min_Recover_Ops_Length := Current_Parser.State_Ref.Max_Recover_Ops_Length;
+                           Recover_Cur            := Current_Parser;
+
+                        elsif Recover_Cost = Min_Recover_Cost then
+                           Recover_Ops_Length := Current_Parser.State_Ref.Max_Recover_Ops_Length;
+                           if Recover_Ops_Length < Min_Recover_Ops_Length then
+                              Min_Recover_Ops_Length := Recover_Ops_Length;
+                              Recover_Cur    := Current_Parser;
+                           end if;
+                        end if;
+                        Current_Parser.Next;
+                        exit when Current_Parser.Is_Done;
+                     end loop;
+
+                     Current_Parser := Shared_Parser.Parsers.First;
+                     loop
+                        if Current_Parser = Recover_Cur then
+                           Current_Parser.Next;
+                        else
+                           declare
+                              Temp  : Parser_Lists.Cursor := Current_Parser;
+                           begin
+                              Current_Parser.Next;
+                              Shared_Parser.Parsers.Terminate_Parser
+                                (Temp, Shared_Parser.Tree,
+                                 (if Recover_Cost = Min_Recover_Cost and then
+                                    Recover_Ops_Length = Min_Recover_Ops_Length
+                                  then "random"
+                                  else "recover cost/min length"),
+                                 Trace);
+                           end;
+                        end if;
+                        exit when Current_Parser.Is_Done;
+                     end loop;
+
+                     exit Main_Loop;
+
+                  else
+                     --  There were no previous errors. We allow the parse to fail, on the
+                     --  assumption that an otherwise correct input should not yield an
+                     --  ambiguous parse.
+                     Current_Parser := Shared_Parser.Parsers.First;
+                     raise WisiToken.Parse_Error with Shared_Parser.Tree.Error_Message
+                       (Shared_Parser.Tree.Current_Token (Current_Parser.Stream),
+                        "Ambiguous parse:" & SAL.Base_Peek_Type'Image (Count) & " parsers active.");
+                  end if;
+               end;
+            end if;
+         end;
+
+      when Reduce =>
+         null;
+
+      when Error =>
+         --  All parsers errored; attempt recovery
+         declare
+            use all type McKenzie_Recover.Recover_Status;
+
+            Recover_Result : McKenzie_Recover.Recover_Status := Fail_Check_Delta;
+
+            Pre_Recover_Parser_Count : constant SAL.Base_Peek_Type := Shared_Parser.Parsers.Count;
+            Start : Ada.Calendar.Time;
+         begin
+            --  Recover algorithms expect current token at
+            --  Parsers(*).Current_Token, will set
+            --  Parsers(*).Recover_Insert_Delete with new input tokens and
+            --  deletions, adjust Parsers(*).Stack, and set
+            --  Parsers(*).Current_Token and Parsers(*).Verb.
+
+            if Trace_Time then
+               Trace.Put_Clock ("pre-recover" & Shared_Parser.Parsers.Count'Img & " active");
+               Start := Ada.Calendar.Clock;
+            end if;
+
+            if not Shared_Parser.Table.Error_Recover_Enabled then
+               if Trace_Parse > Outline or Trace_McKenzie > Outline then
+                  Trace.Put_Line ("recover disabled");
+               end if;
+            else
+               Recover_Result := McKenzie_Recover.Recover (Shared_Parser);
+               if Trace_Time then
+                  declare
+                     use Ada.Calendar;
+                     Recover_Duration : constant Duration := Clock - Start;
+                  begin
+                     Trace.Put_Clock
+                       ("post-recover" & Shared_Parser.Parsers.Count'Img & " active," & Recover_Duration'Image);
+                  end;
+               end if;
+
+               if Trace_Parse > Outline then
+                  if Recover_Result = Success  then
+                     Trace.New_Line;
+                     Trace.Put_Line
+                       ("recover: succeed, parser count" & SAL.Base_Peek_Type'Image (Shared_Parser.Parsers.Count));
+                  else
+                     Trace.Put_Line
+                       ("recover: fail " & McKenzie_Recover.Recover_Status'Image (Recover_Result) &
+                          ", parser count" & SAL.Base_Peek_Type'Image (Shared_Parser.Parsers.Count));
+                  end if;
+               end if;
+
+               if Ada.Text_IO.Is_Open (Recover_Log_File) then
+                  Recover_To_Log (Shared_Parser, Recover_Log_File, Recover_Result, Pre_Recover_Parser_Count);
+               end if;
+            end if;
+
+            if Recover_Result = Success then
+               Shared_Parser.Resume_Active := True;
+
+               for Parser_State of Shared_Parser.Parsers loop
+                  Parser_State.Resume_Active          := True;
+                  Parser_State.Conflict_During_Resume := False;
+
+                  case Parser_State.Verb is
+                  when Error =>
+                     --  Force this parser to be terminated.
+                     Parser_State.Zombie_Token_Count := Shared_Parser.Table.McKenzie_Param.Zombie_Limit + 1;
+
+                     if Trace_Parse > Outline and Trace_McKenzie <= Extra then
+                        Trace.Put_Line
+                          (" " & Shared_Parser.Tree.Trimmed_Image (Parser_State.Stream) & ": fail:");
+                     end if;
+
+                  when Shift =>
+                     Parser_State.Zombie_Token_Count := 0;
+                     if Trace_Parse > Detail and Trace_McKenzie <= Extra then
+                        Trace.Put_Line
+                          (" " & Shared_Parser.Tree.Trimmed_Image (Parser_State.Stream) & ": stack/stream:");
+                        Trace.Put_Line
+                          (Shared_Parser.Tree.Image
+                             (Parser_State.Stream, Stack => True, Input => True, Shared => True,
+                              Node_Numbers => not Trace_Parse_No_State_Numbers,
+                              Children => Trace_Parse > Detail));
+                        Trace.Put_Line
+                          ("    Current_Token: " & Shared_Parser.Tree.Image
+                             (Shared_Parser.Tree.Current_Token (Parser_State.Stream), Terminal_Node_Numbers => True));
+                        Trace.Put_Line
+                          ("    Shared_Token: " & Shared_Parser.Tree.Image
+                             (Shared_Parser.Tree.Shared_Token (Parser_State.Stream), Terminal_Node_Numbers => True));
+                        Trace.Put_Line
+                          ("    recover_insert_delete:" &
+                             (if Parser_State.Current_Recover_Op = No_Insert_Delete
+                              then ""
+                              else Parser_State.Recover_Image (Shared_Parser.Tree, Current_Only => True)));
+
+                        if Trace_Parse > Detail then
+                           Trace.Put_Line
+                             ("    resume_active: True, token goal" & Parser_State.Resume_Token_Goal'Image);
+                        end if;
+                     end if;
+
+                  when Reduce | Pause | Accept_It =>
+                     raise SAL.Programmer_Error;
+                  end case;
+               end loop;
+
+               if Trace_Parse > Detail then
+                  Trace.New_Line;
+               end if;
+
+            else
+               --  Terminate with error (parse_error because user expects parse to
+               --  succeed on Syntax_Error). Parser_State has all the required info
+               --  on the original error (recorded by Error in Do_Action).
+               McKenzie_Recover.Clear_Sequential_Index (Shared_Parser);
+               raise WisiToken.Parse_Error with "recover fail: " & Recover_Result'Image;
+            end if;
+
+            --  Recover sets Parser.Verb to Shift for all active parsers, to
+            --  indicate it no longer has an error. Set Current_Verb to reflect
+            --  that.
+            Current_Verb := Shift;
+         end;
+      end case;
+
+      --  We don't use 'for Parser_State of Parsers loop' here,
+      --  because terminate on error and spawn on conflict require
+      --  changing the parser list.
+      declare
+         Current_Parser : Parser_Lists.Cursor := Shared_Parser.Parsers.First;
+      begin
+         Action_Loop :
+         loop
+            exit Action_Loop when Current_Parser.Is_Done;
+
+            --  We don't check duplicate state during resume, because the tokens
+            --  inserted/deleted by error recover may cause initially duplicate
+            --  states to diverge.
+            if not Shared_Parser.Resume_Active and Current_Verb = Shift then
+               Shared_Parser.Parsers.Duplicate_State (Current_Parser, Shared_Parser.Tree, Trace);
+               --  If Duplicate_State terminated Current_Parser, Current_Parser now
+               --  points to the next parser. Otherwise it is unchanged.
+            end if;
+
+            exit Action_Loop when Current_Parser.Is_Done;
+
+            if Trace_Parse > Extra then
+               declare
+                  Parser_State : Parser_Lists.Parser_State renames Shared_Parser.Parsers
+                    (Parser_Lists.To_Parser_Node_Access (Current_Parser));
+               begin
+                  Trace.Put_Line
+                    (" " & Shared_Parser.Tree.Trimmed_Image (Parser_State.Stream) &
+                       ".verb: " & Image (Parser_State.Verb));
+                  Trace.Put_Line
+                    (" ... stack/stream: " &
+                       Shared_Parser.Tree.Image
+                         (Parser_State.Stream, Stack => True, Input => True, Shared => True, Children => False,
+                          State_Numbers => not Trace_Parse_No_State_Numbers));
+                  if Parser_State.Current_Recover_Op /= No_Insert_Delete then
+                     Trace.Put_Line
+                       (" ... recover_insert_delete:" & Parser_State.Recover_Image
+                          (Shared_Parser.Tree, Current_Only => True));
+                  end if;
+               end;
+            end if;
+
+            --  Each branch of the following 'if' calls either Current_Parser.Free
+            --  (which advances to the next parser) or Current_Parser.Next.
+
+            if Current_Parser.Verb = Error then
+               --  This parser is a zombie; see Check_Error.
+               --
+               --  Check to see if it is time to terminate it
+               if Current_Parser.State_Ref.Zombie_Token_Count <= Shared_Parser.Table.McKenzie_Param.Zombie_Limit
+               then
+                  if Trace_Parse > Detail then
+                     Trace.Put_Line (" " & Shared_Parser.Tree.Trimmed_Image (Current_Parser.Stream) & ": zombie");
+                  end if;
+
+                  Current_Parser.Next;
+               else
+                  Shared_Parser.Parsers.Terminate_Parser
+                    (Current_Parser, Shared_Parser.Tree, "zombie", Trace);
+               end if;
+
+            elsif Current_Parser.Verb = Current_Verb then
+
+               declare
+                  Action_Cur : Parse_Action_Node_Ptr;
+                  Action     : Parse_Action_Rec;
+                  Conflict   : Parse_Action_Node_Ptr;
+               begin
+                  LR.Parser.Get_Action (Shared_Parser, Current_Parser.State_Ref, Action_Cur, Action);
+
+                  Conflict := (if Action_Cur = null then null else Action_Cur.Next);
+
+                  if Conflict /= null then
+                     loop
+                        exit when Conflict = null;
+                        --  Spawn a new parser (before modifying Current_Parser stack).
+
+                        Current_Parser.State_Ref.Conflict_During_Resume := Current_Parser.State_Ref.Resume_Active;
+
+                        if Shared_Parser.Parsers.Count = Shared_Parser.Table.Max_Parallel then
+                           --  If errors were recovered, terminate a parser that used the
+                           --  highest cost solution.
+                           declare
+                              use all type WisiToken.Parse.LR.Parser_Lists.Cursor;
+                              Max_Recover_Cost : Integer             := 0;
+                              Cur              : Parser_Lists.Cursor := Shared_Parser.Parsers.First;
+                              Max_Parser       : Parser_Lists.Cursor := Cur;
+                           begin
+                              loop
+                                 exit when Cur.Is_Done;
+                                 if Cur.State_Ref.Total_Recover_Cost > Max_Recover_Cost then
+                                    Max_Parser       := Cur;
+                                    Max_Recover_Cost := Cur.State_Ref.Total_Recover_Cost;
+                                 end if;
+                                 Cur.Next;
+                              end loop;
+
+                              if Max_Recover_Cost > 0 then
+                                 if Max_Parser = Current_Parser then
+                                    Current_Parser.Next;
+
+                                    Shared_Parser.Parsers.Terminate_Parser
+                                      (Max_Parser, Shared_Parser.Tree, "too many parsers; max error repair cost",
+                                       Trace);
+
+                                    --  We changed Current_Parser, so start over
+                                    goto Continue_Action_Loop;
+                                 else
+                                    Shared_Parser.Parsers.Terminate_Parser
+                                      (Max_Parser, Shared_Parser.Tree, "too many parsers; max error repair cost",
+                                       Trace);
+                                 end if;
+                              end if;
+                           end;
+                        end if;
+
+                        if Shared_Parser.Parsers.Count = Shared_Parser.Table.Max_Parallel then
+                           declare
+                              Parser_State : Parser_Lists.Parser_State renames Current_Parser.State_Ref;
+                           begin
+                              raise WisiToken.Parse_Error with Shared_Parser.Tree.Error_Message
+                                (Shared_Parser.Tree.Shared_Token (Parser_State.Stream),
+                                 "too many parallel parsers required in grammar state" &
+                                   Shared_Parser.Tree.State (Current_Parser.Stream)'Image &
+                                   "; simplify grammar, or increase max-parallel (" &
+                                   SAL.Base_Peek_Type'Image (Shared_Parser.Table.Max_Parallel) & ")");
+                           end;
+
+                        else
+                           if Trace_Parse > Outline then
+                              declare
+                                 Parser_State : Parser_Lists.Parser_State renames Current_Parser.State_Ref;
+                              begin
+                                 Trace.Put_Line
+                                   (" " & Shared_Parser.Tree.Trimmed_Image (Current_Parser.Stream) & ": " &
+                                      (if Trace_Parse_No_State_Numbers
+                                       then "--"
+                                       else Trimmed_Image (Shared_Parser.Tree.State (Parser_State.Stream))) & ": " &
+                                      Shared_Parser.Tree.Image
+                                        (Shared_Parser.Tree.Current_Token (Parser_State.Stream),
+                                         Terminal_Node_Numbers => True) & " : " &
+                                      "spawn " & Shared_Parser.Tree.Next_Stream_ID_Trimmed_Image & ", (" &
+                                      Trimmed_Image (1 + Integer (Shared_Parser.Parsers.Count)) & " active)");
+                                 if Debug_Mode then
+                                    Trace.Put_Line ("tree size: " & Shared_Parser.Tree.Tree_Size_Image);
+                                 end if;
+                              end;
+                           end if;
+
+                           Shared_Parser.Parsers.Prepend_Copy
+                             (Current_Parser, Shared_Parser.Tree,
+                              Syntax_Trees.User_Data_Access_Constant (Shared_Parser.User_Data), Trace);
+                           Do_Action (Conflict.Item, Shared_Parser.Parsers.First, Shared_Parser);
+
+                           --  We must terminate error parsers immediately in order to avoid
+                           --  zombie parsers during recovery.
+                           declare
+                              Temp : Parser_Lists.Cursor := Shared_Parser.Parsers.First;
+                           begin
+                              Check_Error (Shared_Parser, Temp);
+                           end;
+                        end if;
+
+                        Conflict := Conflict.Next;
+                     end loop;
+                  end if;
+
+                  Do_Action (Action, Current_Parser, Shared_Parser);
+               end;
+               Check_Error (Shared_Parser, Current_Parser);
+
+            else
+               --  Current parser is waiting for others to catch up
+               Current_Parser.Next;
+            end if;
+            <<Continue_Action_Loop>>
+         end loop Action_Loop;
+      end;
+   end loop Main_Loop;
+
+   if Trace_Parse > Outline then
+      Trace.Put_Line (" " & Shared_Parser.Tree.Trimmed_Image (Shared_Parser.Parsers.First.Stream) & ": succeed");
+   end if;
+
+   Finish_Parse (Shared_Parser, Incremental_Parse => Pre_Edited or Edits /= KMN_Lists.Empty_List);
+
+   if Trace_Time then
+      Trace.Put_Clock ("finish parse");
+   end if;
+
+   --  We don't raise Syntax_Error for lexer errors, since they are all
+   --  recovered, either by inserting a quote, or by ignoring the
+   --  character.
+exception
+when Partial_Parse =>
+   Finish_Parse (Shared_Parser, Incremental_Parse => False);
+   if Trace_Time then
+      Trace.Put_Clock ("finish partial parse");
+   end if;
+
+when Syntax_Error | WisiToken.Parse_Error =>
+   if Trace_Time then
+      Trace.Put_Clock ("finish - error");
+   end if;
+   raise;
+
+when E : others =>
+   declare
+      Msg : constant String := Ada.Exceptions.Exception_Name (E) & ": " & Ada.Exceptions.Exception_Message (E);
+   begin
+      if Debug_Mode then
+         --  If this is from a McKenzie task, that also outputs a stack trace.
+         Trace.Put_Line ("exception: " & Msg);
+         Trace.Put_Line (GNAT.Traceback.Symbolic.Symbolic_Traceback (E)); -- includes Prefix
+         Trace.New_Line;
+      end if;
+
+      --  Emacs displays the exception message in the echo area; easy to miss
+      raise WisiToken.Parse_Error with Msg;
+   end;
+end Parse;
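For context, a typical caller drives this procedure roughly as follows (hypothetical
fragment; Parser, Log_File and My_Edits are assumed to be set up elsewhere, e.g. as in
run_wisi_common_parse.adb):

   Parser.Parse (Log_File);                      --  batch parse: lex all text, parse from scratch
   --  ... the user edits the buffer; the editor builds a KMN edit list ...
   Parser.Parse (Log_File, Edits => My_Edits);   --  incremental parse: Edit_Tree applies the edits first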
diff --git a/wisitoken-parse-lr-parser.adb b/wisitoken-parse-lr-parser.adb
index 04b6f6b446..2f6e4e56dd 100644
--- a/wisitoken-parse-lr-parser.adb
+++ b/wisitoken-parse-lr-parser.adb
@@ -2,7 +2,7 @@
 --
 --  See spec.
 --
---  Copyright (C) 2002 - 2005, 2008 - 2015, 2017 - 2020 Free Software Foundation, Inc.
+--  Copyright (C) 2002 - 2005, 2008 - 2015, 2017 - 2022 Free Software Foundation, Inc.
 --
 --  This file is part of the WisiToken package.
 --
@@ -30,148 +30,415 @@ pragma License (Modified_GPL);
 with Ada.Calendar.Formatting;
 with Ada.Exceptions;
 with GNAT.Traceback.Symbolic;
+with WisiToken.In_Parse_Actions;
 with WisiToken.Parse.LR.McKenzie_Recover;
 package body WisiToken.Parse.LR.Parser is
 
    function Reduce_Stack_1
-     (Current_Parser : in     Parser_Lists.Cursor;
+     (Shared_Parser  : in out Parser;
+      Current_Parser : in     Parser_Lists.Cursor;
       Action         : in     Reduce_Action_Rec;
-      Nonterm        :    out WisiToken.Valid_Node_Index;
-      Lexer          : in     WisiToken.Lexer.Handle;
-      Trace          : in out WisiToken.Trace'Class)
-     return WisiToken.Semantic_Checks.Check_Status_Label
+      New_State      : in     State_Index)
+     return Syntax_Trees.In_Parse_Actions.Status_Label
    is
       --  We treat semantic check errors as parse errors here, to allow
       --  error recovery to take better advantage of them. One recovery
       --  strategy is to fix things so the semantic check passes.
 
-      use all type Semantic_Checks.Check_Status_Label;
-      use all type Semantic_Checks.Semantic_Check;
+      use all type Syntax_Trees.In_Parse_Actions.Status_Label;
+      use all type Syntax_Trees.In_Parse_Actions.In_Parse_Action;
 
       Parser_State  : Parser_Lists.Parser_State renames Current_Parser.State_Ref.Element.all;
-      Children_Tree : Valid_Node_Index_Array (1 .. SAL.Base_Peek_Type (Action.Token_Count));
-   begin
-      for I in reverse Children_Tree'Range loop
-         Children_Tree (I) := Parser_State.Stack.Pop.Token;
-      end loop;
 
-      Nonterm := Parser_State.Tree.Add_Nonterm
-        (Action.Production, Children_Tree, Action.Action,
-         Default_Virtual => Parser_State.Tree.Is_Virtual (Parser_State.Current_Token));
-      --  Computes Nonterm.Byte_Region, Virtual
+      Nonterm : constant Syntax_Trees.Rooted_Ref := Shared_Parser.Tree.Reduce
+        (Parser_State.Stream, Action.Production, Action.Token_Count, New_State,
+         Recover_Conflict => Parser_State.Resume_Active and Shared_Parser.Parsers.Count > 1);
+
+      In_Parse_Action : constant Syntax_Trees.In_Parse_Actions.In_Parse_Action := Shared_Parser.Get_In_Parse_Action
+        (Action.Production);
+   begin
       if Trace_Parse > Detail then
-         Trace.Put_Line (Parser_State.Tree.Image (Nonterm, Trace.Descriptor.all, Include_Children => True));
+         Shared_Parser.Tree.Lexer.Trace.Put_Line
+           (Shared_Parser.Tree.Image
+              (Nonterm.Node,
+               Children              => True,
+               Terminal_Node_Numbers => True,
+               RHS_Index             => True));
       end if;
 
-      if Action.Check = null then
+      if In_Parse_Action = null then
          return Ok;
 
       else
+         --  We have to call the semantic action even when Resume_Active,
+         --  because it might do other things than return a status (ie
+         --  Propagate_Name).
          declare
-            Nonterm_Token  : Recover_Token                := Parser_State.Tree.Recover_Token (Nonterm);
-            Children_Token : constant Recover_Token_Array := Parser_State.Tree.Recover_Token_Array (Children_Tree);
-            Status         : Semantic_Checks.Check_Status;
-         begin
-            Status := Action.Check (Lexer, Nonterm_Token, Children_Token, Recover_Active => False);
+            Nonterm_Token : Syntax_Trees.Recover_Token := Shared_Parser.Tree.Get_Recover_Token (Nonterm);
 
-            if Nonterm_Token.Name /= Null_Buffer_Region then
-               Parser_State.Tree.Set_Name_Region (Nonterm, Nonterm_Token.Name);
-            end if;
+            Children_Token : constant Syntax_Trees.Recover_Token_Array :=
+              Shared_Parser.Tree.Children_Recover_Tokens (Parser_State.Stream, Nonterm.Element);
+            Status         : Syntax_Trees.In_Parse_Actions.Status;
+         begin
+            Status := In_Parse_Action
+              (Shared_Parser.Tree, Nonterm_Token, Children_Token, Recover_Active => False);
 
             if Trace_Parse > Detail then
-               Trace.Put_Line ("semantic check " & Semantic_Checks.Image (Status, Trace.Descriptor.all));
+               Shared_Parser.Tree.Lexer.Trace.Put_Line
+                 ("in_parse_action " & WisiToken.In_Parse_Actions.Image (Status, Shared_Parser.Tree, Nonterm.Node));
             end if;
 
             case Status.Label is
             when Ok =>
                return Ok;
 
-            when Semantic_Checks.Error =>
+            when Syntax_Trees.In_Parse_Actions.Error =>
                if Parser_State.Resume_Active then
                  --  Ignore this error; that's how McKenzie_Recover decided to fix it
                   return Ok;
 
                else
-                  Parser_State.Errors.Append
-                    ((Label          => Check,
-                      First_Terminal => Trace.Descriptor.First_Terminal,
-                      Last_Terminal  => Trace.Descriptor.Last_Terminal,
-                      Check_Status   => Status,
-                      Recover        => (others => <>)));
+                  Shared_Parser.Tree.Add_Error_To_Stack_Top
+                    (Parser_State.Stream,
+                     In_Parse_Action_Error'
+                       (Status       => Status,
+                        Recover_Ops  => Recover_Op_Nodes_Arrays.Empty_Vector,
+                        Recover_Test => null),
+                     Syntax_Trees.User_Data_Access_Constant (Shared_Parser.User_Data));
+
                   return Status.Label;
                end if;
             end case;
-         exception
-         when Partial_Parse =>
-            --  From Action.Check
-            Parser_State.Tree.Set_Root (Nonterm);
-            raise;
          end;
       end if;
    end Reduce_Stack_1;
 
+   procedure Get_Action
+     (Shared_Parser : in out LR.Parser.Parser;
+      Parser_State  : in out Parser_Lists.Parser_State;
+      Action_Cur    :    out Parse_Action_Node_Ptr;
+      Action        :    out Parse_Action_Rec)
+   is
+      --  Same logic as in McKenzie_Recover.Parse.Get_Action, but this
+      --  operates on Parser_State.
+      use Syntax_Trees;
+
+      Table : Parse_Table renames Shared_Parser.Table.all;
+      Tree  : Syntax_Trees.Tree renames Shared_Parser.Tree;
+   begin
+      loop -- handle delete empty nonterm, undo_reduce
+         declare
+            Current_State : constant State_Index := Tree.State (Parser_State.Stream);
+
+            function Handle_Error return Boolean
+            --  Return True if the caller should return immediately; False if Undo_Reduce was done.
+            is begin
+               if Tree.Label (Tree.Peek (Parser_State.Stream)) in Terminal_Label then
+                  return True;
+
+               else
+                  --  [Wagner Graham 1998] has Right_Breakdown here, but that is often
+                  --  overkill; we only need Undo_Reduce until Current_Token is
+                  --  shiftable. ada_mode-interactive_03.adb
+                  --
+                  --  IMPROVEME: if error recovery is correct here, we do more
+                  --  Undo_Reduce than necessary (and it will never happen in
+                  --  Error_Recovery).
+
+                  if Parser_State.Last_Action.Verb = Reduce then
+                     --  We are in an erroneous branch of a conflict, or there is a real error.
+                     --  ada_mode-incremental_01.adb
+                     return True;
+                  end if;
+
+                  if Trace_Parse > Detail then
+                     Shared_Parser.Tree.Lexer.Trace.Put_Line
+                       (" " & Shared_Parser.Tree.Trimmed_Image (Parser_State.Stream) & ": " &
+                          Trimmed_Image (Current_State) & ": " & Tree.Image
+                            (Shared_Parser.Tree.Current_Token (Parser_State.Stream), First_Terminal => True) &
+                          " error; undo_reduce");
+                     Shared_Parser.Tree.Lexer.Trace.Put
+                       (" ... " & Tree.Image (Tree.Peek (Parser_State.Stream), State => True));
+                  end if;
+                  Undo_Reduce
+                    (Tree, Table, Parser_State.Stream, Syntax_Trees.User_Data_Access_Constant
+                       (Shared_Parser.User_Data));
+
+                  if Trace_Parse > Detail then
+                     Shared_Parser.Tree.Lexer.Trace.Put
+                       (" => " & Tree.Image (Tree.Peek (Parser_State.Stream), State => True),
+                        Prefix => False);
+                     Shared_Parser.Tree.Lexer.Trace.New_Line;
+                  end if;
+                  return False;
+               end if;
+            end Handle_Error;
+
+            Current_Node : constant Valid_Node_Access := Shared_Parser.Tree.Current_Token (Parser_State.Stream).Node;
+
+         begin
+            if Tree.Label (Current_Node) in Terminal_Label then
+               Action_Cur := Action_For (Table, Current_State, Tree.ID (Current_Node));
+               Action     := Action_Cur.Item;
+
+               case Action.Verb is
+               when Shift | Accept_It | Reduce =>
+                  return;
+               when Error =>
+                  if Handle_Error then
+                     return;
+                  end if;
+               end case;
+            else
+               declare
+                  New_State : constant Unknown_State_Index := Goto_For
+                    (Table, Current_State, Tree.ID (Current_Node));
+               begin
+                  if New_State /= Unknown_State then
+                     Action_Cur := null;
+                     Action     :=
+                       (Verb       => Shift,
+                        Production => Invalid_Production_ID,
+                        State      => New_State);
+                     return;
+                  else
+                     declare
+                        Checking_Next : Boolean := False;
+
+                        procedure Delete_Empty
+                        is begin
+                           if Trace_Parse > Detail then
+                              Shared_Parser.Tree.Lexer.Trace.Put_Line
+                                (" " & Shared_Parser.Tree.Trimmed_Image (Parser_State.Stream) & ": " &
+                                   (if Trace_Parse_No_State_Numbers
+                                    then "-- : "
+                                    else Trimmed_Image (Shared_Parser.Tree.State (Parser_State.Stream)) & ": ") &
+                                   ": delete empty nonterm " &
+                                   Tree.Image
+                                     (Shared_Parser.Tree.Current_Token (Parser_State.Stream), First_Terminal => True));
+                           end if;
+
+                           Tree.Delete_Current_Token (Parser_State.Stream);
+                        end Delete_Empty;
+
+                        function Get_First_Terminal return Valid_Node_Access
+                        is
+                           Temp : Node_Access := Tree.First_Terminal (Current_Node);
+                        begin
+                           if Temp = Invalid_Node_Access then
+                              --  Current_Token is an empty nonterm; peek at next terminal,
+                              --  do reduce until this nonterm is shiftable.
+                              --  ada_mode-interactive_03.adb
+                              --  test_incremental.adb Recover_1 aspect_specification_opt.
+                              Temp := Tree.First_Terminal (Shared_Parser.Tree.Current_Token (Parser_State.Stream)).Node;
+                              Checking_Next := True;
+                           end if;
+                           return Temp;
+                        end Get_First_Terminal;
+
+                        First_In_Current : constant Valid_Node_Access := Get_First_Terminal;
+
+                     begin
+                        Action_Cur := Action_For (Table, Current_State, Tree.ID (First_In_Current));
+                        Action     := Action_Cur.Item;
+
+                        case Action.Verb is
+                        when Shift =>
+                           if Checking_Next then
+                              --  If the empty nonterm was shiftable, it would have been handled by
+                              --  Goto_For above. test_incremental.adb Edit_Code_9. Edit_Tree could
+                              --  delete this nonterm, but handling it here is simpler.
+                              Delete_Empty;
+
+                           else
+                              declare
+                                 Current_Token : Rooted_Ref := Shared_Parser.Tree.Current_Token (Parser_State.Stream);
+                              begin
+                                 if Shared_Parser.Tree.Current_Token (Parser_State.Stream).Stream /=
+                                   Parser_State.Stream
+                                 then
+                                    --  To breakdown a shared_stream token, we first have to create a
+                                    --  parse stream input element for it, and do the breakdown in the
+                                    --  parse stream input.
+                                    Tree.Move_Shared_To_Input (Parser_State.Stream);
+                                    Current_Token := Shared_Parser.Tree.Current_Token (Parser_State.Stream);
+                                 end if;
+
+                                 if Trace_Parse > Detail then
+                                    Shared_Parser.Tree.Lexer.Trace.Put_Line
+                                      (" " & Shared_Parser.Tree.Trimmed_Image (Parser_State.Stream) &
+                                         ": left_breakdown " &
+                                         Tree.Image (Current_Token, First_Terminal => True));
+                                 end if;
+                                 Tree.Left_Breakdown
+                                   (Current_Token, Syntax_Trees.User_Data_Access_Constant (Shared_Parser.User_Data));
+
+                                 if Trace_Parse > Extra then
+                                    Shared_Parser.Tree.Lexer.Trace.Put_Line
+                                      (" ... current_token: " & Tree.Image (Current_Token, First_Terminal => True));
+                                    if Trace_Parse > Detail then
+                                       Shared_Parser.Tree.Lexer.Trace.Put_Line
+                                         (" ... input stream: " & Tree.Image
+                                            (Parser_State.Stream, Stack => 
False, Input => True, Shared => True));
+                                    end if;
+                                 end if;
+                              end;
+                              return;
+                           end if;
+
+                        when Accept_It | Reduce =>
+                           return;
+
+                        when Error =>
+                           if Checking_Next then
+                              Delete_Empty;
+
+                           elsif Handle_Error then
+                              return;
+                           end if;
+                        end case;
+                     end;
+                  end if;
+               end;
+            end if;
+         end;
+      end loop;
+   end Get_Action;
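
Get_First_Terminal above has to descend through nonterms to reach the first terminal, falling back to the next shared-stream token when the nonterm is empty. The following standalone sketch shows just the leftmost-terminal descent over a toy tree type; the names here (Node, First_Terminal_Sketch) are illustrative, not the WisiToken syntax-tree API.

   with Ada.Text_IO;
   procedure First_Terminal_Sketch is
      type Node;
      type Node_Access is access all Node;
      type Node_Array is array (Positive range <>) of Node_Access;
      type Node_Array_Access is access Node_Array;

      type Node (Is_Terminal : Boolean := True) is record
         Name : Character;
         case Is_Terminal is
            when True  => null;
            when False => Children : Node_Array_Access;
         end case;
      end record;

      --  Descend to the leftmost terminal; null plays the role of
      --  Invalid_Node_Access for an empty nonterm.
      function First_Terminal (N : Node_Access) return Node_Access is
      begin
         if N = null or else N.Is_Terminal then
            return N;
         end if;
         for Child of N.Children.all loop
            declare
               T : constant Node_Access := First_Terminal (Child);
            begin
               if T /= null then
                  return T;
               end if;
            end;
         end loop;
         return null;
      end First_Terminal;

      A     : constant Node_Access := new Node'(True, 'a');
      Empty : constant Node_Access := new Node'(False, '?', new Node_Array'(1 .. 0 => null));
      Root  : constant Node_Access := new Node'(False, '?', new Node_Array'(1 => Empty, 2 => A));
   begin
      Ada.Text_IO.Put_Line ("first terminal: " & First_Terminal (Root).Name);
   end First_Terminal_Sketch;

The empty child under Root mirrors the empty-nonterm case handled above by peeking at the next token.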
+
    procedure Do_Action
-     (Action         : in Parse_Action_Rec;
-      Current_Parser : in Parser_Lists.Cursor;
-      Shared_Parser  : in LR.Parser.Parser)
+     (Action         : in     Parse_Action_Rec;
+      Current_Parser : in     Parser_Lists.Cursor;
+      Shared_Parser  : in out LR.Parser.Parser)
+   --  Apply Action to Current_Parser; sets Current_Parser.Verb.
    is
-      use all type Semantic_Checks.Check_Status_Label;
+      use all type Syntax_Trees.In_Parse_Actions.Status_Label;
 
       Parser_State : Parser_Lists.Parser_State renames 
Current_Parser.State_Ref;
-      Trace        : WisiToken.Trace'Class renames Shared_Parser.Trace.all;
-      Nonterm      : WisiToken.Valid_Node_Index;
-      Status       : Semantic_Checks.Check_Status_Label;
+      Trace        : WisiToken.Trace'Class renames 
Shared_Parser.Tree.Lexer.Trace.all;
+      Status       : Syntax_Trees.In_Parse_Actions.Status_Label;
+
    begin
       if Trace_Parse > Detail then
          Trace.Put
-           (Integer'Image (Current_Parser.Label) & ": " &
-              Trimmed_Image (Parser_State.Stack.Peek.State) & ": " &
-              Parser_State.Tree.Image (Parser_State.Current_Token, 
Trace.Descriptor.all) & " : ");
-         Put (Trace, Action);
+           --  No prefix, leading space for compatibility with existing tests.
+           (" " & Shared_Parser.Tree.Trimmed_Image (Parser_State.Stream) & ": 
" &
+              (if Trace_Parse_No_State_Numbers
+               then "-- : "
+               else Trimmed_Image (Shared_Parser.Tree.State 
(Parser_State.Stream)) & ": ") &
+              Shared_Parser.Tree.Image
+                (Shared_Parser.Tree.Current_Token (Parser_State.Stream),
+                 First_Terminal => True, Terminal_Node_Numbers => True) & " : 
" &
+              Trace_Image (Action, Shared_Parser.Tree.Lexer.Descriptor.all));
          Trace.New_Line;
       end if;
 
       case Action.Verb is
       when Shift =>
-         Current_Parser.Set_Verb (Shift);
-         Parser_State.Stack.Push ((Action.State, Parser_State.Current_Token));
-         Parser_State.Tree.Set_State (Parser_State.Current_Token, 
Action.State);
+         Parser_State.Set_Verb (Shift);
+         Parser_State.Last_Action := Action;
+
+         Shared_Parser.Tree.Shift (Parser_State.Stream, Action.State);
 
       when Reduce =>
          declare
             New_State : constant Unknown_State_Index := Goto_For
               (Table => Shared_Parser.Table.all,
-               State => Parser_State.Stack (SAL.Base_Peek_Type 
(Action.Token_Count) + 1).State,
+               State => Shared_Parser.Tree.State
+                 (Parser_State.Stream, Shared_Parser.Tree.Peek
+                    (Parser_State.Stream, SAL.Base_Peek_Type 
(Action.Token_Count) + 1)),
                ID    => Action.Production.LHS);
          begin
             if New_State = Unknown_State then
                --  This is due to a bug in the LALR parser generator (see
                --  lalr_generator_bug_01.wy); we treat it as a syntax error.
-               Current_Parser.Set_Verb (Error);
+               Parser_State.Set_Verb (Error);
+               Parser_State.Last_Action := (Error, Invalid_Production_ID);
+
+               Shared_Parser.Tree.Add_Error_To_Input
+                 (Parser_State.Stream,
+                  Parse_Error'
+                    (First_Terminal => 1,
+                     Last_Terminal  => 0,
+                     Expecting      => (1 .. 0 => False),
+                     Recover_Ops    => Recover_Op_Nodes_Arrays.Empty_Vector,
+                     Recover_Test   => null),
+                  Syntax_Trees.User_Data_Access_Constant 
(Shared_Parser.User_Data));
+
                if Trace_Parse > Detail then
-                  Trace.Put_Line (" ... error");
+                  Trace.Put_Line (" ... error unknown state");
                end if;
 
             else
-               Status := Reduce_Stack_1 (Current_Parser, Action, Nonterm, 
Shared_Parser.Lexer, Trace);
+               begin
+                  Status := Reduce_Stack_1 (Shared_Parser, Current_Parser, 
Action, New_State);
+               exception
+               when Partial_Parse =>
+                  if Parser_State.Resume_Active or Shared_Parser.Parsers.Count 
> 1 then
+                     --  Wait until there is one parser not in resume.
+                     if Trace_Parse > Outline then
+                        Trace.Put_Line (" ... partial parse done, waiting for 
other parsers");
+                     end if;
+                  else
+                     if Trace_Parse > Outline then
+                        Trace.Put_Line (" ... partial parse done");
+                     end if;
 
-               --  Even when Reduce_Stack_1 returns Error, it did reduce the 
stack, so
-               --  push Nonterm.
-               Parser_State.Stack.Push ((New_State, Nonterm));
+                     declare
+                        Current_Token : constant Syntax_Trees.Rooted_Ref := 
Shared_Parser.Tree.Current_Token
+                          (Parser_State.Stream);
+                     begin
 
-               Parser_State.Tree.Set_State (Nonterm, New_State);
+                        --  Insert EOI on Shared_Stream
+                        if Shared_Parser.Tree.ID (Current_Token.Node) /=
+                          Shared_Parser.Tree.Lexer.Descriptor.EOI_ID
+                        then
+                           declare
+                              Last_Token_Byte_Region_Last : constant 
Buffer_Pos := Shared_Parser.Tree.Byte_Region
+                                (Current_Token.Node, Trailing_Non_Grammar => 
False).Last;
+                              Last_Token_Char_Region_Last : constant 
Buffer_Pos := Shared_Parser.Tree.Char_Region
+                                (Current_Token.Node, Trailing_Non_Grammar => 
False).Last;
+                              Last_Token_Line_Region_Last : constant 
Line_Number_Type := Shared_Parser.Tree.Line_Region
+                                (Current_Token, Trailing_Non_Grammar => 
True).Last;
+
+                              EOI_Token : constant Lexer.Token :=
+                                (ID          => 
Shared_Parser.Tree.Lexer.Descriptor.EOI_ID,
+                                 Byte_Region =>
+                                   (First    => Last_Token_Byte_Region_Last + 
1,
+                                    Last     => Last_Token_Byte_Region_Last),
+                                 Char_Region =>
+                                   (First    => Last_Token_Char_Region_Last + 
1,
+                                    Last     => Last_Token_Char_Region_Last),
+                                 Line_Region => (First | Last => 
Last_Token_Line_Region_Last));
+                           begin
+                              Shared_Parser.Tree.Insert_Source_Terminal
+                                (Shared_Parser.Tree.Shared_Stream,
+                                 Terminal => EOI_Token,
+                                 Before   => Shared_Parser.Tree.Stream_Next 
(Current_Token).Element,
+                                 Errors   => Syntax_Trees.Null_Error_List);
+                           end;
+                        end if;
+                     end;
+                     raise;
+                  end if;
+               end;
 
                case Status is
                when Ok =>
-                  Current_Parser.Set_Verb (Reduce);
+                  Parser_State.Set_Verb (Reduce);
+                  Parser_State.Last_Action := Action;
 
                   if Trace_Parse > Detail then
-                     Trace.Put_Line (" ... goto state " & Trimmed_Image 
(New_State));
+                     Trace.Put_Line
+                       (" ... goto state " &
+                          (if Trace_Parse_No_State_Numbers
+                           then "--"
+                           else Trimmed_Image (New_State)));
                   end if;
 
-               when Semantic_Checks.Error =>
-                  Current_Parser.Set_Verb (Error);
+               when Syntax_Trees.In_Parse_Actions.Error =>
+                  Parser_State.Set_Verb (Error);
+                  Parser_State.Last_Action := Action; -- not Error, since we 
did a reduce.
+                  Parser_State.Error_Count        := @ + 1;
                   Parser_State.Zombie_Token_Count := 1;
                end case;
             end if;
@@ -179,44 +446,63 @@ package body WisiToken.Parse.LR.Parser is
 
       when Accept_It =>
          case Reduce_Stack_1
-           (Current_Parser,
-            (Reduce, Action.Production, Action.Action, Action.Check, 
Action.Token_Count),
-            Nonterm, Shared_Parser.Lexer, Trace)
+           (Shared_Parser, Current_Parser,
+            (Reduce, Action.Production, Action.Token_Count),
+            Accept_State)
          is
          when Ok =>
-            Current_Parser.Set_Verb (Action.Verb);
+            Parser_State.Set_Verb (Action.Verb);
 
-            Parser_State.Tree.Set_Root (Nonterm);
-
-         when Semantic_Checks.Error =>
-            Current_Parser.Set_Verb (Error);
+         when Syntax_Trees.In_Parse_Actions.Error =>
+            Parser_State.Set_Verb (Error);
             Parser_State.Zombie_Token_Count := 1;
          end case;
 
       when Error =>
-         Current_Parser.Set_Verb (Action.Verb);
+         Parser_State.Set_Verb (Action.Verb);
+         Parser_State.Error_Count := @ + 1;
 
          Parser_State.Zombie_Token_Count := 1;
 
          declare
+            use WisiToken.Syntax_Trees;
+            Tree : Syntax_Trees.Tree renames Shared_Parser.Tree;
+
             Expecting : constant Token_ID_Set := LR.Expecting
-              (Shared_Parser.Table.all, Parser_State.Stack.Peek.State);
+              (Shared_Parser.Table.all, Tree.State (Parser_State.Stream));
+
+            New_Error : constant Parse_Error :=
+              (First_Terminal => Tree.Lexer.Descriptor.First_Terminal,
+               Last_Terminal  => Tree.Lexer.Descriptor.Last_Terminal,
+               Expecting      => Expecting,
+               Recover_Ops    => Recover_Op_Nodes_Arrays.Empty_Vector,
+               Recover_Test   => null);
+
          begin
-            Parser_State.Errors.Append
-              ((Label          => LR.Action,
-                First_Terminal => Trace.Descriptor.First_Terminal,
-                Last_Terminal  => Trace.Descriptor.Last_Terminal,
-                Error_Token    => Parser_State.Current_Token,
-                Expecting      => Expecting,
-                Recover        => (others => <>)));
-
-            if Trace_Parse > Outline then
-               Put
-                 (Trace,
-                  Integer'Image (Current_Parser.Label) & ":" &
-                    Unknown_State_Index'Image (Parser_State.Stack.Peek.State) 
& ": expecting: " &
-                    Image (Expecting, Trace.Descriptor.all));
-               Trace.New_Line;
+            if Tree.Input_Has_Matching_Error (Parser_State.Stream, New_Error) 
then
+               --  Keep the recover information so it can be used again.
+               null;
+
+            else
+               Tree.Delete_Errors_In_Input
+                 (Parser_State.Stream,
+                  Error_Pred_Parse'Access,
+                  Syntax_Trees.User_Data_Access_Constant 
(Shared_Parser.User_Data));
+
+               Tree.Add_Error_To_Input
+                 (Stream    => Parser_State.Stream,
+                  Data      => New_Error,
+                  User_Data => Syntax_Trees.User_Data_Access_Constant 
(Shared_Parser.User_Data));
+
+            end if;
+
+            if Trace_Parse > Detail then
+               Trace.Put_Line
+                 (" " & Tree.Trimmed_Image (Parser_State.Stream) & ": " &
+                    (if Trace_Parse_No_State_Numbers
+                     then "--"
+                     else Trimmed_Image (Tree.State (Parser_State.Stream))) &
+                    ": expecting: " & Image (Expecting, 
Tree.Lexer.Descriptor.all));
             end if;
          end;
       end case;
@@ -226,123 +512,218 @@ package body WisiToken.Parse.LR.Parser is
      (Shared_Parser : in out LR.Parser.Parser;
       Parser_State  : in out Parser_Lists.Parser_State)
    is
-      use Recover_Op_Arrays, Recover_Op_Array_Refs;
-      Ins_Del     : Vector renames Parser_State.Recover_Insert_Delete;
-      Ins_Del_Cur : Extended_Index renames 
Parser_State.Recover_Insert_Delete_Current;
+      use Recover_Op_Nodes_Arrays;
+      use Syntax_Trees;
+      use Parser_Lists;
+
+      Tree : Syntax_Trees.Tree renames Shared_Parser.Tree;
    begin
-      if Trace_Parse > Extra then
-         Shared_Parser.Trace.Put_Line
-           (Integer'Image (Parser_State.Label) & ": shared_token:" &
-              WisiToken.Token_Index'Image (Parser_State.Shared_Token) &
-              " inc_shared_token: " & Boolean'Image 
(Parser_State.Inc_Shared_Token) &
-              " recover_insert_delete:" &
-              (if Parser_State.Recover_Insert_Delete_Current = No_Index
-               then ""
-               else Parser_State.Recover_Insert_Delete_Current'Image & " " &
-                  Image
-                    (Constant_Ref (Parser_State.Recover_Insert_Delete, 
Parser_State.Recover_Insert_Delete_Current),
-                     Shared_Parser.Trace.Descriptor.all)));
+      if Parser_State.Current_Recover_Op = No_Insert_Delete then
+         return;
       end if;
 
-      loop
-         exit when Ins_Del_Cur = Recover_Op_Arrays.No_Index;
-         declare
-            Op : Recover_Op renames Constant_Ref (Ins_Del, Ins_Del_Cur);
-         begin
-            if Op.Op = Delete and then
-              Op.Del_Token_Index =
-              (if Parser_State.Inc_Shared_Token
-               then Parser_State.Shared_Token + 1
-               else Parser_State.Shared_Token)
-            then
-               Parser_State.Shared_Token := Parser_State.Shared_Token + 1;
-               --  We don't reset Inc_Shared_Token here; only after the next 
token is
-               --  actually used.
-               Ins_Del_Cur := Ins_Del_Cur + 1;
-               if Ins_Del_Cur > Last_Index (Ins_Del)  then
-                  Ins_Del_Cur := No_Index;
+      declare
+         --  Resume only processes recover_ops in one error; the current error.
+         --  If it also fixes a following error, that error has no recover.
+         Err          : Error_Data'Class := Error 
(Parser_State.Current_Error_Ref (Tree));
+         Recover_Ops  : Recover_Op_Nodes_Arrays.Vector renames 
Recover_Op_Array_Var_Ref (Err);
+         Err_Modified : Boolean          := False;
+      begin
+         One_Error :
+         loop
+            declare
+               Op  : Recover_Op_Nodes renames Recover_Ops 
(Parser_State.Current_Recover_Op);
+            begin
+               if Op.Op = Delete then
+                  declare
+                     Deleted_Node : constant Valid_Node_Access := 
Tree.First_Terminal
+                       (Tree.Current_Token (Parser_State.Stream)).Node;
+                  begin
+                     if Op.Del_Index = Tree.Get_Sequential_Index 
(Deleted_Node) then
+                        Err_Modified := True;
+
+                        Parser_State.Do_Delete
+                          (Tree, Op, User_Data_Access_Constant 
(Shared_Parser.User_Data));
+
+                        if Trace_Parse > Extra  then
+                           Tree.Lexer.Trace.Put_Line
+                             (" " & Tree.Trimmed_Image (Parser_State.Stream) & 
": delete " & Op.Del_Index'Image);
+                        end if;
+
+                        Parser_State.Next_Recover_Op (Tree);
+
+                        exit One_Error when Parser_State.Current_Recover_Op = 
No_Insert_Delete;
+
+                     else
+                        exit One_Error;
+                     end if;
+                  end;
+               else
+                  exit One_Error;
                end if;
-            else
-               exit;
-            end if;
-         end;
-      end loop;
+            end;
+         end loop One_Error;
+
+         if Err_Modified then
+            Parser_State.Update_Error (Tree, Err, User_Data_Access_Constant 
(Shared_Parser.User_Data));
+         end if;
+      end;
    end Do_Deletes;
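
Do_Deletes above applies consecutive Delete recover ops whose Del_Index matches the sequential index of the next terminal, and stops at the first op it cannot apply. A minimal standalone sketch of that loop over plain records (toy types, not the WisiToken Recover_Op_Nodes declarations):

   with Ada.Text_IO;
   procedure Do_Deletes_Sketch is
      type Op_Kind is (Insert, Delete);

      type Recover_Op is record
         Op        : Op_Kind  := Insert;
         Del_Index : Positive := 1;  -- sequential index of the token to delete
      end record;

      Ops : constant array (1 .. 3) of Recover_Op :=
        ((Delete, 4), (Delete, 5), (Insert, 1));

      Current_Op    : Positive := Ops'First;
      Current_Token : Positive := 4;  -- sequential index of the next terminal
   begin
      --  Apply consecutive Delete ops that name the next terminal, then
      --  stop; anything else is left for the main parse loop.
      while Current_Op <= Ops'Last
        and then Ops (Current_Op).Op = Delete
        and then Ops (Current_Op).Del_Index = Current_Token
      loop
         Ada.Text_IO.Put_Line ("delete token" & Positive'Image (Current_Token));
         Current_Token := Current_Token + 1;
         Current_Op    := Current_Op + 1;
      end loop;
   end Do_Deletes_Sketch;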
 
+   procedure Parse_Verb
+     (Shared_Parser : in out LR.Parser.Parser;
+      Verb          :    out All_Parse_Action_Verbs;
+      Zombie_Count  :    out SAL.Base_Peek_Type)
    --  Verb: the type of parser cycle to execute;
    --
-   --  Accept : all Parsers.Verb return Accept - done parsing.
+   --     Accept_It : all Parsers.Verb return Accept - done parsing.
    --
-   --  Shift : some Parsers.Verb return Shift, all with the same current
-   --  token in Shared_Parser.Terminals.
+   --     Shift : some Parsers.Verb return Shift.
    --
-   --  Pause : Resume is active, and this parser has reached Resume_Goal,
-   --  so it is waiting for the others to catch up.
+   --     Pause : Resume is active, and this parser has reached
+   --  Resume_Goal, so it is waiting for the others to catch up. Or
+   --  resume is not active, and this parser has shifted a nonterminal,
+   --  while some other parser has broken down that nonterminal; it is
+   --  waiting for the others to catch up. This ensures parsers are
+   --  within Mckenzie_Param.Zombie_Limit of the same terminal when they
+   --  enter error recovery.
    --
-   --  Reduce : some Parsers.Verb return Reduce.
+   --     Reduce : some Parsers.Verb return Reduce.
    --
-   --  Error : all Parsers.Verb return Error.
+   --     Error : all Parsers.Verb return Error.
    --
    --  Zombie_Count: count of parsers in Error state
-   procedure Parse_Verb
-     (Shared_Parser : in out LR.Parser.Parser;
-      Verb          :    out All_Parse_Action_Verbs;
-      Zombie_Count  :    out SAL.Base_Peek_Type)
    is
+      use all type WisiToken.Syntax_Trees.Stream_Node_Ref;
+
       Shift_Count   : SAL.Base_Peek_Type := 0;
       Accept_Count  : SAL.Base_Peek_Type := 0;
-      Error_Count   : SAL.Base_Peek_Type := 0;
       Resume_Active : Boolean            := False;
+      Some_Paused   : Boolean            := False;
+
+      Min_Sequential_Index : Syntax_Trees.Sequential_Index := 
Syntax_Trees.Sequential_Index'Last;
+      Max_Byte_Last        : Buffer_Pos                    := Buffer_Pos'First;
    begin
       Zombie_Count := 0;
 
       for Parser_State of Shared_Parser.Parsers loop
+         --  Parser_State.Verb is set by Do_Action, except Pause, Accept_It are
+         --  set here.
          case Parser_State.Verb is
          when Pause | Shift =>
-            Do_Deletes (Shared_Parser, Parser_State);
-
             Shift_Count := Shift_Count + 1;
             Parser_State.Set_Verb (Shift);
 
+            --  We call Do_Deletes here so it can break down a nonterm if 
needed;
+            --  then the check for resume done is correct.
+            --  ada_mode-recover_bad_char.adb.
+            Do_Deletes (Shared_Parser, Parser_State);
+
             if Parser_State.Resume_Active then
-               --  There may still be ops left in Recover_Insert_Delete after 
we get
-               --  to Resume_Token_Goal, probably from a Language_Fix or 
string quote
-               --  fix that deletes a lot of tokens.
-               if Parser_State.Resume_Token_Goal <= Parser_State.Shared_Token 
and
-                 Parser_State.Recover_Insert_Delete_Current = 
Recover_Op_Arrays.No_Index
-               then
-                  Parser_State.Resume_Active := False;
-                  if Trace_Parse > Detail then
-                     Shared_Parser.Trace.Put_Line (Integer'Image 
(Parser_State.Label) & ": resume_active: False");
+               --  We want to set Resume_Active False _after_ we shift the goal
+               --  token, so we check the stack top. test_incremental.adb
+               --  Nonterm_Resume_01.
+               declare
+                  use WisiToken.Syntax_Trees;
+
+                  function Get_Terminal return Node_Access
+                  is
+                     Ref : Stream_Node_Parents := 
Shared_Parser.Tree.To_Stream_Node_Parents
+                       (Shared_Parser.Tree.To_Rooted_Ref
+                          (Parser_State.Stream, Shared_Parser.Tree.Peek 
(Parser_State.Stream)));
+                  begin
+                     Shared_Parser.Tree.Last_Sequential_Terminal (Ref, 
Parser_State.Stream, Preceding => True);
+                     return Ref.Ref.Node;
+                  end Get_Terminal;
+
+                  Terminal : constant Node_Access := Get_Terminal;
+
+                  Terminal_Index : constant Base_Sequential_Index := 
Shared_Parser.Tree.Get_Sequential_Index
+                    (Terminal);
+               begin
+                  if Terminal_Index = Invalid_Sequential_Index
+                    --  Most likely we just shifted a nonterm that got past 
the resume
+                    --  goal; ada_mode-interactive_02.adb.
+                    or else
+                    (Parser_State.Resume_Token_Goal <= Terminal_Index and
+                       Parser_State.Current_Recover_Op = No_Insert_Delete)
+                       --  Parser_State.Current_Recover_Op can be 
No_Insert_Delete here
+                       --  when Current_Token is a nonterm that needs to be 
broken down
+                       --  before the remaining ops can be performed.
+                       --  ada_mode-interactive_01.adb
+                  then
+                     Parser_State.Resume_Active := False;
+                     Parser_State.Resume_Token_Goal := 
Syntax_Trees.Invalid_Sequential_Index;
+                     Parser_State.Clear_Current_Error_Features;
+                     if Trace_Parse > Detail then
+                        Shared_Parser.Tree.Lexer.Trace.Put_Line
+                          (" " & Shared_Parser.Tree.Trimmed_Image 
(Parser_State.Stream) & ": resume_active: False");
+                     end if;
+                  else
+                     Resume_Active := True;
                   end if;
-               else
-                  Resume_Active := True;
-               end if;
+               end;
+
+            elsif Shared_Parser.Resume_Active then
+               declare
+                  use Syntax_Trees;
+                  First_Terminal : constant Node_Access := 
Shared_Parser.Tree.First_Sequential_Terminal
+                    (Shared_Parser.Tree.Shared_Token 
(Parser_State.Stream).Node);
+               begin
+                  if First_Terminal /= Invalid_Node_Access then
+                     Min_Sequential_Index := Syntax_Trees.Sequential_Index'Min
+                       (@, Shared_Parser.Tree.Get_Sequential_Index 
(First_Terminal));
+                  else
+                     --  No terminal in Shared_Token
+                     null;
+                  end if;
+               end;
+
+            else
+               --  Ensure parsers stay close to the same terminal; see note 
below at
+               --  use of Max_Byte_Last.
+               declare
+                  use Syntax_Trees;
+
+                  --  We don't just use Byte_Region (stack_top), because that 
can be
+                  --  slow, and we do this every parse cycle.
+                  Last_Term : constant Node_Access := 
Shared_Parser.Tree.Last_Terminal
+                    (Shared_Parser.Tree.Get_Node (Parser_State.Stream, 
Shared_Parser.Tree.Peek (Parser_State.Stream)));
+               begin
+                  if Last_Term /= Invalid_Node_Access then
+                     Max_Byte_Last := Buffer_Pos'Max
+                       (@, Shared_Parser.Tree.Byte_Region (Last_Term, 
Trailing_Non_Grammar => False).Last);
+                  end if;
+               end;
             end if;
 
          when Reduce =>
             Verb := Reduce;
+            --  No need to review rest of parsers, and Zombie_Count will be
+            --  ignored.
             return;
 
          when Accept_It =>
             Accept_Count := Accept_Count + 1;
 
          when Error =>
-            if Shared_Parser.Enable_McKenzie_Recover then
-               --  This parser is waiting for others to error; they can 
continue
-               --  parsing.
-               Zombie_Count := Zombie_Count + 1;
-            else
-               Error_Count := Error_Count + 1;
-            end if;
+            --  This parser is waiting for others to error; they can continue
+            --  parsing.
+            Zombie_Count := Zombie_Count + 1;
          end case;
       end loop;
 
       if Accept_Count > 0 and Shared_Parser.Parsers.Count = Accept_Count + 
Zombie_Count then
          Verb := Accept_It;
 
-      elsif Shared_Parser.Parsers.Count = Error_Count + Zombie_Count then
+         if Shared_Parser.Resume_Active then
+            Shared_Parser.Resume_Active := False;
+            McKenzie_Recover.Clear_Sequential_Index (Shared_Parser);
+         end if;
+
+      elsif Shared_Parser.Parsers.Count = Zombie_Count then
          Verb := Error;
+         return;
 
       elsif Shift_Count > 0 then
          Verb := Shift;
@@ -352,905 +733,2789 @@ package body WisiToken.Parse.LR.Parser is
       end if;
 
       if Resume_Active then
+         Shared_Parser.Resume_Active := True;
+
          for Parser_State of Shared_Parser.Parsers loop
             if Parser_State.Verb = Shift and not Parser_State.Resume_Active 
then
                Parser_State.Set_Verb (Pause);
+               if Trace_Parse > Detail then
+                  Shared_Parser.Tree.Lexer.Trace.Put_Line
+                    (" " & Shared_Parser.Tree.Trimmed_Image 
(Parser_State.Stream) & ": pause: resume exit");
+               end if;
             end if;
          end loop;
-      end if;
-   end Parse_Verb;
-
-   ----------
-   --  Public subprograms, declaration order
 
-   overriding procedure Finalize (Object : in out LR.Parser.Parser)
-   is begin
-      Free_Table (Object.Table);
-   end Finalize;
+      elsif Shared_Parser.Resume_Active then
+         --  Ensure all parsers are on the same terminal before exiting resume.
+         --  All error recover insert and delete are done, so all parsers must
+         --  see the same terminals.
+         for Parser_State of Shared_Parser.Parsers loop
+            if Parser_State.Verb = Shift then
+               declare
+                  use Syntax_Trees;
+                  First_Terminal : constant Node_Access := 
Shared_Parser.Tree.First_Sequential_Terminal
+                    (Shared_Parser.Tree.Shared_Token 
(Parser_State.Stream).Node);
+               begin
+                  if First_Terminal /= Invalid_Node_Access and then
+                    Min_Sequential_Index /= Syntax_Trees.Sequential_Index'Last 
and then
+                    Min_Sequential_Index /= 
Shared_Parser.Tree.Get_Sequential_Index (First_Terminal)
+                  then
+                     Some_Paused := True;
+                     Parser_State.Set_Verb (Pause);
+                     if Trace_Parse > Detail then
+                        Shared_Parser.Tree.Lexer.Trace.Put_Line
+                          (" " & Shared_Parser.Tree.Trimmed_Image 
(Parser_State.Stream) &
+                             ": pause: resume sync (min index" & 
Min_Sequential_Index'Image & ")");
+                     end if;
+                  end if;
+               end;
+            end if;
+         end loop;
 
-   procedure New_Parser
-     (Parser                         :    out          LR.Parser.Parser;
-      Trace                          : not null access WisiToken.Trace'Class;
-      Lexer                          : in              WisiToken.Lexer.Handle;
-      Table                          : in              Parse_Table_Ptr;
-      Language_Fixes                 : in              Language_Fixes_Access;
-      Language_Matching_Begin_Tokens : in              
Language_Matching_Begin_Tokens_Access;
-      Language_String_ID_Set         : in              
Language_String_ID_Set_Access;
-      User_Data                      : in              
WisiToken.Syntax_Trees.User_Data_Access;
-      Max_Parallel                   : in              SAL.Base_Peek_Type := 
Default_Max_Parallel;
-      Terminate_Same_State           : in              Boolean            := 
True)
-   is
-      use all type Syntax_Trees.User_Data_Access;
-   begin
-      Parser.Lexer     := Lexer;
-      Parser.Trace     := Trace;
-      Parser.User_Data := User_Data;
+         if Shared_Parser.Resume_Active and not Some_Paused then
+            Shared_Parser.Resume_Active := False;
+            McKenzie_Recover.Clear_Sequential_Index (Shared_Parser);
+         end if;
 
-      --  Terminals,  Line_Begin_Token are initialized to empty arrays.
+      elsif Shared_Parser.Parsers.Count > 1 then
+         --  Ensure parsers stay close to the same terminal. In general,
+         --  Parser_State.Current_Token.Byte_Region should not be within
+         --  another parser stack_top, unless it just included that token in a
+         --  reduce. But in incremental parse, one parser can shift a nonterm,
+         --  while another parser has broken down that nonterm and is working
+         --  thru it one terminal at a time.
+         declare
+            Not_Paused         : array (1 .. Shared_Parser.Parsers.Count) of 
Boolean := (others => False);
+            Parser_Index       : SAL.Base_Peek_Type                            
      := Not_Paused'First;
+            Max_Terminal_Count : Integer                                       
      := 0;
+         begin
+            for Parser_State of Shared_Parser.Parsers loop
+               if Parser_State.Verb = Shift then
+                  declare
+                     use Syntax_Trees;
+                     Current_Token_Node : constant Node_Access := 
Shared_Parser.Tree.Current_Token
+                       (Parser_State.Stream).Node;
+                     First_Terminal : constant Node_Access :=
+                       (if Current_Token_Node /= Invalid_Node_Access
+                        then Shared_Parser.Tree.First_Terminal 
(Current_Token_Node)
+                        else Invalid_Node_Access);
+                  begin
+                     if First_Terminal /= Invalid_Node_Access then
+                        if Shift_Count < Shared_Parser.Parsers.Count then
+                           --  Some parsers are zombies; otherwise this count 
is a waste of time.
+                           --  ada_mode-recover_40.adb used to require 
Max_Terminal_Count (before
+                           --  Matching_Begin added 'null;') .
+                           Max_Terminal_Count := Integer'Max
+                             (@, Shared_Parser.Tree.Count_Terminals 
(Current_Token_Node));
+                        end if;
 
-      Parser.Table                          := Table;
-      Parser.Language_Fixes                 := Language_Fixes;
-      Parser.Language_Matching_Begin_Tokens := Language_Matching_Begin_Tokens;
-      Parser.Language_String_ID_Set         := Language_String_ID_Set;
+                        if Shared_Parser.Tree.Label (First_Terminal) = 
Source_Terminal then
+                           declare
+                              Region : constant Buffer_Region := 
Shared_Parser.Tree.Byte_Region
+                                (First_Terminal, Trailing_Non_Grammar => 
False);
+                           begin
+                              --  Max_Byte_Last is last byte of farthest token 
on stack top; parsers
+                              --  whose Current_Token are within that token 
are not paused, so they
+                              --  can catch up.
+                              if Region.First < Max_Byte_Last then
+                                 Not_Paused (Parser_Index) := False;
+                              end if;
+                           end;
+                        end if;
+                     end if;
+                  end;
+                  Parser_Index := @ + 1;
+               end if;
+            end loop;
 
-      Parser.Enable_McKenzie_Recover := not McKenzie_Defaulted (Table.all);
+            for Parser_State of Shared_Parser.Parsers loop
+               if Parser_State.Verb = Error then
+                  Parser_State.Zombie_Token_Count := @ + Max_Terminal_Count;
 
-      Parser.Max_Parallel         := Max_Parallel;
-      Parser.Terminate_Same_State := Terminate_Same_State;
+                  if Trace_Parse > Extra then
+                     Shared_Parser.Tree.Lexer.Trace.Put_Line
+                       (" " & Shared_Parser.Tree.Trimmed_Image 
(Parser_State.Stream) & ": zombie (" &
+                          Integer'Image
+                            (Shared_Parser.Table.McKenzie_Param.Zombie_Limit - 
Parser_State.Zombie_Token_Count) &
+                          " tokens remaining)");
+                  end if;
+               end if;
+            end loop;
 
-      if User_Data /= null then
-         User_Data.Set_Lexer_Terminals (Lexer, 
Parser.Terminals'Unchecked_Access);
+            if (for all P of Not_Paused => P = False) then
+               --  All parsers Current_Token are after farthest stack top; 
none need
+               --  to be paused.
+               null;
+            else
+               Parser_Index := Not_Paused'First;
+               for Parser_State of Shared_Parser.Parsers loop
+                  if Parser_State.Verb = Shift and not Not_Paused 
(Parser_Index) then
+                     Parser_State.Set_Verb (Pause);
+                     if Trace_Parse > Detail then
+                        Shared_Parser.Tree.Lexer.Trace.Put_Line
+                          (" " & Shared_Parser.Tree.Trimmed_Image 
(Parser_State.Stream) & ": pause: main sync");
+                     end if;
+                  end if;
+               end loop;
+            end if;
+         end;
       end if;
-   end New_Parser;
+   end Parse_Verb;
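
Parse_Verb combines the per-parser verbs into a single verb for the next parse cycle. A simplified standalone sketch of that combination, assuming a toy Verbs type; the final fallback branch is simplified relative to the real procedure, and none of the pause/sync logic is shown.

   with Ada.Text_IO;
   procedure Parse_Verb_Sketch is
      type Verbs is (Shift, Reduce, Accept_It, Error, Pause);
      type Verb_Array is array (Positive range <>) of Verbs;

      --  Combine per-parser verbs into one verb for the next parse cycle,
      --  roughly as Parse_Verb does; Error parsers are counted as zombies.
      function Cycle_Verb (Parser_Verbs : Verb_Array) return Verbs is
         Shift_Count  : Natural := 0;
         Accept_Count : Natural := 0;
         Zombie_Count : Natural := 0;
      begin
         for V of Parser_Verbs loop
            case V is
            when Reduce        => return Reduce;
            when Pause | Shift => Shift_Count  := Shift_Count + 1;
            when Accept_It     => Accept_Count := Accept_Count + 1;
            when Error         => Zombie_Count := Zombie_Count + 1;
            end case;
         end loop;

         if Accept_Count > 0
           and then Parser_Verbs'Length = Accept_Count + Zombie_Count
         then
            return Accept_It;
         elsif Parser_Verbs'Length = Zombie_Count then
            return Error;
         elsif Shift_Count > 0 then
            return Shift;
         else
            return Error;  -- remaining case simplified for this sketch
         end if;
      end Cycle_Verb;
   begin
      Ada.Text_IO.Put_Line (Verbs'Image (Cycle_Verb ((Shift, Error, Accept_It))));
   end Parse_Verb_Sketch;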
 
-   overriding procedure Parse (Shared_Parser : aliased in out LR.Parser.Parser)
+   procedure Recover_To_Log
+     (Shared_Parser            : in out LR.Parser.Parser;
+      Recover_Log_File         : in     Ada.Text_IO.File_Type;
+      Recover_Result           : in     McKenzie_Recover.Recover_Status;
+      Pre_Recover_Parser_Count : in     SAL.Base_Peek_Type)
    is
-      use all type Ada.Strings.Unbounded.Unbounded_String;
-      use all type Syntax_Trees.User_Data_Access;
-      use all type Ada.Containers.Count_Type;
-
-      Trace : WisiToken.Trace'Class renames Shared_Parser.Trace.all;
+      use Ada.Text_IO;
+   begin
+      Put
+        (Recover_Log_File,
+         Ada.Calendar.Formatting.Image (Ada.Calendar.Clock) & " " &
+           Shared_Parser.Partial_Parse_Active'Image & " " &
+           Recover_Result'Image & " " &
+           Pre_Recover_Parser_Count'Image & " '" &
+           Shared_Parser.Tree.Lexer.File_Name & "'");
+
+      for Parser of Shared_Parser.Parsers loop
+         Put (Recover_Log_File, '(');
+         if Parser.Recover.Results.Count > 0 then
+            --  Count can be 0 when error recovery fails
+            Put (Recover_Log_File, Image 
(Parser.Recover.Results.Peek.Strategy_Counts));
+         end if;
+         Put
+           (Recover_Log_File,
+            Integer'Image (Parser.Recover.Enqueue_Count) &
+              Integer'Image (Parser.Recover.Check_Count) & " " &
+              Boolean'Image (Parser.Recover.Success));
+         Put (Recover_Log_File, ')');
+      end loop;
 
-      Current_Verb   : All_Parse_Action_Verbs;
-      Action         : Parse_Action_Node_Ptr;
-      Zombie_Count   : SAL.Base_Peek_Type;
+      New_Line (Recover_Log_File);
+      Flush (Recover_Log_File);
+   exception
+   when others =>
+      New_Line (Recover_Log_File);
+      Flush (Recover_Log_File);
+   end Recover_To_Log;
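
Recover_To_Log writes one timestamped line per recovery attempt and flushes even when an exception occurs, so partial data survives a crash. A minimal sketch of the timestamp-and-flush pattern; the file name and the field values on the line are made up, not the real log fields.

   with Ada.Calendar.Formatting;
   with Ada.Text_IO;
   procedure Recover_Log_Sketch is
      use Ada.Text_IO;
      Log : File_Type;
   begin
      Create (Log, Out_File, "recover.log");  -- file name is arbitrary here
      --  One line per recovery attempt: timestamp, partial-parse flag,
      --  recover result, parser count, source file name (values made up).
      Put (Log, Ada.Calendar.Formatting.Image (Ada.Calendar.Clock) &
             " FALSE SUCCESS 1 'example.adb'");
      New_Line (Log);
      Flush (Log);
      Close (Log);
   end Recover_Log_Sketch;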
 
-      procedure Check_Error (Check_Parser : in out Parser_Lists.Cursor)
+   procedure Check_Error
+     (Shared_Parser : in out LR.Parser.Parser;
+      Check_Parser  : in out Parser_Lists.Cursor)
+   is
+      procedure Report_Error
       is
-         procedure Report_Error
-         is begin
-            Shared_Parser.Parsers.First_State_Ref.Errors.Append
-              ((Label          => LR.Message,
-                First_Terminal => Trace.Descriptor.First_Terminal,
-                Last_Terminal  => Trace.Descriptor.Last_Terminal,
-                Recover        => <>,
-                Msg            => +"error during resume"));
-            if Debug_Mode then
-               raise SAL.Programmer_Error with Check_Parser.Label'Image & ": 
error during resume";
-            else
-               raise Syntax_Error;
+         --  This is actually a bug in error recovery, not a source syntax 
error.
+         Msg : constant String := Shared_Parser.Tree.Trimmed_Image 
(Check_Parser.Stream) &
+              ": error during resume";
+      begin
+         if Debug_Mode then
+            raise SAL.Programmer_Error with Msg;
+         else
+            raise WisiToken.Parse_Error with Msg;
+         end if;
+      end Report_Error;
+
+   begin
+      if Check_Parser.Verb = Error then
+         --  This parser errored on last input. This is how grammar conflicts
+         --  are resolved when the input text is valid, in which case we should
+         --  just terminate this parser. However, this may be due to invalid
+         --  input text, so we keep the parser alive but suspended for a few
+         --  tokens, to see if the other parsers also error, in which case they
+         --  all participate in error recovery.
+
+         --  We do not create zombie parsers during resume.
+         if not Check_Parser.State_Ref.Resume_Active then
+            --  Parser is now a zombie
+            if Trace_Parse > Detail then
+               Shared_Parser.Tree.Lexer.Trace.Put_Line
+                 (" " & Shared_Parser.Tree.Trimmed_Image (Check_Parser.Stream) 
& ": zombie");
             end if;
-         end Report_Error;
+            Check_Parser.Next;
 
-      begin
-         if Check_Parser.Verb = Error then
-            --  This parser errored on last input. This is how grammar 
conflicts
-            --  are resolved when the input text is valid, in which case we 
should
-            --  just terminate this parser. However, this may be due to invalid
-            --  input text, so we keep the parser alive but suspended for a few
-            --  tokens, to see if the other parsers also error, in which case 
they
-            --  all participate in error recovery.
-
-            --  We do not create zombie parsers during resume.
-            if not Check_Parser.State_Ref.Resume_Active then
-               --  Parser is now a zombie
-               if Trace_Parse > Detail then
-                  Trace.Put_Line (Integer'Image (Check_Parser.Label) & ": 
zombie");
-               end if;
-               Check_Parser.Next;
+         else
+            if Shared_Parser.Parsers.Count = 1 then
+               Report_Error;
 
             else
-               if Shared_Parser.Parsers.Count = 1 then
-                  Report_Error;
-
+               --  This is ok if a conflict occurred during resume - we assume this is
+               --  a branch that failed during recover as well. Otherwise it's a
+               --  programmer error.
+               if Check_Parser.State_Ref.Conflict_During_Resume then
+                  Shared_Parser.Parsers.Terminate_Parser
+                    (Check_Parser, Shared_Parser.Tree, "error in conflict 
during resume",
+                     Shared_Parser.Tree.Lexer.Trace.all);
                else
-                  --  This is ok if a conflict occured during resume - we 
assume this is
-                  --  a branch that failed during recover as well. Otherwise 
it's a
-                  --  programmer error.
-                  if Check_Parser.State_Ref.Conflict_During_Resume then
-                     Shared_Parser.Parsers.Terminate_Parser
-                       (Check_Parser, "error in conflict during resume", 
Shared_Parser.Trace.all,
-                        Shared_Parser.Terminals);
-                  else
-                     Report_Error;
-                  end if;
+                  Report_Error;
                end if;
             end if;
-         else
-            Check_Parser.Next;
          end if;
-      end Check_Error;
+      else
+         Check_Parser.Next;
+      end if;
+   end Check_Error;
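
Check_Error keeps an errored parser alive as a "zombie" for a bounded number of tokens, so that if the error is in the input rather than a resolved conflict, all parsers can enter recovery together. A toy sketch of that counting, assuming a hypothetical Zombie_Limit value standing in for McKenzie_Param.Zombie_Limit:

   with Ada.Text_IO;
   procedure Zombie_Sketch is
      Zombie_Limit       : constant := 3;  -- stands in for McKenzie_Param.Zombie_Limit
      Zombie_Token_Count : Natural  := 0;
   begin
      --  Each parse cycle the zombie skips one token; if the other parsers
      --  have not also errored within Zombie_Limit tokens, the zombie is
      --  terminated (or error recovery starts when all parsers are zombies).
      for Token in 1 .. 5 loop
         Zombie_Token_Count := Zombie_Token_Count + 1;
         if Zombie_Token_Count > Zombie_Limit then
            Ada.Text_IO.Put_Line
              ("terminate zombie parser at token" & Integer'Image (Token));
            exit;
         end if;
      end loop;
   end Zombie_Sketch;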
 
+   procedure Finish_Parse
+     (Parser            : in out LR.Parser.Parser;
+      Incremental_Parse : in     Boolean)
+   --  Final actions after LR accept state reached; call
+   --  User_Data.Insert_Token, Delete_Token.
+   is
+      use WisiToken.Syntax_Trees;
+      use all type Ada.Containers.Count_Type;
+      Parser_State : Parser_Lists.Parser_State renames 
Parser.Parsers.First_State_Ref;
+
+      Last_Deleted_Node_Parent : Node_Access;
    begin
+      --  We need parents set in the following code.
+      Parser_State.Clear_Stream;
+      Parser.Tree.Clear_Parse_Streams;
+
+      if Trace_Parse > Extra and then 
Parser_State.Recover_Insert_Delete.Length > 0 then
+         Parser.Tree.Lexer.Trace.New_Line;
+         Parser.Tree.Lexer.Trace.Put_Line ("before insert/delete tree:");
+         Parser.Tree.Lexer.Trace.Put_Line
+           (Parser.Tree.Image
+              (Children     => True,
+               Non_Grammar  => True,
+               Augmented    => True,
+               Line_Numbers => True));
+         Parser.Tree.Lexer.Trace.Put_Line
+           ("recover_insert_delete: " & Parser_Lists.Recover_Image 
(Parser_State, Parser.Tree));
+         Parser.Tree.Lexer.Trace.New_Line;
+      end if;
+
+      --  ada-mode-recover_33.adb requires calling Insert_Token,
+      --  Delete_Token in lexical order, which is Recover_Insert_Delete
+      --  order. Other use cases would benefit from calling all Delete
+      --  first, or all Insert first, but we use this order as the least
+      --  surprising.
+      for Node of Parser_State.Recover_Insert_Delete loop
+         for Err of Parser.Tree.Error_List (Node) loop
+            if not (Err in Lexer_Error) then
+               for Op of Recover_Op_Array_Const_Ref (Err) loop
+                  case Op.Op is
+                  when Insert =>
+                     if Parser.User_Data /= null then
+                        Parser.User_Data.Insert_Token (Parser.Tree, 
Op.Ins_Node);
+                     end if;
+                  when Delete =>
+                     --  Op.Del_Node.Non_Grammar were previously moved to
+                     --  Op.Del_Node.Parent in Syntax_Tree.Add_Deleted; now we 
can edit the
+                     --  shared stream, so we can clear them.
+                     Parser.Tree.Non_Grammar_Var (Op.Del_Node).Clear;
+
+                     if Parser.Tree.Parent (Op.Del_Node) /= 
Last_Deleted_Node_Parent then
+                        Last_Deleted_Node_Parent := Parser.Tree.Parent 
(Op.Del_Node);
+                        if Parser.User_Data /= null then
+                           Parser.User_Data.Delete_Tokens (Parser.Tree, 
Last_Deleted_Node_Parent);
+                        end if;
+                     end if;
+                  end case;
+               end loop;
+            end if;
+         end loop;
+      end loop;
+
+      if Trace_Parse > Extra or Trace_Action > Extra then
+         Parser.Tree.Lexer.Trace.Put_Line ("post-parse tree:");
+         Parser.Tree.Lexer.Trace.Put_Line
+           (Parser.Tree.Image
+              (Children     => True,
+               Non_Grammar  => True,
+               Augmented    => True,
+               Line_Numbers => True));
+         Parser.Tree.Lexer.Trace.New_Line;
+      end if;
+
+      if Trace_Memory > Detail then
+         Parser.Tree.Lexer.Trace.Put_Line ("parse complete");
+         --  IMPROVEME: we want Prefix True when running as an Emacs
+         --  subprocess, False otherwise. No way to tell from here.
+         Report_Memory (Parser.Tree.Lexer.Trace.all, Prefix => True);
+      end if;
+
       if Debug_Mode then
-         Trace.Put_Clock ("start");
+         declare
+            Error_Reported : WisiToken.Syntax_Trees.Node_Sets.Set;
+         begin
+            if Parser.User_Data = null then
+               declare
+                  Dummy : User_Data_Type;
+               begin
+                  Parser.Tree.Validate_Tree
+                    (Dummy, Error_Reported,
+                     Node_Index_Order => not Incremental_Parse,
+                     Validate_Node    => Syntax_Trees.Mark_In_Tree'Access);
+               end;
+               Parser.Tree.Clear_Augmented;
+            else
+               Parser.Tree.Validate_Tree (Parser.User_Data.all, 
Error_Reported, Node_Index_Order => False);
+            end if;
+
+            if Error_Reported.Count /= 0 then
+               raise WisiToken.Parse_Error with "parser: validate_tree failed";
+            end if;
+         end;
       end if;
+   end Finish_Parse;
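
Finish_Parse calls User_Data.Delete_Tokens once per parent node rather than once per deleted terminal, by remembering Last_Deleted_Node_Parent. A standalone sketch of that grouping over a toy record type (not the WisiToken node types):

   with Ada.Text_IO;
   procedure Delete_Notify_Sketch is
      type Deleted is record
         Token  : Character;
         Parent : Positive;
      end record;

      --  Deleted terminals in lexical order, each tagged with the parent
      --  node they were attached to.
      Deletions : constant array (1 .. 4) of Deleted :=
        (('a', 1), ('b', 1), ('c', 2), ('d', 2));

      Last_Parent : Natural := 0;  -- 0 = no parent notified yet
   begin
      for D of Deletions loop
         if D.Parent /= Last_Parent then
            Last_Parent := D.Parent;
            Ada.Text_IO.Put_Line
              ("Delete_Tokens for parent" & Positive'Image (D.Parent));
         end if;
      end loop;
   end Delete_Notify_Sketch;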
+
+   ----------
+   --  Public subprograms, declaration order
 
-      if Shared_Parser.User_Data /= null then
-         Shared_Parser.User_Data.Reset;
+   procedure New_Parser
+     (Parser                         :    out LR.Parser.Parser;
+      Lexer                          : in     WisiToken.Lexer.Handle;
+      Table                          : in     Parse_Table_Ptr;
+      Productions                    : in     
Syntax_Trees.Production_Info_Trees.Vector;
+      Language_Fixes                 : in     Language_Fixes_Access;
+      Language_Matching_Begin_Tokens : in     
Language_Matching_Begin_Tokens_Access;
+      Language_String_ID_Set         : in     Language_String_ID_Set_Access;
+      User_Data                      : in     Syntax_Trees.User_Data_Access)
+   is begin
+      Parser.Tree.Lexer  := Lexer;
+      Parser.Productions := Productions;
+      Parser.User_Data   := User_Data;
+
+      --  In Base_Parser; Tree, Line_Begin_Token, Last_Grammar_Node are 
default initialized.
+
+      Parser.Table                          := Table;
+      Parser.Language_Fixes                 := Language_Fixes;
+      Parser.Language_Matching_Begin_Tokens := Language_Matching_Begin_Tokens;
+      Parser.Language_String_ID_Set         := Language_String_ID_Set;
+
+      --  In Parser; String_Quote_Checked, Post_Recover, Parsers are default
+      --  initialized. Partial_Parse_Active is set by user after this.
+   end New_Parser;
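
Edit_Tree below walks a KMN edit list, where each element describes a stable region followed by inserted and deleted bytes, and maintains Shift_Bytes to map old buffer positions to new ones. A simplified standalone sketch of that bookkeeping; the three-field KMN record here is an illustration, not the full WisiToken.Parse.KMN declaration (which also tracks characters).

   with Ada.Text_IO;
   procedure KMN_Shift_Sketch is
      --  Simplified KMN edit: a stable byte count, then inserted and
      --  deleted byte counts.
      type KMN is record
         Stable_Bytes   : Natural;
         Inserted_Bytes : Natural;
         Deleted_Bytes  : Natural;
      end record;

      Edits : constant array (1 .. 2) of KMN :=
        ((Stable_Bytes => 10, Inserted_Bytes => 3, Deleted_Bytes => 0),
         (Stable_Bytes => 20, Inserted_Bytes => 0, Deleted_Bytes => 5));

      Old_Pos, New_Pos : Natural := 0;
      Shift_Bytes      : Integer := 0;
   begin
      for Edit of Edits loop
         --  Tokens entirely inside the stable region are shifted by
         --  Shift_Bytes; tokens overlapping the insert/delete are rescanned.
         Old_Pos     := Old_Pos + Edit.Stable_Bytes + Edit.Deleted_Bytes;
         New_Pos     := New_Pos + Edit.Stable_Bytes + Edit.Inserted_Bytes;
         Shift_Bytes := Shift_Bytes + Edit.Inserted_Bytes - Edit.Deleted_Bytes;
         Ada.Text_IO.Put_Line
           ("old pos" & Natural'Image (Old_Pos) &
            " -> new pos" & Natural'Image (New_Pos) &
            " shift" & Integer'Image (Shift_Bytes));
      end loop;
   end KMN_Shift_Sketch;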
+
+   procedure Edit_Tree
+     (Parser : in out LR.Parser.Parser;
+      Edits  : in     KMN_Lists.List)
+   is
+      --  Similar to [Lahav 2004] Algorithms 3, 4. That assumes creating a
+      --  separate temp list of new tokens, and then merging that into the
+      --  parse tree, is faster than merging new tokens in one by one; we
+      --  just do the latter. We also don't modify the edit list.
+      --
+      --  Parser.Lexer contains the edited text; the initial text is not
+      --  available.
+      use WisiToken.Syntax_Trees;
+      use all type Ada.Containers.Count_Type;
+      use all type KMN_Lists.Cursor;
+
+      Tree : Syntax_Trees.Tree renames Parser.Tree;
+
+      KMN_Node         : KMN_Lists.Cursor := Edits.First;
+      Old_Byte_Pos     : Base_Buffer_Pos  := 0;
+      Old_Char_Pos     : Base_Buffer_Pos  := 0;
+      New_Byte_Pos     : Base_Buffer_Pos  := 0;
+      New_Char_Pos     : Base_Buffer_Pos  := 0;
+
+      Scanned_Byte_Pos : Base_Buffer_Pos  := 0;
+      Scanned_Char_Pos : Base_Buffer_Pos  := 0;
+      --  End of last token saved after being scanned by the lexer; this
+      --  does not include trailing whitespace, so it is not actually the
+      --  last position scanned by the lexer. Note that the lexer has
+      --  effectively scanned the deleted bytes in the current KMN, so when
+      --  comparing unshifted token positions to Scanned_Byte_Pos, we may
+      --  need to add KMN.Deleted_Bytes.
+
+      Shift_Bytes      : Base_Buffer_Pos  := 0;
+      Shift_Chars      : Base_Buffer_Pos  := 0;
+
+      Shift_Lines : Base_Line_Number_Type := 0;
+      --  Whenever a non_grammar is deleted from Tree (either permanently,
+      --  or moved to Floating_Non_Grammar), Shift_Lines is decremented by
+      --  New_Line_Count (non_grammar_token). Then if a token is restored
+      --  from Floating_Non_Grammar to the Tree, Shift_Lines is incremented;
+      --  if a token is deleted from Floating_Non_Grammar, Shift_Lines is
+      --  not changed. See Deleted_Shift_Lines in KMN_Loop for additional
+      --  rules.
+
+      Floating_Non_Grammar : Lexer.Token_Arrays.Vector;
+      --  Non_grammar that are detached from a node (for various reasons).
+      --  These are not shifted, because the correct shift is
+      --  unknown at the time they are detached. ada_mode-recover_42.adb.
+      --
+      --  If a non_grammar is floated from a scanned node, it is unshifted
+      --  to be consistent.
+
+      Delayed_Scan           : Boolean             := False;
+      Delayed_Floating_Index : Positive_Index_Type := Positive_Index_Type'Last;
+      Delayed_Lex_Start_Byte : Buffer_Pos          := Buffer_Pos'Last;
+      Delayed_Lex_Start_Char : Buffer_Pos          := Buffer_Pos'Last;
+      Delayed_Lex_Start_Line : Line_Number_Type    := Line_Number_Type'Last;
+      --  When multiple edits occur in a token, the last one may insert or
+      --  delete an end delimiter, so it is not possible to compute the
+      --  correct scan end when handling the first edit. So the scan is
+      --  delayed. If Delayed_Floating_Index /= 'Last, the token is a
+      --  non_grammar. We also get delayed scans when inserting/deleting a
+      --  block delimiter sets Scan_End past the next KMN.
+
+      Scan_End : Base_Buffer_Pos := Invalid_Buffer_Pos;
+      --  If Scan_End /= Invalid_Buffer_Pos, an edit exposed text as
+      --  code; a comment or string end was inserted, or a comment or string
+      --  start was deleted. Scan all exposed code thru Scan_End (which
+      --  is shifted).
+      --
+      --  If the start and end delimiters of the block token are different,
+      --  we don't need to check for start delimiter inserted or end
+      --  delimiter deleted; the edited token is scanned, and all the tokens
+      --  that the new token covers will be deleted because they are in the
+      --  region scanned.
+      --
+      --  However, if the start and end delimiters are the same (as for
+      --  strings), then deleting either delimiter requires all text thru
+      --  new-line or EOI be scanned.
+
+      type Lexer_Error_Data is record
+         Node : Node_Access;
+         --  The node containing the error.
+
+         Scan_End : Base_Buffer_Pos := Invalid_Buffer_Pos;
+         --  If Node is scanned and this is not invalid, the scan must continue
+         --  until this position is scanned. Shifted.
+
+         Scan_Start_Node : Node_Access := Invalid_Node_Access;
+         --  If Node is scanned and this is not invalid, the scan must start at
+         --  this node.
+
+         Edit_Region : Buffer_Region := Null_Buffer_Region;
+         --  Null_Buffer_Region if Node.ID is not block delimited.
+         --  Otherwise, if a matching delimiter is inserted or deleted in this
+         --  region, Node must be scanned. Unshifted.
+      end record;
+
+      package Lexer_Error_Data_Lists is new Ada.Containers.Doubly_Linked_Lists 
(Lexer_Error_Data);
+
+      Lexer_Errors : Lexer_Error_Data_Lists.List;
+      --  This list records the scan region for lexer errors, depending on
+      --  where they occur. Then if an edit might affect the lexer error,
+      --  the scan for the edit covers that region. That lets an edit fix
+      --  the lexer error. test_incremental.adb Edit_String_06,
+      --  Lexer_Errors_04.
+
+      Stream : Syntax_Trees.Stream_ID; -- Tree.Shared_Stream that we are 
editing.
+
+      Terminal : Terminal_Ref;
+      --  Node being considered for shift or delete. Terminals before
+      --  Terminal are shifted; Terminal and terminals after it are
+      --  unshifted.
+
+      Terminal_Non_Grammar_Next : Lexer.Token_Arrays.Extended_Index := 
Lexer.Token_Arrays.No_Index;
+      --  Next non_grammar in Terminal to be shifted or deleted.
+
+      Terminal_Shifted : Boolean := False;
+      --  On exit from Unchanged_Loop, Terminal is not shifted unless
+      --  Terminal_Non_Grammar_Next is not No_Index. Terminal_Shifted is
+      --  used in Delete_Scanned_Loop to account for this.
+
+      procedure Maybe_Delete_Lexer_Errors (Node : in Valid_Node_Access)
+      is begin
+         if Invalid_Error_Ref /= Tree.Has_Error_Class (Node, 
Lexer_Error'(others => <>)) then
+            --  Delete from Lexer_Errors. test_incremental.adb Edit_String_09
+            declare
+               use Lexer_Error_Data_Lists;
+               Cur : Cursor := Lexer_Errors.First;
+            begin
+               loop
+                  exit when Cur = No_Element;
+
+                  if Lexer_Errors (Cur).Node = Node then
+                     declare
+                        To_Delete_1 : Cursor := Cur;
+                     begin
+                        Next (Cur);
+                        Lexer_Errors.Delete (To_Delete_1);
+                     end;
+                  end if;
+                  Next (Cur);
+               end loop;
+            end;
+         end if;
+      end Maybe_Delete_Lexer_Errors;
+
+      procedure Breakdown (Terminal : in out Terminal_Ref; To_Single : in 
Boolean := False)
+      with Pre => Terminal /= Invalid_Stream_Node_Ref
+      is begin
+         if Tree.Label (Terminal.Element) = Nonterm then
+            if Trace_Incremental_Parse > Detail then
+               Tree.Lexer.Trace.Put_Line
+                 ("breakdown " & (if To_Single then "single " else "") & 
Tree.Image
+                    (Tree.Get_Node (Terminal.Stream, Terminal.Element), 
Node_Numbers => True) &
+                    " target " & Tree.Image (Terminal.Node, Node_Numbers => 
True));
+               if Trace_Incremental_Parse > Extra + 1 then
+                  Tree.Lexer.Trace.Put_Line ("... before:");
+                  Tree.Lexer.Trace.Put_Line (Tree.Image (Stream, Children => 
True));
+                  Tree.Lexer.Trace.New_Line;
+               end if;
+            end if;
+            Tree.Breakdown
+              (Terminal, Parser.Productions,
+               Syntax_Trees.User_Data_Access_Constant (Parser.User_Data), 
First_Terminal => True);
+
+            if To_Single and then Tree.Label (Terminal.Element) = Nonterm then
+               Tree.Left_Breakdown (Terminal, 
Syntax_Trees.User_Data_Access_Constant (Parser.User_Data));
+            end if;
+            if Trace_Incremental_Parse > Extra then
+               Tree.Lexer.Trace.Put_Line
+                 ("... result " & Tree.Image (Stream, Children => 
Trace_Incremental_Parse > Extra + 1));
+            end if;
+         end if;
+      end Breakdown;
+
+   begin
+      Tree.Start_Edit;
+
+      if Edits.Length = 0 then
+         return;
       end if;
 
-      Shared_Parser.String_Quote_Checked := Invalid_Line_Number;
-      Shared_Parser.Shared_Tree.Clear;
-      Shared_Parser.Parsers              := Parser_Lists.New_List
-        (Shared_Tree => Shared_Parser.Shared_Tree'Unchecked_Access);
+      Stream := Tree.Shared_Stream;
+
+      --  Breakdown Recover_Conflict nonterms. Nonterms are marked
+      --  Recover_Conflict when the initial parse resolves a conflict during
+      --  error recovery; a subsequent edit may require a different conflict
+      --  resolution. We breakdown all the way to terminals, because some of
+      --  the children may be nonterms that were created before the error
+      --  was detected, but need to change. test_incremental.adb
+      --  Undo_Conflict_01.
+      --
+      --  We must do this before Undo_Recover, because that might break down
+      --  a Recover_Conflict node.
+      declare
+         Ref          : Stream_Node_Ref := Tree.First_Recover_Conflict;
+         To_Breakdown : Stream_Node_Ref;
+      begin
+         Breakdown_Recover_Conflict :
+         loop
+            exit Breakdown_Recover_Conflict when Ref = Invalid_Stream_Node_Ref;
+
+            if Tree.Label (Ref.Node) = Nonterm and then Tree.Is_Empty_Nonterm 
(Ref.Node) then
+               if Trace_Incremental_Parse > Detail then
+                  Tree.Lexer.Trace.Put_Line
+                    ("delete empty recover_conflict node " & Tree.Image 
(Ref.Node, Node_Numbers => True));
+               end if;
+
+               if Get_Node (Ref.Element) /= Ref.Node then
+                  Tree.Breakdown
+                    (Ref, Parser.Productions,
+                     Syntax_Trees.User_Data_Access_Constant 
(Parser.User_Data), First_Terminal => False);
+               end if;
+
+               declare
+                  To_Delete : Stream_Index := Ref.Element;
+               begin
+                  Tree.Stream_Next (Ref, Rooted => True);
+                  Tree.Stream_Delete (Stream, To_Delete);
+               end;
+
+            else
+               if Trace_Incremental_Parse > Detail then
+                  Tree.Lexer.Trace.Put_Line
+                    ("breakdown recover_conflict node " & Tree.Image (Ref, 
Node_Numbers => True));
+               end if;
+
+               Tree.Breakdown
+                 (Ref, Parser.Productions,
+                  Syntax_Trees.User_Data_Access_Constant (Parser.User_Data),
+                  First_Terminal => False);
+               To_Breakdown := Ref;
+               Tree.First_Terminal (To_Breakdown);
+               Tree.Stream_Next (Ref, Rooted => True);
+
+               To_Terminals :
+               loop
+                  if Trace_Incremental_Parse > Extra then
+                     Tree.Lexer.Trace.Put_Line
+                       ("... to_breakdown " & Tree.Image (To_Breakdown, 
Node_Numbers => True));
+                  end if;
+
+                  if Tree.Label (To_Breakdown.Element) = Nonterm then
+                     if Tree.Is_Empty_Nonterm (Tree.Get_Node (Stream, 
To_Breakdown.Element)) then
+                        declare
+                           To_Delete : Stream_Index := To_Breakdown.Element;
+                        begin
+                           Tree.Stream_Next (To_Breakdown, Rooted => False);
+                           Tree.Stream_Delete (Stream, To_Delete);
+                        end;
+                     else
+                        Tree.Left_Breakdown (To_Breakdown, 
Syntax_Trees.User_Data_Access_Constant (Parser.User_Data));
+                        Tree.Stream_Next (To_Breakdown, Rooted => False);
+                     end if;
+                     if Trace_Incremental_Parse > Extra then
+                        Tree.Lexer.Trace.Put_Line
+                          ("... stream " & Tree.Image (Stream, Node_Numbers => 
True));
+                     end if;
+
+                  else
+                     Tree.Stream_Next (To_Breakdown, Rooted => False);
+                  end if;
+                  exit To_Terminals when To_Breakdown.Element = Ref.Element;
+               end loop To_Terminals;
+
+               To_Breakdown := Invalid_Stream_Node_Ref;
+            end if;
 
-      Shared_Parser.Lex_All;
+            Tree.First_Recover_Conflict (Ref);
+         end loop Breakdown_Recover_Conflict;
+      end;
 
-      Shared_Parser.Parsers.First.State_Ref.Stack.Push 
((Shared_Parser.Table.State_First, others => <>));
+      --  Undo all error recovery insert/delete, in case this is needed as
+      --  part of an edit in another place; test_incremental.adb
+      --  Preserve_Parse_Errors_2.
+      --
+      --  IMPROVEME incremental: This algorithm visits every terminal; not
+      --  incremental. Cache Has_Following_Deleted, has_virtual in nonterms.
 
-      Main_Loop :
+      Terminal := Tree.First_Terminal (Tree.Stream_First (Stream, Skip_SOI => 
False));
+      Undo_Recover :
       loop
-         --  exit on Accept_It action or syntax error.
+         Next_Recover :
+         loop
+            exit Undo_Recover when Terminal.Node = Invalid_Node_Access;
+            exit Next_Recover when Tree.Label (Terminal.Node) in 
Virtual_Terminal;
+            exit Next_Recover when Tree.Label (Terminal.Node) = 
Source_Terminal and then
+              Tree.Has_Following_Deleted (Terminal.Node);
+            Tree.Next_Terminal (Terminal);
+         end loop Next_Recover;
+
+         case Terminal_Label'(Tree.Label (Terminal.Node)) is
+         when Source_Terminal =>
+            declare
+               Has_Deleted   : constant Valid_Node_Access := Terminal.Node;
+               Insert_Before : Terminal_Ref;
+            begin
+               Tree.Next_Terminal (Terminal);
+
+               Breakdown (Terminal);
+               Insert_Before := Terminal;
+
+               for Deleted_Node of reverse Tree.Following_Deleted 
(Has_Deleted) loop
+                  if Tree.Label (Deleted_Node) in Virtual_Terminal_Label then
+                     --  This would be deleted in the next step, so don't 
bother restoring
+                     --  it.
+                     if Trace_Incremental_Parse > Detail then
+                        Tree.Lexer.Trace.Put_Line
+                          ("drop virtual deleted node " & Tree.Image 
(Deleted_Node, Node_Numbers => True));
+                     end if;
 
-         Parse_Verb (Shared_Parser, Current_Verb, Zombie_Count);
+                  else
+                     declare
+                        Deleted_Byte_Region : constant Buffer_Region := 
Tree.Byte_Region
+                          (Deleted_Node, Trailing_Non_Grammar => False);
+
+                        Has_Deleted_Non_Grammar : Lexer.Token_Arrays.Vector 
renames Tree.Non_Grammar_Var
+                          (Has_Deleted);
+                        First_To_Move : Positive_Index_Type := 
Positive_Index_Type'Last;
+                        Deleted_Non_Grammar : Lexer.Token_Arrays.Vector 
renames Tree.Non_Grammar_Var (Deleted_Node);
+                        pragma Assert (Deleted_Non_Grammar.Length = 0);
+                     begin
+                        if Has_Deleted_Non_Grammar.Length > 0 and then
+                          Deleted_Byte_Region.First < Has_Deleted_Non_Grammar
+                            
(Has_Deleted_Non_Grammar.Last_Index).Byte_Region.Last
+                        then
+                           --  Move some Non_Grammar to Deleted_Non_Grammar
+                           --  test_incremental.adb Modify_Deleted_Element, 
Lexer_Errors_01,
+                           --  Restore_Deleted_01
+                           for I in Has_Deleted_Non_Grammar.First_Index .. 
Has_Deleted_Non_Grammar.Last_Index loop
+                              if Deleted_Byte_Region.First < 
Has_Deleted_Non_Grammar (I).Byte_Region.Last then
+                                 First_To_Move := I;
+                                 exit;
+                              end if;
+                           end loop;
+                           for I in First_To_Move .. 
Has_Deleted_Non_Grammar.Last_Index loop
+                              Deleted_Non_Grammar.Append 
(Has_Deleted_Non_Grammar (I));
+                           end loop;
+                           if First_To_Move = 
Has_Deleted_Non_Grammar.First_Index then
+                              Has_Deleted_Non_Grammar.Clear;
+                           else
+                              Has_Deleted_Non_Grammar.Set_First_Last
+                                (Has_Deleted_Non_Grammar.First_Index, 
First_To_Move - 1);
+                           end if;
+                        end if;
+                     end;
 
-         if Trace_Parse > Extra then
-            Trace.Put_Line ("cycle start; current_verb: " & 
Parse_Action_Verbs'Image (Current_Verb));
-         end if;
+                     if Trace_Incremental_Parse > Detail then
+                        Tree.Lexer.Trace.Put_Line
+                          ("restore deleted node " & Tree.Image
+                             (Deleted_Node, Node_Numbers => True, Non_Grammar 
=> True) &
+                             " before " & Tree.Image
+                               (Insert_Before, Node_Numbers => True, 
Non_Grammar => True));
+                     end if;
 
-         case Current_Verb is
-         when Pause =>
-            null;
+                     Tree.Set_Sequential_Index (Deleted_Node, 
Invalid_Sequential_Index);
+                     Insert_Before := Tree.Stream_Insert (Stream, 
Deleted_Node, Insert_Before.Element);
+                  end if;
+               end loop;
+               Tree.Following_Deleted (Has_Deleted).Clear;
+
+               if Trace_Incremental_Parse > Extra then
+                  Tree.Lexer.Trace.Put_Line
+                    ("stream:" & Tree.Image
+                       (Stream,
+                        Children    => Trace_Incremental_Parse > Detail,
+                        Non_Grammar => True,
+                        Augmented   => True));
+               end if;
+            end;
 
-         when Shift =>
-            --  We just shifted a token; get the next token from
-            --  Shared_Parser.Terminals.
+         when Virtual_Terminal_Label =>
+            --  Delete Terminal.
+            Breakdown (Terminal, To_Single => True);
 
-            for Parser_State of Shared_Parser.Parsers loop
-               if Parser_State.Verb = Error then
-                  if Shared_Parser.Enable_McKenzie_Recover then
-                     Parser_State.Zombie_Token_Count := 
Parser_State.Zombie_Token_Count + 1;
-                     if Trace_Parse > Extra then
-                        Trace.Put_Line
-                          (Integer'Image (Parser_State.Label) & ": zombie (" &
-                             WisiToken.Token_Index'Image
-                               (Shared_Parser.Table.McKenzie_Param.Check_Limit 
- Parser_State.Zombie_Token_Count) &
-                             " tokens remaining)");
+            declare
+               Terminal_Non_Grammar : Lexer.Token_Arrays.Vector renames 
Tree.Non_Grammar_Var (Terminal.Node);
+               To_Delete            : Stream_Index := Terminal.Element;
+               Next_Element         : Stream_Index := Tree.Stream_Next 
(Terminal.Stream, Terminal.Element);
+            begin
+               if Terminal_Non_Grammar.Length > 0 then
+                  declare
+                     Term_Non_Gramm_Region : constant Buffer_Region :=
+                       (First => Terminal_Non_Grammar 
(Terminal_Non_Grammar.First_Index).Byte_Region.First,
+                        Last  => Terminal_Non_Grammar 
(Terminal_Non_Grammar.Last_Index).Byte_Region.Last);
+
+                     Next_Terminal : Stream_Node_Ref := Tree.Next_Terminal 
(Terminal);
+                  begin
+                     --  Terminal_Non_Grammar is non-empty only if 
User_Data.Insert_Token
+                     --  moved some non_grammar to it. If the terminal they 
were moved from
+                     --  was subsequently deleted and restored, it may now be
+                     --  Next_Terminal: ada_mode-interactive_09.adb new_line 
after 'for'.
+                     --  Or it may be before a previous terminal; 
ada_mode-recover_09.adb.
+                     --
+                     --  Find a terminal to move Terminal_Non_Grammar to.
+                     if Next_Terminal = Invalid_Stream_Node_Ref then
+                        --  ada_mode-recover_partial_26.adb
+                        Next_Terminal := Terminal;
+                     end if;
+                     loop
+                        exit when Tree.Byte_Region (Next_Terminal.Node, 
Trailing_Non_Grammar => False).First <
+                          Term_Non_Gramm_Region.First;
+                        exit when Tree.ID (Next_Terminal.Node) = 
Tree.Lexer.Descriptor.SOI_ID;
+                        Tree.Prev_Terminal (Next_Terminal);
+                     end loop;
+                     Tree.Non_Grammar_Var (Next_Terminal.Node).Append 
(Terminal_Non_Grammar);
+                     Terminal_Non_Grammar.Clear;
+                     if Trace_Incremental_Parse > Detail then
+                        Tree.Lexer.Trace.Put_Line
+                          ("move non_grammar to " & Tree.Image
+                             (Next_Terminal.Node, Node_Numbers => True, 
Non_Grammar => True));
+                     end if;
+                  end;
+               end if;
+
+               if Trace_Incremental_Parse > Detail then
+                  Tree.Lexer.Trace.Put_Line
+                    ("delete virtual " & Tree.Image (To_Delete, Node_Numbers 
=> True, Non_Grammar => True));
+               end if;
+
+               Tree.Next_Terminal (Terminal);
+               Tree.Stream_Delete (Terminal.Stream, To_Delete);
+
+               --  Delete immediately following empty nonterms. For example, 
in Ada,
+               --  error recovery often inserts 'end <name_opt> ;', where 
name_opt is
+               --  empty; delete all three tokens. On the other hand, an empty
+               --  nonterm could be a block name; it will be recreated by the 
parser,
+               --  not treated as an error.
+               loop
+                  exit when Next_Element = Invalid_Stream_Index;
+                  declare
+                     Node : constant Valid_Node_Access := Tree.Get_Node 
(Terminal.Stream, Next_Element);
+                     To_Delete : Stream_Index;
+                  begin
+                     if Tree.Label (Node) = Nonterm and then Tree.Child_Count 
(Node) = 0 then
+                        To_Delete := Next_Element;
+
+                        if Trace_Incremental_Parse > Detail then
+                           Tree.Lexer.Trace.Put_Line
+                             ("delete empty nonterm " & Tree.Image
+                                (To_Delete, Node_Numbers => True, Non_Grammar 
=> True));
+                        end if;
+
+                        Next_Element := Tree.Stream_Next (Terminal.Stream, @);
+                        Tree.Stream_Delete (Terminal.Stream, To_Delete);
+                     else
+                        exit;
+                     end if;
+                  end;
+               end loop;
+            end;
+         end case;
+      end loop Undo_Recover;
+
+      --  Delete parse error nodes, record lexer error nodes. Any parse
+      --  errors remaining after the edit is applied will be recreated
+      --  during parse. We don't delete lexer errors, because they will not
+      --  be recreated by parsing; they will be deleted if they are fixed by
+      --  an edit, because the tokens containing them will be rescanned.
+      --
+      --  It is tempting to try to retain parse errors so the previously
+      --  found solution can be reapplied, but then it is difficult to
+      --  decide which errors to delete during parse; test_incremental.adb
+      --  Edit_String_05.
+      declare
+         Err_Ref : Stream_Error_Ref := Tree.First_Error (Stream);
+      begin
+         loop
+            exit when not Has_Error (Err_Ref);
+            declare
+               Err       : constant Error_Data'Class := Error (Err_Ref);
+               Error_Ref : constant Stream_Node_Ref  := 
Tree.Error_Stream_Node_Ref (Err_Ref);
+
+               function Find_Edit_Region return Buffer_Region
+               is
+                  ID : constant Token_ID := Tree.ID (Error_Ref.Node);
+               begin
+                  if not Tree.Lexer.Is_Block_Delimited (ID) then
+                     return Null_Buffer_Region;
+                  end if;
+
+                  if Tree.Lexer.Same_Block_Delimiters (ID) then
+                     if Tree.Lexer.New_Line_Is_End_Delimiter (ID) then
+                        --  A string-double-one-line or similar
+                        return Tree.Byte_Region_Of_Line_Region (Error_Ref);
+                     else
+                        --  Any inserted or deleted matching delimiter affects 
all text from
+                        --  the edit point to the end of text.
+                        return
+                          (Tree.Byte_Region (Tree.SOI, Trailing_Non_Grammar => 
True).First,
+                           Tree.Byte_Region (Tree.EOI, Trailing_Non_Grammar => 
True).Last);
                      end if;
+                  else
+                     raise SAL.Not_Implemented with "FIXME: need test cases.";
+                     return Null_Buffer_Region;
+                  end if;
+               end Find_Edit_Region;
+
+            begin
+               if Err in Lexer_Error then
+                  --  We don't know Shift_Bytes yet, so we can't find 
Scan_Start_Node or Scan_End.
+                  Lexer_Errors.Append
+                    ((Error_Ref.Node,
+                      Edit_Region => Find_Edit_Region,
+                      others => <>));
+                  Tree.Next_Error (Err_Ref);
+               else
+                  if Trace_Incremental_Parse > Detail then
+                     Tree.Lexer.Trace.Put_Line ("delete " & Err.Image (Tree, 
Error_Ref.Node));
+                  end if;
+                  Tree.Delete_Error (Err_Ref);
+               end if;
+            end;
+         end loop;
+      end;
+
+      --  Now process source edits. We have to start with SOI to handle
+      --  edits in leading non-grammar. test_incremental.adb Edit_Comment_12
+      Terminal := Tree.First_Terminal (Tree.Stream_First (Stream, Skip_SOI => 
False));
+
+      KMN_Loop :
+      loop
+         declare
+            KMN : constant WisiToken.Parse.KMN := Edits (KMN_Node);
+
+            Stable_Region : constant Buffer_Region := -- Not shifted
+              (Old_Byte_Pos + 1, Old_Byte_Pos + KMN.Stable_Bytes);
+
+            Stable_Region_Chars : constant Buffer_Region :=
+              (Old_Char_Pos + 1, Old_Char_Pos + KMN.Stable_Chars);
+
+            Deleted_Region : constant Buffer_Region := -- Not shifted.
+              (Stable_Region.Last + 1, Stable_Region.Last + KMN.Deleted_Bytes);
+
+            Inserted_Region : constant Buffer_Region :=
+              (New_Byte_Pos + KMN.Stable_Bytes + 1, New_Byte_Pos + 
KMN.Stable_Bytes + KMN.Inserted_Bytes);
+            --  Inserted_Region.First is the first char after the stable 
region in
+            --  the edited text (which means it is shifted).
+            --
+            --  If Length (Inserted_Region) = 0 and Length (Deleted_Region) = 0
+            --  then this is the final stable region
+
+            Inserted_Region_Chars : constant Buffer_Region :=
+              (New_Char_Pos + KMN.Stable_Chars + 1, New_Char_Pos + 
KMN.Stable_Chars + KMN.Inserted_Chars);
+
+            Next_KMN : constant WisiToken.Parse.KMN :=
+              (if KMN_Node = Edits.Last
+               then Invalid_KMN
+               else Edits (KMN_Lists.Next (KMN_Node)));
+
+            Next_KMN_Stable_First : constant Buffer_Pos := Stable_Region.Last 
+ KMN.Deleted_Bytes + 1;
+            Next_KMN_Stable_Last  : constant Buffer_Pos := 
Next_KMN_Stable_First - 1 + Next_KMN.Stable_Bytes;
+
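+            --  Illustrative example (values not from a test): with
+            --  Old_Byte_Pos = 0, New_Byte_Pos = 0 and KMN = (Stable_Bytes =>
+            --  10, Inserted_Bytes => 3, Deleted_Bytes => 2, ...),
+            --  Stable_Region is (1, 10), Deleted_Region is (11, 12) in the
+            --  old text, and Inserted_Region is (11, 13) in the edited text.
+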
+            Deleted_Shift_Lines : Base_Line_Number_Type := 0;
+            --  When a non_grammar is deleted by Delete_Deleted_Loop below, if
+            --  Terminal remains before the deleted non_grammar,
+            --  Deleted_Shift_Lines is adjusted instead of Shift_Lines. Then
+            --  Shift_Lines is correct when computing a scan start point. After
+            --  the scan, Deleted_Shift_Lines is added to Shift_Lines.
+
+         begin
+            --  Parser.Lexer contains the edited text, so we can't check that
+            --  the stable and deleted regions are inside the initial text.
+            --  The caller should use Validate_KMN.
+
+            if Trace_Incremental_Parse > Detail then
+               Tree.Lexer.Trace.New_Line;
+               Tree.Lexer.Trace.Put_Line
+                 ("KMN: " & Image (Stable_Region) & Image (Inserted_Region) & 
Image (Deleted_Region));
+               Tree.Lexer.Trace.Put_Line ("old  :" & Old_Byte_Pos'Image & 
Old_Char_Pos'Image);
+               Tree.Lexer.Trace.Put_Line
+                 ("shift:" & Shift_Bytes'Image & " " & Shift_Chars'Image & " " 
& Shift_Lines'Image);
+               Tree.Lexer.Trace.Put_Line ("scanned_byte_pos:" & 
Scanned_Byte_Pos'Image);
+               Tree.Lexer.Trace.Put_Line
+                 ("stream:" & Tree.Image
+                    (Stream,
+                     Children    => True,
+                     Non_Grammar => True,
+                     Augmented   => True));
+
+               Tree.Lexer.Trace.Put_Line
+                 ("terminal: " & Tree.Image (Terminal, Non_Grammar => True, 
Node_Numbers => True));
+               if Terminal_Non_Grammar_Next /= Lexer.Token_Arrays.No_Index then
+                  Tree.Lexer.Trace.Put_Line ("terminal_non_grammar_next:" & 
Terminal_Non_Grammar_Next'Image);
+               end if;
+
+               if Floating_Non_Grammar.Length > 0 then
+                  Tree.Lexer.Trace.Put_Line
+                    ("floating_non_grammar: " & Lexer.Full_Image 
(Floating_Non_Grammar, Tree.Lexer.Descriptor.all));
+                  if Delayed_Floating_Index /= Positive_Index_Type'Last then
+                     Tree.Lexer.Trace.Put_Line ("delayed_floating_index:" & 
Delayed_Floating_Index'Image);
                   end if;
+               end if;
+
+               Tree.Lexer.Trace.New_Line;
+            end if;
+
+            if not Contains (Outer => Parser.Tree.Lexer.Buffer_Region_Byte, 
Inner => Inserted_Region) then
+               raise User_Error with "KMN insert region " & Image 
(Inserted_Region) & " outside edited source text " &
+                 Image (Parser.Tree.Lexer.Buffer_Region_Byte);
+            end if;
+
+            --  Decide which Lexer_Errors are possibly affected by this KMN
+            declare
+               use Lexer_Error_Data_Lists;
+               Cur : Cursor := Lexer_Errors.First;
+            begin
+               loop
+                  exit when Cur = No_Element;
 
-               elsif Parser_State.Verb = Shift then
                   declare
-                     function Insert_Virtual return Boolean
-                     is
-                        use Recover_Op_Arrays, Recover_Op_Array_Refs;
-                        Ins_Del     : Vector renames 
Parser_State.Recover_Insert_Delete;
-                        Ins_Del_Cur : Extended_Index renames 
Parser_State.Recover_Insert_Delete_Current;
-                        Result : Boolean := False;
-                     begin
-                        if Ins_Del_Cur /= No_Index then
+                     Node    : constant Valid_Node_Access := Lexer_Errors 
(Cur).Node;
+                     Node_ID : constant Token_ID          := Tree.ID (Node);
+                  begin
+                     if KMN.Inserted_Bytes = 0 and KMN.Deleted_Bytes = 0 then
+                        --  last KMN
+                        null;
+
+                     elsif Tree.Lexer.Is_Block_Delimited (Node_ID) then
+                        --  test_incremental.adb Edit_String_09, _13, _14, 
Lexer_Errors_03.
+                        declare
+                           Edit_Region      : constant Buffer_Region := 
Lexer_Errors (Cur).Edit_Region;
+                           Node_Byte_Region : constant Buffer_Region := 
Tree.Byte_Region
+                             (Node, Trailing_Non_Grammar => False);
+                        begin
+                           if Edit_Region.First > Next_KMN_Stable_First then
+                              --  This error may be affected by a following 
KMN; no following lexer
+                              --  error can be affected by this KMN.
+                              exit;
+
+                           elsif KMN.Inserted_Bytes > 0 then
+                              if Tree.Lexer.Same_Block_Delimiters (Node_ID) 
then
+                                 if Invalid_Buffer_Pos /= 
Tree.Lexer.Contains_End_Delimiter (Node_ID, Inserted_Region)
+                                   and then
+                                   Contains (Edit_Region + Shift_Bytes, 
Inserted_Region.First)
+                                 then
+                                    --  This error is affected by this edit.
+                                    declare
+                                       Prev_Terminal : constant 
Stream_Node_Ref :=
+                                         Tree.Prev_Terminal 
(Tree.To_Stream_Node_Ref (Stream, Node));
+                                       Data : Lexer_Error_Data renames 
Lexer_Errors (Cur);
+                                    begin
+                                       Data.Scan_End := 
Tree.Lexer.Find_Scan_End
+                                         (Tree.ID (Node), Node_Byte_Region + 
Shift_Bytes +
+                                            (if Node_Byte_Region.First > 
Stable_Region.Last
+                                             then 0
+                                             else KMN.Inserted_Bytes),
+                                          Inserted  => True,
+                                          Start     => True);
+
+                                       if Tree.ID (Prev_Terminal) = Tree.ID 
(Node) and then
+                                         Tree.Byte_Region (Prev_Terminal, 
Trailing_Non_Grammar => False).Last + 1 =
+                                         Node_Byte_Region.First and then
+                                         Tree.Lexer.Escape_Delimiter_Doubled 
(Node_ID)
+                                       then
+                                          --  Prev, Node look like:
+                                          --
+                                          --   "foo""bar"
+                                          --
+                                          --  Need to scan both. 
test_incremental.adb Edit_String_12
+                                          Data.Scan_Start_Node := 
Prev_Terminal.Node;
+                                       end if;
+                                    end;
+                                 end if;
+                              else
+                                 --  Not same delimiters.
+                                 raise SAL.Not_Implemented with "FIXME: need 
test cases";
+                              end if;
+                           end if;
+
+                           if KMN.Deleted_Bytes > 0 then
+                              if Overlaps (Node_Byte_Region, Deleted_Region) 
then
+                                 --  test_incremental.adb Edit_String_09
+                                 if Lexer_Errors (Cur).Scan_End /= 
Invalid_Buffer_Pos then
+                                    raise SAL.Not_Implemented with "FIXME: 
insert and delete both set scan_end";
+                                 end if;
+                                 Lexer_Errors (Cur).Scan_End := 
Tree.Lexer.Find_Scan_End
+                                   (Tree.ID (Node), Node_Byte_Region + 
Shift_Bytes +
+                                      (if Node_Byte_Region.First > 
Stable_Region.Last
+                                       then 0
+                                       else KMN.Inserted_Bytes),
+                                    Inserted  => True,
+                                    Start     => True);
+                              end if;
+                           end if;
+                        end;
+
+                     else
+                        --  Not block delimited.
+                        declare
+                           Node_Byte_Region : constant Buffer_Region := 
Tree.Byte_Region
+                             (Node, Trailing_Non_Grammar => True);
+                        begin
+                           if Node_Byte_Region.First in Stable_Region.First .. 
Next_KMN_Stable_First then
+                              --  Now we know Shift for this lexer error. Node 
has not yet been
+                              --  shifted, but we must include the shift for 
the current KMN if Node
+                              --  is after Stable_Region.Last; 
test_incremental.adb Lexer_Errors_07.
+
+                              --  The lexer error occurred while scanning the 
token or one of the
+                              --  following non_grammars. test_incremental.adb 
Lexer_Errors_04.
+                              Lexer_Errors (Cur).Scan_End := 
Node_Byte_Region.Last +
+                                (if Node_Byte_Region.First > Stable_Region.Last
+                                 then KMN.Inserted_Bytes - KMN.Deleted_Bytes
+                                 else 0);
+                           else
+                              --  This error may be affected by a following 
KMN; no following lexer
+                              --  error can be affected by this KMN.
+                              exit;
+                           end if;
+                        end;
+                     end if;
+
+                     if Lexer_Errors (Cur).Scan_End <= Stable_Region.Last + 
Shift_Bytes then
+                        --  This lexer error is not fixed by these edits.
+                        declare
+                           To_Delete : Cursor := Cur;
+                        begin
+                           Next (Cur);
+                           Lexer_Errors.Delete (To_Delete);
+                        end;
+                     else
+                        --  We must scan from this lexer error to find out if 
it is fixed.
+                        if Trace_Lexer > Outline then
                            declare
-                              Op : Recover_Op renames Variable_Ref (Ins_Del, 
Ins_Del_Cur);
+                              Data : Lexer_Error_Data renames Lexer_Errors 
(Cur).Element.all;
                            begin
-                              if Op.Op = Insert and then
-                                Op.Ins_Token_Index =
-                                (if Parser_State.Inc_Shared_Token
-                                 then Parser_State.Shared_Token + 1
-                                 else Parser_State.Shared_Token)
-                              then
-                                 Result := True;
+                              Tree.Lexer.Trace.Put_Line
+                                ("lexer error on " & Tree.Image (Data.Node, 
Node_Numbers => True) &
+                                   " possibly fixed by this KMN; scan end" & 
Data.Scan_End'Image);
+                           end;
+                        end if;
+                        Next (Cur);
+                     end if;
+                  end;
+               end loop;
+            end;
+
+            if Tree.ID (Terminal.Node) = Tree.Lexer.Descriptor.EOI_ID then
+               --  We only shift EOI after all KMN are processed; it may need 
to be
+               --  shifted for more than one edit point. test_incremental.adb
+               --  Edit_Comment_3.
+               if Trace_Incremental_Parse > Detail then
+                  Tree.Lexer.Trace.Put_Line
+                    ("nothing left to shift; terminal:" & Tree.Image
+                       (Terminal, Non_Grammar => True, Augmented => True));
+               end if;
 
-                                 Parser_State.Current_Token := 
Parser_State.Tree.Add_Terminal
-                                   (Op.Ins_ID, Before => Op.Ins_Token_Index);
+            elsif not Delayed_Scan then
+               --  If there was a Delayed_Scan, some changed tokens may be 
before
+               --  Stable_Region, so we don't do Unchanged_Loop.
+               --
+               --  It is tempting to skip Unchanged_Loop if Shift_Bytes = 0 and
+               --  Shift_Chars = 0 and Shift_Lines = 0. But we need to scan all
+               --  Non_Grammar for Floating_Non_Grammar, which changes 
Shift_Lines.
+               --  IMPROVEME: only need to scan trailing stable terminal?
+
+               Unchanged_Loop :
+               loop
+                  exit Unchanged_Loop when Terminal = Invalid_Stream_Node_Ref;
+                  exit Unchanged_Loop when Tree.ID (Terminal.Node) = 
Tree.Lexer.Descriptor.EOI_ID;
+
+                  --  All virtuals were deleted above by removing error 
corrections.
+                  pragma Assert (Tree.Label (Terminal.Node) = 
Syntax_Trees.Source_Terminal);
+
+                  if Terminal_Non_Grammar_Next = Lexer.Token_Arrays.No_Index 
then
+                     --  Exit when Terminal may be changed by the current KMN 
edit; it is
+                     --  partly past or adjacent to Stable_Region.Last. Also 
exit when last
+                     --  KMN is done.
+                     exit Unchanged_Loop when
+                       Tree.ID (Terminal) /= Tree.Lexer.Descriptor.SOI_ID and 
then
+                       (if Length (Inserted_Region) = 0 and Length 
(Deleted_Region) = 0
+                        then Tree.Byte_Region (Terminal.Node, 
Trailing_Non_Grammar => False).Last >
+                          Stable_Region.Last -- Last KMN
+                        else Tree.Byte_Region (Terminal.Node, 
Trailing_Non_Grammar => False).Last >=
+                          Stable_Region.Last);
+
+                     if Trace_Incremental_Parse > Detail then
+                        Tree.Lexer.Trace.Put_Line
+                          ("stable shift " & Tree.Image
+                             (Terminal.Node,
+                              Non_Grammar => True, Terminal_Node_Numbers => 
True, Augmented => True));
+                     end if;
 
-                                 Op.Ins_Tree_Node := 
Parser_State.Current_Token;
+                     --  Tree.Shift sets Terminal_Non_Grammar_Next to the 
first non-grammar
+                     --  that may be modified.
+                     Tree.Shift
+                       (Terminal.Node, Shift_Bytes, Shift_Chars, Shift_Lines,
+                        Last_Stable_Byte =>
+                          (if KMN.Inserted_Bytes = 0 and KMN.Deleted_Bytes = 0
+                           then Buffer_Pos'Last --  ada_mode-interactive_02.adb
+                           else Stable_Region.Last),
+                        Non_Grammar_Next => Terminal_Non_Grammar_Next);
+
+                     if Trace_Incremental_Parse > Detail then
+                        Tree.Lexer.Trace.Put_Line
+                          ("  => " & Tree.Image
+                             (Terminal.Node, Non_Grammar => True, 
Terminal_Node_Numbers => True,
+                              Augmented => True));
+                     end if;
 
-                                 Ins_Del_Cur := Ins_Del_Cur + 1;
-                                 if Ins_Del_Cur > Last_Index (Ins_Del) then
-                                    Ins_Del_Cur := No_Index;
+                     if Terminal_Non_Grammar_Next /= 
Lexer.Token_Arrays.No_Index then
+                        if Trace_Incremental_Parse > Detail then
+                           Tree.Lexer.Trace.Put_Line 
("terminal_non_grammar_next:" & Terminal_Non_Grammar_Next'Image);
+                        end if;
+                        Terminal_Shifted := True;
+                        exit Unchanged_Loop;
+                     else
+                        Tree.Next_Terminal (Terminal);
+                     end if;
+
+                  else
+                     --  The previous KMN left Terminal_Non_Grammar_Next /= 
No_Index
+                     Terminal_Shifted := True;
+
+                     if Trace_Incremental_Parse > Detail then
+                        Tree.Lexer.Trace.Put_Line 
("terminal_non_grammar_next:" & Terminal_Non_Grammar_Next'Image);
+                     end if;
+
+                     --  Shift remaining non_grammar in Stable_Region
+                     Non_Grammar_Loop :
+                     loop
+                        declare
+                           Non_Grammar : Lexer.Token_Arrays.Vector renames 
Tree.Non_Grammar_Var (Terminal.Node);
+                        begin
+                           exit Unchanged_Loop when Non_Grammar 
(Terminal_Non_Grammar_Next).Byte_Region.Last >
+                             Stable_Region.Last;
+
+                           Lexer.Shift (Non_Grammar 
(Terminal_Non_Grammar_Next), Shift_Bytes, Shift_Chars, Shift_Lines);
+
+                           Terminal_Non_Grammar_Next := @ + 1;
+
+                           if Terminal_Non_Grammar_Next = 
Non_Grammar.Last_Index then
+                              Terminal_Non_Grammar_Next := 
Lexer.Token_Arrays.No_Index;
+                              Tree.Next_Terminal (Terminal);
+                              exit Non_Grammar_Loop;
+                           end if;
+                        end;
+                     end loop Non_Grammar_Loop;
+
+                     if Trace_Incremental_Parse > Detail then
+                        if Terminal_Non_Grammar_Next = 
Lexer.Token_Arrays.No_Index then
+                           Tree.Lexer.Trace.Put_Line 
("terminal_non_grammar_next cleared");
+                        else
+                           Tree.Lexer.Trace.Put_Line 
("terminal_non_grammar_next:" & Terminal_Non_Grammar_Next'Image);
+                        end if;
+                     end if;
+                  end if;
+               end loop Unchanged_Loop;
+            end if;
+
+            --  Unchanged_Loop exited because Terminal or Terminal.Non_Grammar 
is
+            --  at least partly out of Stable_Region or adjacent to the edit
+            --  start, or because it reached Tree.EOI. Therefore the edit 
start is
+            --  in Terminal, or in a non-grammar token or whitespace before
+            --  Terminal (or after Tree.EOI), or after Terminal if Terminal is
+            --  deleted.
+
+            if KMN.Deleted_Bytes > 0 then
+               --  Delete tokens deleted by this KMN, preserving Terminal if
+               --  necessary. test_incremental.adb Edit_Code_03, _04.
+               --
+               --  If deleting a grammar token, delete start is before the 
token,
+               --  delete end may be in its non_grammar; check for deleted 
comment
+               --  start and float non-deleted non_grammar. If not deleting a 
grammar
+               --  token, delete start is in its non_grammar, delete end may 
be in or
+               --  after its non_grammar; check for deleted comment start and 
end
+               --  (there can be one of each) and float non-deleted 
non_grammar. To
+               --  simplify the code, we always check for comment start and 
comment
+               --  end deleted.
+
+               declare
+                  --  If we do Breakdown on a copy of Terminal, Terminal may 
become
+                  --  invalid because Terminal.Element is deleted. So before 
the first
+                  --  delete, we save the previous terminal.
+                  Saved_Prev_Terminal : Terminal_Ref := 
Invalid_Stream_Node_Ref;
+
+                  Check_Deleted : Terminal_Ref :=
+                    (if Terminal_Non_Grammar_Next /= 
Lexer.Token_Arrays.No_Index or
+                       Tree.Byte_Region (Terminal.Node, Trailing_Non_Grammar 
=> False).Last < Deleted_Region.First
+                     then Tree.Next_Terminal (Terminal)
+                     else Terminal);
+
+                  Terminal_Is_Check_Deleted : Boolean;
+
+                  Keep_Terminal : constant Boolean :=
+                    Tree.ID (Terminal.Node) in Tree.Lexer.Descriptor.SOI_ID | 
Tree.Lexer.Descriptor.EOI_ID or
+                    Terminal_Non_Grammar_Next /= Lexer.Token_Arrays.No_Index 
or -- comment is modified
+                    Tree.Byte_Region (Terminal.Node, Trailing_Non_Grammar => 
False).First <
+                    Deleted_Region.First; -- terminal is modified
+
+                  procedure Check_Scan_End
+                    (ID           : in Token_ID;
+                     Start_Region : in Buffer_Region;
+                     End_Region   : in Buffer_Region)
+                  --  Check if the start or end delimiter is deleted or 
modified.
+                  is
+                     Start_Changed : constant Boolean := Deleted_Region.First 
<= Start_Region.Last and
+                       Deleted_Region.Last >= Start_Region.First; -- start 
delimiter deleted or modified
+
+                     End_Changed : constant Boolean := Deleted_Region.Last >= 
End_Region.Last and
+                          Deleted_Region.First <= End_Region.First; -- end 
delimiter deleted or modified
+                  begin
+                     if Start_Changed and End_Changed then
+                        return;
+
+                     elsif Start_Changed or End_Changed then
+                        --  test_incremental.adb Delete_Comment_Start_*, 
Edit_String_*, Edit_Comment_16,
+                        --  not ada_mode-interactive_01.adb "-- ada_identifier"
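+                        --
+                        --  For example, deleting a comment start "--" while
+                        --  its terminating new_line remains gives
+                        --  Start_Changed True, End_Changed False; Scan_End is
+                        --  then set so the text of the former comment is
+                        --  included in the rescan.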
+                        Scan_End := Tree.Lexer.Find_Scan_End
+                          (ID,
+                           (Start_Region.First + Shift_Bytes +
+                              --  If not Start_Changed, start delimiter is 
before the current KMN
+                              (if Start_Changed then KMN.Inserted_Bytes - 
KMN.Deleted_Bytes else 0),
+                            End_Region.Last    + Shift_Bytes + 
KMN.Inserted_Bytes - KMN.Deleted_Bytes),
+                           Inserted => False,
+                           Start    => Start_Changed);
+
+                        if Trace_Incremental_Parse > Detail then
+                           Tree.Lexer.Trace.Put_Line
+                             ("start or end delimiter deleted or modified:" &
+                                Start_Region.First'Image & " .." & 
Scan_End'Image);
+                        end if;
+                     end if;
+                  end Check_Scan_End;
+
+                  procedure Check_Scan_End (Token : in Lexer.Token)
+                  with Pre => Tree.Lexer.Is_Block_Delimited (Token.ID)
+                  --  Token is modified; check if the start or end delimiter 
is deleted or modified.
+                  is
+                  begin
+                     Check_Scan_End
+                       (ID           => Token.ID,
+                        Start_Region =>
+                          (Token.Byte_Region.First,
+                           Token.Byte_Region.First + Buffer_Pos 
(Tree.Lexer.Start_Delimiter_Length (Token.ID)) - 1),
+                        End_Region   =>
+                          (Token.Byte_Region.Last - Buffer_Pos 
(Tree.Lexer.End_Delimiter_Length (Token.ID)) + 1,
+                           Token.Byte_Region.Last));
+                  end Check_Scan_End;
+
+                  procedure Check_Scan_End (Node : in Valid_Node_Access)
+                  with Pre => Tree.Lexer.Is_Block_Delimited (Tree.ID (Node))
+                  --  Check if the start delimiter is deleted.
+                  is
+                     ID          : constant Token_ID      := Tree.ID (Node);
+                     Byte_Region : constant Buffer_Region := Tree.Byte_Region 
(Node, Trailing_Non_Grammar => False);
+                  begin
+                     Check_Scan_End
+                       (ID,
+                        Start_Region =>
+                          (Byte_Region.First,
+                           Byte_Region.First + Buffer_Pos 
(Tree.Lexer.Start_Delimiter_Length (ID)) - 1),
+                        End_Region =>
+                          (Byte_Region.Last - Buffer_Pos 
(Tree.Lexer.End_Delimiter_Length (ID)) + 1,
+                           Byte_Region.Last));
+                  end Check_Scan_End;
+
+                  procedure Handle_Non_Grammar
+                    (Non_Grammar    : in out Lexer.Token_Arrays.Vector;
+                     Delete_Grammar : in     Boolean;
+                     Floating       : in     Boolean)
+                  --  Delete start and/or end is in Non_Grammar. Check whether
+                  --  it has any tokens that are only partly deleted or not
+                  --  deleted at all, and whether it has a deleted comment end
+                  --  with a remaining comment start or vice versa. If
+                  --  Delete_Grammar, the grammar token that owns Non_Grammar
+                  --  is being deleted.
+                  is
+                     type Action_Type is (Keep, Delete, Float);
+
+                     --  If the KMN deletes from the middle of Non_Grammar, 
and not
+                     --  Delete_Grammar, we can have actions like:
+                     --  Keep, Keep, Delete, Keep, Keep.
+                     --
+                     --  If Delete_Grammar, that could be:
+                     --  Keep, Keep, Delete, Keep, Float
+                     --
+                     --  Where token 4 is modified. There are two ranges to 
delete.
+                     --
+                     --  For the cases we do handle, any deletes will always 
be contiguous,
+                     --  and floats will immediately follow the deletes.
+
+                     Delete_First : SAL.Base_Peek_Type := 
Non_Grammar.First_Index - 1;
+                     Delete_Last  : SAL.Base_Peek_Type := 
Non_Grammar.First_Index - 1;
+                     Float_First  : SAL.Base_Peek_Type := 
Non_Grammar.First_Index - 1;
+                     Float_Last   : SAL.Base_Peek_Type := 
Non_Grammar.First_Index - 1;
+                  begin
+                     --  First decide what to keep and delete, and float the 
ones that need to be floated
+                     for I in Non_Grammar.First_Index .. 
Non_Grammar.Last_Index loop
+                        declare
+                           Token  : Lexer.Token renames Non_Grammar (I);
+                           Action : Action_Type := Keep;
+                        begin
+                           if Token.ID in Tree.Lexer.Descriptor.SOI_ID | 
Tree.Lexer.Descriptor.EOI_ID then
+                              null;
+
+                           elsif Token.Byte_Region.First >= 
Deleted_Region.First and
+                             Token.Byte_Region.Last <= Deleted_Region.Last
+                           then
+                              --  Token is deleted.
+                              Action := Delete;
+
+                           elsif (Deleted_Region.First <= 
Token.Byte_Region.First and
+                                    Deleted_Region.Last >= 
Token.Byte_Region.First and
+                                    Deleted_Region.Last < 
Token.Byte_Region.Last) or
+                             (Token.Byte_Region.First < Deleted_Region.First 
and
+                                Token.Byte_Region.Last >= Deleted_Region.First)
+                           then
+                              --  Token is modified; it needs to be scanned.
+                              if Delete_Grammar then
+                                 --  If this edit also modified a preceding 
grammar token, the scan
+                                 --  will start there, and include this 
modified non_grammar.
+                                 --  test_incremental.adb 
Delete_Comment_Start_05.
+                                 --
+                                 --  Otherwise, the scan will start at the end 
of the deleted text, and
+                                 --  include this non_grammar; 
test_incremental.adb
+                                 --  Delete_Comment_Start_06.
+                                 Action := Float;
+
+                              else
+                                 pragma Assert (if Floating then 
Delayed_Scan); -- test_incremental.adb Edit_Comment_02
+
+                                 --  If Floating, Non_Grammar was edited by a 
previous change,
+                                 --  Delayed_Scan is true, so it will be 
scanned. We leave it in
+                                 --  Non_Grammar to be consistent. If not 
Floating, we leave it in
+                                 --  Check_Deleted.Non_Grammar so it is 
included in the scan start
+                                 --  computation below.
+                                 Action := Keep;
+                              end if;
+
+                              if Tree.Lexer.Is_Block_Delimited (Token.ID) then
+                                 Check_Scan_End (Token);
+                              end if;
+
+                           else
+                              --  Token is neither deleted nor modified.
+                              if Floating then
+                                 Action := Keep;
+                              elsif Delete_Grammar then
+                                 Action := Float;
+                              else
+                                 Action := Keep;
+                              end if;
+                           end if;
+
+                           case Action is
+                           when Keep =>
+                              null;
+
+                           when Delete =>
+                              if Delete_First = Non_Grammar.First_Index - 1 
then
+                                 Delete_First := I;
+                              end if;
+                              Delete_Last := I;
+
+                              if not Floating then
+                                 --  Floating_Non_Grammar lines are included 
in Shift_Lines when
+                                 --  floated.
+
+                                 if Keep_Terminal then
+                                    --  test_incremental.adb Edit_Code_09
+                                    Deleted_Shift_Lines := @ - New_Line_Count 
(Token.Line_Region);
+                                 else
+                                    Shift_Lines := @ - New_Line_Count 
(Token.Line_Region);
                                  end if;
                               end if;
-                           end;
-                        end if;
-                        return Result;
-                     end Insert_Virtual;
+
+                              if Trace_Incremental_Parse > Detail then
+                                 Tree.Lexer.Trace.Put_Line
+                                   ("delete deleted " & (if Floating then 
"floating " else "") &
+                                      "non_grammar " & Lexer.Image (Token, 
Tree.Lexer.Descriptor.all));
+                              end if;
+
+                           when Float =>
+                              pragma Assert (not Floating);
+                              if Keep_Terminal then
+                                 --  test_incremental.adb Edit_Code_09
+                                 Deleted_Shift_Lines := @ - New_Line_Count 
(Token.Line_Region);
+                              else
+                                 Shift_Lines := @ - New_Line_Count 
(Token.Line_Region);
+                              end if;
+
+                              Floating_Non_Grammar.Append (Token);
+                              if Float_First = Non_Grammar.First_Index - 1 then
+                                 Float_First := I;
+                              end if;
+                              Float_Last := I;
+
+                              if Trace_Incremental_Parse > Detail then
+                                 Tree.Lexer.Trace.Put_Line
+                                   ("float non_grammar " & Lexer.Full_Image 
(Token, Tree.Lexer.Descriptor.all));
+                              end if;
+                           end case;
+                        end;
+                     end loop;
+
+                     --  Delete the deleted and floated.
+                     declare
+                        procedure Delete_Range (First, Last : in 
SAL.Base_Peek_Type)
+                        is begin
+                           if First < Non_Grammar.First_Index then
+                              null;
+
+                           elsif First = Non_Grammar.First_Index then
+                              if Last = Non_Grammar.Last_Index then
+                                 Non_Grammar.Clear;
+                              else
+                                 Non_Grammar.Set_First_Last (Last + 1, 
Non_Grammar.Last_Index);
+                              end if;
+
+                           elsif Last = Non_Grammar.Last_Index then
+                              Non_Grammar.Set_First_Last 
(Non_Grammar.First_Index, First - 1);
+
+                           else
+                              --  Delete slice from middle
+                              declare
+                                 New_Non_Grammar : Lexer.Token_Arrays.Vector;
+                              begin
+                                 for I in Non_Grammar.First_Index .. 
Non_Grammar.Last_Index loop
+                                    if I < First then
+                                       New_Non_Grammar.Append (Non_Grammar 
(I));
+                                    elsif I > Last then
+                                       New_Non_Grammar.Append (Non_Grammar 
(I));
+                                    end if;
+                                 end loop;
+                                 Non_Grammar := New_Non_Grammar;
+                              end;
+                           end if;
+                        end Delete_Range;
+                     begin
+                        Delete_Range (Delete_First, Delete_Last);
+                        Delete_Range (Float_First, Float_Last);
+                     end;
+                  end Handle_Non_Grammar;
+
+                  procedure Do_Delete
+                  is
                   begin
-                     if Insert_Virtual then
-                        null;
+                     if Terminal_Is_Check_Deleted then
+                        Terminal := Invalid_Stream_Node_Ref; -- allow deleting 
Terminal.Element via Check_Deleted
+                     end if;
+
+                     if Saved_Prev_Terminal = Invalid_Stream_Node_Ref and not 
Terminal_Is_Check_Deleted then
+                        if Terminal.Element = Check_Deleted.Element then
+                           Check_Deleted.Element := Invalid_Stream_Index;
+
+                           Breakdown (Terminal, To_Single => False);
+
+                           --  Find the stream element that contains 
Check_Deleted.Node.
+                           Check_Deleted.Element := Terminal.Element;
+                           loop
+                              pragma Assert
+                                (Tree.ID (Tree.Get_Node (Check_Deleted.Stream, 
Check_Deleted.Element)) /=
+                                   Tree.Lexer.Descriptor.EOI_ID);
+
+                              if Tree.Is_Descendant_Of
+                                (Root => Tree.Get_Node (Check_Deleted.Stream, 
Check_Deleted.Element),
+                                 Descendant => Check_Deleted.Node)
+                              then
+                                 exit;
+                              end if;
+                              Check_Deleted.Element := Tree.Stream_Next 
(Check_Deleted.Stream, Check_Deleted.Element);
+                           end loop;
+                           if Terminal.Element = Check_Deleted.Element then
+                              --  Check_Deleted.Element was not deleted.
+                              Saved_Prev_Terminal := Tree.Prev_Terminal 
(Terminal);
+                              Terminal := Invalid_Stream_Node_Ref;
+                           end if;
+                        end if;
+                     end if;
+
+                     pragma Assert
+                       (Terminal_Is_Check_Deleted or else
+                        (if Saved_Prev_Terminal = Invalid_Stream_Node_Ref
+                         then Terminal.Element /= Check_Deleted.Element
+                         else Saved_Prev_Terminal.Element /= 
Check_Deleted.Element));
+
+                     Breakdown (Check_Deleted, To_Single => True);
+
+                     declare
+                        To_Delete : Stream_Node_Ref := Check_Deleted;
+                     begin
+                        Tree.Next_Terminal (Check_Deleted);
+                        if Trace_Incremental_Parse > Detail then
+                           Tree.Lexer.Trace.Put_Line
+                             ("delete deleted " &
+                                Tree.Image (To_Delete.Element, 
Terminal_Node_Numbers => True, Non_Grammar => False));
+                        end if;
 
-                     elsif (if Parser_State.Inc_Shared_Token
-                            then Parser_State.Shared_Token + 1
-                            else Parser_State.Shared_Token) <= 
Shared_Parser.Terminals.Last_Index
+                        Maybe_Delete_Lexer_Errors (To_Delete.Node);
+
+                        if Tree.Lexer.Is_Block_Delimited (Tree.ID (To_Delete.Node)) then
+                           Check_Scan_End (To_Delete.Node);
+                        end if;
+
+                        --  FIXME: if terminal_is_check_deleted, we already did this
+                        Handle_Non_Grammar
+                          (Tree.Non_Grammar_Var (To_Delete.Node), Delete_Grammar => True, Floating => False);
+
+                        pragma Assert (To_Delete.Node /= Tree.SOI and To_Delete.Node /= Tree.EOI);
+                        Tree.Stream_Delete (Stream, To_Delete.Element);
+                     end;
+
+                     if Terminal_Is_Check_Deleted then
+                        Terminal := Check_Deleted;
+                     end if;
+                  end Do_Delete;
+
+               begin
+                  if Floating_Non_Grammar.Length > 0 then
+                     --  test_incremental.adb Edit_Comment_3
+                     Handle_Non_Grammar (Floating_Non_Grammar, Delete_Grammar => False, Floating => True);
+                  end if;
+
+                  if Terminal_Non_Grammar_Next /= Lexer.Token_Arrays.No_Index then
+                     Handle_Non_Grammar
+                       (Tree.Non_Grammar_Var (Terminal.Node), Delete_Grammar => False, Floating => False);
+                  end if;
+
+                  Delete_Deleted_Loop :
+                  loop
+                     Terminal_Is_Check_Deleted := Terminal = Check_Deleted;
+
+                     exit Delete_Deleted_Loop when Check_Deleted = Invalid_Stream_Node_Ref;
+                     --  Happens when Terminal is EOI. test_incremental.adb Edit_Comment_3
+
+                     exit Delete_Deleted_Loop when Tree.ID (Check_Deleted.Node) = Parser.Tree.Lexer.Descriptor.EOI_ID;
+                     --  FIXME: exit when check_deleted outside KMN?
+
+                     if Tree.Byte_Region (Check_Deleted.Node, Trailing_Non_Grammar => False).First >
+                       Deleted_Region.Last + 1
                      then
-                        if Parser_State.Inc_Shared_Token then
-                           --  Inc_Shared_Token is only set False by McKenzie_Recover; see there
-                           --  for when/why. Don't increment past wisi_eoi (happens when input
-                           --  buffer is empty; test_mckenzie_recover.adb Empty_Comments).
-                           Parser_State.Shared_Token := 
Parser_State.Shared_Token + 1;
-                        else
-                           Parser_State.Inc_Shared_Token := True;
+                        --  Check_Deleted is not deleted or modified
+                        exit Delete_Deleted_Loop;
+
+                     elsif Tree.Byte_Region (Check_Deleted.Node, Trailing_Non_Grammar => False).First <
+                       Deleted_Region.First or
+                       Tree.Byte_Region (Check_Deleted.Node, Trailing_Non_Grammar => False).Last > Deleted_Region.Last
+                     then
+                        --  Check_Deleted is not deleted, but potentially modified.
+                        --  test_incremental.adb Edit_Code_04, Edit_Code_05.
+                        if Tree.Lexer.Is_Block_Delimited (Tree.ID (Check_Deleted.Node)) then
+                           --  test_incremental.adb Edit_String_01
+                           Check_Scan_End (Check_Deleted.Node);
                         end if;
 
-                        Parser_State.Current_Token := Shared_Parser.Terminals
-                          (Parser_State.Shared_Token).Tree_Index;
+                        Handle_Non_Grammar
+                          (Tree.Non_Grammar_Var (Check_Deleted.Node), Delete_Grammar => False, Floating => False);
 
+                        if Tree.Byte_Region (Check_Deleted.Node, Trailing_Non_Grammar => False).Last >
+                          Deleted_Region.Last
+                        then
+                           --  test_incremental.adb Edit_Comment_10
+                           exit Delete_Deleted_Loop;
+                        else
+                           --  test_incremental.adb Delete_Comment_Start_05
+                           Tree.Next_Terminal (Check_Deleted);
+                        end if;
+
+                     else
+                        Do_Delete;
                      end if;
-                  end;
+                  end loop Delete_Deleted_Loop;
 
-                  if Trace_Parse > Extra then
-                     Trace.Put_Line
-                       (Integer'Image (Parser_State.Label) & ": current_token" 
& Parser_State.Tree.Image
-                          (Parser_State.Current_Token, Trace.Descriptor.all));
+                  if Keep_Terminal then
+                     if Saved_Prev_Terminal /= Invalid_Stream_Node_Ref then
+                        Terminal := Tree.Next_Terminal (Saved_Prev_Terminal);
+                        --  Terminal_Non_Grammar_Next is unchanged.
+                     end if;
+                  else
+                     Terminal := Check_Deleted;
                   end if;
-               end if;
-            end loop;
+                  if Trace_Incremental_Parse > Extra then
+                     Tree.Lexer.Trace.Put_Line
+                       ("terminal: " & Tree.Image (Terminal, Non_Grammar => True, Node_Numbers => True));
+                     Tree.Lexer.Trace.Put_Line ("deleted_shift_lines:" & Deleted_Shift_Lines'Image);
+                  end if;
+               end;
+            end if;
 
-         when Accept_It =>
-            --  All parsers accepted or are zombies.
+            --  Now decide what to scan.
+            --
+            --  If two edit regions affect the same token, scanning the first will
+            --  also scan the second.
             declare
-               Count : constant SAL.Base_Peek_Type := 
Shared_Parser.Parsers.Count;
-               Current_Parser : Parser_Lists.Cursor := 
Shared_Parser.Parsers.First;
+               Terminal_Byte_Region : constant Buffer_Region := Tree.Byte_Region
+                 (Terminal.Node, Trailing_Non_Grammar => Terminal_Non_Grammar_Next /= Lexer.Token_Arrays.No_Index);
+
+               Do_Scan : Boolean := False;
+
+               Lex_Start_Byte : Buffer_Pos       := Buffer_Pos'Last;
+               Lex_Start_Char : Buffer_Pos       := Buffer_Pos'Last;
+               Lex_Start_Line : Line_Number_Type := Line_Number_Type'Last;
+
+               Last_Grammar       : Stream_Node_Ref := Invalid_Stream_Node_Ref;
+               Last_Scanned_Token : Lexer.Token;
+
+               procedure Check_Scan_End
+                 (ID     : in Token_ID;
+                  Region : in Buffer_Region)
+               --  Check if Inserted_Region inserts an end delimiter for ID in
+               --  Region.
+               is
+                  Shift : constant Base_Buffer_Pos := KMN.Inserted_Bytes - KMN.Deleted_Bytes +
+                    (if Delayed_Scan then Shift_Bytes else 0);
+               begin
+                  if KMN.Inserted_Bytes > 0 and then
+                    Inserted_Region.First <= Region.Last
+                  then
+                     declare
+                        Delimiter_Pos : constant Base_Buffer_Pos := Tree.Lexer.Contains_End_Delimiter
+                          (ID, Inserted_Region);
+                     begin
+                        if Delimiter_Pos /= Invalid_Buffer_Pos then
+                           --  test_incremental.adb Edit_Comment_5, _12, Edit_String_*, ada_mode-interactive_02.adb
+                           Scan_End := Tree.Lexer.Find_Scan_End
+                             (ID, (Delimiter_Pos, Region.Last + Shift), Inserted => True, Start => False);
+
+                           if Trace_Incremental_Parse > Detail then
+                              Tree.Lexer.Trace.Put_Line
+                                ("end delimiter inserted:" &
+                                   Region.First'Image & " .." &
+                                   Scan_End'Image);
+                           end if;
+                        end if;
+                     end;
+                  end if;
+               end Check_Scan_End;
+
+               procedure Check_Scan_End (Token : in Lexer.Token)
+               with Pre => Tree.Lexer.Is_Block_Delimited (Token.ID)
+               --  Check if Inserted_Region inserts an end delimiter in Token,
+               --  exposing the rest of Token as code.
+               is begin
+                  Check_Scan_End (Token.ID, Token.Byte_Region);
+               end Check_Scan_End;
+
+               procedure Check_Scan_End (Node : in Valid_Node_Access)
+               with Pre => Tree.Lexer.Is_Block_Delimited (Tree.ID (Node))
+               --  Check if Inserted_Region inserts an end delimiter in Node,
+               --  exposing the rest of Node as code.
+               is begin
+                  Check_Scan_End (Tree.ID (Node), Tree.Byte_Region (Node, Trailing_Non_Grammar => False));
+               end Check_Scan_End;
+
             begin
-               if Count = 1 then
-                  --  Nothing more to do
-                  exit Main_Loop;
+               if Lexer_Errors.Length > 0 and then
+                 Lexer_Errors (Lexer_Errors.First).Scan_End /= Invalid_Buffer_Pos
+               then
+                  --  Lexer_Errors is set above to contain lexer errors that may be
+                  --  affected by this KMN. test_incremental.adb Edit_String_06,
+                  --  Lexer_Errors_nn.
+                  declare
+                     Data : Lexer_Error_Data renames Lexer_Errors (Lexer_Errors.First);
+                     Ref : Stream_Node_Ref;
+
+                     procedure Delete_Node_To_Terminal
+                     --  Delete terminals Ref thru prev (Terminal); normally scanned
+                     --  tokens get deleted in Delete_Scanned_Loop below, but that only
+                     --  deletes tokens Terminal and after.
+                     is begin
+                        loop
+                           Breakdown (Ref, To_Single => True);
+                           declare
+                              To_Delete : Stream_Node_Ref := Ref;
+                           begin
+                              Tree.Next_Terminal (Ref);
+                              Tree.Stream_Delete (Stream, To_Delete.Element);
+                           end;
+                           exit when Ref.Node = Terminal.Node;
+                        end loop;
+                     end Delete_Node_To_Terminal;
 
-               elsif Zombie_Count + 1 = Count then
-                  --  All but one are zombies
-                  loop
-                     if Current_Parser.Verb = Accept_It then
-                        Current_Parser.Next;
-                     else
+                  begin
+                     --  FIXME: handle lexer error in non_grammar
+
+                     if Data.Scan_Start_Node /= Invalid_Node_Access then
+                        --  Breakdown Terminal so we can delete terminals before Terminal.
+                        Breakdown (Terminal);
+                        Ref := Tree.To_Stream_Node_Ref (Stream, Data.Scan_Start_Node);
+                        Delete_Node_To_Terminal;
+
+                        Do_Scan        := True;
+                        Lex_Start_Byte := Tree.Byte_Region (Data.Scan_Start_Node, Trailing_Non_Grammar => False).First;
+                        Lex_Start_Char := Tree.Char_Region (Data.Scan_Start_Node, Trailing_Non_Grammar => False).First;
+                        Lex_Start_Line := Tree.Line_Region (Ref, Trailing_Non_Grammar => False).First;
+                        Scan_End       := Data.Scan_End;
+
+                     elsif Data.Node = Terminal.Node then
+                        --  Data.Node is not shifted, and Err may be before or after
+                        --  Terminal.Byte_Region.
                         declare
-                           Temp  : Parser_Lists.Cursor := Current_Parser;
+                           Terminal_Byte_Region : constant Buffer_Region := Tree.Byte_Region
+                             (Data.Node, Trailing_Non_Grammar => False);
                         begin
-                           Current_Parser.Next;
-                           Shared_Parser.Parsers.Terminate_Parser
-                             (Temp, "zombie", Shared_Parser.Trace.all, 
Shared_Parser.Terminals);
+                           if Inserted_Region.First < Terminal_Byte_Region.First then
+                              --  test_incremental.adb Lexer_Errors_07
+                              Do_Scan        := True;
+                              Lex_Start_Byte := Inserted_Region.First;
+                              Lex_Start_Char := Inserted_Region_Chars.First;
+                              Lex_Start_Line := Tree.Line_Region (Terminal, Trailing_Non_Grammar => False).First;
+                              Scan_End       := Data.Scan_End;
+                           else
+                              Do_Scan        := True;
+                              Lex_Start_Byte := Terminal_Byte_Region.First;
+                              Lex_Start_Char := Tree.Char_Region (Data.Node, Trailing_Non_Grammar => False).First;
+                              Lex_Start_Line := Tree.Line_Region (Terminal, Trailing_Non_Grammar => False).First;
+                              Scan_End       := Data.Scan_End;
+                           end if;
                         end;
+
+                        if Terminal_Non_Grammar_Next /= Lexer.Token_Arrays.No_Index then
+                           Terminal_Non_Grammar_Next := Lexer.Token_Arrays.No_Index;
+                        end if;
+
+                     elsif Tree.Byte_Region (Data.Node, Trailing_Non_Grammar => False).First <
+                       Tree.Byte_Region (Terminal.Node, Trailing_Non_Grammar => False).First
+                     then
+                        --  Data.Node is shifted.
+
+                        --  Breakdown Terminal so it does not share a stream element with
+                        --  elements being deleted, and we can delete terminals before
+                        --  Terminal.
+                        Breakdown (Terminal);
+                        Ref := Tree.To_Stream_Node_Ref (Stream, Data.Node);
+                        Delete_Node_To_Terminal;
+
+                        Do_Scan        := True;
+                        Lex_Start_Byte := Tree.Byte_Region (Data.Node, Trailing_Non_Grammar => False).First;
+                        Lex_Start_Char := Tree.Char_Region (Data.Node, Trailing_Non_Grammar => False).First;
+                        Lex_Start_Line := Tree.Line_Region (Ref, Trailing_Non_Grammar => False).First;
+                        Scan_End       := Data.Scan_End;
+
+                        if Terminal_Non_Grammar_Next /= Lexer.Token_Arrays.No_Index then
+                           Terminal_Non_Grammar_Next := Lexer.Token_Arrays.No_Index;
+                        end if;
+
+                     else
+                        --  Scan start determined by Terminal, Terminal_Non_Grammar_Next below.
+                        --  test_incremental.adb Lexer_Errors_03
+                        Scan_End := Data.Scan_End;
                      end if;
-                     exit when Current_Parser.Is_Done;
-                  end loop;
+                  end;
 
-                  exit Main_Loop;
+                  Lexer_Errors.Delete_First;
+               end if;
 
-               else
-                  --  More than one parser is active.
+               if Do_Scan then
+                  --  Set from a Lexer_Error
+                  null;
+
+               elsif Delayed_Scan then
+                  --  A previous edit start affected Terminal or Floating_Non_Grammar
+                  --  (Delayed_Floating_Index). test_incremental.adb Edit_String_07
                   declare
-                     use all type Parser_Lists.Cursor;
-                     Error_Parser_Count : Integer := (if 
Shared_Parser.Lexer.Errors.Length > 0 then 1 else 0);
-
-                     Recover_Cost           : Integer;
-                     Min_Recover_Cost       : Integer                   := 
Integer'Last;
-                     Recover_Ops_Length     : Ada.Containers.Count_Type;
-                     Min_Recover_Ops_Length : Ada.Containers.Count_Type := 
Ada.Containers.Count_Type'Last;
-                     Recover_Cur            : Parser_Lists.Cursor       := 
Current_Parser;
+                     Token_ID : constant WisiToken.Token_ID :=
+                       (if Delayed_Floating_Index = Positive_Index_Type'Last
+                        then Tree.ID (Terminal.Node)
+                        else Floating_Non_Grammar (Delayed_Floating_Index).ID);
+
+                     Token_Byte_Region : constant Buffer_Region :=
+                       (if Delayed_Floating_Index = Positive_Index_Type'Last
+                        then Tree.Byte_Region (Terminal)
+                        else Floating_Non_Grammar (Delayed_Floating_Index).Byte_Region);
                   begin
-                     Current_Parser := Shared_Parser.Parsers.First;
-                     loop
-                        if Current_Parser.Verb = Accept_It then
-                           if Current_Parser.State_Ref.Errors.Length > 0 then
-                              Error_Parser_Count := Error_Parser_Count + 1;
+                     if (Next_KMN.Deleted_Bytes > 0 or Next_KMN.Inserted_Bytes > 0) and then
+                       Next_KMN_Stable_First < Token_Byte_Region.Last
+                     then
+                        --  Next change also edits the token; more delay.
+                        null;
+                     else
+                        Do_Scan        := True;
+                        Lex_Start_Byte := Delayed_Lex_Start_Byte;
+                        Lex_Start_Char := Delayed_Lex_Start_Char;
+                        Lex_Start_Line := Delayed_Lex_Start_Line;
+
+                        if Tree.Lexer.Is_Block_Delimited (Token_ID) then
+                           if Delayed_Floating_Index = Positive_Index_Type'Last then
+                              Check_Scan_End (Terminal.Node);
+                           else
+                              Check_Scan_End (Floating_Non_Grammar (Delayed_Floating_Index));
                            end if;
-                           Current_Parser.Next;
-                        else
+                        end if;
+
+                        Delayed_Scan := False;
+                     end if;
+                  end;
+
+               elsif KMN.Inserted_Bytes = 0 and KMN.Deleted_Bytes = 0 then
+                  --  Nothing to scan; last KMN
+                  null;
+
+               elsif Next_KMN_Stable_First + Shift_Bytes < Scanned_Byte_Pos + Base_Buffer_Pos (KMN.Deleted_Bytes) then
+                  --  All of current edit has been scanned, and scan end is
+                  --  not adjacent to KMN end. test_incremental.adb Edit_Comment_2
+                  null;
+
+               elsif Terminal_Byte_Region.Last + Shift_Bytes +
+                 (if Inserted_Region.First <= Terminal_Byte_Region.Last + Shift_Bytes
+                  then Base_Buffer_Pos (KMN.Inserted_Bytes)
+                  else 0) > Scanned_Byte_Pos
+                  --  Else Terminal was scanned by a previous KMN. test_incremental.adb
+                  --  Edit_Code_12, _14, _16, ada_mode-recover_align_1.adb,
+                  --  ada_mode-interactive_02.adb
+
+               then
+                  if Terminal_Non_Grammar_Next /= Lexer.Token_Arrays.No_Index then
+                     --  Edit start is in Terminal_Non_Grammar_Next.
+                     --  test_incremental.adb Edit_Comment*
+
+                     declare
+                        Non_Grammar  : Lexer.Token_Arrays.Vector renames Tree.Non_Grammar_Var (Terminal.Node);
+                        Last_Floated : Lexer.Token_Arrays.Extended_Index := Lexer.Token_Arrays.No_Index;
+                     begin
+                        declare
+                           Token : Lexer.Token renames Non_Grammar (Terminal_Non_Grammar_Next);
+                        begin
+                           if Token.Byte_Region.First + Shift_Bytes >
+                             Inserted_Region.First
+                           then
+                              --  Edit start is in whitespace preceding Token
+                              Lex_Start_Byte := Inserted_Region.First;
+                              Lex_Start_Char := Inserted_Region_Chars.First;
+                              Lex_Start_Line :=
+                                (if Terminal_Non_Grammar_Next > Non_Grammar.First_Index
+                                 then Non_Grammar (Terminal_Non_Grammar_Next - 1).Line_Region.Last
+                                 else Tree.Line_Region
+                                   (Tree.Prev_Source_Terminal (Terminal, Trailing_Non_Grammar => True),
+                                    Trailing_Non_Grammar => True).Last);
+                              Do_Scan := True;
+                           else
+                              --  Edit start is in or just after Token
+                              Lex_Start_Byte := Token.Byte_Region.First + Shift_Bytes;
+                              Lex_Start_Char := Token.Char_Region.First + Shift_Chars;
+                              Lex_Start_Line := Token.Line_Region.First + Shift_Lines;
+                              Do_Scan := True;
+
+                              if Tree.Lexer.Is_Block_Delimited (Token.ID) then
+                                 Check_Scan_End (Token);
+                              end if;
+                           end if;
+                        end;
+
+                        --  Remaining Non_Grammar will either be scanned, or moved to a new
+                        --  grammar token, so delete or move to floating now.
+                        for I in Terminal_Non_Grammar_Next .. Non_Grammar.Last_Index loop
                            declare
-                              Temp  : Parser_Lists.Cursor := Current_Parser;
+                              Token       : Lexer.Token renames Non_Grammar (I);
+                              Byte_Region : Buffer_Region renames Token.Byte_Region;
                            begin
-                              Current_Parser.Next;
-                              Shared_Parser.Parsers.Terminate_Parser
-                                (Temp, "zombie", Shared_Parser.Trace.all, 
Shared_Parser.Terminals);
+                              if (KMN.Deleted_Bytes > 0 and then Byte_Region.First <= Deleted_Region.Last)
+                                or
+                                (KMN.Inserted_Bytes > 0 and then
+                                   Byte_Region.First + Shift_Bytes <= Inserted_Region.Last)
+                              then
+                                 if (Next_KMN.Deleted_Bytes > 0 or Next_KMN.Inserted_Bytes > 0) and then
+                                   Next_KMN_Stable_Last < Byte_Region.Last
+                                 then
+                                    --  Next change is an actual change (not just last placeholder KMN),
+                                    --  and it also overlaps this token. It may insert or delete a comment
+                                    --  end, so we don't know when to end a scan; handle it then.
+                                    --  test_incremental.adb Edit_Comment_03, _07, ada_mode-partial_parse.adb.
+                                    Shift_Lines := @ - New_Line_Count (Non_Grammar (I).Line_Region);
+                                    Floating_Non_Grammar.Append (Non_Grammar (I));
+                                    Last_Floated := I;
+                                    Do_Scan := False;
+                                    Delayed_Scan := True;
+                                    Delayed_Floating_Index := Floating_Non_Grammar.Last_Index;
+                                    Delayed_Lex_Start_Byte := Lex_Start_Byte;
+                                    Delayed_Lex_Start_Char := Lex_Start_Char;
+                                    Delayed_Lex_Start_Line := Lex_Start_Line;
+                                    if Trace_Incremental_Parse > Detail then
+                                       Tree.Lexer.Trace.Put_Line
+                                         ("scan delayed" & Lex_Start_Byte'Image &
+                                            (if Scan_End /= Invalid_Buffer_Pos
+                                             then " .." & Scan_End'Image
+                                             else ""));
+                                       if Trace_Incremental_Parse > Extra then
+                                          Tree.Lexer.Trace.Put_Line
+                                            ("float non_grammar" & I'Image & ":" &
+                                               Lexer.Full_Image (Non_Grammar (I), Tree.Lexer.Descriptor.all));
+                                       end if;
+                                    end if;
+                                 else
+                                    --  Token overlaps or is adjacent to the change region; it will be
+                                    --  rescanned. Delete it here (ie don't copy to floating). It may
+                                    --  contain New_Lines. test_incremental.adb Delete_Comment_End.
+                                    if Trace_Incremental_Parse > Extra then
+                                       Tree.Lexer.Trace.Put_Line
+                                         ("delete non_grammar" & I'Image & ":" &
+                                            Lexer.Full_Image (Token, Tree.Lexer.Descriptor.all));
+                                    end if;
+                                    declare
+                                       New_Line_Count : constant Base_Line_Number_Type := WisiToken.New_Line_Count
+                                         (Non_Grammar (I).Line_Region);
+                                    begin
+                                       Shift_Lines := @ - New_Line_Count;
+                                    end;
+                                 end if;
+                              else
+                                 --  Token does not overlap the edit region; handle it later.
+                                 Shift_Lines := @ - New_Line_Count (Non_Grammar (I).Line_Region);
+                                 Floating_Non_Grammar.Append (Non_Grammar (I));
+                                 if Trace_Incremental_Parse > Extra then
+                                    Tree.Lexer.Trace.Put_Line
+                                      ("float non_grammar" & I'Image & ":" &
+                                         Lexer.Full_Image (Non_Grammar (I), Tree.Lexer.Descriptor.all));
+                                 end if;
+                                 Last_Floated := I;
+                              end if;
                            end;
+                        end loop;
+
+                        if Terminal_Non_Grammar_Next = Non_Grammar.First_Index then
+                           Non_Grammar.Clear;
+                        else
+                           Non_Grammar.Set_First_Last (Non_Grammar.First_Index, Terminal_Non_Grammar_Next - 1);
                         end if;
-                        exit when Current_Parser.Is_Done;
-                     end loop;
 
-                     if Error_Parser_Count > 0 then
-                        --  There was at least one error. We assume that caused the ambiguous
-                        --  parse, and we pick the parser with the minimum cost and minimum
-                        --  recover ops length (consistent with Duplicate_State) to allow the
-                        --  parse to succeed. We terminate the other parsers so the remaining
-                        --  parser can do Execute_Actions.
-                        --
-                        --  If there are multiple errors, this metric is not very meaningful.
-                        --
-                        --  Note all surviving parsers must have the same error count.
-                        Current_Parser := Shared_Parser.Parsers.First;
-                        loop
-                           Recover_Cost := Current_Parser.Min_Recover_Cost;
-                           if Recover_Cost < Min_Recover_Cost then
-                              Min_Recover_Cost       := Recover_Cost;
-                              Min_Recover_Ops_Length := 
Current_Parser.Max_Recover_Ops_Length;
-                              Recover_Cur            := Current_Parser;
-
-                           elsif Recover_Cost = Min_Recover_Cost then
-                              Recover_Ops_Length := 
Current_Parser.Max_Recover_Ops_Length;
-                              if Recover_Ops_Length < Min_Recover_Ops_Length 
then
-                                 Min_Recover_Ops_Length := Recover_Ops_Length;
-                                 Recover_Cur    := Current_Parser;
-                              end if;
+                        if Trace_Incremental_Parse > Detail then
+                           if Last_Floated /= Lexer.Token_Arrays.No_Index then
+                              Tree.Lexer.Trace.Put_Line
+                                ("float non_grammar" & Terminal_Non_Grammar_Next'Image & " .." &
+                                   Last_Floated'Image);
                            end if;
-                           Current_Parser.Next;
-                           exit when Current_Parser.Is_Done;
-                        end loop;
+                        end if;
 
-                        Current_Parser := Shared_Parser.Parsers.First;
-                        loop
-                           if Current_Parser = Recover_Cur then
-                              Current_Parser.Next;
-                           else
+                        Terminal_Non_Grammar_Next := Lexer.Token_Arrays.No_Index;
+
+                        Tree.Next_Terminal (Terminal);
+                     end;
+
+                  elsif Terminal_Byte_Region.First + Shift_Bytes < Inserted_Region.First then
+                     --  Edit start is in Terminal, not at first byte. test_incremental.adb
+                     --  Edit_Code_10, _11.
+
+                     if Tree.ID (Terminal.Node) = Tree.Lexer.Descriptor.EOI_ID then
+                        if Length (Inserted_Region) > 0 then
+                           --  Scan new text inserted at EOI.
+                           Do_Scan        := True;
+                           Lex_Start_Byte := Terminal_Byte_Region.First + Shift_Bytes;
+                           Lex_Start_Char := Tree.Char_Region (Terminal.Node, Trailing_Non_Grammar => False).First +
+                             Shift_Chars;
+
+                           --  Line_Region.First is from prev_terminal.non_grammar, which is shifted
+                           Lex_Start_Line := Tree.Line_Region (Terminal, Trailing_Non_Grammar => False).First;
+                        else
+                           --  We never re-scan eoi; we just shift it.
+                           null;
+                        end if;
+                     else
+                        Do_Scan        := True;
+                        Lex_Start_Byte := Terminal_Byte_Region.First + Shift_Bytes;
+                        Lex_Start_Char := Tree.Char_Region (Terminal.Node, Trailing_Non_Grammar => False).First +
+                          Shift_Chars;
+
+                        --  Line_Region.First is from prev_terminal.non_grammar, which is shifted
+                        Lex_Start_Line := Tree.Line_Region (Terminal, Trailing_Non_Grammar => False).First;
+
+                        if Tree.Lexer.Is_Block_Delimited (Tree.ID (Terminal.Node)) then
+                           Check_Scan_End (Terminal.Node);
+                        end if;
+                     end if;
+
+                  else
+                     --  Edit start is in or adjacent to some non_grammar token or
+                     --  whitespace preceding Terminal (including at Terminal first byte);
+                     --  delete non_grammar tokens adjacent to, containing or after the
+                     --  edit start; they will be rescanned (the scan loop exits on
+                     --  terminals, not non_grammars). Deleted New_Lines decrement
+                     --  Shift_Lines.
+
+                     declare
+                        procedure In_Whitespace
+                        is begin
+                           --  Edit start is in whitespace before Terminal.
+                           --  test_incremental.adb Edit_Code_01, Edit_Whitespace_1, _2
+                           --  ada_mode-incremental_04.adb
+                           Lex_Start_Byte := Buffer_Pos'Max (Scanned_Byte_Pos + 1, Inserted_Region.First);
+                           Lex_Start_Char := Buffer_Pos'Max (Scanned_Char_Pos + 1, Inserted_Region_Chars.First);
+
+                           declare
+                              Prev_Non_Grammar : Terminal_Ref := Tree.Prev_Terminal (Terminal);
+                           begin
+                              --  We can't use Tree.Line_Region (Prev) here, because if Prev has no
+                              --  non_grammar, it uses the following non_grammar for result.last,
+                              --  and that's not shifted yet. ada_mode-incremental_02.adb
+                              if Tree.Non_Grammar_Const (Prev_Non_Grammar.Node).Length = 0 then
+                                 Tree.Prev_Non_Grammar (Prev_Non_Grammar);
+                              end if;
                               declare
-                                 Temp  : Parser_Lists.Cursor := Current_Parser;
+                                 Non_Grammar : Lexer.Token_Arrays.Vector renames Tree.Non_Grammar_Const
+                                   (Prev_Non_Grammar.Node);
                               begin
-                                 Current_Parser.Next;
-                                 Shared_Parser.Parsers.Terminate_Parser
-                                   (Temp,
-                                    (if Recover_Cost = Min_Recover_Cost and 
then
-                                       Recover_Ops_Length = 
Min_Recover_Ops_Length
-                                     then "random"
-                                     else "recover cost/length"),
-                                    Shared_Parser.Trace.all, 
Shared_Parser.Terminals);
+                                 Lex_Start_Line := Non_Grammar (Non_Grammar.Last_Index).Line_Region.Last;
                               end;
-                           end if;
-                           exit when Current_Parser.Is_Done;
-                        end loop;
-
-                        exit Main_Loop;
-
-                     else
-                        --  There were no previous errors. We allow the parse to fail, on the
-                        --  assumption that an otherwise correct input should not yield an
-                        --  ambiguous parse.
-                        declare
-                           Token : Base_Token renames Shared_Parser.Terminals 
(Shared_Parser.Terminals.Last_Index);
+                           end;
+                           Do_Scan := True;
+                        end In_Whitespace;
+
+                        procedure Handle_Non_Grammar
+                          (Non_Grammar : in out WisiToken.Lexer.Token_Arrays.Vector;
+                           Floating    : in     Boolean)
+                        is
+                           Last_Byte : constant Buffer_Pos :=
+                             (if Non_Grammar.Length = 0
+                              then Buffer_Pos'Last
+                              else Non_Grammar (Non_Grammar.Last_Index).Byte_Region.Last +
+                                (if Floating then Shift_Bytes else 0));
+
+                           Delete : SAL.Base_Peek_Type := 0;
                         begin
-                           raise WisiToken.Parse_Error with Error_Message
-                             (Shared_Parser.Lexer.File_Name, Token.Line, 
Token.Column,
-                              "Ambiguous parse:" & SAL.Base_Peek_Type'Image 
(Count) & " parsers active.");
-                        end;
-                     end if;
-                  end;
-               end if;
-            end;
+                           if Non_Grammar.Length = 0 or else
+                             Non_Grammar (Non_Grammar.Last_Index).ID = Tree.Lexer.Descriptor.SOI_ID
+                           then
+                              In_Whitespace;
+
+                           elsif Last_Byte <= Scanned_Byte_Pos +
+                             (if Deleted_Region.First <= Last_Byte
+                              then Base_Buffer_Pos (KMN.Deleted_Bytes)
+                              else 0)
+                           then
+                              --  All of Non_Grammar has been scanned already.
+                              --  test_incremental.adb Edit_Comment_10, _17.
+
+                              if (KMN.Inserted_Bytes = 0 or else
+                                    (Inserted_Region.Last <= Scanned_Byte_Pos and
+                                       Inserted_Region.Last < Terminal_Byte_Region.First + Shift_Bytes - 1)) and
+                                (KMN.Deleted_Bytes = 0 or else
+                                   (Deleted_Region.Last + Shift_Bytes <=
+                                      Scanned_Byte_Pos + Base_Buffer_Pos (KMN.Deleted_Bytes) and
+                                      Deleted_Region.Last < Terminal_Byte_Region.First - 1))
+                              then
+                                 --  Inserted and Deleted have been scanned already, and are not
+                                 --  adjacent to Terminal. test_incremental.adb Edit_Code_14,
+                                 --  Edit_Comment_10 ada_mode-interactive_02.adb
+                                 null;
 
-         when Reduce =>
-            null;
+                              else
+                                 In_Whitespace;
+                              end if;
+                           else
+                              for I in Non_Grammar.First_Index .. Non_Grammar.Last_Index loop
+                                 declare
+                                    Byte_Last : constant Buffer_Pos := Non_Grammar (I).Byte_Region.Last +
+                                      (if Floating then Shift_Bytes else 0);
+                                 begin
+                                    if Byte_Last + 1 >= Inserted_Region.First then
+                                       --  We don't need to check Scanned_Byte_Pos here; we always scan all
+                                       --  consecutive non_grammars, and we checked Scanned_Byte_Pos above.
+                                       --  ada_mode-recover_align_1.adb, test_incremental.adb Edit_Comment_2
+                                       Delete  := I;
+                                       Do_Scan := True;
+                                       exit;
+                                    end if;
+                                 end;
+                              end loop;
 
-         when Error =>
-            --  All parsers errored; attempt recovery
-            declare
-               use all type McKenzie_Recover.Recover_Status;
+                              if Delete > 0 and then Non_Grammar (Delete).ID = Tree.Lexer.Descriptor.SOI_ID then
+                                 if Delete = Non_Grammar.Last_Index then
+                                    Delete := 0;
+                                 else
+                                    Delete := Delete + 1;
+                                 end if;
+                              end if;
+
+                              if Delete > 0 then
+                                 --  Edit is in or before Non_Grammar (Delete) (ie a comment); set
+                                 --  Lex_Start_* to scan from edit start or start of Token, whichever
+                                 --  is earlier.
+
+                                 declare
+                                    Token : WisiToken.Lexer.Token renames Non_Grammar (Delete);
+                                 begin
+                                    if Tree.Lexer.Is_Block_Delimited (Token.ID) and
+                                      Inserted_Region.First < Token.Byte_Region.Last
+                                      --  Inserting in middle of Token, not adding to end.
+                                    then
+                                       declare
+                                          Delimiter_Pos : constant Base_Buffer_Pos := Tree.Lexer.Contains_End_Delimiter
+                                            (Token.ID, Inserted_Region);
+                                       begin
+                                          if Delimiter_Pos /= Invalid_Buffer_Pos then
+                                             --  A new end delimiter is inserted in Token, exposing the rest of
+                                             --  Token as code. test_incremental.adb Edit_Comment_4, Edit_Comment_7
+                                             Scan_End := Tree.Lexer.Find_Scan_End
+                                               (Token.ID,
+                                                (Delimiter_Pos, Invalid_Buffer_Pos),
+                                                Inserted => True,
+                                                Start    => False);
+
+                                             if Trace_Incremental_Parse > Detail then
+                                                Tree.Lexer.Trace.Put_Line
+                                                  ("end delimiter inserted:" &
+                                                     Token.Byte_Region.First'Image & " .." &
+                                                     Scan_End'Image);
+                                             end if;
+                                          end if;
+                                       end;
+                                    end if;
+
+                                    Lex_Start_Byte := Buffer_Pos'Min
+                                      (Token.Byte_Region.First + (if Floating then Shift_Bytes else 0),
+                                       Inserted_Region.First);
+
+                                    Lex_Start_Char := Buffer_Pos'Min
+                                      (Token.Char_Region.First + (if Floating then Shift_Bytes else 0),
+                                       Inserted_Region_Chars.First);
+
+                                    if Floating then
+                                       --  Tokens Delete .. Non_Grammar.Last contributed to Shift_Lines;
+                                       --  ignore that contribution because they are after the lex start.
+                                       --  test_incremental.adb Edit_Code_10
+                                       --  ada_mode-interactive_10.adb
+                                       --  ada_mode-recover_14 comment after extra 'begin'.
+                                       declare
+                                          Temp_Shift_Lines : Base_Line_Number_Type := Shift_Lines;
+                                       begin
+                                          for I in Delete .. Non_Grammar.Last_Index loop
+                                             Temp_Shift_Lines := @ + New_Line_Count (Non_Grammar (I).Line_Region);
+                                          end loop;
+                                          Lex_Start_Line := Token.Line_Region.First + Temp_Shift_Lines;
+                                       end;
+                                    else
+                                       Lex_Start_Line := Token.Line_Region.First;
+                                    end if;
+                                 end;
+
+                                 if Trace_Incremental_Parse > Detail then
+                                    Tree.Lexer.Trace.Put_Line
+                                      ((if Floating
+                                        then "delete floating_non_grammar"
+                                        else "delete non_grammar") &
+                                         Delete'Image & " .." & Non_Grammar.Last_Index'Image);
+                                 end if;
 
-               Recover_Result : McKenzie_Recover.Recover_Status := 
McKenzie_Recover.Recover_Status'First;
+                                 if not Floating then
+                                    for I in Delete .. Non_Grammar.Last_Index loop
+                                       Shift_Lines := @ - New_Line_Count (Non_Grammar (I).Line_Region);
+                                    end loop;
+                                 end if;
 
-               Pre_Recover_Parser_Count : constant SAL.Base_Peek_Type := 
Shared_Parser.Parsers.Count;
-               Start : Ada.Calendar.Time;
-            begin
-               --  Recover algorithms expect current token at
-               --  Parsers(*).Current_Token, will set
-               --  Parsers(*).Recover_Insert_Delete with new input tokens and
-               --  deletions, adjust Parsers(*).Stack, and set
-               --  Parsers(*).Current_Token and Parsers(*).Verb.
-
-               if Shared_Parser.Enable_McKenzie_Recover then
-                  if Debug_Mode then
-                     Trace.Put_Clock ("pre-recover" & 
Shared_Parser.Parsers.Count'Img & " active");
-                     Start := Ada.Calendar.Clock;
-                  end if;
-                  Recover_Result := McKenzie_Recover.Recover (Shared_Parser);
-                  if Debug_Mode then
-                     declare
-                        use Ada.Calendar;
-                        Recover_Duration : constant Duration := Clock - Start;
+                                 Non_Grammar.Set_First_Last (Non_Grammar.First_Index, Delete - 1);
+                              else
+                                 In_Whitespace;
+                              end if;
+                           end if;
+                        end Handle_Non_Grammar;
                      begin
-                        Trace.Put_Clock
-                          ("post-recover" & Shared_Parser.Parsers.Count'Img & 
" active," & Recover_Duration'Image);
+                        if Tree.Lexer.Is_Block_Delimited (Tree.ID (Terminal.Node)) then
+                           Check_Scan_End (Terminal.Node);
+                        end if;
+
+                        if Floating_Non_Grammar.Length > 0 and then
+                          Floating_Non_Grammar (Floating_Non_Grammar.First_Index).Byte_Region.First + Shift_Bytes <=
+                          Inserted_Region.First
+                        then
+                           --  The edit start is in a floated non_grammar.
+                           --  test_incremental.adb Edit_Comment_7, Edit_Code_10, _17
+                           --  ada_mode-recover_14.adb comment after deleted "begin".
+                           Handle_Non_Grammar (Floating_Non_Grammar, Floating => True);
+                        elsif Tree.ID (Terminal.Node) /= Tree.Lexer.Descriptor.SOI_ID then
+                           Handle_Non_Grammar
+                             (Tree.Non_Grammar_Var (Tree.Prev_Terminal (Terminal).Node), Floating => False);
+                        end if;
                      end;
                   end if;
+               end if;
 
-                  if Trace_Parse > Outline then
-                     if Recover_Result = Success  then
-                        Trace.Put_Line
-                          ("recover: succeed, parser count" & 
SAL.Base_Peek_Type'Image (Shared_Parser.Parsers.Count));
-                     else
-                        Trace.Put_Line
-                          ("recover: fail " & 
McKenzie_Recover.Recover_Status'Image (Recover_Result) &
-                             ", parser count" & SAL.Base_Peek_Type'Image 
(Shared_Parser.Parsers.Count));
+               if Do_Scan then
+                  if (Next_KMN.Deleted_Bytes > 0 or Next_KMN.Inserted_Bytes > 0) and then
+                    (Next_KMN_Stable_Last < Terminal_Byte_Region.Last or
+                       (Scan_End /= Invalid_Buffer_Pos and Next_KMN_Stable_Last + Shift_Bytes < Scan_End))
+                  then
+                     --  Next change is an actual change (not just last placeholder KMN),
+                     --  and it also overlaps this token. It may insert or delete a delimiter
+                     --  end, so we don't know when to end a scan; handle it then.
+                     --  test_incremental.adb Edit_String_07.
+                     --
+                     --  Or Scan_End is past next KMN, so we don't know shift_bytes for
+                     --  Delete_Scanned_Loop. test_incremental.adb Edit_String_15,
+                     --  ada_mode-recover_incremental_03.adb
+                     Do_Scan                := False;
+                     Delayed_Scan           := True;
+                     Delayed_Floating_Index := Positive_Index_Type'Last;
+                     Delayed_Lex_Start_Byte := Lex_Start_Byte;
+                     Delayed_Lex_Start_Char := Lex_Start_Char;
+                     Delayed_Lex_Start_Line := Lex_Start_Line;
+
+                     if Trace_Incremental_Parse > Detail then
+                        Tree.Lexer.Trace.Put_Line ("scan delayed");
                      end if;
                   end if;
+               end if;
 
-                  if Ada.Text_IO.Is_Open (Shared_Parser.Recover_Log_File) then
-                     declare
-                        use Ada.Text_IO;
-                     begin
-                        Put
-                          (Shared_Parser.Recover_Log_File,
-                           Ada.Calendar.Formatting.Image (Ada.Calendar.Clock) 
& " " &
-                             Shared_Parser.Partial_Parse_Active'Image & " " &
-                             Recover_Result'Image & " " &
-                             Pre_Recover_Parser_Count'Image & " '" &
-                             Shared_Parser.Lexer.File_Name & "'");
-
-                        Put (Shared_Parser.Recover_Log_File, '(');
-                        for Parser of Shared_Parser.Parsers loop
-                           Put (Shared_Parser.Recover_Log_File, Image 
(Parser.Recover.Results.Peek.Strategy_Counts));
-                           Put
-                             (Shared_Parser.Recover_Log_File,
-                              Integer'Image (Parser.Recover.Enqueue_Count) &
-                                Integer'Image (Parser.Recover.Check_Count) & " 
" &
-                                Boolean'Image (Parser.Recover.Success));
-                        end loop;
-                        Put (Shared_Parser.Recover_Log_File, ')');
+               if Do_Scan then
+                  Last_Scanned_Token := (others => <>);
 
-                        New_Line (Shared_Parser.Recover_Log_File);
-                        Flush (Shared_Parser.Recover_Log_File);
-                     end;
-                  end if;
-               else
-                  if Trace_Parse > Outline or Trace_McKenzie > Outline then
-                     Trace.Put_Line ("recover disabled");
+                  if Trace_Incremental_Parse > Detail then
+                     Tree.Lexer.Trace.Put_Line
+                       ("lexer.set_position" & Lex_Start_Byte'Image & Lex_Start_Char'Image & Lex_Start_Line'Image);
+                     if Scan_End /= Invalid_Buffer_Pos then
+                        Tree.Lexer.Trace.Put_Line ("scan_end " & Scan_End'Image);
+                     end if;
                   end if;
-               end if;
 
-               if Recover_Result = Success then
-                  for Parser_State of Shared_Parser.Parsers loop
-                     Parser_State.Resume_Active          := True;
-                     Parser_State.Conflict_During_Resume := False;
+                  Parser.Tree.Lexer.Set_Position
+                    (Byte_Position => Lex_Start_Byte,
+                     Char_Position => Lex_Start_Char,
+                     Line          => Lex_Start_Line);
 
-                     if Trace_Parse > Outline then
-                        Trace.Put_Line
-                          (Integer'Image (Parser_State.Label) & ": 
Current_Token " &
-                             Parser_State.Tree.Image 
(Parser_State.Current_Token, Trace.Descriptor.all) &
-                             " Shared_Token " & Image
-                               (Parser_State.Shared_Token, 
Shared_Parser.Terminals, Trace.Descriptor.all));
-
-                        if Trace_Parse > Detail then
-                           Shared_Parser.Trace.Put_Line
-                             (Integer'Image (Parser_State.Label) & ": 
resume_active: True, token goal" &
-                                WisiToken.Token_Index'Image 
(Parser_State.Resume_Token_Goal));
-                        end if;
-                     end if;
+                  --  Ensure Terminal.Node is first in Terminal.Element, so we 
can
+                  --  insert before it.
+                  Breakdown (Terminal);
 
-                     Parser_State.Zombie_Token_Count := 0;
+                  Last_Grammar := Tree.Prev_Terminal (Terminal);
 
-                     case Parser_State.Verb is
-                     when Reduce =>
-                        null;
+                  Scan_Changed_Loop :
+                  loop
+                     declare
+                        Token       : Lexer.Token;
+                        Error_Count : constant Natural := Tree.Lexer.Find_Next (Token);
+                        Ref         : Terminal_Ref;
+                        Scan_Errors : Error_Data_Lists.List;
+                     begin
+                        if Trace_Lexer > Outline then
+                           Tree.Lexer.Trace.Put_Line ("lex: " & Lexer.Image (Token, Parser.Tree.Lexer.Descriptor.all));
+                        end if;
 
-                     when Error =>
-                        --  Force this parser to be terminated.
-                        if Shared_Parser.Enable_McKenzie_Recover then
-                           Parser_State.Zombie_Token_Count := 
Shared_Parser.Table.McKenzie_Param.Check_Limit + 1;
+                        if Error_Count > 0 then
+                           declare
+                              Cur : WisiToken.Lexer.Error_Lists.Cursor := Tree.Lexer.Errors.Last;
+                           begin
+                              for I in 1 .. Error_Count - 1 loop
+                                 WisiToken.Lexer.Error_Lists.Previous (Cur);
+                              end loop;
+                              for I in 1 .. Error_Count loop
+                                 declare
+                                    Error : Lexer.Error renames Tree.Lexer.Errors (Cur);
+                                 begin
+                                    Scan_Errors.Append (Lexer_Error'(Error => Error));
+
+                                    if Trace_Lexer > Outline then
+                                       Tree.Lexer.Trace.Put_Line
+                                         (" ... error: " & Error.Char_Pos'Image &
+                                            (if Error.Recover_Char (1) /= ASCII.NUL
+                                             then "'" & Error.Recover_Char (1) & "'"
+                                             else ""));
+                                    end if;
+                                 end;
+                                 WisiToken.Lexer.Error_Lists.Next (Cur);
+                              end loop;
+                           end;
                         end if;
 
-                     when Shift =>
-                        null;
+                        exit Scan_Changed_Loop when Token.ID = Parser.Tree.Lexer.Descriptor.EOI_ID;
+
+                        if Token.ID >= Parser.Tree.Lexer.Descriptor.First_Terminal and
+                          Scan_End /= Invalid_Buffer_Pos
+                        then
+                           if Token.Byte_Region.First > Scan_End then
+                              --  test_incremental.adb Edit_Comment_4, _5, _7, Delete_Comment_Start,
+                              --  edit_String_*
+                              --
+                              --  We set Scan_End invalid after using it in Delete_Scanned_Loop below.
+                              --  test_incremental.adb Edit_String_14.
+                              exit Scan_Changed_Loop;
+                           end if;
+                        else
+                           exit Scan_Changed_Loop when
+                             Token.ID >= Parser.Tree.Lexer.Descriptor.First_Terminal and then
+                             not (Token.Byte_Region.First - Shift_Bytes <= Stable_Region.Last or
+                                    --  Token started in stable region
+
+                                    (KMN.Inserted_Bytes > 0 and then
+                                       Token.Byte_Region.First <= Inserted_Region.Last + 1
+                                       --  Token starts in or immediately after inserted region
+                                       --  test_incremental.adb Edit_Code_4 '1 +', Edit_Code_8 ';'
+
+                                    ) or
+                                    (KMN.Deleted_Bytes > 0 and then
+                                       Token.Byte_Region.First - (Shift_Bytes + KMN.Inserted_Bytes) =
+                                       Deleted_Region.First
+                                       --  Previously existing token starts immediately after deleted region;
+                                       --  it may have been truncated (test_incremental.adb Edit_Code_4 'Cc')
+                                    ));
+                        end if;
 
-                     when Pause | Accept_It =>
-                        raise SAL.Programmer_Error;
-                     end case;
-                  end loop;
+                        Scanned_Byte_Pos   := Token.Byte_Region.Last;
+                        Scanned_Char_Pos   := Token.Char_Region.Last;
+                        Last_Scanned_Token := Token;
 
-               else
-                  --  Terminate with error. Parser_State has all the required 
info on
-                  --  the original error (recorded by Error in Do_Action); 
report reason
-                  --  recover failed.
-                  for Parser_State of Shared_Parser.Parsers loop
-                     Parser_State.Errors.Append
-                       ((Label          => LR.Message,
-                         First_Terminal => Trace.Descriptor.First_Terminal,
-                         Last_Terminal  => Trace.Descriptor.Last_Terminal,
-                         Recover        => <>,
-                         Msg            =>
-                           (if Shared_Parser.Enable_McKenzie_Recover
-                            then +"recover: fail " & 
McKenzie_Recover.Recover_Status'Image (Recover_Result)
-                            else +"recover disabled")));
-                  end loop;
-                  raise WisiToken.Syntax_Error;
-               end if;
+                        if Token.ID >= 
Parser.Tree.Lexer.Descriptor.First_Terminal then
+                           --  grammar token
+                           Ref := Tree.Insert_Source_Terminal
+                             (Stream, Token,
+                              Before => Terminal.Element,
+                              Errors => Scan_Errors);
 
-               --  Recover sets Parser.Verb to Shift for all active parsers, to
-               --  indicate it no longer has an error. Set Current_Verb to 
reflect
-               --  that.
-               Current_Verb := Shift;
-            end;
-         end case;
+                           Process_Grammar_Token (Parser, Token, Ref.Node);
+                           Last_Grammar := Ref;
 
-         --  We don't use 'for Parser_State of Parsers loop' here,
-         --  because terminate on error and spawn on conflict require
-         --  changing the parser list.
-         declare
-            Current_Parser : Parser_Lists.Cursor := 
Shared_Parser.Parsers.First;
-         begin
-            Action_Loop :
-            loop
-               exit Action_Loop when Current_Parser.Is_Done;
-
-               --  We don't check duplicate state during resume, because the 
tokens
-               --  inserted/deleted by error recover may cause initially 
duplicate
-               --  states to diverge.
-               if not Current_Parser.State_Ref.Resume_Active and
-                 Shared_Parser.Terminate_Same_State and
-                 Current_Verb = Shift
-               then
-                  Shared_Parser.Parsers.Duplicate_State
-                    (Current_Parser, Shared_Parser.Trace.all, 
Shared_Parser.Terminals);
-                  --  If Duplicate_State terminated Current_Parser, 
Current_Parser now
-                  --  points to the next parser. Otherwise it is unchanged.
-               end if;
+                           if Trace_Incremental_Parse > Detail then
+                              Tree.Lexer.Trace.Put_Line ("scan new " & 
Tree.Image (Ref));
+                           end if;
 
-               exit Action_Loop when Current_Parser.Is_Done;
+                        else
+                           --  non_grammar token
+                           if Trace_Incremental_Parse > Detail then
+                              Tree.Lexer.Trace.Put_Line
+                                ("scan new " & Lexer.Full_Image (Token, 
Parser.Tree.Lexer.Descriptor.all));
+                           end if;
 
-               if Trace_Parse > Extra then
-                  Trace.Put_Line
-                    ("current_verb: " & Parse_Action_Verbs'Image 
(Current_Verb) &
-                       "," & Integer'Image (Current_Parser.Label) &
-                       ".verb: " & Parse_Action_Verbs'Image 
(Current_Parser.Verb));
-               end if;
+                           if Error_Count > 0 then
+                              --  test_incremental.adb Lexer_Errors_04
+                              Tree.Add_Errors (Tree.Shared_Stream, 
Last_Grammar.Node, Scan_Errors);
+                           end if;
+                           Process_Non_Grammar_Token (Parser, 
Last_Grammar.Node, Token);
+                           Shift_Lines := @ + New_Line_Count 
(Token.Line_Region);
+                        end if;
 
-               --  Each branch of the following 'if' calls either 
Current_Parser.Free
-               --  (which advances to the next parser) or Current_Parser.Next.
+                     end;
+                  end loop Scan_Changed_Loop;
+               end if;
 
-               if Current_Parser.Verb = Error then
-                  --  This parser is a zombie; see Check_Error above.
-                  --
-                  --  Check to see if it is time to terminate it
-                  if Shared_Parser.Enable_McKenzie_Recover and then
-                    Current_Parser.State_Ref.Zombie_Token_Count <= 
Shared_Parser.Table.McKenzie_Param.Check_Limit
-                  then
-                     if Trace_Parse > Detail then
-                        Trace.Put_Line (Integer'Image (Current_Parser.Label) & 
": zombie");
-                     end if;
+               --  Do this here so Shift_Bytes is correct in Delete_Scanned_Loop.
+               --
+               --  However, if Terminal is before the edit region, the previous
+               --  shift applies. test_incremental.adb Edit_Whitespace
+               Shift_Bytes := @ - KMN.Deleted_Bytes + KMN.Inserted_Bytes;
+               Shift_Chars := @ - KMN.Deleted_Chars + KMN.Inserted_Chars;
 
-                     Current_Parser.Next;
-                  else
-                     Shared_Parser.Parsers.Terminate_Parser
-                       (Current_Parser, "zombie", Shared_Parser.Trace.all, 
Shared_Parser.Terminals);
-                  end if;
+               Shift_Lines := @ + Deleted_Shift_Lines;
 
-               elsif Current_Parser.Verb = Current_Verb then
+               Old_Byte_Pos := Stable_Region.Last + KMN.Deleted_Bytes;
+               Old_Char_Pos := Stable_Region_Chars.Last + KMN.Deleted_Chars;
+               New_Byte_Pos := Inserted_Region.Last;
+               New_Char_Pos := Inserted_Region_Chars.Last;
+               pragma Assert (New_Byte_Pos - Old_Byte_Pos = Shift_Bytes);
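
--  Illustrative note, not part of this commit: '@' in the assignments above is
--  the Ada 2022 target name, so "Shift_Bytes := @ - KMN.Deleted_Bytes + ..."
--  reads "Shift_Bytes := Shift_Bytes - KMN.Deleted_Bytes + ...".  A worked
--  example of the shift arithmetic with hypothetical byte counts: a KMN that
--  deletes 5 bytes and inserts 8 after a stable region ending at byte 100
--  (previous Shift_Bytes = 0) shifts every later position by +3.

procedure KMN_Shift_Example is
   Stable_Last    : constant := 100;
   Deleted_Bytes  : constant := 5;
   Inserted_Bytes : constant := 8;
   Shift_Bytes    : constant := 0 - Deleted_Bytes + Inserted_Bytes;  --  +3
   Old_Byte_Pos   : constant := Stable_Last + Deleted_Bytes;   --  105; end of the edit in the old text
   New_Byte_Pos   : constant := Stable_Last + Inserted_Bytes;  --  108; end of the insert in the new text
begin
   pragma Assert (New_Byte_Pos - Old_Byte_Pos = Shift_Bytes);
   null;
end KMN_Shift_Example;
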
 
-                  if Trace_Parse > Extra then
-                     Parser_Lists.Put_Top_10 (Trace, Current_Parser);
-                  end if;
+               if Last_Scanned_Token.ID /= Invalid_Token_ID then
+                  --  If Last_Scanned_Token.ID = Invalid_Token_ID, only whitespace was
+                  --  scanned; Edit_Comment_8
+                  --
+                  --  We don't check Do_Scan, because we may be processing the next KMN
+                  --  covered by a previous scan.
+                  --
+                  --  Delete tokens in the current KMN that were replaced by the scan.
+                  --
+                  --  If a scan covers more than one KMN, we can't process more than the
+                  --  first because Shift_* is not known. test_incremental.adb
+                  --  Edit_Code_4, Edit_Comment_13, ada_skel.adb ada-skel-return.
+                  --
+                  --  If the last token scanned was a comment created by an inserted
+                  --  comment start or extended by a deleted comment end, then we must
+                  --  delete all tokens that are now part of the comment.
+                  --  test_incremental.adb Delete_Comment_End, Edit_Comment_9,
+                  --  Insert_Comment_Start
+                  Delete_Scanned_Loop :
+                  loop
+                     exit Delete_Scanned_Loop when Tree.ID (Terminal.Node) = 
Parser.Tree.Lexer.Descriptor.EOI_ID;
 
-                  declare
-                     State : Parser_Lists.Parser_State renames 
Current_Parser.State_Ref.Element.all;
-                  begin
-                     Action := Action_For
-                       (Table => Shared_Parser.Table.all,
-                        State => State.Stack.Peek.State,
-                        ID    => State.Tree.ID (State.Current_Token));
-                  end;
+                     exit Delete_Scanned_Loop when Tree.Byte_Region
+                       (Terminal.Node, Trailing_Non_Grammar => False).First >
+                       (if Scan_End = Invalid_Buffer_Pos
+                        then Next_KMN_Stable_Last
+                        else Scan_End - (if Terminal_Shifted then 0 else 
Shift_Bytes));
+                     --  Terminal is in next KMN; we don't know Shift_Bytes to compare to
+                     --  Scanned_Byte_Pos. test_incremental.adb Edit_Comment_13
 
-                  declare
-                     Conflict : Parse_Action_Node_Ptr := Action.Next;
-                  begin
-                     loop
-                        exit when Conflict = null;
-                        --  Spawn a new parser (before modifying 
Current_Parser stack).
+                     Terminal_Shifted := False;
 
-                        Current_Parser.State_Ref.Conflict_During_Resume := 
Current_Parser.State_Ref.Resume_Active;
+                     exit Delete_Scanned_Loop when Tree.Byte_Region
+                       (Terminal.Node, Trailing_Non_Grammar => False).First + 
Shift_Bytes -
+                       (if Tree.Byte_Region (Terminal.Node, 
Trailing_Non_Grammar => False).First <= Stable_Region.Last
+                        then -KMN.Deleted_Bytes + KMN.Inserted_Bytes else 0) > 
Scanned_Byte_Pos;
 
-                        if Shared_Parser.Parsers.Count = 
Shared_Parser.Max_Parallel then
-                           --  If errors were recovered, terminate a parser 
that used the
-                           --  highest cost solution.
-                           declare
-                              use all type 
WisiToken.Parse.LR.Parser_Lists.Cursor;
-                              Max_Recover_Cost : Integer             := 0;
-                              Cur              : Parser_Lists.Cursor := 
Shared_Parser.Parsers.First;
-                              Max_Parser       : Parser_Lists.Cursor := Cur;
-                           begin
-                              loop
-                                 exit when Cur.Is_Done;
-                                 if Cur.Total_Recover_Cost > Max_Recover_Cost 
then
-                                    Max_Parser       := Cur;
-                                    Max_Recover_Cost := Cur.Total_Recover_Cost;
-                                 end if;
-                                 Cur.Next;
-                              end loop;
+                     if Tree.ID (Terminal) = Tree.Lexer.Descriptor.SOI_ID then
+                        Tree.Next_Terminal (Terminal);
 
-                              if Max_Recover_Cost > 0 then
-                                 if Max_Parser = Current_Parser then
-                                    Current_Parser.Next;
+                     else
+                        --  Ensure Terminal is Single, so we can delete it.
+                        Breakdown (Terminal, To_Single => True);
 
-                                    Shared_Parser.Parsers.Terminate_Parser
-                                      (Max_Parser, "too many parsers; max 
error repair cost", Trace,
-                                       Shared_Parser.Terminals);
+                        declare
+                           To_Delete : Stream_Node_Ref := Terminal;
+                        begin
+                           Tree.Next_Terminal (Terminal);
+                           if Trace_Incremental_Parse > Detail then
+                              Tree.Lexer.Trace.Put_Line
+                                ("delete modified " &
+                                   Tree.Image (To_Delete.Element, 
Terminal_Node_Numbers => True, Non_Grammar => True));
+                           end if;
 
-                                    --  We changed Current_Parser, so start 
over
-                                    goto Continue_Action_Loop;
-                                 else
-                                    Shared_Parser.Parsers.Terminate_Parser
-                                      (Max_Parser, "too many parsers; max 
error repair cost", Trace,
-                                       Shared_Parser.Terminals);
+                           --  We always scan all non_grammar immediately following a scanned
+                           --  terminal.
+                           for Token of Tree.Non_Grammar_Const 
(To_Delete.Node) loop
+                              Shift_Lines := @ - New_Line_Count 
(Token.Line_Region);
+
+                              if (Token.Byte_Region.Last + Shift_Bytes <= 
Scanned_Byte_Pos and
+                                    Token.Byte_Region.First + Shift_Bytes <= 
Next_KMN_Stable_Last + Shift_Bytes)
+                                --  Token was scanned, and is in current KMN
+                                or
+                                Token.Byte_Region.Last <= Deleted_Region.Last
+                                --  token was deleted. test/ada_mode-interactive_03.adb delete text end of buffer.
+                              then
+                                 if Trace_Incremental_Parse > Detail then
+                                    Tree.Lexer.Trace.Put_Line
+                                      ("delete non_grammar " & Lexer.Image 
(Token, Tree.Lexer.Descriptor.all));
                                  end if;
+                              else
+                                 if Trace_Incremental_Parse > Detail then
+                                    Tree.Lexer.Trace.Put_Line
+                                      ("float non_grammar " & Lexer.Full_Image 
(Token, Tree.Lexer.Descriptor.all));
+                                 end if;
+                                 Floating_Non_Grammar.Append (Token);
                               end if;
-                           end;
-                        end if;
+                           end loop;
+
+                           pragma Assert (To_Delete.Node /= Tree.SOI and 
To_Delete.Node /= Tree.EOI);
+                           Tree.Stream_Delete (Stream, To_Delete.Element);
+                           Maybe_Delete_Lexer_Errors (To_Delete.Node);
+                        end;
+                     end if;
+                  end loop Delete_Scanned_Loop;
+                  Scan_End := Invalid_Buffer_Pos;
+               end if;
+
+               --  If any Floating_Non_Grammar are in this KMN's change region or next
+               --  KMN stable, they can be handled here.
+               declare
+                  Last_Handled_Non_Grammar : SAL.Base_Peek_Type := 
Lexer.Token_Arrays.No_Index;
+
+                  function Find_Element
+                    (Target_Bytes : in Buffer_Pos;
+                     After        : in Boolean)
+                    return Terminal_Ref
+                  --  If Target_Bytes < Terminal.Byte_Region.First: If After
+                  --  is True, return terminal node that is after or contains
+                  --  Target_Bytes, where prev terminal is before Target_Bytes. Else
+                  --  return terminal that is before Target_Bytes, where next is after.
+                  --
+                  --  Otherwise similar, but searching forward.
+                  --
+                  --  Target_Bytes is unshifted.
+                  is
+                     Terminal_First : constant Buffer_Pos := Tree.Byte_Region
+                       (Terminal.Node, Trailing_Non_Grammar => False).First;
+                     Searching_Back : constant Boolean := Terminal_First > 
Target_Bytes;
+
+                     Before_1 : Terminal_Ref := -- before Before
+                       (if Searching_Back
+                        then Tree.Prev_Terminal (Terminal)
+                        else Terminal);
+                     Before      : Terminal_Ref :=
+                       (if Searching_Back
+                        then Terminal
+                        else Tree.Next_Terminal (Terminal));
+                  begin
+                     loop
+                        if Searching_Back then
+                           if Tree.ID (Terminal) = 
Tree.Lexer.Descriptor.SOI_ID then
+                              return Terminal;
+                           end if;
 
-                        if Shared_Parser.Parsers.Count = 
Shared_Parser.Max_Parallel then
                            declare
-                              Parser_State : Parser_Lists.Parser_State renames 
Current_Parser.State_Ref;
-                              Token : Base_Token renames 
Shared_Parser.Terminals (Parser_State.Shared_Token);
+                              --  Target_Bytes is unshifted. If searching forward, all nodes are
+                              --  also unshifted. If searching back, all nodes except Terminal are
+                              --  shifted. Compare Target_Bytes to unshifted region.
+                              --
+                              --  region bounds test case: ada_mode-recover_partial_14.adb
+                              --  contains; ada_mode-recover_42.adb lexer_error string_literal
+                              Shift_First : constant Base_Buffer_Pos := 
-Shift_Bytes;
+                              Shift_Last : constant Base_Buffer_Pos := (if 
Before = Terminal then 0 else -Shift_Bytes);
+
+                              First : constant Buffer_Pos := Tree.Byte_Region
+                                (Before_1.Node, Trailing_Non_Grammar => 
False).Last + 1 + Shift_First;
+                              Last  : constant Buffer_Pos :=
+                                (if After
+                                 then Tree.Byte_Region (Before).Last
+                                 else Tree.Byte_Region (Before).First - 1) + 
Shift_Last;
                            begin
-                              raise WisiToken.Parse_Error with Error_Message
-                                (Shared_Parser.Lexer.File_Name, Token.Line, 
Token.Column,
-                                 "too many parallel parsers required in 
grammar state" &
-                                   State_Index'Image 
(Parser_State.Stack.Peek.State) &
-                                   "; simplify grammar, or increase 
max-parallel (" &
-                                   SAL.Base_Peek_Type'Image 
(Shared_Parser.Max_Parallel) & ")");
+                              exit when Target_Bytes in First .. Last;
                            end;
-
                         else
-                           if Trace_Parse > Outline then
-                              declare
-                                 Parser_State : Parser_Lists.Parser_State 
renames Current_Parser.State_Ref;
-                              begin
-                                 Trace.Put_Line
-                                   (Integer'Image (Current_Parser.Label) & ": 
" &
-                                      Trimmed_Image 
(Parser_State.Stack.Peek.State) & ": " &
-                                      Parser_State.Tree.Image
-                                        (Parser_State.Current_Token, 
Trace.Descriptor.all) & " : " &
-                                      "spawn" & Integer'Image 
(Shared_Parser.Parsers.Last_Label + 1) & ", (" &
-                                      Trimmed_Image (1 + Integer 
(Shared_Parser.Parsers.Count)) & " active)");
-                              end;
-                           end if;
-
-                           Shared_Parser.Parsers.Prepend_Copy (Current_Parser);
-                           Do_Action (Conflict.Item, 
Shared_Parser.Parsers.First, Shared_Parser);
-
-                           --  We must terminate error parsers immediately in 
order to avoid
-                           --  zombie parsers during recovery.
                            declare
-                              Temp : Parser_Lists.Cursor := 
Shared_Parser.Parsers.First;
+                              First : constant Buffer_Pos := Tree.Byte_Region
+                                (Before_1.Node, Trailing_Non_Grammar => 
False).Last + 1;
+                              Last  : constant Buffer_Pos :=
+                                (if After
+                                 then Tree.Byte_Region (Before).Last
+                                 else Tree.Byte_Region (Before).First - 1);
                            begin
-                              Check_Error (Temp);
+                              exit when Target_Bytes in First .. Last;
                            end;
                         end if;
-
-                        Conflict := Conflict.Next;
+                        if Terminal_First > Target_Bytes then
+                           Before := Before_1;
+                           Tree.Prev_Terminal (Before_1);
+                        else
+                           Before_1 := Before;
+                           Tree.Next_Terminal (Before);
+                        end if;
                      end loop;
-                  end;
-                  Do_Action (Action.Item, Current_Parser, Shared_Parser);
-                  Check_Error (Current_Parser);
+                     return (if After then Before else Before_1);
+                  end Find_Element;
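
--  Illustrative sketch, not part of this commit: a simplified stand-alone
--  analogue of Find_Element above, using a plain array of byte regions
--  instead of tree terminals and ignoring Shift_Bytes.  With After => True it
--  returns the token after or containing Target; with After => False, the
--  token before Target.

procedure Find_Element_Demo is
   type Region is record
      First, Last : Positive;
   end record;
   type Region_Array is array (Positive range <>) of Region;

   Tokens : constant Region_Array := ((1, 3), (5, 9), (12, 12), (14, 20));

   function Find (Target : Positive; After : Boolean) return Positive is
   begin
      for I in Tokens'Range loop
         if Tokens (I).Last >= Target then
            --  Tokens (I) is the first token ending at or after Target
            return (if After then I else Positive'Max (Tokens'First, I - 1));
         end if;
      end loop;
      return Tokens'Last;  --  Target is past the last token
   end Find;
begin
   pragma Assert (Find (10, After => True)  = 3);  --  the token at 12 .. 12
   pragma Assert (Find (10, After => False) = 2);  --  the token at  5 ..  9
   null;
end Find_Element_Demo;
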
 
-               else
-                  --  Current parser is waiting for others to catch up
-                  Current_Parser.Next;
-               end if;
-               <<Continue_Action_Loop>>
-            end loop Action_Loop;
-         end;
-      end loop Main_Loop;
+                  procedure Restore (I : in Positive_Index_Type)
+                  --  Restore Floating_Non_Grammar (I)
+                  is
+                     Token : Lexer.Token renames Floating_Non_Grammar (I);
 
-      if Trace_Parse > Outline then
-         Trace.Put_Line (Shared_Parser.Parsers.First.Label'Image & ": 
succeed");
-      end if;
+                     Containing_Terminal : constant Terminal_Ref := 
Find_Element
+                       (Token.Byte_Region.First, After => False);
 
-      if Debug_Mode then
-         Trace.Put_Clock ("finish parse");
-      end if;
+                     Old_Token : constant Lexer.Token := Token; -- for trace message
 
-      --  We don't raise Syntax_Error for lexer errors, since they are all
-      --  recovered, either by inserting a quote, or by ignoring the
-      --  character.
-   exception
-   when Syntax_Error | WisiToken.Parse_Error | Partial_Parse =>
-      if Debug_Mode then
-         Trace.Put_Clock ("finish - error");
-      end if;
-      raise;
+                     Temp_Shift_Lines : Base_Line_Number_Type := Shift_Lines;
+                  begin
+                     if Token.Byte_Region.First < Tree.Byte_Region
+                       (Terminal.Node, Trailing_Non_Grammar => False).First
+                     then
+                        --  Only shift if inserted before Terminal. ada_mode-recover_14
+                        --
+                        --  Ignore this and remaining Floating_Non_Grammar's contribution to
+                        --  Shift_Lines; we are inserting it before those. (new_lines in
+                        --  Floating_Non_Grammar were previously subtracted from Shift_Lines).
+                        --  ada_mode-interactive_01.adb, ada_mode-recover_33.adb,
+                        --  ada_mode-recover_extra_end_loop.adb
+                        for J in I .. Floating_Non_Grammar.Last_Index loop
+                           Temp_Shift_Lines := @ + New_Line_Count 
(Floating_Non_Grammar (J).Line_Region);
+                        end loop;
+                        Lexer.Shift (Token, Shift_Bytes, Shift_Chars, 
Temp_Shift_Lines);
+                     end if;
 
-   when E : others =>
-      declare
-         Msg : constant String := Ada.Exceptions.Exception_Name (E) & ": " & 
Ada.Exceptions.Exception_Message (E);
-      begin
-         if Shared_Parser.Parsers.Count > 0 then
-            --  Emacs displays errors in the *syntax-errors* buffer
-            Shared_Parser.Parsers.First_State_Ref.Errors.Append
-              ((Label          => LR.Message,
-                First_Terminal => Trace.Descriptor.First_Terminal,
-                Last_Terminal  => Trace.Descriptor.Last_Terminal,
-                Recover        => <>,
-                Msg            => +Msg));
-         end if;
+                     Shift_Lines := @ + New_Line_Count (Token.Line_Region);
 
-         if Debug_Mode then
-            Trace.Put_Line (GNAT.Traceback.Symbolic.Symbolic_Traceback (E)); 
-- includes Prefix
-            Trace.New_Line;
-         end if;
+                     Tree.Non_Grammar_Var (Containing_Terminal.Node).Append 
(Token);
 
-         --  Emacs displays the exception message in the echo area; easy to 
miss
-         raise WisiToken.Parse_Error with Msg;
-      end;
-   end Parse;
+                     if Trace_Incremental_Parse > Detail then
+                        Tree.Lexer.Trace.Put_Line
+                          ("restore floating_non_grammar " & Lexer.Image 
(Old_Token, Tree.Lexer.Descriptor.all));
+                        Tree.Lexer.Trace.Put_Line
+                          (" ... to " & Tree.Image (Containing_Terminal, 
Non_Grammar => True));
+                     end if;
+                  end Restore;
 
-   overriding function Tree (Shared_Parser : in Parser) return 
Syntax_Trees.Tree
-   is begin
-      if Shared_Parser.Parsers.Count > 1 then
-         raise WisiToken.Parse_Error with "ambigous parse";
-      else
-         return Shared_Parser.Parsers.First_State_Ref.Tree;
+               begin
+                  if Delayed_Scan and then
+                    (Delayed_Floating_Index = Positive_Index_Type'Last or
+                       Floating_Non_Grammar.First_Index = 
Delayed_Floating_Index)
+                  then
+                     null;
+                  else
+                     for I in Floating_Non_Grammar.First_Index ..
+                       (if Delayed_Scan then Delayed_Floating_Index - 1 else 
Floating_Non_Grammar.Last_Index)
+                     loop
+                        exit when Floating_Non_Grammar (I).Byte_Region.First > 
Next_KMN_Stable_Last;
+                        --  If token is in next KMN edit region, shift_bytes is wrong here.
+
+                        if Floating_Non_Grammar (I).Byte_Region.First + 
Shift_Bytes <= Scanned_Byte_Pos then
+                           --  Non_grammar token was rescanned; delete the old one.
+                           --  test case: ada_mode-recover_align_1.adb, test_incremental.adb Edit_Whitespace
+
+                           if Trace_Incremental_Parse > Detail then
+                              Tree.Lexer.Trace.Put_Line
+                                ("delete floating_non_grammar " & 
Lexer.Full_Image
+                                   (Floating_Non_Grammar (I), 
Tree.Lexer.Descriptor.all));
+                           end if;
+                           Last_Handled_Non_Grammar := I;
+
+                        elsif Floating_Non_Grammar (I).Byte_Region.Last <= 
Next_KMN_Stable_Last then
+                           --  Non_Grammar is in next KMN stable region; find terminal to append
+                           --  non_grammar to. ada_mode-recover_18.adb
+                           Restore (I);
+                           Last_Handled_Non_Grammar := I;
+                        else
+                           exit;
+                        end if;
+                     end loop;
+                  end if;
+                  if Last_Handled_Non_Grammar /= Lexer.Token_Arrays.No_Index 
then
+                     if Last_Handled_Non_Grammar = 
Floating_Non_Grammar.Last_Index then
+                        Floating_Non_Grammar.Clear;
+                     else
+                        Floating_Non_Grammar.Set_First_Last
+                          (Last_Handled_Non_Grammar + 1, 
Floating_Non_Grammar.Last_Index);
+                     end if;
+                  end if;
+               end;
+            end;
+
+            KMN_Node := KMN_Lists.Next (KMN_Node);
+
+            if not KMN_Lists.Has_Element (KMN_Node) then
+               --  Finally shift EOI.
+               pragma Assert
+                 (Tree.ID (Terminal.Node) = Tree.Lexer.Descriptor.EOI_ID and
+                    Tree.Non_Grammar_Const (Terminal.Node).Length = 1); -- 
EOI_ID
+
+               Tree.Shift
+                 (Terminal.Node, Shift_Bytes, Shift_Chars, Shift_Lines, 
Buffer_Pos'Last, Terminal_Non_Grammar_Next);
+
+               if Trace_Incremental_Parse > Detail then
+                  Tree.Lexer.Trace.Put_Line ("final shift " & Tree.Image 
(Terminal, Non_Grammar => True));
+               end if;
+
+               exit KMN_Loop;
+            end if;
+         end;
+      end loop KMN_Loop;
+
+      if Tree.ID (Terminal.Node) /= Parser.Tree.Lexer.Descriptor.EOI_ID then
+         raise User_Error with "edit list does not cover entire tree";
       end if;
-   end Tree;
 
-   overriding function Tree_Var_Ref (Shared_Parser : aliased in out Parser) 
return Syntax_Trees.Tree_Variable_Reference
-   is begin
-      if Shared_Parser.Parsers.Count > 1 then
-         raise WisiToken.Parse_Error with "ambigous parse";
-      else
-         return (Element => Shared_Parser.Parsers.First_State_Ref.Tree'Access);
+      if not Floating_Non_Grammar.Is_Empty then
+         raise SAL.Programmer_Error with "floating_non_grammar not emptied: " 
& Lexer.Image
+           (Floating_Non_Grammar, Tree.Lexer.Descriptor.all);
       end if;
-   end Tree_Var_Ref;
 
-   overriding
-   procedure Execute_Actions
-     (Parser          : in out LR.Parser.Parser;
-      Image_Augmented : in     Syntax_Trees.Image_Augmented := null)
+      if Debug_Mode then
+         declare
+            Error_Reported : WisiToken.Syntax_Trees.Node_Sets.Set;
+         begin
+            Parser.Tree.Validate_Tree (Parser.User_Data.all, Error_Reported, 
Node_Index_Order => False);
+            if Error_Reported.Count /= 0 then
+               if Trace_Incremental_Parse > Outline then
+                  Tree.Lexer.Trace.New_Line;
+                  Tree.Lexer.Trace.Put_Line ("edit_tree: validate_tree failed; 
tree:");
+                  Tree.Print_Streams (Children => True, Non_Grammar => True);
+               end if;
+               raise WisiToken.Parse_Error with "edit_tree: validate_tree 
failed";
+            end if;
+         end;
+      end if;
+   end Edit_Tree;
+
+   overriding procedure Parse
+     (Shared_Parser    : in out LR.Parser.Parser;
+      Recover_Log_File : in     Ada.Text_IO.File_Type;
+      Edits            : in     KMN_Lists.List := KMN_Lists.Empty_List;
+      Pre_Edited       : in     Boolean        := False)
+   is separate;
+
+   overriding procedure Execute_Actions
+     (Parser              : in out LR.Parser.Parser;
+      Action_Region_Bytes : in     WisiToken.Buffer_Region)
    is
+      use all type Syntax_Trees.Post_Parse_Action;
       use all type Syntax_Trees.User_Data_Access;
-      use all type WisiToken.Syntax_Trees.Semantic_Action;
-
-      Descriptor : WisiToken.Descriptor renames Parser.Trace.Descriptor.all;
 
       procedure Process_Node
         (Tree : in out Syntax_Trees.Tree;
-         Node : in     Valid_Node_Index)
+         Node : in     Syntax_Trees.Valid_Node_Access)
       is
          use all type Syntax_Trees.Node_Label;
+         Node_Byte_Region : constant Buffer_Region := Tree.Byte_Region
+           (Node, Trailing_Non_Grammar => True);
       begin
-         if Tree.Label (Node) /= Nonterm then
+         if Tree.Label (Node) /= Nonterm or else
+           not (Node_Byte_Region = Null_Buffer_Region or
+                  Overlaps (Node_Byte_Region, Action_Region_Bytes))
+         then
             return;
          end if;
 
+         for Child of Tree.Children (Node) loop
+            if Child /= Syntax_Trees.Invalid_Node_Access then
+               --  Child can be null in an edited tree
+               Process_Node (Tree, Child);
+            end if;
+         end loop;
+
+         Parser.User_Data.Reduce (Tree, Node);
          declare
-            Tree_Children : constant Valid_Node_Index_Array := Tree.Children 
(Node);
+            Post_Parse_Action : constant Syntax_Trees.Post_Parse_Action := 
Parser.Get_Post_Parse_Action
+              (Tree.Production_ID (Node));
          begin
-            Parser.User_Data.Reduce (Tree, Node, Tree_Children);
-            if Tree.Action (Node) /= null then
+            if Post_Parse_Action /= null then
                begin
-                  Tree.Action (Node) (Parser.User_Data.all, Tree, Node, 
Tree_Children);
+                  Post_Parse_Action (Parser.User_Data.all, Tree, Node);
                exception
                when E : others =>
-                  declare
-                     Line   : Line_Number_Type  := Line_Number_Type'First;
-                     Column : Ada.Text_IO.Count := Ada.Text_IO.Count'First;
-                  begin
-                     if Tree.First_Shared_Terminal (Node) = 
Invalid_Token_Index then
-                        declare
-                           Byte_Region : Buffer_Region renames 
Tree.Byte_Region (Node);
-                        begin
-                           if Byte_Region /= Null_Buffer_Region then
-                              Column := Ada.Text_IO.Count (Byte_Region.First);
-                           end if;
-                        end;
-                     else
-                        declare
-                           Token : Base_Token renames Parser.Terminals 
(Tree.First_Shared_Terminal (Node));
-                        begin
-                           Line := Token.Line;
-                           Column := Token.Column;
-                        end;
-                     end if;
-                     raise WisiToken.Parse_Error with Error_Message
-                       (Parser.Lexer.File_Name, Line, Column,
-                        "action raised exception " & 
Ada.Exceptions.Exception_Name (E) & ": " &
-                          Ada.Exceptions.Exception_Message (E));
-                  end;
+                  if WisiToken.Debug_Mode then
+                     Parser.Tree.Lexer.Trace.Put_Line
+                       (Ada.Exceptions.Exception_Name (E) & ": " & 
Ada.Exceptions.Exception_Message (E));
+                     Parser.Tree.Lexer.Trace.Put_Line 
(GNAT.Traceback.Symbolic.Symbolic_Traceback (E));
+                     Parser.Tree.Lexer.Trace.New_Line;
+                  end if;
+
+                  raise WisiToken.Parse_Error with Tree.Error_Message
+                    (Node,
+                     "action raised exception " & 
Ada.Exceptions.Exception_Name (E) & ": " &
+                       Ada.Exceptions.Exception_Message (E));
                end;
             end if;
          end;
       end Process_Node;
 
    begin
-      if Parser.User_Data /= null then
-         if Parser.Parsers.Count > 1 then
-            raise Syntax_Error with "ambiguous parse; can't execute actions";
-         end if;
-
-         declare
-            use Recover_Op_Arrays, Recover_Op_Array_Refs;
-            Parser_State : Parser_Lists.Parser_State renames 
Parser.Parsers.First_State_Ref;
-         begin
-            pragma Assert (Parser_State.Tree.Flushed);
+      if Parser.User_Data = null then
+         return;
+      end if;
 
-            Parser_State.Tree.Set_Parents;
+      if Parser.Tree.Root = Syntax_Trees.Invalid_Node_Access then
+         --  No code in file, and error recovery failed to insert valid code.
+         --  Or ambiguous parse; Finish_Parse not called.
+         return;
+      end if;
 
-            if Trace_Action > Outline then
-               if Trace_Action > Extra then
-                  Parser_State.Tree.Print_Tree (Descriptor, 
Parser_State.Tree.Root, Image_Augmented);
-                  Parser.Trace.New_Line;
-               end if;
-               Parser.Trace.Put_Line
-                 (Integer'Image (Parser_State.Label) & ": root node: " & 
Parser_State.Tree.Image
-                    (Parser_State.Tree.Root, Descriptor));
-            end if;
+      Parser.User_Data.Initialize_Actions (Parser.Tree);
 
-            for I in First_Index (Parser_State.Recover_Insert_Delete) ..
-              Last_Index (Parser_State.Recover_Insert_Delete)
-            loop
-               declare
-                  Op : Recover_Op renames Constant_Ref 
(Parser_State.Recover_Insert_Delete, I);
-               begin
-                  case Op.Op is
-                  when Insert =>
-                     Parser.User_Data.Insert_Token (Parser_State.Tree, 
Op.Ins_Tree_Node);
-                  when Delete =>
-                     Parser.User_Data.Delete_Token (Parser_State.Tree, 
Op.Del_Token_Index);
-                  end case;
-               end;
-            end loop;
+      Process_Node (Parser.Tree, Parser.Tree.Root);
+   exception
+   when WisiToken.Parse_Error =>
+      raise;
 
-            Parser.User_Data.Initialize_Actions (Parser_State.Tree);
-            Parser_State.Tree.Process_Tree (Process_Node'Access);
-         end;
+   when E : others =>
+      if Debug_Mode then
+         Parser.Tree.Lexer.Trace.Put_Line
+           (Ada.Exceptions.Exception_Name (E) & ": " & 
Ada.Exceptions.Exception_Message (E));
+         Parser.Tree.Lexer.Trace.Put_Line 
(GNAT.Traceback.Symbolic.Symbolic_Traceback (E));
+         Parser.Tree.Lexer.Trace.New_Line;
       end if;
+      raise;
    end Execute_Actions;
 
-   overriding function Any_Errors (Parser : in LR.Parser.Parser) return Boolean
-   is
-      use all type Ada.Containers.Count_Type;
-      Parser_State : Parser_Lists.Parser_State renames 
Parser.Parsers.First_Constant_State_Ref;
-   begin
-      pragma Assert (Parser_State.Tree.Flushed);
-      return Parser.Parsers.Count > 1 or Parser_State.Errors.Length > 0 or 
Parser.Lexer.Errors.Length > 0;
-   end Any_Errors;
-
-   overriding procedure Put_Errors (Parser : in LR.Parser.Parser)
-   is
-      use Ada.Text_IO;
-
-      Parser_State : Parser_Lists.Parser_State renames 
Parser.Parsers.First_Constant_State_Ref;
-      Descriptor   : WisiToken.Descriptor renames Parser.Trace.Descriptor.all;
-   begin
-      for Item of Parser.Lexer.Errors loop
-         Put_Line
-           (Current_Error,
-            Parser.Lexer.File_Name & ":0:0: lexer unrecognized character at" & 
Buffer_Pos'Image (Item.Char_Pos));
-      end loop;
-
-      for Item of Parser_State.Errors loop
-         case Item.Label is
-         when Action =>
-            declare
-               Index : constant Base_Token_Index := 
Parser_State.Tree.First_Shared_Terminal (Item.Error_Token);
-            begin
-               if Index = Invalid_Token_Index then
-                  --  Error_Token is virtual
-                  Put_Line
-                    (Current_Error,
-                     Error_Message
-                       (Parser.Lexer.File_Name, 1, 0,
-                        "syntax error: expecting " & Image (Item.Expecting, 
Descriptor) &
-                          ", found " & Image (Parser_State.Tree.ID 
(Item.Error_Token), Descriptor)));
-               else
-                  declare
-                     Token : Base_Token renames Parser.Terminals (Index);
-                  begin
-                     Put_Line
-                       (Current_Error,
-                        Error_Message
-                          (Parser.Lexer.File_Name, Token.Line, Token.Column,
-                           "syntax error: expecting " & Image (Item.Expecting, 
Descriptor) &
-                             ", found '" & Parser.Lexer.Buffer_Text 
(Token.Byte_Region) & "'"));
-                  end;
-               end if;
-            end;
-         when Check =>
-            Put_Line
-              (Current_Error,
-               Parser.Lexer.File_Name & ":0:0: semantic check error: " &
-                 Semantic_Checks.Image (Item.Check_Status, Descriptor));
-         when Message =>
-            Put_Line (Current_Error, -Item.Msg);
-         end case;
-
-         if Item.Recover.Stack.Depth /= 0 then
-            Put_Line (Current_Error, "   recovered: " & Image 
(Item.Recover.Ops, Descriptor));
-         end if;
-      end loop;
-   end Put_Errors;
-
 end WisiToken.Parse.LR.Parser;
diff --git a/wisitoken-parse-lr-parser.ads b/wisitoken-parse-lr-parser.ads
index 46626fdf13..1aafb8a2c6 100644
--- a/wisitoken-parse-lr-parser.ads
+++ b/wisitoken-parse-lr-parser.ads
@@ -5,7 +5,7 @@
 --  In a child package of Parser.LR partly for historical reasons,
 --  partly to allow McKenzie_Recover to be in a sibling package.
 --
---  Copyright (C) 2002, 2003, 2009, 2010, 2013-2015, 2017 - 2020 Free Software Foundation, Inc.
+--  Copyright (C) 2002, 2003, 2009, 2010, 2013-2015, 2017 - 2022 Free Software Foundation, Inc.
 --
 --  This file is part of the WisiToken package.
 --
@@ -25,19 +25,15 @@ pragma License (Modified_GPL);
 
 with WisiToken.Parse.LR.Parser_Lists;
 with WisiToken.Lexer;
-with WisiToken.Parse;
 with WisiToken.Syntax_Trees;
+limited with WisiToken.Parse.LR.McKenzie_Recover.Base;
 package WisiToken.Parse.LR.Parser is
-
-   Default_Max_Parallel : constant := 15;
+   type Parser;
 
    type Language_Fixes_Access is access procedure
-     (Trace             : in out WisiToken.Trace'Class;
-      Lexer             : access constant WisiToken.Lexer.Instance'Class;
-      Parser_Label      : in     Natural;
-      Parse_Table       : in     WisiToken.Parse.LR.Parse_Table;
-      Terminals         : in     Base_Token_Arrays.Vector;
-      Tree              : in     Syntax_Trees.Tree;
+     (Super             : in out Base.Supervisor;
+      Shared_Parser     : in out Parser;
+      Parser_Index      : in     SAL.Peek_Type;
       Local_Config_Heap : in out Config_Heaps.Heap_Type;
       Config            : in     Configuration);
    --  Config encountered a parse table Error action, or failed a
@@ -54,20 +50,25 @@ package WisiToken.Parse.LR.Parser is
    --  caused the error.
 
    type Language_Matching_Begin_Tokens_Access is access procedure
-     (Tokens                  : in     Token_ID_Array_1_3;
-      Config                  : in     Configuration;
-      Matching_Tokens         :    out Token_ID_Arrays.Vector;
-      Forbid_Minimal_Complete :    out Boolean);
-   --  Tokens (1) caused a parse error; Tokens (2 .. 3) are the following
-   --  tokens (Invalid_Token_ID if none). Set Matching_Tokens to a token
-   --  sequence that starts a production matching Tokens. If
-   --  Minimal_Complete would produce a bad solution at this error point,
-   --  set Forbid_Minimal_Complete True.
+     (Super                   :         in out Base.Supervisor;
+      Shared_Parser           :         in out Parser;
+      Tokens                  :         in     Token_ID_Array_1_3;
+      Config                  : aliased in     Configuration;
+      Matching_Tokens         :         in out Token_ID_Arrays.Vector;
+      Forbid_Minimal_Complete :         in out Boolean);
+   --  Tokens (1) is the current token; Tokens (2 .. 3) are the following
+   --  tokens (Invalid_Token_ID if none). Set Matching_Tokens to a
+   --  terminal token sequence that starts a production matching Tokens.
+   --  If Minimal_Complete would produce a bad solution at this error
+   --  point, set Forbid_Minimal_Complete True.
    --
    --  For example, if Tokens is a block end, return tokens that are the
    --  corresponding block begin. If the error point is inside a
    --  multi-token 'end' (ie 'end if;', or 'end <name>;'), set
    --  Forbid_Minimal_Complete True.
+   --
+   --  ada-mode uses Peek_Sequential_Start in this subprogram, so it
+   --  requires Super, Shared_Parser, aliased Config.
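
--  Illustrative sketch, not part of this commit: a simplified, self-contained
--  analogue of what such a callback computes, using a hypothetical token
--  enumeration instead of the WisiToken types.  Given the tokens at the error
--  point, it proposes the terminals that begin the matching production and
--  forbids Minimal_Complete inside a multi-token 'end'.

with Ada.Containers.Vectors;
procedure Matching_Begin_Demo is
   type Token_ID is (IF_ID, THEN_ID, LOOP_ID, END_ID, SEMICOLON_ID, IDENTIFIER_ID);
   package ID_Vectors is new Ada.Containers.Vectors (Positive, Token_ID);

   procedure Matching_Begin_Tokens
     (Current, Next_1         : in     Token_ID;
      Matching                :    out ID_Vectors.Vector;
      Forbid_Minimal_Complete :    out Boolean)
   is begin
      Matching.Clear;
      Forbid_Minimal_Complete := False;

      if Current = END_ID then
         case Next_1 is
         when IF_ID   => Matching.Append (IF_ID); Matching.Append (THEN_ID);
         when LOOP_ID => Matching.Append (LOOP_ID);
         when others  => Matching.Append (IF_ID);  --  arbitrary default for the sketch
         end case;

         --  'end if;', 'end loop;' etc. are multi-token ends.
         Forbid_Minimal_Complete := Next_1 /= SEMICOLON_ID;
      end if;
   end Matching_Begin_Tokens;

   Matching : ID_Vectors.Vector;
   Forbid   : Boolean;
begin
   Matching_Begin_Tokens (END_ID, IF_ID, Matching, Forbid);
   pragma Assert (Forbid and Matching.First_Element = IF_ID);
end Matching_Begin_Demo;
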
 
    type Language_String_ID_Set_Access is access function
      (Descriptor        : in WisiToken.Descriptor;
@@ -77,86 +78,65 @@ package WisiToken.Parse.LR.Parser is
    --  nonterminals that can contain String_Literal_ID as part of an
    --  expression. Used in placing a missing string quote.
 
-   type Post_Recover_Access is access procedure;
-
    type Parser is new WisiToken.Parse.Base_Parser with record
       Table                          : Parse_Table_Ptr;
       Language_Fixes                 : Language_Fixes_Access;
       Language_Matching_Begin_Tokens : Language_Matching_Begin_Tokens_Access;
       Language_String_ID_Set         : Language_String_ID_Set_Access;
 
-      String_Quote_Checked : Line_Number_Type := Invalid_Line_Number;
+      String_Quote_Checked : Base_Line_Number_Type := Invalid_Line_Number;
       --  Max line checked for missing string quote.
 
-      Post_Recover : Post_Recover_Access;
-      --  Gather data for tests.
-
-      Shared_Tree : aliased Syntax_Trees.Base_Tree;
-      --  Each parser (normal and recover) has its own branched syntax tree,
-      --  all branched from this tree. Terminals are added to the tree when
-      --  they become the current token.
-      --
-      --  It is never the case that terminals are added to this shared tree
-      --  when there is more than one task active, so we don't need a
-      --  protected tree.
-      --
-      --  See WisiToken.LR.Parser_Lists Parser_State for more discussion of
-      --  Shared_Tree.
+      Resume_Active : Boolean := False;
+      Min_Sequential_Index : Syntax_Trees.Sequential_Index := 
Syntax_Trees.Sequential_Index'Last;
+      Max_Sequential_Index : Syntax_Trees.Sequential_Index := 
Syntax_Trees.Sequential_Index'First;
 
       Parsers : aliased Parser_Lists.List;
 
-      Max_Parallel            : SAL.Base_Peek_Type;
-      Terminate_Same_State    : Boolean;
-      Enable_McKenzie_Recover : Boolean;
-      Recover_Log_File        : Ada.Text_IO.File_Type;
-      Partial_Parse_Active    : Boolean := False;
-      --  Partial_Parse_Active is only used in recover log messages.
+      Partial_Parse_Active    : access Boolean;
+      Partial_Parse_Byte_Goal : access WisiToken.Buffer_Pos;
+      --  Used by In_Parse_Actions to terminate Partial_Parse.
    end record;
 
-   overriding procedure Finalize (Object : in out LR.Parser.Parser);
-   --  Deep free Object.Table.
+   --  It is tempting to declare Finalize here, to free Parser.Table. But
+   --  Wisi.Parse_Context reuses the table between parser instances, so
+   --  we can't do that. Other applications must explicitly free
+   --  Parser.Table if they care.
 
    procedure New_Parser
-     (Parser                         :    out          LR.Parser.Parser;
-      Trace                          : not null access WisiToken.Trace'Class;
-      Lexer                          : in              WisiToken.Lexer.Handle;
-      Table                          : in              Parse_Table_Ptr;
-      Language_Fixes                 : in              Language_Fixes_Access;
-      Language_Matching_Begin_Tokens : in              
Language_Matching_Begin_Tokens_Access;
-      Language_String_ID_Set         : in              
Language_String_ID_Set_Access;
-      User_Data                      : in              
WisiToken.Syntax_Trees.User_Data_Access;
-      Max_Parallel                   : in              SAL.Base_Peek_Type := 
Default_Max_Parallel;
-      Terminate_Same_State           : in              Boolean            := 
True);
-
-   overriding procedure Parse (Shared_Parser : aliased in out 
LR.Parser.Parser);
-   --  Attempt a parse. Calls Parser.Lexer.Reset, runs lexer to end of
-   --  input setting Shared_Parser.Terminals, then parses tokens.
-   --
-   --  If an error is encountered, Parser.Lexer_Errors and
-   --  Parsers(*).Errors contain information about the errors. If a
-   --  recover strategy succeeds, no exception is raised. If recover does
-   --  not succeed, raises Syntax_Error.
-   --
-   --  For errors where no recovery is possible, raises Parse_Error with
-   --  an appropriate error message.
-
-   overriding function Tree (Shared_Parser : in Parser) return 
Syntax_Trees.Tree;
-   overriding function Tree_Var_Ref (Shared_Parser : aliased in out Parser) 
return Syntax_Trees.Tree_Variable_Reference;
-   --  If there is one parser in Parsers, return its tree. Otherwise,
-   --  raise Parse_Error for an ambiguous parse.
+     (Parser                         :    out LR.Parser.Parser;
+      Lexer                          : in     WisiToken.Lexer.Handle;
+      Table                          : in     Parse_Table_Ptr;
+      Productions                    : in     
Syntax_Trees.Production_Info_Trees.Vector;
+      Language_Fixes                 : in     Language_Fixes_Access;
+      Language_Matching_Begin_Tokens : in     
Language_Matching_Begin_Tokens_Access;
+      Language_String_ID_Set         : in     Language_String_ID_Set_Access;
+      User_Data                      : in     
WisiToken.Syntax_Trees.User_Data_Access);
+
+   procedure Edit_Tree
+     (Parser : in out LR.Parser.Parser;
+      Edits  : in     KMN_Lists.List)
+   with Pre => Parser.Tree.Editable,
+     Post => Parser.Tree.Stream_Count = 1;
+   --  Assumes Parser.Lexer.Source has changed in a way reflected in
+   --  Edits. Uses Edits to direct editing Parser.Tree to reflect lexing
+   --  the changed source, in preparation for Incremental_Parse; result
+   --  is in Tree.Shared_Stream.
+
+   overriding procedure Parse
+     (Shared_Parser    : in out LR.Parser.Parser;
+      Recover_Log_File : in     Ada.Text_IO.File_Type;
+      Edits            : in     KMN_Lists.List := KMN_Lists.Empty_List;
+      Pre_Edited       : in     Boolean        := False);
 
    overriding procedure Execute_Actions
-     (Parser          : in out LR.Parser.Parser;
-      Image_Augmented : in     Syntax_Trees.Image_Augmented := null);
-   --  Call User_Data.Delete_Token on any tokens deleted by error
-   --  recovery, then User_Data.Reduce and the grammar semantic actions
-   --  on all nonterms in the syntax tree.
-
-   overriding function Any_Errors (Parser : in LR.Parser.Parser) return 
Boolean;
-   --  Return True if any errors where encountered, recovered or not.
-
-   overriding procedure Put_Errors (Parser : in LR.Parser.Parser);
-   --  Put user-friendly error messages from the parse to
-   --  Ada.Text_IO.Current_Error.
+     (Parser              : in out LR.Parser.Parser;
+      Action_Region_Bytes : in     WisiToken.Buffer_Region);
+   --  Call Parser.User_Data.Insert_Token, Parser.User_Data.Delete_Token
+   --  on any tokens inserted/deleted by error recovery. Update
+   --  Parser.Line_Begin_Tokens to reflect error recovery. Then call
+   --  User_Data.Reduce and the grammar post parse actions on all
+   --  nonterms in the syntax tree that overlap Action_Region_Bytes, by
+   --  traversing the tree in depth-first order.
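
--  Illustrative sketch, not part of this commit: a self-contained version of
--  the traversal just described, with a hypothetical node type instead of the
--  WisiToken syntax tree.  Children are processed first, then the node's
--  action; a subtree that does not overlap the requested byte region is
--  skipped entirely.

procedure Region_Walk_Demo is
   type Node;
   type Node_Access is access Node;
   type Node_Array is array (Positive range <>) of Node_Access;
   type Node_Array_Access is access Node_Array;

   type Node is record
      First, Last : Natural;            --  byte region covered by the node
      Children    : Node_Array_Access;  --  null for a leaf
   end record;

   function Overlaps (A_First, A_Last, B_First, B_Last : Natural) return Boolean
   is (A_First <= B_Last and B_First <= A_Last);

   procedure Process (N : not null Node_Access; Region_First, Region_Last : Natural) is
   begin
      if not Overlaps (N.First, N.Last, Region_First, Region_Last) then
         return;  --  outside the action region; skip the whole subtree
      end if;

      if N.Children /= null then
         for Child of N.Children.all loop
            if Child /= null then  --  children can be null in an edited tree
               Process (Child, Region_First, Region_Last);
            end if;
         end loop;
      end if;

      --  the post-parse action for N would run here, after its children
   end Process;

   Leaf : constant Node_Access := new Node'(First => 5, Last => 9, Children => null);
   Root : constant Node_Access := new Node'
     (First => 1, Last => 20, Children => new Node_Array'(1 => Leaf));
begin
   Process (Root, Region_First => 4, Region_Last => 10);
end Region_Walk_Demo;
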
 
 end WisiToken.Parse.LR.Parser;
diff --git a/wisitoken-parse-lr-parser_lists.adb b/wisitoken-parse-lr-parser_lists.adb
index 0b93d40c23..45fe31d0f8 100644
--- a/wisitoken-parse-lr-parser_lists.adb
+++ b/wisitoken-parse-lr-parser_lists.adb
@@ -2,7 +2,7 @@
 --
 --  see spec
 --
---  Copyright (C) 2014 - 2021  All Rights Reserved.
+--  Copyright (C) 2014 - 2022  All Rights Reserved.
 --
 --  The WisiToken package is free software; you can redistribute it
 --  and/or modify it under terms of the GNU General Public License as
@@ -18,60 +18,351 @@
 
 pragma License (Modified_GPL);
 
+with SAL.Gen_Definite_Doubly_Linked_Lists.Gen_Image_Aux;
 package body WisiToken.Parse.LR.Parser_Lists is
 
+   ----------
+   --  Spec public subprogams, declaration order.
+
    function Parser_Stack_Image
-     (Stack      : in Parser_Stacks.Stack;
-      Descriptor : in WisiToken.Descriptor;
-      Tree       : in Syntax_Trees.Tree;
-      Depth      : in SAL.Base_Peek_Type := 0)
+     (Stack : in Syntax_Trees.Stream_ID;
+      Tree  : in Syntax_Trees.Tree;
+      Depth : in SAL.Base_Peek_Type := 0)
      return String
    is
       use Ada.Strings.Unbounded;
 
+      Stack_Depth : constant SAL.Base_Peek_Type := Tree.Stack_Depth (Stack);
+
       Last : constant SAL.Base_Peek_Type :=
         (if Depth = 0
-         then Stack.Depth
-         else SAL.Base_Peek_Type'Min (Depth, Stack.Depth));
+         then Stack_Depth
+         else SAL.Base_Peek_Type'Min (Depth, Stack_Depth));
 
       Result : Unbounded_String := +"(";
    begin
       for I in 1 .. Last loop
          declare
-            Item : Parser_Stack_Item renames Stack.Peek (I);
+            Item : constant Syntax_Trees.Stream_Index := Tree.Peek (Stack, I);
+            State : constant Unknown_State_Index := Tree.State (Stack, Item);
          begin
             Result := Result &
-              ((if Item.State = Unknown_State then " " else Trimmed_Image 
(Item.State)) & " :" &
-                 (if I = Stack.Depth
-                  then ""
-                  else
-                    (if Item.Token = Invalid_Node_Index -- From recover 
fast-forward
-                     then ""
-                     else Tree.Image (Item.Token, Descriptor) & ", ")));
+              (if State = Unknown_State or Trace_Parse_No_State_Numbers
+               then " - : " else Trimmed_Image (State) & " : ") &
+              (if I = Stack_Depth
+               then ""
+               else Tree.Image (Tree.Get_Node (Stack, Item), 
Terminal_Node_Numbers => True) & ", ");
          end;
       end loop;
       return To_String (Result & ")");
    end Parser_Stack_Image;
 
-   function New_List (Shared_Tree : in Syntax_Trees.Base_Tree_Access) return 
List
+   function Recover_Image (Item : in Syntax_Trees.Valid_Node_Access; Tree : in 
Syntax_Trees.Tree) return String
+   is begin
+      return Recover_Image (Tree.Error_List (Item), Tree);
+   end Recover_Image;
+
+   function Recover_Image
+     (Parser_State : in out Parser_Lists.Parser_State;
+      Tree         : in     Syntax_Trees.Tree;
+      Current_Only : in     Boolean := False)
+     return String
+   is
+      use Ada.Strings.Unbounded;
+      use Syntax_Trees;
+
+      function Recover_Image is new Valid_Node_Access_Lists.Gen_Image_Aux 
(Syntax_Trees.Tree, Recover_Image);
+
+   begin
+      if not Current_Only then
+         if Parser_State.Current_Recover_Op = No_Insert_Delete then
+            return Recover_Image (Parser_State.Recover_Insert_Delete, Tree);
+         else
+            declare
+               List : Valid_Node_Access_Lists.List := 
Parser_State.Recover_Insert_Delete;
+            begin
+               List.Append (Error_Node (Parser_State.Current_Error_Ref 
(Tree)));
+               return Recover_Image (List, Tree);
+            end;
+         end if;
+      end if;
+
+      declare
+         Result : Unbounded_String := +"(";
+         Ops : Recover_Op_Nodes_Arrays.Vector renames 
Recover_Op_Array_Const_Ref
+           (Error (Parser_State.Current_Error_Ref (Tree)));
+         Need_Comma : Boolean := False;
+      begin
+         if Parser_State.Current_Recover_Op /= No_Insert_Delete then
+            for I in Parser_State.Current_Recover_Op .. Ops.Last_Index loop
+               if Need_Comma then
+                  Append (Result, ", ");
+               else
+                  Need_Comma := True;
+               end if;
+               Append (Result, Image (Ops (I), Tree));
+            end loop;
+         end if;
+         return -Result & ")";
+      end;
+   end Recover_Image;
+
+   function Current_Recover_Op (Parser_State : in Parser_Lists.Parser_State) 
return SAL.Base_Peek_Type
+   is begin
+      return Parser_State.Current_Recover_Op;
+   end Current_Recover_Op;
+
+   procedure Set_Current_Error_Features
+     (Parser_State : in out Parser_Lists.Parser_State;
+      Tree         : in     Syntax_Trees.Tree)
+   is begin
+      Parser_State.Current_Error_Features := (others => <>);
+
+      declare
+         Error_Ref  : constant Syntax_Trees.Stream_Error_Ref  := 
Parser_State.Current_Error_Ref (Tree);
+         Error_Node : constant Syntax_Trees.Valid_Node_Access := 
Tree.Error_Node (Error_Ref);
+         Error      : Syntax_Trees.Error_Data'Class renames Syntax_Trees.Error 
(Error_Ref);
+      begin
+         Parser_State.Current_Error_Features :=
+           (Label => Tree.Label (Error_Node),
+            Seq_Index =>
+              (case Tree.Label (Error_Node) is
+               when Syntax_Trees.Terminal_Label => Tree.Get_Sequential_Index 
(Error_Node),
+               when Syntax_Trees.Nonterm => Tree.Get_Sequential_Index 
(Tree.First_Terminal (Error_Node))),
+            Terminal_Predicate =>
+              (if Error in Parse_Error
+               then Error_Pred_Parse'Access
+               elsif Error in In_Parse_Action_Error
+               then null
+               elsif Error in Error_Message
+               then Error_Pred_Message'Access
+              else raise SAL.Programmer_Error),
+            Deleted                      => False,
+            Prev_Term_Seq_Index          => 
Syntax_Trees.Invalid_Sequential_Index);
+      end;
+   end Set_Current_Error_Features;
+
+   procedure Clear_Current_Error_Features
+     (Parser_State : in out Parser_Lists.Parser_State)
+   is begin
+      Parser_State.Current_Error_Features := (others => <>);
+   end Clear_Current_Error_Features;
+
+   function Current_Error_Ref
+     (Parser_State : in Parser_Lists.Parser_State;
+      Tree         : in Syntax_Trees.Tree)
+     return Syntax_Trees.Stream_Error_Ref
+   is begin
+      return Tree.Current_Error_Ref
+        (Parser_State.Stream,
+         Terminal_Predicate  =>
+           (if Parser_State.Current_Error_Features.Seq_Index = 
Syntax_Trees.Invalid_Sequential_Index
+            then Error_Pred_Parse_Message'Access
+            else Parser_State.Current_Error_Features.Terminal_Predicate),
+         Nonterm_Predicate   => Error_Pred_In_Parse_Action'Access,
+         Error_Node_Features => Parser_State.Current_Error_Features);
+   end Current_Error_Ref;
+
+   procedure Do_Delete
+     (Parser_State : in out Parser_Lists.Parser_State;
+      Tree         : in out Syntax_Trees.Tree;
+      Op           : in out Delete_Op_Nodes;
+      User_Data    : in     Syntax_Trees.User_Data_Access_Constant)
+   is
+      use Syntax_Trees;
+      Error_Node : constant Valid_Node_Access := Syntax_Trees.Error_Node 
(Parser_State.Current_Error_Ref (Tree));
+
+      --  We don't want a deleted node as Op.Del_After_Node; for example,
+      --  ada_mode-recover_extra_end_loop.adb deletes "end loop ;". So we
+      --  don't use 'Tree.Prev_Terminal (Terminal, Parser_State.Stream)';
+      --  the previous terminal is on the parse stack instead.
+      Prev_Terminal : Stream_Node_Parents := Tree.To_Stream_Node_Parents
+        (Tree.To_Rooted_Ref (Parser_State.Stream, Tree.Peek 
(Parser_State.Stream)));
+   begin
+      Tree.Last_Terminal (Prev_Terminal, Parser_State.Stream, Preceding => 
True);
+
+      if Tree.Label (Prev_Terminal.Ref.Node) /= Source_Terminal then
+         Tree.Prev_Source_Terminal
+           (Prev_Terminal, Parser_State.Stream, Trailing_Non_Grammar => False);
+      end if;
+
+      loop
+         --  Delete empty nonterms, break down non-empty nonterms, then
+         --  delete the next terminal.
+         declare
+            Current_Token : Stream_Node_Ref := Tree.Current_Token 
(Parser_State.Stream);
+         begin
+            case Tree.Label (Current_Token.Node) is
+            when Terminal_Label =>
+               pragma Assert (Op.Del_Index = Tree.Get_Sequential_Index 
(Current_Token.Node));
+
+               if Current_Token.Node = Error_Node then
+                  Parser_State.Current_Error_Features.Deleted := True;
+                  Parser_State.Current_Error_Features.Prev_Term_Seq_Index := 
Tree.Get_Sequential_Index
+                    (Prev_Terminal.Ref.Node);
+               end if;
+
+               Tree.Add_Deleted
+                 (Deleted_Node  => Current_Token.Node,
+                  Prev_Terminal => Prev_Terminal,
+                  User_Data     => User_Data);
+
+               Op.Del_Node := Current_Token.Node;
+
+               Current_Token := Invalid_Stream_Node_Ref; -- allow delete 
Current_Token.Element
+               Tree.Delete_Current_Token (Parser_State.Stream);
+               exit;
+
+            when Nonterm =>
+               if Tree.Is_Empty_Nonterm (Current_Token.Node) then
+                  --  Delete an empty nonterm preceding the target terminal.
+                  --  test_mckenzie_recover.adb Missing_Name_2
+
+                  Current_Token := Invalid_Stream_Node_Ref; -- allow delete 
Current_Token.Element
+
+                  Tree.Delete_Current_Token (Parser_State.Stream);
+               else
+                  --  Error recover only supports Delete for terminals.
+                  --  test_mckenzie_recover.adb String_Quote_1 case 3
+                  if Current_Token.Stream /= Parser_State.Stream then
+                     Tree.Move_Shared_To_Input (Parser_State.Stream);
+                     Current_Token := Tree.Current_Token (Parser_State.Stream);
+                  end if;
+
+                  Tree.Left_Breakdown (Current_Token, User_Data);
+               end if;
+            end case;
+         end;
+      end loop;
+   end Do_Delete;
+
+   procedure Undo_Reduce
+     (Parser_State : in out Parser_Lists.Parser_State;
+      Tree         : in out Syntax_Trees.Tree;
+      Table        : in     Parse_Table;
+      User_Data    : in     Syntax_Trees.User_Data_Access_Constant)
+   is
+      use Syntax_Trees;
+   begin
+      if Tree.Has_Error (Tree.Get_Node (Parser_State.Stream, Tree.Peek 
(Parser_State.Stream))) then
+         --  Move the errors to the first terminal, so they are not lost.
+         declare
+            Current_Error_Ref : constant Stream_Error_Ref := 
Parser_State.Current_Error_Ref (Tree);
+            Current_Error_Node : constant Valid_Node_Access := 
Syntax_Trees.Error_Node
+              (Current_Error_Ref);
+
+            Ref : Stream_Node_Parents := Tree.To_Stream_Node_Parents
+              (Tree.To_Rooted_Ref (Parser_State.Stream, Tree.Peek 
(Parser_State.Stream)));
+
+            Nonterm    : constant Valid_Node_Access := Ref.Ref.Node;
+            New_Errors : Error_Data_Lists.List;
+         begin
+            for Err of Tree.Error_List (Ref.Ref.Node) loop
+               New_Errors.Append (To_Message (Err, Tree, Ref.Ref.Node));
+            end loop;
+
+            Tree.First_Terminal (Ref, Following => False);
+            if Ref.Ref.Node = Invalid_Node_Access then
+               --  So far, we never put an error on an empty nonterm; we
+               --  just delete it.
+               raise SAL.Programmer_Error with
+                 "undo_reduce error on empty nonterm";
+            end if;
+            Tree.Add_Errors (Ref, New_Errors, User_Data);
+
+            if Nonterm = Current_Error_Node then
+               Parser_State.Current_Error_Features.Label              := 
Tree.Label (Ref.Ref.Node);
+               Parser_State.Current_Error_Features.Seq_Index          := 
Tree.Get_Sequential_Index (Ref.Ref.Node);
+               Parser_State.Current_Error_Features.Terminal_Predicate := 
Error_Pred_Message'Access;
+            end if;
+         end;
+      end if;
+
+      declare
+         Nonterm    : constant Node_Access := Tree.Pop (Parser_State.Stream);
+         Prev_State : State_Index          := Tree.State (Parser_State.Stream);
+      begin
+         for Child of Tree.Children (Nonterm) loop
+            Tree.Clear_Parent (Child, Clear_Children => Parser_State.Stream = 
Tree.Shared_Stream);
+
+            if Is_Terminal (Tree.ID (Child), Tree.Lexer.Descriptor.all) then
+               Prev_State := Shift_State (Action_For (Table, Prev_State, 
Tree.ID (Child)));
+            else
+               Prev_State := Goto_For (Table, Prev_State, Tree.ID (Child));
+            end if;
+            Tree.Push (Parser_State.Stream, Child, Prev_State);
+         end loop;
+      end;
+   end Undo_Reduce;
+
+   procedure First_Recover_Op (Parser_State : in out Parser_Lists.Parser_State)
+   is begin
+      Parser_State.Current_Recover_Op := 1;
+   end First_Recover_Op;
+
+   procedure Next_Recover_Op
+     (Parser_State : in out Parser_Lists.Parser_State;
+      Tree         : in     Syntax_Trees.Tree)
+   is
+      Recover_Ops : Recover_Op_Nodes_Arrays.Vector renames 
Recover_Op_Array_Const_Ref
+        (Syntax_Trees.Error (Parser_State.Current_Error_Ref (Tree)));
+   begin
+      if Parser_State.Current_Recover_Op = Recover_Ops.Last_Index then
+         Parser_State.Current_Recover_Op := No_Insert_Delete;
+      else
+         Parser_State.Current_Recover_Op := @ + 1;
+      end if;
+   end Next_Recover_Op;
+
+   procedure Update_Error
+     (Parser_State : in out Parser_Lists.Parser_State;
+      Tree         : in out Syntax_Trees.Tree;
+      Data         : in     Syntax_Trees.Error_Data'Class;
+      User_Data    : in     Syntax_Trees.User_Data_Access_Constant)
+   is begin
+      Tree.Update_Error (Parser_State.Stream, Parser_State.Current_Error_Ref 
(Tree), Data, User_Data);
+
+      declare
+         Err_Ref : constant Syntax_Trees.Stream_Error_Ref := 
Parser_State.Current_Error_Ref (Tree);
+         Recover_Ops : Recover_Op_Nodes_Arrays.Vector renames 
Recover_Op_Array_Const_Ref
+           (Syntax_Trees.Error (Err_Ref));
+      begin
+         if Parser_State.Current_Recover_Op = Recover_Ops.Last_Index + 1 then
+            --  This happens in recover, when applying the ops to the
+            --  parser completes all insert/delete ops.
+            Parser_State.Current_Recover_Op := No_Insert_Delete;
+         end if;
+
+         if Parser_State.Current_Recover_Op = No_Insert_Delete then
+            Parser_State.Recover_Insert_Delete.Append (Syntax_Trees.Error_Node 
(Err_Ref));
+         end if;
+      end;
+   end Update_Error;
+
+   function Peek_Current_Sequential_Terminal
+     (Parser_State : in Parser_Lists.Parser_State;
+      Tree         : in Syntax_Trees.Tree)
+     return Syntax_Trees.Terminal_Ref
    is
-      First_Parser_Label : constant := 0;
-      Parser : Parser_State := (Label => First_Parser_Label, others => <>);
+      use Syntax_Trees;
+      Result : Stream_Node_Parents := Tree.To_Stream_Node_Parents
+        (Tree.Current_Token (Parser_State.Stream));
    begin
-      Parser.Tree.Initialize (Shared_Tree, Flush => True);
+      Tree.First_Sequential_Terminal (Result, Following => True);
+      return Result.Ref;
+   end Peek_Current_Sequential_Terminal;
 
+   function New_List (Tree : in out Syntax_Trees.Tree) return List
+   is begin
       return Result : List
       do
-         Result.Parser_Label := First_Parser_Label;
-
-         Result.Elements.Append (Parser);
+         Result.Elements.Append
+           ((Stream => Tree.New_Stream (Syntax_Trees.Invalid_Stream_ID),
+             others => <>));
       end return;
    end New_List;
 
-   function Last_Label (List : in Parser_Lists.List) return Natural
+   procedure Clear (List : in out Parser_Lists.List)
    is begin
-      return List.Parser_Label;
-   end Last_Label;
+      List.Elements.Finalize;
+   end Clear;
 
    function Count (List : in Parser_Lists.List) return SAL.Base_Peek_Type
    is begin
@@ -90,53 +381,15 @@ package body WisiToken.Parse.LR.Parser_Lists is
 
    function Is_Done (Cursor : in Parser_Lists.Cursor) return Boolean
    is
-      use Parser_State_Lists;
+      use all type Parser_State_Lists.Cursor;
    begin
-      return Cursor.Ptr = No_Element;
+      return Cursor.Ptr = Parser_State_Lists.No_Element;
    end Is_Done;
 
-   function Label (Cursor : in Parser_Lists.Cursor) return Natural
+   function Stream (Cursor : in Parser_Lists.Cursor) return 
Syntax_Trees.Stream_ID
    is begin
-      return Parser_State_Lists.Constant_Ref (Cursor.Ptr).Label;
-   end Label;
-
-   function Total_Recover_Cost (Cursor : in Parser_Lists.Cursor) return Integer
-   is
-      Result : Integer := 0;
-   begin
-      for Error of Parser_State_Lists.Constant_Ref (Cursor.Ptr).Errors loop
-         Result := Error.Recover.Cost;
-      end loop;
-      return Result;
-   end Total_Recover_Cost;
-
-   function Max_Recover_Ops_Length (Cursor : in Parser_Lists.Cursor) return 
Ada.Containers.Count_Type
-   is
-      use Ada.Containers;
-      use Config_Op_Arrays;
-      Result : Count_Type := 0;
-      Errors : Parse_Error_Lists.List renames Parser_State_Lists.Constant_Ref 
(Cursor.Ptr).Errors;
-   begin
-      for Error of Errors loop
-         if Length (Error.Recover.Ops) > Result then
-            Result := Length (Error.Recover.Ops);
-         end if;
-      end loop;
-      return Result;
-   end Max_Recover_Ops_Length;
-
-   function Min_Recover_Cost (Cursor : in Parser_Lists.Cursor) return Integer
-   is
-      Result : Integer := Integer'Last;
-      Errors : Parse_Error_Lists.List renames Parser_State_Lists.Constant_Ref 
(Cursor.Ptr).Errors;
-   begin
-      for Error of Errors loop
-         if Error.Recover.Cost < Result then
-            Result := Error.Recover.Cost;
-         end if;
-      end loop;
-      return Result;
-   end Min_Recover_Cost;
+      return Parser_State_Lists.Constant_Ref (Cursor.Ptr).Stream;
+   end Stream;
 
    procedure Set_Verb (Cursor : in Parser_Lists.Cursor; Verb : in 
All_Parse_Action_Verbs)
    is begin
@@ -149,14 +402,12 @@ package body WisiToken.Parse.LR.Parser_Lists is
    end Verb;
 
    procedure Terminate_Parser
-     (Parsers   : in out List;
-      Current   : in out Cursor'Class;
-      Message   : in     String;
-      Trace     : in out WisiToken.Trace'Class;
-      Terminals : in     Base_Token_Arrays.Vector)
+     (Parsers : in out List;
+      Current : in out Cursor'Class;
+      Tree    : in out Syntax_Trees.Tree;
+      Message : in     String;
+      Trace   : in out WisiToken.Trace'Class)
    is
-      State : Parser_State renames Parser_State_Lists.Constant_Ref 
(Current.Ptr).Element.all;
-
       procedure Free (Cursor : in out Parser_Lists.Cursor'Class)
       is
          Temp : Parser_State_Lists.Cursor := Cursor.Ptr;
@@ -165,57 +416,88 @@ package body WisiToken.Parse.LR.Parser_Lists is
          Parsers.Elements.Delete (Temp);
       end Free;
    begin
-      if Trace_Parse > Outline then
-         Trace.Put_Line
-           (Integer'Image (Current.Label) & ": terminate (" &
-              Trimmed_Image (Integer (Parsers.Count) - 1) & " active)" &
-              ": " & Message & Image
-                (State.Tree.First_Shared_Terminal (State.Current_Token),
-                 Terminals, Trace.Descriptor.all));
-      end if;
+      declare
+         State : Parser_State renames Parser_State_Lists.Variable_Ref 
(Current.Ptr);
+      begin
+         if Trace_Parse > Outline then
+            Trace.Put_Line
+              (" " & Tree.Trimmed_Image (Current.Stream) & ": terminate (" &
+                 Trimmed_Image (Integer (Parsers.Count) - 1) & " active)" &
+                 ": " & Message & " " & Tree.Image (Tree.Current_Token 
(State.Stream), Terminal_Node_Numbers => True));
+         end if;
 
+         Tree.Delete_Stream (State.Stream);
+         State.Clear_Stream;
+      end;
       Free (Current);
-
-      if Parsers.Count = 1 then
-         Parsers.First.State_Ref.Tree.Flush;
-      end if;
    end Terminate_Parser;
 
    procedure Duplicate_State
-     (Parsers   : in out List;
-      Current   : in out Cursor'Class;
-      Trace     : in out WisiToken.Trace'Class;
-      Terminals : in     Base_Token_Arrays.Vector)
+     (Parsers : in out List;
+      Current : in out Cursor'Class;
+      Tree    : in out Syntax_Trees.Tree;
+      Trace   : in out WisiToken.Trace'Class)
    is
       use all type Ada.Containers.Count_Type;
 
       function Compare
-        (Stack_1 : in Parser_Stacks.Stack;
-         Tree_1  : in Syntax_Trees.Tree;
-         Stack_2 : in Parser_Stacks.Stack;
-         Tree_2  : in Syntax_Trees.Tree)
+        (Stack_1 : in Syntax_Trees.Stream_ID;
+         Stack_2 : in Syntax_Trees.Stream_ID)
         return Boolean
+      --  True if equal
       is
+         function Same_Last_Terminal return Boolean
+         is
+            use Syntax_Trees;
+            Ref_1 : Stream_Node_Parents := Tree.To_Stream_Node_Parents
+              (Tree.To_Rooted_Ref (Stack_1, Tree.Peek (Stack_1)));
+            Ref_2 : Stream_Node_Parents := Tree.To_Stream_Node_Parents
+              (Tree.To_Rooted_Ref (Stack_2, Tree.Peek (Stack_2)));
+         begin
+            Tree.Last_Terminal (Ref_1, Stack_1, Preceding => True);
+            loop
+               exit when Tree.Label (Ref_1.Ref.Node) = Source_Terminal;
+               Tree.Prev_Terminal (Ref_1, Stack_1, Preceding => True);
+            end loop;
+
+            Tree.Last_Terminal (Ref_2, Stack_2, Preceding => True);
+            loop
+               exit when Tree.Label (Ref_2.Ref.Node) = Source_Terminal;
+               Tree.Prev_Terminal (Ref_2, Stack_2, Preceding => True);
+            end loop;
+            return Tree.Byte_Region (Ref_1.Ref.Node, Trailing_Non_Grammar => 
False) =
+              Tree.Byte_Region (Ref_2.Ref.Node, Trailing_Non_Grammar => False);
+         end Same_Last_Terminal;
+
       begin
-         if Stack_1.Depth /= Stack_2.Depth then
+         if Tree.Stack_Depth (Stack_1) /= Tree.Stack_Depth (Stack_2) then
             return False;
+
+         elsif not Same_Last_Terminal then
+            --  ada_mode-bad_duplicate_state.adb requires this check;
+            --  otherwise it reports a syntax_error on 'renames'.
+            return False;
+
          else
-            for I in reverse 1 .. Stack_1.Depth - 1 loop
+            for I in reverse 1 .. Tree.Stack_Depth (Stack_1) - 1 loop
                --  Assume they differ near the top; no point in comparing bottom
-               --  item. The syntax trees will differ even if the tokens on the stack
-               --  are the same, so compare the tokens.
-               declare
-                  Item_1 : Parser_Stack_Item renames Stack_1 (I);
-                  Item_2 : Parser_Stack_Item renames Stack_2 (I);
-               begin
-                  if Item_1.State /= Item_2.State then
-                     return False;
-                  else
-                     if not Syntax_Trees.Same_Token (Tree_1, Item_1.Token, 
Tree_2, Item_2.Token) then
+               --  item.
+               if Tree.State (Stack_1) /= Tree.State (Stack_2) then
+                  return False;
+               else
+                  declare
+                     use Syntax_Trees;
+                     Node_1 : constant Valid_Node_Access := Tree.Get_Node 
(Stack_1, Tree.Peek (Stack_1, I));
+                     Node_2 : constant Valid_Node_Access := Tree.Get_Node 
(Stack_2, Tree.Peek (Stack_2, I));
+                  begin
+                     --  We can't use Node_1 = Node_2, because the nodes were
+                     --  created independently by separate parsers. For LR
+                     --  parsing, the only node attribute that matters is ID.
+                     if Tree.ID (Node_1) /= Tree.ID (Node_2) then
                         return False;
                      end if;
-                  end if;
-               end;
+                  end;
+               end if;
             end loop;
             return True;
          end if;
@@ -228,10 +510,9 @@ package body WisiToken.Parse.LR.Parser_Lists is
          declare
             Other_Parser : Parser_State renames Other.State_Ref;
          begin
-            if Other.Label /= Current.Label and then
+            if Other.Stream /= Current.Stream and then
               Other.Verb /= Error and then
-              Compare
-                (Other_Parser.Stack, Other_Parser.Tree, 
Current.State_Ref.Stack, Current.State_Ref.Tree)
+              Compare (Other_Parser.Stream, Current.Stream)
             then
                exit;
             end if;
@@ -242,92 +523,96 @@ package body WisiToken.Parse.LR.Parser_Lists is
       if not Other.Is_Done then
          --  Both have the same number of errors, otherwise one would have been
          --  terminated earlier.
-         if Other.Total_Recover_Cost = Current.Total_Recover_Cost then
-            if Other.Max_Recover_Ops_Length = Current.Max_Recover_Ops_Length 
then
-               Parsers.Terminate_Parser (Other, "duplicate state: random", 
Trace, Terminals);
+         declare
+            use Ada.Strings.Unbounded;
+            One_Stream : constant Syntax_Trees.Stream_ID := Current.Stream;
+            Another_Stream : constant Syntax_Trees.Stream_ID := Other.Stream;
+            Msg : Unbounded_String;
+         begin
+            if Other.State_Ref.Total_Recover_Cost = 
Current.State_Ref.Total_Recover_Cost then
+               if Other.State_Ref.Max_Recover_Ops_Length = 
Current.State_Ref.Max_Recover_Ops_Length then
+                  Append (Msg, ": random");
+               else
+                  Append (Msg, ": min ops length");
+                  --  Keep the minimum ops length
+                  if Other.State_Ref.Max_Recover_Ops_Length > 
Current.State_Ref.Max_Recover_Ops_Length then
+                     null;
+                  else
+                     Other := Cursor (Current);
+                     Current.Next;
+                  end if;
+               end if;
             else
-               --  Keep the minimum ops length
-               if Other.Max_Recover_Ops_Length > 
Current.Max_Recover_Ops_Length then
+               Append (Msg, ": cost");
+               if Other.State_Ref.Total_Recover_Cost > 
Current.State_Ref.Total_Recover_Cost then
                   null;
                else
                   Other := Cursor (Current);
                   Current.Next;
                end if;
-               Parsers.Terminate_Parser (Other, "duplicate state: ops length", 
Trace, Terminals);
             end if;
-         else
-            if Other.Total_Recover_Cost > Current.Total_Recover_Cost then
-               null;
-            else
-               Other := Cursor (Current);
-               Current.Next;
-            end if;
-            Parsers.Terminate_Parser (Other, "duplicate state: cost", Trace, 
Terminals);
-         end if;
+            Parsers.Terminate_Parser
+              (Other, Tree, "duplicate state with " & Tree.Trimmed_Image
+                 (if Another_Stream = Other.Stream
+                  then One_Stream
+                  else Another_Stream) & (-Msg),
+               Trace);
+         end;
       end if;
    end Duplicate_State;
 
    function State_Ref (Position : in Cursor) return State_Reference
    is begin
-      return (Element => Parser_State_Lists.Constant_Ref 
(Position.Ptr).Element);
+      return (Element => Parser_State_Lists.Unchecked_Ref (Position.Ptr));
    end State_Ref;
 
    function First_State_Ref (List : in Parser_Lists.List'Class) return 
State_Reference
    is begin
-      return (Element => Parser_State_Lists.Constant_Ref 
(List.Elements.First).Element);
+      return (Element => Parser_State_Lists.Unchecked_Ref 
(List.Elements.First));
    end First_State_Ref;
 
    function First_Constant_State_Ref (List : in Parser_Lists.List'Class) 
return Constant_State_Reference
    is begin
-      return (Element => Parser_State_Lists.Constant_Ref 
(List.Elements.First).Element);
+      return (Element => Parser_State_Lists.Unchecked_Ref 
(List.Elements.First));
    end First_Constant_State_Ref;
 
-   procedure Put_Top_10 (Trace : in out WisiToken.Trace'Class; Cursor : in 
Parser_Lists.Cursor)
-   is
-      Parser_State : Parser_Lists.Parser_State renames 
Parser_State_Lists.Constant_Ref (Cursor.Ptr);
-   begin
-      Trace.Put (Natural'Image (Parser_State.Label) & " stack: ");
-      Trace.Put_Line (Image (Parser_State.Stack, Trace.Descriptor.all, 
Parser_State.Tree, Depth => 10));
-   end Put_Top_10;
-
    procedure Prepend_Copy
-     (List   : in out Parser_Lists.List;
-      Cursor : in     Parser_Lists.Cursor'Class)
+     (List      : in out Parser_Lists.List;
+      Cursor    : in     Parser_Lists.Cursor'Class;
+      Tree      : in out Syntax_Trees.Tree;
+      User_Data : in     Syntax_Trees.User_Data_Access_Constant;
+      Trace     : in out WisiToken.Trace'Class)
    is
       New_Item : Parser_State;
    begin
-      List.Parser_Label := List.Parser_Label + 1;
       declare
          Item : Parser_State renames Parser_State_Lists.Variable_Ref 
(Cursor.Ptr);
          --  We can't do 'Prepend' in the scope of this 'renames';
          --  that would be tampering with cursors.
       begin
-         Item.Tree.Set_Flush_False;
-
          --  We specify all items individually, rather than copy Item and
          --  then override a few, to avoid copying large items like Recover.
-         --  We copy Recover.Enqueue_Count .. Check_Count for unit tests.
+         --  We copy Recover.Enqueue_Count, Check_Count for
+         --  test_mckenzie_recover.adb.
          New_Item :=
-           (Shared_Token                  => Item.Shared_Token,
-            Recover_Insert_Delete         => Item.Recover_Insert_Delete,
-            Recover_Insert_Delete_Current => 
Item.Recover_Insert_Delete_Current,
-            Current_Token                 => Item.Current_Token,
-            Inc_Shared_Token              => Item.Inc_Shared_Token,
-            Stack                         => Item.Stack,
-            Tree                          => Item.Tree,
-            Recover                       =>
-              (Enqueue_Count              => Item.Recover.Enqueue_Count,
-               Config_Full_Count          => Item.Recover.Config_Full_Count,
-               Check_Count                => Item.Recover.Check_Count,
-               others                     => <>),
-            Resume_Active                 => Item.Resume_Active,
-            Resume_Token_Goal             => Item.Resume_Token_Goal,
-            Conflict_During_Resume        => Item.Conflict_During_Resume,
-            Zombie_Token_Count            => 0,
-            Errors                        => Item.Errors,
-            Label                         => List.Parser_Label,
-            Verb                          => Item.Verb);
+           (Recover                =>
+              (Enqueue_Count       => Item.Recover.Enqueue_Count,
+               Check_Count         => Item.Recover.Check_Count,
+               others              => <>),
+            Recover_Insert_Delete  => Item.Recover_Insert_Delete,
+            Total_Recover_Cost     => Item.Total_Recover_Cost,
+            Max_Recover_Ops_Length => Item.Max_Recover_Ops_Length,
+            Error_Count            => Item.Error_Count,
+            Resume_Active          => Item.Resume_Active,
+            Resume_Token_Goal      => Item.Resume_Token_Goal,
+            Conflict_During_Resume => Item.Conflict_During_Resume,
+            Zombie_Token_Count     => 0,
+            Last_Action            => Item.Last_Action,
+            Current_Recover_Op     => Item.Current_Recover_Op,
+            Current_Error_Features => Item.Current_Error_Features,
+            Stream                 => Tree.New_Stream (Item.Stream),
+            Verb                   => Item.Verb);
       end;
+
       List.Elements.Prepend (New_Item);
    end Prepend_Copy;
 
@@ -339,6 +624,11 @@ package body WisiToken.Parse.LR.Parser_Lists is
       return (Ptr => Ptr.Ptr);
    end To_Cursor;
 
+   function To_Parser_Node_Access (Cur : in Cursor) return Parser_Node_Access
+   is begin
+      return (Ptr => Cur.Ptr);
+   end To_Parser_Node_Access;
+
    function Constant_Reference
      (Container : aliased in List'Class;
       Position  :         in Parser_Node_Access)
@@ -346,7 +636,7 @@ package body WisiToken.Parse.LR.Parser_Lists is
    is
       pragma Unreferenced (Container);
    begin
-      return (Element => Parser_State_Lists.Constant_Ref 
(Position.Ptr).Element);
+      return (Element => Parser_State_Lists.Unchecked_Ref (Position.Ptr));
    end Constant_Reference;
 
    function Reference
@@ -356,13 +646,13 @@ package body WisiToken.Parse.LR.Parser_Lists is
    is
       pragma Unreferenced (Container);
    begin
-      return (Element => Parser_State_Lists.Variable_Ref 
(Position.Ptr).Element);
+      return (Element => Parser_State_Lists.Unchecked_Ref (Position.Ptr));
    end Reference;
 
-   function Persistent_State_Ref (Position : in Parser_Node_Access) return 
State_Access
+   function Unchecked_State_Ref (Position : in Parser_Node_Access) return 
State_Access
    is begin
-      return State_Access (Parser_State_Lists.Persistent_Ref (Position.Ptr));
-   end Persistent_State_Ref;
+      return State_Access (Parser_State_Lists.Unchecked_Ref (Position.Ptr));
+   end Unchecked_State_Ref;
 
    type Iterator (Elements : access Parser_State_Lists.List) is new 
Iterator_Interfaces.Forward_Iterator
      with null record;
@@ -398,19 +688,24 @@ package body WisiToken.Parse.LR.Parser_Lists is
       return Parser_State_Lists.Has_Element (Iterator.Ptr);
    end Has_Element;
 
-   function Label (Iterator : in Parser_State) return Natural
+   function Stream (State : in Parser_State) return Syntax_Trees.Stream_ID
+   is begin
+      return State.Stream;
+   end Stream;
+
+   procedure Clear_Stream (State : in out Parser_State)
    is begin
-      return Iterator.Label;
-   end Label;
+      State.Stream := Syntax_Trees.Invalid_Stream_ID;
+   end Clear_Stream;
 
-   function Verb (Iterator : in Parser_State) return All_Parse_Action_Verbs
+   function Verb (State : in Parser_State) return All_Parse_Action_Verbs
    is begin
-      return Iterator.Verb;
+      return State.Verb;
    end Verb;
 
-   procedure Set_Verb (Iterator : in out Parser_State; Verb : in 
All_Parse_Action_Verbs)
+   procedure Set_Verb (State : in out Parser_State; Verb : in 
All_Parse_Action_Verbs)
    is begin
-      Iterator.Verb := Verb;
+      State.Verb := Verb;
    end Set_Verb;
 
 end WisiToken.Parse.LR.Parser_Lists;
diff --git a/wisitoken-parse-lr-parser_lists.ads 
b/wisitoken-parse-lr-parser_lists.ads
index 718846672e..54f6fd4327 100644
--- a/wisitoken-parse-lr-parser_lists.ads
+++ b/wisitoken-parse-lr-parser_lists.ads
@@ -2,7 +2,7 @@
 --
 --  Generalized LR parser state.
 --
---  Copyright (C) 2014-2015, 2017 - 2021 Free Software Foundation, Inc.
+--  Copyright (C) 2014-2015, 2017 - 2022 Free Software Foundation, Inc.
 --
 --  This file is part of the WisiToken package.
 --
@@ -22,22 +22,15 @@ pragma License (Modified_GPL);
 
 with Ada.Iterator_Interfaces;
 with SAL.Gen_Indefinite_Doubly_Linked_Lists;
-with SAL.Gen_Unbounded_Definite_Stacks;
 with WisiToken.Syntax_Trees;
 package WisiToken.Parse.LR.Parser_Lists is
-
-   type Parser_Stack_Item is record
-      State : Unknown_State_Index     := Unknown_State;
-      Token : Node_Index := Invalid_Node_Index;
-   end record;
-
-   package Parser_Stacks is new SAL.Gen_Unbounded_Definite_Stacks 
(Parser_Stack_Item);
+   use all type WisiToken.Syntax_Trees.Stream_ID;
+   use all type WisiToken.Syntax_Trees.Stream_Error_Ref;
 
    function Parser_Stack_Image
-     (Stack      : in Parser_Stacks.Stack;
-      Descriptor : in WisiToken.Descriptor;
-      Tree       : in Syntax_Trees.Tree;
-      Depth      : in SAL.Base_Peek_Type := 0)
+     (Stack : in Syntax_Trees.Stream_ID;
+      Tree  : in Syntax_Trees.Tree;
+      Depth : in SAL.Base_Peek_Type := 0)
      return String;
    --  If Depth = 0, put all of Stack. Otherwise put Min (Depth,
    --  Stack.Depth) items.
@@ -45,71 +38,132 @@ package WisiToken.Parse.LR.Parser_Lists is
    --  Unique name for calling from debugger
 
    function Image
-     (Stack      : in Parser_Stacks.Stack;
-      Descriptor : in WisiToken.Descriptor;
-      Tree       : in Syntax_Trees.Tree;
-      Depth      : in SAL.Base_Peek_Type := 0)
+     (Stack : in Syntax_Trees.Stream_ID;
+      Tree  : in Syntax_Trees.Tree;
+      Depth : in SAL.Base_Peek_Type := 0)
      return String renames Parser_Stack_Image;
 
    type Base_Parser_State is tagged
    record
       --  Visible components for direct access
+      --
+      --  The parse stack is in Shared_Parser.Tree (Parser_State.Stream).
 
-      Shared_Token : Base_Token_Index := Invalid_Token_Index;
-      --  Last token read from Shared_Parser.Terminals.
+      Recover : aliased LR.McKenzie_Data := (others => <>);
 
-      Recover_Insert_Delete : aliased Recover_Op_Arrays.Vector;
-      --  Tokens that were inserted or deleted during error recovery.
-      --  Contains only Insert and Delete ops. Filled by error recover, used
-      --  by main parse and Execute_Actions.
+      Recover_Insert_Delete : aliased 
Syntax_Trees.Valid_Node_Access_Lists.List;
+      --  List of nodes containing errors that contain recover operations;
+      --  tokens that were inserted or deleted during error recovery. Filled
+      --  by error recover, used by Execute_Actions for
+      --  User_Data.Insert_Token, .Delete_Token.
       --
       --  Not emptied between error recovery sessions, so Execute_Actions
       --  knows about all insert/delete.
 
-      Recover_Insert_Delete_Current : Recover_Op_Arrays.Extended_Index := 
Recover_Op_Arrays.No_Index;
-      --  Next item in Recover_Insert_Delete to be processed by main parse;
-      --  No_Index if all done.
-
-      Current_Token : Node_Index := Invalid_Node_Index;
-      --  Current terminal, in Tree
-
-      Inc_Shared_Token : Boolean := True;
-
-      Stack : Parser_Stacks.Stack;
-      --  There is no need to use a branched stack; max stack length is
-      --  proportional to source text nesting depth, not source text length.
+      Total_Recover_Cost     : Integer                   := 0;
+      Max_Recover_Ops_Length : Ada.Containers.Count_Type := 0;
+      Error_Count            : Integer                   := 0;
 
-      Tree : aliased Syntax_Trees.Tree;
-      --  We use a branched tree to avoid copying large trees for each
-      --  spawned parser; tree size is proportional to source text size. In
-      --  normal parsing, parallel parsers are short-lived; they each process
-      --  a few tokens, to resolve a grammar conflict.
-      --
-      --  When there is only one parser, tree nodes are written directly to
-      --  the shared tree (via the branched tree, with Flush => True).
-      --
-      --  When there is more than one, tree nodes are written to the
-      --  branched tree. Then when all but one parsers are terminated, the
-      --  remaining branched tree is flushed into the shared tree.
-
-      Recover : aliased LR.McKenzie_Data := (others => <>);
-
-      Zombie_Token_Count : Base_Token_Index := 0;
+      Zombie_Token_Count : Integer := 0;
       --  If Zombie_Token_Count > 0, this parser has errored, but is waiting
       --  to see if other parsers do also.
 
-      Resume_Active          : Boolean          := False;
-      Resume_Token_Goal      : Base_Token_Index := Invalid_Token_Index;
-      Conflict_During_Resume : Boolean          := False;
-      --  Resume is complete for this parser Shared_Token reaches this
-      --  Resume_Token_Goal.
+      Resume_Active : Boolean := False;
+
+      Resume_Token_Goal : Syntax_Trees.Base_Sequential_Index := 
Syntax_Trees.Invalid_Sequential_Index;
+      --  Set at the end of recovery, so during recovery it is the end of
+      --  the previous recover session.
+
+      Conflict_During_Resume : Boolean := False;
 
-      Errors : Parse_Error_Lists.List;
+      Last_Action : Parse_Action_Rec := (others => <>);
    end record;
 
    type Parser_State is new Base_Parser_State with private;
    type State_Access is access all Parser_State;
 
+   function Recover_Image
+     (Parser_State : in out Parser_Lists.Parser_State;
+      Tree         : in     Syntax_Trees.Tree;
+      Current_Only : in     Boolean := False)
+     return String;
+
+   function Current_Recover_Op (Parser_State : in Parser_Lists.Parser_State) 
return SAL.Base_Peek_Type;
+   --  Index into the recover ops of Parser_State.Current_Error_Ref;
+   --  No_Insert_Delete if there is no current error (all ops done).
+
+   procedure Set_Current_Error_Features
+     (Parser_State : in out Parser_Lists.Parser_State;
+      Tree         : in     Syntax_Trees.Tree);
+   --  Record Syntax_Trees.Error_Node_Features of
+   --  Parser_State.Current_Error_Ref (called with default Features) to
+   --  enable Current_Error_Ref to find it again while recover ops are
+   --  processed.
+
+   procedure Clear_Current_Error_Features
+     (Parser_State : in out Parser_Lists.Parser_State);
+   --  Reset to default, ready for a new error recover session.
+
+   function Current_Error_Ref
+     (Parser_State : in Parser_Lists.Parser_State;
+      Tree         : in Syntax_Trees.Tree)
+     return Syntax_Trees.Stream_Error_Ref
+   with Post => Current_Error_Ref'Result /= 
Syntax_Trees.Invalid_Stream_Error_Ref;
+   --  Must only be called when Parser_State has an error; return current
+   --  error node. If Set_Current_Error_Features has been called, uses the
+   --  recorded Error_Node_Features.
+
+   procedure Do_Delete
+     (Parser_State : in out Parser_Lists.Parser_State;
+      Tree         : in out Syntax_Trees.Tree;
+      Op           : in out Delete_Op_Nodes;
+      User_Data    : in     Syntax_Trees.User_Data_Access_Constant);
+   --  Perform the Delete operation on Parser_State.Stream; set
+   --  Op.Del_Node to the deleted node. Update
+   --  Parser_State.Current_Error_Features if the deleted node is the
+   --  error node.
+
+   procedure Undo_Reduce
+     (Parser_State : in out Parser_Lists.Parser_State;
+      Tree         : in out Syntax_Trees.Tree;
+      Table        : in     Parse_Table;
+      User_Data    : in     Syntax_Trees.User_Data_Access_Constant);
+   --  Undo reduction of the nonterm at Parser_State.Stream.Stack_Top;
+   --  Stack_Top is then the last Child of the nonterm.
+   --
+   --  If Stream.Stack_Top has an error, it is moved to the first
+   --  terminal; if that error is the current error, update
+   --  Parser_State.Current_Error_Features.
+   --
+   --  Duplicates LR.Undo_Reduce; that is used by Edit_Tree, when there
+   --  is no Parser_State.
+
+   procedure First_Recover_Op (Parser_State : in out 
Parser_Lists.Parser_State);
+   --  Set Parser_State.Current_Recover_Op to 1, indicating that there
+   --  are insert/delete operations in the current error.
+
+   procedure Next_Recover_Op
+     (Parser_State : in out Parser_Lists.Parser_State;
+      Tree         : in     Syntax_Trees.Tree);
+   --  Increment Parser_State.Current_Recover_Op.
+
+   procedure Update_Error
+     (Parser_State : in out Parser_Lists.Parser_State;
+      Tree         : in out Syntax_Trees.Tree;
+      Data         : in     Syntax_Trees.Error_Data'Class;
+      User_Data    : in     Syntax_Trees.User_Data_Access_Constant);
+   --  Update current error with Data. If Parser_State.Current_Recover_Op
+   --  is the last op in the current error, append the current error node
+   --  to Parser_State.Recover_Insert_Delete, and reset
+   --  Parser_State.Current_Recover_Op.
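
Illustrative only, not part of the commit: taken together, the subprograms
above define how the main parse consumes the recover ops attached to the
current error. A minimal sketch of that pattern (the actual application of
each insert/delete op is elided, and the exact call sites are assumptions):

   --  Hypothetical consumption loop; only the subprograms declared above
   --  are from this package.
   Parser_State.Set_Current_Error_Features (Tree);
   Parser_State.First_Recover_Op;
   while Parser_State.Current_Recover_Op /= No_Insert_Delete loop
      --  ... apply the insert or delete op at Current_Recover_Op, using
      --  Peek_Current_Sequential_Terminal to compare token positions ...
      Parser_State.Next_Recover_Op (Tree);
   end loop;
   Parser_State.Clear_Current_Error_Features;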
+
+   function Peek_Current_Sequential_Terminal
+     (Parser_State : in Parser_Lists.Parser_State;
+      Tree         : in Syntax_Trees.Tree)
+     return Syntax_Trees.Terminal_Ref;
+   --  Return first terminal with a valid Sequential_Index from current
+   --  token or a following token if current is an empty nonterm. For
+   --  comparison with insert/delete token index.
+
    type List is tagged private
    with
      Constant_Indexing => Constant_Reference,
@@ -117,9 +171,12 @@ package WisiToken.Parse.LR.Parser_Lists is
      Default_Iterator  => Iterate,
      Iterator_Element  => Parser_State;
 
-   function New_List (Shared_Tree : in Syntax_Trees.Base_Tree_Access) return 
List;
+   function New_List (Tree : in out Syntax_Trees.Tree) return List
+   with Pre => Tree.Parseable;
+   --  Create the first parse stream in Tree.
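
Illustrative only, not part of the commit: this mirrors how the no-recover
parser later in this diff creates the parser list and then starts the parse
on the first stream:

   Shared_Parser.Parsers := Parser_Lists.New_List (Shared_Parser.Tree);
   Shared_Parser.Tree.Start_Parse
     (Shared_Parser.Parsers.First.State_Ref.Stream,
      Shared_Parser.Table.State_First);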
 
-   function Last_Label (List : in Parser_Lists.List) return Natural;
+   procedure Clear (List : in out Parser_Lists.List);
+   --  Empty list.
 
    function Count (List : in Parser_Lists.List) return SAL.Base_Peek_Type;
 
@@ -129,29 +186,26 @@ package WisiToken.Parse.LR.Parser_Lists is
    procedure Next (Cursor : in out Parser_Lists.Cursor);
    function Is_Done (Cursor : in Parser_Lists.Cursor) return Boolean;
    function Has_Element (Cursor : in Parser_Lists.Cursor) return Boolean is 
(not Is_Done (Cursor));
-   function Label (Cursor : in Parser_Lists.Cursor) return Natural;
-   function Total_Recover_Cost (Cursor : in Parser_Lists.Cursor) return 
Integer;
-   function Max_Recover_Ops_Length (Cursor : in Parser_Lists.Cursor) return 
Ada.Containers.Count_Type;
-   function Min_Recover_Cost (Cursor : in Parser_Lists.Cursor) return Integer;
+   function Stream (Cursor : in Parser_Lists.Cursor) return 
Syntax_Trees.Stream_ID;
 
    procedure Set_Verb (Cursor : in Parser_Lists.Cursor; Verb : in 
All_Parse_Action_Verbs);
    function Verb (Cursor : in Parser_Lists.Cursor) return 
All_Parse_Action_Verbs;
 
    procedure Terminate_Parser
-     (Parsers   : in out List;
-      Current   : in out Cursor'Class;
-      Message   : in     String;
-      Trace     : in out WisiToken.Trace'Class;
-      Terminals : in     Base_Token_Arrays.Vector);
-   --  Terminate Current. Current is set to no element.
+     (Parsers : in out List;
+      Current : in out Cursor'Class;
+      Tree    : in out Syntax_Trees.Tree;
+      Message : in     String;
+      Trace   : in out WisiToken.Trace'Class);
+   --  Terminate Current. Current is set to the next element.
    --
-   --  Terminals is used to report the current token in the message.
+   --  Tree is used to report the current token in the message.
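
Illustrative only, not part of the commit: a typical call with the new
profile, as in the no-recover parser's Check_Error later in this diff:

   Shared_Parser.Parsers.Terminate_Parser
     (Check_Parser, Shared_Parser.Tree, "", Trace);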
 
    procedure Duplicate_State
-     (Parsers   : in out List;
-      Current   : in out Cursor'Class;
-      Trace     : in out WisiToken.Trace'Class;
-      Terminals : in     Base_Token_Arrays.Vector);
+     (Parsers : in out List;
+      Current : in out Cursor'Class;
+      Tree    : in out Syntax_Trees.Tree;
+      Trace   : in out WisiToken.Trace'Class);
    --  If any other parser in Parsers has a stack equivalent to Current,
    --  Terminate one of them. Current is either unchanged, or advanced to
    --  the next parser.
@@ -176,10 +230,12 @@ package WisiToken.Parse.LR.Parser_Lists is
    with Pre => List.Count > 0;
    --  Direct access to visible components of first parser's Parser_State
 
-   procedure Put_Top_10 (Trace : in out WisiToken.Trace'Class; Cursor : in 
Parser_Lists.Cursor);
-   --  Put image of top 10 stack items to Trace.
-
-   procedure Prepend_Copy (List : in out Parser_Lists.List; Cursor : in 
Parser_Lists.Cursor'Class);
+   procedure Prepend_Copy
+     (List      : in out Parser_Lists.List;
+      Cursor    : in     Parser_Lists.Cursor'Class;
+      Tree      : in out Syntax_Trees.Tree;
+      User_Data : in     Syntax_Trees.User_Data_Access_Constant;
+      Trace     : in out WisiToken.Trace'Class);
    --  Copy parser at Cursor, prepend to current list. New copy will not
    --  appear in Cursor.Next ...; it is accessible as First (List).
    --
@@ -216,6 +272,7 @@ package WisiToken.Parse.LR.Parser_Lists is
    type Parser_Node_Access (<>) is private;
 
    function To_Cursor (Ptr : in Parser_Node_Access) return Cursor;
+   function To_Parser_Node_Access (Cur : in Cursor) return Parser_Node_Access;
 
    type Constant_Reference_Type (Element : not null access constant 
Parser_State) is null record
    with Implicit_Dereference => Element;
@@ -232,7 +289,7 @@ package WisiToken.Parse.LR.Parser_Lists is
      return State_Reference;
    pragma Inline (Reference);
 
-   function Persistent_State_Ref (Position : in Parser_Node_Access) return 
State_Access;
+   function Unchecked_State_Ref (Position : in Parser_Node_Access) return 
State_Access;
 
    function Has_Element (Iterator : in Parser_Node_Access) return Boolean;
 
@@ -240,16 +297,30 @@ package WisiToken.Parse.LR.Parser_Lists is
 
    function Iterate (Container : aliased in out List) return 
Iterator_Interfaces.Forward_Iterator'Class;
 
-   --  Access to some private Parser_State components
+   --  Access to private Parser_State components
 
-   function Label (Iterator : in Parser_State) return Natural;
-   procedure Set_Verb (Iterator : in out Parser_State; Verb : in 
All_Parse_Action_Verbs);
-   function Verb (Iterator : in Parser_State) return All_Parse_Action_Verbs;
+   function Stream (State : in Parser_State) return Syntax_Trees.Stream_ID;
+   procedure Set_Verb (State : in out Parser_State; Verb : in 
All_Parse_Action_Verbs);
+   function Verb (State : in Parser_State) return All_Parse_Action_Verbs;
+
+   procedure Clear_Stream (State : in out Parser_State);
+   --  Clear all references to Syntax_Tree streams, so the tree can be
+   --  finalized.
 
 private
 
    type Parser_State is new Base_Parser_State with record
-      Label : Natural; -- for debugging/verbosity
+
+      Current_Recover_Op : SAL.Base_Peek_Type := No_Insert_Delete;
+      --  Next op in Parser_State.Current_Error_Ref.Error.Recover_Ops to be
+      --  processed by main parse; No_Insert_Delete if all done.
+      --
+      --  We do not keep a copy of Parser_State.Current_Error_Ref, because
+      --  the main parser can change the Stream_Node_Ref by shift or reduce.
+
+      Current_Error_Features : Syntax_Trees.Error_Node_Features;
+
+      Stream : Syntax_Trees.Stream_ID;
 
       Verb : All_Parse_Action_Verbs := Shift; -- current action to perform
    end record;
@@ -257,17 +328,14 @@ private
    package Parser_State_Lists is new SAL.Gen_Indefinite_Doubly_Linked_Lists 
(Parser_State);
 
    type List is tagged record
-      Elements     : aliased Parser_State_Lists.List;
-      Parser_Label : Natural; -- label of last added parser.
+      Elements : aliased Parser_State_Lists.List;
    end record;
 
-   type Cursor is tagged
-   record
+   type Cursor is tagged record
       Ptr : Parser_State_Lists.Cursor;
    end record;
 
-   type Parser_Node_Access is
-   record
+   type Parser_Node_Access is record
       Ptr : Parser_State_Lists.Cursor;
    end record;
 
diff --git a/wisitoken-parse-lr-parser_no_recover.adb 
b/wisitoken-parse-lr-parser_no_recover.adb
index 6fd2b80be2..035ad13eb5 100644
--- a/wisitoken-parse-lr-parser_no_recover.adb
+++ b/wisitoken-parse-lr-parser_no_recover.adb
@@ -2,7 +2,7 @@
 --
 --  See spec.
 --
---  Copyright (C) 2002 - 2005, 2008 - 2015, 2017 - 2020 Free Software 
Foundation, Inc.
+--  Copyright (C) 2002 - 2005, 2008 - 2015, 2017 - 2022 Free Software 
Foundation, Inc.
 --
 --  This file is part of the WisiToken package.
 --
@@ -28,54 +28,54 @@
 pragma License (Modified_GPL);
 
 with Ada.Exceptions;
+with GNAT.Traceback.Symbolic;
 package body WisiToken.Parse.LR.Parser_No_Recover is
 
    procedure Reduce_Stack_1
-     (Current_Parser : in     Parser_Lists.Cursor;
+     (Shared_Parser  : in out Parser;
+      Current_Parser : in     Parser_Lists.Cursor;
       Action         : in     Reduce_Action_Rec;
-      Nonterm        :    out Valid_Node_Index;
+      New_State      : in     State_Index;
       Trace          : in out WisiToken.Trace'Class)
    is
       Parser_State  : Parser_Lists.Parser_State renames 
Current_Parser.State_Ref.Element.all;
-      Children_Tree : Valid_Node_Index_Array (1 .. SAL.Base_Peek_Type 
(Action.Token_Count));
-      --  for Set_Children.
-   begin
-      for I in reverse Children_Tree'Range loop
-         Children_Tree (I) := Parser_State.Stack.Pop.Token;
-      end loop;
-
-      Nonterm := Parser_State.Tree.Add_Nonterm
-        (Action.Production, Children_Tree, Action.Action, Default_Virtual => 
False);
-      --  Computes Nonterm.Byte_Region
 
+      Nonterm : constant Syntax_Trees.Stream_Node_Ref := 
Shared_Parser.Tree.Reduce
+        (Parser_State.Stream, Action.Production, Action.Token_Count, New_State,
+         Recover_Conflict => False);
+   begin
       if Trace_Parse > Detail then
-         Trace.Put_Line (Parser_State.Tree.Image (Nonterm, 
Trace.Descriptor.all, Include_Children => True));
+         Trace.Put_Line
+           (Shared_Parser.Tree.Image (Nonterm.Node, Children => True, 
Terminal_Node_Numbers => True));
       end if;
    end Reduce_Stack_1;
 
    procedure Do_Action
-     (Action         : in Parse_Action_Rec;
-      Current_Parser : in Parser_Lists.Cursor;
-      Shared_Parser  : in Parser)
+     (Action         : in     Parse_Action_Rec;
+      Current_Parser : in     Parser_Lists.Cursor;
+      Shared_Parser  : in out Parser)
    is
+      use Syntax_Trees;
+      use all type Ada.Containers.Count_Type;
+
       Parser_State : Parser_Lists.Parser_State renames 
Current_Parser.State_Ref;
-      Trace        : WisiToken.Trace'Class renames Shared_Parser.Trace.all;
-      Nonterm      : Valid_Node_Index;
+      Trace        : WisiToken.Trace'Class renames 
Shared_Parser.Tree.Lexer.Trace.all;
    begin
       if Trace_Parse > Detail then
          Trace.Put
-           (Integer'Image (Current_Parser.Label) & ": " &
-              Trimmed_Image (Parser_State.Stack.Peek.State) & ": " &
-              Parser_State.Tree.Image (Parser_State.Current_Token, 
Trace.Descriptor.all) & " : ");
-         Put (Trace, Action);
+           (" " & Shared_Parser.Tree.Trimmed_Image (Current_Parser.Stream) & 
": " &
+              Trimmed_Image (Shared_Parser.Tree.State (Current_Parser.Stream)) 
& ": " &
+              Shared_Parser.Tree.Image
+                (Shared_Parser.Tree.Current_Token (Parser_State.Stream).Node,
+                 Terminal_Node_Numbers => True) & " : ");
+         Put (Trace, Trace_Image (Action, 
Shared_Parser.Tree.Lexer.Descriptor.all));
          Trace.New_Line;
       end if;
 
       case Action.Verb is
       when Shift =>
          Current_Parser.Set_Verb (Shift);
-         Parser_State.Stack.Push ((Action.State, Parser_State.Current_Token));
-         Parser_State.Tree.Set_State (Parser_State.Current_Token, 
Action.State);
+         Shared_Parser.Tree.Shift (Parser_State.Stream, Action.State);
 
       when Reduce =>
          Current_Parser.Set_Verb (Reduce);
@@ -83,7 +83,9 @@ package body WisiToken.Parse.LR.Parser_No_Recover is
          declare
             New_State : constant Unknown_State_Index := Goto_For
               (Table => Shared_Parser.Table.all,
-               State => Parser_State.Stack (SAL.Base_Peek_Type 
(Action.Token_Count) + 1).State,
+               State => Shared_Parser.Tree.State
+                 (Parser_State.Stream,
+                  Shared_Parser.Tree.Peek (Parser_State.Stream, SAL.Peek_Type 
(Action.Token_Count + 1))),
                ID    => Action.Production.LHS);
          begin
             if New_State = Unknown_State then
@@ -94,9 +96,7 @@ package body WisiToken.Parse.LR.Parser_No_Recover is
                   Trace.Put_Line (" ... error");
                end if;
             else
-               Reduce_Stack_1 (Current_Parser, Action, Nonterm, Trace);
-               Parser_State.Stack.Push ((New_State, Nonterm));
-               Parser_State.Tree.Set_State (Nonterm, New_State);
+               Reduce_Stack_1 (Shared_Parser, Current_Parser, Action, 
New_State, Trace);
 
                if Trace_Parse > Detail then
                   Trace.Put_Line (" ... goto state " & Trimmed_Image 
(New_State));
@@ -106,12 +106,11 @@ package body WisiToken.Parse.LR.Parser_No_Recover is
 
       when Accept_It =>
          Current_Parser.Set_Verb (Accept_It);
-         Reduce_Stack_1
-           (Current_Parser,
-            (Reduce, Action.Production, Action.Action, Action.Check, 
Action.Token_Count),
-            Nonterm, Trace);
 
-         Parser_State.Tree.Set_Root (Nonterm);
+         Reduce_Stack_1
+           (Shared_Parser, Current_Parser,
+            (Reduce, Action.Production, Action.Token_Count),
+            Accept_State, Trace);
 
       when Error =>
          Current_Parser.Set_Verb (Action.Verb);
@@ -121,27 +120,32 @@ package body WisiToken.Parse.LR.Parser_No_Recover is
 
          declare
             Expecting : constant Token_ID_Set := LR.Expecting
-              (Shared_Parser.Table.all, 
Current_Parser.State_Ref.Stack.Peek.State);
+              (Shared_Parser.Table.all, Shared_Parser.Tree.State 
(Current_Parser.Stream));
          begin
-            Parser_State.Errors.Append
-              ((Label          => LR.Action,
-                First_Terminal => Trace.Descriptor.First_Terminal,
-                Last_Terminal  => Trace.Descriptor.Last_Terminal,
-                Error_Token    => Parser_State.Current_Token,
-                Expecting      => Expecting,
-                Recover        => (others => <>)));
+            Shared_Parser.Tree.Add_Error_To_Input
+              (Stream            => Parser_State.Stream,
+               Data              => Parse_Error'
+                 (First_Terminal => 
Shared_Parser.Tree.Lexer.Descriptor.First_Terminal,
+                  Last_Terminal  => 
Shared_Parser.Tree.Lexer.Descriptor.Last_Terminal,
+                  Expecting      => Expecting,
+                  Recover_Ops    => Recover_Op_Nodes_Arrays.Empty_Vector,
+                  Recover_Test   => null),
+               User_Data         => Syntax_Trees.User_Data_Access_Constant 
(Shared_Parser.User_Data));
 
             if Trace_Parse > Outline then
                Put
                  (Trace,
-                  Integer'Image (Current_Parser.Label) & ": expecting: " &
-                    Image (Expecting, Trace.Descriptor.all));
+                  " " & Shared_Parser.Tree.Trimmed_Image 
(Current_Parser.Stream) & ": expecting: " &
+                    Image (Expecting, 
Shared_Parser.Tree.Lexer.Descriptor.all));
                Trace.New_Line;
             end if;
          end;
       end case;
    end Do_Action;
 
+   procedure Parse_Verb
+     (Shared_Parser : in out Parser;
+      Verb          :    out All_Parse_Action_Verbs)
    --  Return the type of parser cycle to execute.
    --
    --  Accept : all Parsers.Verb return Accept - done parsing.
@@ -151,9 +155,6 @@ package body WisiToken.Parse.LR.Parser_No_Recover is
    --  Reduce : some Parsers.Verb return Reduce.
    --
    --  Error : all Parsers.Verb return Error.
-   procedure Parse_Verb
-     (Shared_Parser : in out Parser;
-      Verb          :    out All_Parse_Action_Verbs)
    is
       Shift_Count  : SAL.Base_Peek_Type := 0;
       Accept_Count : SAL.Base_Peek_Type := 0;
@@ -203,35 +204,31 @@ package body WisiToken.Parse.LR.Parser_No_Recover is
    end Finalize;
 
    procedure New_Parser
-     (Parser               :    out          LR.Parser_No_Recover.Parser;
-      Trace                : not null access WisiToken.Trace'Class;
-      Lexer                : in              WisiToken.Lexer.Handle;
-      Table                : in              Parse_Table_Ptr;
-      User_Data            : in              
WisiToken.Syntax_Trees.User_Data_Access;
-      Max_Parallel         : in              SAL.Base_Peek_Type := 
Default_Max_Parallel;
-      First_Parser_Label   : in              Integer            := 1;
-      Terminate_Same_State : in              Boolean            := True)
-   is
-      use all type Syntax_Trees.User_Data_Access;
-   begin
-      Parser.Lexer                := Lexer;
-      Parser.Trace                := Trace;
-      Parser.Table                := Table;
-      Parser.User_Data            := User_Data;
-      Parser.Max_Parallel         := Max_Parallel;
-      Parser.First_Parser_Label   := First_Parser_Label;
-      Parser.Terminate_Same_State := Terminate_Same_State;
-
-      if User_Data /= null then
-         User_Data.Set_Lexer_Terminals (Lexer, 
Parser.Terminals'Unchecked_Access);
-      end if;
+     (Parser      :    out LR.Parser_No_Recover.Parser;
+      Lexer       : in     WisiToken.Lexer.Handle;
+      Table       : in     Parse_Table_Ptr;
+      Productions : in     Syntax_Trees.Production_Info_Trees.Vector;
+      User_Data   : in     Syntax_Trees.User_Data_Access)
+   is begin
+      Parser.Tree.Lexer  := Lexer;
+      Parser.Table       := Table;
+      Parser.Productions := Productions;
+      Parser.User_Data   := User_Data;
    end New_Parser;
 
-   overriding procedure Parse (Shared_Parser : aliased in out Parser)
+   overriding procedure Parse
+     (Shared_Parser : in out Parser;
+      Log_File      : in     Ada.Text_IO.File_Type;
+      Edits         : in     KMN_Lists.List := KMN_Lists.Empty_List;
+      Pre_Edited    : in     Boolean        := False)
    is
+      pragma Unreferenced (Log_File, Pre_Edited);
+
+      use all type KMN_Lists.List;
+      use all type WisiToken.Syntax_Trees.Terminal_Ref;
       use all type Syntax_Trees.User_Data_Access;
 
-      Trace : WisiToken.Trace'Class renames Shared_Parser.Trace.all;
+      Trace : WisiToken.Trace'Class renames Shared_Parser.Tree.Lexer.Trace.all;
 
       Current_Verb : All_Parse_Action_Verbs;
       Action       : Parse_Action_Node_Ptr;
@@ -246,8 +243,7 @@ package body WisiToken.Parse.LR.Parser_No_Recover is
             if Shared_Parser.Parsers.Count = 1 then
                raise Syntax_Error;
             else
-               Shared_Parser.Parsers.Terminate_Parser
-                 (Check_Parser, "", Shared_Parser.Trace.all, 
Shared_Parser.Terminals);
+               Shared_Parser.Parsers.Terminate_Parser (Check_Parser, 
Shared_Parser.Tree, "", Trace);
             end if;
          else
             Check_Parser.Next;
@@ -255,18 +251,23 @@ package body WisiToken.Parse.LR.Parser_No_Recover is
       end Check_Error;
 
    begin
+      if Edits /= KMN_Lists.Empty_List then
+         raise SAL.Programmer_Error;
+      end if;
+
       if Shared_Parser.User_Data /= null then
          Shared_Parser.User_Data.Reset;
       end if;
 
-      Shared_Parser.Shared_Tree.Clear;
-
-      Shared_Parser.Parsers := Parser_Lists.New_List
-        (Shared_Tree => Shared_Parser.Shared_Tree'Unchecked_Access);
+      Shared_Parser.Tree.Clear;
 
       Shared_Parser.Lex_All;
 
-      Shared_Parser.Parsers.First.State_Ref.Stack.Push 
((Shared_Parser.Table.State_First, others => <>));
+      Shared_Parser.Parsers := Parser_Lists.New_List (Shared_Parser.Tree);
+
+      Shared_Parser.Tree.Start_Parse
+        (Shared_Parser.Parsers.First.State_Ref.Stream,
+         Shared_Parser.Table.State_First);
 
       Main_Loop :
       loop
@@ -276,35 +277,28 @@ package body WisiToken.Parse.LR.Parser_No_Recover is
 
          case Current_Verb is
          when Shift =>
-            --  All parsers just shifted a token; get the next token
-
-            for Parser_State of Shared_Parser.Parsers loop
-               Parser_State.Shared_Token  := Parser_State.Shared_Token + 1;
-               Parser_State.Current_Token := Shared_Parser.Terminals
-                          (Parser_State.Shared_Token).Tree_Index;
-            end loop;
+            --  All parsers just shifted a token, or we are just starting a 
parse;
+            --  Tree.Current_Token is the next token.
+            null;
 
          when Accept_It =>
             --  All parsers accepted.
             declare
                Count : constant SAL.Base_Peek_Type := 
Shared_Parser.Parsers.Count;
+               State : Parser_Lists.Parser_State renames 
Shared_Parser.Parsers.First.State_Ref.Element.all;
             begin
                if Count = 1 then
                   --  Nothing more to do
                   if Trace_Parse > Outline then
-                     Trace.Put_Line (Integer'Image 
(Shared_Parser.Parsers.First.Label) & ": succeed");
+                     Trace.Put_Line (" " & Shared_Parser.Tree.Trimmed_Image 
(State.Stream) & ": succeed");
                   end if;
                   exit Main_Loop;
 
                else
                   --  More than one parser is active; ambiguous parse.
-                  declare
-                     Token : Base_Token renames Shared_Parser.Terminals 
(Shared_Parser.Terminals.Last_Index);
-                  begin
-                     raise WisiToken.Parse_Error with Error_Message
-                       (Shared_Parser.Lexer.File_Name, Token.Line, 
Token.Column,
-                        "Ambiguous parse:" & SAL.Base_Peek_Type'Image (Count) 
& " parsers active.");
-                  end;
+                  raise WisiToken.Parse_Error with 
Shared_Parser.Tree.Error_Message
+                    (Shared_Parser.Tree.Current_Token (State.Stream),
+                     "Ambiguous parse:" & SAL.Base_Peek_Type'Image (Count) & " 
parsers active.");
                end if;
             end;
 
@@ -312,9 +306,8 @@ package body WisiToken.Parse.LR.Parser_No_Recover is
             null;
 
          when Error =>
-            --  All parsers errored; terminate with error. Semantic_State has 
all
-            --  the required info (recorded by Error in Do_Action), so we just
-            --  raise the exception.
+            --  All parsers errored; terminate with error. Do_Action reported 
the
+            --  error, so we just raise the exception.
             raise Syntax_Error;
 
          when Pause =>
@@ -331,11 +324,8 @@ package body WisiToken.Parse.LR.Parser_No_Recover is
             loop
                exit when Current_Parser.Is_Done;
 
-               if Shared_Parser.Terminate_Same_State and
-                 Current_Verb = Shift
-               then
-                  Shared_Parser.Parsers.Duplicate_State
-                    (Current_Parser, Shared_Parser.Trace.all, 
Shared_Parser.Terminals);
+               if Current_Verb = Shift then
+                  Shared_Parser.Parsers.Duplicate_State (Current_Parser, 
Shared_Parser.Tree, Trace);
                   --  If Duplicate_State terminated Current_Parser, 
Current_Parser now
                   --  points to the next parser. Otherwise it is unchanged.
                end if;
@@ -344,8 +334,8 @@ package body WisiToken.Parse.LR.Parser_No_Recover is
 
                if Trace_Parse > Extra then
                   Trace.Put_Line
-                    ("current_verb: " & Parse_Action_Verbs'Image 
(Current_Verb) &
-                       "," & Integer'Image (Current_Parser.Label) &
+                    ("current_verb: " & Image (Current_Verb) &
+                       ", " & Shared_Parser.Tree.Trimmed_Image 
(Current_Parser.Stream) &
                        ".verb: " & Parse_Action_Verbs'Image 
(Current_Parser.Verb));
                end if;
 
@@ -353,17 +343,18 @@ package body WisiToken.Parse.LR.Parser_No_Recover is
                --  (which advances to the next parser) or Current_Parser.Next.
 
                if Current_Parser.Verb = Current_Verb then
-                  if Trace_Parse > Extra then
-                     Parser_Lists.Put_Top_10 (Trace, Current_Parser);
-                  end if;
-
                   declare
-                     State : Parser_Lists.Parser_State renames 
Current_Parser.State_Ref.Element.all;
+                     Parser_State : Parser_Lists.Parser_State renames 
Current_Parser.State_Ref.Element.all;
                   begin
+                     if Trace_Parse > Extra then
+                        Trace.Put (" " & Shared_Parser.Tree.Trimmed_Image 
(Parser_State.Stream) & ": stack: ");
+                        Trace.Put_Line (Parser_Lists.Image 
(Parser_State.Stream, Shared_Parser.Tree));
+                     end if;
+
                      Action := Action_For
                        (Table => Shared_Parser.Table.all,
-                        State => State.Stack.Peek.State,
-                        ID    => State.Tree.ID (State.Current_Token));
+                        State => Shared_Parser.Tree.State 
(Parser_State.Stream),
+                        ID    => Shared_Parser.Tree.ID 
(Shared_Parser.Tree.Current_Token (Parser_State.Stream).Node));
                   end;
 
                   declare
@@ -373,17 +364,16 @@ package body WisiToken.Parse.LR.Parser_No_Recover is
                         exit when Conflict = null;
                         --  Spawn a new parser (before modifying 
Current_Parser stack).
 
-                        if Shared_Parser.Parsers.Count = 
Shared_Parser.Max_Parallel then
+                        if Shared_Parser.Parsers.Count = 
Shared_Parser.Table.Max_Parallel then
                            declare
                               Parser_State : Parser_Lists.Parser_State renames 
Current_Parser.State_Ref;
-                              Token : Base_Token renames 
Shared_Parser.Terminals (Parser_State.Shared_Token);
                            begin
-                              raise WisiToken.Parse_Error with Error_Message
-                                (Shared_Parser.Lexer.File_Name, Token.Line, 
Token.Column,
+                              raise WisiToken.Parse_Error with 
Shared_Parser.Tree.Error_Message
+                                (Shared_Parser.Tree.Shared_Token 
(Parser_State.Stream),
                                  ": too many parallel parsers required in 
grammar state" &
-                                   State_Index'Image 
(Parser_State.Stack.Peek.State) &
+                                   Shared_Parser.Tree.State 
(Parser_State.Stream)'Image &
                                    "; simplify grammar, or increase 
max-parallel (" &
-                                   SAL.Base_Peek_Type'Image 
(Shared_Parser.Max_Parallel) & ")");
+                                   SAL.Base_Peek_Type'Image 
(Shared_Parser.Table.Max_Parallel) & ")");
                            end;
                         else
                            if Trace_Parse > Outline then
@@ -391,16 +381,20 @@ package body WisiToken.Parse.LR.Parser_No_Recover is
                                  Parser_State : Parser_Lists.Parser_State 
renames Current_Parser.State_Ref;
                               begin
                                  Trace.Put_Line
-                                   (Integer'Image (Current_Parser.Label) & ": 
" &
-                                      Trimmed_Image 
(Parser_State.Stack.Peek.State) & ": " &
-                                      Parser_State.Tree.Image
-                                        (Parser_State.Current_Token, 
Trace.Descriptor.all) & " : " &
-                                      "spawn" & Integer'Image 
(Shared_Parser.Parsers.Last_Label + 1) & ", (" &
-                                      Trimmed_Image (1 + Integer 
(Shared_Parser.Parsers.Count)) & " active)");
+                                   (" " & Shared_Parser.Tree.Trimmed_Image 
(Current_Parser.Stream) & ":" &
+                                      Shared_Parser.Tree.State 
(Parser_State.Stream)'Image & ": " &
+                                      Shared_Parser.Tree.Image
+                                        (Shared_Parser.Tree.Current_Token 
(Parser_State.Stream).Node,
+                                         Terminal_Node_Numbers => True) & " : 
" &
+                                      "spawn " & 
Shared_Parser.Tree.Next_Stream_ID_Trimmed_Image &
+                                      ", (" & Trimmed_Image (1 + Integer 
(Shared_Parser.Parsers.Count)) & " active)");
                               end;
                            end if;
 
-                           Shared_Parser.Parsers.Prepend_Copy (Current_Parser);
+                           Shared_Parser.Parsers.Prepend_Copy
+                             (Current_Parser, Shared_Parser.Tree,
+                              Syntax_Trees.User_Data_Access_Constant 
(Shared_Parser.User_Data),
+                              Trace);
                            Do_Action (Conflict.Item, 
Shared_Parser.Parsers.First, Shared_Parser);
 
                            declare
@@ -425,150 +419,87 @@ package body WisiToken.Parse.LR.Parser_No_Recover is
          end;
       end loop Main_Loop;
 
+      Shared_Parser.Tree.Clear_Parse_Streams;
+
+      if Trace_Action > Extra then
+         Trace.Put_Line
+           (Shared_Parser.Tree.Image
+              (Children     => True,
+               Non_Grammar  => True,
+               Augmented    => True,
+               Line_Numbers => True));
+         Trace.New_Line;
+      end if;
+
       --  We don't raise Syntax_Error for lexer errors, since they are all
       --  recovered, either by inserting a quote, or by ignoring the
       --  character.
    end Parse;
 
-   overriding procedure Execute_Actions
-     (Parser          : in out LR.Parser_No_Recover.Parser;
-      Image_Augmented : in     Syntax_Trees.Image_Augmented := null)
+   procedure Execute_Actions
+     (Tree        : in out Syntax_Trees.Tree;
+      Productions : in     Syntax_Trees.Production_Info_Trees.Vector;
+      User_Data   : in     Syntax_Trees.User_Data_Access)
    is
-      pragma Unreferenced (Image_Augmented);
       use all type Syntax_Trees.User_Data_Access;
-
       procedure Process_Node
         (Tree : in out Syntax_Trees.Tree;
-         Node : in     Valid_Node_Index)
+         Node : in     Syntax_Trees.Valid_Node_Access)
       is
-         use all type Syntax_Trees.Node_Label;
+         use Syntax_Trees;
       begin
          if Tree.Label (Node) /= Nonterm then
             return;
          end if;
 
+         User_Data.Reduce (Tree, Node);
+
          declare
-            use all type Syntax_Trees.Semantic_Action;
-            Tree_Children : constant Valid_Node_Index_Array := Tree.Children 
(Node);
+            Action : constant Syntax_Trees.Post_Parse_Action := 
Get_Post_Parse_Action
+              (Productions, Tree.Production_ID (Node));
          begin
-            Parser.User_Data.Reduce (Tree, Node, Tree_Children);
-
-            if Tree.Action (Node) /= null then
+            if Action /= null then
                begin
-                  Tree.Action (Node) (Parser.User_Data.all, Tree, Node, 
Tree_Children);
+                  Action (User_Data.all, Tree, Node);
                exception
                when E : others =>
-                  declare
-                     Line   : Line_Number_Type  := Line_Number_Type'First;
-                     Column : Ada.Text_IO.Count := Ada.Text_IO.Count'First;
-                  begin
-                     if Tree.First_Shared_Terminal (Node) = 
Invalid_Token_Index then
-                        declare
-                           Byte_Region : Buffer_Region renames 
Tree.Byte_Region (Node);
-                        begin
-                           if Byte_Region /= Null_Buffer_Region then
-                              Column := Ada.Text_IO.Count (Byte_Region.First);
-                           end if;
-                        end;
-                     else
-                        declare
-                           Token : Base_Token renames Parser.Terminals 
(Tree.First_Shared_Terminal (Node));
-                        begin
-                           Line := Token.Line;
-                           Column := Token.Column;
-                        end;
-                     end if;
-                     raise WisiToken.Parse_Error with Error_Message
-                       (Parser.Lexer.File_Name, Line, Column,
-                        "action raised exception " & 
Ada.Exceptions.Exception_Name (E) & ": " &
-                          Ada.Exceptions.Exception_Message (E));
-                  end;
+                  if Trace_Tests > Outline then
+                     --  running a unit test; exception may be an AUnit assert failure
+                     raise;
+
+                  elsif WisiToken.Debug_Mode then
+                     Tree.Lexer.Trace.Put_Line
+                       (GNAT.Traceback.Symbolic.Symbolic_Traceback (E)); -- 
includes Prefix
+                     Tree.Lexer.Trace.New_Line;
+                  end if;
+
+                  raise WisiToken.Parse_Error with Tree.Error_Message
+                    (Node, "action raised exception " & 
Ada.Exceptions.Exception_Name (E) & ": " &
+                       Ada.Exceptions.Exception_Message (E));
                end;
             end if;
          end;
       end Process_Node;
 
+   begin
+      User_Data.Initialize_Actions (Tree);
+      Tree.Process_Tree (Process_Node'Access);
+   end Execute_Actions;
+
+   overriding procedure Execute_Actions
+     (Parser              : in out LR.Parser_No_Recover.Parser;
+      Action_Region_Bytes : in     WisiToken.Buffer_Region := 
WisiToken.Null_Buffer_Region)
+   is
+      use all type WisiToken.Syntax_Trees.User_Data_Access;
+      pragma Unreferenced (Action_Region_Bytes);
    begin
       if Parser.User_Data /= null then
          if Parser.Parsers.Count > 1 then
             raise Syntax_Error with "ambiguous parse; can't execute actions";
          end if;
-
-         declare
-            Parser_State : Parser_Lists.Parser_State renames 
Parser.Parsers.First_State_Ref.Element.all;
-         begin
-            Parser_State.Tree.Set_Parents;
-            Parser.User_Data.Initialize_Actions (Parser_State.Tree);
-            Parser_State.Tree.Process_Tree (Process_Node'Access);
-         end;
-      end if;
-   end Execute_Actions;
-
-   overriding function Tree (Parser : in LR.Parser_No_Recover.Parser) return 
Syntax_Trees.Tree
-   is begin
-      if Parser.Parsers.Count > 1 then
-         raise WisiToken.Parse_Error with "ambigous parse";
-      else
-         return Parser.Parsers.First_State_Ref.Tree;
-      end if;
-   end Tree;
-
-   overriding
-   function Tree_Var_Ref
-     (Parser : aliased in out LR.Parser_No_Recover.Parser)
-     return Syntax_Trees.Tree_Variable_Reference
-   is begin
-      if Parser.Parsers.Count > 1 then
-         raise WisiToken.Parse_Error with "ambigous parse";
-      else
-         return (Element => Parser.Parsers.First_State_Ref.Tree'Access);
       end if;
-   end Tree_Var_Ref;
-
-   overriding function Any_Errors (Parser : in LR.Parser_No_Recover.Parser) 
return Boolean
-   is
-      use all type Ada.Containers.Count_Type;
-      Parser_State : Parser_Lists.Parser_State renames 
Parser.Parsers.First_Constant_State_Ref;
-   begin
-      pragma Assert (Parser_State.Tree.Flushed);
-      return Parser.Parsers.Count > 1 or Parser_State.Errors.Length > 0 or 
Parser.Lexer.Errors.Length > 0;
-   end Any_Errors;
-
-   overriding procedure Put_Errors (Parser : in LR.Parser_No_Recover.Parser)
-   is
-      use Ada.Text_IO;
-
-      Parser_State : Parser_Lists.Parser_State renames 
Parser.Parsers.First_Constant_State_Ref;
-      Descriptor   : WisiToken.Descriptor renames Parser.Trace.Descriptor.all;
-   begin
-      for Item of Parser.Lexer.Errors loop
-         Put_Line
-           (Current_Error,
-            Parser.Lexer.File_Name & ":0:0: lexer unrecognized character at" & 
Buffer_Pos'Image (Item.Char_Pos));
-      end loop;
-
-      for Item of Parser_State.Errors loop
-         case Item.Label is
-         when Action =>
-            declare
-               Token : Base_Token renames Parser.Terminals 
(Parser_State.Tree.First_Shared_Terminal (Item.Error_Token));
-            begin
-               Put_Line
-                 (Current_Error,
-                  Error_Message
-                    (Parser.Lexer.File_Name, Token.Line, Token.Column,
-                     "syntax error: expecting " & Image (Item.Expecting, 
Descriptor) &
-                       ", found '" & Parser.Lexer.Buffer_Text 
(Token.Byte_Region) & "'"));
-            end;
 
-         when Check =>
-            null;
-
-         when Message =>
-            Put_Line (Current_Error, -Item.Msg);
-         end case;
-
-      end loop;
-   end Put_Errors;
+      Execute_Actions (Parser.Tree, Parser.Productions, Parser.User_Data);
+   end Execute_Actions;
 
 end WisiToken.Parse.LR.Parser_No_Recover;
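
The no-recover Parse body above reports problems only through exceptions:
Syntax_Error when every parser reaches the Error verb (Do_Action has already
attached the error to the tree via Add_Error_To_Input), WisiToken.Parse_Error
for an ambiguous parse or for exceeding Table.Max_Parallel, and
SAL.Programmer_Error when Edits is not empty. A minimal caller sketch,
assuming a Parser object and an (ignored) Log_File already exist; everything
else is taken from the code above:

   begin
      Parser.Parse (Log_File);
   exception
   when WisiToken.Syntax_Error =>
      --  Parse errors are recorded in Parser.Tree by Do_Action; report
      --  them however the application prefers, then give up.
      Ada.Text_IO.Put_Line ("syntax error");
   when E : WisiToken.Parse_Error =>
      --  Ambiguous parse, or too many parallel parsers required.
      Ada.Text_IO.Put_Line (Ada.Exceptions.Exception_Message (E));
   end;
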
diff --git a/wisitoken-parse-lr-parser_no_recover.ads 
b/wisitoken-parse-lr-parser_no_recover.ads
index 5e630d2be3..ac5169d8b1 100644
--- a/wisitoken-parse-lr-parser_no_recover.ads
+++ b/wisitoken-parse-lr-parser_no_recover.ads
@@ -1,12 +1,13 @@
 --  Abstract :
 --
---  A generalized LR parser, with no error recovery, no semantic checks.
+--  A generalized LR parser, with no error recovery, no semantic
+--  checks, no incremental parse.
 --
 --  This allows wisi-generate (which uses the generated wisi_grammar)
 --  to not depend on wisitoken-lr-mckenzie_recover, so editing that
 --  does not cause everything to be regenerated/compiled.
 --
---  Copyright (C) 2002, 2003, 2009, 2010, 2013 - 2015, 2017 - 2020 Free 
Software Foundation, Inc.
+--  Copyright (C) 2002, 2003, 2009, 2010, 2013 - 2015, 2017 - 2022 Free 
Software Foundation, Inc.
 --
 --  This file is part of the WisiToken package.
 --
@@ -29,64 +30,39 @@ with WisiToken.Parse.LR.Parser_Lists;
 with WisiToken.Syntax_Trees;
 package WisiToken.Parse.LR.Parser_No_Recover is
 
-   Default_Max_Parallel : constant := 15;
-
    type Parser is new WisiToken.Parse.Base_Parser with record
-      Table       : Parse_Table_Ptr;
-      Shared_Tree : aliased Syntax_Trees.Base_Tree;
-      --  Each parser has its own branched syntax tree, all branched from
-      --  this tree.
-      --
-      --  See WisiToken.LR.Parser_Lists Parser_State for more discussion of
-      --  Shared_Tree.
-
+      Table   : Parse_Table_Ptr;
       Parsers : aliased Parser_Lists.List;
-
-      Max_Parallel         : SAL.Base_Peek_Type;
-      First_Parser_Label   : Integer;
-      Terminate_Same_State : Boolean;
    end record;
 
    overriding procedure Finalize (Object : in out LR.Parser_No_Recover.Parser);
    --  Deep free Object.Table.
 
    procedure New_Parser
-     (Parser               :    out          LR.Parser_No_Recover.Parser;
-      Trace                : not null access WisiToken.Trace'Class;
-      Lexer                : in              WisiToken.Lexer.Handle;
-      Table                : in              Parse_Table_Ptr;
-      User_Data            : in              Syntax_Trees.User_Data_Access;
-      Max_Parallel         : in              SAL.Base_Peek_Type := 
Default_Max_Parallel;
-      First_Parser_Label   : in              Integer            := 1;
-      Terminate_Same_State : in              Boolean            := True);
-
-   overriding procedure Parse (Shared_Parser : aliased in out 
LR.Parser_No_Recover.Parser);
-   --  Attempt a parse. Calls Parser.Lexer.Reset, runs lexer to end of
-   --  input setting Shared_Parser.Terminals, then parses tokens.
-   --
-   --  If a parse error is encountered, raises Syntax_Error.
-   --  Parser.Lexer_Errors and Parsers(*).Errors contain information
-   --  about the errors.
-   --
-   --  For other errors, raises Parse_Error with an appropriate error
-   --  message.
+     (Parser      :    out LR.Parser_No_Recover.Parser;
+      Lexer       : in     WisiToken.Lexer.Handle;
+      Table       : in     Parse_Table_Ptr;
+      Productions : in     Syntax_Trees.Production_Info_Trees.Vector;
+      User_Data   : in     Syntax_Trees.User_Data_Access);
 
-   overriding function Tree (Parser : in LR.Parser_No_Recover.Parser) return 
Syntax_Trees.Tree;
-
-   overriding
-   function Tree_Var_Ref
-     (Parser : aliased in out LR.Parser_No_Recover.Parser)
-     return Syntax_Trees.Tree_Variable_Reference;
-
-   overriding function Any_Errors (Parser : in LR.Parser_No_Recover.Parser) 
return Boolean;
-
-   overriding procedure Put_Errors (Parser : in LR.Parser_No_Recover.Parser);
-   --  Put user-friendly error messages from the parse to
-   --  Ada.Text_IO.Current_Error.
+   overriding procedure Parse
+     (Shared_Parser : in out LR.Parser_No_Recover.Parser;
+      Log_File      : in     Ada.Text_IO.File_Type;
+      Edits         : in     KMN_Lists.List := KMN_Lists.Empty_List;
+      Pre_Edited    : in     Boolean        := False);
+   --  Raises SAL.Programmer_Error if Edits is not empty. Pre_Edited and
+   --  Log_File are ignored.
 
    overriding procedure Execute_Actions
-     (Parser          : in out LR.Parser_No_Recover.Parser;
-      Image_Augmented : in     Syntax_Trees.Image_Augmented := null);
-   --  Execute the grammar actions in Parser.
+     (Parser              : in out LR.Parser_No_Recover.Parser;
+      Action_Region_Bytes : in     WisiToken.Buffer_Region := 
WisiToken.Null_Buffer_Region);
+   --  Action_Region_Bytes is ignored (all nodes always processed).
+
+   procedure Execute_Actions
+     (Tree        : in out Syntax_Trees.Tree;
+      Productions : in     Syntax_Trees.Production_Info_Trees.Vector;
+      User_Data   : in     Syntax_Trees.User_Data_Access);
+   --  Implements Execute_Actions; allows specifying a different tree
+   --  (needed by wisitoken-bnf-generate).
 
 end WisiToken.Parse.LR.Parser_No_Recover;
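
With the slimmed-down spec above, constructing and running this parser takes
just the four New_Parser inputs. A sketch of the full sequence; the names
My_Lexer, My_Table, My_Productions and My_User_Data are placeholders for
objects normally supplied by the generated grammar packages, and only
New_Parser, Parse and Execute_Actions come from the declarations above:

   with Ada.Text_IO;
   with WisiToken.Parse.LR.Parser_No_Recover;
   procedure Run_Parse is
      Parser   : WisiToken.Parse.LR.Parser_No_Recover.Parser;
      Log_File : Ada.Text_IO.File_Type;  --  ignored by this parser variant
   begin
      WisiToken.Parse.LR.Parser_No_Recover.New_Parser
        (Parser,
         Lexer       => My_Lexer,        --  placeholder WisiToken.Lexer.Handle
         Table       => My_Table,        --  placeholder Parse_Table_Ptr
         Productions => My_Productions,  --  placeholder production info vector
         User_Data   => My_User_Data);   --  placeholder User_Data_Access

      Parser.Parse (Log_File);  --  lexes all input, then batch parses;
                                --  raises Syntax_Error on a parse error
      Parser.Execute_Actions;   --  runs post-parse actions found via Productions
   end Run_Parse;
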
diff --git a/wisitoken-parse-lr.adb b/wisitoken-parse-lr.adb
index 27fe85d9cc..7c82f23144 100644
--- a/wisitoken-parse-lr.adb
+++ b/wisitoken-parse-lr.adb
@@ -2,7 +2,7 @@
 --
 --  See spec.
 --
---  Copyright (C) 2013-2015, 2017 - 2020 Free Software Foundation, Inc.
+--  Copyright (C) 2013-2015, 2017 - 2022 Free Software Foundation, Inc.
 --
 --  This file is part of the WisiToken package.
 --
@@ -27,9 +27,10 @@
 
 pragma License (GPL);
 
+with Ada.Characters.Handling;
 with Ada.Exceptions;
-with Ada.Strings.Maps;
 with Ada.Strings.Fixed;
+with Ada.Strings.Maps;
 with Ada.Text_IO;
 with GNATCOLL.Mmap;
 package body WisiToken.Parse.LR is
@@ -55,25 +56,26 @@ package body WisiToken.Parse.LR is
       end case;
    end Image;
 
-   procedure Put (Trace : in out WisiToken.Trace'Class; Item : in 
Parse_Action_Rec)
-   is
-      use Ada.Containers;
-   begin
+   function Trace_Image (Item : in Parse_Action_Rec; Descriptor : in 
WisiToken.Descriptor) return String
+   is begin
       case Item.Verb is
       when Shift =>
-         Trace.Put ("shift and goto state" & State_Index'Image (Item.State), 
Prefix => False);
+         return "shift and goto state" &
+           (if Trace_Parse_No_State_Numbers
+            then " --"
+            else State_Index'Image (Item.State));
 
       when Reduce =>
-         Trace.Put
-           ("reduce" & Count_Type'Image (Item.Token_Count) & " tokens to " &
-              Image (Item.Production.LHS, Trace.Descriptor.all),
-            Prefix => False);
+         return "reduce" & Item.Token_Count'Image  & " tokens to " &
+           Image (Item.Production.LHS, Descriptor);
+
       when Accept_It =>
-         Trace.Put ("accept it", Prefix => False);
+         return "accept it";
+
       when Error =>
-         Trace.Put ("ERROR", Prefix => False);
+         return "ERROR";
       end case;
-   end Put;
+   end Trace_Image;
 
    function Equal (Left, Right : in Parse_Action_Rec) return Boolean
    is
@@ -109,16 +111,21 @@ package body WisiToken.Parse.LR is
       return False;
    end Is_In;
 
-   function Compare (Left, Right : in Token_ID) return SAL.Compare_Result
-   is begin
-      if Left < Right then
-         return SAL.Less;
-      elsif Left = Right then
-         return SAL.Equal;
+   procedure Delete
+     (Container : in out Action_Node;
+      Prev      : in     Parse_Action_Node_Ptr;
+      Current   : in out Parse_Action_Node_Ptr)
+   is
+      To_Delete : Parse_Action_Node_Ptr := Current;
+   begin
+      Current := Current.Next;
+      if Container.Actions = To_Delete then
+         Container.Actions := Current;
       else
-         return SAL.Greater;
+         Prev.Next := Current;
       end if;
-   end Compare;
+      Free (To_Delete);
+   end Delete;
 
    procedure Add
      (List   : in out Action_Arrays.Vector;
@@ -163,6 +170,14 @@ package body WisiToken.Parse.LR is
         Item.Reduce_Count'Image & ")";
    end Strict_Image;
 
+   function Image (Item : in Kernel_Info; Descriptor : in 
WisiToken.Descriptor) return String
+   is begin
+      return "(" & Image (Item.Production, Descriptor) & ", " &
+        Item.Length_After_Dot'Image & ", " &
+        Image (Item.Reduce_Production, Descriptor) & ", " &
+        Item.Reduce_Count'Image & ")";
+   end Image;
+
    function Strict_Image (Item : in Minimal_Action) return String
    is begin
       case Item.Verb is
@@ -182,7 +197,7 @@ package body WisiToken.Parse.LR is
       when Shift =>
          return "Shift " & Image (Item.ID, Descriptor);
       when Reduce =>
-         return "Reduce to " & Image (Item.Production.LHS, Descriptor);
+         return "Reduce" & Item.Token_Count'Image & " tokens to " & Image 
(Item.Production.LHS, Descriptor);
       end case;
    end Image;
 
@@ -210,33 +225,27 @@ package body WisiToken.Parse.LR is
       Symbol          : in     Token_ID;
       Verb            : in     LR.Parse_Action_Verbs;
       Production      : in     Production_ID;
-      RHS_Token_Count : in     Ada.Containers.Count_Type;
-      Semantic_Action : in     WisiToken.Syntax_Trees.Semantic_Action;
-      Semantic_Check  : in     Semantic_Checks.Semantic_Check)
+      RHS_Token_Count : in     Ada.Containers.Count_Type)
    is
       Action : constant Parse_Action_Rec :=
         (case Verb is
-         when Reduce    => (Reduce, Production, Semantic_Action, 
Semantic_Check, RHS_Token_Count),
-         when Accept_It => (Accept_It, Production, Semantic_Action, 
Semantic_Check, RHS_Token_Count),
+         when Reduce    => (Reduce, Production, RHS_Token_Count),
+         when Accept_It => (Accept_It, Production, RHS_Token_Count),
          when others    => raise SAL.Programmer_Error);
    begin
       Add (State.Action_List, Symbol, Action);
    end Add_Action;
 
    procedure Add_Action
-     (State           : in out Parse_State;
-      Symbols         : in     Token_ID_Array;
-      Production      : in     Production_ID;
-      RHS_Token_Count : in     Ada.Containers.Count_Type;
-      Semantic_Action : in     WisiToken.Syntax_Trees.Semantic_Action;
-      Semantic_Check  : in     WisiToken.Semantic_Checks.Semantic_Check)
+     (State             : in out Parse_State;
+      Symbols           : in     Token_ID_Array;
+      Production        : in     Production_ID;
+      RHS_Token_Count   : in     Ada.Containers.Count_Type)
    is begin
       --  We assume WisiToken.BNF.Output_Ada_Common.Duplicate_Reduce is True
       --  for this state; no conflicts, all the same action, Recursive.
       for Symbol of Symbols loop
-         Add_Action
-           (State, Symbol, Reduce, Production, RHS_Token_Count,
-            Semantic_Action, Semantic_Check);
+         Add_Action (State, Symbol, Reduce, Production, RHS_Token_Count);
       end loop;
    end Add_Action;
 
@@ -244,12 +253,9 @@ package body WisiToken.Parse.LR is
      (State             : in out LR.Parse_State;
       Symbol            : in     Token_ID;
       Reduce_Production : in     Production_ID;
-      RHS_Token_Count   : in     Ada.Containers.Count_Type;
-      Semantic_Action   : in     WisiToken.Syntax_Trees.Semantic_Action;
-      Semantic_Check    : in     Semantic_Checks.Semantic_Check)
+      RHS_Token_Count   : in     Ada.Containers.Count_Type)
    is
-      Conflict : constant Parse_Action_Rec :=
-        (Reduce, Reduce_Production, Semantic_Action, Semantic_Check, 
RHS_Token_Count);
+      Conflict : constant Parse_Action_Rec := (Reduce, Reduce_Production, 
RHS_Token_Count);
 
       Ref : constant Action_Arrays.Find_Reference_Constant_Type := 
State.Action_List.Find_Constant (Symbol);
 
@@ -270,6 +276,65 @@ package body WisiToken.Parse.LR is
       State.Goto_List.Insert ((Symbol, To_State));
    end Add_Goto;
 
+   procedure Set_McKenzie_Options (Param : in out McKenzie_Param_Type; Config 
: in String)
+   is
+      use Ada.Characters.Handling;
+      use Ada.Strings.Fixed;
+      Name_First : Integer := Config'First;
+      Name_Last  : Integer;
+
+      Value_First : Integer;
+      Value_Last  : Integer;
+   begin
+      loop
+         Name_Last := Index (Config, "=", Name_First);
+         exit when Name_Last = 0;
+
+         Value_First := Name_Last + 1;
+         Name_Last   := Name_Last - 1;
+         Value_Last  := Index (Config, " ", Value_First);
+         if Value_Last = 0 then
+            Value_Last := Config'Last;
+         end if;
+         declare
+            Name : constant String := To_Lower (Config (Name_First .. 
Name_Last));
+
+            function Get_Value return Integer
+            is begin
+               return Integer'Value (Config (Value_First .. Value_Last));
+            exception
+            when Constraint_Error =>
+               raise User_Error with "expecting integer value, found '" &
+                 Config (Value_First .. Value_Last) & "'";
+            end Get_Value;
+
+            Value : constant Integer := Get_Value;
+         begin
+            --  Trace var alphabetical order
+            if Name = "check_delta_limit" or
+              Name = "check_delta"
+            then
+               Param.Check_Delta_Limit := Value;
+
+            elsif Name = "check_limit" then
+               Param.Check_Limit := Syntax_Trees.Sequential_Index (Value);
+
+            elsif Name = "enqueue_limit" then
+               Param.Enqueue_Limit := Value;
+
+            elsif Name = "zombie_limit" then
+               Param.Zombie_Limit := Value;
+
+            else
+               raise User_Error with "expecting McKenzie option name, found '" 
& Config (Name_First .. Name_Last) & "'";
+            end if;
+         end;
+
+         Name_First := Value_Last + 1;
+         exit when Name_First > Config'Last;
+      end loop;
+   end Set_McKenzie_Options;
+
    function Goto_For
      (Table : in Parse_Table;
       State : in State_Index;
@@ -301,6 +366,61 @@ package body WisiToken.Parse.LR is
       return Ref.Actions;
    end Action_For;
 
+   function Shift_State (Action_List : in Parse_Action_Node_Ptr) return 
State_Index
+   is begin
+      --  There can be only one shift action, and it is always first.
+      return Action_List.Item.State;
+   end Shift_State;
+
+   procedure Undo_Reduce
+     (Tree      : in out Syntax_Trees.Tree;
+      Table     : in     Parse_Table;
+      Stream    : in     Syntax_Trees.Stream_ID;
+      User_Data : in     Syntax_Trees.User_Data_Access_Constant)
+   is
+      --  We can't move this into Syntax_Trees, because we need Table to set
+      --  the stream element states.
+      use Syntax_Trees;
+   begin
+      if Tree.Has_Error (Tree.Get_Node (Stream, Tree.Peek (Stream))) then
+         --  Move the errors to the first terminal, so they are not lost.
+         declare
+            Ref : Stream_Node_Parents := Tree.To_Stream_Node_Parents
+              (Tree.To_Rooted_Ref (Stream, Tree.Peek (Stream)));
+
+            New_Errors : Error_Data_Lists.List;
+         begin
+            for Err of Tree.Error_List (Ref.Ref.Node) loop
+               New_Errors.Append (To_Message (Err, Tree, Ref.Ref.Node));
+            end loop;
+
+            Tree.First_Terminal (Ref, Following => False);
+            if Ref.Ref.Node = Invalid_Node_Access then
+               --  So far, we never put an error on an empty nonterm; we just 
delete
+               --  it.
+               raise SAL.Programmer_Error with "undo_reduce error on empty 
nonterm";
+            end if;
+            Tree.Add_Errors (Ref, New_Errors, User_Data);
+         end;
+      end if;
+
+      declare
+         Nonterm    : constant Node_Access := Tree.Pop (Stream);
+         Prev_State : State_Index          := Tree.State (Stream);
+      begin
+         for Child of Tree.Children (Nonterm) loop
+            Tree.Clear_Parent (Child, Clear_Children => Stream = 
Tree.Shared_Stream);
+
+            if Is_Terminal (Tree.ID (Child), Tree.Lexer.Descriptor.all) then
+               Prev_State := Shift_State (Action_For (Table, Prev_State, 
Tree.ID (Child)));
+            else
+               Prev_State := Goto_For (Table, Prev_State, Tree.ID (Child));
+            end if;
+            Tree.Push (Stream, Child, Prev_State);
+         end loop;
+      end;
+   end Undo_Reduce;
+
    function Expecting (Table : in Parse_Table; State : in State_Index) return 
Token_ID_Set
    is
       Result : Token_ID_Set := (Table.First_Terminal .. Table.Last_Terminal => 
False);
@@ -336,11 +456,7 @@ package body WisiToken.Parse.LR is
       Free (Table);
    end Free_Table;
 
-   function Get_Text_Rep
-     (File_Name      : in String;
-      McKenzie_Param : in McKenzie_Param_Type;
-      Actions        : in Semantic_Action_Array_Arrays.Vector)
-     return Parse_Table_Ptr
+   function Get_Text_Rep (File_Name : in String) return Parse_Table_Ptr
    is
       use Ada.Text_IO;
 
@@ -371,7 +487,7 @@ package body WisiToken.Parse.LR is
             --  Buffer_Last on newline for Check_New_Line.
             Buffer_Last := Buffer_Last + 1;
          else
-            raise SAL.Programmer_Error with Error_Message
+            raise SAL.Programmer_Error with WisiToken.Error_Message
               (File_Name, 1, Ada.Text_IO.Count (Buffer_Last),
                "expecting semicolon, found '" & Buffer (Buffer_Last) & "'");
          end if;
@@ -392,7 +508,7 @@ package body WisiToken.Parse.LR is
                Buffer_Last := Buffer_Last + 1;
             end if;
          else
-            raise SAL.Programmer_Error with Error_Message
+            raise SAL.Programmer_Error with WisiToken.Error_Message
               (File_Name, 1, Ada.Text_IO.Count (Buffer_Last),
                "expecting new_line, found '" & Buffer (Buffer_Last) & "'");
          end if;
@@ -421,7 +537,7 @@ package body WisiToken.Parse.LR is
       procedure Raise_Gen_Next_Value_Constraint_Error (Name : String; Region : 
Buffer_Region)
       is begin
          --  Factored out from Gen_Next_Value to make Inline efficient.
-         raise SAL.Programmer_Error with Error_Message
+         raise SAL.Programmer_Error with WisiToken.Error_Message
            (File_Name, 1, Ada.Text_IO.Count (Region.First),
             "expecting " & Name & ", found '" & Buffer (Region.First .. 
Region.Last) & "'");
       end Raise_Gen_Next_Value_Constraint_Error;
@@ -446,7 +562,6 @@ package body WisiToken.Parse.LR is
       function Next_Token_ID is new Gen_Next_Value (Token_ID, "Token_ID");
       function Next_Integer is new Gen_Next_Value (Integer, "Integer");
       function Next_Parse_Action_Verbs is new Gen_Next_Value 
(Parse_Action_Verbs, "Parse_Action_Verbs");
-      function Next_Boolean is new Gen_Next_Value (Boolean, "Boolean");
       function Next_Count_Type is new Gen_Next_Value 
(Ada.Containers.Count_Type, "Count_Type");
    begin
       File            := GNATCOLL.Mmap.Open_Read (File_Name);
@@ -471,8 +586,6 @@ package body WisiToken.Parse.LR is
       begin
          Check_New_Line;
 
-         Table.McKenzie_Param := McKenzie_Param;
-
          for State of Table.States loop
             declare
                Actions_Done : Boolean := False;
@@ -504,18 +617,6 @@ package body WisiToken.Parse.LR is
                            Node_J.Item.State := Next_State_Index;
 
                         when Reduce | Accept_It =>
-                           if Next_Boolean then
-                              Node_J.Item.Action := Actions
-                                
(Node_J.Item.Production.LHS)(Node_J.Item.Production.RHS).Action;
-                           else
-                              Node_J.Item.Action := null;
-                           end if;
-                           if Next_Boolean then
-                              Node_J.Item.Check := Actions
-                                
(Node_J.Item.Production.LHS)(Node_J.Item.Production.RHS).Check;
-                           else
-                              Node_J.Item.Check := null;
-                           end if;
                            Node_J.Item.Token_Count := Next_Count_Type;
 
                         when Error =>
@@ -638,109 +739,31 @@ package body WisiToken.Parse.LR is
       raise;
 
    when E : others =>
-      raise SAL.Programmer_Error with Error_Message
+      raise SAL.Programmer_Error with WisiToken.Error_Message
         (File_Name, 1, Ada.Text_IO.Count (Buffer_Last),
          Ada.Exceptions.Exception_Name (E) & ": " & 
Ada.Exceptions.Exception_Message (E));
    end Get_Text_Rep;
 
-   function Compare (Left, Right : in Insert_Delete_Op) return 
SAL.Compare_Result
-   is
-      Left_Token_Index : constant WisiToken.Token_Index :=
-        (case Insert_Delete_Op_Label'(Left.Op) is
-         when Insert => Left.Ins_Token_Index,
-         when Delete => Left.Del_Token_Index);
-      Right_Token_Index : constant WisiToken.Token_Index :=
-        (case Insert_Delete_Op_Label'(Right.Op) is
-         when Insert => Right.Ins_Token_Index,
-         when Delete => Right.Del_Token_Index);
-   begin
-      if Left_Token_Index < Right_Token_Index then
-         return SAL.Less;
-      elsif Left_Token_Index = Right_Token_Index then
-         return SAL.Equal;
-      else
-         return SAL.Greater;
-      end if;
-   end Compare;
-
-   function Equal (Left : in Config_Op; Right : in Insert_Op) return Boolean
-   is begin
-      return Left.Op = Insert and then
-        Left.Ins_ID = Right.Ins_ID and then
-        Left.Ins_Token_Index = Right.Ins_Token_Index;
-   end Equal;
-
-   function None (Ops : aliased in Config_Op_Arrays.Vector; Op : in 
Config_Op_Label) return Boolean
+   function Stack_Has
+     (Tree  : in Syntax_Trees.Tree;
+      Stack : in Recover_Stacks.Stack;
+      ID    : in Token_ID)
+     return Boolean
    is
-      use Config_Op_Arrays, Config_Op_Array_Refs;
+      use Recover_Stacks;
    begin
-      for I in First_Index (Ops) .. Last_Index (Ops) loop
-         if Constant_Ref (Ops, I).Op = Op then
-            return False;
+      for I in 1 .. Depth (Stack) loop
+         if Tree.Element_ID (Peek (Stack, I).Token) = ID then
+            return True;
          end if;
       end loop;
-      return True;
-   end None;
-
-   function None_Since_FF (Ops : aliased in Config_Op_Arrays.Vector; Op : in 
Config_Op_Label) return Boolean
-   is
-      use Config_Op_Arrays, Config_Op_Array_Refs;
-   begin
-      for I in reverse First_Index (Ops) .. Last_Index (Ops) loop
-         declare
-            O : Config_Op renames Constant_Ref (Ops, I);
-         begin
-            exit when O.Op = Fast_Forward;
-            if O.Op = Op then
-               return False;
-            end if;
-         end;
-      end loop;
-      return True;
-   end None_Since_FF;
-
-   function Only_Since_FF (Ops : aliased in Config_Op_Arrays.Vector; Op : in 
Config_Op_Label) return Boolean
-   is
-      use Config_Op_Arrays, Config_Op_Array_Refs;
-      use all type Ada.Containers.Count_Type;
-   begin
-      if Length (Ops) = 0 or else Constant_Ref (Ops, Last_Index (Ops)).Op /= 
Op then
-         return False;
-      else
-         for I in reverse First_Index (Ops) .. Last_Index (Ops) loop
-            declare
-               O : Config_Op renames Constant_Ref (Ops, I);
-            begin
-               exit when O.Op = Fast_Forward;
-               if O.Op /= Op then
-                  return False;
-               end if;
-            end;
-         end loop;
-         return True;
-      end if;
-   end Only_Since_FF;
-
-   function Any (Ops : aliased in Config_Op_Arrays.Vector; Op : in 
Config_Op_Label) return Boolean
-   is
-      use Config_Op_Arrays, Config_Op_Array_Refs;
-   begin
-      for I in First_Index (Ops) .. Last_Index (Ops) loop
-         declare
-            O : Config_Op renames Constant_Ref (Ops, I);
-         begin
-            if O.Op = Op then
-               return True;
-            end if;
-         end;
-      end loop;
       return False;
-   end Any;
+   end Stack_Has;
 
    function Valid_Tree_Indices (Stack : in Recover_Stacks.Stack; Depth : in 
SAL.Base_Peek_Type) return Boolean
    is begin
       for I in 1 .. Depth loop
-         if Stack.Peek (I).Tree_Index = Invalid_Node_Index then
+         if Stack.Peek (I).Token.Virtual then
             return False;
          end if;
       end loop;
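
Set_McKenzie_Options above parses a string of space-separated name=value
pairs; the names it accepts are check_delta_limit (or check_delta),
check_limit, enqueue_limit and zombie_limit, and anything else (or a
non-integer value) raises User_Error. A usage sketch, with My_Param standing
in for an existing McKenzie_Param_Type object:

   WisiToken.Parse.LR.Set_McKenzie_Options
     (My_Param, "check_limit=4 check_delta_limit=2000 enqueue_limit=58000 zombie_limit=4");
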
diff --git a/wisitoken-parse-lr.ads b/wisitoken-parse-lr.ads
index d07fefdccf..c6bc218455 100644
--- a/wisitoken-parse-lr.ads
+++ b/wisitoken-parse-lr.ads
@@ -9,7 +9,7 @@
 --
 --  See wisitoken.ads
 --
---  Copyright (C) 2002, 2003, 2009, 2010, 2013 - 2015, 2017 - 2021 Free 
Software Foundation, Inc.
+--  Copyright (C) 2002, 2003, 2009, 2010, 2013 - 2015, 2017 - 2022 Free 
Software Foundation, Inc.
 --
 --  This file is part of the WisiToken package.
 --
@@ -34,26 +34,35 @@
 
 pragma License (Modified_GPL);
 
-with Ada.Containers.Indefinite_Doubly_Linked_Lists;
 with Ada.Unchecked_Deallocation;
 with SAL.Gen_Array_Image;
+with SAL.Gen_Bounded_Definite_Doubly_Linked_Lists.Gen_Image_Aux;
 with SAL.Gen_Bounded_Definite_Stacks.Gen_Image_Aux;
-with SAL.Gen_Bounded_Definite_Vectors.Gen_Image_Aux;
-with SAL.Gen_Bounded_Definite_Vectors.Gen_Refs;
 with SAL.Gen_Unbounded_Definite_Min_Heaps_Fibonacci;
 with SAL.Gen_Unbounded_Definite_Vectors_Sorted;
-with System.Multiprocessors;
-with WisiToken.Semantic_Checks;
 with WisiToken.Syntax_Trees;
 package WisiToken.Parse.LR is
+   use all type WisiToken.Syntax_Trees.Node_Access;
+   use all type WisiToken.Syntax_Trees.Base_Sequential_Index;
    use all type SAL.Base_Peek_Type;
 
    type All_Parse_Action_Verbs is (Pause, Shift, Reduce, Accept_It, Error);
    subtype Parse_Action_Verbs is All_Parse_Action_Verbs range Shift .. Error;
    subtype Minimal_Verbs is All_Parse_Action_Verbs range Shift .. Reduce;
+   subtype Conflict_Parse_Actions is Parse_Action_Verbs range Shift .. 
Accept_It;
    --  Pause is only used for error recovery, to allow parallel parsers
    --  to re-sync on the same input terminal.
 
+   --  WORKAROUND: GNAT Community 2020 with -gnat2020 S'Image outputs
+   --  integer when S is a subtype.
+   function Image (Item : in All_Parse_Action_Verbs) return String
+   is (case Item is
+       when Pause     => "PAUSE",
+       when Shift     => "SHIFT",
+       when Reduce    => "REDUCE",
+       when Accept_It => "ACCEPT_IT",
+       when Error     => "ERROR");
+
    subtype Token_ID_Array_1_3 is Token_ID_Array (1 .. 3);
    --  For Language_Matching_Begin_Tokens.
 
@@ -69,9 +78,7 @@ package WisiToken.Parse.LR is
 
       when Reduce | Accept_It =>
          --  Production.LHS is the result nonterm
-         Action      : WisiToken.Syntax_Trees.Semantic_Action   := null;
-         Check       : WisiToken.Semantic_Checks.Semantic_Check := null;
-         Token_Count : Ada.Containers.Count_Type                := 0;
+         Token_Count : Ada.Containers.Count_Type := 0;
 
       when Error =>
          null;
@@ -83,8 +90,9 @@ package WisiToken.Parse.LR is
    function Image (Item : in Parse_Action_Rec; Descriptor : in 
WisiToken.Descriptor) return String;
    --  Ada aggregate syntax, leaving out Action, Check in reduce; for debug 
output
 
-   procedure Put (Trace : in out WisiToken.Trace'Class; Item : in 
Parse_Action_Rec);
-   --  Put a line for Item in parse trace format, with no prefix.
+   function Trace_Image (Item : in Parse_Action_Rec; Descriptor : in 
WisiToken.Descriptor) return String;
+   --  Used in parser trace, for compatibility with existing unit tests.
+   --  Respects Trace_Parse_No_State_Numbers.
 
    function Equal (Left, Right : in Parse_Action_Rec) return Boolean;
    --  Ignore items not used by the canonical shift-reduce algorithm.
@@ -103,12 +111,24 @@ package WisiToken.Parse.LR is
 
    type Action_Node is record
       Symbol  : Token_ID := Invalid_Token_ID; -- ignored if Action is Error
-      Actions : Parse_Action_Node_Ptr;
+      Actions : Parse_Action_Node_Ptr := null;
    end record;
 
+   procedure Delete
+     (Container : in out Action_Node;
+      Prev      : in     Parse_Action_Node_Ptr;
+      Current   : in out Parse_Action_Node_Ptr);
+   --  Delete Current from an action list. Container.Actions is the
+   --  root of the list; updated as needed. Prev is the previous element
+   --  in the list; null if none. Prev.Next is updated to
+   --  Current.Next. Current is updated to Current.Next.
+
    function To_Key (Item : in Action_Node) return Token_ID is (Item.Symbol);
 
-   function Compare (Left, Right : in Token_ID) return SAL.Compare_Result;
+   function Compare (Left, Right : in Token_ID) return SAL.Compare_Result
+   is (if Left < Right then SAL.Less
+       elsif Left = Right then SAL.Equal
+       else SAL.Greater);
 
    package Action_Arrays is new SAL.Gen_Unbounded_Definite_Vectors_Sorted
      (Action_Node, Token_ID, To_Key, Compare, Default_Element => (others => 
<>));
@@ -146,6 +166,10 @@ package WisiToken.Parse.LR is
    end record;
 
    function Strict_Image (Item : in Kernel_Info) return String;
+   --  Ada positional aggregate, for code generation
+
+   function Image (Item : in Kernel_Info; Descriptor : in 
WisiToken.Descriptor) return String;
+   --  For debug
 
    type Kernel_Info_Array is array (Ada.Containers.Count_Type range <>) of 
Kernel_Info;
    package Kernel_Info_Arrays is new SAL.Gen_Unbounded_Definite_Vectors
@@ -209,18 +233,14 @@ package WisiToken.Parse.LR is
       Symbol          : in     Token_ID;
       Verb            : in     Parse_Action_Verbs;
       Production      : in     Production_ID;
-      RHS_Token_Count : in     Ada.Containers.Count_Type;
-      Semantic_Action : in     WisiToken.Syntax_Trees.Semantic_Action;
-      Semantic_Check  : in     WisiToken.Semantic_Checks.Semantic_Check);
+      RHS_Token_Count : in     Ada.Containers.Count_Type);
    --  Add a Reduce or Accept_It action to tail of State action list.
 
    procedure Add_Action
      (State           : in out Parse_State;
       Symbols         : in     Token_ID_Array;
       Production      : in     Production_ID;
-      RHS_Token_Count : in     Ada.Containers.Count_Type;
-      Semantic_Action : in     WisiToken.Syntax_Trees.Semantic_Action;
-      Semantic_Check  : in     WisiToken.Semantic_Checks.Semantic_Check);
+      RHS_Token_Count : in     Ada.Containers.Count_Type);
    --  Add duplicate Reduce actions, and final Error action, to tail of
    --  State action list.
 
@@ -228,9 +248,7 @@ package WisiToken.Parse.LR is
      (State             : in out Parse_State;
       Symbol            : in     Token_ID;
       Reduce_Production : in     Production_ID;
-      RHS_Token_Count   : in     Ada.Containers.Count_Type;
-      Semantic_Action   : in     WisiToken.Syntax_Trees.Semantic_Action;
-      Semantic_Check    : in     WisiToken.Semantic_Checks.Semantic_Check);
+      RHS_Token_Count   : in     Ada.Containers.Count_Type);
    --  Add a Reduce conflict to State.
 
    procedure Add_Goto
@@ -252,7 +270,7 @@ package WisiToken.Parse.LR is
       --  Cost of operations on config stack, input.
 
       Minimal_Complete_Cost_Delta : Integer;
-      --  Reduction in cost due to using Minimal_Complete_Action.
+      --  Added to cost when using Minimal_Complete_Action; typically negative.
 
       Matching_Begin : Integer;
       --  Cost of Matching_Begin strategy (applied once, independent of
@@ -265,11 +283,16 @@ package WisiToken.Parse.LR is
       --  Cost of ignoring a semantic check failure. Should be at least the
       --  cost of a typical fix for such a failure.
 
-      Task_Count : System.Multiprocessors.CPU_Range;
-      --  Number of parallel tasks during recovery. If 0, use
-      --  System.Multiprocessors.Number_Of_CPUs - 1.
+      Zombie_Limit : Positive;
+      --  Terminal tokens to wait before terminating parser that encountered
+      --  an error. See test_mckenzie_recover.adb Revive_Zombie for an example
+      --  of why this is not hard-coded at 0. Setting it the same as
+      --  Check_Limit is often a good choice.
+
+      Check_Limit : Syntax_Trees.Sequential_Index;
+      --  Max count of shared tokens to parse ahead when checking a
+      --  configuration.
 
-      Check_Limit       : Token_Index; -- max tokens to parse ahead when 
checking a configuration.
       Check_Delta_Limit : Natural;     -- max configs checked, delta over 
successful parser.
       Enqueue_Limit     : Natural;     -- max configs enqueued.
    end record;
@@ -287,11 +310,15 @@ package WisiToken.Parse.LR is
       Fast_Forward                => 0,
       Matching_Begin              => 0,
       Ignore_Check_Fail           => 0,
-      Task_Count                  => System.Multiprocessors.CPU_Range'Last,
+      Zombie_Limit                => 4,
       Check_Limit                 => 4,
       Check_Delta_Limit           => Natural'Last,
       Enqueue_Limit               => Natural'Last);
 
+   procedure Set_McKenzie_Options (Param : in out McKenzie_Param_Type; Config 
: in String);
+   --  Set options from Config. Config contains space-separated name=value
+   --  pairs. See body for exact names.
+
    type Parse_Table
      (State_First       : State_Index;
       State_Last        : State_Index;
@@ -301,57 +328,56 @@ package WisiToken.Parse.LR is
       Last_Nonterminal  : Token_ID)
      is tagged
    record
-      States         : Parse_State_Array (State_First .. State_Last);
-      Error_Action   : Parse_Action_Node_Ptr;
-      McKenzie_Param : McKenzie_Param_Type (First_Terminal, Last_Terminal, 
First_Nonterminal, Last_Nonterminal);
+      States                : Parse_State_Array (State_First .. State_Last);
+      Error_Action          : Parse_Action_Node_Ptr;
+      Error_Recover_Enabled : Boolean;
+      McKenzie_Param        : McKenzie_Param_Type (First_Terminal, 
Last_Terminal, First_Nonterminal, Last_Nonterminal);
+      Max_Parallel          : SAL.Base_Peek_Type := 15;
    end record;
 
    function Goto_For
      (Table : in Parse_Table;
       State : in State_Index;
       ID    : in Token_ID)
-     return Unknown_State_Index;
+     return Unknown_State_Index
+   with Pre => ID in Table.First_Nonterminal .. Table.Last_Nonterminal;
    --  Return next state after reducing stack by nonterminal ID;
    --  Unknown_State if none (only possible during error recovery).
-   --  Second form allows retrieving Production.
 
    function Action_For
      (Table : in Parse_Table;
       State : in State_Index;
       ID    : in Token_ID)
      return Parse_Action_Node_Ptr
-   with Post => Action_For'Result /= null;
+   with Pre => ID in Table.First_Terminal .. Table.Last_Terminal,
+     Post => Action_For'Result /= null;
    --  Return the action for State, terminal ID.
 
-   function Expecting (Table : in Parse_Table; State : in State_Index) return 
Token_ID_Set;
+   function Shift_State (Action_List : in Parse_Action_Node_Ptr) return 
State_Index;
+   --  Return State from the shift action in Action_List.
+
+   procedure Undo_Reduce
+     (Tree      : in out Syntax_Trees.Tree;
+      Table     : in     Parse_Table;
+      Stream    : in     Syntax_Trees.Stream_ID;
+      User_Data : in     Syntax_Trees.User_Data_Access_Constant);
+   --  Undo reduction of nonterm at Stream.Stack_Top; Stack_Top is then
+   --  the last Child of the nonterm.
+   --
+   --  If Stream.Stack_Top has an error, it is moved to the first terminal.
+   --
+   --  This duplicates Parser_Lists.Undo_Reduce; that is used by the main
+   --  parser when there is a Parser_State; this is used by Edit_Tree and
error recover when there is not.
 
-   function McKenzie_Defaulted (Table : in Parse_Table) return Boolean is
-     --  We can't use Table.McKenzie_Param = Default_McKenzie_Param here,
-     --  because the discriminants are different.
-     (Table.McKenzie_Param.Check_Limit = Default_McKenzie_Param.Check_Limit and
-        Table.McKenzie_Param.Check_Delta_Limit = 
Default_McKenzie_Param.Check_Delta_Limit and
-        Table.McKenzie_Param.Enqueue_Limit = 
Default_McKenzie_Param.Enqueue_Limit);
+   function Expecting (Table : in Parse_Table; State : in State_Index) return 
Token_ID_Set;
 
    type Parse_Table_Ptr is access Parse_Table;
    procedure Free_Table (Table : in out Parse_Table_Ptr);
 
-   type Semantic_Action is record
-      Action : WisiToken.Syntax_Trees.Semantic_Action := null;
-      Check  : WisiToken.Semantic_Checks.Semantic_Check := null;
-   end record;
-
-   package Semantic_Action_Arrays is new SAL.Gen_Unbounded_Definite_vectors 
(Natural, Semantic_Action, (others => <>));
-   package Semantic_Action_Array_Arrays is new 
SAL.Gen_Unbounded_Definite_Vectors
-     (Token_ID, Semantic_Action_Arrays.Vector, 
Semantic_Action_Arrays.Empty_Vector);
-
-   function Get_Text_Rep
-     (File_Name      : in String;
-      McKenzie_Param : in McKenzie_Param_Type;
-      Actions        : in Semantic_Action_Array_Arrays.Vector)
-     return Parse_Table_Ptr;
+   function Get_Text_Rep (File_Name : in String) return Parse_Table_Ptr;
    --  Read machine-readable text format of states (as output by
-   --  WisiToken.Generate.LR.Put_Text_Rep) from file File_Name. Result
-   --  has actions, checks from Productions.
+   --  WisiToken.Generate.LR.Put_Text_Rep) from file File_Name.
 
    ----------
    --  For McKenzie_Recover. Declared here because Parser_Lists needs
@@ -362,218 +388,50 @@ package WisiToken.Parse.LR is
    --  Undo_Reduce, which is only done on nonterms reduced by the main
    --  parser, not virtual nonterms produced by recover.
 
-   package Fast_Token_ID_Arrays is new SAL.Gen_Bounded_Definite_Vectors
-     (SAL.Peek_Type, Token_ID, Default_Element => Invalid_Token_ID, Capacity 
=> 20);
-
-   No_Insert_Delete : constant SAL.Base_Peek_Type := 0;
-
-   function Image
-     (Index      : in SAL.Peek_Type;
-      Tokens     : in Fast_Token_ID_Arrays.Vector;
-      Descriptor : in WisiToken.Descriptor)
-     return String
-     is (SAL.Peek_Type'Image (Index) & ":" & SAL.Peek_Type'Image 
(Fast_Token_ID_Arrays.Last_Index (Tokens)) & ":" &
-           Image (Fast_Token_ID_Arrays.Element (Tokens, Index), Descriptor));
-
-   type Config_Op_Label is (Fast_Forward, Undo_Reduce, Push_Back, Insert, 
Delete);
-   subtype Insert_Delete_Op_Label is Config_Op_Label range Insert .. Delete;
-   --  Fast_Forward is a placeholder to mark a fast_forward parse; that
-   --  resets what operations are allowed to be done on a config.
-   --
-   --  Undo_Reduce is the inverse of Reduce.
-   --
-   --  Push_Back pops the top stack item, and moves the input stream
-   --  pointer back to the first shared_terminal contained by that item.
-   --
-   --  Insert inserts a new token in the token input stream, before the
-   --  given point in Terminals.
-   --
-   --  Delete deletes one item from the token input stream, at the given
-   --  point.
-
-   type Config_Op (Op : Config_Op_Label := Fast_Forward) is record
-      --  We store enough information to perform the operation on the main
-      --  parser stack and input stream when the config is the result
-      --  of a successful recover.
-
-      case Op is
-      when Fast_Forward =>
-         FF_Token_Index : WisiToken.Token_Index;
-         --  Config.Current_Shared_Token after the operation is done; the last
-         --  token shifted.
-
-      when Undo_Reduce =>
-         Nonterm : Token_ID;
-         --  The nonterminal popped off the stack.
-
-         Token_Count : Ada.Containers.Count_Type;
-         --  The number of tokens pushed on the stack.
-
-      when Push_Back =>
-         PB_ID : Token_ID;
-         --  The nonterm ID popped off the stack.
-
-         PB_Token_Index : WisiToken.Base_Token_Index;
-         --  Config.Current_Shared_Token after
-         --  the operation is done. If the token is empty, Token_Index is
-         --  Invalid_Token_Index.
-
-      when Insert =>
-         Ins_ID : Token_ID;
-         --  The token ID inserted.
-
-         Ins_Token_Index : WisiToken.Base_Token_Index;
-         --  Ins_ID is inserted before Token_Index.
-
-      when Delete =>
-         Del_ID : Token_ID;
-         --  The token ID deleted.
-
-         Del_Token_Index : WisiToken.Base_Token_Index;
-         --  Token at Token_Index is deleted.
-
-      end case;
-   end record;
-   subtype Insert_Delete_Op is Config_Op with Dynamic_Predicate => 
(Insert_Delete_Op.Op in Insert_Delete_Op_Label);
-   subtype Insert_Op is Config_Op with Dynamic_Predicate => (Insert_Op.Op = 
Insert);
-
-   function Token_Index (Op : in Insert_Delete_Op) return WisiToken.Token_Index
-     is (case Insert_Delete_Op_Label'(Op.Op) is
-         when Insert => Op.Ins_Token_Index,
-         when Delete => Op.Del_Token_Index);
-
-   function ID (Op : in Insert_Delete_Op) return WisiToken.Token_ID
-     is (case Insert_Delete_Op_Label'(Op.Op) is
-         when Insert => Op.Ins_ID,
-         when Delete => Op.Del_ID);
-
-   function Compare (Left, Right : in Insert_Delete_Op) return 
SAL.Compare_Result;
-   --  Compare token_index.
-
-   function Equal (Left : in Config_Op; Right : in Insert_Op) return Boolean;
-   --  Ignore state, stack_depth
-
-   package Config_Op_Arrays is new SAL.Gen_Bounded_Definite_Vectors
-     (Positive_Index_Type, Config_Op, Default_Element => (Fast_Forward, 
WisiToken.Token_Index'First), Capacity => 80);
-   --  Using a fixed size vector significantly speeds up
-   --  McKenzie_Recover. The capacity is determined by the maximum number
-   --  of repair operations, which is limited by the cost_limit McKenzie
-   --  parameter plus an arbitrary number from the language-specific
-   --  repairs; in practice, a capacity of 80 is enough so far. If a
-   --  config does hit that limit, it is abandoned; some other config is
-   --  likely to be cheaper.
-
-   package Config_Op_Array_Refs is new Config_Op_Arrays.Gen_Refs;
-
-   function Config_Op_Image (Item : in Config_Op; Descriptor : in 
WisiToken.Descriptor) return String
-     is ("(" & Config_Op_Label'Image (Item.Op) & ", " &
-           (case Item.Op is
-            when Fast_Forward => WisiToken.Token_Index'Image 
(Item.FF_Token_Index),
-            when Undo_Reduce => Image (Item.Nonterm, Descriptor) & "," &
-                 Ada.Containers.Count_Type'Image (Item.Token_Count),
-            when Push_Back => Image (Item.PB_ID, Descriptor) & "," &
-                 WisiToken.Token_Index'Image (Item.PB_Token_Index),
-            when Insert => Image (Item.Ins_ID, Descriptor) & "," &
-                 WisiToken.Token_Index'Image (Item.Ins_Token_Index),
-            when Delete => Image (Item.Del_ID, Descriptor) & "," &
-                 WisiToken.Token_Index'Image (Item.Del_Token_Index))
-           & ")");
-
-   function Image (Item : in Config_Op; Descriptor : in WisiToken.Descriptor) 
return String
-     renames Config_Op_Image;
-
-   function Config_Op_Array_Image is new Config_Op_Arrays.Gen_Image_Aux 
(WisiToken.Descriptor, Image);
-   function Image (Item : in Config_Op_Arrays.Vector; Descriptor : in 
WisiToken.Descriptor) return String
-     renames Config_Op_Array_Image;
-
-   function None (Ops : aliased in Config_Op_Arrays.Vector; Op : in 
Config_Op_Label) return Boolean;
-   --  True if Ops contains no Op.
-
-   function None_Since_FF (Ops : aliased in Config_Op_Arrays.Vector; Op : in 
Config_Op_Label) return Boolean;
-   --  True if Ops contains no Op after the last Fast_Forward (or ops.first, if
-   --  no Fast_Forward).
-
-   function Only_Since_FF (Ops : aliased in Config_Op_Arrays.Vector; Op : in 
Config_Op_Label) return Boolean;
-   --  True if Ops contains only Op (at least one) after the last Fast_Forward 
(or ops.first, if
-   --  no Fast_Forward).
-
-   function Any (Ops : aliased in Config_Op_Arrays.Vector; Op : in 
Config_Op_Label) return Boolean;
-   --  True if Ops contains at least one Op.
-
-   type Recover_Op (Op : Insert_Delete_Op_Label := Insert) is record
-      --  Add Ins_Tree_Node to Config_Op info, set when item is
-      --  parsed; used to create user augmented token.
-
-      case Op is
-      when Insert =>
-         Ins_ID : Token_ID := Invalid_Token_ID;
-         --  The token ID inserted.
-
-         Ins_Token_Index : Base_Token_Index := Invalid_Token_Index;
-         --  Ins_ID is inserted before Token_Index.
-
-         Ins_Tree_Node : Node_Index := Invalid_Node_Index;
-
-      when Delete =>
-         Del_ID : Token_ID;
-         --  The token ID deleted.
-
-         Del_Token_Index : Base_Token_Index;
-         --  Token at Token_Index is deleted.
-
-      end case;
-   end record;
-
-   package Recover_Op_Arrays is new SAL.Gen_Bounded_Definite_Vectors
-     (Positive_Index_Type, Recover_Op, Capacity => 80, Default_Element => 
(others => <>));
-
-   package Recover_Op_Array_Refs is new Recover_Op_Arrays.Gen_Refs;
-
-   function Image (Item : in Recover_Op; Descriptor : in WisiToken.Descriptor) 
return String
-     is ("(" & Item.Op'Image & ", " &
-           (case Item.Op is
-            when Insert => Image (Item.Ins_ID, Descriptor) & "," &
-                 Item.Ins_Token_Index'Image & "," &
-                 Item.Ins_Tree_Node'Image,
-            when Delete => Image (Item.Del_ID, Descriptor) & "," &
-                 Item.Del_Token_Index'Image)
-           & ")");
-
-   function Image is new Recover_Op_Arrays.Gen_Image_Aux 
(WisiToken.Descriptor, Image);
-
    type Recover_Stack_Item is record
       State : Unknown_State_Index := Unknown_State;
 
-      Tree_Index : Node_Index := Invalid_Node_Index;
-      --  Valid if copied at recover initialize, Invalid if pushed during
-      --  recover.
-
-      Token : Recover_Token;
+      Token : Syntax_Trees.Recover_Token;
       --  Virtual is False if token is from input text; True if inserted
-      --  during recover.
+      --  during recover. If not Virtual, Element_Node = Node (i.e. rooted
+      --  stream ref).
    end record;
 
    package Recover_Stacks is new SAL.Gen_Bounded_Definite_Stacks 
(Recover_Stack_Item);
 
-   function Image (Item : in Recover_Stack_Item; Descriptor : in 
WisiToken.Descriptor) return String
+   function Image (Item : in Recover_Stack_Item; Tree : in Syntax_Trees.Tree) 
return String
      is ((if Item.State = Unknown_State then " " else Trimmed_Image 
(Item.State)) & " : " &
-           Image (Item.Token, Descriptor));
+           Syntax_Trees.Image (Tree, Item.Token));
 
-   function Recover_Stack_Image is new Recover_Stacks.Gen_Image_Aux 
(WisiToken.Descriptor, Image);
+   function Recover_Stack_Image is new Recover_Stacks.Gen_Image_Aux 
(Syntax_Trees.Tree, Image);
    --  Unique name for calling from debugger
 
    function Image
-     (Stack      : in Recover_Stacks.Stack;
-      Descriptor : in WisiToken.Descriptor;
-      Depth      : in SAL.Base_Peek_Type := 0)
+     (Stack : in Recover_Stacks.Stack;
+      Tree  : in Syntax_Trees.Tree;
+      Depth : in SAL.Base_Peek_Type := 0)
      return String
      renames Recover_Stack_Image;
 
+   function Stack_Has
+     (Tree  : in Syntax_Trees.Tree;
+      Stack : in Recover_Stacks.Stack;
+      ID    : in Token_ID)
+     return Boolean;
+   --  True if some item in Stack has ID.
+
    function Valid_Tree_Indices (Stack : in Recover_Stacks.Stack; Depth : in 
SAL.Base_Peek_Type) return Boolean with
      Pre => Stack.Depth >= Depth;
-   --  Return True if Stack top Depth items have valid Tree_Indices,
-   --  which is true if they were copied from the parser stack, and not
-   --  pushed by recover.
+   --  Return True if Stack top Depth items are not Virtual, which is
+   --  true if they were copied from the parser stack, and not pushed by
+   --  recover.
+
+   package Bounded_Streams is new SAL.Gen_Bounded_Definite_Doubly_Linked_Lists 
(Syntax_Trees.Node_Access);
+
+   function Image (Item : in Syntax_Trees.Node_Access; Tree : in 
Syntax_Trees.Tree) return String
+   is (Tree.Image (Item, Node_Numbers => True));
+
+   function Image is new Bounded_Streams.Gen_Image_Aux (Syntax_Trees.Tree, 
LR.Image);
 
    type Strategies is
      (Ignore_Error, Language_Fix, Minimal_Complete, Matching_Begin,
@@ -584,8 +442,10 @@ package WisiToken.Parse.LR is
 
    type Minimal_Complete_State is (None, Active, Done);
 
+   No_Insert_Delete : constant SAL.Base_Peek_Type := 0;
+
    type Configuration is record
-      Stack : Recover_Stacks.Stack (70);
+      Stack : Recover_Stacks.Stack (90);
       --  Initially built from the parser stack, then the stack after the
       --  Ops below have been performed.
       --
@@ -593,45 +453,65 @@ package WisiToken.Parse.LR is
       --  larger size slows down recover due to memory cache thrashing and
       --  allocation.
       --
-      --  Emacs Ada mode wisi.adb needs > 50
-
-      Resume_Token_Goal : WisiToken.Token_Index := WisiToken.Token_Index'Last;
-      --  A successful solution shifts this token. Per-config because it
-      --  increases with Delete; we increase Shared_Parser.Resume_Token_Goal
-      --  only from successful configs.
-
-      Current_Shared_Token : Base_Token_Index := WisiToken.Token_Index'Last;
-      --  Index into Shared_Parser.Terminals for current input token, after
-      --  all of Inserted is input. Initially the error token.
-
-      String_Quote_Checked : Line_Number_Type := Invalid_Line_Number;
-      --  Max line checked for missing string quote.
+      --  Emacs ada-mode wisi.adb needs > 50
+      --  wisitoken-parse.adb Edit_Tree needs > 70
+
+      Current_Shared_Token : Syntax_Trees.Terminal_Ref := 
Syntax_Trees.Invalid_Stream_Node_Ref;
+      --  Current input token in Shared_Stream; to be input after all of
+      --  Input_Stream and Insert_Delete is input. Initially the error
+      --  token. In batch parse, always a single Source_Terminal; in
+      --  incremental parse, always the first terminal in the stream
+      --  element, which may be Invalid_Node_Access if the stream element is
+      --  empty.
+
+      Input_Stream : aliased Bounded_Streams.List (20);
+      --  Holds tokens copied from Shared_Stream when Push_Back operations
+      --  are performed, or added by Insert. Delete may be applied to these,
+      --  which requires that nonterms be broken down (similar to
+      --  Syntax_Trees.Left_Breakdown).
+      --
+      --  Current token is root of Input_Stream.First.
+      --
+      --  To justify the size: in a typical recover we might need to push
+      --  back a few terminals, and one nonterm that is then broken down
+      --  (max of 15 tokens for most languages). For
+      --  test_mckenzie_recover.adb, 10 is too small, 20 is enough.
 
-      Insert_Delete : aliased Config_Op_Arrays.Vector;
+      Insert_Delete : aliased Recover_Op_Arrays.Vector;
       --  Edits to the input stream that are not yet parsed; contains only
-      --  Insert and Delete ops, in token_index order.
+      --  Insert and Delete ops, in node_index order.
 
       Current_Insert_Delete : SAL.Base_Peek_Type := No_Insert_Delete;
       --  Index of the next op in Insert_Delete. If No_Insert_Delete, use
-      --  Current_Shared_Token.
+      --  Current_Tree_Token.
+
+      Resume_Token_Goal : Syntax_Trees.Sequential_Index := 
Syntax_Trees.Sequential_Index'Last;
+      --  A successful solution shifts this terminal token from
+      --  Tree.Shared_Stream. Per-config because it increases with Delete;
+      --  we set Shared_Parser.Resume_Token_Goal only from successful
+      --  configs.
 
-      Error_Token       : Recover_Token;
-      Check_Token_Count : Ada.Containers.Count_Type := 0;
-      Check_Status      : Semantic_Checks.Check_Status;
+      String_Quote_Checked_Line     : Base_Line_Number_Type := 
Invalid_Line_Number;
+      String_Quote_Checked_Byte_Pos : Base_Buffer_Pos       := 
Invalid_Buffer_Pos;
+      --  Max line, line_end_pos checked for missing string quote.
+
+      Error_Token                 : Syntax_Trees.Recover_Token;
+      In_Parse_Action_Token_Count : SAL.Base_Peek_Type := 0;
+      In_Parse_Action_Status      : Syntax_Trees.In_Parse_Actions.Status;
       --  If parsing this config ended with a parse error, Error_Token is
       --  the token that failed to shift, Check_Status.Label is Ok.
       --
-      --  If parsing this config ended with a semantic check fail,
+      --  If parsing this config ended with an In_Parse_Action fail,
       --  Error_Token is the nonterm created by the reduction,
-      --  Check_Token_Count the number of tokens in the right hand side, and
-      --  Check_Status is the error.
+      --  In_Parse_Action_Token_Count the number of tokens in the right hand
+      --  side, and In_Parse_Action_Status is the error.
       --
       --  Error_Token is set to Invalid_Token_ID when Config is parsed
       --  successfully, or modified so the error is no longer meaningful (ie
       --  in explore when adding an op, or in language_fixes when adding a
       --  fix).
 
-      Ops : aliased Config_Op_Arrays.Vector;
+      Ops : aliased Recover_Op_Arrays.Vector;
       --  Record of operations applied to this Config, in application order.
       --  Insert and Delete ops that are not yet parsed are reflected in
       --  Insert_Delete, in token_index order.
@@ -671,28 +551,4 @@ package WisiToken.Parse.LR is
    procedure Accumulate (Data : in McKenzie_Data; Counts : in out 
Strategy_Counts);
    --  Sum Results.Strategy_Counts.
 
-   type Parse_Error_Label is (Action, Check, Message);
-
-   type Parse_Error
-     (Label          : Parse_Error_Label;
-      First_Terminal : Token_ID;
-      Last_Terminal  : Token_ID)
-   is record
-      Recover : Configuration;
-
-      case Label is
-      when Action =>
-         Error_Token : Valid_Node_Index; -- index into Parser.Tree
-         Expecting   : Token_ID_Set (First_Terminal .. Last_Terminal);
-
-      when Check =>
-         Check_Status : Semantic_Checks.Check_Status;
-
-      when Message =>
-         Msg : Ada.Strings.Unbounded.Unbounded_String;
-      end case;
-   end record;
-
-   package Parse_Error_Lists is new 
Ada.Containers.Indefinite_Doubly_Linked_Lists (Parse_Error);
-
 end WisiToken.Parse.LR;
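
The Undo_Reduce declared earlier in this file's diff reverses a reduction: the
nonterm at Stack_Top is popped and its children are pushed back, so Stack_Top
ends up at the last child.  A self-contained sketch of that effect follows; the
stack-of-strings model and every name in it are invented for illustration and
are not part of the WisiToken API.

   with Ada.Containers.Indefinite_Vectors;
   with Ada.Text_IO; use Ada.Text_IO;
   procedure Undo_Reduce_Sketch is
      package String_Vectors is new Ada.Containers.Indefinite_Vectors (Positive, String);
      use String_Vectors;

      Stack    : Vector;  --  top of stack is the last element
      Children : constant Vector := To_Vector ("A", 1) & "B" & "C";
   begin
      Stack.Append ("abc");  --  nonterm produced by an earlier Reduce of A B C

      --  Undo_Reduce: pop the nonterm, push its children back in order.
      Stack.Delete_Last;
      for Child of Children loop
         Stack.Append (Child);
      end loop;

      Put_Line ("top of stack: " & Stack.Last_Element);  --  prints "C", the last child
   end Undo_Reduce_Sketch;

The real procedure additionally moves any error attached to the popped nonterm
to its first terminal, as its comment notes.
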
diff --git a/wisitoken-parse-packrat-generated.adb b/wisitoken-parse-packrat-generated.adb
index 7d73f34761..54ee0faf5c 100644
--- a/wisitoken-parse-packrat-generated.adb
+++ b/wisitoken-parse-packrat-generated.adb
@@ -2,7 +2,7 @@
 --
 --  See spec.
 --
---  Copyright (C) 2018 - 2020 Free Software Foundation, Inc.
+--  Copyright (C) 2018 - 2022 Free Software Foundation, Inc.
 --
 --  This library is free software;  you can redistribute it and/or modify it
 --  under terms of the  GNU General Public License  as published by the Free
@@ -19,78 +19,69 @@ pragma License (Modified_GPL);
 
 package body WisiToken.Parse.Packrat.Generated is
 
-   overriding procedure Parse (Parser : aliased in out Generated.Parser)
+   overriding procedure Parse
+     (Parser     : in out Generated.Parser;
+      Log_File   : in     Ada.Text_IO.File_Type;
+      Edits      : in     KMN_Lists.List := KMN_Lists.Empty_List;
+      Pre_Edited : in     Boolean        := False)
    is
-      --  'aliased' required for Base_Tree'Access. WORKAROUND: that was
-      --  enough when Parser type was declared in generated Main; now that
-      --  it's a derived type, it doesn't work. So we use Unchecked_Access.
-
-      Descriptor : WisiToken.Descriptor renames Parser.Trace.Descriptor.all;
-
-      Junk : WisiToken.Valid_Node_Index;
-      pragma Unreferenced (Junk);
+      pragma Unreferenced (Log_File, Pre_Edited);
+      use all type WisiToken.Syntax_Trees.User_Data_Access;
+      use all type Ada.Containers.Count_Type;
+      Descriptor : WisiToken.Descriptor renames 
Parser.Tree.Lexer.Descriptor.all;
 
       Result : Memo_Entry;
    begin
-      Parser.Base_Tree.Clear;
-      Parser.Tree.Initialize (Parser.Base_Tree'Unchecked_Access, Flush => 
True);
-      Parser.Lex_All;
-      Parser.Derivs.Set_First_Last (Descriptor.First_Nonterminal, 
Descriptor.Last_Nonterminal);
+      if Edits.Length > 0 then
+         raise WisiToken.Parse_Error;
+      end if;
 
-      for Nonterm in Descriptor.First_Nonterminal .. 
Parser.Trace.Descriptor.Last_Nonterminal loop
-         Parser.Derivs (Nonterm).Clear;
-         Parser.Derivs (Nonterm).Set_First_Last (Parser.Terminals.First_Index, 
Parser.Terminals.Last_Index);
+      for Deriv of Parser.Derivs loop
+         for Memo of Deriv loop
+            case Memo.State is
+            when No_Result | Failure =>
+               null;
+            when Success =>
+               Memo.Last_Pos := Syntax_Trees.Invalid_Stream_Index;
+            end case;
+         end loop;
       end loop;
 
-      for Token_Index in Parser.Terminals.First_Index .. 
Parser.Terminals.Last_Index loop
-         Junk := Parser.Tree.Add_Terminal (Token_Index, Parser.Terminals);
-         --  FIXME: move this into Lex_All, delete Terminals, just use 
Syntax_Tree
+      Parser.Derivs.Set_First_Last (Descriptor.First_Nonterminal, 
Descriptor.Last_Nonterminal);
+
+      Parser.Tree.Clear;
+
+      if Parser.User_Data /= null then
+         Parser.User_Data.Reset;
+      end if;
+      Parser.Lex_All; -- Creates Tree.Shared_Stream
+
+      --  WORKAROUND: there appears to be a bug in GNAT Community 2021 that 
makes
+      --  ref_count fail in this usage. May be related to AdaCore ticket 
V107-045.
+      Parser.Tree.Enable_Ref_Count_Check (Parser.Tree.Shared_Stream, Enable => 
False);
+
+      for Nonterm in Descriptor.First_Nonterminal .. 
Descriptor.Last_Nonterminal loop
+         Parser.Derivs (Nonterm).Clear (Free_Memory => True);
+         Parser.Derivs (Nonterm).Set_First_Last
+           (Parser.Tree.Get_Node_Index
+              (Parser.Tree.Shared_Stream, Parser.Tree.Stream_First 
(Parser.Tree.Shared_Stream, Skip_SOI => True)),
+            Parser.Tree.Get_Node_Index
+              (Parser.Tree.Shared_Stream, Parser.Tree.Stream_Last 
(Parser.Tree.Shared_Stream, Skip_EOI => False)));
       end loop;
 
-      Result := Parser.Parse_WisiToken_Accept (Parser, 
Parser.Terminals.First_Index - 1);
+      Result := Parser.Parse_WisiToken_Accept
+        (Parser, Parser.Tree.Stream_First (Parser.Tree.Shared_Stream, Skip_SOI 
=> False));
 
       if Result.State /= Success then
          if Trace_Parse > Outline then
-            Parser.Trace.Put_Line ("parse failed");
+            Parser.Tree.Lexer.Trace.Put_Line ("parse failed");
          end if;
 
-         raise Syntax_Error with "parse failed"; --  FIXME: need better error 
message!
+         raise Syntax_Error with "parse failed"; --  FIXME packrat: need 
better error message!
       else
          Parser.Tree.Set_Root (Result.Result);
       end if;
 
    end Parse;
 
-   overriding function Tree (Parser : in Generated.Parser) return 
Syntax_Trees.Tree
-   is begin
-      return Parser.Tree;
-   end Tree;
-
-   overriding function Tree_Var_Ref
-     (Parser : aliased in out Generated.Parser)
-     return Syntax_Trees.Tree_Variable_Reference
-   is begin
-      return (Element => Parser.Tree'Access);
-   end Tree_Var_Ref;
-
-   overriding function Any_Errors (Parser : in Generated.Parser) return Boolean
-   is
-      use all type Ada.Containers.Count_Type;
-   begin
-      return Parser.Lexer.Errors.Length > 0;
-   end Any_Errors;
-
-   overriding procedure Put_Errors (Parser : in Generated.Parser)
-   is
-      use Ada.Text_IO;
-   begin
-      for Item of Parser.Lexer.Errors loop
-         Put_Line
-           (Current_Error,
-            Parser.Lexer.File_Name & ":0:0: lexer unrecognized character at" & 
Buffer_Pos'Image (Item.Char_Pos));
-      end loop;
-
-      --  FIXME: Packrat parser does not report errors yet.
-   end Put_Errors;
-
 end WisiToken.Parse.Packrat.Generated;
diff --git a/wisitoken-parse-packrat-generated.ads b/wisitoken-parse-packrat-generated.ads
index 4932465326..fe4153a851 100644
--- a/wisitoken-parse-packrat-generated.ads
+++ b/wisitoken-parse-packrat-generated.ads
@@ -7,7 +7,7 @@
 --
 --  see parent.
 --
---  Copyright (C) 2018 - 2020 Free Software Foundation, Inc.
+--  Copyright (C) 2018 - 2022 Free Software Foundation, Inc.
 --
 --  This library is free software;  you can redistribute it and/or modify it
 --  under terms of the  GNU General Public License  as published by the Free
@@ -24,7 +24,7 @@ pragma License (Modified_GPL);
 with WisiToken.Syntax_Trees;
 package WisiToken.Parse.Packrat.Generated is
 
-   Recursive : exception; -- FIXME: delete
+   Recursive : exception;
 
    type Memo_State is (No_Result, Failure, Success);
    subtype Result_States is Memo_State range Failure .. Success;
@@ -33,21 +33,23 @@ package WisiToken.Parse.Packrat.Generated is
 
       case State is
       when No_Result =>
-         Recursive : Boolean := False; --  FIXME: delete
+         Recursive : Boolean := False;
 
       when Failure =>
          null;
 
       when Success =>
-         Result : aliased Valid_Node_Index;
-
-         Last_Token : Base_Token_Index; --  FIXME: change to Last_Pos
+         Result : aliased Syntax_Trees.Node_Access;
 
+         Last_Pos : Syntax_Trees.Stream_Index;
       end case;
    end record;
 
+   subtype Positive_Node_Index is Syntax_Trees.Node_Index range 1 .. 
Syntax_Trees.Node_Index'Last;
    package Memos is new SAL.Gen_Unbounded_Definite_Vectors
-     (Token_Index, Memo_Entry, Default_Element => (others => <>));
+     (Positive_Node_Index, Memo_Entry, Default_Element => (others => <>));
+   --  Memos is indexed by Node_Index of terminals in Shared_Stream
+   --  (incremental parse is not supported).
 
    subtype Result_Type is Memo_Entry
    with Dynamic_Predicate => Result_Type.State in Result_States;
@@ -57,20 +59,19 @@ package WisiToken.Parse.Packrat.Generated is
 
    type Parse_WisiToken_Accept is access
      --  WORKAROUND: using Packrat.Parser'Class here hits a GNAT Bug box in 
GPL 2018.
-     function (Parser : in out Base_Parser'Class; Last_Pos : in 
Base_Token_Index) return Result_Type;
+     function (Parser : in out Base_Parser'Class; Last_Pos : in 
Syntax_Trees.Stream_Index) return Result_Type;
 
    type Parser is new Packrat.Parser with record
-      Derivs : Generated.Derivs.Vector; --  FIXME: use discriminated array, as 
in procedural
+      Derivs : Generated.Derivs.Vector; --  FIXME packrat: use discriminated 
array, as in procedural
 
       Parse_WisiToken_Accept : Generated.Parse_WisiToken_Accept;
    end record;
 
-   overriding procedure Parse (Parser : aliased in out Generated.Parser);
-   overriding function Tree (Parser : in Generated.Parser) return 
Syntax_Trees.Tree;
-   overriding function Tree_Var_Ref
-     (Parser : aliased in out Generated.Parser)
-     return Syntax_Trees.Tree_Variable_Reference;
-   overriding function Any_Errors (Parser : in Generated.Parser) return 
Boolean;
-   overriding procedure Put_Errors (Parser : in Generated.Parser);
+   overriding procedure Parse
+     (Parser     : in out Generated.Parser;
+      Log_File   : in     Ada.Text_IO.File_Type;
+      Edits      : in     KMN_Lists.List := KMN_Lists.Empty_List;
+      Pre_Edited : in     Boolean        := False);
+   --  Raises Parse_Error if Edits is not empty. Log_File, Pre_Edited are 
ignored.
 
 end WisiToken.Parse.Packrat.Generated;
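
The Memo_State / Memo_Entry declarations above are the heart of the packrat
memoization: for each (nonterminal, terminal position) pair the table records
either no result yet, a known failure, or a success together with how far the
match reached, so a later attempt at the same position is answered without
re-parsing.  A minimal sketch of that idea, using invented toy declarations
rather than the WisiToken ones:

   with Ada.Text_IO; use Ada.Text_IO;
   procedure Memo_Sketch is
      type Memo_State is (No_Result, Failure, Success);

      type Memo_Entry (State : Memo_State := No_Result) is record
         case State is
            when No_Result | Failure =>
               null;
            when Success =>
               Last_Pos : Positive;  --  position of the last terminal matched
         end case;
      end record;

      Table : array (1 .. 10) of Memo_Entry;  --  indexed by terminal position
   begin
      Table (3) := (State => Success, Last_Pos => 7);

      case Table (3).State is
         when Success   =>
            Put_Line ("memo hit, matched through position" & Positive'Image (Table (3).Last_Pos));
         when Failure   =>
            Put_Line ("memo hit, known failure");
         when No_Result =>
            Put_Line ("memo miss, must parse here");
      end case;
   end Memo_Sketch;

In the new code the table index is the Node_Index of a terminal in
Shared_Stream rather than a raw token index, and incremental parse is not
supported, per the comments above.
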
diff --git a/wisitoken-parse-packrat-procedural.adb b/wisitoken-parse-packrat-procedural.adb
index 887794ed16..3e70804e40 100644
--- a/wisitoken-parse-packrat-procedural.adb
+++ b/wisitoken-parse-packrat-procedural.adb
@@ -2,7 +2,7 @@
 --
 --  See spec.
 --
---  Copyright (C) 2018 - 2020 Free Software Foundation, Inc.
+--  Copyright (C) 2018 - 2022 Free Software Foundation, Inc.
 --
 --  This library is free software;  you can redistribute it and/or modify it
 --  under terms of the  GNU General Public License  as published by the Free
@@ -22,14 +22,14 @@ package body WisiToken.Parse.Packrat.Procedural is
    function Apply_Rule
      (Parser   : in out Procedural.Parser;
       R        : in     Token_ID;
-      Last_Pos : in     Base_Token_Index)
+      Last_Pos : in     Syntax_Trees.Stream_Index)
      return Memo_Entry
    with Post => Apply_Rule'Result.State in Failure .. Success;
 
    function Eval
      (Parser   : in out Procedural.Parser;
       R        : in     Token_ID;
-      Last_Pos : in     Base_Token_Index)
+      Last_Pos : in     Syntax_Trees.Stream_Index)
      return Memo_Entry
    with Post => Eval'Result.State in Failure .. Success;
 
@@ -39,14 +39,18 @@ package body WisiToken.Parse.Packrat.Procedural is
    function Eval
      (Parser   : in out Procedural.Parser;
       R        : in     Token_ID;
-      Last_Pos : in     Base_Token_Index)
+      Last_Pos : in     Syntax_Trees.Stream_Index)
      return Memo_Entry
    is
-      Descriptor : WisiToken.Descriptor renames Parser.Trace.Descriptor.all;
+      use all type WisiToken.Syntax_Trees.Stream_Index;
+
+      Tree       : Syntax_Trees.Tree renames Parser.Tree;
+      Descriptor : WisiToken.Descriptor renames Tree.Lexer.Descriptor.all;
 
       subtype Terminal is Token_ID range Descriptor.First_Terminal .. 
Descriptor.Last_Terminal;
 
-      Pos : Base_Token_Index := Last_Pos; --  last token parsed.
+      Pos      : Syntax_Trees.Stream_Index := Last_Pos; --  last token parsed.
+      Next_Pos : Syntax_Trees.Stream_Index := Tree.Stream_Next 
(Tree.Shared_Stream, Pos);
    begin
       for RHS_Index in Parser.Grammar (R).RHSs.First_Index .. Parser.Grammar 
(R).RHSs.Last_Index loop
          declare
@@ -56,26 +60,26 @@ package body WisiToken.Parse.Packrat.Procedural is
          begin
             if RHS.Tokens.Length = 0 then
                return
-                 (State              => Success,
-                  Result             => Parser.Tree.Add_Nonterm
-                    (Production      => (R, RHS_Index),
-                     Action          => RHS.Action,
-                     Children        => (1 .. 0 => Invalid_Node_Index),
-                     Default_Virtual => False),
-                  Last_Pos           => Pos);
+                 (State            => Success,
+                  Result           => Tree.Add_Nonterm
+                    (Production    => (R, RHS_Index),
+                     Children      => (1 .. 0 => 
Syntax_Trees.Invalid_Node_Access),
+                     Clear_Parents => False),
+                  Last_Pos         => Pos);
             else
                declare
-                  Children : Valid_Node_Index_Array
+                  Children : Syntax_Trees.Node_Access_Array
                     (SAL.Base_Peek_Type (RHS.Tokens.First_Index) .. 
SAL.Base_Peek_Type (RHS.Tokens.Last_Index));
                begin
                   for I in RHS.Tokens.First_Index .. RHS.Tokens.Last_Index loop
                      if RHS.Tokens (I) in Terminal then
-                        if Pos = Parser.Terminals.Last_Index then
+                        if Next_Pos = Syntax_Trees.Invalid_Stream_Index then
                            goto Fail_RHS;
 
-                        elsif Parser.Terminals (Pos + 1).ID = RHS.Tokens (I) 
then
-                           Pos := Pos + 1;
-                           Children (SAL.Base_Peek_Type (I)) := Tree_Index 
(Pos);
+                        elsif Tree.ID (Tree.Shared_Stream, Next_Pos) = 
RHS.Tokens (I) then
+                           Pos := Next_Pos;
+                           Next_Pos := Tree.Stream_Next (Tree.Shared_Stream, 
Pos);
+                           Children (SAL.Base_Peek_Type (I)) := Tree.Get_Node 
(Tree.Shared_Stream, Pos);
                         else
                            goto Fail_RHS;
                         end if;
@@ -85,6 +89,7 @@ package body WisiToken.Parse.Packrat.Procedural is
                         when Success =>
                            Children (SAL.Base_Peek_Type (I)) := Memo.Result;
                            Pos := Memo.Last_Pos;
+                           Next_Pos := Tree.Stream_Next (Tree.Shared_Stream, 
Pos);
 
                         when Failure =>
                            goto Fail_RHS;
@@ -94,17 +99,25 @@ package body WisiToken.Parse.Packrat.Procedural is
                      end if;
                   end loop;
 
-                  return
+                  return Result : constant Memo_Entry :=
                     (State              => Success,
                      Result             => Parser.Tree.Add_Nonterm
                        (Production      => (R, RHS_Index),
-                        Action          => RHS.Action,
-                        Children        => Children,
-                        Default_Virtual => False),
-                     Last_Pos           => Pos);
+                        Children        => Syntax_Trees.To_Valid_Node_Access 
(Children),
+                        Clear_Parents   => True),
+                     --  We must be able to steal nodes from failed nonterms;
+                     --  body_instantiation_conflict.wy.
+                     Last_Pos           => Pos)
+                  do
+                     if Trace_Parse > Extra then
+                        Parser.Tree.Lexer.Trace.Put_Line
+                          ("eval: " & Parser.Tree.Image (Root => 
Result.Result, Children => True));
+                     end if;
+                  end return;
 
                   <<Fail_RHS>>
                   Pos := Last_Pos;
+                  Next_Pos := Tree.Stream_Next (Tree.Shared_Stream, Pos);
                end;
             end if;
          end;
@@ -117,16 +130,22 @@ package body WisiToken.Parse.Packrat.Procedural is
    function Apply_Rule
      (Parser   : in out Procedural.Parser;
       R        : in     Token_ID;
-      Last_Pos : in     Base_Token_Index)
+      Last_Pos : in     Syntax_Trees.Stream_Index)
      return Memo_Entry
    is
-      Descriptor : WisiToken.Descriptor renames Parser.Trace.Descriptor.all;
+      use all type WisiToken.Syntax_Trees.Stream_Index;
+      use all type WisiToken.Syntax_Trees.Node_Index;
 
-      Pos       : Base_Token_Index     := Last_Pos;     --  last token parsed.
-      Start_Pos : constant Token_Index := Last_Pos + 1; --  first token in 
current nonterm
-      Memo      : Memo_Entry           := Parser.Derivs (R)(Start_Pos);
+      Tree       : Syntax_Trees.Tree renames Parser.Tree;
+      Descriptor : WisiToken.Descriptor renames Tree.Lexer.Descriptor.all;
 
-      Pos_Recurse_Last : Base_Token_Index := Last_Pos;
+      Pos       : Syntax_Trees.Stream_Index          := Last_Pos; --  last 
token parsed.
+      Start_Pos : constant Syntax_Trees.Stream_Index := Tree.Stream_Next
+        (Tree.Shared_Stream, Last_Pos);                         --  first 
token in current nonterm
+      Memo      : Memo_Entry                         := Parser.Derivs (R)
+        (Tree.Get_Node_Index (Tree.Shared_Stream, Start_Pos));
+
+      Pos_Recurse_Last : Syntax_Trees.Stream_Index := Last_Pos;
       Result_Recurse   : Memo_Entry;
    begin
       case Memo.State is
@@ -138,49 +157,58 @@ package body WisiToken.Parse.Packrat.Procedural is
 
       when No_Result =>
          if Parser.Direct_Left_Recursive (R) then
-            Parser.Derivs (R).Replace_Element (Start_Pos, (State => Failure));
+            Parser.Derivs (R).Replace_Element
+              (Tree.Get_Node_Index (Tree.Shared_Stream, Start_Pos), (State => 
Failure));
          else
             Memo := Eval (Parser, R, Last_Pos);
+
             if (Trace_Parse > Detail and Memo.State = Success) or Trace_Parse 
> Extra then
                case Memo.State is
                when Success =>
-                  Parser.Trace.Put_Line (Parser.Tree.Image (Memo.Result, 
Descriptor, Include_Children => True));
+                  Parser.Tree.Lexer.Trace.Put_Line
+                    (Parser.Tree.Image (Memo.Result, Children => True, 
Terminal_Node_Numbers => True));
                when Failure =>
-                  Parser.Trace.Put_Line (Image (R, Descriptor) & " failed at 
pos" & Last_Pos'Image);
+                  Parser.Tree.Lexer.Trace.Put_Line
+                    (Image (R, Descriptor) & " failed at pos " & Image_Pos 
(Tree, Tree.Shared_Stream, Last_Pos));
                when No_Result =>
                   raise SAL.Programmer_Error;
                end case;
             end if;
-            Parser.Derivs (R).Replace_Element (Start_Pos, Memo);
+            Parser.Derivs (R).Replace_Element (Tree.Get_Node_Index 
(Tree.Shared_Stream, Start_Pos), Memo);
             return Memo;
          end if;
       end case;
 
       loop
+         --  Production is like: list : list element | element
+         --
+         --  Each time around, this loop starts at the same point but
+         --  accumulates more tokens in the first 'list'; it exits when
+         --  'element' does not match the remaining input.
          Pos := Last_Pos;
 
-         if Pos > Parser.Terminals.Last_Index then --  FIXME: this can't pass 
here; Last_Pos never > last_index
-            --  There might be an empty nonterm after the last token
-            return (State => Failure);
-         end if;
-
          Result_Recurse := Eval (Parser, R, Pos);
 
          if Result_Recurse.State = Success then
-            if Result_Recurse.Last_Pos > Pos_Recurse_Last then
-               Parser.Derivs (R).Replace_Element (Start_Pos, Result_Recurse);
+            if Tree.Get_Node_Index (Tree.Shared_Stream, 
Result_Recurse.Last_Pos) >
+              Tree.Get_Node_Index (Tree.Shared_Stream, Pos_Recurse_Last)
+            then
+               Parser.Derivs (R).Replace_Element
+                 (Tree.Get_Node_Index (Tree.Shared_Stream, Start_Pos), 
Result_Recurse);
                Pos              := Result_Recurse.Last_Pos;
                Pos_Recurse_Last := Pos;
 
                if WisiToken.Trace_Parse > Detail then
-                  Parser.Trace.Put_Line
-                    (Parser.Tree.Image (Result_Recurse.Result, Descriptor, 
Include_Children => True));
+                  Parser.Tree.Lexer.Trace.Put_Line
+                    (Parser.Tree.Image
+                       (Result_Recurse.Result, Children => True, 
Terminal_Node_Numbers => True));
                end if;
                --  continue looping
 
             elsif Result_Recurse.Last_Pos = Pos_Recurse_Last then
-               if Parser.Tree.Buffer_Region_Is_Empty (Result_Recurse.Result) 
then
-                  Parser.Derivs (R).Replace_Element (Start_Pos, 
Result_Recurse);
+               if Parser.Tree.Is_Empty_Nonterm (Result_Recurse.Result) then
+                  Parser.Derivs (R).Replace_Element
+                    (Tree.Get_Node_Index (Tree.Shared_Stream, Start_Pos), 
Result_Recurse);
                end if;
                exit;
             else
@@ -191,24 +219,24 @@ package body WisiToken.Parse.Packrat.Procedural is
             exit;
          end if;
       end loop;
-      return Parser.Derivs (R)(Start_Pos);
+      return Parser.Derivs (R)(Tree.Get_Node_Index (Tree.Shared_Stream, 
Start_Pos));
    end Apply_Rule;
 
    ----------
    --  Public subprograms
 
    function Create
-     (Grammar               : in     WisiToken.Productions.Prod_Arrays.Vector;
-      Direct_Left_Recursive : in     Token_ID_Set;
-      Start_ID              : in     Token_ID;
-      Trace                 : access WisiToken.Trace'Class;
-      Lexer                 :        WisiToken.Lexer.Handle;
-      User_Data             :        WisiToken.Syntax_Trees.User_Data_Access)
+     (Grammar               : in WisiToken.Productions.Prod_Arrays.Vector;
+      Direct_Left_Recursive : in Token_ID_Set;
+      Start_ID              : in Token_ID;
+      Lexer                 : in WisiToken.Lexer.Handle;
+      Productions           : in 
WisiToken.Syntax_Trees.Production_Info_Trees.Vector;
+      User_Data             : in WisiToken.Syntax_Trees.User_Data_Access)
      return Procedural.Parser
    is begin
-      return Parser : Procedural.Parser (Grammar.First_Index, 
Grammar.Last_Index) do
-         Parser.Trace                 := Trace;
-         Parser.Lexer                 := Lexer;
+      return Parser                   : Procedural.Parser 
(Grammar.First_Index, Grammar.Last_Index) do
+         Parser.Tree.Lexer            := Lexer;
+         Parser.Productions           := Productions;
          Parser.User_Data             := User_Data;
          Parser.Grammar               := Grammar;
          Parser.Start_ID              := Start_ID;
@@ -216,52 +244,58 @@ package body WisiToken.Parse.Packrat.Procedural is
       end return;
    end Create;
 
-   overriding procedure Parse (Parser : aliased in out Procedural.Parser)
+   overriding procedure Parse
+     (Parser     : in out Procedural.Parser;
+      Log_File   : in     Ada.Text_IO.File_Type;
+      Edits      : in     KMN_Lists.List := KMN_Lists.Empty_List;
+      Pre_Edited : in     Boolean        := False)
    is
-      Descriptor : WisiToken.Descriptor renames Parser.Trace.Descriptor.all;
+      pragma Unreferenced (Log_File, Pre_Edited);
+      use all type Ada.Containers.Count_Type;
+      use all type WisiToken.Syntax_Trees.User_Data_Access;
 
-      Junk : Valid_Node_Index;
-      pragma Unreferenced (Junk);
+      Descriptor : WisiToken.Descriptor renames 
Parser.Tree.Lexer.Descriptor.all;
 
       Result : Memo_Entry;
    begin
-      Parser.Base_Tree.Clear;
-      Parser.Tree.Initialize (Parser.Base_Tree'Unchecked_Access, Flush => 
True);
-      Parser.Lex_All;
+      if Edits.Length > 0 then
+         raise WisiToken.Parse_Error;
+      end if;
 
-      for Nonterm in Descriptor.First_Nonterminal .. 
Parser.Trace.Descriptor.Last_Nonterminal loop
-         Parser.Derivs (Nonterm).Clear;
-         Parser.Derivs (Nonterm).Set_First_Last (Parser.Terminals.First_Index, 
Parser.Terminals.Last_Index + 1);
-         --  There might be an empty nonterm after the last token
-      end loop;
+      Parser.Tree.Clear;
+      --  Creates Shared_Stream, but no parse stream; packrat does not
+      --  use a parse stream.
+
+      if Parser.User_Data /= null then
+         Parser.User_Data.Reset;
+      end if;
+      Parser.Lex_All;
 
-      for Token_Index in Parser.Terminals.First_Index .. 
Parser.Terminals.Last_Index loop
-         Junk := Parser.Tree.Add_Terminal (Token_Index, Parser.Terminals);
+      --  WORKAROUND: there appears to be a bug in GNAT Community 2021 that 
makes
+      --  ref_count fail in this usage. May be related to AdaCore ticket 
V107-045.
+      Parser.Tree.Enable_Ref_Count_Check (Parser.Tree.Shared_Stream, Enable => 
False);
+
+      for Nonterm in Descriptor.First_Nonterminal .. 
Descriptor.Last_Nonterminal loop
+         Parser.Derivs (Nonterm).Clear (Free_Memory => True);
+         Parser.Derivs (Nonterm).Set_First_Last
+           (Parser.Tree.Get_Node_Index
+              (Parser.Tree.Shared_Stream, Parser.Tree.Stream_First 
(Parser.Tree.Shared_Stream, Skip_SOI => True)),
+            Parser.Tree.Get_Node_Index
+              (Parser.Tree.Shared_Stream, Parser.Tree.Stream_Last 
(Parser.Tree.Shared_Stream, Skip_EOI => False)));
       end loop;
 
-      Result := Apply_Rule (Parser, Parser.Start_ID, 
Parser.Terminals.First_Index - 1);
+      Result := Apply_Rule
+        (Parser, Parser.Start_ID,  Parser.Tree.Stream_First 
(Parser.Tree.Shared_Stream, Skip_SOI => False));
 
       if Result.State /= Success then
          if Trace_Parse > Outline then
-            Parser.Trace.Put_Line ("parse failed");
+            Parser.Tree.Lexer.Trace.Put_Line ("parse failed");
          end if;
 
-         raise Syntax_Error with "parse failed"; --  FIXME: need better error 
message!
+         raise Syntax_Error with "parse failed"; --  FIXME packrat: need 
better error message!
       else
          Parser.Tree.Set_Root (Result.Result);
       end if;
    end Parse;
 
-   overriding function Tree (Parser : in Procedural.Parser) return 
Syntax_Trees.Tree
-   is begin
-      return Parser.Tree;
-   end Tree;
-
-   overriding function Tree_Var_Ref
-     (Parser : aliased in out Procedural.Parser)
-     return Syntax_Trees.Tree_Variable_Reference
-   is begin
-      return (Element => Parser.Tree'Access);
-   end Tree_Var_Ref;
-
 end WisiToken.Parse.Packrat.Procedural;
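
The loop comment added in Apply_Rule above ("Production is like: list : list
element | element") describes how direct left recursion is handled: seed the
memo entry with a failure, then repeatedly re-parse from the same start
position, keeping each strictly longer result, until a pass makes no further
progress.  The sketch below shows only that stopping rule, with an invented
input; the real loop re-evaluates the whole right hand side through Eval and
the memo table.

   with Ada.Text_IO; use Ada.Text_IO;
   procedure Grow_Seed_Sketch is
      Input    : constant String := "eeex";  --  'e' stands for one "element" token
      Last_Pos : Natural := 0;               --  last input position covered by "list"
   begin
      loop
         --  Try to extend the current match by one more "element".
         exit when Last_Pos >= Input'Length or else Input (Last_Pos + 1) /= 'e';
         Last_Pos := Last_Pos + 1;           --  a longer match; record it and try again
      end loop;

      Put_Line ("list matched" & Natural'Image (Last_Pos) & " element(s)");  --  3 element(s)
   end Grow_Seed_Sketch;
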
diff --git a/wisitoken-parse-packrat-procedural.ads b/wisitoken-parse-packrat-procedural.ads
index ff0483778a..9abc754ec5 100644
--- a/wisitoken-parse-packrat-procedural.ads
+++ b/wisitoken-parse-packrat-procedural.ads
@@ -9,7 +9,7 @@
 --
 --  See parent.
 --
---  Copyright (C) 2018 - 2020 Free Software Foundation, Inc.
+--  Copyright (C) 2018 - 2022 Free Software Foundation, Inc.
 --
 --  This library is free software;  you can redistribute it and/or modify it
 --  under terms of the  GNU General Public License  as published by the Free
@@ -43,18 +43,22 @@ package WisiToken.Parse.Packrat.Procedural is
          null;
 
       when Success =>
-         Result   : WisiToken.Valid_Node_Index;
-         Last_Pos : Base_Token_Index;
+         Result   : Syntax_Trees.Node_Access;
+         Last_Pos : Syntax_Trees.Stream_Index;
 
       end case;
    end record;
 
+   subtype Positive_Node_Index is Syntax_Trees.Node_Index range 1 .. 
Syntax_Trees.Node_Index'Last;
    package Memos is new SAL.Gen_Unbounded_Definite_Vectors
-     (Token_Index, Memo_Entry, Default_Element => (others => <>));
+     (Positive_Node_Index, Memo_Entry, Default_Element => (others => <>));
+   --  Memos is indexed by Node_Index of terminals in Shared_Stream
+   --  (incremental parse is not supported).
+
    type Derivs is array (Token_ID range <>) of Memos.Vector;
 
-   type Parser (First_Nonterminal, Last_Nonterminal : Token_ID) is new 
Packrat.Parser with
-   record
+   type Parser (First_Nonterminal, Last_Nonterminal : Token_ID) is new 
Packrat.Parser
+   with record
       Grammar               : WisiToken.Productions.Prod_Arrays.Vector;
       Start_ID              : Token_ID;
       Direct_Left_Recursive : Token_ID_Set (First_Nonterminal .. 
Last_Nonterminal);
@@ -62,25 +66,19 @@ package WisiToken.Parse.Packrat.Procedural is
    end record;
 
    function Create
-     (Grammar               : in     WisiToken.Productions.Prod_Arrays.Vector;
-      Direct_Left_Recursive : in     Token_ID_Set;
-      Start_ID              : in     Token_ID;
-      Trace                 : access WisiToken.Trace'Class;
-      Lexer                 :        WisiToken.Lexer.Handle;
-      User_Data             :        WisiToken.Syntax_Trees.User_Data_Access)
+     (Grammar               : in WisiToken.Productions.Prod_Arrays.Vector;
+      Direct_Left_Recursive : in Token_ID_Set;
+      Start_ID              : in Token_ID;
+      Lexer                 : in WisiToken.Lexer.Handle;
+      Productions           : in 
WisiToken.Syntax_Trees.Production_Info_Trees.Vector;
+      User_Data             : in WisiToken.Syntax_Trees.User_Data_Access)
      return Procedural.Parser;
 
-   overriding procedure Parse (Parser : aliased in out Procedural.Parser);
-   overriding function Tree (Parser : in Procedural.Parser) return 
Syntax_Trees.Tree;
-   overriding function Tree_Var_Ref
-     (Parser : aliased in out Procedural.Parser)
-     return Syntax_Trees.Tree_Variable_Reference;
-
-   overriding function Any_Errors (Parser : in Procedural.Parser) return 
Boolean
-     is (False);
-   --  All errors are reported by Parse raising Syntax_Error.
-
-   overriding procedure Put_Errors (Parser : in Procedural.Parser)
-   is null;
+   overriding procedure Parse
+     (Parser     : in out Procedural.Parser;
+      Log_File   : in     Ada.Text_IO.File_Type;
+      Edits      : in     KMN_Lists.List := KMN_Lists.Empty_List;
+      Pre_Edited : in     Boolean        := False);
+   --  Raises Parse_Error if Edits is not empty.
 
 end WisiToken.Parse.Packrat.Procedural;
diff --git a/wisitoken-parse-packrat.adb b/wisitoken-parse-packrat.adb
index 46ea1e90ff..b01bc0c881 100644
--- a/wisitoken-parse-packrat.adb
+++ b/wisitoken-parse-packrat.adb
@@ -2,7 +2,7 @@
 --
 --  See spec.
 --
---  Copyright (C) 2018, 2020 Free Software Foundation, Inc.
+--  Copyright (C) 2018, 2020 - 2022 Free Software Foundation, Inc.
 --
 --  This library is free software;  you can redistribute it and/or modify it
 --  under terms of the  GNU General Public License  as published by the Free
@@ -17,47 +17,91 @@
 
 pragma License (Modified_GPL);
 
+with Ada.Exceptions;
+with GNAT.Traceback.Symbolic;
 package body WisiToken.Parse.Packrat is
 
    overriding
    procedure Execute_Actions
-     (Parser          : in out Packrat.Parser;
-      Image_Augmented : in     Syntax_Trees.Image_Augmented := null)
+     (Parser              : in out Packrat.Parser;
+      Action_Region_Bytes : in     WisiToken.Buffer_Region := 
WisiToken.Null_Buffer_Region)
    is
-      Descriptor : WisiToken.Descriptor renames Parser.Trace.Descriptor.all;
+      use all type WisiToken.Syntax_Trees.User_Data_Access;
 
       procedure Process_Node
         (Tree : in out Syntax_Trees.Tree;
-         Node : in     Valid_Node_Index)
+         Node : in     Syntax_Trees.Valid_Node_Access)
       is
          use all type Syntax_Trees.Node_Label;
+         use all type Syntax_Trees.Post_Parse_Action;
+         use all type WisiToken.Syntax_Trees.Node_Access;
       begin
-         if Tree.Label (Node) /= Nonterm then
+         if Tree.Label (Node) /= Nonterm or else
+           not Overlaps (Tree.Byte_Region (Node, Trailing_Non_Grammar => 
False), Action_Region_Bytes)
+         then
             return;
          end if;
 
          declare
-            use all type Syntax_Trees.Semantic_Action;
-            Tree_Children : constant Valid_Node_Index_Array := Tree.Children 
(Node);
+            Tree_Children     : constant Syntax_Trees.Node_Access_Array := 
Tree.Children (Node);
+            Post_Parse_Action : constant Syntax_Trees.Post_Parse_Action := 
Parser.Get_Post_Parse_Action
+              (Tree.Production_ID (Node));
          begin
-            Parser.User_Data.Reduce (Tree, Node, Tree_Children);
+            for Child of Tree_Children loop
+               if Child /= null and then Overlaps
+                 (Tree.Byte_Region (Child, Trailing_Non_Grammar => False), 
Action_Region_Bytes)
+               then
+                  Process_Node (Tree, Child);
+               end if;
+            end loop;
 
-            if Tree.Action (Node) /= null then
-               Tree.Action (Node) (Parser.User_Data.all, Tree, Node, 
Tree_Children);
+            Parser.User_Data.Reduce (Tree, Node);
+
+            if Post_Parse_Action /= null then
+               Post_Parse_Action (Parser.User_Data.all, Tree, Node);
             end if;
          end;
       end Process_Node;
 
    begin
+      if Parser.User_Data = null then
+         return;
+      end if;
+
       if Trace_Action > Outline then
          if Trace_Action > Extra then
-            Parser.Tree.Print_Tree (Descriptor, Parser.Tree.Root, 
Image_Augmented);
-            Parser.Trace.New_Line;
+            Parser.Tree.Print_Tree (Parser.Tree.Root);
+            Parser.Tree.Lexer.Trace.New_Line;
          end if;
-         Parser.Trace.Put_Line ("root node: " & Parser.Tree.Image 
(Parser.Tree.Root, Descriptor));
+         Parser.Tree.Lexer.Trace.Put_Line ("root node: " & Parser.Tree.Image 
(Parser.Tree.Root));
       end if;
 
-      Parser.Tree.Process_Tree (Process_Node'Access);
+      Parser.User_Data.Initialize_Actions (Parser.Tree);
+      Process_Node (Parser.Tree, Parser.Tree.Root);
+   exception
+   when E : others =>
+      if Debug_Mode then
+         Parser.Tree.Lexer.Trace.Put_Line
+           (Ada.Exceptions.Exception_Name (E) & ": " & 
Ada.Exceptions.Exception_Message (E));
+         Parser.Tree.Lexer.Trace.Put_Line 
(GNAT.Traceback.Symbolic.Symbolic_Traceback (E));
+         Parser.Tree.Lexer.Trace.New_Line;
+      end if;
+      raise;
    end Execute_Actions;
 
+   function Image_Pos
+     (Tree    : in Syntax_Trees.Tree;
+      Stream  : in Syntax_Trees.Stream_ID;
+      Element : in Syntax_Trees.Stream_Index)
+     return String
+   is
+      use Syntax_Trees;
+   begin
+      if Element = Invalid_Stream_Index then
+         return "0";
+      else
+         return Tree.Get_Node_Index (Stream, Element)'Image;
+      end if;
+   end Image_Pos;
+
 end WisiToken.Parse.Packrat;
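
Execute_Actions above walks the tree recursively, pruning any subtree whose
byte region does not overlap Action_Region_Bytes, recursing into the surviving
children first and only then running the user's Reduce and the production's
post-parse action for the node itself.  The following sketch shows that
traversal shape with invented toy types; the real code operates on
Syntax_Trees.Tree and WisiToken's Overlaps.

   with Ada.Text_IO; use Ada.Text_IO;
   procedure Traverse_Sketch is
      type Node;
      type Node_Access is access Node;
      type Node_Array is array (Positive range <>) of Node_Access;

      type Node (Child_Count : Natural := 0) is record
         Name        : Character := ' ';
         First, Last : Positive  := 1;  --  byte region covered by this node
         Children    : Node_Array (1 .. Child_Count);
      end record;

      Action_First : constant := 1;
      Action_Last  : constant := 4;     --  only run actions on bytes 1 .. 4

      function Overlaps (N : not null Node_Access) return Boolean
      is (N.First <= Action_Last and N.Last >= Action_First);

      procedure Process (N : not null Node_Access) is
      begin
         for Child of N.Children loop
            if Child /= null and then Overlaps (Child) then
               Process (Child);
            end if;
         end loop;
         Put_Line ("action on " & N.Name);  --  children first, then this node
      end Process;

      Root : constant Node_Access := new Node'
        (2, 'T', 1, 9, Children =>
           (new Node'(0, 'L', 1, 4, Children => <>),
            new Node'(0, 'R', 5, 9, Children => <>)));
   begin
      Process (Root);  --  prints "action on L" then "action on T"; R is pruned
   end Traverse_Sketch;
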
diff --git a/wisitoken-parse-packrat.ads b/wisitoken-parse-packrat.ads
index 9c0421f4b8..0c59dc9746 100644
--- a/wisitoken-parse-packrat.ads
+++ b/wisitoken-parse-packrat.ads
@@ -14,7 +14,7 @@
 --  [warth 2008]  Warth, A., Douglass, J.R. and Millstein, T.D., 2008. Packrat
 --                parsers can support left recursion. PEPM, 8, pp.103-110.
 --
---  Copyright (C) 2018, 2020 Free Software Foundation, Inc.
+--  Copyright (C) 2018, 2020 - 2021 Free Software Foundation, Inc.
 --
 --  This library is free software;  you can redistribute it and/or modify it
 --  under terms of the  GNU General Public License  as published by the Free
@@ -47,29 +47,19 @@ pragma License (Modified_GPL);
 with WisiToken.Syntax_Trees;
 package WisiToken.Parse.Packrat is
 
-   function Tree_Index (Terminal_Index : in Token_Index) return 
Valid_Node_Index
-     is (Valid_Node_Index (Terminal_Index));
-   --  All tokens are read and entered into the syntax tree before any
-   --  nonterms are reduced, so the mapping from Terminals token_index to
-   --  Tree node_index is identity.
-   --  FIXME: use Terminals (Terminal_Index).Tree_Index
-
-   type Parser is abstract new Base_Parser with record
-      --  Dynamic parsing data
-
-      Base_Tree : aliased WisiToken.Syntax_Trees.Base_Tree;
-      Tree      : aliased WisiToken.Syntax_Trees.Tree;
-      --  FIXME: Current we only need Base_Tree for Execute_Actions, except
-      --  that Syntax_Trees only declares the needed operations on Tree. But
-      --  we may need more trees for error recovery; if not, fix
-      --  Syntax_Trees, move Base_Tree and Execute_Actions up to
-      --  base_parser.
-
-   end record;
+   type Parser is abstract new Base_Parser with null record;
 
    overriding
    procedure Execute_Actions
-     (Parser          : in out Packrat.Parser;
-      Image_Augmented : in     Syntax_Trees.Image_Augmented := null);
+     (Parser              : in out Packrat.Parser;
+      Action_Region_Bytes : in     WisiToken.Buffer_Region := 
WisiToken.Null_Buffer_Region);
+
+   function Image_Pos
+     (Tree    : in Syntax_Trees.Tree;
+      Stream  : in Syntax_Trees.Stream_ID;
+      Element : in Syntax_Trees.Stream_Index)
+     return String
+   with Pre => Tree.Contains (Stream, Element);
+   --  "0" for Invalid_Stream_Index, Node_Index'Image otherwise.
 
 end WisiToken.Parse.Packrat;
diff --git a/wisitoken-parse.adb b/wisitoken-parse.adb
index 535c423450..9920d8577e 100644
--- a/wisitoken-parse.adb
+++ b/wisitoken-parse.adb
@@ -2,7 +2,7 @@
 --
 --  See spec.
 --
---  Copyright (C) 2018 - 2020 Free Software Foundation, Inc.
+--  Copyright (C) 2018 - 2022 Free Software Foundation, Inc.
 --
 --  This library is free software;  you can redistribute it and/or modify it
 --  under terms of the  GNU General Public License  as published by the Free
@@ -17,92 +17,1164 @@
 
 pragma License (Modified_GPL);
 
+with WisiToken.In_Parse_Actions;
 package body WisiToken.Parse is
 
-   function Next_Grammar_Token (Parser : in out Base_Parser'Class) return 
Token_ID
+   Delims : constant Ada.Strings.Maps.Character_Set := Ada.Strings.Maps.To_Set 
(" (),");
+   --  For Error_Data'Input.
+
+   --  Body subprograms, alphabetical
+
+   procedure Adjust_Copy (Item : in out Recover_Op_Nodes_Arrays.Vector)
    is
-      use all type Ada.Containers.Count_Type;
-      use all type Syntax_Trees.User_Data_Access;
+      use Syntax_Trees;
+   begin
+      for Op of Item loop
+         case Op.Op is
+         when Insert =>
+            if Op.Ins_Node /= Invalid_Node_Access and then Copied_Node 
(Op.Ins_Node) /= Invalid_Node_Access then
+               Op.Ins_Node := Copied_Node (Op.Ins_Node);
+            end if;
+
+         when Delete =>
+            if Op.Del_Node /= Invalid_Node_Access and then Copied_Node 
(Op.Del_Node) /= Invalid_Node_Access  then
+               Op.Del_Node := Copied_Node (Op.Del_Node);
+            end if;
+         end case;
+      end loop;
+   end Adjust_Copy;
 
-      Token : Base_Token;
-      Error : Boolean;
+   function Input_Recover_Op
+     (Stream : not null access Ada.Streams.Root_Stream_Type'Class)
+     return Recover_Op_Nodes
+   is begin
+      return Result : Recover_Op_Nodes (Insert_Delete_Op_Label'Value 
(Next_Value (Stream, Delims))) do
+         case Result.Op is
+         when Insert =>
+            Result.Ins_ID           := Token_ID'Value (Next_Value (Stream, 
Delims));
+            Result.Input_Node_Index := Syntax_Trees.Node_Index'Value 
(Next_Value (Stream, Delims));
+
+         when Delete =>
+            Result.Del_ID           := Token_ID'Value (Next_Value (Stream, 
Delims));
+            Result.Input_Node_Index := Syntax_Trees.Node_Index'Value 
(Next_Value (Stream, Delims));
+         end case;
+      end return;
+   end Input_Recover_Op;
+
+   function Input_Recover_Ops
+     (Stream : not null access Ada.Streams.Root_Stream_Type'Class)
+     return Recover_Op_Nodes_Arrays.Vector
+   is
+      Length : constant Positive_Index_Type := Positive_Index_Type'Value 
(Next_Value (Stream, Delims));
    begin
-      loop
-         Error := Parser.Lexer.Find_Next (Token);
+      return Result : Recover_Op_Nodes_Arrays.Vector do
+         for I in 1 .. Length loop
+            Result.Append (Input_Recover_Op (Stream));
+         end loop;
+      end return;
+   end Input_Recover_Ops;
 
-         --  We don't handle Error until later; we assume it was recovered.
+   procedure Output_Recover_Op
+     (Stream : not null access Ada.Streams.Root_Stream_Type'Class;
+      Item   : in              Recover_Op_Nodes)
+   is begin
+      Character'Write (Stream, '(');
+      String'Write (Stream, Item.Op'Image);
+      --  Ignore Input_Node_Index
+      case Item.Op is
+      when Insert =>
+         String'Write (Stream, Item.Ins_ID'Image);
+         --  Ignore Ins_Before.
+         String'Write (Stream, Syntax_Trees.Get_Node_Index 
(Item.Ins_Node)'Image);
 
-         if Token.Line /= Invalid_Line_Number then
-            --  Some lexers don't support line numbers.
-            if Parser.Lexer.First then
-               if Parser.Line_Begin_Token.Length = 0 then
-                  Parser.Line_Begin_Token.Set_First_Last (Token.Line, 
Token.Line);
-               else
-                  Parser.Line_Begin_Token.Set_First_Last 
(Parser.Line_Begin_Token.First_Index, Token.Line);
-               end if;
-               Parser.Line_Begin_Token (Token.Line) := 
Parser.Terminals.Last_Index +
-                 (if Token.ID >= Parser.Trace.Descriptor.First_Terminal then 1 
else 0);
+      when Delete =>
+         String'Write (Stream, Item.Del_ID'Image);
+         --  Ignore Del_Index.
+         String'Write (Stream, Syntax_Trees.Get_Node_Index 
(Item.Del_Node)'Image);
 
-            elsif Token.ID = Parser.Trace.Descriptor.EOI_ID then
-               Parser.Line_Begin_Token.Set_First_Last 
(Parser.Line_Begin_Token.First_Index, Token.Line + 1);
-               Parser.Line_Begin_Token (Token.Line + 1) := 
Parser.Terminals.Last_Index + 1;
-            end if;
-         end if;
+      end case;
+      Character'Write (Stream, ')');
+   end Output_Recover_Op;
+
+   procedure Output_Recover_Ops
+     (Stream : not null access Ada.Streams.Root_Stream_Type'Class;
+      Item   : in              Recover_Op_Nodes_Arrays.Vector)
+   is begin
+      String'Write (Stream,  Item.Length'Image);
+      Character'Write (Stream, '(');
+      for Op of Item loop
+         Output_Recover_Op (Stream, Op);
+      end loop;
+      Character'Write (Stream, ')');
+   end Output_Recover_Ops;
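
For illustration: with Output_Recover_Ops above, a recover-op vector is
streamed as a counted, parenthesized list, one "(OP id node-index)" group per
element. Assuming hypothetical token IDs 35 and 28 and node indices 150 and
151, a two-element vector is written roughly as

    2((INSERT 35 150)(DELETE 28 151))

where the spaces before the numbers come from Ada's 'Image of non-negative
values.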
 
-         if Trace_Parse > Lexer_Debug then
-            Parser.Trace.Put_Line (Image (Token, Parser.Trace.Descriptor.all));
+   procedure Set_Node_Access
+     (Item           : in out Recover_Op_Nodes_Arrays.Vector;
+      Node_Index_Map : in     Syntax_Trees.Node_Index_Array_Node_Access.Vector)
+   is
+      use Syntax_Trees;
+   begin
+      for Op of Item loop
+         if Op.Input_Node_Index /= Invalid_Node_Index then
+            case Op.Op is
+            when Insert =>
+               Op.Ins_Node := Node_Index_Map (Op.Input_Node_Index);
+
+            when Delete =>
+               Op.Del_Node := Node_Index_Map (Op.Input_Node_Index);
+            end case;
+
+            Op.Input_Node_Index := Invalid_Node_Index;
          end if;
+      end loop;
+   end Set_Node_Access;
 
-         if Token.ID >= Parser.Trace.Descriptor.First_Terminal then
+   procedure Validate_Ops
+     (Item                : in     Recover_Op_Nodes_Arrays.Vector;
+      Tree                : in     Syntax_Trees.Tree'Class;
+      Error_Node          : in     Syntax_Trees.Valid_Node_Access;
+      Node_Error_Reported : in out Boolean)
+   is
+      use Syntax_Trees;
+
+      Node_Image_Output : Boolean := Node_Error_Reported;
 
-            Parser.Terminals.Append (Token);
+      procedure Report_Error (Msg : in String)
+      is begin
+         Node_Error_Reported := True;
+
+         if not Node_Image_Output then
+            Tree.Lexer.Trace.Put_Line
+              (Tree.Error_Message
+                 (Error_Node,
+                  Tree.Image
+                    (Error_Node,
+                     Children     => False,
+                     Node_Numbers => True)));
+            Node_Image_Output := True;
+         end if;
 
-            --  We create the syntax tree node here, so Lexer_To_Augmented can
-            --  store augmented data in it.
-            Parser.Terminals (Parser.Terminals.Last_Index).Tree_Index := 
Parser.Tree_Var_Ref.Add_Terminal
-              (Parser.Terminals.Last_Index, Parser.Terminals);
+         Tree.Lexer.Trace.Put_Line (Tree.Error_Message (Error_Node, "... 
invalid_tree: " & Msg));
+      end Report_Error;
 
-            if Parser.User_Data /= null then
-               Parser.User_Data.Lexer_To_Augmented
-                 (Parser.Tree_Var_Ref, Parser.Terminals 
(Parser.Terminals.Last_Index), Parser.Lexer);
+   begin
+      for Op of Item loop
+         case Op.Op is
+         when Insert =>
+            if Op.Ins_Node /= Invalid_Node_Access then
+               if not Tree.In_Tree (Op.Ins_Node) then
+                  Report_Error ("op.ins_node not in tree");
+               end if;
             end if;
 
-            exit;
-         else
-            --  non-grammar; not in syntax tree
-            if Parser.User_Data /= null then
-               Parser.User_Data.Lexer_To_Augmented (Parser.Tree_Var_Ref, 
Token, Parser.Lexer);
+         when Delete =>
+            if Op.Del_Node /= Invalid_Node_Access then
+               if not Tree.In_Tree (Op.Del_Node) then
+                  Report_Error ("op.del_node not in tree");
+               end if;
             end if;
+         end case;
+      end loop;
+   end Validate_Ops;
+
+   procedure Process_Grammar_Token
+     (Parser : in out Base_Parser'Class;
+      Token  : in     Lexer.Token;
+      Node   : in     Syntax_Trees.Valid_Node_Access)
+   is
+      use all type Syntax_Trees.User_Data_Access;
+   begin
+      if Parser.User_Data /= null then
+         Parser.User_Data.Lexer_To_Augmented (Parser.Tree, Token, Node);
+      end if;
+   end Process_Grammar_Token;
+
+   procedure Process_Non_Grammar_Token
+     (Parser       : in out Base_Parser'Class;
+      Grammar_Node : in     Syntax_Trees.Valid_Node_Access;
+      Token        : in     Lexer.Token)
+   is
+      use all type Syntax_Trees.Node_Access;
+      use all type Syntax_Trees.User_Data_Access;
+   begin
+      Parser.Tree.Non_Grammar_Var (Grammar_Node).Append (Token);
+      if Parser.User_Data /= null then
+         Parser.User_Data.Lexer_To_Augmented (Parser.Tree, Token, 
Grammar_Node);
+      end if;
+   end Process_Non_Grammar_Token;
+
+   ----------
+   --  Package public subprograms, declaration order
+
+   function Image (Item : in Recover_Op_Nodes; Tree : in 
Syntax_Trees.Tree'Class) return String
+   is
+      use Syntax_Trees;
+   begin
+      return
+        "(" & Image (Item.Op) & ", " &
+        (case Item.Op is
+         when Insert =>
+           (if Item.Ins_Node = Invalid_Node_Access
+            then Image (Item.Ins_ID, Tree.Lexer.Descriptor.all)
+            else Tree.Image (Item.Ins_Node)) &
+              "," & Item.Ins_Before'Image,
+         when Delete =>
+           (if Item.Del_Node = Invalid_Node_Access
+            then Image (Item.Del_ID, Tree.Lexer.Descriptor.all)
+            else Tree.Image (Item.Del_Node, Terminal_Node_Numbers => True)) &
+              "," & Item.Del_Index'Image)
+        & ")";
+   end Image;
+
+   function To_Recover_Op_Nodes (Item : in Recover_Op_Arrays.Vector) return 
Recover_Op_Nodes_Arrays.Vector
+   is
+      use Recover_Op_Arrays;
+   begin
+      return Result : Recover_Op_Nodes_Arrays.Vector do
+         for I in First_Index (Item) .. Last_Index (Item) loop
+            declare
+               Op : Recover_Op renames Element (Item, I);
+            begin
+               case Op.Op is
+               when Insert =>
+
+                  Result.Append
+                    ((Insert,
+                      Input_Node_Index => Syntax_Trees.Invalid_Node_Index,
+                      Ins_ID           => Op.Ins_ID,
+                      Ins_Before       => Op.Ins_Before,
+                      Ins_Node         => Syntax_Trees.Invalid_Node_Access));
+
+               when Delete =>
+
+                  Result.Append
+                    ((Delete,
+                      Input_Node_Index => Syntax_Trees.Invalid_Node_Index,
+                      Del_ID           => Op.Del_ID,
+                      Del_Index        => Op.Del_Token_Index,
+                      Del_Node         => Syntax_Trees.Invalid_Node_Access));
+
+               when others =>
+                  null;
+               end case;
+            end;
+         end loop;
+      end return;
+   end To_Recover_Op_Nodes;
+
+   overriding function Dispatch_Equal (Left : in Lexer_Error; Right : in 
Syntax_Trees.Error_Data'Class) return Boolean
+   is
+      use all type WisiToken.Lexer.Error;
+   begin
+      return Right in Lexer_Error and then Left.Error = Lexer_Error 
(Right).Error;
+   end Dispatch_Equal;
+
+   overriding function To_Message
+     (Data       : in Lexer_Error;
+      Tree       : in Syntax_Trees.Tree'Class;
+      Error_Node : in Syntax_Trees.Valid_Node_Access)
+     return Syntax_Trees.Error_Data'Class
+   is begin
+      return Error_Message'
+        (+Image (Data, Tree, Error_Node),
+         Recover_Ops  => <>,
+         Recover_Test => null);
+   end To_Message;
+
+   overriding function Image
+     (Data       : in Lexer_Error;
+      Tree       : in Syntax_Trees.Tree'Class;
+      Error_Node : in Syntax_Trees.Valid_Node_Access)
+     return String
+   is
+      use Ada.Strings.Unbounded;
+      Result : Unbounded_String;
+   begin
+      Append (Result, "lexer error:" & Data.Error.Char_Pos'Image & ", '");
+      for C of Data.Error.Recover_Char loop
+         if C /= ASCII.NUL then
+            Append (Result, C);
          end if;
       end loop;
+      Append (Result, "'");
+      return To_String (Result);
+   end Image;
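
For illustration, with hypothetical values: a lexer error recorded at
character position 37 whose recoverable text is a single '"' renders via this
Image as

   lexer error: 37, '"'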
 
-      if Error then
+   function Input_Lexer_Error (Stream : not null access 
Ada.Streams.Root_Stream_Type'Class) return Lexer_Error
+   is begin
+      declare
+         Recover_Char_Count : Integer;
+      begin
+         return Result : Lexer_Error do
+            Result.Error.Char_Pos := Buffer_Pos'Value (Next_Value (Stream, 
Delims));
+            Recover_Char_Count := Integer'Value (Next_Value (Stream, Delims));
+            for I in 1 .. Recover_Char_Count loop
+               Character'Read (Stream, Result.Error.Recover_Char (I));
+            end loop;
+         end return;
+      end;
+   end Input_Lexer_Error;
+
+   procedure Output_Lexer_Error (Stream : not null access 
Ada.Streams.Root_Stream_Type'Class; Item : in Lexer_Error)
+   is
+      Recover_Char_Count : Integer := 0;
+   begin
+      for Char of Item.Error.Recover_Char loop
+         if Char /= ASCII.NUL then
+            Recover_Char_Count := @ + 1;
+         end if;
+      end loop;
+      String'Write (Stream, "(" & Trimmed_Image (Item.Error.Char_Pos) & 
Recover_Char_Count'Image);
+      for I in 1 .. Recover_Char_Count loop
+         Character'Write (Stream, Item.Error.Recover_Char (I));
+      end loop;
+      Character'Write (Stream, ')');
+   end Output_Lexer_Error;
+
+   overriding procedure Adjust_Copy (Data : in out Parse_Error)
+   is begin
+      Adjust_Copy (Data.Recover_Ops);
+   end Adjust_Copy;
+
+   overriding function Dispatch_Equal (Left : in Parse_Error; Right : in 
Syntax_Trees.Error_Data'Class) return Boolean
+   is begin
+      if not (Right in Parse_Error) then
+         return False;
+      else
          declare
-            Error : WisiToken.Lexer.Error renames 
Parser.Lexer.Errors.Reference (Parser.Lexer.Errors.Last);
+            Right_Parse : Parse_Error renames Parse_Error (Right);
          begin
-            if Error.Recover_Char (1) /= ASCII.NUL then
-               Error.Recover_Token := Parser.Terminals.Last_Index;
-            end if;
+            --  Allow updating recover info after error recovery; current value
+            --  may have no or different recovery information, so don't check
+            --  that.
+            return Left.Expecting = Right_Parse.Expecting;
+         end;
+      end if;
+   end Dispatch_Equal;
+
+   overriding function To_Message
+     (Data       : in Parse_Error;
+      Tree       : in Syntax_Trees.Tree'Class;
+      Error_Node : in Syntax_Trees.Valid_Node_Access)
+     return Syntax_Trees.Error_Data'Class
+   is begin
+      return Error_Message'
+        (+Image (Data, Tree, Error_Node), Data.Recover_Ops, Data.Recover_Test);
+   end To_Message;
+
+   overriding function Image
+     (Data       : in Parse_Error;
+      Tree       : in Syntax_Trees.Tree'Class;
+      Error_Node : in Syntax_Trees.Valid_Node_Access)
+     return String
+   is
+      use Syntax_Trees;
+      use all type Ada.Containers.Count_Type;
+
+      First_Term : constant Node_Access := Tree.First_Terminal (Error_Node);
+
+      Item_Byte_Region : constant Buffer_Region :=
+        (if First_Term = Invalid_Node_Access
+         then Null_Buffer_Region
+         else Tree.Byte_Region (First_Term, Trailing_Non_Grammar => False));
+
+      Msg : constant String :=
+        "syntax_error: expecting " & Image (Data.Expecting, 
Tree.Lexer.Descriptor.all) &
+        ", found " &
+        (if First_Term = Invalid_Node_Access
+         then "empty nonterm " & Image (Tree.ID (Error_Node), 
Tree.Lexer.Descriptor.all)
+         else "'" & Tree.Lexer.Buffer_Text (Item_Byte_Region) & "'");
+   begin
+      if Data.Recover_Ops.Length /= 0 then
+         return Msg & ASCII.LF & "   recovered: " & Image (Data.Recover_Ops, 
Tree);
+      else
+         return Msg;
+      end if;
+   end Image;
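
For illustration (token names depend on the grammar's descriptor): a parse
error expecting a semicolon but finding "end", repaired by one insertion,
renders roughly as

   syntax_error: expecting SEMICOLON, found 'end'
      recovered: ((INSERT, SEMICOLON, 23))

where the "recovered:" line appears only when Recover_Ops is non-empty.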
+
+   function Input_Parse_Error (Stream : not null access 
Ada.Streams.Root_Stream_Type'Class) return Parse_Error
+   is begin
+      declare
+         First_Terminal : constant Token_ID := Token_ID'Value (Next_Value 
(Stream, Delims));
+         Last_Terminal  : constant Token_ID := Token_ID'Value (Next_Value 
(Stream, Delims));
+
+         procedure Get_Token_ID_Set (Item : in out Token_ID_Set)
+         is begin
+            for I in Item'Range loop
+               Item (I) := Boolean'Value (Next_Value (Stream, Delims));
+            end loop;
+         end Get_Token_ID_Set;
+
+      begin
+         return Result : Parse_Error (First_Terminal, Last_Terminal)
+         do
+            Get_Token_ID_Set (Result.Expecting);
+            Result.Recover_Ops  := Input_Recover_Ops (Stream);
+         end return;
+      end;
+   end Input_Parse_Error;
+
+   procedure Output_Parse_Error (Stream : not null access 
Ada.Streams.Root_Stream_Type'Class; Item : in Parse_Error)
+   is begin
+      String'Write (Stream, "(" & Trimmed_Image (Item.First_Terminal) & 
Item.Last_Terminal'Image);
+      for B of Item.Expecting loop
+         String'Write (Stream, " " & B'Image);
+      end loop;
+      Output_Recover_Ops (Stream, Item.Recover_Ops);
+      Character'Write (Stream, ')');
+   end Output_Parse_Error;
+
+   overriding
+   procedure Set_Node_Access
+     (Data           : in out Parse_Error;
+      Node_Index_Map : in     Syntax_Trees.Node_Index_Array_Node_Access.Vector)
+   is begin
+      Set_Node_Access (Data.Recover_Ops, Node_Index_Map);
+   end Set_Node_Access;
+
+   overriding
+   procedure Validate_Error
+     (Data                : in     Parse_Error;
+      Tree                : in     Syntax_Trees.Tree'Class;
+      Error_Node          : in     Syntax_Trees.Valid_Node_Access;
+      Node_Error_Reported : in out Boolean)
+   is begin
+      Validate_Ops (Data.Recover_Ops, Tree, Error_Node, Node_Error_Reported);
+   end Validate_Error;
+
+   overriding procedure Adjust_Copy (Data : in out In_Parse_Action_Error)
+   is begin
+      Adjust_Copy (Data.Recover_Ops);
+   end Adjust_Copy;
+
+   overriding function Dispatch_Equal
+     (Left  : in In_Parse_Action_Error;
+      Right : in Syntax_Trees.Error_Data'Class)
+     return Boolean
+   is begin
+      if not (Right in In_Parse_Action_Error) then
+         return False;
+      else
+         declare
+            use all type WisiToken.Syntax_Trees.In_Parse_Actions.Status;
+            Right_In_Parse : In_Parse_Action_Error renames 
In_Parse_Action_Error (Right);
+         begin
+            --  Allow updating recover info after error recovery.
+            return Left.Status = Right_In_Parse.Status;
          end;
       end if;
+   end Dispatch_Equal;
+
+   overriding function To_Message
+     (Data       : in In_Parse_Action_Error;
+      Tree       : in Syntax_Trees.Tree'Class;
+      Error_Node : in Syntax_Trees.Valid_Node_Access)
+     return Syntax_Trees.Error_Data'Class
+   is begin
+      return Error_Message'
+        (+Image (Data, Tree, Error_Node), Data.Recover_Ops, Data.Recover_Test);
+   end To_Message;
+
+   overriding function Image
+     (Data       : in In_Parse_Action_Error;
+      Tree       : in Syntax_Trees.Tree'Class;
+      Error_Node : in Syntax_Trees.Valid_Node_Access)
+     return String
+   is
+      use Ada.Strings.Unbounded;
+      use all type Ada.Containers.Count_Type;
+
+      Result : Unbounded_String;
+   begin
+      Result := +"in_parse_action_error: " & WisiToken.In_Parse_Actions.Image 
(Data.Status, Tree, Error_Node);
+
+      if Data.Recover_Ops.Length /= 0 then
+         Append (Result, ASCII.LF & "   recovered: " & Image 
(Data.Recover_Ops, Tree));
+      end if;
+
+      return -Result;
+   end Image;
+
+   function Input_In_Parse_Action_Error
+     (Stream : not null access Ada.Streams.Root_Stream_Type'Class) return 
In_Parse_Action_Error
+   is begin
+      declare
+         use Syntax_Trees.In_Parse_Actions;
+         Label       : constant Status_Label := Status_Label'Value (Next_Value 
(Stream, Delims));
+      begin
+         return Result : In_Parse_Action_Error
+         do
+            case Label is
+            when Ok =>
+               Result.Status := (Label => Ok);
+            when Error =>
+               case Error'(Label) is
+               when Missing_Name_Error =>
+                  Result.Status := (Label => Missing_Name_Error, others => 1);
+               when Extra_Name_Error =>
+                  Result.Status := (Label => Extra_Name_Error, others => 1);
+               when Match_Names_Error =>
+                  Result.Status := (Label => Match_Names_Error, others => 1);
+               end case;
+
+               Result.Status.Begin_Name := Positive_Index_Type'Value 
(Next_Value (Stream, Delims));
+               Result.Status.End_Name   := Positive_Index_Type'Value 
(Next_Value (Stream, Delims));
+            end case;
+            Result.Recover_Ops  := Input_Recover_Ops (Stream);
+         end return;
+      end;
+   end Input_In_Parse_Action_Error;
+
+   procedure Output_In_Parse_Action_Error
+     (Stream : not null access Ada.Streams.Root_Stream_Type'Class; Item : in 
In_Parse_Action_Error)
+   is
+      use Syntax_Trees.In_Parse_Actions;
+   begin
+      String'Write (Stream, "((" & Item.Status.Label'Image);
+      case Item.Status.Label is
+      when Ok =>
+         Character'Write (Stream, ')');
+      when Error =>
+         String'Write (Stream, Item.Status.Begin_Name'Image & 
Item.Status.End_Name'Image & ")");
+      end case;
+
+      Output_Recover_Ops (Stream, Item.Recover_Ops);
+      Character'Write (Stream, ')');
+   end Output_In_Parse_Action_Error;
+
+   overriding
+   procedure Set_Node_Access
+     (Data           : in out In_Parse_Action_Error;
+      Node_Index_Map : in     Syntax_Trees.Node_Index_Array_Node_Access.Vector)
+   is begin
+      Set_Node_Access (Data.Recover_Ops, Node_Index_Map);
+   end Set_Node_Access;
+
+   overriding
+   procedure Validate_Error
+     (Data                : in     In_Parse_Action_Error;
+      Tree                : in     Syntax_Trees.Tree'Class;
+      Error_Node          : in     Syntax_Trees.Valid_Node_Access;
+      Node_Error_Reported : in out Boolean)
+   is begin
+      Validate_Ops (Data.Recover_Ops, Tree, Error_Node, Node_Error_Reported);
+   end Validate_Error;
+
+   overriding procedure Adjust_Copy (Data : in out Error_Message)
+   is begin
+      Adjust_Copy (Data.Recover_Ops);
+   end Adjust_Copy;
+
+   overriding function Dispatch_Equal (Left : in Error_Message; Right : in 
Syntax_Trees.Error_Data'Class) return Boolean
+   is begin
+      if not (Right in Error_Message) then
+         return False;
+      else
+         declare
+            use all type Ada.Strings.Unbounded.Unbounded_String;
+            Right_Message : Error_Message renames Error_Message (Right);
+         begin
+            --  Allow updating recover info after error recovery.
+            return Left.Msg = Right_Message.Msg;
+         end;
+      end if;
+   end Dispatch_Equal;
+
+   overriding function To_Message
+     (Data       : in Error_Message;
+      Tree       : in Syntax_Trees.Tree'Class;
+      Error_Node : in Syntax_Trees.Valid_Node_Access)
+     return Syntax_Trees.Error_Data'Class
+   is
+      pragma Unreferenced (Tree, Error_Node);
+   begin
+      return Data;
+   end To_Message;
+
+   overriding function Image
+     (Data       : in Error_Message;
+      Tree       : in Syntax_Trees.Tree'Class;
+      Error_Node : in Syntax_Trees.Valid_Node_Access)
+     return String
+   is begin
+      return "message: " & (-Data.Msg);
+   end Image;
+
+   function Recover_Op_Array_Const_Ref_From_Cursor
+     (Item : in Syntax_Trees.Error_Data_Lists.Cursor)
+     return Recover_Op_Array_Const_Ref_Type
+   is
+      Err : Syntax_Trees.Error_Data'Class renames 
Syntax_Trees.Error_Data_Lists.Unchecked_Ref (Item).all;
+   begin
+      if Err in Parse_Error then
+         return (Element => Parse_Error (Err).Recover_Ops'Access, Dummy => 1);
+
+      elsif Err in In_Parse_Action_Error then
+         return (Element => In_Parse_Action_Error (Err).Recover_Ops'Access, 
Dummy => 1);
+
+      elsif Err in Error_Message then
+         return (Element => Error_Message (Err).Recover_Ops'Access, Dummy => 
1);
+
+      else
+         raise SAL.Programmer_Error;
+      end if;
+   end Recover_Op_Array_Const_Ref_From_Cursor;
+
+   function Recover_Op_Array_Const_Ref
+     (Error : aliased in Syntax_Trees.Error_Data'Class)
+     return Recover_Op_Array_Const_Ref_Type
+   is begin
+      if Error in Parse_Error then
+         return (Element => Parse_Error (Error).Recover_Ops'Access, Dummy => 
1);
+
+      elsif Error in In_Parse_Action_Error then
+         return (Element => In_Parse_Action_Error (Error).Recover_Ops'Access, 
Dummy => 1);
+
+      elsif Error in Error_Message then
+         return (Element => Error_Message (Error).Recover_Ops'Access, Dummy => 
1);
+
+      else
+         raise SAL.Programmer_Error;
+      end if;
+   end Recover_Op_Array_Const_Ref;
+
+   function Recover_Op_Array_Var_Ref
+     (Error : aliased in out Syntax_Trees.Error_Data'Class)
+     return Recover_Op_Array_Var_Ref_Type
+   is begin
+      if Error in Parse_Error then
+         return (Element => Parse_Error (Error).Recover_Ops'Access, Dummy => 
1);
+
+      elsif Error in In_Parse_Action_Error then
+         return (Element => In_Parse_Action_Error (Error).Recover_Ops'Access, 
Dummy => 1);
+
+      elsif Error in Error_Message then
+         return (Element => Error_Message (Error).Recover_Ops'Access, Dummy => 
1);
+
+      else
+         raise SAL.Programmer_Error;
+      end if;
+   end Recover_Op_Array_Var_Ref;
+
+   function Recover_Test_Const_Ref
+     (Item : in Syntax_Trees.Error_Data_Lists.Cursor)
+     return Recover_Test_Const_Ref_Type
+   is
+      Err : Syntax_Trees.Error_Data'Class renames 
Syntax_Trees.Error_Data_Lists.Constant_Ref (Item);
+   begin
+      if Err in Parse_Error then
+         return (Element => Parse_Error (Err).Recover_Test, Dummy => 1);
+
+      elsif Err in In_Parse_Action_Error then
+         return (Element => In_Parse_Action_Error (Err).Recover_Test, Dummy => 
1);
+
+      elsif Err in Error_Message then
+         return (Element => Error_Message (Err).Recover_Test, Dummy => 1);
+
+      else
+         raise SAL.Programmer_Error;
+      end if;
+   end Recover_Test_Const_Ref;
+
+   function Recover_Test_Var_Ref
+     (Error : aliased in out Syntax_Trees.Error_Data'Class)
+     return Recover_Test_Var_Ref_Type
+   is begin
+      if Error in Parse_Error then
+         return (Element => Parse_Error (Error).Recover_Test'Access, Dummy => 
1);
 
-      return Token.ID;
+      elsif Error in In_Parse_Action_Error then
+         return (Element => In_Parse_Action_Error (Error).Recover_Test'Access, 
Dummy => 1);
+
+      elsif Error in Error_Message then
+         return (Element => Error_Message (Error).Recover_Test'Access, Dummy 
=> 1);
+
+      else
+         raise SAL.Programmer_Error;
+      end if;
+   end Recover_Test_Var_Ref;
+
+   function Recover_Image
+     (Error : in Syntax_Trees.Error_Data'Class;
+      Tree  : in Syntax_Trees.Tree)
+     return String
+   is begin
+      return Image (Recover_Op_Array_Const_Ref (Error), Tree);
+   end Recover_Image;
+
+   function Input_Error_Message (Stream : not null access 
Ada.Streams.Root_Stream_Type'Class) return Error_Message
+   is
+      Msg_Length : constant Integer := Integer'Value (Next_Value (Stream, 
Delims));
+      Msg : String (1 .. Msg_Length);
+   begin
+      String'Read (Stream, Msg);
+      return Result : Error_Message
+      do
+         Result.Msg         := Ada.Strings.Unbounded.To_Unbounded_String (Msg);
+         Result.Recover_Ops := Input_Recover_Ops (Stream);
+      end return;
+   end Input_Error_Message;
+
+   procedure Output_Error_Message
+     (Stream : not null access Ada.Streams.Root_Stream_Type'Class; Item : in 
Error_Message)
+   is begin
+      String'Write (Stream, "(");
+      String'Write (Stream, Ada.Strings.Unbounded.Length (Item.Msg)'Image);
+      Character'Write (Stream, '"');
+      String'Write (Stream, Ada.Strings.Unbounded.To_String (Item.Msg));
+      Character'Write (Stream, '"');
+      Output_Recover_Ops (Stream, Item.Recover_Ops);
+      Character'Write (Stream, ')');
+   end Output_Error_Message;
+
+   overriding
+   procedure Set_Node_Access
+     (Data           : in out Error_Message;
+      Node_Index_Map : in     Syntax_Trees.Node_Index_Array_Node_Access.Vector)
+   is begin
+      Set_Node_Access (Data.Recover_Ops, Node_Index_Map);
+   end Set_Node_Access;
+
+   overriding
+   procedure Validate_Error
+     (Data                : in     Error_Message;
+      Tree                : in     Syntax_Trees.Tree'Class;
+      Error_Node          : in     Syntax_Trees.Valid_Node_Access;
+      Node_Error_Reported : in out Boolean)
+   is begin
+      Validate_Ops (Data.Recover_Ops, Tree, Error_Node, Node_Error_Reported);
+   end Validate_Error;
+
+   function Error_Pred_Lexer (Cur : in Syntax_Trees.Error_Data_Lists.Cursor) 
return Boolean
+   is
+      use Syntax_Trees.Error_Data_Lists;
+   begin
+      return
+        (if Element (Cur) in Lexer_Error then True
+         else False);
+   end Error_Pred_Lexer;
+
+   function Error_Pred_Parse (Cur : in Syntax_Trees.Error_Data_Lists.Cursor) 
return Boolean
+   is
+      use Syntax_Trees.Error_Data_Lists;
+   begin
+      return
+        (if Element (Cur) in Parse_Error then True
+         else False);
+   end Error_Pred_Parse;
+
+   function Error_Pred_In_Parse_Action (Cur : in 
Syntax_Trees.Error_Data_Lists.Cursor) return Boolean
+   is
+      use Syntax_Trees.Error_Data_Lists;
+   begin
+      return
+        (if Element (Cur) in In_Parse_Action_Error then True
+         else False);
+   end Error_Pred_In_Parse_Action;
+
+   function Error_Pred_Message (Cur : in Syntax_Trees.Error_Data_Lists.Cursor) 
return Boolean
+   is
+      use Syntax_Trees.Error_Data_Lists;
+   begin
+      return
+        (if Element (Cur) in Error_Message then True
+         else False);
+   end Error_Pred_Message;
+
+   function Error_Pred_Parse_Message (Cur : in 
Syntax_Trees.Error_Data_Lists.Cursor) return Boolean
+   is
+      use Syntax_Trees.Error_Data_Lists;
+   begin
+      return
+        (if Element (Cur) in Parse_Error then True
+         elsif Element (Cur) in Error_Message then True
+         else False);
+   end Error_Pred_Parse_Message;
+
+   function Error_Pred_Lexer_Parse_Message (Cur : in 
Syntax_Trees.Error_Data_Lists.Cursor) return Boolean
+   is
+      use Syntax_Trees.Error_Data_Lists;
+   begin
+      return
+        (if Element (Cur) in Lexer_Error then False
+         --  Lexer errors are only cleared by re-lexing in Edit_Tree.
+         --  test_incremental.adb Lexer_Errors_1
+
+         elsif Element (Cur) in Parse_Error then True
+         --  A previous Parse_Error; test_incremental.adb Recover_1,
+         --  test_incremental.adb Multiple_Errors_On_One_Token_1, _2,
+         --  ada_mode-interactive_06.adb
+
+         elsif Element (Cur) in Error_Message then True
+         --  A moved In_Parse_Error.
+
+         else False);
+   end Error_Pred_Lexer_Parse_Message;
+
+   function Find_Parse_In_Parse_Action_Error
+     (Tree : in Syntax_Trees.Tree;
+      Node : in Syntax_Trees.Valid_Node_Access)
+     return Syntax_Trees.Error_Data'Class
+   is
+      use Syntax_Trees;
+      Found : Boolean := False;
+   begin
+      --  test_mckenzie_recover.adb String_Quote_0 has lexer and parse error
+      --  on same node. There should only be one Parse or In_Parse_Action
+      --  error on a node.
+      for Err of Tree.Error_List (Node) loop
+         if Err in Parse_Error or Err in In_Parse_Action_Error then
+            Found := True;
+         end if;
+      end loop;
+
+      if not Found then
+         raise SAL.Programmer_Error;
+      end if;
+
+      for Err of Tree.Error_List (Node) loop
+         if Err in Parse_Error or Err in In_Parse_Action_Error then
+            return Err;
+         end if;
+      end loop;
+
+      raise SAL.Programmer_Error; -- keep the compiler happy
+   end Find_Parse_In_Parse_Action_Error;
+
+   function Find_Non_Lexer_Error
+     (Tree : in Syntax_Trees.Tree;
+      Node : in Syntax_Trees.Valid_Node_Access)
+     return Syntax_Trees.Error_Data'Class
+   is
+      use Syntax_Trees;
+      Found : Boolean := False;
+   begin
+      for Err of Tree.Error_List (Node) loop
+         if not (Err in Lexer_Error) then
+            Found := True;
+         end if;
+      end loop;
+
+      if not Found then
+         raise SAL.Programmer_Error;
+      end if;
+
+      for Err of Tree.Error_List (Node) loop
+         if not (Err in Lexer_Error) then
+            return Err;
+         end if;
+      end loop;
+
+      raise SAL.Programmer_Error; -- keep the compiler happy
+   end Find_Non_Lexer_Error;
+
+   function Get_In_Parse_Action
+     (Parser : in Base_Parser;
+      ID     : in Production_ID)
+     return Syntax_Trees.In_Parse_Actions.In_Parse_Action
+   is begin
+      if Parser.Productions.Is_Empty then
+         return null;
+      elsif Parser.Productions (ID.LHS).RHSs.Is_Empty then
+         return null;
+      else
+         return Parser.Productions (ID.LHS).RHSs (ID.RHS).In_Parse_Action;
+      end if;
+   end Get_In_Parse_Action;
+
+   function Get_Post_Parse_Action
+     (Productions : in Syntax_Trees.Production_Info_Trees.Vector;
+      ID          : in Production_ID)
+     return Syntax_Trees.Post_Parse_Action
+   is begin
+      if Productions.Is_Empty then
+         return null;
+      elsif Productions (ID.LHS).RHSs.Is_Empty then
+         return null;
+      else
+         return Productions (ID.LHS).RHSs (ID.RHS).Post_Parse_Action;
+      end if;
+   end Get_Post_Parse_Action;
+
+   function Get_Post_Parse_Action
+     (Parser : in Base_Parser;
+      ID     : in Production_ID)
+     return Syntax_Trees.Post_Parse_Action
+   is begin
+      return Get_Post_Parse_Action (Parser.Productions, ID);
+   end Get_Post_Parse_Action;
+
+   function Next_Grammar_Token
+     (Parser            : in out Base_Parser'Class;
+      Last_Grammar_Node : in out WisiToken.Syntax_Trees.Node_Access)
+     return Token_ID
+   is
+      use Syntax_Trees;
+
+      Tree  : Syntax_Trees.Tree renames Parser.Tree;
+      Lexer : WisiToken.Lexer.Handle renames Parser.Tree.Lexer;
+   begin
+      loop
+         declare
+            Token        : WisiToken.Lexer.Token;
+            Error_Count  : constant Natural := Lexer.Find_Next (Token);
+            Lexer_Errors : Error_Data_Lists.List;
+         begin
+
+            if Trace_Lexer > Outline then
+               Tree.Lexer.Trace.Put_Line (WisiToken.Lexer.Full_Image (Token, 
Tree.Lexer.Descriptor.all));
+            end if;
+
+            if Error_Count > 0 then
+               declare
+                  Cur : WisiToken.Lexer.Error_Lists.Cursor := 
Lexer.Errors.Last;
+               begin
+                  for I in 1 .. Error_Count - 1 loop
+                     WisiToken.Lexer.Error_Lists.Previous (Cur);
+                  end loop;
+                  for I in 1 .. Error_Count loop
+                     Lexer_Errors.Append (Lexer_Error'(Error => Lexer.Errors 
(Cur)));
+                     WisiToken.Lexer.Error_Lists.Next (Cur);
+                  end loop;
+               end;
+            end if;
+
+            if Token.ID >= Lexer.Descriptor.First_Terminal then
+               declare
+                  Ref : constant Terminal_Ref := Tree.Add_Terminal 
(Parser.Tree.Shared_Stream, Token, Lexer_Errors);
+               begin
+                  Process_Grammar_Token (Parser, Token, Ref.Node);
+                  Last_Grammar_Node := Ref.Node;
+               end;
+            else
+               if Trace_Lexer > Detail then
+                  Tree.Lexer.Trace.Put_Line ("non-grammar in " & 
Parser.Tree.Image (Last_Grammar_Node));
+               end if;
+               if Error_Count > 0 then
+                  --  test_incremental.adb Lexer_Errors_04, _05
+                  Tree.Add_Errors (Tree.Shared_Stream, Last_Grammar_Node, 
Lexer_Errors);
+               end if;
+
+               Process_Non_Grammar_Token (Parser, Last_Grammar_Node, Token);
+            end if;
+
+            if Error_Count > 0 and Trace_Lexer > Detail then
+               Tree.Lexer.Trace.Put_Line
+                 ("lexer error" & (if Error_Count > 1 then "s" else "") &
+                    " in " & Parser.Tree.Image (Last_Grammar_Node));
+            end if;
+
+            if Token.ID >= Lexer.Descriptor.First_Terminal then
+               return Token.ID;
+            end if;
+         end;
+      end loop;
    end Next_Grammar_Token;
 
    procedure Lex_All (Parser : in out Base_Parser'Class)
    is
-      EOF_ID : constant Token_ID := Parser.Trace.Descriptor.EOI_ID;
+      EOI_ID : constant Token_ID := Parser.Tree.Lexer.Descriptor.EOI_ID;
+
+      Last_Grammar_Node : WisiToken.Syntax_Trees.Node_Access;
    begin
-      Parser.Lexer.Errors.Clear;
-      Parser.Terminals.Clear;
-      Parser.Line_Begin_Token.Clear;
+      Parser.Tree.Start_Lex;
+
+      Last_Grammar_Node := Parser.Tree.SOI;
+
       loop
-         exit when EOF_ID = Next_Grammar_Token (Parser);
+         exit when EOI_ID = Next_Grammar_Token (Parser, Last_Grammar_Node);
       end loop;
       if Trace_Parse > Outline then
-         Parser.Trace.Put_Line (Token_Index'Image 
(Parser.Terminals.Last_Index) & " tokens lexed");
+         Parser.Tree.Lexer.Trace.Put_Line (Syntax_Trees.Get_Node_Index 
(Last_Grammar_Node)'Image & " tokens lexed");
       end if;
-
    end Lex_All;
 
+   function Equal (Left : in Recover_Op; Right : in Insert_Op) return Boolean
+   is
+      use all type WisiToken.Syntax_Trees.Sequential_Index;
+   begin
+      return Left.Op = Insert and then
+        Left.Ins_ID = Right.Ins_ID and then
+        Left.Ins_Before = Right.Ins_Before;
+   end Equal;
+
+   function None (Ops : aliased in Recover_Op_Arrays.Vector; Op : in 
Recover_Op_Label) return Boolean
+   is
+      use Recover_Op_Arrays, Recover_Op_Array_Refs;
+   begin
+      for I in First_Index (Ops) .. Last_Index (Ops) loop
+         if Constant_Ref (Ops, I).Op = Op then
+            return False;
+         end if;
+      end loop;
+      return True;
+   end None;
+
+   function None_Since_FF (Ops : aliased in Recover_Op_Arrays.Vector; Op : in 
Recover_Op_Label) return Boolean
+   is
+      use Recover_Op_Arrays, Recover_Op_Array_Refs;
+   begin
+      for I in reverse First_Index (Ops) .. Last_Index (Ops) loop
+         declare
+            O : Recover_Op renames Constant_Ref (Ops, I);
+         begin
+            exit when O.Op = Fast_Forward;
+            if O.Op = Op then
+               return False;
+            end if;
+         end;
+      end loop;
+      return True;
+   end None_Since_FF;
+
+   function Image (KMN : in WisiToken.Parse.KMN) return String
+   is begin
+      return "(" & KMN.Stable_Bytes'Image & "," &
+        KMN.Stable_Chars'Image & "," &
+        KMN.Inserted_Bytes'Image & "," &
+        KMN.Inserted_Chars'Image & "," &
+        KMN.Deleted_Bytes'Image & "," &
+        KMN.Deleted_Chars'Image & ")";
+   end Image;
+
+   procedure Validate_KMN
+     (KMN                       : in WisiToken.Parse.KMN;
+      Initial_Stable_Byte_First : in Buffer_Pos;
+      Initial_Stable_Char_First : in Buffer_Pos;
+      Edited_Stable_Byte_First  : in Buffer_Pos;
+      Edited_Stable_Char_First  : in Buffer_Pos;
+      Initial_Text_Byte_Region  : in Buffer_Region;
+      Initial_Text_Char_Region  : in Buffer_Region;
+      Edited_Text_Byte_Region   : in Buffer_Region;
+      Edited_Text_Char_Region   : in Buffer_Region)
+   is
+      Stable_Byte_Region : constant Buffer_Region :=
+        (Initial_Stable_Byte_First, Initial_Stable_Byte_First + 
KMN.Stable_Bytes - 1);
+      Stable_Char_Region : constant Buffer_Region :=
+        (Initial_Stable_Char_First, Initial_Stable_Char_First + 
KMN.Stable_Chars - 1);
+
+      Inserted_Byte_Region : constant Buffer_Region :=
+        (Edited_Stable_Byte_First + KMN.Stable_Bytes,
+         Edited_Stable_Byte_First + KMN.Stable_Bytes + KMN.Inserted_Bytes - 1);
+      Inserted_Char_Region : constant Buffer_Region :=
+        (Edited_Stable_Char_First + KMN.Stable_Chars,
+         Edited_Stable_Char_First + KMN.Stable_Chars + KMN.Inserted_Chars - 1);
+
+      Deleted_Byte_Region : constant Buffer_Region :=
+        (Stable_Byte_Region.Last + 1, Stable_Byte_Region.Last + 
KMN.Deleted_Bytes);
+      Deleted_Char_Region : constant Buffer_Region :=
+        (Stable_Char_Region.Last + 1, Stable_Char_Region.Last + 
KMN.Deleted_Chars);
+   begin
+      if not Contains (Outer => Initial_Text_Byte_Region, Inner => 
Stable_Byte_Region) then
+         raise User_Error with "KMN stable byte region outside initial source 
text";
+      end if;
+      if not Contains (Outer => Initial_Text_Char_Region, Inner => 
Stable_Char_Region) then
+         raise User_Error with "KMN stable char region outside initial source 
text";
+      end if;
+
+      if KMN.Inserted_Bytes > 0 then
+         if not Contains (Outer => Edited_Text_Byte_Region, Inner => 
Inserted_Byte_Region) then
+            raise User_Error with "KMN inserted byte region outside edited 
source text";
+         end if;
+         if not Contains (Outer => Edited_Text_Char_Region, Inner => 
Inserted_Char_Region) then
+            raise User_Error with "KMN inserted char region outside edited 
source text";
+         end if;
+      end if;
+
+      if KMN.Deleted_Bytes > 0 then
+         if not Contains (Outer => Initial_Text_Byte_Region, Inner => 
Deleted_Byte_Region) then
+            raise User_Error with "KMN deleted byte region outside initial 
source text";
+         end if;
+         if not Contains (Outer => Initial_Text_Char_Region, Inner => 
Deleted_Char_Region) then
+            raise User_Error with "KMN deleted char region outside initial 
source text";
+         end if;
+      end if;
+   end Validate_KMN;
+
+   procedure Validate_KMN
+     (List                     : in KMN_Lists.List;
+      Initial_Text_Byte_Region : in Buffer_Region;
+      Initial_Text_Char_Region : in Buffer_Region;
+      Edited_Text_Byte_Region  : in Buffer_Region;
+      Edited_Text_Char_Region  : in Buffer_Region)
+   is
+      Initial_Byte_First : Base_Buffer_Pos := Initial_Text_Byte_Region.First;
+      Initial_Char_First : Base_Buffer_Pos := Initial_Text_Char_Region.First;
+      Edited_Byte_First  : Base_Buffer_Pos := Edited_Text_Byte_Region.First;
+      Edited_Char_First  : Base_Buffer_Pos := Edited_Text_Char_Region.First;
+   begin
+      for KMN of List loop
+         Validate_KMN
+           (KMN,
+            Initial_Stable_Byte_First => Initial_Byte_First,
+            Initial_Stable_Char_First => Initial_Char_First,
+            Edited_Stable_Byte_First  => Edited_Byte_First,
+            Edited_Stable_Char_First  => Edited_Char_First,
+            Initial_Text_Byte_Region  => Initial_Text_Byte_Region,
+            Initial_Text_Char_Region  => Initial_Text_Char_Region,
+            Edited_Text_Byte_Region   => Edited_Text_Byte_Region,
+            Edited_Text_Char_Region   => Edited_Text_Char_Region);
+
+         Initial_Byte_First := @ + KMN.Stable_Bytes + KMN.Deleted_Bytes;
+         Initial_Char_First := @ + KMN.Stable_Chars + KMN.Deleted_Chars;
+         Edited_Byte_First  := @ + KMN.Stable_Bytes + KMN.Inserted_Bytes;
+         Edited_Char_First  := @ + KMN.Stable_Chars + KMN.Inserted_Chars;
+      end loop;
+
+      if Initial_Byte_First - 1 /= Initial_Text_Byte_Region.Last then
+         raise User_Error with "KMN list (deleted last" & 
Base_Buffer_Pos'Image (Initial_Byte_First - 1) &
+           ") does not match initial text (last" & 
Initial_Text_Byte_Region.Last'Image & ")";
+      end if;
+      if Edited_Byte_First - 1 /= Edited_Text_Byte_Region.Last then
+         raise User_Error with "KMN list (inserted last" & 
Base_Buffer_Pos'Image (Edited_Byte_First - 1) &
+           ") does not match edited text (last" & 
Edited_Text_Byte_Region.Last'Image & ")";
+      end if;
+   end Validate_KMN;
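
A worked example of the arithmetic these checks enforce: editing the 7-byte
ASCII text "A := 1;" into "A := 100;" can be described by two KMNs, shown
here in the Image order above (stable, inserted, deleted bytes/chars):

   initial : "A := 1;"     bytes 1 .. 7
   edited  : "A := 100;"   bytes 1 .. 9
   KMN 1   : ( 5, 5, 3, 3, 1, 1)   -- stable "A := ", insert "100", delete "1"
   KMN 2   : ( 1, 1, 0, 0, 0, 0)   -- stable ";"

For KMN 1 the stable region is bytes 1 .. 5, the inserted region in the
edited text is 6 .. 8, and the deleted region in the initial text is 6 .. 6.
After both KMNs the running first positions are 8 (initial) and 10 (edited),
one past each text's last byte, which is exactly what the final checks
require.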
+
+   procedure Put_Errors (Tree : in Syntax_Trees.Tree)
+   is
+      --  FIXME: move to Syntax_Trees?
+      use WisiToken.Syntax_Trees;
+   begin
+      for Err in Tree.Error_Iterate loop
+         declare
+            Error_Node : constant Valid_Node_Access := Tree.Error_Node (Err);
+         begin
+            Tree.Lexer.Trace.Put_Line
+              (Tree.Error_Message
+                 (Error_Node, Error (Err).Image (Tree, Error_Node)));
+         end;
+      end loop;
+   end Put_Errors;
+
+   procedure Put_Errors (Parser : in Base_Parser'Class)
+   is begin
+      Put_Errors (Parser.Tree);
+   end Put_Errors;
+
+   procedure Put_Errors (Tree : Syntax_Trees.Tree; Stream : in 
Syntax_Trees.Stream_ID)
+   is
+      use WisiToken.Syntax_Trees;
+   begin
+      for Cur in Tree.Stream_Error_Iterate (Stream) loop
+         declare
+            Error_Ref  : constant Stream_Error_Ref  := Error (Cur);
+            Error_Node : constant Valid_Node_Access := Syntax_Trees.Error_Node 
(Error_Ref);
+         begin
+            for Err of Tree.Error_List (Error_Node) loop
+               Tree.Lexer.Trace.Put_Line
+                 (Tree.Error_Message
+                    (Ref     => Tree.Error_Stream_Node_Ref (Error_Ref), -- For 
line, column
+                     Message => Err.Image (Tree, Error_Node)));
+            end loop;
+         end;
+      end loop;
+   end Put_Errors;
+
+   procedure Put_Errors (Parser : in Base_Parser'Class; Stream : in 
Syntax_Trees.Stream_ID)
+   is begin
+      Put_Errors (Parser.Tree, Stream);
+   end Put_Errors;
+
 end WisiToken.Parse;
diff --git a/wisitoken-parse.ads b/wisitoken-parse.ads
index 14936580c1..fdf482f031 100644
--- a/wisitoken-parse.ads
+++ b/wisitoken-parse.ads
@@ -2,7 +2,7 @@
 --
 --  Subprograms common to more than one parser, higher-level than in 
wisitoken.ads
 --
---  Copyright (C) 2018 - 2020 Free Software Foundation, Inc.
+--  Copyright (C) 2018 - 2022 Free Software Foundation, Inc.
 --
 --  This library is free software;  you can redistribute it and/or modify it
 --  under terms of the  GNU General Public License  as published by the Free
@@ -18,67 +18,640 @@
 pragma License (Modified_GPL);
 
 with Ada.Finalization;
+with Ada.Streams;
+with SAL.Gen_Bounded_Definite_Vectors.Gen_Image_Aux;
+with SAL.Gen_Bounded_Definite_Vectors.Gen_Refs;
+with SAL.Gen_Definite_Doubly_Linked_Lists.Gen_Image;
+with SAL.Gen_Indefinite_Doubly_Linked_Lists.Gen_Image_Aux;
 with WisiToken.Lexer;
 with WisiToken.Syntax_Trees;
 package WisiToken.Parse is
 
-   type Base_Parser is abstract new Ada.Finalization.Limited_Controlled with 
record
-      Trace     : access WisiToken.Trace'Class;
-      Lexer     : WisiToken.Lexer.Handle;
-      User_Data : WisiToken.Syntax_Trees.User_Data_Access;
-      Terminals : aliased WisiToken.Base_Token_Arrays.Vector;
-
-      Line_Begin_Token : aliased WisiToken.Line_Begin_Token_Vectors.Vector;
-      --  Line_Begin_Token (I) is the index into Terminals of the first
-      --  grammar token on line I. Line_Begin_Token.First_Index is the first
-      --  line containing a grammar token (after leading comments). However,
-      --  if the only token on line I is a non_grammar token (ie a comment,
-      --  or a newline for a blank line), Line_Begin_Token (I) is the last
-      --  grammar token on the previous non-blank line. If Line (I) is a
-      --  non-first line in a multi-line terminal token, Line_Begin_Token
-      --  (I) is Invalid_Token_Index.
+   type Recover_Op_Label is (Fast_Forward, Undo_Reduce, Push_Back, Insert, 
Delete);
+   subtype Insert_Delete_Op_Label is Recover_Op_Label range Insert .. Delete;
+   --  Fast_Forward is a placeholder to mark a fast_forward parse; that
+   --  resets what operations are allowed to be done on a config.
+   --
+   --  Undo_Reduce is the inverse of Reduce.
+   --
+   --  Push_Back pops the top stack item, and moves the input stream
+   --  pointer back to the first shared_terminal contained by that item.
+   --
+   --  Insert inserts a new token in the token input stream, before the
+   --  given point in Terminals.
+   --
+   --  Delete deletes one item from the token input stream, at the given
+   --  point.
+
+   --  WORKAROUND: GNAT Community 2020 with -gnat2020 S'Image outputs
+   --  integer when S is a subtype. Fixed in Community 2021.
+   function Image (Item : in Recover_Op_Label) return String
+   is (case Item is
+       when Fast_Forward => "FAST_FORWARD",
+       when Undo_Reduce  => "UNDO_REDUCE",
+       when Push_Back    => "PUSH_BACK",
+       when Insert       => "INSERT",
+       when Delete       => "DELETE");
+
+   type Recover_Op (Op : Recover_Op_Label := Fast_Forward) is record
+      --  Stores recover operations during error recovery. We store enough
+      --  information to perform the operation on the main parser stack and
+      --  input stream when the config is the result of a successful
+      --  recover.
+
+      case Op is
+      when Fast_Forward =>
+         FF_First_Index : Syntax_Trees.Sequential_Index;
+         --  First token in fast forward region
+
+         FF_Next_Index : Syntax_Trees.Sequential_Index;
+         --  Config current_token after the operation is done; next after the
+         --  last token in the fast forward region.
+         --
+         --  If FF_First_Index = FF_Next_Index, no tokens were actually parsed
+         --  for the fast_forward; it is just a marker to allow error recovery
+         --  to reset op order restrictions.
+
+      when Undo_Reduce =>
+         Nonterm : Token_ID;
+         --  The nonterminal popped off the stack.
+
+         Token_Count : SAL.Base_Peek_Type;
+         --  The number of tokens pushed on the stack.
+
+         UR_Token_Index : Syntax_Trees.Base_Sequential_Index;
+         --  First terminal in the undo_reduce token; Invalid_Sequential_Index 
if
+         --  empty. Used to check that successive Undo_Reduce are valid.
+
+      when Push_Back =>
+         PB_ID : Token_ID;
+         --  The nonterm ID popped off the stack.
+
+         PB_Token_Index : Syntax_Trees.Base_Sequential_Index;
+         --  First terminal in the pushed_back token; Invalid_Sequential_Index 
if
+         --  empty. Used to check that successive Push_Backs are valid.
+
+      when Insert =>
+         Ins_ID : Token_ID;
+         --  The token ID inserted.
+
+         Ins_Before : Syntax_Trees.Sequential_Index;
+         --  Ins_ID is inserted before Ins_Before.
+
+      when Delete =>
+         Del_ID : Token_ID;
+         --  The token ID deleted; a terminal token.
+
+         Del_Token_Index : Syntax_Trees.Sequential_Index;
+         --  Token at Del_Token_Index is deleted.
+
+      end case;
+   end record;
+   subtype Insert_Delete_Op is Recover_Op with Dynamic_Predicate => 
(Insert_Delete_Op.Op in Insert_Delete_Op_Label);
+   subtype Insert_Op is Recover_Op with Dynamic_Predicate => (Insert_Op.Op = 
Insert);
+
+   function Token_Index (Op : in Insert_Delete_Op) return 
Syntax_Trees.Sequential_Index
+   is (case Insert_Delete_Op_Label'(Op.Op) is
+       when Insert => Op.Ins_Before,
+       when Delete => Op.Del_Token_Index);
+
+   function ID (Op : in Insert_Delete_Op) return WisiToken.Token_ID
+   is (case Insert_Delete_Op_Label'(Op.Op) is
+       when Insert => Op.Ins_ID,
+       when Delete => Op.Del_ID);
+
+   function Equal (Left : in Recover_Op; Right : in Insert_Op) return Boolean;
+
+   package Recover_Op_Arrays is new SAL.Gen_Bounded_Definite_Vectors
+     (Positive_Index_Type, Recover_Op, Default_Element =>
+        (Fast_Forward, Syntax_Trees.Sequential_Index'Last, 
Syntax_Trees.Sequential_Index'First), Capacity => 80);
+   --  Using a fixed size vector significantly speeds up
+   --  McKenzie_Recover. The capacity is determined by the maximum number
+   --  of repair operations, which is limited by the cost_limit McKenzie
+   --  parameter plus an arbitrary number from the language-specific
+   --  repairs; in practice, a capacity of 80 is enough so far. If a
+   --  config does hit that limit, it is abandoned; some other config is
+   --  likely to be cheaper.
+
+   package Recover_Op_Array_Refs is new Recover_Op_Arrays.Gen_Refs;
+
+   function Recover_Op_Image (Item : in Recover_Op; Descriptor : in 
WisiToken.Descriptor) return String
+   is ("(" & Image (Item.Op) & ", " &
+         (case Item.Op is
+          when Fast_Forward => Syntax_Trees.Trimmed_Image 
(Item.FF_First_Index) & ", " &
+            Syntax_Trees.Trimmed_Image (Item.FF_Next_Index),
+          when Undo_Reduce  => Image (Item.Nonterm, Descriptor) & "," &
+            Item.Token_Count'Image & ", " & Syntax_Trees.Trimmed_Image 
(Item.UR_Token_Index),
+          when Push_Back    => Image (Item.PB_ID, Descriptor) & ", " & 
Syntax_Trees.Trimmed_Image (Item.PB_Token_Index),
+          when Insert       => Image (Item.Ins_ID, Descriptor) & ", " & 
Syntax_Trees.Trimmed_Image (Item.Ins_Before),
+          when Delete       => Image (Item.Del_ID, Descriptor) & ", " &
+            Syntax_Trees.Trimmed_Image (Item.Del_Token_Index))
+         & ")");
+
+   function Image (Item : in Recover_Op; Descriptor : in WisiToken.Descriptor) 
return String
+     renames Recover_Op_Image;
+
+   function Recover_Op_Array_Image is new Recover_Op_Arrays.Gen_Image_Aux 
(WisiToken.Descriptor, Image);
+   function Image (Item : in Recover_Op_Arrays.Vector; Descriptor : in 
WisiToken.Descriptor) return String
+     renames Recover_Op_Array_Image;
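
For illustration (token names and sequential indices are hypothetical): a
repair that pushes back a partially parsed statement, inserts a missing END
and deletes a stray semicolon has its elements rendered by Recover_Op_Image
as

   (PUSH_BACK, statement, 11) (INSERT, END, 11) (DELETE, SEMICOLON, 12)

using the upper-case labels from Image (Recover_Op_Label) and the token names
from the descriptor.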
+
+   function None (Ops : aliased in Recover_Op_Arrays.Vector; Op : in 
Recover_Op_Label) return Boolean;
+   --  True if Ops contains no Op.
+
+   function None_Since_FF (Ops : aliased in Recover_Op_Arrays.Vector; Op : in 
Recover_Op_Label) return Boolean;
+   --  True if Ops contains no Op after the last Fast_Forward (or ops.first, if
+   --  no Fast_Forward).
+
+   type Recover_Op_Nodes (Op : Insert_Delete_Op_Label := Insert) is record
+      --  Stores recover operation data used by the main parser to implement
+      --  insert, delete; and by the client editor to implement auto
+      --  corrections.
+      --
+      --  We can't compute buffer positions until after
+      --  User_Data.Insert_Tokens runs, after parse completes; it can move
+      --  non_grammar around, which affects buffer positions.
+
+      Input_Node_Index : Syntax_Trees.Node_Index := 
Syntax_Trees.Invalid_Node_Index;
+      --  Used by Get_Tree to store the node_index for Ins_Node or Del_Node
+      --  read from the input file; converted to Node_Access by 
Set_Node_Access.
+
+      case Op is
+      when Insert =>
+         Ins_ID : Token_ID := Invalid_Token_ID;
+         --  The token ID inserted.
+
+         Ins_Before : Syntax_Trees.Sequential_Index := 
Syntax_Trees.Sequential_Index'First;
+         --  Ins_ID is inserted before Ins_Before in the Shared_Stream.
+
+         Ins_Node : Syntax_Trees.Node_Access := 
Syntax_Trees.Invalid_Node_Access;
+         --  The parse stream node holding the inserted token.
+
+      when Delete =>
+         Del_ID : Token_ID := Invalid_Token_ID;
+         --  The token ID deleted; a terminal token.
+
+         Del_Index : Syntax_Trees.Sequential_Index := 
Syntax_Trees.Sequential_Index'First;
+         --  Token at Del_Index is deleted; used by parser to skip the token.
+
+         Del_Node : Syntax_Trees.Node_Access := 
Syntax_Trees.Invalid_Node_Access;
+         --  Del_Node is deleted; used by post-parse actions to adjust for the
+         --  deleted token. Del_Node.Parent is the previous non-deleted 
terminal.
+      end case;
+   end record;
+
+   subtype Delete_Op_Nodes is Recover_Op_Nodes (Delete);
+
+   package Recover_Op_Nodes_Arrays is new SAL.Gen_Unbounded_Definite_Vectors
+     (Positive_Index_Type, Recover_Op_Nodes, Default_Element => (others => 
<>));
+
+   function Image (Item : in Recover_Op_Nodes; Tree : in 
Syntax_Trees.Tree'Class) return String;
+
+   function Image is new Recover_Op_Nodes_Arrays.Gen_Image_Aux
+     (Aux_Data            => Syntax_Trees.Tree'Class,
+      Index_Trimmed_Image => Trimmed_Image,
+      Element_Image       => Image);
+
+   function To_Recover_Op_Nodes (Item : in Recover_Op_Arrays.Vector) return 
Recover_Op_Nodes_Arrays.Vector;
+
+   type Recover_Test_Info is record
+      --  Used by test_mckenzie_recover.adb
+      Ops           : Recover_Op_Arrays.Vector;
+      Cost          : Natural;
+      Enqueue_Count : Natural;
+      Check_Count   : Natural;
+   end record;
+   type Recover_Test_Info_Access is access Recover_Test_Info;
+   --  No "Free" declared for Recover_Test_Info_Access; it is only used
+   --  in test_mckenzie_recover.adb, so we don't care about recovering
+   --  the memory. This allows us to copy these access values freely.
+
+   type Lexer_Error is new Syntax_Trees.Error_Data with record
+      Error : WisiToken.Lexer.Error;
+   end record;
+
+   overriding procedure Adjust_Copy (Data : in out Lexer_Error) is null;
+   overriding function Dispatch_Equal (Left : in Lexer_Error; Right : in 
Syntax_Trees.Error_Data'Class) return Boolean;
+   overriding function To_Message
+     (Data       : in Lexer_Error;
+      Tree       : in Syntax_Trees.Tree'Class;
+      Error_Node : in Syntax_Trees.Valid_Node_Access)
+     return Syntax_Trees.Error_Data'Class;
+
+   overriding function Image
+     (Data       : in Lexer_Error;
+      Tree       : in Syntax_Trees.Tree'Class;
+      Error_Node : in Syntax_Trees.Valid_Node_Access)
+     return String;
+
+   overriding function Class_Image (Data : in Lexer_Error) return String is 
("lexer");
+
+   overriding
+   procedure Validate_Error
+     (Data                : in     Lexer_Error;
+      Tree                : in     Syntax_Trees.Tree'Class;
+      Error_Node          : in     Syntax_Trees.Valid_Node_Access;
+      Node_Error_Reported : in out Boolean)
+   is null;
+
+   function Input_Lexer_Error (Stream : not null access 
Ada.Streams.Root_Stream_Type'Class) return Lexer_Error;
+   --  Raises ada.streams.stream_IO.End_Error when first stream element read 
is ')'
+   procedure Output_Lexer_Error (Stream : not null access 
Ada.Streams.Root_Stream_Type'Class; Item : in Lexer_Error);
+   for Lexer_Error'Input use Input_Lexer_Error;
+   for Lexer_Error'Output use Output_Lexer_Error;
+
+   overriding
+   procedure Set_Node_Access
+     (Data           : in out Lexer_Error;
+      Node_Index_Map : in     Syntax_Trees.Node_Index_Array_Node_Access.Vector)
+   is null;
+
+   type Parse_Error
+     (First_Terminal : Token_ID;
+      Last_Terminal  : Token_ID)
+   is new Syntax_Trees.Error_Data with record
+      Expecting   : Token_ID_Set (First_Terminal .. Last_Terminal);
+      Recover_Ops : aliased Recover_Op_Nodes_Arrays.Vector;
+
+      Recover_Test : aliased Recover_Test_Info_Access; -- only set when 
running test_mckenzie_recover.adb.
+   end record;
+
+   overriding procedure Adjust_Copy (Data : in out Parse_Error);
+   overriding function Dispatch_Equal (Left : in Parse_Error; Right : in 
Syntax_Trees.Error_Data'Class) return Boolean;
+   overriding function To_Message
+     (Data       : in Parse_Error;
+      Tree       : in Syntax_Trees.Tree'Class;
+      Error_Node : in Syntax_Trees.Valid_Node_Access)
+     return Syntax_Trees.Error_Data'Class;
+
+   overriding function Image
+     (Data       : in Parse_Error;
+      Tree       : in Syntax_Trees.Tree'Class;
+      Error_Node : in Syntax_Trees.Valid_Node_Access)
+     return String;
+
+   overriding function Class_Image (Data : in Parse_Error) return String is 
("parser");
+
+   overriding
+   procedure Validate_Error
+     (Data                : in     Parse_Error;
+      Tree                : in     Syntax_Trees.Tree'Class;
+      Error_Node          : in     Syntax_Trees.Valid_Node_Access;
+      Node_Error_Reported : in out Boolean);
+
+   function Input_Parse_Error (Stream : not null access Ada.Streams.Root_Stream_Type'Class) return Parse_Error;
+   --  Raises ada.streams.stream_IO.End_Error when first stream element read is ')'
+
+   procedure Output_Parse_Error (Stream : not null access Ada.Streams.Root_Stream_Type'Class; Item : in Parse_Error);
+   for Parse_Error'Input use Input_Parse_Error;
+   for Parse_Error'Output use Output_Parse_Error;
+
+   overriding
+   procedure Set_Node_Access
+     (Data           : in out Parse_Error;
+      Node_Index_Map : in     Syntax_Trees.Node_Index_Array_Node_Access.Vector);
+
+   type In_Parse_Action_Error is new Syntax_Trees.Error_Data with record
+      Status      : WisiToken.Syntax_Trees.In_Parse_Actions.Status;
+      Recover_Ops : aliased Recover_Op_Nodes_Arrays.Vector;
+
+      Recover_Test : aliased Recover_Test_Info_Access; -- only set when running test_mckenzie_recover.adb.
+   end record;
+
+   overriding procedure Adjust_Copy (Data : in out In_Parse_Action_Error);
+   overriding function Dispatch_Equal
+     (Left  : in In_Parse_Action_Error;
+      Right : in Syntax_Trees.Error_Data'Class)
+     return Boolean;
+   overriding function To_Message
+     (Data       : in In_Parse_Action_Error;
+      Tree       : in Syntax_Trees.Tree'Class;
+      Error_Node : in Syntax_Trees.Valid_Node_Access)
+     return Syntax_Trees.Error_Data'Class;
+
+   overriding function Image
+     (Data       : in In_Parse_Action_Error;
+      Tree       : in Syntax_Trees.Tree'Class;
+      Error_Node : in Syntax_Trees.Valid_Node_Access)
+     return String;
+
+   overriding function Class_Image (Data : in In_Parse_Action_Error) return String is ("in_parse_action");
+
+   overriding
+   procedure Validate_Error
+     (Data                : in     In_Parse_Action_Error;
+      Tree                : in     Syntax_Trees.Tree'Class;
+      Error_Node          : in     Syntax_Trees.Valid_Node_Access;
+      Node_Error_Reported : in out Boolean);
+
+   function Input_In_Parse_Action_Error
+     (Stream : not null access Ada.Streams.Root_Stream_Type'Class) return In_Parse_Action_Error;
+   --  Raises ada.streams.stream_IO.End_Error when first stream element read is ')'
+   procedure Output_In_Parse_Action_Error
+     (Stream : not null access Ada.Streams.Root_Stream_Type'Class; Item : in In_Parse_Action_Error);
+   for In_Parse_Action_Error'Input use Input_In_Parse_Action_Error;
+   for In_Parse_Action_Error'Output use Output_In_Parse_Action_Error;
+
+   overriding
+   procedure Set_Node_Access
+     (Data           : in out In_Parse_Action_Error;
+      Node_Index_Map : in     Syntax_Trees.Node_Index_Array_Node_Access.Vector);
+
+   type Error_Message is new Syntax_Trees.Error_Data with record
+      Msg         : Ada.Strings.Unbounded.Unbounded_String;
+      Recover_Ops : aliased Recover_Op_Nodes_Arrays.Vector;
+
+      Recover_Test : aliased Recover_Test_Info_Access; -- only set when running test_mckenzie_recover.adb.
+   end record;
+
+   overriding procedure Adjust_Copy (Data : in out Error_Message);
+   overriding function Dispatch_Equal
+     (Left  : in Error_Message;
+      Right : in Syntax_Trees.Error_Data'Class)
+     return Boolean;
+   overriding function To_Message
+     (Data       : in Error_Message;
+      Tree       : in Syntax_Trees.Tree'Class;
+      Error_Node : in Syntax_Trees.Valid_Node_Access)
+     return Syntax_Trees.Error_Data'Class;
+
+   overriding function Image
+     (Data       : in Error_Message;
+      Tree       : in Syntax_Trees.Tree'Class;
+      Error_Node : in Syntax_Trees.Valid_Node_Access)
+     return String;
+
+   overriding function Class_Image (Data : in Error_Message) return String is ("message");
+
+   overriding
+   procedure Validate_Error
+     (Data                : in     Error_Message;
+      Tree                : in     Syntax_Trees.Tree'Class;
+      Error_Node          : in     Syntax_Trees.Valid_Node_Access;
+      Node_Error_Reported : in out Boolean);
+
+   type Recover_Op_Array_Const_Ref_Type (Element : not null access constant Recover_Op_Nodes_Arrays.Vector) is private
+   with Implicit_Dereference => Element;
+
+   --  WORKAROUND: if Recover_Op_Array_Const_Ref_From_Cursor is named
+   --  Recover_Op_Array_Const_Ref, GNAT Community 2021 confuses it with
+   --  the other Recover_Op_Array_Const_Ref.
+   function Recover_Op_Array_Const_Ref_From_Cursor
+     (Item : in Syntax_Trees.Error_Data_Lists.Cursor)
+     return Recover_Op_Array_Const_Ref_Type
+   with Pre => Syntax_Trees.Error_Data_Lists.Has_Element (Item) and then
+               not (Syntax_Trees.Error_Data_Lists.Element (Item) in Lexer_Error);
+
+   function Recover_Op_Array_Const_Ref
+     (Error : aliased in Syntax_Trees.Error_Data'Class)
+     return Recover_Op_Array_Const_Ref_Type
+   with Pre => not (Error in Lexer_Error);
+
+   type Recover_Op_Array_Var_Ref_Type (Element : not null access Recover_Op_Nodes_Arrays.Vector) is private
+   with Implicit_Dereference => Element;
+
+   function Recover_Op_Array_Var_Ref
+     (Error : aliased in out Syntax_Trees.Error_Data'Class)
+     return Recover_Op_Array_Var_Ref_Type
+   with Pre => not (Error in Lexer_Error);
+
+   type Recover_Test_Const_Ref_Type (Element : not null access constant Recover_Test_Info) is private
+   with Implicit_Dereference => Element;
+
+   function Recover_Test_Const_Ref
+     (Item : in Syntax_Trees.Error_Data_Lists.Cursor)
+     return Recover_Test_Const_Ref_Type
+   with Pre => Syntax_Trees.Error_Data_Lists.Has_Element (Item) and then
+               not (Syntax_Trees.Error_Data_Lists.Element (Item) in Lexer_Error);
+
+   type Recover_Test_Var_Ref_Type (Element : not null access Recover_Test_Info_Access) is private
+   with Implicit_Dereference => Element;
+
+   function Recover_Test_Var_Ref
+     (Error : aliased in out Syntax_Trees.Error_Data'Class)
+     return Recover_Test_Var_Ref_Type
+   with Pre => not (Error in Lexer_Error);
+
+   function Recover_Image
+     (Error : in Syntax_Trees.Error_Data'Class;
+      Tree  : in Syntax_Trees.Tree)
+     return String
+   with Pre => not (Error in Lexer_Error);
+   --  Aggregate image of Error.Recover_Ops.
+
+   function Recover_Image is new Syntax_Trees.Error_Data_Lists.Gen_Image_Aux (Syntax_Trees.Tree, Recover_Image);
+
+   function Input_Error_Message (Stream : not null access Ada.Streams.Root_Stream_Type'Class) return Error_Message;
+   --  Raises ada.streams.stream_IO.End_Error when first stream element read is ')'
+   procedure Output_Error_Message
+     (Stream : not null access Ada.Streams.Root_Stream_Type'Class; Item : in Error_Message);
+   for Error_Message'Input use Input_Error_Message;
+   for Error_Message'Output use Output_Error_Message;
+
+   overriding
+   procedure Set_Node_Access
+     (Data           : in out Error_Message;
+      Node_Index_Map : in     Syntax_Trees.Node_Index_Array_Node_Access.Vector);
+
+   function Error_Pred_Lexer (Cur : in Syntax_Trees.Error_Data_Lists.Cursor) return Boolean;
+   --  Return True if Cur is a Lexer_Error; for
+   --  Syntax_Trees.Error_Predicate.
+
+   function Error_Pred_Parse (Cur : in Syntax_Trees.Error_Data_Lists.Cursor) return Boolean;
+   --  Return True if Cur is a Parse_Error; for
+   --  Syntax_Trees.Error_Predicate.
+
+   function Error_Pred_In_Parse_Action (Cur : in Syntax_Trees.Error_Data_Lists.Cursor) return Boolean;
+   --  Return True if Cur is In_Parse_Action_Error;
+   --  for Syntax_Trees.Error_Predicate.
+
+   function Error_Pred_Message (Cur : in Syntax_Trees.Error_Data_Lists.Cursor) return Boolean;
+   --  Return True if Cur is an Error_Message; for
+   --  Syntax_Trees.Error_Predicate.
+
+   function Error_Pred_Parse_Message (Cur : in Syntax_Trees.Error_Data_Lists.Cursor) return Boolean;
+   --  Return True if Cur is one of Parse_Error or Error_Message; for
+   --  Syntax_Trees.Error_Predicate.
+
+   function Error_Pred_Lexer_Parse_Message (Cur : in Syntax_Trees.Error_Data_Lists.Cursor) return Boolean;
+   --  Return True if Cur is one of Lexer_Error, Parse_Error, or
+   --  Error_Message; for Syntax_Trees.Error_Predicate.
+
+   function Find_Parse_In_Parse_Action_Error
+     (Tree : in Syntax_Trees.Tree;
+      Node : in Syntax_Trees.Valid_Node_Access)
+     return Syntax_Trees.Error_Data'Class;
+   --  Return the first Parse_Error or In_Parse_Action_Error from Node.
+   --
+   --  This does not return a reference, because any update to an error
+   --  requires copying the error node; see note at declaration of
+   --  Syntax_Trees.Error_Data.
+   --  FIXME: delete if not used.
+
+   function Find_Non_Lexer_Error
+     (Tree : in Syntax_Trees.Tree;
+      Node : in Syntax_Trees.Valid_Node_Access)
+     return Syntax_Trees.Error_Data'Class;
+   --  FIXME: delete if not used.
+
+   type Base_Parser is abstract new Ada.Finalization.Limited_Controlled
+   with record
+      Tree        : aliased Syntax_Trees.Tree;
+      Productions : Syntax_Trees.Production_Info_Trees.Vector;
+      User_Data   : Syntax_Trees.User_Data_Access;
    end record;
    --  Common to all parsers. Finalize should free any allocated objects.
 
-   function Next_Grammar_Token (Parser : in out Base_Parser'Class) return Token_ID;
+   function Source_File_Name (Item : in Base_Parser'Class) return String
+   is (Item.Tree.Lexer.File_Name);
+
+   function Get_In_Parse_Action
+     (Parser : in Base_Parser;
+      ID     : in Production_ID)
+     return Syntax_Trees.In_Parse_Actions.In_Parse_Action;
+
+   function Get_Post_Parse_Action
+     (Productions : in Syntax_Trees.Production_Info_Trees.Vector;
+      ID          : in Production_ID)
+     return Syntax_Trees.Post_Parse_Action;
+
+   function Get_Post_Parse_Action
+     (Parser : in Base_Parser;
+      ID     : in Production_ID)
+     return Syntax_Trees.Post_Parse_Action;
+
+   function Next_Grammar_Token
+     (Parser            : in out Base_Parser'Class;
+      Last_Grammar_Node : in out WisiToken.Syntax_Trees.Node_Access)
+     return Token_ID
+   with Post => Next_Grammar_Token'Result /= Invalid_Token_ID;
    --  Get next token from Lexer, call User_Data.Lexer_To_Augmented. If
-   --  it is a grammar token, store in Terminals and return its id.
-   --  Otherwise, repeat.
+   --  it is a grammar token, store in Parser.Tree (Stream) and return
+   --  its ID. If it is a non_grammar token, store it in
+   --  Last_Grammar_Node.Non_Grammar or Parser.Tree.Non_Grammar, and
+   --  repeat.
    --
    --  Propagates Fatal_Error from Lexer.
 
-   procedure Lex_All (Parser : in out Base_Parser'Class);
-   --  Clear Terminals, Line_Begin_Token; reset User_Data. Then call
-   --  Next_Grammar_Token repeatedly until EOF_ID is returned.
+   procedure Lex_All (Parser : in out Base_Parser'Class)
+   with Pre => Parser.Tree.Cleared;
+   --  Call Tree.Start_Lex, clear Last_Grammar_Node; reset User_Data.
+   --  Then call Next_Grammar_Token repeatedly until EOF_ID is returned,
+   --  storing all tokens in Parser.Tree.Shared_Stream.
    --
    --  The user must first call Lexer.Reset_* to set the input text.
 
-   procedure Parse (Parser : aliased in out Base_Parser) is abstract;
-   --  Call Lex_All, then execute parse algorithm to parse the tokens,
-   --  storing the result in Parser for Execute_Actions.
+   type KMN is record
+      --  Similar to [Lahav 2004] page 6; describes changed and unchanged
+      --  regions in a text buffer. We assume the range boundaries do not
+      --  break a multi-byte character.
+
+      Stable_Bytes : Zero_Buffer_Pos; -- Count of unmodified bytes before change
+      Stable_Chars : Zero_Buffer_Pos; -- "" characters
+
+      Inserted_Bytes : Zero_Buffer_Pos; -- Count of inserted bytes, after Stable.
+      Inserted_Chars : Zero_Buffer_Pos;
+
+      Deleted_Bytes : Zero_Buffer_Pos; -- Count of deleted bytes, after Stable
+      Deleted_Chars : Zero_Buffer_Pos;
+   end record;
+
+   Invalid_KMN : constant KMN := (others => 0);
+
+   function Image (KMN : in WisiToken.Parse.KMN) return String;
+
+   procedure Validate_KMN
+     (KMN                      : in WisiToken.Parse.KMN;
+      Initial_Stable_Byte_First : in Buffer_Pos;
+      Initial_Stable_Char_First : in Buffer_Pos;
+      Edited_Stable_Byte_First  : in Buffer_Pos;
+      Edited_Stable_Char_First  : in Buffer_Pos;
+      Initial_Text_Byte_Region  : in Buffer_Region;
+      Initial_Text_Char_Region  : in Buffer_Region;
+      Edited_Text_Byte_Region   : in Buffer_Region;
+      Edited_Text_Char_Region   : in Buffer_Region);
+   --  Raise User_Error if KMN violates text regions.
+
+   package KMN_Lists is new SAL.Gen_Definite_Doubly_Linked_Lists (KMN);
+
+   function Image is new KMN_Lists.Gen_Image (Image);
+
+   procedure Validate_KMN
+     (List                     : in KMN_Lists.List;
+      Initial_Text_Byte_Region : in Buffer_Region;
+      Initial_Text_Char_Region : in Buffer_Region;
+      Edited_Text_Byte_Region  : in Buffer_Region;
+      Edited_Text_Char_Region  : in Buffer_Region);
+
+   procedure Parse
+     (Parser     : in out Base_Parser;
+      Log_File   : in     Ada.Text_IO.File_Type;
+      Edits      : in     KMN_Lists.List := KMN_Lists.Empty_List;
+      Pre_Edited : in     Boolean        := False)
+   is abstract;
+   --  If Pre_Edited, skip this first step (used for unit tests). Else if
+   --  Edits is empty, call Lex_All; if Edits is not empty, call
+   --  Edit_Tree.
+   --
+   --  Then execute parse algorithm to parse the new tokens,
+   --  storing the result in Parser.Tree for Execute_Actions.
+   --
+   --  If Log_File is open, write information about each error recover
+   --  session to it. See implementation for format.
    --
-   --  If a parse error is encountered, raises Syntax_Error.
-   --  Parser.Lexer_Errors and Parser contain information about the
-   --  errors.
+   --  If a non-recoverable parse error is encountered, raises
+   --  Syntax_Error. Parser.Lexer_Errors and Parser contain information
+   --  about the errors.
    --
    --  For other errors, raises Parse_Error with an appropriate error
    --  message.
 
-   function Tree (Parser : in Base_Parser) return Syntax_Trees.Tree is abstract;
-   --  Return the syntax tree resulting from the parse.
+   procedure Put_Errors (Tree : in Syntax_Trees.Tree)
+   with Pre => Tree.Editable;
+   procedure Put_Errors (Parser : in Base_Parser'Class)
+   with Pre => Parser.Tree.Editable;
+   --  Output Parser.Tree errors to Tree.Lexer.Trace.
 
-   function Tree_Var_Ref (Parser : aliased in out Base_Parser) return Syntax_Trees.Tree_Variable_Reference is abstract;
-   --  Return a writable reference to the syntax tree resulting from the parse.
+   procedure Put_Errors (Tree : Syntax_Trees.Tree; Stream : in Syntax_Trees.Stream_ID);
+   procedure Put_Errors (Parser : in Base_Parser'Class; Stream : in Syntax_Trees.Stream_ID);
+   --  Output Parser.Tree.Stream errors to Tree.Lexer.Trace.
 
-   function Any_Errors (Parser : in Base_Parser) return Boolean is abstract;
+   procedure Execute_Actions
+     (Parser              : in out Base_Parser;
+      Action_Region_Bytes : in     WisiToken.Buffer_Region := WisiToken.Null_Buffer_Region)
+   is abstract;
+   --  Execute all actions in Parser.Tree nodes that overlap
+   --  Action_Region_Bytes; all nodes if Action_Region_Bytes =
+   --  Null_Buffer_Region. See wisitoken-syntax_trees.ads for other
+   --  actions performed by Execute_Actions.
 
-   procedure Put_Errors (Parser : in Base_Parser) is abstract;
-   --  Output error messages to Ada.Text_IO.Current_Error.
+private
 
-   procedure Execute_Actions
-     (Parser          : in out Base_Parser;
-      Image_Augmented : in     Syntax_Trees.Image_Augmented := null)
-     is abstract;
-   --  Execute all actions in Parser.Tree.
+   type Recover_Op_Array_Const_Ref_Type (Element : not null access constant Recover_Op_Nodes_Arrays.Vector) is record
+      Dummy : Integer := raise Program_Error with "uninitialized reference";
+   end record;
+
+   type Recover_Op_Array_Var_Ref_Type (Element : not null access Recover_Op_Nodes_Arrays.Vector) is record
+      Dummy : Integer := raise Program_Error with "uninitialized reference";
+   end record;
+
+   type Recover_Test_Const_Ref_Type (Element : not null access constant Recover_Test_Info) is record
+      Dummy : Integer := raise Program_Error with "uninitialized reference";
+   end record;
+
+   type Recover_Test_Var_Ref_Type (Element : not null access Recover_Test_Info_Access) is record
+      Dummy : Integer := raise Program_Error with "uninitialized reference";
+   end record;
+
+   --  Visible for child packages
+
+   procedure Process_Grammar_Token
+     (Parser : in out Base_Parser'Class;
+      Token  : in     Lexer.Token;
+      Node   : in     Syntax_Trees.Valid_Node_Access);
+
+   procedure Process_Non_Grammar_Token
+     (Parser       : in out Base_Parser'Class;
+      Grammar_Node : in     Syntax_Trees.Valid_Node_Access;
+      Token        : in     Lexer.Token);
 
 end WisiToken.Parse;
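
A minimal sketch of how a single text edit is described to the new
incremental interface above (this fragment is not from the wisi
sources; it assumes the SAL list instantiation provides Append, and
the names KMN_Sketch, My_Parser and Log_File are invented for the
illustration).  After 10 unchanged bytes/chars, 3 bytes/chars are
deleted and 6 are inserted:

   with WisiToken.Parse;
   procedure KMN_Sketch is
      Edit : constant WisiToken.Parse.KMN :=
        (Stable_Bytes   => 10, Stable_Chars   => 10,
         Inserted_Bytes => 6,  Inserted_Chars => 6,
         Deleted_Bytes  => 3,  Deleted_Chars  => 3);

      Edits : WisiToken.Parse.KMN_Lists.List;
   begin
      Edits.Append (Edit);
      --  A concrete parser derived from Base_Parser would then run,
      --  roughly:
      --
      --     My_Parser.Parse (Log_File, Edits);
      --     My_Parser.Execute_Actions;
      --     My_Parser.Put_Errors;
   end KMN_Sketch;

Validate_KMN above can be used to check such a list against the pre-
and post-edit text regions before it is passed to Parse.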
diff --git a/wisitoken-parse_table-mode.el b/wisitoken-parse_table-mode.el
index 7cd12a9b0e..5a3c712ada 100644
--- a/wisitoken-parse_table-mode.el
+++ b/wisitoken-parse_table-mode.el
@@ -1,6 +1,6 @@
 ;; wisitoken-parse_table-mode.el --- For navigating in a parse table as output by wisitoken-bnf-generate. -*- lexical-binding:t -*-
 ;;
-;; Copyright (C) 2017 - 2021  Free Software Foundation, Inc.
+;; Copyright (C) 2017 - 2022  Free Software Foundation, Inc.
 ;;
 ;; Author: Stephen Leake <stephen_leake@stephe-leake.org>
 ;; Maintainer: Stephen Leake <stephen_leake@stephe-leake.org>
@@ -26,11 +26,21 @@
 
 (require 'xref)
 
+(defvar wisitoken-parse_table-last-buffer nil
+  "Last buffer in which a wisitoken-parse_table operation was performed")
+
 (defun wisitoken-parse_table--xref-backend () 'wisitoken-parse_table)
 
-(cl-defgeneric xref-backend-identifier-completion-table ((_backend (eql wisitoken-parse_table)))
-  ;; could complete on nonterms, find productions
-  nil)
+(cl-defgeneric xref-backend-identifier-completion-table (_backend)
+  (let ((names nil))
+    (save-excursion
+      (goto-char (point-min))
+      (search-forward "Productions:")
+      (forward-line)
+      (while (looking-at "[0-9.]+: \\([a-z_]+\\) <=")
+       (push (cons (match-string 1) (list (buffer-file-name) (line-number-at-pos) 0)) names)
+       (forward-line))
+      names)))
 
 (cl-defmethod xref-backend-identifier-at-point ((_backend (eql wisitoken-parse_table)))
   ;; if we are on one of:
@@ -43,6 +53,7 @@
   ;;
   ;; - 'reduce n tokens to <nonterminal> <prod_id>'
   ;; => return 'prod_id: name'
+  (setq wisitoken-parse_table-last-buffer (current-buffer))
   (cond
    ((save-excursion
       (beginning-of-line)
@@ -58,8 +69,9 @@
    (t
     (thing-at-point 'symbol))))
 
-(cl-defgeneric xref-backend-definitions ((_backend (eql wisitoken-parse_table)) identifier)
+(cl-defgeneric xref-backend-definitions (_backend identifier)
   ;; IDENTIFIER is from xref-backend-identifier-at-point; a state number or a nonterminal
+  (setq wisitoken-parse_table-last-buffer (current-buffer))
   (let ((state-p (string-match "\\`[0-9]+\\'" identifier))
        (prod_id-p (string-match "\\`[0-9.]+: " identifier)))
     (save-excursion
@@ -77,6 +89,100 @@
       (list (xref-make identifier (xref-make-buffer-location (current-buffer) (match-beginning 0))))
       )))
 
+;;;###autoload
+(defun wisitoken-parse_table-goto ()
+  "Get symbol at point, goto symbol's definition.
+Symbol can be a nonterminal name, or a state number."
+  (interactive)
+  (let ((symbol (thing-at-point 'symbol)))
+    (pop-to-buffer wisitoken-parse_table-last-buffer)
+    (xref-find-definitions symbol)))
+
+(defun wisitok-p_t-nonterm-alist ()
+  (let ((names nil))
+    (save-excursion
+      (goto-char (point-min))
+      (search-forward "Productions:")
+      (forward-line)
+      (while (looking-at "\\([0-9]+\\.[0-9]+\\): \\([a-z_]+\\) <=")
+       (push (cons (match-string 1) (match-string 2)) names)
+       (forward-line))
+      names)))
+
+(defconst wisitok-p_t-conflict-reduce-regexp
+  "\\(reduce\\) [0-9]+ tokens to \\([[:alnum:]_]+\\)")
+
+(defun wisitok-p_t-conflict-alist ()
+  (let ((conflicts nil)
+       (nonterms (wisitok-p_t-nonterm-alist))
+       line)
+
+    (save-excursion
+      (goto-char (point-min))
+      (search-forward "Parse Table:")
+      (while (search-forward-regexp (concat "^ +" wisitok-p_t-conflict-reduce-regexp) nil t)
+       (let ((conflict (concat "REDUCE " (match-string 2)))
+             (on-token nil))
+         (goto-char (line-beginning-position 0))
+         (setq line (line-number-at-pos))
+         (back-to-indentation)
+         (looking-at "\\([A-Z_]+\\) +=> ")
+         (setq on-token (match-string 1))
+         (goto-char (match-end 0))
+         (looking-at
+          (concat "\\(?:" wisitok-p_t-conflict-reduce-regexp
+                  "\\)\\|\\(?:\\(shift\\) and goto state [0-9]+ 
\\([0-9]+\\.[0-9]+\\)\\)"))
+         (cond
+          ((match-beginning 1)
+           (setq conflict (concat "REDUCE " (match-string 2) " | " conflict)))
+          ((match-beginning 3)
+           (setq conflict (concat "SHIFT " (cdr (assoc (match-string 4) 
nonterms)) " | " conflict)))
+          )
+
+         (forward-line 2)
+         (while (looking-at (concat "^ +" wisitok-p_t-conflict-reduce-regexp))
+           (setq conflict (concat conflict " | REDUCE " (match-string 2)))
+           (forward-line 1))
+
+         (setq conflict (concat conflict " on token " on-token))
+
+         (push (cons conflict (list (buffer-file-name) line 0)) conflicts)
+         )))
+    conflicts))
+
+(defconst wisitok-p_t-action-nonterm-regexp "\\(?:SHIFT\\|REDUCE\\) [[:alnum:]_]+")
+
+(defun wisitoken-parse_table--get-conflict ()
+  (save-excursion
+    (goto-char (line-beginning-position))
+    (when (looking-at "%conflict ")
+     (goto-char (match-end 0))
+     (looking-at
+      (concat
+       wisitok-p_t-action-nonterm-regexp
+       "\\(?: | " wisitok-p_t-action-nonterm-regexp "\\)+ on token 
[[:alnum:]_]+"))
+     (match-string-no-properties 0))))
+
+;;;###autoload
+(defun wisitoken-parse_table-conflict-goto (&optional prompt)
+  "Get conflict at point, goto first occurance.
+With user arg, prompt for parse table buffer."
+  (interactive "P")
+  (when prompt
+    (setq wisitoken-parse_table-last-buffer
+         (get-buffer
+          (read-buffer
+           (format "parse table buffer (%s): " 
wisitoken-parse_table-last-buffer)
+           wisitoken-parse_table-last-buffer
+           t))))
+  (let ((conflict (wisitoken-parse_table--get-conflict)))
+    (pop-to-buffer wisitoken-parse_table-last-buffer)
+    ;; IMPROVEME: we may need to cache the completion table for large buffers
+    (let ((loc (cdr (assoc conflict (wisitok-p_t-conflict-alist)))))
+      (if loc
+         (wisi-goto-source (nth 0 loc) (nth 1 loc) (nth 2 loc))
+       (user-error "conflict not found")))))
+
 ;;;###autoload
 (define-minor-mode wisitoken-parse_table-mode
   "Provides navigation in wisi-generate parse table output."
diff --git a/wisitoken-productions.ads b/wisitoken-productions.ads
index 8b8c8f8d79..b134a50bab 100644
--- a/wisitoken-productions.ads
+++ b/wisitoken-productions.ads
@@ -2,7 +2,7 @@
 --
 --  Type and operations for building grammar productions.
 --
---  Copyright (C) 2018 - 2020 Free Software Foundation, Inc.
+--  Copyright (C) 2018 - 2022 Free Software Foundation, Inc.
 --
 --  This file is part of the WisiToken package.
 --
@@ -21,7 +21,6 @@
 pragma License (Modified_GPL);
 
 with SAL.Gen_Unbounded_Definite_Vectors;
-with WisiToken.Semantic_Checks;
 with WisiToken.Syntax_Trees;
 package WisiToken.Productions is
    use all type Ada.Containers.Count_Type;
@@ -38,8 +37,8 @@ package WisiToken.Productions is
       --  Recursion for each token. There may be more than one recursion cycle for any token,
       --  but we don't track that.
 
-      Action : WisiToken.Syntax_Trees.Semantic_Action;
-      Check  : WisiToken.Semantic_Checks.Semantic_Check;
+      Post_Parse_Action : Syntax_Trees.Post_Parse_Action;
+      In_Parse_Action   : Syntax_Trees.In_Parse_Actions.In_Parse_Action;
    end record
    with Dynamic_Predicate =>
      (Tokens.Length = 0 or Tokens.First_Index = 1) and
@@ -50,8 +49,9 @@ package WisiToken.Productions is
      (Natural, Right_Hand_Side, Default_Element => (others => <>));
 
    type Instance is record
-      LHS  : Token_ID := Invalid_Token_ID;
-      RHSs : RHS_Arrays.Vector;
+      LHS            : Token_ID := Invalid_Token_ID;
+      Optimized_List : Boolean  := False;
+      RHSs           : RHS_Arrays.Vector;
    end record;
 
    package Prod_Arrays is new SAL.Gen_Unbounded_Definite_Vectors
@@ -74,10 +74,10 @@ package WisiToken.Productions is
    --  Put Image of each production to Ada.Text_IO.Current_Output, for parse_table.
 
    package Line_Number_Arrays is new SAL.Gen_Unbounded_Definite_Vectors
-     (Natural, Line_Number_Type, Default_Element => Invalid_Line_Number);
+     (Natural, Line_Number_Type, Default_Element => Line_Number_Type'First);
 
    type Prod_Source_Line_Map is record
-      Line    : Line_Number_Type := Invalid_Line_Number;
+      Line    : Line_Number_Type := Line_Number_Type'First;
       RHS_Map : Line_Number_Arrays.Vector;
    end record;
 
diff --git a/wisitoken-semantic_checks.adb b/wisitoken-semantic_checks.adb
deleted file mode 100644
index d69ac77f3d..0000000000
--- a/wisitoken-semantic_checks.adb
+++ /dev/null
@@ -1,152 +0,0 @@
---  Abstract :
---
---  See spec.
---
---  Copyright (C) 2017 - 2019 Free Software Foundation, Inc.
---
---  This library is free software;  you can redistribute it and/or modify it
---  under terms of the  GNU General Public License  as published by the Free
---  Software  Foundation;  either version 3,  or (at your  option) any later
---  version. This library is distributed in the hope that it will be useful,
---  but WITHOUT ANY WARRANTY;  without even the implied warranty of MERCHAN-
---  TABILITY or FITNESS FOR A PARTICULAR PURPOSE.
-
---  As a special exception under Section 7 of GPL version 3, you are granted
---  additional permissions described in the GCC Runtime Library Exception,
---  version 3.1, as published by the Free Software Foundation.
-
-pragma License (Modified_GPL);
-
-with Ada.Characters.Handling;
-package body WisiToken.Semantic_Checks is
-
-   function Image (Item : in Check_Status; Descriptor : in 
WisiToken.Descriptor) return String
-   is begin
-      case Item.Label is
-      when Ok =>
-         return Check_Status_Label'Image (Item.Label);
-      when Error =>
-         return '(' & Check_Status_Label'Image (Item.Label) & ", " &
-           Image (Item.Begin_Name, Descriptor) & ',' &
-           Image (Item.End_Name, Descriptor) & ')';
-      end case;
-   end Image;
-
-   function Match_Names
-     (Lexer        : access constant WisiToken.Lexer.Instance'Class;
-      Descriptor   : in     WisiToken.Descriptor;
-      Tokens       : in     Recover_Token_Array;
-      Start_Index  : in     Positive_Index_Type;
-      End_Index    : in     Positive_Index_Type;
-      End_Optional : in     Boolean)
-     return Check_Status
-   is
-      Start_Name_Region : constant Buffer_Region :=
-        (if Tokens (Start_Index).Name = Null_Buffer_Region
-         then Tokens (Start_Index).Byte_Region
-         else Tokens (Start_Index).Name);
-      End_Name_Region : constant Buffer_Region :=
-        (if Tokens (End_Index).Name = Null_Buffer_Region
-         then Tokens (End_Index).Byte_Region
-         else Tokens (End_Index).Name);
-
-      function Equal return Boolean
-      is
-         use Ada.Characters.Handling;
-      begin
-         if Descriptor.Case_Insensitive then
-            return To_Lower (Lexer.Buffer_Text (Start_Name_Region)) =
-              To_Lower (Lexer.Buffer_Text (End_Name_Region));
-         else
-            return Lexer.Buffer_Text (Start_Name_Region) = Lexer.Buffer_Text 
(End_Name_Region);
-         end if;
-      end Equal;
-
-   begin
-      if Tokens (Start_Index).Virtual or Tokens (End_Index).Virtual then
-         return (Label => Ok);
-
-      elsif End_Optional then
-         if End_Name_Region = Null_Buffer_Region then
-            return (Label => Ok);
-         elsif Start_Name_Region = Null_Buffer_Region then
-            return (Extra_Name_Error, Tokens (Start_Index), Tokens 
(End_Index));
-         else
-            if Equal then
-               return (Label => Ok);
-            else
-               return (Match_Names_Error, Tokens (Start_Index), Tokens 
(End_Index));
-            end if;
-         end if;
-
-      else
-         if Start_Name_Region = Null_Buffer_Region then
-            if End_Name_Region = Null_Buffer_Region then
-               return (Label => Ok);
-            else
-               return (Extra_Name_Error, Tokens (Start_Index), Tokens 
(End_Index));
-            end if;
-
-         elsif End_Name_Region = Null_Buffer_Region then
-            return (Missing_Name_Error, Tokens (Start_Index), Tokens 
(End_Index));
-
-         else
-            if Equal then
-               return (Label => Ok);
-            else
-               return (Match_Names_Error, Tokens (Start_Index), Tokens 
(End_Index));
-            end if;
-         end if;
-      end if;
-   end Match_Names;
-
-   function Propagate_Name
-     (Nonterm    : in out Recover_Token;
-      Tokens     : in     Recover_Token_Array;
-      Name_Index : in     Positive_Index_Type)
-     return Check_Status
-   is begin
-      if Tokens (Name_Index).Name = Null_Buffer_Region then
-         Nonterm.Name := Tokens (Name_Index).Byte_Region;
-      else
-         Nonterm.Name := Tokens (Name_Index).Name;
-      end if;
-      return (Label => Ok);
-   end Propagate_Name;
-
-   function Merge_Names
-     (Nonterm     : in out Recover_Token;
-      Tokens      : in     Recover_Token_Array;
-      First_Index : in     Positive_Index_Type;
-      Last_Index  : in     Positive_Index_Type)
-     return Check_Status
-   is
-      First_Name : Buffer_Region renames Tokens (First_Index).Name;
-      Last_Name  : Buffer_Region renames Tokens (Last_Index).Name;
-   begin
-      Nonterm.Name :=
-        First_Name and
-          (if Last_Name = Null_Buffer_Region
-           then Tokens (Last_Index).Byte_Region
-           else Last_Name);
-      return (Label => Ok);
-   end Merge_Names;
-
-   function Terminate_Partial_Parse
-     (Partial_Parse_Active    : in Boolean;
-      Partial_Parse_Byte_Goal : in Buffer_Pos;
-      Recover_Active          : in Boolean;
-      Nonterm                 : in Recover_Token)
-     return Check_Status
-   is begin
-      if Partial_Parse_Active and then
-        (not Recover_Active) and then
-        Nonterm.Byte_Region.Last >= Partial_Parse_Byte_Goal
-      then
-         raise WisiToken.Partial_Parse;
-      else
-         return (Label => Ok);
-      end if;
-   end Terminate_Partial_Parse;
-
-end WisiToken.Semantic_Checks;
diff --git a/wisitoken-semantic_checks.ads b/wisitoken-semantic_checks.ads
deleted file mode 100644
index c55371e627..0000000000
--- a/wisitoken-semantic_checks.ads
+++ /dev/null
@@ -1,106 +0,0 @@
---  Abstract :
---
---  Grammar semantic check routines.
---
---  Copyright (C) 2017 - 2019 Free Software Foundation, Inc.
---
---  This library is free software;  you can redistribute it and/or modify it
---  under terms of the  GNU General Public License  as published by the Free
---  Software  Foundation;  either version 3,  or (at your  option) any later
---  version. This library is distributed in the hope that it will be useful,
---  but WITHOUT ANY WARRANTY;  without even the implied warranty of MERCHAN-
---  TABILITY or FITNESS FOR A PARTICULAR PURPOSE.
-
---  As a special exception under Section 7 of GPL version 3, you are granted
---  additional permissions described in the GCC Runtime Library Exception,
---  version 3.1, as published by the Free Software Foundation.
-
-pragma License (Modified_GPL);
-
-with WisiToken.Lexer;
-package WisiToken.Semantic_Checks is
-
-   type Check_Status_Label is
-     (Ok,
-      Missing_Name_Error, -- block start has name, required block end name 
missing
-      Extra_Name_Error,   -- block start has no name, end has one
-      Match_Names_Error); -- both names present, but don't match
-
-   subtype Error is Check_Status_Label range Check_Status_Label'Succ (Ok) .. 
Check_Status_Label'Last;
-
-   type Check_Status (Label : Check_Status_Label := Ok) is record
-      case Label is
-      when Ok =>
-         null;
-
-      when Error =>
-         Begin_Name : Recover_Token;
-         End_Name   : Recover_Token;
-      end case;
-   end record;
-
-   subtype Error_Check_Status is Check_Status
-   with Dynamic_Predicate => Error_Check_Status.Label /= Ok;
-
-   function Image (Item : in Check_Status; Descriptor : WisiToken.Descriptor) 
return String;
-
-   type Semantic_Check is access function
-     (Lexer          : access constant WisiToken.Lexer.Instance'Class;
-      Nonterm        : in out Recover_Token;
-      Tokens         : in     Recover_Token_Array;
-      Recover_Active : in     Boolean)
-     return Check_Status;
-   --  Called during parsing and error recovery to implement higher level
-   --  checks, such as block name matching in Ada.
-
-   Null_Check : constant Semantic_Check := null;
-
-   function Match_Names
-     (Lexer        : access constant WisiToken.Lexer.Instance'Class;
-      Descriptor   : in     WisiToken.Descriptor;
-      Tokens       : in     Recover_Token_Array;
-      Start_Index  : in     Positive_Index_Type;
-      End_Index    : in     Positive_Index_Type;
-      End_Optional : in     Boolean)
-     return Check_Status;
-   --  Check that buffer text at Tokens (Start_Index).Name matches buffer
-   --  text at Tokens (End_Index).Name. Comparison is controlled by
-   --  Descriptor.Case_Insensitive.
-
-   function Propagate_Name
-     (Nonterm    : in out Recover_Token;
-      Tokens     : in     Recover_Token_Array;
-      Name_Index : in     Positive_Index_Type)
-     return Check_Status;
-   function Merge_Names
-     (Nonterm     : in out Recover_Token;
-      Tokens      : in     Recover_Token_Array;
-      Name_Index  : in     Positive_Index_Type)
-     return Check_Status
-   renames Propagate_Name;
-   --  Set Nonterm.Name to Tokens (Name_Index).Name, or .Byte_Region, if
-   --  .Name is Null_Buffer_Region. Return Ok.
-
-   function Merge_Names
-     (Nonterm     : in out Recover_Token;
-      Tokens      : in     Recover_Token_Array;
-      First_Index : in     Positive_Index_Type;
-      Last_Index  : in     Positive_Index_Type)
-     return Check_Status;
-   --  Then set Nonterm.Name to the merger of Tokens (First_Index ..
-   --  Last_Index).Name, return Ok.
-   --
-   --  If Tokens (Last_Index).Name is Null_Buffer_Region, use Tokens
-   --  (Last_Index).Byte_Region instead.
-
-   function Terminate_Partial_Parse
-     (Partial_Parse_Active    : in Boolean;
-      Partial_Parse_Byte_Goal : in Buffer_Pos;
-      Recover_Active          : in Boolean;
-      Nonterm                 : in Recover_Token)
-     return Check_Status;
-   pragma Inline (Terminate_Partial_Parse);
-   --  If partial parse is complete, raise Wisitoken.Partial_Parse;
-   --  otherwise return Ok.
-
-end WisiToken.Semantic_Checks;
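
As an illustration only (these fragments are not from the wisi
sources), the three removed error labels correspond to Ada block-name
situations such as:

   procedure Foo is begin null; end Bar;   --  Match_Names_Error: both names present, don't match
   declare begin null; end Foo;            --  Extra_Name_Error: block start has no name, end has one
   Outer : loop exit; end loop;            --  Missing_Name_Error: required end name missing

In this commit the Semantic_Check concept is replaced by
Syntax_Trees.In_Parse_Actions; see the changed Right_Hand_Side record
in wisitoken-productions.ads above.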
diff --git a/wisitoken-syntax_trees-lr_utils.adb b/wisitoken-syntax_trees-lr_utils.adb
index cf04a80c13..1960eaa42c 100644
--- a/wisitoken-syntax_trees-lr_utils.adb
+++ b/wisitoken-syntax_trees-lr_utils.adb
@@ -2,7 +2,7 @@
 --
 --  See spec.
 --
---  Copyright (C) 2019, 2020 Stephen Leake All Rights Reserved.
+--  Copyright (C) 2019 - 2022 Stephen Leake All Rights Reserved.
 --
 --  This library is free software;  you can redistribute it and/or modify it
 --  under terms of the  GNU General Public License  as published by the Free
@@ -19,21 +19,19 @@ pragma License (Modified_GPL);
 package body WisiToken.Syntax_Trees.LR_Utils is
 
    procedure Raise_Programmer_Error
-     (Label      : in String;
-      Descriptor : in WisiToken.Descriptor;
-      Lexer      : in WisiToken.Lexer.Handle;
-      Tree       : in WisiToken.Syntax_Trees.Tree;
-      Terminals  : in WisiToken.Base_Token_Arrays.Vector;
-      Node       : in Node_Index)
-   is
-      Terminal_Index : constant Base_Token_Index := Tree.First_Shared_Terminal 
(Node);
-   begin
-      raise SAL.Programmer_Error with Error_Message
-        (Lexer.File_Name,
-         --  Not clear why we need Line + 1 here, to match Emacs.
-         (if Terminal_Index = Invalid_Token_Index then 1 else Terminals 
(Terminal_Index).Line + 1), 0,
-         Label & ": " &
-           Tree.Image (Node, Descriptor, Include_Children => True, 
Include_RHS_Index => True, Node_Numbers => True));
+     (Label     : in String;
+      Tree      : in WisiToken.Syntax_Trees.Tree;
+      Node      : in Node_Access)
+   is begin
+      raise SAL.Programmer_Error with Tree.Error_Message
+        (Node,
+         Message => Label & ": " &
+           Tree.Image (Node, Children => True, RHS_Index => True, Node_Numbers => True));
+   exception
+   when others =>
+      --  From Tree.Image; mangled tree
+      raise SAL.Programmer_Error with Label & ": " & Trimmed_Image (Tree.Get_Node_Index (Node)) & ":" &
+        Image (Tree.ID (Node), Tree.Lexer.Descriptor.all);
    end Raise_Programmer_Error;
 
    function Count (Container : Constant_List) return Ada.Containers.Count_Type
@@ -47,12 +45,12 @@ package body WisiToken.Syntax_Trees.LR_Utils is
       return Result;
    end Count;
 
-   function Contains (Container : in Constant_List; Node : in Valid_Node_Index) return Boolean
+   function Contains (Container : in Constant_List; Node : in Valid_Node_Access) return Boolean
    is begin
       return (for some N of Container => N = Node);
    end Contains;
 
-   function To_Cursor (Container : in Constant_List; Node : in Valid_Node_Index) return Cursor
+   function To_Cursor (Container : in Constant_List; Node : in Valid_Node_Access) return Cursor
    is
       pragma Unreferenced (Container);
    begin
@@ -66,27 +64,29 @@ package body WisiToken.Syntax_Trees.LR_Utils is
 
    function First
      (Tree       : in WisiToken.Syntax_Trees.Tree;
-      Root       : in WisiToken.Node_Index;
+      Root       : in Node_Access;
       List_ID    : in WisiToken.Token_ID;
       Element_ID : in WisiToken.Token_ID)
-     return Node_Index
+     return Node_Access
    is begin
-      if Root = Invalid_Node_Index then
-         return Invalid_Node_Index;
+      if Root = Invalid_Node_Access then
+         return Invalid_Node_Access;
       else
-         return Result : Node_Index do
+         return Result : Node_Access do
             Result := Root;
             loop
                declare
-                  Children : constant Valid_Node_Index_Array := Tree.Children 
(Result);
+                  Child : constant Node_Access := Tree.Child (Result, 1);
                begin
-                  if Tree.ID (Children (1)) = List_ID then
-                     Result := Children (1);
-                  elsif Tree.ID (Children (1)) = Element_ID then
-                     Result := Children (1);
+                  if Child = null then
+                     raise SAL.Programmer_Error with "deleted child";
+                  elsif Tree.ID (Child) = List_ID then
+                     Result := Child;
+                  elsif Tree.ID (Child) = Element_ID then
+                     Result := Child;
                      exit;
                   else
-                     raise SAL.Programmer_Error;
+                     raise SAL.Programmer_Error with "node" & Result'Image & " 
is not an element of the list";
                   end if;
                end;
             end loop;
@@ -101,11 +101,11 @@ package body WisiToken.Syntax_Trees.LR_Utils is
 
    function Last
      (Tree : in WisiToken.Syntax_Trees.Tree;
-      Root : in WisiToken.Node_Index)
-     return Node_Index
+      Root : in Node_Access)
+     return Node_Access
    is begin
-      if Root = Invalid_Node_Index then
-         return Invalid_Node_Index;
+      if Root = Invalid_Node_Access then
+         return Invalid_Node_Access;
       else
          --  Tree is one of:
          --
@@ -118,7 +118,7 @@ package body WisiToken.Syntax_Trees.LR_Utils is
          --  | element_list
          --  | | element:
          --  | element: Last
-         return Tree.Child (Root, SAL.Base_Peek_Type (Tree.Child_Count 
(Root)));
+         return Tree.Child (Root, Tree.Child_Count (Root));
       end if;
    end Last;
 
@@ -131,13 +131,13 @@ package body WisiToken.Syntax_Trees.LR_Utils is
      (Tree       : in Syntax_Trees.Tree;
       List_ID    : in Token_ID;
       Element_ID : in Token_ID;
-      Position   : in Node_Index)
-     return Node_Index
+      Position   : in Node_Access)
+     return Node_Access
    is begin
-      if Position = Invalid_Node_Index then
+      if Position = Invalid_Node_Access then
          return Position;
       else
-         return Result : Node_Index do
+         return Result : Node_Access do
             declare
                --  Tree is one of:
                --
@@ -169,18 +169,18 @@ package body WisiToken.Syntax_Trees.LR_Utils is
                --  | | rhs_item: Element
                --  | rhs_item: next element : Aunt
 
-               Grand_Parent : constant Node_Index := Tree.Parent (Position, 2);
+               Grand_Parent : constant Node_Access := Tree.Parent (Position, 2);
 
-               Aunts           : constant Valid_Node_Index_Array :=
-                 (if Grand_Parent = Invalid_Node_Index or else Tree.ID 
(Grand_Parent) /= List_ID
-                  then (1 .. 0 => Invalid_Node_Index)
+               Aunts : constant Node_Access_Array :=
+                 (if Grand_Parent = Invalid_Node_Access or else Tree.ID 
(Grand_Parent) /= List_ID
+                  then (1 .. 0 => Invalid_Node_Access)
                   else Tree.Children (Grand_Parent));
 
                Last_List_Child : SAL.Base_Peek_Type := Aunts'First - 1;
             begin
-               if Grand_Parent = Invalid_Node_Index or else Tree.ID 
(Grand_Parent) /= List_ID then
+               if Grand_Parent = Invalid_Node_Access or else Tree.ID 
(Grand_Parent) /= List_ID then
                   --  No next
-                  Result := Invalid_Node_Index;
+                  Result := Invalid_Node_Access;
                else
                   for I in Aunts'Range loop
                      if Tree.ID (Aunts (I)) in List_ID | Element_ID then
@@ -190,7 +190,7 @@ package body WisiToken.Syntax_Trees.LR_Utils is
 
                   if Last_List_Child = 1 then
                      --  No next
-                     Result := Invalid_Node_Index;
+                     Result := Invalid_Node_Access;
                   else
                      Result := Aunts (Last_List_Child);
                   end if;
@@ -209,13 +209,13 @@ package body WisiToken.Syntax_Trees.LR_Utils is
 
    function Previous
      (Tree     : in Syntax_Trees.Tree;
-      Position : in Node_Index)
-     return Node_Index
+      Position : in Node_Access)
+     return Node_Access
    is begin
-      if Position = Invalid_Node_Index then
+      if Position = Invalid_Node_Access then
          return Position;
       else
-         return Result : Node_Index do
+         return Result : Node_Access do
             --  Tree is one of:
             --
             --  case a: first element, no prev
@@ -237,18 +237,17 @@ package body WisiToken.Syntax_Trees.LR_Utils is
             --  | | rhs_item: prev element
             --  | rhs_item: Element
             declare
-               Parent : constant Valid_Node_Index := Tree.Parent (Position);
+               Parent : constant Valid_Node_Access := Tree.Parent (Position);
             begin
                if Position = Tree.Child (Parent, 1) then
                   --  No prev
-                  Result := Invalid_Node_Index;
+                  Result := Invalid_Node_Access;
 
                else
                   declare
-                     Prev_Children : constant Valid_Node_Index_Array := 
Tree.Children
-                       (Tree.Child (Parent, 1));
+                     Prev_Node : constant Node_Access := Tree.Child (Parent, 1);
                   begin
-                     Result := Prev_Children (Prev_Children'Last);
+                     Result := Tree.Child (Prev_Node, Tree.Child_Count (Prev_Node));
                   end;
                end if;
             end;
@@ -261,7 +260,10 @@ package body WisiToken.Syntax_Trees.LR_Utils is
       return (Node => Previous (Iter.Container.Tree.all, Position.Node));
    end Previous;
 
-   function List_Constant_Ref (Container : aliased in Constant_List'Class; 
Position : in Cursor) return Valid_Node_Index
+   function List_Constant_Ref
+     (Container : aliased in Constant_List'Class;
+      Position  :         in Cursor)
+     return Valid_Node_Access
    is
       pragma Unreferenced (Container);
    begin
@@ -280,7 +282,7 @@ package body WisiToken.Syntax_Trees.LR_Utils is
 
    function Find
      (Container : in Constant_List;
-      Target    : in Valid_Node_Index)
+      Target    : in Valid_Node_Access)
      return Cursor
    is begin
       for Cur in Container.Iterate_Constant loop
@@ -309,7 +311,7 @@ package body WisiToken.Syntax_Trees.LR_Utils is
 
       function Create_List
         (Tree         : aliased in out WisiToken.Syntax_Trees.Tree;
-         Root         :         in     Valid_Node_Index;
+         Root         :         in     Valid_Node_Access;
          List_ID      :         in     WisiToken.Token_ID;
          Element_ID   :         in     WisiToken.Token_ID;
          Separator_ID :         in     WisiToken.Token_ID)
@@ -335,7 +337,7 @@ package body WisiToken.Syntax_Trees.LR_Utils is
 
       function Create_List
         (Tree       : aliased in out WisiToken.Syntax_Trees.Tree;
-         Root       :         in     Valid_Node_Index;
+         Root       :         in     Valid_Node_Access;
          List_ID    :         in     WisiToken.Token_ID;
          Element_ID :         in     WisiToken.Token_ID)
         return Constant_List
@@ -351,35 +353,35 @@ package body WisiToken.Syntax_Trees.LR_Utils is
       function Create_List
         (Container :         in     Constant_List;
          Tree      : aliased in out WisiToken.Syntax_Trees.Tree;
-         Root      :         in     Valid_Node_Index)
+         Root      :         in     Valid_Node_Access)
         return Constant_List
       is begin
          return Create_List (Tree, Root, Container.List_ID, 
Container.Element_ID);
       end Create_List;
 
-      function Create_List (Container : in out List; Root : in 
Valid_Node_Index) return List
+      function Create_List (Container : in out List; Root : in 
Valid_Node_Access) return List
       is begin
          return Create_List (Container.Tree.all, Root, Container.List_ID, 
Container.Element_ID, Container.Separator_ID);
       end Create_List;
 
       function Create_From_Element
         (Tree         : aliased in out WisiToken.Syntax_Trees.Tree;
-         Element      :         in     Valid_Node_Index;
+         Element      :         in     Valid_Node_Access;
          List_ID      :         in     WisiToken.Token_ID;
          Element_ID   :         in     WisiToken.Token_ID;
          Separator_ID :         in     WisiToken.Token_ID)
         return List
       is
-         Root : Valid_Node_Index := Tree.Parent (Element);
+         Root : Valid_Node_Access := Tree.Parent (Element);
       begin
          loop
-            exit when Tree.Parent (Root) = Invalid_Node_Index or else Tree.ID 
(Tree.Parent (Root)) /= List_ID;
+            exit when Tree.Parent (Root) = Invalid_Node_Access or else Tree.ID 
(Tree.Parent (Root)) /= List_ID;
             Root := Tree.Parent (Root);
          end loop;
          return Create_List (Tree, Root, List_ID, Element_ID, Separator_ID);
       end Create_From_Element;
 
-      function Create_From_Element (Container : in out List; Element : in 
Valid_Node_Index) return List
+      function Create_From_Element (Container : in out List; Element : in 
Valid_Node_Access) return List
       is begin
          return Create_From_Element
            (Container.Tree.all, Element, Container.List_ID, 
Container.Element_ID, Container.Separator_ID);
@@ -387,15 +389,15 @@ package body WisiToken.Syntax_Trees.LR_Utils is
 
       function Create_From_Element
         (Tree       : aliased in out WisiToken.Syntax_Trees.Tree;
-         Element    :         in     Valid_Node_Index;
+         Element    :         in     Valid_Node_Access;
          List_ID    :         in     WisiToken.Token_ID;
          Element_ID :         in     WisiToken.Token_ID)
         return Constant_List
       is
-         Root : Valid_Node_Index := Tree.Parent (Element);
+         Root : Valid_Node_Access := Tree.Parent (Element);
       begin
          loop
-            exit when Tree.Parent (Root) = Invalid_Node_Index or else Tree.ID 
(Tree.Parent (Root)) /= List_ID;
+            exit when Tree.Parent (Root) = Invalid_Node_Access or else Tree.ID 
(Tree.Parent (Root)) /= List_ID;
             Root := Tree.Parent (Root);
          end loop;
          return Create_List (Tree, Root, List_ID, Element_ID);
@@ -405,7 +407,7 @@ package body WisiToken.Syntax_Trees.LR_Utils is
       is begin
          return
            (Tree              => Tree'Access,
-            Root              => Invalid_Node_Index,
+            Root              => Invalid_Node_Access,
             List_ID           => Invalid_Token_ID,
             One_Element_RHS   => 0,
             Multi_Element_RHS => 0,
@@ -417,7 +419,7 @@ package body WisiToken.Syntax_Trees.LR_Utils is
       is begin
          return
            (Tree       => Tree'Access,
-            Root       => Invalid_Node_Index,
+            Root       => Invalid_Node_Access,
             List_ID    => Invalid_Token_ID,
             Element_ID => Invalid_Token_ID);
       end Invalid_List;
@@ -432,7 +434,7 @@ package body WisiToken.Syntax_Trees.LR_Utils is
       is begin
          return
            (Tree'Access,
-            Root              => Invalid_Node_Index,
+            Root              => Invalid_Node_Access,
             List_ID           => List_ID,
             One_Element_RHS   => (if Multi_Element_RHS = 0 then 1 else 0),
             Multi_Element_RHS => Multi_Element_RHS,
@@ -450,11 +452,11 @@ package body WisiToken.Syntax_Trees.LR_Utils is
 
    procedure Append
      (Container   : in out List;
-      New_Element : in     Valid_Node_Index)
+      New_Element : in     Valid_Node_Access)
    is
       Tree : Syntax_Trees.Tree renames Container.Tree.all;
    begin
-      if Container.Root = Invalid_Node_Index then
+      if Container.Root = Invalid_Node_Access then
          Container :=
            (Container.Tree,
             List_ID           => Container.List_ID,
@@ -464,15 +466,16 @@ package body WisiToken.Syntax_Trees.LR_Utils is
             Separator_ID      => Container.Separator_ID,
             Root              => Tree.Add_Nonterm
               (Production     => (Container.List_ID, 
Container.One_Element_RHS),
-               Children       => (1 => New_Element)));
+               Children       => (1 => New_Element),
+               Clear_Parents  => True));
 
       else
          --  Adding element Last in spec example
          declare
-            List_Parent : constant Node_Index         := Tree.Parent 
(Container.Root);
-            Old_Root    : constant Valid_Node_Index   := Container.Root;
+            List_Parent : constant Node_Access         := Tree.Parent 
(Container.Root);
+            Old_Root    : constant Valid_Node_Access   := Container.Root;
             Child_Index : constant SAL.Base_Peek_Type :=
-              (if List_Parent = Invalid_Node_Index
+              (if List_Parent = Invalid_Node_Access
                then 0
                else Tree.Child_Index (List_Parent, Old_Root));
          begin
@@ -482,18 +485,16 @@ package body WisiToken.Syntax_Trees.LR_Utils is
                  Children       =>
                    (if Container.Separator_ID = Invalid_Token_ID
                     then (Old_Root, New_Element)
-                    else (Old_Root, Tree.Add_Terminal 
(Container.Separator_ID), New_Element)));
-
-            if List_Parent = Invalid_Node_Index then
-               if Tree.Root = Old_Root then
-                  Tree.Root := Container.Root;
-               end if;
+                    else (Old_Root, Tree.Add_Terminal 
(Container.Separator_ID), New_Element)),
+                 Clear_Parents => True); -- Have to clear Old_Root
 
+            if List_Parent = Invalid_Node_Access then
+               null;
             else
                Tree.Replace_Child
                  (List_Parent,
                   Child_Index,
-                  Old_Child => Deleted_Child,
+                  Old_Child => Invalid_Node_Access,
                   New_Child => Container.Root);
             end if;
          end;
@@ -502,11 +503,11 @@ package body WisiToken.Syntax_Trees.LR_Utils is
 
    procedure Prepend
      (Container   : in out List;
-      New_Element : in     Valid_Node_Index)
+      New_Element : in     Valid_Node_Access)
    is
       Tree : Syntax_Trees.Tree renames Container.Tree.all;
    begin
-      if Container.Root = Invalid_Node_Index then
+      if Container.Root = Invalid_Node_Access then
          Container :=
            (Container.Tree,
             List_ID           => Container.List_ID,
@@ -516,32 +517,38 @@ package body WisiToken.Syntax_Trees.LR_Utils is
             Separator_ID      => Container.Separator_ID,
             Root              => Tree.Add_Nonterm
               (Production     => (Container.List_ID, 
Container.One_Element_RHS),
-               Children       => (1 => New_Element)));
+               Children       => (1 => New_Element),
+               Clear_Parents  => False));
 
       else
          --  Inserting element First (with list parent node and separator) in 
spec example
          declare
-            Old_First  : constant Valid_Node_Index := Container.First.Node;
-            Parent : constant Valid_Node_Index := Tree.Parent (Old_First);
+            Old_First   : constant Valid_Node_Access := Container.First.Node;
+            Update_Root : constant Boolean           := Old_First.Parent = 
Container.Root;
 
-            List_Node : constant Valid_Node_Index := Tree.Add_Nonterm
+            List_Node : constant Valid_Node_Access := Tree.Add_Nonterm
               ((Container.List_ID, Container.One_Element_RHS),
-               (1 => New_Element));
+               (1 => New_Element),
+               Clear_Parents => False);
          begin
             Tree.Set_Children
-              (Node     => Parent,
+              (Node     => Old_First.Parent,
                New_ID   => (Container.List_ID, Container.Multi_Element_RHS),
                Children =>
                  (if Container.Separator_ID = Invalid_Token_ID
                   then (List_Node, Old_First)
                   else (List_Node, Tree.Add_Terminal (Container.Separator_ID), 
Old_First)));
+
+            if Update_Root then
+               Container.Root := Old_First.Parent;
+            end if;
          end;
       end if;
    end Prepend;
 
    procedure Insert
      (Container   : in out List;
-      New_Element : in     Valid_Node_Index;
+      New_Element : in     Valid_Node_Access;
       After       : in     Cursor)
    is
       --  Current Tree (see wisitoken_syntax_trees-test.adb Test_Insert_1):
@@ -574,25 +581,26 @@ package body WisiToken.Syntax_Trees.LR_Utils is
       --  | | element: Before
       --  | separator
       --  | element: Last
-      Iter   : constant Iterator   := Container.Iterate;
-      Before : constant Node_Index := Iter.Next (After).Node;
+      Iter   : constant Iterator    := Container.Iterate;
+      Before : constant Node_Access := Iter.Next (After).Node;
    begin
-      if After.Node = Invalid_Node_Index then
+      if After.Node = Invalid_Node_Access then
          Prepend (Container, New_Element);
-      elsif Before = Invalid_Node_Index then
+      elsif Before = Invalid_Node_Access then
          Append (Container, New_Element);
       else
          declare
-            Parent      : constant Valid_Node_Index := Container.Tree.Parent 
(Before);
-            Old_Child   : constant Valid_Node_Index := Container.Tree.Parent 
(After.Node);
+            Parent      : constant Valid_Node_Access := Container.Tree.Parent 
(Before);
+            Old_Child   : constant Valid_Node_Access := Container.Tree.Parent 
(After.Node);
             Child_Index : constant SAL.Peek_Type    := 
Container.Tree.Child_Index (Parent, Old_Child);
 
-            New_List_Nonterm : constant Valid_Node_Index := 
Container.Tree.Add_Nonterm
+            New_List_Nonterm : constant Valid_Node_Access := 
Container.Tree.Add_Nonterm
               (Production => (Container.List_ID, Container.Multi_Element_RHS),
                Children   =>
                  (if Container.Separator_ID = Invalid_Token_ID
                   then (Old_Child, New_Element)
-                  else (Old_Child, Container.Tree.Add_Terminal 
(Container.Separator_ID), New_Element)));
+                  else (Old_Child, Container.Tree.Add_Terminal 
(Container.Separator_ID), New_Element)),
+               Clear_Parents => True); -- have to clear Old_Child
 
          begin
             --  After = Container.First is not a special case:
@@ -623,7 +631,7 @@ package body WisiToken.Syntax_Trees.LR_Utils is
             Container.Tree.Replace_Child
               (Parent               => Parent,
                Child_Index          => Child_Index,
-               Old_Child            => Deleted_Child,
+               Old_Child            => Invalid_Node_Access,
                New_Child            => New_List_Nonterm,
                Old_Child_New_Parent => New_List_Nonterm);
          end;
@@ -634,7 +642,8 @@ package body WisiToken.Syntax_Trees.LR_Utils is
      (Source_List  : in     Constant_List'Class;
       Source_First : in     Cursor := No_Element;
       Source_Last  : in     Cursor := No_Element;
-      Dest_List    : in out List'Class)
+      Dest_List    : in out List'Class;
+      User_Data    : in     User_Data_Access_Constant)
    is
       Source_Iter : constant Constant_Iterator := Source_List.Iterate_Constant;
 
@@ -644,7 +653,7 @@ package body WisiToken.Syntax_Trees.LR_Utils is
       for N of Source_List loop
          exit when not Has_Element (Item);
 
-         Dest_List.Append (Dest_List.Tree.Copy_Subtree (Item.Node));
+         Dest_List.Append (Dest_List.Tree.Copy_Subtree (Item.Node, User_Data));
 
          exit when Item = Last;
 
@@ -661,21 +670,19 @@ package body WisiToken.Syntax_Trees.LR_Utils is
       if Container.First = Container.Last then
          --  result is empty
          declare
-            List_Parent : constant Node_Index := Tree.Parent (Container.Root);
+            List_Parent : constant Node_Access := Tree.Parent (Container.Root);
          begin
-            if List_Parent = Invalid_Node_Index then
-               if Tree.Root = Container.Root then
-                  Tree.Root := Invalid_Node_Index;
-               end if;
+            if List_Parent = Invalid_Node_Access then
+               null;
 
             else
                Tree.Replace_Child
                  (List_Parent,
                   Child_Index => Tree.Child_Index (List_Parent, 
Container.Root),
-                  Old_Child => Container.Root,
-                  New_Child => Deleted_Child);
+                  Old_Child   => Container.Root,
+                  New_Child   => Invalid_Node_Access);
             end if;
-            Container.Root := Invalid_Node_Index;
+            Container.Root := Invalid_Node_Access;
          end;
 
       elsif Item = Container.First then
@@ -697,7 +704,7 @@ package body WisiToken.Syntax_Trees.LR_Utils is
          --  0003: | | | ...
 
          declare
-            Parent_2 : constant Valid_Node_Index := Tree.Parent (Item.Node, 2);
+            Parent_2 : Valid_Node_Access := Tree.Parent (Item.Node, 2);
          begin
             Tree.Set_Children
               (Parent_2,
@@ -723,11 +730,11 @@ package body WisiToken.Syntax_Trees.LR_Utils is
          --  03: | ...
 
          declare
-            List_Parent : constant Node_Index       := Tree.Parent 
(Container.Root);
-            New_Root    : constant Valid_Node_Index := Tree.Child 
(Container.Root, 1);
+            List_Parent : constant Node_Access       := Tree.Parent 
(Container.Root);
+            New_Root    : constant Valid_Node_Access := Tree.Child 
(Container.Root, 1);
          begin
-            if List_Parent = Invalid_Node_Index then
-               Tree.Delete_Parent (New_Root);
+            if List_Parent = Invalid_Node_Access then
+               Tree.Clear_Parent (New_Root, Clear_Children => True); -- Tree 
is Editable
                Container.Root := New_Root;
 
             else
@@ -738,7 +745,7 @@ package body WisiToken.Syntax_Trees.LR_Utils is
                     (List_Parent, Parent_Index,
                      Old_Child            => Container.Root,
                      New_Child            => New_Root,
-                     Old_Child_New_Parent => Invalid_Node_Index);
+                     Old_Child_New_Parent => Invalid_Node_Access);
                end;
             end if;
 
@@ -772,19 +779,19 @@ package body WisiToken.Syntax_Trees.LR_Utils is
          --  14: | element: 4, Last
 
          declare
-            Parent_1         : constant Valid_Node_Index := Tree.Parent 
(Item.Node);
-            Parent_2         : constant Valid_Node_Index := Tree.Parent 
(Parent_1);
-            Parent_1_Child_1 : constant Valid_Node_Index := Tree.Child 
(Parent_1, 1);
+            Parent_1         : constant Valid_Node_Access := Tree.Parent 
(Item.Node);
+            Parent_2         : constant Valid_Node_Access := Tree.Parent 
(Parent_1);
+            Parent_1_Child_1 : constant Valid_Node_Access := Tree.Child 
(Parent_1, 1);
          begin
             Tree.Replace_Child
               (Parent_2, 1,
                Old_Child            => Parent_1,
                New_Child            => Parent_1_Child_1,
-               Old_Child_New_Parent => Invalid_Node_Index);
+               Old_Child_New_Parent => Invalid_Node_Access);
          end;
       end if;
 
-      Item.Node := Invalid_Node_Index;
+      Item.Node := Invalid_Node_Access;
    end Delete;
 
    function Valid_Skip_List (Tree : aliased in out Syntax_Trees.Tree; 
Skip_List : in Skip_Array) return Boolean
@@ -824,16 +831,17 @@ package body WisiToken.Syntax_Trees.LR_Utils is
       Skip_Found        :         in out Boolean;
       Tree              : aliased in out Syntax_Trees.Tree;
       Separator_ID      :         in     Token_ID;
-      Multi_Element_RHS :         in     Natural)
-      return Node_Index
+      Multi_Element_RHS :         in     Natural;
+      User_Data         :         in     User_Data_Access_Constant)
+      return Node_Access
    is
       Dest_List : List := Creators.Empty_List
         (Tree, Source_List.List_ID, Multi_Element_RHS, Source_List.Element_ID, 
Separator_ID);
 
       function Get_Dest_Child
-        (Node      : in Valid_Node_Index;
+        (Node      : in Valid_Node_Access;
          Skip_List : in Skip_Array)
-        return Valid_Node_Index
+        return Valid_Node_Access
       with Pre => Tree.Is_Nonterm (Node) and
                   (Skip_List'Length > 1 and then
                    (Skip_List (Skip_List'First).Label = Nested and Skip_List 
(Skip_List'Last).Label = Skip))
@@ -848,11 +856,11 @@ package body WisiToken.Syntax_Trees.LR_Utils is
                   List_ID    => Skip_This.List_ID,
                   Element_ID => Skip_This.Element_ID),
                Skip_List (Skip_List'First + 1 .. Skip_List'Last),
-               Skip_Found, Tree, Skip_This.Separator_ID, 
Skip_This.Multi_Element_RHS);
+               Skip_Found, Tree, Skip_This.Separator_ID, 
Skip_This.Multi_Element_RHS, User_Data);
          else
             declare
-               Source_Children : constant Valid_Node_Index_Array := 
Tree.Children (Node);
-               Dest_Children   : Valid_Node_Index_Array 
(Source_Children'Range);
+               Source_Children : constant Node_Access_Array := Tree.Children 
(Node);
+               Dest_Children   : Node_Access_Array (Source_Children'Range);
             begin
                for I in Source_Children'Range loop
                   if Source_Children (I) = Skip_This.List_Root then
@@ -863,17 +871,19 @@ package body WisiToken.Syntax_Trees.LR_Utils is
                            List_ID    => Skip_This.List_ID,
                            Element_ID => Skip_This.Element_ID),
                         Skip_List (Skip_List'First + 1 .. Skip_List'Last),
-                        Skip_Found, Tree, Skip_This.Separator_ID, 
Skip_This.Multi_Element_RHS);
+                        Skip_Found, Tree, Skip_This.Separator_ID, 
Skip_This.Multi_Element_RHS, User_Data);
                   else
                      if Tree.Label (Source_Children (I)) = Nonterm then
                         Dest_Children (I) := Get_Dest_Child (Source_Children 
(I), Skip_List);
                      else
-                        Dest_Children (I) := Tree.Copy_Subtree 
(Source_Children (I));
+                        Dest_Children (I) := Tree.Copy_Subtree 
(Source_Children (I), User_Data);
                      end if;
                   end if;
                end loop;
 
-               return Tree.Add_Nonterm (Tree.Production_ID (Node), 
Dest_Children, Tree.Action (Node));
+               return Tree.Add_Nonterm
+                 (Tree.Production_ID (Node), To_Valid_Node_Access 
(Dest_Children),
+                  Clear_Parents => False);
             end;
          end if;
       end Get_Dest_Child;
@@ -892,7 +902,7 @@ package body WisiToken.Syntax_Trees.LR_Utils is
                Dest_List.Append (Get_Dest_Child (N, Skip_List));
             end case;
          else
-            Dest_List.Append (Tree.Copy_Subtree (N));
+            Dest_List.Append (Tree.Copy_Subtree (N, User_Data));
          end if;
       end loop;
       return Dest_List.Root;
@@ -900,8 +910,9 @@ package body WisiToken.Syntax_Trees.LR_Utils is
 
    function Copy_Skip_Nested
      (Skip_List :         in     Skip_Info;
-      Tree      : aliased in out Syntax_Trees.Tree)
-     return Node_Index
+      Tree      : aliased in out Syntax_Trees.Tree;
+      User_Data :         in     User_Data_Access_Constant)
+     return Node_Access
    is
       Source_List : constant Constant_List := Creators.Create_List
         (Tree,
@@ -911,9 +922,9 @@ package body WisiToken.Syntax_Trees.LR_Utils is
 
       Skip_Found : Boolean := False;
    begin
-      return Result : constant Node_Index := Copy_Skip_Nested
+      return Result : constant Node_Access := Copy_Skip_Nested
         (Source_List, Skip_List.Skips, Skip_Found, Tree, 
Skip_List.Start_Separator_ID,
-         Skip_List.Start_Multi_Element_RHS)
+         Skip_List.Start_Multi_Element_RHS, User_Data)
       do
          if not Skip_Found then
             raise SAL.Programmer_Error with "Skip not found";
@@ -923,14 +934,14 @@ package body WisiToken.Syntax_Trees.LR_Utils is
 
    function List_Root
      (Tree    : in Syntax_Trees.Tree;
-      Node    : in Valid_Node_Index;
+      Node    : in Valid_Node_Access;
       List_ID : in Token_ID)
-     return Valid_Node_Index
+     return Valid_Node_Access
    is
-      Root : Node_Index := Node;
+      Root : Node_Access := Node;
    begin
       loop
-         exit when Tree.Parent (Root) = Invalid_Node_Index or else Tree.ID 
(Tree.Parent (Root)) /= List_ID;
+         exit when Tree.Parent (Root) = Invalid_Node_Access or else Tree.ID 
(Tree.Parent (Root)) /= List_ID;
          Root := Tree.Parent (Root);
       end loop;
       return Root;
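
   --  A minimal usage sketch (not part of this commit) of the edited list
   --  operations above, under the Node_Index to Node_Access change. Tree,
   --  Source_Element, User_Data and the token IDs Param_List_ID,
   --  Identifier_ID, Comma_ID are hypothetical; Empty_List, Append and
   --  Copy_Subtree are the subprograms shown in the diff. Assumes
   --  "with WisiToken.Syntax_Trees.LR_Utils;" and a Tree that satisfies
   --  the new Tree.Editable precondition.
   declare
      use WisiToken.Syntax_Trees;
      Dest : LR_Utils.List := LR_Utils.Creators.Empty_List
        (Tree, Param_List_ID, Multi_Element_RHS => 1,
         Element_ID => Identifier_ID, Separator_ID => Comma_ID);
   begin
      --  List elements are now Valid_Node_Access values, and deep copies
      --  must pass the user data object to Copy_Subtree.
      Dest.Append (Tree.Copy_Subtree (Source_Element, User_Data));
   end;
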
diff --git a/wisitoken-syntax_trees-lr_utils.ads 
b/wisitoken-syntax_trees-lr_utils.ads
index 6f0403c7c4..24df80f13f 100644
--- a/wisitoken-syntax_trees-lr_utils.ads
+++ b/wisitoken-syntax_trees-lr_utils.ads
@@ -9,7 +9,7 @@
 --  requires mode 'aliased in' for First, Last, which is not
 --  conformant with Ada.Iterator_Interfaces.
 --
---  Copyright (C) 2019, 2020 Stephen Leake All Rights Reserved.
+--  Copyright (C) 2019, 2020, 2022 Stephen Leake All Rights Reserved.
 --
 --  This library is free software;  you can redistribute it and/or modify it
 --  under terms of the  GNU General Public License  as published by the Free
@@ -27,15 +27,11 @@ pragma License (Modified_GPL);
 with Ada.Iterator_Interfaces;
 with SAL.Gen_Unconstrained_Array_Image_Aux;
 package WisiToken.Syntax_Trees.LR_Utils is
-   use all type SAL.Base_Peek_Type;
 
    procedure Raise_Programmer_Error
-     (Label      : in String;
-      Descriptor : in WisiToken.Descriptor;
-      Lexer      : in WisiToken.Lexer.Handle;
-      Tree       : in WisiToken.Syntax_Trees.Tree;
-      Terminals  : in WisiToken.Base_Token_Arrays.Vector;
-      Node       : in WisiToken.Node_Index);
+     (Label : in String;
+      Tree  : in WisiToken.Syntax_Trees.Tree;
+      Node  : in WisiToken.Syntax_Trees.Node_Access);
    pragma No_Return (Raise_Programmer_Error);
 
    ----------
@@ -65,7 +61,7 @@ package WisiToken.Syntax_Trees.LR_Utils is
    type Constant_List (<>) is tagged private with
      Constant_Indexing => List_Constant_Ref,
      Default_Iterator  => Iterate_Constant,
-     Iterator_Element  => Valid_Node_Index;
+     Iterator_Element  => Valid_Node_Access;
 
    function Tree (Container : in Constant_List) return Tree_Constant_Reference
    with Pre => not Container.Is_Invalid;
@@ -75,7 +71,7 @@ package WisiToken.Syntax_Trees.LR_Utils is
    function Is_Empty (Container : in Constant_List) return Boolean;
    --  Returns True if Container is invalid, or if Container is empty
 
-   function Root (Container : in Constant_List) return Node_Index
+   function Root (Container : in Constant_List) return Node_Access
    with Pre => not Container.Is_Invalid;
 
    function List_ID (Container : in Constant_List) return Token_ID
@@ -87,14 +83,14 @@ package WisiToken.Syntax_Trees.LR_Utils is
    function Count (Container : in Constant_List) return 
Ada.Containers.Count_Type
    with Pre => not Container.Is_Invalid;
 
-   function Contains (Container : in Constant_List; Node : in 
Valid_Node_Index) return Boolean
+   function Contains (Container : in Constant_List; Node : in 
Valid_Node_Access) return Boolean
    with Pre => not Container.Is_Invalid;
 
    type Cursor is private;
 
    No_Element : constant Cursor;
 
-   function To_Cursor (Container : in Constant_List; Node : in 
Valid_Node_Index) return Cursor
+   function To_Cursor (Container : in Constant_List; Node : in 
Valid_Node_Access) return Cursor
    with Pre => (not Container.Is_Invalid) and then
                (Container.Contains (Node) and Container.Tree.ID (Node) = 
Container.Element_ID);
 
@@ -103,12 +99,8 @@ package WisiToken.Syntax_Trees.LR_Utils is
 
    function Has_Element (Cursor : in LR_Utils.Cursor) return Boolean;
 
-   function Node (Cursor : in LR_Utils.Cursor) return Node_Index;
-   --  Invalid_Node_Index if not Has_Element (Cursor).
-
-   function Get_Node (Cursor : in LR_Utils.Cursor) return Node_Index
-     renames Node;
-   --  Useful when Node is hidden by another declaration.
+   function Element (Cursor : in LR_Utils.Cursor) return Node_Access;
+   --  Invalid_Node_Access if not Has_Element (Cursor).
 
    package Iterator_Interfaces is new Ada.Iterator_Interfaces (Cursor, 
Has_Element);
 
@@ -127,7 +119,8 @@ package WisiToken.Syntax_Trees.LR_Utils is
    function List_Constant_Ref
      (Container : aliased in Constant_List'Class;
       Position  :         in Cursor)
-     return Valid_Node_Index;
+     return Valid_Node_Access
+   with Pre => Has_Element (Position);
 
    type Constant_Iterator (Container : not null access constant Constant_List) 
is new
      Iterator_Interfaces.Reversible_Iterator
@@ -144,7 +137,7 @@ package WisiToken.Syntax_Trees.LR_Utils is
    type Find_Equal is access function
      (Target : in String;
       List   : in Constant_List'Class;
-      Node   : in Valid_Node_Index)
+      Node   : in Valid_Node_Access)
    return Boolean;
    --  Function called by Find to compare Target to Node. Target, List
    --  are the Find arguments; Node is an element of List. Return True if
@@ -152,7 +145,7 @@ package WisiToken.Syntax_Trees.LR_Utils is
 
    function Find
      (Container : in Constant_List;
-      Target    : in Valid_Node_Index)
+      Target    : in Valid_Node_Access)
      return Cursor
    with Pre => not Container.Is_Invalid and Container.Tree.ID (Target) = 
Container.Element_ID;
 
@@ -165,7 +158,7 @@ package WisiToken.Syntax_Trees.LR_Utils is
 
    type List (<>) is new Constant_List with private with
      Default_Iterator  => Iterate,
-     Iterator_Element  => Valid_Node_Index;
+     Iterator_Element  => Valid_Node_Access;
 
    function Separator_ID (Container : in List) return Token_ID
    with Pre => not Container.Is_Invalid;
@@ -179,18 +172,19 @@ package WisiToken.Syntax_Trees.LR_Utils is
 
       function Create_List
         (Tree         : aliased in out WisiToken.Syntax_Trees.Tree;
-         Root         :         in     Valid_Node_Index;
+         Root         :         in     Valid_Node_Access;
          List_ID      :         in     WisiToken.Token_ID;
          Element_ID   :         in     WisiToken.Token_ID;
          Separator_ID :         in     WisiToken.Token_ID)
         return List
-      with Pre => (Tree.Is_Nonterm (Root) and then Tree.Has_Children (Root)) 
and Tree.ID (Root) = List_ID;
+      with Pre => Tree.Editable and (Tree.Is_Nonterm (Root) and then 
Tree.Has_Children (Root)) and
+                  Tree.ID (Root) = List_ID;
       --  If there is no separator, set Separator_ID = 
WisiToken.Invalid_Token_ID
       --  The list cannot be empty; use Empty_List for an empty list.
 
       function Create_List
         (Tree       : aliased in out WisiToken.Syntax_Trees.Tree;
-         Root       :         in     Valid_Node_Index;
+         Root       :         in     Valid_Node_Access;
          List_ID    :         in     WisiToken.Token_ID;
          Element_ID :         in     WisiToken.Token_ID)
         return Constant_List
@@ -200,7 +194,7 @@ package WisiToken.Syntax_Trees.LR_Utils is
       function Create_List
         (Container :         in     Constant_List;
          Tree      : aliased in out WisiToken.Syntax_Trees.Tree;
-         Root      :         in     Valid_Node_Index)
+         Root      :         in     Valid_Node_Access)
         return Constant_List
       with Pre => (Container.Tree.Is_Nonterm (Root) and then
                    Container.Tree.Has_Children (Root)) and
@@ -208,25 +202,26 @@ package WisiToken.Syntax_Trees.LR_Utils is
       --  Same as Create_List, get all other params from Container.
       --  Need Tree for non-constant view.
 
-      function Create_List (Container : in out List; Root : in 
Valid_Node_Index) return List
+      function Create_List (Container : in out List; Root : in 
Valid_Node_Access) return List
       with Pre => (Container.Tree.Is_Nonterm (Root) and then 
Container.Tree.Has_Children (Root)) and
                   Container.Tree.ID (Root) = Container.List_ID;
       --  Same as Create_List, get all other params from Container.
 
       function Create_From_Element
         (Tree         : aliased in out WisiToken.Syntax_Trees.Tree;
-         Element      :         in     Valid_Node_Index;
+         Element      :         in     Valid_Node_Access;
          List_ID      :         in     WisiToken.Token_ID;
          Element_ID   :         in     WisiToken.Token_ID;
          Separator_ID :         in     WisiToken.Token_ID)
         return List
-      with Pre => Tree.ID (Tree.Parent (Element)) = List_ID and
+      with Pre => Tree.Editable and
+                  Tree.ID (Tree.Parent (Element)) = List_ID and
                   Tree.ID (Element) = Element_ID and
                   Tree.ID (Tree.Parent (Element)) = List_ID;
       --  Same as Create_List, but it first finds the root as an ancestor of
       --  Element.
 
-      function Create_From_Element (Container : in out List; Element : in 
Valid_Node_Index) return List
+      function Create_From_Element (Container : in out List; Element : in 
Valid_Node_Access) return List
       with Pre => Container.Tree.ID (Container.Tree.Parent (Element)) = 
Container.List_ID and
                   Container.Tree.ID (Element) = Container.Element_ID and
                   Container.Tree.ID (Container.Tree.Parent (Element)) = 
Container.List_ID;
@@ -234,7 +229,7 @@ package WisiToken.Syntax_Trees.LR_Utils is
 
       function Create_From_Element
         (Tree       : aliased in out WisiToken.Syntax_Trees.Tree;
-         Element    :         in     Valid_Node_Index;
+         Element    :         in     Valid_Node_Access;
          List_ID    :         in     WisiToken.Token_ID;
          Element_ID :         in     WisiToken.Token_ID)
         return Constant_List
@@ -258,8 +253,9 @@ package WisiToken.Syntax_Trees.LR_Utils is
          Multi_Element_RHS :         in     Natural;
          Element_ID        :         in     WisiToken.Token_ID;
          Separator_ID      :         in     WisiToken.Token_ID)
-        return List;
-      --  Result Root returns Invalid_Node_Index; First, Last return empty
+        return List
+      with Pre => Tree.Editable;
+      --  Result Root returns Invalid_Node_Access; First, Last return empty
       --  cursor, count returns 0; Append works correctly.
 
       function Empty_List (Container : in out List) return List;
@@ -273,7 +269,7 @@ package WisiToken.Syntax_Trees.LR_Utils is
 
    procedure Append
      (Container   : in out List;
-      New_Element : in     Valid_Node_Index)
+      New_Element : in     Valid_Node_Access)
    with Pre => not Container.Is_Invalid and then Container.Tree.ID 
(New_Element) = Container.Element_ID;
    --  Append New_Element to Container, including Container.Separator_ID if
    --  it is not Invalid_Token_ID.
@@ -284,16 +280,16 @@ package WisiToken.Syntax_Trees.LR_Utils is
 
    procedure Prepend
      (Container   : in out List;
-      New_Element : in     Valid_Node_Index)
+      New_Element : in     Valid_Node_Access)
    with Pre => not Container.Is_Invalid and then Container.Tree.ID 
(New_Element) = Container.Element_ID;
    --  Prepend New_Element to Container, including Container.Separator_ID if
    --  it is not Invalid_Token_ID.
    --
-   --  Container.Root parent is unchanged.
+   --  Container.Root parent is updated.
 
    procedure Insert
      (Container   : in out List;
-      New_Element : in     Valid_Node_Index;
+      New_Element : in     Valid_Node_Access;
       After       : in     Cursor)
    with Pre => not Container.Is_Invalid and then
                (Container.Tree.ID (New_Element) = Container.Element_ID and
@@ -312,7 +308,8 @@ package WisiToken.Syntax_Trees.LR_Utils is
      (Source_List  : in     Constant_List'Class;
       Source_First : in     Cursor := No_Element;
       Source_Last  : in     Cursor := No_Element;
-      Dest_List    : in out List'Class)
+      Dest_List    : in out List'Class;
+      User_Data    : in     User_Data_Access_Constant)
    with Pre => Compatible (Source_List, Dest_List);
    --  Deep copy slice of Source_List, appending to Dest_List.
    --
@@ -328,15 +325,17 @@ package WisiToken.Syntax_Trees.LR_Utils is
 
    type Skip_Label is (Nested, Skip);
 
-   type Skip_Item (Label : Skip_Label := Skip_Label'First) is
+   type Skip_Item (Label : Skip_Label := Skip) is
+   --  The default values must be valid to allow Skip_Info to be default
+   --  initialized.
    record
-      Element : Valid_Node_Index;
+      Element : Node_Access := null;
       case Label is
       when Nested =>
          --  Element is an element in the list currently being copied
          --  containing a nested list with an element to skip (given by Element
          --  in the next Skip_Item). The nested list is defined by:
-         List_Root         : Valid_Node_Index;
+         List_Root         : Valid_Node_Access;
          List_ID           : Token_ID;
          Element_ID        : Token_ID;
          Separator_ID      : Token_ID;
@@ -350,9 +349,10 @@ package WisiToken.Syntax_Trees.LR_Utils is
    subtype Nested_Skip_Item is Skip_Item (Nested);
 
    function Image (Item : in Skip_Item; Descriptor : in WisiToken.Descriptor) 
return String
-   is ("(" & Item.Label'Image & ", " & Item.Element'Image &
+   is ("(" & Item.Label'Image & ", " & Trimmed_Image (Get_Node_Index 
(Item.Element)) &
          (case Item.Label is
-          when Nested => "," & Item.List_Root'Image & ", " & Image 
(Item.List_ID, Descriptor),
+          when Nested => "," & Trimmed_Image (Get_Node_Index (Item.List_Root)) 
& ", " &
+               Image (Item.List_ID, Descriptor),
           when Skip => "") &
          ")");
 
@@ -362,11 +362,11 @@ package WisiToken.Syntax_Trees.LR_Utils is
    record
       --  Skip_Last may be Positive_Index_Type'First - 1 to indicate an
       --  empty or invalid skip list.
-      Start_List_Root         : Valid_Node_Index := Valid_Node_Index'Last;
-      Start_List_ID           : Token_ID         := Invalid_Token_ID;
-      Start_Element_ID        : Token_ID         := Invalid_Token_ID;
-      Start_Separator_ID      : Token_ID         := Invalid_Token_ID;
-      Start_Multi_Element_RHS : Natural          := 0;
+      Start_List_Root         : Node_Access := Invalid_Node_Access;
+      Start_List_ID           : Token_ID   := Invalid_Token_ID;
+      Start_Element_ID        : Token_ID   := Invalid_Token_ID;
+      Start_Separator_ID      : Token_ID   := Invalid_Token_ID;
+      Start_Multi_Element_RHS : Natural    := 0;
       Skips                   : Skip_Array (Positive_Index_Type'First .. 
Skip_Last);
    end record;
 
@@ -377,7 +377,8 @@ package WisiToken.Syntax_Trees.LR_Utils is
    is ("(" &
          (if Item.Start_List_ID = Invalid_Token_ID
           then ""
-          else Item.Start_List_Root'Image & ", " & Image (Item.Start_List_ID, 
Descriptor) & ", " &
+          else Trimmed_Image (Get_Node_Index (Item.Start_List_Root)) & ", " &
+             Image (Item.Start_List_ID, Descriptor) & ", " &
              Image (Item.Skips, Descriptor))
          & ")");
 
@@ -390,15 +391,17 @@ package WisiToken.Syntax_Trees.LR_Utils is
 
    function Copy_Skip_Nested
      (Skip_List :         in     Skip_Info;
-      Tree      : aliased in out Syntax_Trees.Tree)
-     return Node_Index
-   with Pre => Skip_List.Start_List_ID /= Invalid_Token_ID and then
-               (Valid_Skip_List (Tree, Skip_List.Skips) and
-                Skip_List.Start_List_ID /= Skip_List.Start_Element_ID);
+      Tree      : aliased in out Syntax_Trees.Tree;
+      User_Data :         in     User_Data_Access_Constant)
+     return Node_Access
+   with Pre => Tree.Editable and
+               (Skip_List.Start_List_ID /= Invalid_Token_ID and then
+                (Valid_Skip_List (Tree, Skip_List.Skips) and
+                 Skip_List.Start_List_ID /= Skip_List.Start_Element_ID));
    --  Copy list rooted at Skip_List.Start_List, skipping one element as
    --  indicated by Skip_List.Skip. Return root of copied list.
    --
-   --  Result is Invalid_Node_Index (indicating an empty list) if
+   --  Result is Invalid_Node_Access (indicating an empty list) if
    --  Skip_List has only one item (Skip), and Skip_List.Start_List_Root
    --  has only that item.
    --
@@ -407,25 +410,32 @@ package WisiToken.Syntax_Trees.LR_Utils is
 
    function List_Root
      (Tree    : in Syntax_Trees.Tree;
-      Node    : in Valid_Node_Index;
+      Node    : in Valid_Node_Access;
       List_ID : in Token_ID)
-     return Valid_Node_Index
+     return Valid_Node_Access
    with Pre => Tree.ID (Node) = List_ID;
 
 private
    type Cursor is record
-      Node : Node_Index;
+      Node : Node_Access;
    end record;
 
-   No_Element : constant Cursor := (Node => Invalid_Node_Index);
+   No_Element : constant Cursor := (Node => Invalid_Node_Access);
 
    type Constant_List (Tree : not null access WisiToken.Syntax_Trees.Tree) is 
tagged
      --  We'd prefer to have Tree be 'constant' here, but then it would
      --  also be constant in List, where we _don't_ want that. An
      --  alternative design would be to not derive List from Constant_List;
-     --  then we would would have to duplicate all operations.
+     --  then we would have to duplicate all operations. Another
+     --  alternative design would be to derive Constant_List from List, and
+     --  tighten the constraint on Tree in Constant_List. But that doesn't
+     --  work either. Since most applications using LR_Utils are editing
+     --  the tree anyway, we live with requiring variable access.
+     --
+     --  The current design also means we can't specify Variable_Indexing
+     --  on List.
    record
-      Root       : WisiToken.Node_Index;
+      Root       : WisiToken.Syntax_Trees.Node_Access;
       List_ID    : WisiToken.Token_ID;
       Element_ID : WisiToken.Token_ID;
    end record;
@@ -444,9 +454,9 @@ private
    is (Container.List_ID = Invalid_Token_ID);
 
    function Is_Empty (Container : in Constant_List) return Boolean
-   is (Container.Root = Invalid_Node_Index);
+   is (Container.Root = Invalid_Node_Access);
 
-   function Root (Container : in Constant_List) return Node_Index
+   function Root (Container : in Constant_List) return Node_Access
    is (Container.Root);
 
    function List_ID (Container : in Constant_List) return Token_ID
@@ -456,9 +466,9 @@ private
    is (Container.Element_ID);
 
    function Has_Element (Cursor : in LR_Utils.Cursor) return Boolean
-   is (Cursor.Node /= Invalid_Node_Index);
+   is (Cursor.Node /= Invalid_Node_Access);
 
-   function Node (Cursor : in LR_Utils.Cursor) return Node_Index
+   function Element (Cursor : in LR_Utils.Cursor) return Node_Access
    is (Cursor.Node);
 
    function Separator_ID (Container : in List) return Token_ID
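
   --  A minimal iteration sketch (not part of this commit) over the revised
   --  Constant_List interface above; Decl_List (some Constant_List) and
   --  Process are hypothetical. The "for .. of" form relies on
   --  Iterator_Element now being Valid_Node_Access.
   for N of Decl_List loop
      --  N is a Valid_Node_Access (previously a Valid_Node_Index); the old
      --  cursor accessors Node/Get_Node are replaced by Element.
      Process (N);
   end loop;
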
diff --git a/wisitoken-syntax_trees.adb b/wisitoken-syntax_trees.adb
index 0a38ae7f0f..cb90dff031 100644
--- a/wisitoken-syntax_trees.adb
+++ b/wisitoken-syntax_trees.adb
@@ -2,7 +2,7 @@
 --
 --  See spec.
 --
---  Copyright (C) 2018 - 2020 Free Software Foundation, Inc.
+--  Copyright (C) 2018 - 2022 Free Software Foundation, Inc.
 --
 --  This library is free software;  you can redistribute it and/or modify it
 --  under terms of the  GNU General Public License  as published by the Free
@@ -18,2094 +18,9916 @@
 pragma License (Modified_GPL);
 
 with Ada.Containers;
-with Ada.Text_IO;
-with SAL.Generic_Decimal_Image;
+with Ada.Exceptions;
+with Ada.Streams.Stream_IO;
+with Ada.Strings.Maps;
+with Ada.Tags;
+with GNAT.Traceback.Symbolic;
 package body WisiToken.Syntax_Trees is
 
    --  Body specs, alphabetical, as needed
 
-   function Image
-     (Tree              : in Syntax_Trees.Tree;
-      N                 : in Syntax_Trees.Node;
-      Node_Index        : in Valid_Node_Index;
-      Descriptor        : in WisiToken.Descriptor;
-      Include_Children  : in Boolean;
-      Include_RHS_Index : in Boolean := False;
-      Node_Numbers      : in Boolean := False)
-     return String;
+   type Visit_Parent_Mode is (Before, After);
+
+   function Append_Stream_Element
+     (Tree   : in out Syntax_Trees.Tree;
+      Stream : in     Stream_ID;
+      Node   : in     Valid_Node_Access;
+      State  : in     Unknown_State_Index)
+     return Terminal_Ref
+   with Post => Tree.Valid_Terminal (Append_Stream_Element'Result);
+   --  Add Node at Stream.Last; if not Shared_Stream, set Stack_Top to
+   --  element containing Node. If Node is from Shared_Stream, it has
+   --  been copied.
+
+   function Child_Index (Parent : in Node; Child : in Valid_Node_Access) 
return SAL.Peek_Type;
+
+   procedure Copy_Ancestors
+     (Tree      : in out Syntax_Trees.Tree;
+      Ref       : in out Stream_Node_Parents;
+      New_Node  : in     Valid_Node_Access;
+      User_Data : in     User_Data_Access_Constant);
+   --  New_Node is a copy of Ref.Ref.Node; copy all ancestors, updating
+   --  child links.
+
+   function Copy_Node
+     (Tree                   : in out Syntax_Trees.Tree;
+      Node                   : in     Valid_Node_Access;
+      Parent                 : in     Node_Access;
+      User_Data              : in     User_Data_Access_Constant;
+      Copy_Children          : in     Boolean;
+      Copy_Following_Deleted : in     Boolean;
+      New_Error_List         : in     Error_List_Access := null;
+      Set_Error_List         : in     Boolean           := False;
+      Set_Copied_Node        : in     Boolean           := False)
+     return Valid_Node_Access
+   with Pre => (if Copy_Children then Tree.Parents_Set);
+   --  If Set_Error_List is False, new node has copy of Node.Error_List.
+   --  Otherwise, new node has New_Error_List.
+
+   function Find_Match
+     (Error_List : in Error_List_Access;
+      Predicate  : in Error_Predicate)
+     return Error_Data_Lists.Cursor;
+
+   procedure First_Source_Terminal
+     (Tree                 : in     Syntax_Trees.Tree;
+      Ref                  : in out Stream_Node_Parents;
+      Trailing_Non_Grammar : in     Boolean)
+   with Pre => Rooted (Ref.Ref) and Ref.Parents.Depth = 0;
+   --  Update Ref to first source terminal in Ref.Node, initialize Ref.Parents.
+
+   procedure Insert_Stream_Element
+     (Tree   : in out Syntax_Trees.Tree;
+      Stream : in     Stream_ID;
+      Node   : in     Valid_Node_Access;
+      Before : in     Stream_Element_Lists.Cursor := 
Stream_Element_Lists.No_Element);
+   --  If Before is No_Element, add Node after Stream.Stack_Top (at
+   --  beginning of input). Otherwise add Node before Before.
+   --
+   --  Caller must change Stream.Stack_Top if necessary.
+
+   function Insert_Stream_Element
+     (Tree   : in out Syntax_Trees.Tree;
+      Stream : in     Stream_ID;
+      Node   : in     Valid_Node_Access;
+      Before : in     Stream_Element_Lists.Cursor := 
Stream_Element_Lists.No_Element)
+     return Rooted_Ref;
+   --  Same as procedure, return new element.
+
+   function Is_Optimized_List
+     (Productions : in Production_Info_Trees.Vector;
+      ID          : in Token_ID)
+     return Boolean;
 
-   procedure Move_Branch_Point (Tree : in out Syntax_Trees.Tree; Required_Node 
: in Valid_Node_Index);
+   function Last_Source_Terminal
+     (Tree                 : in Syntax_Trees.Tree;
+      Node                 : in Valid_Node_Access;
+      Trailing_Non_Grammar : in Boolean)
+     return Node_Access;
 
-   type Visit_Parent_Mode is (Before, After);
+   procedure Last_Source_Terminal
+     (Tree                 : in     Syntax_Trees.Tree;
+      Ref                  : in out Stream_Node_Parents;
+      Trailing_Non_Grammar : in     Boolean)
+   with Pre => Rooted (Ref.Ref) and Ref.Parents.Depth = 0;
+   --  Update Ref to last source terminal in Ref.Node, initialize Ref.Parents.
+
+   procedure Move_Element
+     (Tree      : in out Syntax_Trees.Tree;
+      Stream    : in     Stream_ID;
+      Ref       : in out Stream_Node_Parents;
+      New_Node  : in     Valid_Node_Access;
+      User_Data : in     User_Data_Access_Constant);
+   --  Move Ref to Stream, replacing Ref.Node with New_Node,
+   --  copying all ancestors. Update Ref to point to new stream element
+   --  with copied nodes.
+
+   procedure Next_Node (Node : in out Node_Access);
+   --  Assumes Tree.Parents_Set.
+
+   procedure Next_Node (Tree : in Syntax_Trees.Tree; Node : in out 
Stream_Node_Parents)
+   with Pre => Node.Ref.Node /= Invalid_Node_Access;
+
+   procedure Next_Non_Grammar
+     (Tree    : in     Syntax_Trees.Tree;
+      Ref     : in out Stream_Node_Ref);
+
+   procedure Next_Nonterm (Tree : in Syntax_Trees.Tree; Ref : in out 
Stream_Node_Ref);
+
+   function Next_Source_Terminal
+     (Tree                 : in Syntax_Trees.Tree;
+      Ref                  : in Stream_Node_Ref;
+      Trailing_Non_Grammar : in Boolean)
+     return Stream_Node_Ref;
+
+   procedure Next_Source_Terminal
+     (Tree                 : in     Syntax_Trees.Tree;
+      Ref                  : in out Stream_Node_Parents;
+      Trailing_Non_Grammar : in     Boolean);
+
+   function Prev_Source_Terminal
+     (Tree                 : in Syntax_Trees.Tree;
+      Node                 : in Node_Access;
+      Trailing_Non_Grammar : in Boolean)
+     return Node_Access;
+
+   procedure Prev_Terminal
+     (Tree    : in     Syntax_Trees.Tree;
+      Node    : in out Node_Access;
+      Parents : in out Node_Stacks.Stack);
 
    function Process_Tree
      (Tree         : in Syntax_Trees.Tree;
-      Node         : in Valid_Node_Index;
+      Node         : in Valid_Node_Access;
       Visit_Parent : in Visit_Parent_Mode;
       Process_Node : access function
         (Tree : in Syntax_Trees.Tree;
-         Node : in Valid_Node_Index)
+         Node : in Valid_Node_Access)
         return Boolean)
      return Boolean;
    --  Call Process_Node on nodes in tree rooted at Node. Return when
    --  Process_Node returns False (Process_Tree returns False), or when
    --  all nodes have been processed (Process_Tree returns True).
 
+   procedure Replace_Node (Element : in Stream_Index; New_Node : in 
Valid_Node_Access);
+
    procedure Set_Children
      (Tree     : in out Syntax_Trees.Tree;
-      Parent   : in     Valid_Node_Index;
-      Children : in     Valid_Node_Index_Array);
+      Parent   : in out Valid_Node_Access;
+      Children : in     Node_Access_Array)
+   with Pre => Tree.Parents_Set;
+
+   function Subtree_Image
+     (Tree         : in Syntax_Trees.Tree;
+      Node         : in Node_Access;
+      Node_Numbers : in Boolean := True;
+      Non_Grammar  : in Boolean := False;
+      Augmented    : in Boolean := False;
+      Line_Numbers : in Boolean := False;
+      Level        : in Integer := 0)
+     return String;
 
    ----------
    --  Public and body operations, alphabetical
 
-   function Action
-     (Tree : in Syntax_Trees.Tree;
-      Node : in Valid_Node_Index)
-     return Semantic_Action
-   is begin
-      return
-        (if Node <= Tree.Last_Shared_Node
-         then Tree.Shared_Tree.Nodes (Node).Action
-         else Tree.Branched_Nodes (Node).Action);
-   end Action;
-
-   procedure Add_Child
-     (Tree   : in out Syntax_Trees.Tree;
-      Parent : in     Valid_Node_Index;
-      Child  : in     Valid_Node_Index)
+   procedure Add_Deleted
+     (Tree          : in out Syntax_Trees.Tree;
+      Deleted_Node  : in     Valid_Node_Access;
+      Prev_Terminal : in out Stream_Node_Parents;
+      User_Data     : in     User_Data_Access_Constant)
    is
-      Node : Syntax_Trees.Node renames Tree.Shared_Tree.Nodes (Parent);
+      --  We need to copy Prev_Terminal.Ref.Node, and replace any links to it.
+      --  It is tempting to attempt to optimize this; if no parsers have
+      --  spawned since Following_Deleted was last edited, we don't need to
+      --  copy Prev_Terminal.Ref.Node again. But that would require a
+      --  reference count on Prev_Terminal.Ref.Node, which would make
+      --  spawning and terminating parsers non-incremental.
+
+      New_Node : constant Valid_Node_Access := Copy_Node
+        (Tree, Prev_Terminal.Ref.Node,
+         Parent        =>
+           (if Tree.Parents_Set
+            then Prev_Terminal.Ref.Node
+            else Invalid_Node_Access),
+         User_Data              => User_Data,
+         Copy_Children          => False,
+         Copy_Following_Deleted => True);
    begin
-      Node.Children.Append (Child);
-      Tree.Shared_Tree.Nodes (Child).Parent := Parent;
-   end Add_Child;
+      if Prev_Terminal.Parents.Depth = 0 then
+         pragma Assert (Rooted (Prev_Terminal.Ref));
+         --  There are no child links yet
+         Replace_Node (Prev_Terminal.Ref.Element, New_Node);
 
-   function Add_Identifier
-     (Tree        : in out Syntax_Trees.Tree;
-      ID          : in     Token_ID;
-      Identifier  : in     Identifier_Index;
-      Byte_Region : in     WisiToken.Buffer_Region)
-     return Valid_Node_Index
-   is begin
-      Tree.Shared_Tree.Nodes.Append
-        ((Label       => Virtual_Identifier,
-          Byte_Region => Byte_Region,
-          ID          => ID,
-          Identifier  => Identifier,
-          others      => <>));
-      Tree.Last_Shared_Node := Tree.Shared_Tree.Nodes.Last_Index;
-      return Tree.Last_Shared_Node;
-   end Add_Identifier;
+         Prev_Terminal.Ref.Node := New_Node;
 
-   function Add_Nonterm
-     (Tree            : in out Syntax_Trees.Tree;
-      Production      : in     WisiToken.Production_ID;
-      Children        : in     Valid_Node_Index_Array;
-      Action          : in     Semantic_Action := null;
-      Default_Virtual : in     Boolean         := False)
-     return Valid_Node_Index
-   is
-      Nonterm_Node : Valid_Node_Index;
-   begin
-      if Tree.Flush then
-         Tree.Shared_Tree.Nodes.Append
-           ((Label      => Syntax_Trees.Nonterm,
-             ID         => Production.LHS,
-             Action     => Action,
-             RHS_Index  => Production.RHS,
-             Virtual    => (if Children'Length = 0 then Default_Virtual else 
False),
-             others     => <>));
-         Tree.Last_Shared_Node := Tree.Shared_Tree.Nodes.Last_Index;
-         Nonterm_Node          := Tree.Last_Shared_Node;
       else
-         Tree.Branched_Nodes.Append
-           ((Label     => Syntax_Trees.Nonterm,
-             ID        => Production.LHS,
-             Action    => Action,
-             RHS_Index => Production.RHS,
-             Virtual   => (if Children'Length = 0 then Default_Virtual else 
False),
-             others    => <>));
-         Nonterm_Node := Tree.Branched_Nodes.Last_Index;
-      end if;
-
-      if Children'Length = 0 then
-         return Nonterm_Node;
+         --  Need to edit child link, which requires copying parent node, up to
+         --  the element root.
+         Copy_Ancestors (Tree, Prev_Terminal, New_Node, User_Data);
       end if;
 
-      Set_Children (Tree, Nonterm_Node, Children);
+      Prev_Terminal.Ref.Node.Following_Deleted.Append (Deleted_Node);
 
-      return Nonterm_Node;
-   end Add_Nonterm;
+      --  We need to move the non_grammar now, so they are correct for later
+      --  error recovery sessions. test_incremental.adb : Edit_String_10
 
-   function Add_Terminal
-     (Tree      : in out Syntax_Trees.Tree;
-      Terminal  : in     Token_Index;
-      Terminals : in     Base_Token_Arrays.Vector)
-     return Valid_Node_Index
-   is begin
-      if Tree.Flush then
-         Tree.Shared_Tree.Nodes.Append
-           ((Label       => Shared_Terminal,
-             ID          => Terminals (Terminal).ID,
-             Byte_Region => Terminals (Terminal).Byte_Region,
-             Terminal    => Terminal,
-             others      => <>));
-         Tree.Last_Shared_Node := Tree.Shared_Tree.Nodes.Last_Index;
-         return Tree.Last_Shared_Node;
-      else
-         Tree.Branched_Nodes.Append
-           ((Label       => Shared_Terminal,
-             ID          => Terminals (Terminal).ID,
-             Byte_Region => Terminals (Terminal).Byte_Region,
-             Terminal    => Terminal,
-             others      => <>));
-         return Tree.Branched_Nodes.Last_Index;
+      if Trace_Action > WisiToken.Outline then
+         --  Test Trace_Action here, to be consistent with messages from user Delete_Token.
+         Tree.Lexer.Trace.Put_Line
+           ("delete token " & Tree.Image (Deleted_Node, Node_Numbers => True, 
Non_Grammar => True));
+         if Deleted_Node.Non_Grammar.Length > 0 then
+            Tree.Lexer.Trace.Put_Line
+              (" ... move non_grammar to " & Tree.Image
+                 (Prev_Terminal.Ref.Node, Node_Numbers => True, Non_Grammar => 
True));
+         end if;
       end if;
-   end Add_Terminal;
 
-   function Add_Terminal
-     (Tree     : in out Syntax_Trees.Tree;
-      Terminal : in     Token_ID;
-      Before   : in     Base_Token_Index := Invalid_Token_Index)
-     return Valid_Node_Index
-   is begin
-      if Tree.Flush then
-         Tree.Shared_Tree.Nodes.Append
-           ((Label  => Virtual_Terminal,
-             ID     => Terminal,
-             Before => Before,
-             others => <>));
-         Tree.Last_Shared_Node := Tree.Shared_Tree.Nodes.Last_Index;
-         return Tree.Last_Shared_Node;
-      else
-         Tree.Branched_Nodes.Append
-           ((Label  => Virtual_Terminal,
-             ID     => Terminal,
-             Before => Before,
-             others => <>));
-         return Tree.Branched_Nodes.Last_Index;
-      end if;
-   end Add_Terminal;
+      Prev_Terminal.Ref.Node.Non_Grammar.Append (Deleted_Node.Non_Grammar);
 
-   function Before
-     (Tree             : in Syntax_Trees.Tree;
-      Virtual_Terminal : in Valid_Node_Index)
-     return Base_Token_Index
-   is begin
-      if Tree.Flush then
-         return Tree.Shared_Tree.Nodes (Virtual_Terminal).Before;
-      else
-         return Tree.Branched_Nodes (Virtual_Terminal).Before;
-      end if;
-   end Before;
+      --  We don't do Deleted_Node.Non_Grammar.Clear here; we are not
+      --  editing the shared stream. That is done in Finish_Parse.
+   end Add_Deleted;
 
-   function Augmented
-     (Tree : in Syntax_Trees.Tree;
-      Node : in Valid_Node_Index)
-     return Base_Token_Class_Access
-   is begin
-      if Node <= Tree.Last_Shared_Node then
-         return Tree.Shared_Tree.Nodes (Node).Augmented;
+   procedure Add_Errors
+     (Tree   : in out Syntax_Trees.Tree;
+      Stream : in     Stream_ID;
+      Node   : in     Valid_Node_Access;
+      Errors : in     Error_Data_Lists.List)
+   is
+      pragma Unreferenced (Stream); --  Only used in precondition.
+   begin
+      if Node.Error_List = null then
+         Node.Error_List := new Error_Data_Lists.List'(Errors);
       else
-         return Tree.Branched_Nodes (Node).Augmented;
+         for Err of Errors loop
+            Node.Error_List.Append (Err);
+         end loop;
       end if;
-   end Augmented;
+   end Add_Errors;
 
-   function Augmented_Const
-     (Tree : in Syntax_Trees.Tree;
-      Node : in Valid_Node_Index)
-     return Base_Token_Class_Access_Constant
-   is begin
-      if Node <= Tree.Last_Shared_Node then
-         return Base_Token_Class_Access_Constant (Tree.Shared_Tree.Nodes 
(Node).Augmented);
-      else
-         return Base_Token_Class_Access_Constant (Tree.Branched_Nodes 
(Node).Augmented);
-      end if;
-   end Augmented_Const;
+   function Add_Error
+     (Tree      : in out Syntax_Trees.Tree;
+      Node      : in     Valid_Node_Access;
+      Data      : in     Error_Data'Class;
+      User_Data : in     User_Data_Access_Constant)
+     return Valid_Node_Access
+   --  Copy Node, adding Data to its error list. Return new node.
+   is
+      function Copy_Errors return Error_List_Access
+      is begin
+         if Node.Error_List = null then
+            return new Error_Data_Lists.List'(Error_Data_Lists.To_List (Data));
+         else
+            return Result : constant Error_List_Access := new 
Error_Data_Lists.List'(Node.Error_List.all)
+            do
+               Result.Append (Data);
+            end return;
+         end if;
+      end Copy_Errors;
+   begin
+      return Copy_Node
+        (Tree, Node,
+         Parent         =>
+           (if Tree.Parents_Set
+            then Node.Parent
+            else Invalid_Node_Access),
+         User_Data              => User_Data,
+         Copy_Children          => False,
+         Copy_Following_Deleted => True,
+         New_Error_List         => Copy_Errors,
+         Set_Error_List         => True);
+   end Add_Error;
+
+   function Add_Errors
+     (Tree      : in out Syntax_Trees.Tree;
+      Node      : in     Valid_Node_Access;
+      Errors    : in     Error_Data_Lists.List;
+      User_Data : in     User_Data_Access_Constant)
+     return Valid_Node_Access
+   --  Copy Node, adding Errors to its error list. Return new node.
+   is
+      function Copy_Errors return Error_List_Access
+      is begin
+         if Node.Error_List = null then
+            return new Error_Data_Lists.List'(Errors);
+         else
+            return Result : constant Error_List_Access := new 
Error_Data_Lists.List'(Node.Error_List.all)
+            do
+               for Error of Errors loop
+                  Result.Append (Error);
+               end loop;
+            end return;
+         end if;
+      end Copy_Errors;
+   begin
+      return Copy_Node
+        (Tree, Node,
+         Parent         =>
+           (if Tree.Parents_Set
+            then Node.Parent
+            else Invalid_Node_Access),
+         User_Data              => User_Data,
+         Copy_Children          => False,
+         Copy_Following_Deleted => True,
+         New_Error_List         => Copy_Errors,
+         Set_Error_List         => True);
+   end Add_Errors;
+
+   procedure Add_Error_To_Input
+     (Tree      : in out Syntax_Trees.Tree;
+      Stream    : in     Stream_ID;
+      Data      : in     Error_Data'Class;
+      User_Data : in     User_Data_Access_Constant)
+   is
+      use Stream_Element_Lists;
+      Parse_Stream : Syntax_Trees.Parse_Stream renames Tree.Streams 
(Stream.Cur);
+      Error_Ref : Stream_Node_Parents := Tree.To_Stream_Node_Parents
+        (if Parse_Stream.Stack_Top = Parse_Stream.Elements.Last
+         then (Tree.Shared_Stream,
+               (Cur => Parse_Stream.Shared_Link),
+               Element (Parse_Stream.Shared_Link).Node)
+         else (Stream,
+               (Cur => Next (Parse_Stream.Stack_Top)),
+               Element (Next (Parse_Stream.Stack_Top)).Node));
+   begin
+      Tree.First_Terminal (Error_Ref, Following => False);
 
-   function Buffer_Region_Is_Empty (Tree : in Syntax_Trees.Tree; Node : in 
Valid_Node_Index) return Boolean
-   is begin
-      if Node <= Tree.Last_Shared_Node then
-         return Tree.Shared_Tree.Nodes (Node).Byte_Region = Null_Buffer_Region;
-      else
-         return Tree.Branched_Nodes (Node).Byte_Region = Null_Buffer_Region;
+      Move_Element
+        (Tree, Stream, Error_Ref, Add_Error (Tree, Error_Ref.Ref.Node, Data, 
User_Data), User_Data);
+
+      if Parse_Stream.Elements.Last = Parse_Stream.Stack_Top then
+         Next (Parse_Stream.Shared_Link);
       end if;
-   end Buffer_Region_Is_Empty;
+   end Add_Error_To_Input;
 
-   function Byte_Region
-     (Tree : in Syntax_Trees.Tree;
-      Node : in Valid_Node_Index)
-     return WisiToken.Buffer_Region
-   is begin
-      return
-        (if Node <= Tree.Last_Shared_Node
-         then Tree.Shared_Tree.Nodes (Node).Byte_Region
-         else Tree.Branched_Nodes (Node).Byte_Region);
-   end Byte_Region;
+   procedure Add_Error_To_Stack_Top
+     (Tree      : in out Syntax_Trees.Tree;
+      Stream    : in     Stream_ID;
+      Data      : in     Error_Data'Class;
+      User_Data : in     User_Data_Access_Constant)
+   is
+      use Stream_Element_Lists;
+      Parse_Stream : Syntax_Trees.Parse_Stream renames Tree.Streams 
(Stream.Cur);
+      Orig : Stream_Element := Element (Parse_Stream.Stack_Top);
+   begin
+      Orig.Node := Add_Error
+        (Tree, Element (Parse_Stream.Stack_Top).Node, Data, User_Data);
+      Replace_Element (Parse_Stream.Stack_Top, Orig);
+   end Add_Error_To_Stack_Top;
 
-   function Child
-     (Tree        : in Syntax_Trees.Tree;
-      Node        : in Valid_Node_Index;
-      Child_Index : in Positive_Index_Type)
-     return Node_Index
+   procedure Add_Errors
+     (Tree      : in out Syntax_Trees.Tree;
+      Error_Ref : in out Stream_Node_Parents;
+      Errors    : in     Error_Data_Lists.List;
+      User_Data : in     User_Data_Access_Constant)
    is
-      function Compute (N : in Syntax_Trees.Node) return Node_Index
+      function Copy_Errors return Error_List_Access
       is begin
-         if N.Label /= Nonterm then
-            return Invalid_Node_Index;
-
-         elsif Child_Index in N.Children.First_Index .. N.Children.Last_Index 
then
-            return N.Children (Child_Index);
+         if Error_Ref.Ref.Node.Error_List = null then
+            return new Error_Data_Lists.List'(Errors);
          else
-            return Invalid_Node_Index;
+            return Result : constant Error_List_Access := new 
Error_Data_Lists.List'(Error_Ref.Ref.Node.Error_List.all)
+            do
+               for Err of Errors loop
+                  Result.Append (Err);
+               end loop;
+            end return;
          end if;
-      end Compute;
+      end Copy_Errors;
+
+      New_Node : constant Valid_Node_Access := Copy_Node
+        (Tree, Error_Ref.Ref.Node,
+         Parent         =>
+           (if Tree.Parents_Set
+            then Error_Ref.Ref.Node.Parent
+            else Invalid_Node_Access),
+         User_Data              => User_Data,
+         Copy_Children          => False,
+         New_Error_List         => Copy_Errors,
+         Copy_Following_Deleted => True,
+         Set_Error_List         => True);
    begin
-      if Node <= Tree.Last_Shared_Node then
-         return Compute (Tree.Shared_Tree.Nodes (Node));
-      else
-         return Compute (Tree.Branched_Nodes (Node));
-      end if;
-   end Child;
+      Copy_Ancestors (Tree, Error_Ref, New_Node, User_Data);
+   end Add_Errors;
 
-   function Child_Count (Tree : in Syntax_Trees.Tree; Node : in 
Valid_Node_Index) return Ada.Containers.Count_Type
+   function Add_Identifier
+     (Tree        : in out Syntax_Trees.Tree;
+      ID          : in     Token_ID;
+      Identifier  : in     Identifier_Index)
+     return Valid_Node_Access
    is begin
-      return Tree.Get_Node_Const_Ref (Node).Children.Length;
-   end Child_Count;
+      return Result : constant Valid_Node_Access := new Node'
+        (Label       => Virtual_Identifier,
+         Child_Count => 0,
+         ID          => ID,
+         Node_Index  => -(Tree.Nodes.Last_Index + 1),
+         Identifier  => Identifier,
+         others      => <>)
+      do
+         Tree.Nodes.Append (Result);
+      end return;
+   end Add_Identifier;
 
-   function Child_Index
-     (N     : in Node;
-      Child : in Valid_Node_Index)
-     return SAL.Peek_Type
-   is begin
-      for I in N.Children.First_Index .. N.Children.Last_Index loop
-         if N.Children (I) = Child then
-            return I;
+   function Add_Nonterm_1
+     (Tree             : in out Syntax_Trees.Tree;
+      Production       : in     WisiToken.Production_ID;
+      Children         : in     Valid_Node_Access_Array;
+      Clear_Parents    : in     Boolean;
+      Recover_Conflict : in     Boolean)
+     return Valid_Node_Access
+   is
+      Nonterm_Node : constant Valid_Node_Access := new Node'
+        (Label            => Syntax_Trees.Nonterm,
+         Child_Count      => Children'Last,
+         ID               => Production.LHS,
+         Node_Index       => -(Tree.Nodes.Last_Index + 1),
+         Children         => To_Node_Access (Children),
+         RHS_Index        => Production.RHS,
+         Virtual          => False,
+         Recover_Conflict => Recover_Conflict,
+         others           => <>);
+   begin
+      Tree.Nodes.Append (Nonterm_Node);
+
+      for Child of Children loop
+         case Child.Label is
+         when Source_Terminal | Virtual_Identifier =>
+            null;
+
+         when Virtual_Terminal =>
+            Nonterm_Node.Virtual := True;
+
+         when Nonterm =>
+            if Child.Virtual then
+               Nonterm_Node.Virtual := True;
+            end if;
+         end case;
+
+         if Child.Parent /= Invalid_Node_Access then
+            if Clear_Parents then
+               declare
+                  Other_Parent : constant Node_Access := Child.Parent;
+                  Child_Index  : constant SAL.Base_Peek_Type := 
Syntax_Trees.Child_Index
+                    (Other_Parent.all, Child);
+               begin
+                  Other_Parent.Children (Child_Index) := Invalid_Node_Access;
+               end;
+            else
+               raise SAL.Programmer_Error with "attempt to use children with 
existing parents";
+            end if;
+         end if;
+
+         if Tree.Parents_Set then
+            Child.Parent := Nonterm_Node;
          end if;
       end loop;
-      raise SAL.Programmer_Error; -- Should be prevented by precondition
-   end Child_Index;
 
-   function Child_Index
-     (Tree   : in out Syntax_Trees.Tree;
-      Parent : in     Valid_Node_Index;
-      Child  : in     Valid_Node_Index)
-     return SAL.Peek_Type
-   is
-      N : Node_Var_Ref renames Get_Node_Var_Ref (Tree, Parent);
-   begin
-      return Child_Index (N, Child);
-   end Child_Index;
+      return Nonterm_Node;
+   end Add_Nonterm_1;
 
-   function Children (N : in Syntax_Trees.Node) return Valid_Node_Index_Array
+   function Add_Nonterm
+     (Tree          : in out Syntax_Trees.Tree;
+      Production    : in     WisiToken.Production_ID;
+      Children      : in     Valid_Node_Access_Array;
+      Clear_Parents : in     Boolean)
+     return Valid_Node_Access
    is begin
-      if N.Children.Length = 0 then
-         return (1 .. 0 => <>);
-      else
-         return Result : Valid_Node_Index_Array (N.Children.First_Index .. N.Children.Last_Index) do
-            for I in Result'Range loop
-               Result (I) := N.Children (I);
-            end loop;
-         end return;
-      end if;
-   end Children;
+      return Add_Nonterm_1 (Tree, Production, Children, Clear_Parents, Recover_Conflict => False);
+   end Add_Nonterm;
+
+   function Add_Source_Terminal_1
+     (Tree             : in out Syntax_Trees.Tree;
+      Terminal         : in     WisiToken.Lexer.Token;
+      In_Shared_Stream : in     Boolean;
+      Errors           : in     Error_Data_Lists.List)
+     return Valid_Node_Access
+   is begin
+      return Result : constant Valid_Node_Access := new Node'
+        (Label       => Source_Terminal,
+         Child_Count => 0,
+         ID          => Terminal.ID,
+
+         Node_Index  =>
+           (if In_Shared_Stream
+            then Tree.Next_Terminal_Node_Index
+            else -(Tree.Nodes.Last_Index + 1)),
+
+         Byte_Region    => Terminal.Byte_Region,
+         Char_Region    => Terminal.Char_Region,
+         New_Line_Count => New_Line_Count (Terminal.Line_Region),
+         Error_List     =>
+           (if Errors.Length = 0
+            then null
+            else new Error_Data_Lists.List'(Errors)),
+         others      => <>)
+      do
+         if Terminal.ID = Tree.Lexer.Descriptor.EOI_ID then
+            pragma Assert (In_Shared_Stream);
+            Tree.EOI := Result;
+            Result.Non_Grammar.Append (Terminal);
+         end if;
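+         --  Terminals added to the shared stream get sequential positive
+         --  node indices; all other new nodes get a negative index derived
+         --  from the node count (see the Node_Index choice above).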
+         if In_Shared_Stream then
+            Tree.Next_Terminal_Node_Index := @ + 1;
+         end if;
+         Tree.Nodes.Append (Result);
+      end return;
+   end Add_Source_Terminal_1;
+
+   function Add_Terminal
+     (Tree     : in out Syntax_Trees.Tree;
+      Stream   : in     Stream_ID;
+      Terminal : in     WisiToken.Lexer.Token;
+      Errors   : in     Error_Data_Lists.List)
+     return Single_Terminal_Ref
+   is begin
+      return Append_Stream_Element
+        (Tree, Stream,
+         Add_Source_Terminal_1
+           (Tree, Terminal,
+            In_Shared_Stream => Stream = Tree.Shared_Stream,
+            Errors           => Errors),
+         State               => Unknown_State);
+   end Add_Terminal;
+
+   function Add_Terminal
+     (Tree     : in out Syntax_Trees.Tree;
+      Terminal : in     WisiToken.Lexer.Token;
+      Errors   : in     Error_Data_Lists.List)
+     return Valid_Node_Access
+   is begin
+      return Add_Source_Terminal_1 (Tree, Terminal, In_Shared_Stream => False, Errors => Errors);
+   end Add_Terminal;
 
-   function Children (Tree : in Syntax_Trees.Tree; Node : in Valid_Node_Index) return Valid_Node_Index_Array
+   function Add_Terminal
+     (Tree     : in out Syntax_Trees.Tree;
+      Terminal : in     Token_ID)
+     return Valid_Node_Access
    is begin
-      if Node <= Tree.Last_Shared_Node then
-         return Children (Tree.Shared_Tree.Nodes (Node));
+      return Result : constant Valid_Node_Access := new Node'
+        (Label       => Virtual_Terminal,
+         Child_Count => 0,
+         ID          => Terminal,
+         Node_Index  => -(Tree.Nodes.Last_Index + 1),
+         others      => <>)
+      do
+         Tree.Nodes.Append (Result);
+      end return;
+   end Add_Terminal;
+
+   function Append_Stream_Element
+     (Tree   : in out Syntax_Trees.Tree;
+      Stream : in     Stream_ID;
+      Node   : in     Valid_Node_Access;
+      State  : in     Unknown_State_Index)
+     return Terminal_Ref
+   is
+      Parse_Stream : Syntax_Trees.Parse_Stream renames Tree.Streams (Stream.Cur);
+      New_Element  : constant Stream_Element_Lists.Cursor := Parse_Stream.Elements.Append
+        ((Node  => Node,
+          State => State));
+   begin
+      if Stream = Tree.Shared_Stream then
+         --  Stack_Top is always Invalid_Stream_Element.
+         null;
       else
-         return Children (Tree.Branched_Nodes (Node));
+         Parse_Stream.Stack_Top := New_Element;
       end if;
-   end Children;
 
-   procedure Clear (Tree : in out Syntax_Trees.Base_Tree)
+      return (Stream, (Cur => New_Element), Node);
+   end Append_Stream_Element;
+
+   function Augmented
+     (Tree : in Syntax_Trees.Tree;
+      Node : in Valid_Node_Access)
+     return Augmented_Class_Access
    is begin
-      Tree.Finalize;
-   end Clear;
+      return Node.Augmented;
+   end Augmented;
 
-   procedure Clear (Tree : in out Syntax_Trees.Tree)
+   function Augmented_Const
+     (Tree : in Syntax_Trees.Tree;
+      Node : in Valid_Node_Access)
+     return Augmented_Class_Access_Constant
    is begin
-      if Tree.Shared_Tree.Augmented_Present then
-         for Node of Tree.Branched_Nodes loop
-            if Node.Label = Nonterm then
-               Free (Node.Augmented);
-            end if;
-         end loop;
-      end if;
-      Tree.Shared_Tree.Finalize;
-      Tree.Last_Shared_Node := Invalid_Node_Index;
-      Tree.Branched_Nodes.Clear;
-   end Clear;
+      return Augmented_Class_Access_Constant (Node.Augmented);
+   end Augmented_Const;
 
-   function Copy_Subtree
-     (Tree : in out Syntax_Trees.Tree;
-      Root : in     Valid_Node_Index)
-     return Valid_Node_Index
+   procedure Breakdown
+     (Tree           : in out Syntax_Trees.Tree;
+      Ref            : in out Stream_Node_Parents;
+      Productions    : in     Production_Info_Trees.Vector;
+      User_Data      : in     Syntax_Trees.User_Data_Access_Constant;
+      First_Terminal : in     Boolean)
    is
-      function Copy_Node
-        (Tree   : in out Syntax_Trees.Tree;
-         Index  : in     Valid_Node_Index;
-         Parent : in     Node_Index)
-        return Valid_Node_Index
-      is begin
-         case Tree.Shared_Tree.Nodes (Index).Label is
-         when Shared_Terminal =>
-            declare
-               Node : Syntax_Trees.Node renames Tree.Shared_Tree.Nodes (Index);
-            begin
-               Tree.Shared_Tree.Nodes.Append
-                 ((Label       => Shared_Terminal,
-                   ID          => Node.ID,
-                   Byte_Region => Node.Byte_Region,
-                   Parent      => Parent,
-                   State       => Unknown_State,
-                   Augmented   => Node.Augmented,
-                   Terminal    => Node.Terminal));
-            end;
-
-         when Virtual_Terminal =>
-            declare
-               Node : Syntax_Trees.Node renames Tree.Shared_Tree.Nodes (Index);
-            begin
-               Tree.Shared_Tree.Nodes.Append
-                 ((Label       => Virtual_Terminal,
-                   ID          => Node.ID,
-                   Byte_Region => Node.Byte_Region,
-                   Parent      => Parent,
-                   State       => Unknown_State,
-                   Augmented   => Node.Augmented,
-                   Before      => Node.Before));
-            end;
+      use Stream_Element_Lists;
+
+      Stream    : Syntax_Trees.Parse_Stream renames Tree.Streams (Ref.Ref.Stream.Cur);
+      Cur       : Cursor                    renames Ref.Ref.Element.Cur;
+      Target    : Valid_Node_Access         renames Ref.Ref.Node;
+      To_Delete : Cursor                    := Cur;
+
+      Inverted_Parents : Node_Stacks.Stack := Ref.Parents.Invert;
+      --  Parents from Target to Cur. Inverted_Parents.Peek (1) is
+      --  Ref.Ref.Element.Node, and Inverted_Parents.Peek
+      --  (Inverted_Parents.Depth) is Target parent.
+      --
+      --  As Breakdown proceeds, Inverted_Parents is popped;
+      --  Inverted_Parents.Peek (1) is always the stream node that is the
+      --  ancestor of Target.
+
+      procedure Move_Errors
+      --  Move errors on To_Delete.Element.Node to first terminal. Update
+      --  Inverted_Parents.
+      is
+         Node       : constant Valid_Node_Access := Element (To_Delete).Node;
+         New_Errors : Error_Data_Lists.List;
+      begin
+         if Node.Error_List = null or else Node.Error_List.Length = 0 then
+            return;
+         else
+            for Err of Node.Error_List.all loop
+               New_Errors.Append (To_Message (Err, Tree, Node));
+            end loop;
 
-         when Virtual_Identifier =>
+            --  At this point, To_Delete is still in the stream; the children of
+            --  To_Delete have been pushed on the stream before it; Cur is the
+            --  first of those children. Find the terminal to move errors to, and
+            --  the new stream element containing it.
             declare
-               Node : Syntax_Trees.Node renames Tree.Shared_Tree.Nodes (Index);
+               First_Terminal      : Stream_Node_Parents := Tree.To_Stream_Node_Parents
+                 (Tree.To_Rooted_Ref (Ref.Ref.Stream, (Cur => To_Delete)));
+               Modify_Element_Node : Node_Access;
+               Modify_Element      : Cursor              := Cur;
             begin
-               Tree.Shared_Tree.Nodes.Append
-                 ((Label       => Virtual_Identifier,
-                   ID          => Node.ID,
-                   Byte_Region => Node.Byte_Region,
-                   Parent      => Parent,
-                   State       => Unknown_State,
-                   Augmented   => Node.Augmented,
-                   Identifier  => Node.Identifier));
-            end;
+               Tree.First_Terminal (First_Terminal, Following => False);
+
+               --  We know To_Delete has at least one terminal, because we don't put
+               --  errors on empty nonterms; we just delete them.
+               pragma Assert (First_Terminal.Ref.Node /= Invalid_Node_Access);
+
+               --  We are deleting To_Delete; don't include it in
+               --  First_Terminal.Parents, to avoid an unnecessary copy.
+               First_Terminal.Parents.Bottom_Pop;
+               if First_Terminal.Parents.Depth = 0 then
+                  --  ada_mode-recover_debbugs_36548.adb
+                  Modify_Element_Node := First_Terminal.Ref.Node;
+               else
+                  Modify_Element_Node := First_Terminal.Parents.Peek (First_Terminal.Parents.Depth);
+               end if;
+               loop
+                  exit when Element (Modify_Element).Node = Modify_Element_Node;
+                  Next (Modify_Element);
+               end loop;
+               First_Terminal.Ref.Element := (Cur => Modify_Element);
 
-         when Nonterm =>
-            declare
-               Children     : constant Valid_Node_Index_Array := Tree.Children (Index);
-               Parent       : Node_Index                      := Invalid_Node_Index;
-               New_Children : Valid_Node_Index_Arrays.Vector;
-            begin
-               if Children'Length > 0 then
-                  New_Children.Set_First_Last (Children'First, Children'Last);
-                  for I in Children'Range loop
-                     New_Children (I) := Copy_Node (Tree, Children (I), Parent);
+               declare
+                  Update_Target : constant Boolean := First_Terminal.Ref.Node = Target;
+
+                  --  Inverted_Parents holds the path from To_Delete to Target; if
+                  --  Add_Errors copies any of those nodes, update the Inverted_Parents
+                  --  value.
+                  Update_Parents_Index         : SAL.Base_Peek_Type := 0;
+                  Inverted_Parents_Index       : SAL.Base_Peek_Type := 1;
+                  First_Terminal_Parents_Index : SAL.Base_Peek_Type := First_Terminal.Parents.Depth;
+               begin
+                  loop
+                     exit when First_Terminal_Parents_Index = 0 or Inverted_Parents_Index > Inverted_Parents.Depth;
+                     exit when Inverted_Parents.Peek (Inverted_Parents_Index) /=
+                       First_Terminal.Parents.Peek (First_Terminal_Parents_Index);
+                     Update_Parents_Index := Inverted_Parents_Index;
+                     Inverted_Parents_Index := @ + 1;
+                     First_Terminal_Parents_Index := @ - 1;
                   end loop;
-               end if;
 
+                  Tree.Add_Errors (First_Terminal, New_Errors, User_Data);
+
+                  for I in 1 .. Update_Parents_Index loop
+                     Inverted_Parents.Set
+                       (I, Inverted_Parents.Depth, First_Terminal.Parents.Peek (First_Terminal.Parents.Depth - I + 1));
+                  end loop;
+                  if Update_Target then
+                     Target           := First_Terminal.Ref.Node;
+                  end if;
+               end;
+            end;
+         end if;
+      end Move_Errors;
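+   --  Each iteration of Undo_Reduce_Loop below replaces the stream element
+   --  rooted at a nonterm ancestor of Target with elements for that
+   --  nonterm's children (splitting optimized lists specially), until
+   --  Target (or, when First_Terminal, the element whose first terminal is
+   --  Target) is itself the root of a stream element.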
+   begin
+      Undo_Reduce_Loop :
+      loop
+         declare
+            Stream_Node : constant Valid_Node_Access := Element (Cur).Node;
+         begin
+            exit Undo_Reduce_Loop when
+              (if First_Terminal
+               then Tree.First_Terminal (Stream_Node) = Target
+               else Stream_Node = Target);
+
+            pragma Assert (Stream_Node.Label = Nonterm);
+            pragma Assert (Stream_Node.Child_Count > 0); -- otherwise we would not get here.
+
+            if Is_Optimized_List (Productions, Stream_Node.ID) and Stream_Node.RHS_Index = 1 then
+               --  Split list at list ancestor of Ref.Node
+               --
+               --  From test_syntax_trees.adb Breakdown_Optimized_List_01, stream looks
+               --  like:
+               --
+               --  <prev_stream_element>
+               --
+               --  -32:declarations_1
+               --  | -30:declarations_1         Split_Node parent 2
+               --  | | -28:declarations_1       Split_Node parent 1
+               --  | | | -26:declarations_1
+               --  | | | | -24:declarations_0
+               --  | | | | | -23:declaration_0
+               --  | | | | | | <a : A;>
+               --  | | | | -25:declaration_0
+               --  | | | | | <b : B;>
+               --  | | | -27:declaration_0      Split_Node
+               --  | | | | <c : C;>
+               --  | | -29:declaration_0
+               --  | | | <d : D;>
+               --  | -31:declaration_0
+               --  | | <e : E;>
+               --
+               --  <next_stream_element>
+               --
+               --  Stream_Node is node -32; the top declarations node with RHS_Index
+               --  1.
+               --
+               --  Note that if this list has been edited before, it may have top
+               --  nodes with RHS_Index 2; each of those declarations nodes has two
+               --  declarations children and is already optimized, so it is broken
+               --  down the same as non-list nodes. Other cases leave RHS_Index 2 in
+               --  lower nodes; test_incremental.adb Recover_08*.
                declare
-                  Node : Syntax_Trees.Node renames Tree.Shared_Tree.Nodes (Index);
+                  List_ID          : constant Token_ID  := Stream_Node.ID;
+                  Target_Anc_Index : SAL.Base_Peek_Type := 1;
+                  Split_Node       : Node_Access;
+
+                  Insert_Following : constant Stream_Element_Lists.Cursor := Next (Cur);
+                  --  Insert nodes following Split_Node before this.
+
+                  Insert_Leading : Stream_Element_Lists.Cursor := Cur;
+                  --  Insert nodes preceding Split_Node before this.
+
+                  procedure Find_Target_Element
+                  --  Update Cur to stream element at or after Cur containing Target
+                  is
+                     Target_Anc_Node : constant Valid_Node_Access :=
+                       (if Target_Anc_Index = Inverted_Parents.Depth
+                        then Target
+                        else Inverted_Parents.Peek (Target_Anc_Index + 1));
+                  begin
+                     Find_Element :
+                     loop
+                        exit Find_Element when Element (Cur).Node = Target_Anc_Node;
+                        Next (Cur);
+                     end loop Find_Element;
+                  end Find_Target_Element;
+
+                  procedure Insert_Children (Node : in Valid_Node_Access)
+                  --  Insert Node.Children (2 .. Node.Child_Count) into Stream before
+                  --  Insert_Following.
+                  --
+                  --  There is more than one child when the list has a separator
+                  --  (optimized_list_ebnf term, ada_annex_p
+                  --  parameter_specification_list,
+                  --  ada_mode-incremental_recover_02.adb), or multiple list elements
+                  --  (ada_lite_ebnf case_statement_alternative).
+                  is begin
+                     for I in 2 .. Node.Child_Count loop
+                        if I = 2 then
+                           Insert_Leading :=
+                             Stream.Elements.Insert (Insert_Following, (Node.Children (I), Unknown_State));
+                        else
+                           Stream.Elements.Insert (Insert_Following, (Node.Children (I), Unknown_State));
+                        end if;
+                        Node.Children (I).Parent := Invalid_Node_Access;
+                     end loop;
+                  end Insert_Children;
                begin
-                  Tree.Shared_Tree.Nodes.Append
-                    ((Label              => Nonterm,
-                      ID                 => Node.ID,
-                      Byte_Region        => Node.Byte_Region,
-                      Parent             => Parent,
-                      State              => Unknown_State,
-                      Augmented          => Node.Augmented,
-                      Virtual            => Node.Virtual,
-                      RHS_Index          => Node.RHS_Index,
-                      Action             => Node.Action,
-                      Name               => Node.Name,
-                      Children           => New_Children,
-                      Min_Terminal_Index => Node.Min_Terminal_Index));
+                  --  Find Split_Node, the first ancestor of Target that is a
+                  --  declarations. Start from the tree root, to handle nested lists;
+                  --  test_incremental.adb Recover_07.
+                  declare
+                     Temp : SAL.Base_Peek_Type := 2;
+                  begin
+                     loop
+                        exit when Temp > Inverted_Parents.Depth;
+                        exit when Inverted_Parents.Peek (Temp).ID /= List_ID;
+                        Target_Anc_Index := Temp;
+                        Temp := @ + 1;
+                     end loop;
+                  end;
+                  Split_Node := Inverted_Parents.Peek (Target_Anc_Index);
+
+                  --  Bring Split_Node to the stream, with previous and following list
+                  --  nodes each under a single stream element.
+                  --
+                  --  Suppose Split_Node is node -27 (test_syntax_trees.adb
+                  --  Breakdown_Optimized_List_01 case "c"). The desired stream is:
+                  --
+                  --  <prev_stream_element>
+                  --
+                  --  -26:declarations_1
+                  --  | -24:declarations_0
+                  --  | | -23:declaration_0
+                  --  | | | <a : A;>
+                  --  | -25:declaration_0
+                  --  | | <b : B;>
+                  --
+                  --  -27:declaration_0       Split_Node
+                  --  | <c : C;>
+                  --
+                  --  -35:declarations_1      New node
+                  --  | -34:declarations_0    New node
+                  --  | | | -29:declaration_0
+                  --  | | | | <d : D;>
+                  --  | | -31:declaration_0
+                  --  | | | <e : E;>
+                  --
+                  --  <next_stream_element>
+                  --
+                  --  The other test cases in Breakdown_Optimized_List_01 test special
+                  --  cases where Split_Node is near or at the beginning or end of the
+                  --  list.
+
+                  if Target.ID = List_ID or
+                     --  The target is a list node in the list we are breaking down.
+                     --  test_incremental.adb Recover_06a .. e.
+
+                    Split_Node.RHS_Index = 0
+                     --  There are no list elements preceding Split_Node; handle separator,
+                     --  multi-element list element. test_incremental.adb Edit_Code_18
+                  then
+                     Replace_Element (Cur, (Split_Node.Children (1), Unknown_State));
+                     Insert_Children (Split_Node);
+                     Find_Target_Element;
+
+                  elsif Split_Node.Children (1).RHS_Index = 0 then
+                     --  There is one list element preceding Split_Node
+                     Replace_Element (Cur, (Split_Node.Children (1).Children (1), Unknown_State));
+                     Insert_Children (Split_Node.Children (1));
+                     Split_Node.Children (1).Children (1).Parent := Invalid_Node_Access;
+                     Insert_Children (Split_Node);
+                     Find_Target_Element;
+
+                  else
+                     --  There is more than one list element preceding Split_Node
+                     Replace_Element (Cur, (Split_Node.Children (1), Unknown_State));
+                     Insert_Children (Split_Node);
+                     Find_Target_Element;
+                  end if;
+
+                  Split_Node.Children (1).Parent := Invalid_Node_Access;
+                  Split_Node.Parent := Invalid_Node_Access;
+
+                  if Target_Anc_Index - 1 = 0 then
+                     --  There are no list elements following Split_Node
+                     null;
+
+                  elsif Target_Anc_Index - 1 = 1 then
+                     --  There is only one list element following Split_Node
+                     --  test_syntax_trees.adb Breakdown_Optimized_List_01 case d1
+                     --
+                     --  Or Deleting_Node is an RHS_Index = 2 node, and all of the list
+                     --  elements following Split_Node are already under an RHS_Index = 1
+                     --  node; test_incremental.adb Recover_08a.
+                     declare
+                        Deleting_Node : constant Valid_Node_Access := Inverted_Parents.Pop;
+                     begin
+                        if Deleting_Node.Error_List /= null then
+                           --  FIXME: Move errors. need test case
+                           raise SAL.Not_Implemented with "error on optimized_list";
+                        end if;
+
+                        Insert_Children (Deleting_Node);
+
+                        Deleting_Node.Parent := Invalid_Node_Access;
+                        Deleting_Node.Children (1).Parent := Invalid_Node_Access;
+                     end;
+
+                  else
+                     --  Multiple list elements following Split_Node
+                     --
+                     --  Split_Node children are now in the stream. Build a new list node
+                     --  with content of following nodes, insert it before Insert_Following.
+                     declare
+                        Following_Node : Node_Access := Invalid_Node_Access;
+                        --  The root of the new list.
+                     begin
+                        for I in reverse 1 .. Target_Anc_Index - 1 loop
+                           declare
+                              Deleting_Node : constant Valid_Node_Access := Inverted_Parents.Peek (I);
+                           begin
+                              pragma Assert (Deleting_Node.ID = Stream_Node.ID and Deleting_Node.RHS_Index in 1 | 2);
+                              --  test_incremental.adb Recover_08a, b have RHS_Index = 2 here.
+
+                              if Deleting_Node.Error_List /= null then
+                                 --  FIXME: Move errors. need test case
+                                 raise SAL.Not_Implemented with "error on optimized_list";
+                              end if;
+
+                              for Child of Deleting_Node.Children loop
+                                 Child.Parent := Invalid_Node_Access;
+                              end loop;
+
+                              if Deleting_Node.RHS_Index = 2 then
+                                 pragma Assert
+                                   (Deleting_Node.Children'Length = 2 and
+                                      Insert_Leading /= Insert_Following);
+
+                                 pragma Assert (I /= 1);
+                                 --  If I = 1 here, Deleting_Node would be 
Stream_Node, and would be
+                                 --  handled by the not optimizing_list branch 
of Undo_Reduce_Loop.
+
+                                 if Inverted_Parents.Peek (I + 1) = Deleting_Node.Children (1) then
+                                    pragma Assert (Following_Node = null);
+                                    --  Split_Node is under Deleting_Node.Children (1).
+                                    --  test_incremental.adb Recover_08c.
+
+                                    --  Example tree from test_incremental.adb Recover_08c step 3
+                                    --
+                                    --  125:declaration_list_1       ; Inverted_Parents.Peek (Target_Anc_Index - 1)
+                                    --  | 118:declaration_list_2     ; Deleting_Node
+                                    --  | | 117:declaration_list_1
+                                    --  | | | 105:declaration_list_0 ; Split_Node
+                                    --  | | | | 48 declaration
+                                    --  | | | | | <a>
+                                    --  | | | 116:declaration
+                                    --  | | | | <b>               ; b is followed by the deleted ';'
+                                    --  | | 103:declaration_list_1   ;
+                                    --  | | | 102:declaration_list_0 ; Split_Node
+                                    --  | | | | <c d>                ; Insert_Leading
+                                    --
+                                    --  105:Split_Node children are on the stream, 103 goes in
+                                    --  Following_Node. Other children of 118 are handled in the
+                                    --  next iteration of this loop.
+
+                                    Following_Node := Add_Nonterm_1
+                                      (Tree, (List_ID, 0),
+                                       Children         => (1 => Deleting_Node.Children (2)),
+                                       Clear_Parents    => False,
+                                       Recover_Conflict => False);
+
+                                 else
+                                    pragma Assert (Following_Node /= null);
+                                    --  Split_Node is under Deleting_Node.Children (2).
+
+                                    --  Example tree from test_incremental.adb Recover_08d step 3
+                                    --
+                                    --  118:declaration_list_1       ; Inverted_Parents.Peek (Target_Anc_Index - 1)
+                                    --  | 111:declaration_list_2     ; Deleting_Node
+                                    --  | | 110:declaration_list_1   ;
+                                    --  | | |   <a b c>              ; c is followed by the deleted ';'
+                                    --  | | 103:declaration_list_1   ;
+                                    --  | | | 102:declaration_list_0 ; Split_Node
+                                    --  | | | | <d>                  ; Insert_Leading
+                                    --
+                                    --  102:Split_Node children are on the stream, rest of children of 103
+                                    --  are in Following_Node. 110 goes before Insert_Leading; nothing is
+                                    --  added to Following_Node. Other children of 118 are handled in the
+                                    --  next iteration of this loop.
+                                    Insert_Leading := Stream.Elements.Insert
+                                      (Insert_Leading, (Deleting_Node.Children (1), Unknown_State));
+                                 end if;
+
+                              elsif Following_Node = Invalid_Node_Access then
+                                 pragma Assert
+                                   (I = Target_Anc_Index - 1 and
+                                      Deleting_Node.Children (1).ID = List_ID and
+                                      Deleting_Node.RHS_Index in 0 | 1);
+                                 --  Deleting_Node.Children (1).RHS_Index is any of 0 .. 2, depending on
+                                 --  how many nodes were before Split_Node and whether that sublist was
+                                 --  edited.
+
+                                 declare
+                                    New_Children : Valid_Node_Access_Array (1 .. Deleting_Node.Child_Count - 1) :=
+                                      (others => Dummy_Node);
+                                 begin
+                                    for I in New_Children'Range loop
+                                       --  If the list has a separator, it should go on the stream, not in
+                                       --  Following. But we can't distinguish that from a multi-item list
+                                       --  element. And the parser handles both by breaking this down and
+                                       --  shifting the items individually. test_incremental.adb
+                                       --  Edit_Code_18.
+                                       New_Children (I) := Deleting_Node.Children (I + 1);
+                                    end loop;
+
+                                    Following_Node := Add_Nonterm_1
+                                      (Tree, (List_ID, 0),
+                                       Children         => New_Children,
+                                       Clear_Parents    => False,
+                                       Recover_Conflict => False);
+                                 end;
+
+                              else
+                                 pragma Assert
+                                   (Deleting_Node.RHS_Index = 1 and
+                                      Deleting_Node.Children (1).ID = List_ID and
+                                      Deleting_Node.Children (1).RHS_Index in 1 | 2);
+
+                                 declare
+                                    New_Children : Valid_Node_Access_Array (1 .. Deleting_Node.Child_Count) :=
+                                      (others => Dummy_Node);
+                                 begin
+                                    New_Children (1) := Following_Node;
+                                    for I in 2 .. New_Children'Last loop
+                                       New_Children (I) := Deleting_Node.Children (I);
+                                    end loop;
+
+                                    Following_Node := Add_Nonterm_1
+                                      (Tree, (List_ID, 1),
+                                       Children         => New_Children,
+                                       Clear_Parents    => False,
+                                       Recover_Conflict => False);
+                                 end;
+                              end if;
+                           end;
+                        end loop;
+                        Inverted_Parents.Pop (Target_Anc_Index - 1);
+
+                        Stream.Elements.Insert (Insert_Following, (Following_Node, Unknown_State));
+                     end;
+                  end if;
+                  Inverted_Parents.Pop; --  Split_Node
+
+                  exit Undo_Reduce_Loop when Element (Cur).Node = Target;
                end;
 
-               Tree.Last_Shared_Node := Tree.Shared_Tree.Nodes.Last_Index;
-               Parent := Tree.Last_Shared_Node;
-               for I in New_Children.First_Index .. New_Children.Last_Index loop
-                  Tree.Shared_Tree.Nodes (New_Children (I)).Parent := Parent;
+            else
+               --  Not an optimized_list node. Bring all children of Node to stream.
+               for I in reverse 1 .. Stream_Node.Child_Count loop
+                  Cur := Stream.Elements.Insert
+                    (Element  =>
+                       (Node  => Stream_Node.Children (I),
+                        State => Unknown_State),
+                     Before   => Cur);
+
+                  Stream_Node.Children (I).Parent := Invalid_Node_Access;
                end loop;
 
-               return Parent;
-            end;
-         end case;
-         Tree.Last_Shared_Node := Tree.Shared_Tree.Nodes.Last_Index;
-         return Tree.Last_Shared_Node;
-      end Copy_Node;
+               --  We need to do Move_Errors before the children of To_Delete are set
+               --  Invalid. test_incremental.adb Missing_Name_1. Pop To_Delete from
+               --  Inverted_Parents now, to match First_Terminal in Move_Errors.
+               Inverted_Parents.Pop;
+               Move_Errors;
 
+               Stream.Elements.Delete (To_Delete);
+
+               --  Find stream element containing Target
+               declare
+                  Node : constant Valid_Node_Access :=
+                    (if Inverted_Parents.Depth = 0
+                     then Target
+                     else Inverted_Parents.Peek);
+               begin
+                  Find_Element_1 :
+                  loop
+                     exit Undo_Reduce_Loop when Element (Cur).Node = Target;
+                     exit Find_Element_1 when Element (Cur).Node = Node;
+                     Next (Cur);
+                  end loop Find_Element_1;
+               end;
+            end if;
+         end;
+         To_Delete := Cur;
+      end loop Undo_Reduce_Loop;
+      Ref.Parents := Inverted_Parents.Invert;
+   end Breakdown;
+
+   procedure Breakdown
+     (Tree           : in out Syntax_Trees.Tree;
+      Ref            : in out Stream_Node_Ref;
+      Productions    : in     Production_Info_Trees.Vector;
+      User_Data      : in     Syntax_Trees.User_Data_Access_Constant;
+      First_Terminal : in     Boolean)
+   is
+      Ref_Parents : Stream_Node_Parents := Tree.To_Stream_Node_Parents (Ref);
    begin
-      return Copy_Node (Tree, Root, Invalid_Node_Index);
-   end Copy_Subtree;
+      Ref := Invalid_Stream_Node_Ref; --  Allow Delete Ref.Element.
+      Breakdown (Tree, Ref_Parents, Productions, User_Data, First_Terminal);
+      Ref := Ref_Parents.Ref;
+   end Breakdown;
 
-   function Count_IDs
-     (Tree : in Syntax_Trees.Tree;
-      Node : in Valid_Node_Index;
-      ID   : in Token_ID)
-     return SAL.Base_Peek_Type
+   function Byte_Region
+     (Tree                 : in Syntax_Trees.Tree;
+      Node                 : in Valid_Node_Access;
+      Trailing_Non_Grammar : in Boolean)
+     return WisiToken.Buffer_Region
    is
-      function Compute (N : in Syntax_Trees.Node) return SAL.Base_Peek_Type
-      is
-         use all type SAL.Base_Peek_Type;
-      begin
-         return Result : SAL.Base_Peek_Type := 0 do
-            if N.ID = ID then
-               Result := 1;
-            end if;
-            case N.Label is
-            when Shared_Terminal | Virtual_Terminal | Virtual_Identifier =>
-               null;
-            when Nonterm =>
-               for I of N.Children loop
-                  --  We don't check for Deleted_Child here; encountering one indicates
-                  --  an error in the user algorithm.
-                  Result := Result + Count_IDs (Tree, I, ID);
-               end loop;
-            end case;
-         end return;
-      end Compute;
-   begin
-      return Compute
-        ((if Node <= Tree.Last_Shared_Node
-          then Tree.Shared_Tree.Nodes (Node)
-          else Tree.Branched_Nodes (Node)));
-   end Count_IDs;
+      Prev_Source_Terminal : Node_Access := Invalid_Node_Access;
+      Next_Source_Terminal : Node_Access := Invalid_Node_Access;
 
-   function Count_Terminals
-     (Tree : in Syntax_Trees.Tree;
-      Node : in Valid_Node_Index)
-     return Integer
-     --  Count_Terminals must return Integer for Get_Terminals,
-     --  Positive_Index_Type for Get_Terminal_IDs.
-   is
-      function Compute (N : in Syntax_Trees.Node) return Integer
+      procedure Set_Prev
       is begin
-         case N.Label is
-         when Shared_Terminal | Virtual_Terminal | Virtual_Identifier =>
-            return 1;
+         if Node.ID = Tree.Lexer.Descriptor.SOI_ID then
+            Prev_Source_Terminal := Node;
+         else
+            Prev_Source_Terminal := Tree.Prev_Source_Terminal (Node, Trailing_Non_Grammar => True);
+         end if;
+      end Set_Prev;
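+      --  Set_Prev finds the nearest preceding source terminal (or Node
+      --  itself at SOI); it is used below to synthesize an empty region
+      --  (Last < First) for virtual tokens and empty nonterms, anchored at
+      --  the end of that terminal's region.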
 
-         when Nonterm =>
-            return Result : Integer := 0 do
-               for C of N.Children loop
-                  --  This can be called to build a debugging image while editing the tree
-                  if C /= Deleted_Child then
-                     Result := Result + Count_Terminals (Tree, C);
-                  end if;
-               end loop;
-            end return;
-         end case;
-      end Compute;
    begin
-      return Compute
-        ((if Node <= Tree.Last_Shared_Node
-          then Tree.Shared_Tree.Nodes (Node)
-          else Tree.Branched_Nodes (Node)));
-   end Count_Terminals;
+      case Node.Label is
+      when Source_Terminal =>
+         if Trailing_Non_Grammar and Node.Non_Grammar.Length > 0 then
+            pragma Assert (Node.Byte_Region /= Null_Buffer_Region);
+            return
+              (First => Node.Byte_Region.First,
+               Last  => Node.Non_Grammar (Node.Non_Grammar.Last_Index).Byte_Region.Last);
+         else
+            return Node.Byte_Region;
+         end if;
 
-   procedure Delete_Parent
-     (Tree : in out Syntax_Trees.Tree;
-      Node : in     Valid_Node_Index)
-   is
-      N      : Syntax_Trees.Node renames Tree.Shared_Tree.Nodes (Node);
-      Parent : Syntax_Trees.Node renames Tree.Shared_Tree.Nodes (N.Parent);
-   begin
-      Parent.Children (Child_Index (Parent, Node)) := Deleted_Child;
+      when Virtual_Terminal | Virtual_Identifier =>
+         case Node.Insert_Location is
+         when After_Prev | Between =>
+            --  It is tempting to assert Node.Non_Grammar.Length > 0 for between
+            --  (because there should be a new_line), but that is not true for
+            --  "null" when we insert "null;" - it is only true for the last
+            --  virtual token on the line.
+            if Node.Non_Grammar.Length > 0 then
+               return
+                 (First => Node.Non_Grammar (Node.Non_Grammar.First_Index).Byte_Region.First,
+                  Last  => Node.Non_Grammar (Node.Non_Grammar.Last_Index).Byte_Region.Last);
+
+            elsif Tree.Parents_Set then
+               Set_Prev;
+               if Prev_Source_Terminal = Invalid_Node_Access then
+                  --  Node is in an all virtual parse stream element.
+                  return Null_Buffer_Region;
+               else
+                  return
+                    (First => Tree.Byte_Region (Prev_Source_Terminal, Trailing_Non_Grammar => True).Last,
+                     Last  => Tree.Byte_Region (Prev_Source_Terminal, Trailing_Non_Grammar => True).Last - 1);
+               end if;
+            else
+               return Null_Buffer_Region;
+            end if;
 
-      if N.Parent = Tree.Root then
-         Tree.Root := Node;
-      end if;
+         when Before_Next =>
+            if Node.Non_Grammar.Length > 0 then
+               return
+                 (First => Node.Non_Grammar (Node.Non_Grammar.First_Index).Byte_Region.First,
+                  Last  => Node.Non_Grammar (Node.Non_Grammar.Last_Index).Byte_Region.Last);
 
-      N.Parent := Invalid_Node_Index;
-   end Delete_Parent;
+            elsif Tree.Parents_Set then
+               if Node.ID = Tree.Lexer.Descriptor.EOI_ID then
+                  Next_Source_Terminal := Node;
+               else
+                  Next_Source_Terminal := Tree.Next_Source_Terminal (Node, Trailing_Non_Grammar => True);
+               end if;
 
-   function Error_Message
-     (Tree      : in Syntax_Trees.Tree;
-      Terminals : in Base_Token_Array_Access_Constant;
-      Node      : in Valid_Node_Index;
-      File_Name : in String;
-      Message   : in String)
-     return String
-   is
-      First_Terminal : constant Valid_Node_Index  := Tree.First_Terminal (Node);
-      Line           : Line_Number_Type  := Line_Number_Type'First;
-      Column         : Ada.Text_IO.Count := Ada.Text_IO.Count'First;
-   begin
-      case Tree.Label (First_Terminal) is
-      when Shared_Terminal =>
-         declare
-            Token : Base_Token renames Terminals.all (Tree.First_Shared_Terminal (First_Terminal));
-         begin
-            Line   := Token.Line;
-            Column := Token.Column;
-         end;
+               if Next_Source_Terminal = Invalid_Node_Access then
+                  --  Node is in an all virtual parse stream element.
+                  return Null_Buffer_Region;
+               else
+                  return
+                    (First => Tree.Byte_Region (Next_Source_Terminal, Trailing_Non_Grammar => True).First,
+                     Last  => Tree.Byte_Region (Next_Source_Terminal, Trailing_Non_Grammar => True).First - 1);
+               end if;
+            else
+               return Null_Buffer_Region;
+            end if;
+         end case;
 
-      when Virtual_Terminal | Virtual_Identifier =>
-         Line   := Line_Number_Type'First;
-         Column := Ada.Text_IO.Count (Tree.Byte_Region (First_Terminal).First);
+      when Nonterm =>
+         if Node.Child_Count = 0 then
+            if Tree.Parents_Set then
+               Set_Prev;
+               if Prev_Source_Terminal = Invalid_Node_Access then
+                  --  Node is the root of an empty tree or parse stream element.
+                  return Null_Buffer_Region;
+               else
+                  declare
+                     First : constant Buffer_Pos := Tree.Byte_Region
+                       (Prev_Source_Terminal, Trailing_Non_Grammar => True).Last + 1;
+                  begin
+                     return (First, First - 1);
+                  end;
+               end if;
+            else
+               return Null_Buffer_Region;
+            end if;
+         else
+            Prev_Source_Terminal := First_Source_Terminal
+              (Tree, Node, Trailing_Non_Grammar => False, Following => False);
+            Next_Source_Terminal := Last_Source_Terminal (Tree, Node, Trailing_Non_Grammar);
+         end if;
 
-      when others =>
-         null;
-      end case;
-      return WisiToken.Error_Message (File_Name, Line, Column, Message);
-   end Error_Message;
+         if Prev_Source_Terminal = Invalid_Node_Access then
+            --  Node.Child_Count > 0, but Node contains no source_terminals; it is
+            --  all virtual. Find best estimate for First.
+            if Tree.Parents_Set then
+               Set_Prev;
+               if Prev_Source_Terminal = Invalid_Node_Access then
+                  --  Tree is corrupt
+                  return Null_Buffer_Region;
+               else
+                  declare
+                     First : constant Buffer_Pos := Tree.Char_Region
+                       (Prev_Source_Terminal, Trailing_Non_Grammar).First + 1;
+                  begin
+                     return (First, First - 1);
+                  end;
+               end if;
 
-   overriding procedure Finalize (Tree : in out Base_Tree)
-   is begin
-      Tree.Traversing  := False;
-      Tree.Parents_Set := False;
-      if Tree.Augmented_Present then
-         for Node of Tree.Nodes loop
-            if Node.Label = Nonterm then
-               Free (Node.Augmented);
+            else
+               return Null_Buffer_Region;
             end if;
-         end loop;
-         Tree.Augmented_Present := False;
-      end if;
-      Tree.Nodes.Finalize;
-   end Finalize;
+         end if;
 
-   overriding procedure Finalize (Tree : in out Syntax_Trees.Tree)
+         pragma Assert (Prev_Source_Terminal /= Invalid_Node_Access and Next_Source_Terminal /= Invalid_Node_Access);
+         return
+           (First => Tree.Byte_Region (Prev_Source_Terminal, Trailing_Non_Grammar).First,
+            Last  => Tree.Byte_Region (Next_Source_Terminal, Trailing_Non_Grammar).Last);
+      end case;
+   end Byte_Region;
+
+   function Byte_Region (Tree : in Syntax_Trees.Tree; Index : in Stream_Index) return WisiToken.Buffer_Region
    is begin
-      if Tree.Last_Shared_Node /= Invalid_Node_Index then
-         --  Tree.Branched_Nodes Augmented are shallow copies of
-         --  Tree.Shared_Tree.Nodes Augmented, so we don't free them there;
-         --  they are freed in Base_Tree.Finalize above.
-         Tree.Branched_Nodes.Finalize;
-         Tree.Last_Shared_Node := Invalid_Node_Index;
-         Tree.Shared_Tree := null;
-      end if;
-   end Finalize;
+      return Byte_Region (Tree, Stream_Element_Lists.Element (Index.Cur).Node, Trailing_Non_Grammar => False);
+   end Byte_Region;
 
-   function Insert_After
-     (User_Data            : in out User_Data_Type;
-      Tree                 : in     Syntax_Trees.Tree'Class;
-      Token                : in     Valid_Node_Index;
-      Insert_On_Blank_Line : in     Boolean)
-     return Boolean
+   function Byte_Region
+     (Tree                 : in Syntax_Trees.Tree;
+      Ref                  : in Stream_Node_Ref;
+      Trailing_Non_Grammar : in Boolean := False)
+     return WisiToken.Buffer_Region
    is
-      pragma Unreferenced (User_Data, Tree, Token, Insert_On_Blank_Line);
+      Prev_Source_Terminal : Terminal_Ref := Invalid_Stream_Node_Ref;
+      Next_Source_Terminal : Terminal_Ref := Invalid_Stream_Node_Ref;
    begin
-      return False;
-   end Insert_After;
+      if Ref.Node = Invalid_Node_Access or else --  Empty nonterm
+        Ref.Node.Label = Nonterm
+      then
+         Prev_Source_Terminal :=
+           (Ref.Stream, Ref.Element, First_Source_Terminal (Tree, Ref.Node, Trailing_Non_Grammar, Following => False));
+         Next_Source_Terminal :=
+           (Ref.Stream, Ref.Element, Last_Source_Terminal (Tree, Ref.Node, Trailing_Non_Grammar));
+
+         if Prev_Source_Terminal.Node = Invalid_Node_Access then
+            Prev_Source_Terminal := Tree.Prev_Source_Terminal (Ref, Trailing_Non_Grammar);
+            Next_Source_Terminal := Tree.Next_Source_Terminal (Ref, Trailing_Non_Grammar);
+         end if;
 
-   function Find_Ancestor
-     (Tree       : in Syntax_Trees.Tree;
-      Node       : in Valid_Node_Index;
-      ID         : in Token_ID;
-      Max_Parent : in Boolean := False)
-     return Node_Index
-   is
-      N           : Node_Index := Node;
-      Last_Parent : Node_Index := Invalid_Node_Index;
-   begin
-      loop
-         N :=
-           (if N <= Tree.Last_Shared_Node
-            then Tree.Shared_Tree.Nodes (N).Parent
-            else Tree.Branched_Nodes (N).Parent);
+         return
+           (First => Tree.Byte_Region (Prev_Source_Terminal, Trailing_Non_Grammar).Last,
+            Last => Tree.Byte_Region (Next_Source_Terminal, Trailing_Non_Grammar).First);
+      else
+         case Terminal_Label'(Ref.Node.Label) is
+         when Source_Terminal =>
+            if Trailing_Non_Grammar and Ref.Node.Non_Grammar.Length > 0 then
+               pragma Assert (Ref.Node.Byte_Region /= Null_Buffer_Region);
+               return
+                 (First => Ref.Node.Byte_Region.First,
+                  Last  => Ref.Node.Non_Grammar (Ref.Node.Non_Grammar.Last_Index).Byte_Region.Last);
+            else
+               return Ref.Node.Byte_Region;
+            end if;
 
-         exit when N = Invalid_Node_Index;
-         Last_Parent := N;
+         when Virtual_Terminal | Virtual_Identifier =>
+            case Ref.Node.Insert_Location is
+            when After_Prev | Between =>
+               if Ref.Node.Non_Grammar.Length > 0 then
+                  return
+                    (First => Ref.Node.Non_Grammar (Ref.Node.Non_Grammar.First_Index).Byte_Region.First,
+                     Last  => Ref.Node.Non_Grammar (Ref.Node.Non_Grammar.Last_Index).Byte_Region.Last);
+               else
+                  Prev_Source_Terminal := Tree.Prev_Source_Terminal (Ref, Trailing_Non_Grammar);
+                  return
+                    (First => Tree.Byte_Region (Prev_Source_Terminal, Trailing_Non_Grammar => True).Last,
+                     Last  => Tree.Byte_Region (Prev_Source_Terminal, Trailing_Non_Grammar => True).Last - 1);
+               end if;
 
-         exit when ID =
-           (if N <= Tree.Last_Shared_Node
-            then Tree.Shared_Tree.Nodes (N).ID
-            else Tree.Branched_Nodes (N).ID);
-      end loop;
+            when Before_Next =>
+               pragma Assert (Ref.Node.Non_Grammar.Length = 0);
+               Next_Source_Terminal := Tree.Next_Source_Terminal (Ref, Trailing_Non_Grammar);
+               return
+                 (First => Tree.Byte_Region (Next_Source_Terminal, Trailing_Non_Grammar => True).First,
+                  Last  => Tree.Byte_Region (Next_Source_Terminal, Trailing_Non_Grammar => True).First - 1);
+            end case;
 
-      return (if Max_Parent then Last_Parent else N);
-   end Find_Ancestor;
+         end case;
+      end if;
+   end Byte_Region;
 
-   function Find_Ancestor
-     (Tree       : in Syntax_Trees.Tree;
-      Node       : in Valid_Node_Index;
-      IDs        : in Token_ID_Array;
-      Max_Parent : in Boolean := False)
-     return Node_Index
+   function Byte_Region
+     (Tree                 : in Syntax_Trees.Tree;
+      Ref                  : in Stream_Node_Parents;
+      Parse_Stream         : in Stream_ID;
+      Trailing_Non_Grammar : in Boolean := False)
+     return WisiToken.Buffer_Region
    is
-      N           : Node_Index := Node;
-      Last_Parent : Node_Index := Invalid_Node_Index;
+      Prev_Source_Terminal : Stream_Node_Parents := Invalid_Stream_Node_Parents;
+      Next_Source_Terminal : Stream_Node_Parents := Invalid_Stream_Node_Parents;
    begin
-      loop
-         N :=
-           (if N <= Tree.Last_Shared_Node
-            then Tree.Shared_Tree.Nodes (N).Parent
-            else Tree.Branched_Nodes (N).Parent);
+      if Ref.Ref.Node = Invalid_Node_Access or else --  Empty nonterm
+        Ref.Ref.Node.Label = Nonterm
+      then
+         Prev_Source_Terminal := Ref;
+         Tree.First_Source_Terminal (Prev_Source_Terminal, Trailing_Non_Grammar);
+         Next_Source_Terminal := Ref;
+         Tree.Last_Source_Terminal (Next_Source_Terminal, Trailing_Non_Grammar);
+
+         if Prev_Source_Terminal.Ref.Node = Invalid_Node_Access then
+            Tree.Prev_Source_Terminal (Prev_Source_Terminal, Parse_Stream, Trailing_Non_Grammar);
+            Tree.Next_Source_Terminal (Next_Source_Terminal, Trailing_Non_Grammar);
+         end if;
 
-         exit when N = Invalid_Node_Index;
-         Last_Parent := N;
+         return
+           (First => Tree.Byte_Region (Prev_Source_Terminal, Parse_Stream, Trailing_Non_Grammar).Last,
+            Last => Tree.Byte_Region (Next_Source_Terminal, Parse_Stream, Trailing_Non_Grammar).First);
+      else
+         case Terminal_Label'(Ref.Ref.Node.Label) is
+         when Source_Terminal =>
+            if Trailing_Non_Grammar and Ref.Ref.Node.Non_Grammar.Length > 0 then
+               pragma Assert (Ref.Ref.Node.Byte_Region /= Null_Buffer_Region);
+               return
+                 (First => Ref.Ref.Node.Byte_Region.First,
+                  Last  => Ref.Ref.Node.Non_Grammar (Ref.Ref.Node.Non_Grammar.Last_Index).Byte_Region.Last);
+            else
+               return Ref.Ref.Node.Byte_Region;
+            end if;
 
-         exit when
-           (for some ID of IDs => ID =
-              (if N <= Tree.Last_Shared_Node
-               then Tree.Shared_Tree.Nodes (N).ID
-               else Tree.Branched_Nodes (N).ID));
-      end loop;
-      return (if Max_Parent then Last_Parent else N);
-   end Find_Ancestor;
+         when Virtual_Terminal | Virtual_Identifier =>
+            if Ref.Ref.Node.Non_Grammar.Length > 0 then
+               return
+                 (First => Ref.Ref.Node.Non_Grammar (Ref.Ref.Node.Non_Grammar.First_Index).Byte_Region.First,
+                  Last  => Ref.Ref.Node.Non_Grammar (Ref.Ref.Node.Non_Grammar.Last_Index).Byte_Region.Last);
+            else
+               case Ref.Ref.Node.Insert_Location is
+               when After_Prev | Between =>
+                  Prev_Source_Terminal := Ref;
+                  Tree.Prev_Source_Terminal (Prev_Source_Terminal, Parse_Stream, Trailing_Non_Grammar);
+                  return
+                    (First => Tree.Byte_Region (Prev_Source_Terminal, Parse_Stream, Trailing_Non_Grammar => True).Last,
+                     Last  => Tree.Byte_Region
+                       (Prev_Source_Terminal, Parse_Stream, Trailing_Non_Grammar => True).Last - 1);
+
+               when Before_Next =>
+                  --  It doesn't make sense for Ref.Ref.Node.Non_Grammar.Length > 0
+                  --  here, but we tolerate buggy language-specific code in Insert_Token
+                  --  or Delete_Token.
+                  Next_Source_Terminal := Ref;
+                  Tree.Next_Source_Terminal (Next_Source_Terminal, Trailing_Non_Grammar);
+                  return
+                    (First => Tree.Byte_Region (Next_Source_Terminal, Parse_Stream, Trailing_Non_Grammar => True).First,
+                     Last  => Tree.Byte_Region (Next_Source_Terminal, Parse_Stream, Trailing_Non_Grammar => True)
+                       .First - 1);
+               end case;
+            end if;
+         end case;
+      end if;
+   end Byte_Region;
 
-   function Find_Child
-     (Tree : in Syntax_Trees.Tree;
-      Node : in Valid_Node_Index;
-      ID   : in Token_ID)
-     return Node_Index
-   is
-      function Compute (N : in Syntax_Trees.Node) return Node_Index
-      is begin
-         case N.Label is
-         when Shared_Terminal | Virtual_Terminal | Virtual_Identifier =>
-            return Invalid_Node_Index;
+   function Byte_Region (Tree : in Syntax_Trees.Tree; Item : in Recover_Token) return Buffer_Region
+   is begin
+      if Item.Virtual then
+         return Null_Buffer_Region;
+      elsif Item.Element_Node = Invalid_Node_Access then
+         return Null_Buffer_Region;
+      else
+         case Item.Element_Node.Label is
+         when Source_Terminal =>
+            return Item.Element_Node.Byte_Region;
+         when Virtual_Terminal | Virtual_Identifier =>
+            return Null_Buffer_Region;
          when Nonterm =>
-            for C of N.Children loop
-               if C /= Deleted_Child then
-                  if ID =
-                    (if C <= Tree.Last_Shared_Node
-                     then Tree.Shared_Tree.Nodes (C).ID
-                     else Tree.Branched_Nodes (C).ID)
-                  then
-                     return C;
-                  end if;
+            declare
+               First : constant Node_Access := First_Source_Terminal
+                 (Tree, Item.Element_Node, Trailing_Non_Grammar => False, Following => False);
+               Last  : constant Node_Access := Last_Source_Terminal
+                 (Tree, Item.Element_Node, Trailing_Non_Grammar => False);
+            begin
+               if First = Invalid_Node_Access then
+                  return Null_Buffer_Region;
+               else
+                  return (First.Byte_Region.First, Last.Byte_Region.Last);
                end if;
-            end loop;
-            return Invalid_Node_Index;
+            end;
          end case;
-      end Compute;
-   begin
-      return Compute
-        ((if Node <= Tree.Last_Shared_Node
-          then Tree.Shared_Tree.Nodes (Node)
-          else Tree.Branched_Nodes (Node)));
-   end Find_Child;
+      end if;
+   end Byte_Region;
 
-   function Find_Descendant
-     (Tree : in Syntax_Trees.Tree;
-      Node : in Valid_Node_Index;
-      ID   : in Token_ID)
-     return Node_Index
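+   --  Char_Region of Node, including trailing non_grammar if requested.
+   --  For virtual terminals and for empty or all-virtual nonterms the
+   --  result is derived from the nearest source terminal's non_grammar or
+   --  is an empty region, or Null_Buffer_Region when Tree.Parents_Set is
+   --  False.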
+   function Char_Region
+     (Tree                 : in Syntax_Trees.Tree;
+      Node                 : in Valid_Node_Access;
+      Trailing_Non_Grammar : in Boolean)
+     return Buffer_Region
    is
-      Found : Node_Index := Invalid_Node_Index;
+      Prev_Source_Terminal : Node_Access := Invalid_Node_Access;
+      Next_Source_Terminal : Node_Access := Invalid_Node_Access;
 
-      function Process (Tree : in Syntax_Trees.Tree; Node : in 
Valid_Node_Index) return Boolean
-      is
-         Node_ID : constant Token_ID :=
-           (if Node <= Tree.Last_Shared_Node
-            then Tree.Shared_Tree.Nodes (Node).ID
-            else Tree.Branched_Nodes (Node).ID);
-      begin
-         if Node_ID = ID then
-            Found := Node;
-            return False;
+      procedure Set_Prev
+      is begin
+         if Node.ID = Tree.Lexer.Descriptor.SOI_ID then
+            Prev_Source_Terminal := Node;
          else
-            return True;
+            Prev_Source_Terminal := Tree.Prev_Source_Terminal (Node, 
Trailing_Non_Grammar => True);
          end if;
-      end Process;
+      end Set_Prev;
 
-      Junk : constant Boolean := Process_Tree (Tree, Node, Before, 
Process'Access);
-      pragma Unreferenced (Junk);
    begin
-      return Found;
-   end Find_Descendant;
-
-   function Find_Descendant
-     (Tree      : in     Syntax_Trees.Tree;
-      Node      : in     Valid_Node_Index;
-      Predicate : access function (Tree : in Syntax_Trees.Tree; Node : in 
Valid_Node_Index) return Boolean)
-     return Node_Index
-   is
-      Found : Node_Index := Invalid_Node_Index;
-
-      function Process (Tree : in Syntax_Trees.Tree; Node : in 
Valid_Node_Index) return Boolean
-      is begin
-         if Predicate (Tree, Node) then
-            Found := Node;
-            return False;
+      case Node.Label is
+      when Source_Terminal =>
+         if Trailing_Non_Grammar and Node.Non_Grammar.Length > 0 then
+            pragma Assert (Node.Char_Region /= Null_Buffer_Region);
+            return
+              (First => Node.Char_Region.First,
+               Last  => Node.Non_Grammar 
(Node.Non_Grammar.Last_Index).Char_Region.Last);
          else
-            return True;
+            return Node.Char_Region;
          end if;
-      end Process;
-
-      Junk : constant Boolean := Process_Tree (Tree, Node, Before, 
Process'Access);
-      pragma Unreferenced (Junk);
-   begin
-      return Found;
-   end Find_Descendant;
 
-   function Find_Sibling
-     (Tree : in Syntax_Trees.Tree;
-      Node : in Valid_Node_Index;
-      ID   : in Token_ID)
-     return Node_Index
-   is
-      function Compute_2 (N : in Syntax_Trees.Node) return Node_Index
-      is begin
-         case N.Label is
-         when Shared_Terminal | Virtual_Terminal | Virtual_Identifier =>
-            return Invalid_Node_Index;
+      when Virtual_Terminal | Virtual_Identifier =>
+         if Node.Non_Grammar.Length > 0 then
+            return
+              (First => Node.Non_Grammar 
(Node.Non_Grammar.First_Index).Char_Region.First,
+               Last  => Node.Non_Grammar 
(Node.Non_Grammar.Last_Index).Char_Region.Last);
 
-         when Nonterm =>
-            for C of N.Children loop
-               if C /= Deleted_Child then
-                  if ID =
-                    (if C <= Tree.Last_Shared_Node
-                     then Tree.Shared_Tree.Nodes (C).ID
-                     else Tree.Branched_Nodes (C).ID)
-                  then
-                     return C;
+         else
+            case Node.Insert_Location is
+            when After_Prev | Between =>
+               if Tree.Parents_Set then
+                  Set_Prev;
+                  if Prev_Source_Terminal = Invalid_Node_Access then
+                     --  Node is in an all virtual parse stream element.
+                     return Null_Buffer_Region;
+                  else
+                     return
+                       (First => Tree.Char_Region (Prev_Source_Terminal, 
Trailing_Non_Grammar => True).Last,
+                        Last  => Tree.Char_Region (Prev_Source_Terminal, 
Trailing_Non_Grammar => True).Last - 1);
                   end if;
+               else
+                  return Null_Buffer_Region;
                end if;
-            end loop;
-            return Invalid_Node_Index;
-         end case;
-      end Compute_2;
 
-      function Compute_1 (Parent : in Node_Index) return Node_Index
-      is begin
-         if Parent = Invalid_Node_Index then
-            return Invalid_Node_Index;
+            when Before_Next =>
+               if Tree.Parents_Set then
+                  if Node.ID = Tree.Lexer.Descriptor.EOI_ID then
+                     Next_Source_Terminal := Node;
+                  else
+                     Next_Source_Terminal := Tree.Next_Source_Terminal (Node, 
Trailing_Non_Grammar => True);
+                  end if;
+
+                  if Next_Source_Terminal = Invalid_Node_Access then
+                     --  Node is in an all virtual parse stream element.
+                     return Null_Buffer_Region;
+                  else
+                     return
+                       (First => Tree.Char_Region (Next_Source_Terminal, 
Trailing_Non_Grammar => True).First,
+                        Last  => Tree.Char_Region (Next_Source_Terminal, 
Trailing_Non_Grammar => True).First - 1);
+                  end if;
+               else
+                  return Null_Buffer_Region;
+               end if;
+            end case;
+         end if;
 
+      when Nonterm =>
+         if Node.Child_Count = 0 then
+            if Tree.Parents_Set then
+               Set_Prev;
+               if Prev_Source_Terminal = Invalid_Node_Access then
+                  --  Node is the root of an empty tree or parse stream element.
+                  return Null_Buffer_Region;
+               else
+                  declare
+                     First : constant Buffer_Pos := Tree.Char_Region
+                       (Prev_Source_Terminal, Trailing_Non_Grammar => 
True).Last + 1;
+                  begin
+                     return (First, First - 1);
+                  end;
+               end if;
+            else
+               return Null_Buffer_Region;
+            end if;
          else
-            return Compute_2
-              ((if Parent <= Tree.Last_Shared_Node
-                then Tree.Shared_Tree.Nodes (Parent)
-                else Tree.Branched_Nodes (Parent)));
+            Prev_Source_Terminal := First_Source_Terminal
+              (Tree, Node, Trailing_Non_Grammar => False, Following => False);
+            Next_Source_Terminal := Last_Source_Terminal (Tree, Node, 
Trailing_Non_Grammar);
          end if;
-      end Compute_1;
-   begin
-      return Compute_1
-        ((if Node <= Tree.Last_Shared_Node
-          then Tree.Shared_Tree.Nodes (Node).Parent
-          else Tree.Branched_Nodes (Node).Parent));
-   end Find_Sibling;
 
-   function First_Index (Tree : in Syntax_Trees.Tree) return Node_Index
+         if Prev_Source_Terminal = Invalid_Node_Access then
+            --  Node.Child_Count > 0, but Node contains no source_terminals;
+            --  it is all virtual. Find best estimate for First.
+            if Tree.Parents_Set then
+               Set_Prev;
+               declare
+                  First : constant Buffer_Pos := Tree.Char_Region
+                    (Prev_Source_Terminal, Trailing_Non_Grammar).First + 1;
+               begin
+                  return (First, First - 1);
+               end;
+
+            else
+               return Null_Buffer_Region;
+            end if;
+         end if;
+
+         pragma Assert (Prev_Source_Terminal /= Invalid_Node_Access and 
Next_Source_Terminal /= Invalid_Node_Access);
+         return
+           (First => Tree.Char_Region (Prev_Source_Terminal, 
Trailing_Non_Grammar).First,
+            Last  => Tree.Char_Region (Next_Source_Terminal, 
Trailing_Non_Grammar).Last);
+      end case;
+   end Char_Region;
+
+   function Check_Multi_Line
+     (Tree       : in     Syntax_Trees.Tree;
+      Node       : in     Valid_Node_Access;
+      Line       : in     Line_Number_Type;
+      Char_Pos   : in out Buffer_Pos;
+      Start_Line : in     Line_Number_Type)
+     return Boolean
+   with Pre => Node.Label in Terminal_Label and 
Tree.Lexer.Can_Contain_New_Line (Node.ID)
+   --  Return True if Node contains the new_line that ends Line - 1; set
+   --  Char_Pos to the character position following the New_Line.
+   --  Start_Line must be the line number at the first char of Node.
+   is
+      Temp : Base_Buffer_Pos := Node.Byte_Region.First;
+   begin
+      for I in Start_Line + 1 .. Line loop
+         Temp := Tree.Lexer.Find_New_Line ((Temp, Node.Byte_Region.Last));
+         if Temp = Invalid_Buffer_Pos then
+            return False;
+         end if;
+         Temp := @ + 1;
+      end loop;
+      Char_Pos := Temp;
+      return True;
+   end Check_Multi_Line;
+
+   function Check_Non_Grammar
+     (Tree     : in     Syntax_Trees.Tree;
+      Node     : in     Valid_Node_Access;
+      Line     : in     Line_Number_Type;
+      Char_Pos : in out Buffer_Pos)
+     return Boolean
+   with Pre => Node.Label in Terminal_Label
+   --  Return True if Node contains non_grammar that ends Line - 1; set
+   --  Char_Pos to the character position following the New_Line.
    is begin
-      return Tree.Shared_Tree.Nodes.First_Index;
-   end First_Index;
+      for Token of Node.Non_Grammar loop
+         if Token.Line_Region.First <= Line - 1 and Token.Line_Region.Last >= 
Line then
+            declare
+               Temp : constant Base_Buffer_Pos := 
Tree.Lexer.Line_Begin_Char_Pos (Token, Line);
+            begin
+               if Temp /= Invalid_Buffer_Pos then
+                  Char_Pos := Temp;
+                  return True;
+               end if;
+            end;
+         end if;
+      end loop;
+      return False;
+   end Check_Non_Grammar;
+
+   function Child
+     (Tree        : in Syntax_Trees.Tree;
+      Node        : in Valid_Node_Access;
+      Child_Index : in Positive_Index_Type)
+     return Node_Access
+   is
+   begin
+      if Child_Index in Node.Children'Range then
+         return Node.Children (Child_Index);
+      else
+         return Invalid_Node_Access;
+      end if;
+   end Child;
 
-   procedure Flush (Tree : in out Syntax_Trees.Tree)
+   function Child_Count (Tree : in Syntax_Trees.Tree; Node : in 
Valid_Node_Access) return SAL.Base_Peek_Type
    is begin
-      --  This is the opposite of Move_Branch_Point
-      Tree.Shared_Tree.Nodes.Merge (Tree.Branched_Nodes);
-      Tree.Last_Shared_Node := Tree.Shared_Tree.Nodes.Last_Index;
-      Tree.Flush            := True;
-   end Flush;
+      return Node.Child_Count;
+   end Child_Count;
 
-   function Flushed (Tree : in Syntax_Trees.Tree) return Boolean
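+   --  Return the index of Child in Parent.Children; raise
+   --  SAL.Programmer_Error if Child is not a child of Parent.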
+   function Child_Index (Parent : in Node; Child : in Valid_Node_Access) 
return SAL.Peek_Type
    is begin
-      return Tree.Flush;
-   end Flushed;
+      for I in Parent.Children'Range loop
+         if Parent.Children (I) = Child then
+            return I;
+         end if;
+      end loop;
+      raise SAL.Programmer_Error;
+   end Child_Index;
 
-   procedure Get_IDs
-     (Tree   : in     Syntax_Trees.Tree;
-      Node   : in     Valid_Node_Index;
-      ID     : in     Token_ID;
-      Result : in out Valid_Node_Index_Array;
+   function Child_Index
+     (Tree   : in Syntax_Trees.Tree;
+      Parent : in Valid_Node_Access;
+      Child  : in Valid_Node_Access)
+     return SAL.Peek_Type
+   is
+      pragma Unreferenced (Tree);
+   begin
+      return Child_Index (Parent.all, Child);
+   end Child_Index;
+
+   function Children (Tree : in Syntax_Trees.Tree; Node : in 
Valid_Node_Access) return Node_Access_Array
+   is begin
+      return Node.Children;
+   end Children;
+
+   function Children_Recover_Tokens
+     (Tree    : in Syntax_Trees.Tree;
+      Stream  : in Stream_ID;
+      Element : in Stream_Index)
+     return Recover_Token_Array
+   is
+      Node : constant Node_Access := Stream_Element_Lists.Element 
(Element.Cur).Node;
+   begin
+      --  WORKAROUND: GNAT Community 2020 doesn't support 'of' here, and it
+      --  hangs if there are any errors in the statement with 'in'.
+      --  return (for I in Node.Children'Range =>
+      --    Tree.Get_Recover_Token (Node.Children (I)));
+      return Result : Recover_Token_Array (1 .. Node.Child_Count) do
+         for I in Node.Children'Range loop
+            Result (I) := Tree.Get_Recover_Token (Node.Children (I));
+         end loop;
+      end return;
+   end Children_Recover_Tokens;
+
+   procedure Clear
+     (Tree        : in out Syntax_Trees.Tree;
+      Free_Memory : in     Boolean := False)
+   is begin
+      for N of Tree.Nodes loop
+         Free (N.Augmented);
+         Free (N.Error_List);
+         Free (N);
+      end loop;
+      Tree.Nodes.Clear (Free_Memory);
+
+      --  Clear saved element list cursors in parse streams before freeing
+      --  the element lists, so they don't try to decrement reference counts
+      --  in deallocated elements. We can't rely on cursor Finalize for
+      --  this; that's done in arbitrary order. Loop in reverse order so
+      --  shared stream is last; other streams have links to it.
+      for Stream of reverse Tree.Streams loop
+         Stream.Stack_Top   := Stream_Element_Lists.No_Element;
+         Stream.Shared_Link := Stream_Element_Lists.No_Element;
+         if Debug_Mode then
+            Stream.Elements.Check_Ref_Counts;
+         end if;
+      end loop;
+      Tree.Streams.Clear;
+
+      Tree.Root                     := Invalid_Node_Access;
+      Tree.SOI                      := Invalid_Node_Access;
+      Tree.EOI                      := Invalid_Node_Access;
+      Tree.Next_Stream_Label        := Shared_Stream_Label + 1;
+      Tree.Next_Terminal_Node_Index := 1;
+      Tree.Traversing               := False;
+      Tree.Parents_Set              := False;
+   end Clear;
+
+   function Cleared (Tree : in Syntax_Trees.Tree) return Boolean
+   is begin
+      return Tree.Streams.Length = 0 and Tree.Nodes.Length = 0;
+   end Cleared;
+
+   procedure Clear_Augmented (Tree : in Syntax_Trees.Tree)
+   is begin
+      for Node of Tree.Nodes loop
+         Free (Node.Augmented);
+         if Node.Label = Source_Terminal then
+            for N of Node.Following_Deleted loop
+               Free (N.Augmented);
+            end loop;
+         end if;
+      end loop;
+   end Clear_Augmented;
+
+   procedure Clear_Parent
+     (Tree           : in out Syntax_Trees.Tree;
+      Node           : in     Valid_Node_Access;
+      Clear_Children : in     Boolean)
+   is begin
+      if Node.Parent /= Invalid_Node_Access and Clear_Children then
+         Node.Parent.Children (Child_Index (Node.Parent.all, Node)) := null;
+
+         if Node.Parent = Tree.Root then
+            Tree.Root := Node;
+         end if;
+      end if;
+      Node.Parent := Invalid_Node_Access;
+   end Clear_Parent;
+
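+   --  Move the parse result to Tree.Root, adding SOI and EOI as root
+   --  children if not already present; delete all parse streams, set
+   --  parent links, free nodes no longer reachable (except Keep_Nodes),
+   --  and compact Tree.Nodes.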
+   procedure Clear_Parse_Streams
+     (Tree       : in out Syntax_Trees.Tree;
+      Keep_Nodes : in     Valid_Node_Access_Lists.List := 
Valid_Node_Access_Lists.Empty_List)
+   is begin
+      if Tree.Root = Invalid_Node_Access then
+         Tree.Root := Syntax_Trees.Root (Tree);
+      end if;
+
+      --  Add SOI, EOI (from the parse stream, to include any
+      --  Following_Deleted and Error_Data) to Root children, so
+      --  Prev/Next_Non_Grammar can find them.
+      declare
+         Parse_Stream : Syntax_Trees.Parse_Stream renames Tree.Streams 
(Tree.Streams.Last);
+         SOI : constant Valid_Node_Access := Stream_Element_Lists.Element 
(Parse_Stream.Elements.First).Node;
+
+         Last_Node : constant Valid_Node_Access := 
Stream_Element_Lists.Element (Parse_Stream.Elements.Last).Node;
+
+         EOI : constant Valid_Node_Access :=
+           (if Tree.ID (Last_Node) = Tree.Lexer.Descriptor.EOI_ID
+            then Last_Node
+            else Tree.EOI);
+
+         New_Children : Node_Access_Array (1 .. Tree.Root.Child_Count + 2);
+      begin
+         if Tree.Streams.Last = Tree.Shared_Stream.Cur and 
Tree.Root.Child_Count = 3 then
+            --  This is a packrat parse; SOI, EOI already in tree
+            pragma Assert (Tree.Root.Children (1) = Tree.SOI and 
Tree.Root.Children (3) = EOI);
+         else
+            --  There is a parse stream, or this is an incremental parse
+            --  where the edit did not require a parse.
+            New_Children (1) := SOI;
+            New_Children (2 .. New_Children'Last - 1) := Tree.Root.Children;
+            New_Children (New_Children'Last) := EOI;
+
+            Tree.SOI := SOI;
+            Tree.EOI := EOI;
+
+            Tree.Root.Children := (others => Invalid_Node_Access);
+
+            Tree.Root := new Node'
+              (Label       => Nonterm,
+               Copied_Node => Invalid_Node_Access,
+               Child_Count => Tree.Root.Child_Count + 2,
+               ID          => Tree.Root.ID,
+               Node_Index  => Tree.Root.Node_Index,
+               Parent      => null,
+               Augmented   => Tree.Root.Augmented,
+               Error_List  =>
+                 (if Tree.Root.Error_List = null
+                  then null
+                  else new Error_Data_Lists.List'(Tree.Root.Error_List.all)),
+               Virtual          => Tree.Root.Virtual,
+               Recover_Conflict => False,
+               RHS_Index        => Tree.Root.RHS_Index,
+               Name_Offset      => Tree.Root.Name_Offset,
+               Name_Length      => Tree.Root.Name_Length,
+               Children         => New_Children);
+
+            for Child of New_Children loop
+               Child.Parent := Tree.Root;
+            end loop;
+
+            Tree.Nodes.Append (Tree.Root);
+         end if;
+      end;
+
+      --  Clear saved element list cursors in parse streams before freeing
+      --  the element lists, so they don't try to decrement reference counts
+      --  in deallocated elements. We can't rely on cursor Finalize for
+      --  this; that's done in arbitrary order.
+      for Stream of Tree.Streams loop
+         Stream.Stack_Top   := Stream_Element_Lists.No_Element;
+         Stream.Shared_Link := Stream_Element_Lists.No_Element;
+      end loop;
+
+      Tree.Streams.Clear;
+      Tree.Next_Stream_Label := Shared_Stream_Label + 1;
+
+      Tree.Shared_Stream.Cur := Parse_Stream_Lists.No_Element;
+
+      if not Tree.Parents_Set then
+         Set_Parents (Tree);
+      end if;
+
+      for Node of Tree.Nodes loop
+         --  Only nodes that have parents are part of the final parse result.
+         --  In an incremental parse, breakdown removes nodes from a parse
+         --  stream, and clears any parent pointers involved.
+         if Node.Parent = null and then
+           Node /= Tree.Root and then
+           Node /= Tree.SOI and then
+           Node /= Tree.EOI and then
+           not (for some N of Keep_Nodes => N = Node)
+         then
+            --  It is tempting to try to enforce that all deleted nonterms have
+            --  Children = (others => Invalid_Node_Access) here. However, that is
+            --  not true when Breakdown is called by the main parser;
+            --  Tree.Parents_Set is false, indicating there might be multiple
+            --  streams, so Breakdown does not clear children.
+            Free (Node);
+         end if;
+      end loop;
+
+      --  Compact Tree.Nodes
+      declare
+         Free : Node_Index := Tree.Nodes.First_Index - 1;
+      begin
+         for I in Tree.Nodes.First_Index .. Tree.Nodes.Last_Index loop
+            if Free < Tree.Nodes.First_Index then
+               if Tree.Nodes (I) = Invalid_Node_Access then
+                  Free := I;
+               end if;
+            else
+               if Tree.Nodes (I) /= Invalid_Node_Access then
+                  Tree.Nodes (Free) := Tree.Nodes (I);
+                  Free := @ + 1;
+               end if;
+            end if;
+         end loop;
+
+         if Free > Tree.Nodes.First_Index then
+            Tree.Nodes.Set_First_Last (First => Tree.Nodes.First_Index, Last 
=> Free - 1);
+         end if;
+      end;
+   end Clear_Parse_Streams;
+
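+   --  Return the zero-origin column of the first character of Node,
+   --  or 0 if the character region or line start is not known.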
+   function Column (Tree : in Syntax_Trees.Tree; Node : in Valid_Node_Access) 
return Ada.Text_IO.Count
+   is
+      Char_Region : constant Buffer_Region := Tree.Char_Region (Node, 
Trailing_Non_Grammar => False);
+   begin
+      if Char_Region = Null_Buffer_Region then
+         return 0;
+      else
+         declare
+            Begin_Char_Pos : constant Buffer_Pos := Line_Begin_Char_Pos
+              (Tree, Line_Region (Tree, Node, Trailing_Non_Grammar => 
True).First);
+         begin
+            return
+              (if Begin_Char_Pos = Invalid_Buffer_Pos
+               then 0
+               else Ada.Text_IO.Count (Char_Region.First - Begin_Char_Pos));
+         end;
+      end if;
+   end Column;
+
+   function Column
+     (Tree   : in Syntax_Trees.Tree;
+      Node   : in Valid_Node_Access;
+      Stream : in Stream_ID)
+     return Ada.Text_IO.Count
+   is
+      Char_Region : constant Buffer_Region := Tree.Char_Region (Node, 
Trailing_Non_Grammar => False);
+   begin
+      if Char_Region.First = Invalid_Buffer_Pos then
+         return 0;
+      else
+         declare
+            Begin_Char_Pos : constant Buffer_Pos := Line_Begin_Char_Pos
+              (Tree, Tree.Line_Region (Node, Trailing_Non_Grammar => 
True).First, Stream);
+         begin
+            return
+              (if Begin_Char_Pos = Invalid_Buffer_Pos
+               then 0
+               else Ada.Text_IO.Count (Char_Region.First - Begin_Char_Pos));
+         end;
+      end if;
+   end Column;
+
+   function Contains
+     (Tree   : in Syntax_Trees.Tree;
+      Stream : in Stream_ID;
+      Token  : in Stream_Index)
+     return Boolean
+   is begin
+      return (Tree.Is_Valid (Stream) and Token /= Invalid_Stream_Index) and 
then
+        Tree.Streams (Stream.Cur).Elements.Contains (Token.Cur);
+   end Contains;
+
+   function Contains_Error
+     (Tree       : in Syntax_Trees.Tree;
+      Error_Node : in Valid_Node_Access;
+      Data       : in Error_Data'Class)
+     return Boolean
+   is begin
+      if Error_Node.Error_List = null then
+         return False;
+      else
+         return (for some Err of Error_Node.Error_List.all => Dispatch_Equal 
(Data, Err));
+      end if;
+   end Contains_Error;
+
+   function Contains_Virtual_Terminal
+     (Tree   : in Syntax_Trees.Tree;
+      Item   : in Recover_Token)
+     return Boolean
+   is
+      pragma Unreferenced (Tree);
+   begin
+      return
+        (if Item.Virtual
+         then Item.Contains_Virtual_Terminal
+         else
+           (case Item.Element_Node.Label is
+            when Source_Terminal => False,
+            when Virtual_Terminal | Virtual_Identifier => True,
+            when Nonterm => Item.Element_Node.Virtual));
+   end Contains_Virtual_Terminal;
+
+   function Contains_Virtual_Terminal
+     (Tree : in Syntax_Trees.Tree;
+      Node : in Valid_Node_Access)
+     return Boolean
+   is
+      pragma Unreferenced (Tree);
+   begin
+      return
+        (case Node.Label is
+         when Source_Terminal => False,
+         when Virtual_Terminal | Virtual_Identifier => True,
+         when Nonterm => Node.Virtual);
+   end Contains_Virtual_Terminal;
+
+   function Copied_Node (Node : in Valid_Node_Access) return Node_Access
+   is begin
+      return Node.Copied_Node;
+   end Copied_Node;
+
+   procedure Copy_Ancestors
+     (Tree      : in out Syntax_Trees.Tree;
+      Ref       : in out Stream_Node_Parents;
+      New_Node  : in     Valid_Node_Access;
+      User_Data : in     User_Data_Access_Constant)
+   --  Replace Ref.Node with New_Node, copying all parents thru
+   --  Ref.Element, updating Ref.Element and Ref.Parents to match.
+   is
+      Temp      : Valid_Node_Access := New_Node;
+      Old_Child : Valid_Node_Access := Ref.Ref.Node;
+      New_Stack : Node_Stacks.Stack;
+   begin
+      Ref.Ref.Node := New_Node;
+      if Ref.Parents.Depth = 0 then
+         --  ada_mode-recover_debbugs_36548.adb
+         return;
+      end if;
+      loop
+         declare
+            Old_Parent  : constant Valid_Node_Access := Ref.Parents.Pop;
+            New_Child   : constant Valid_Node_Access := Temp;
+            Child_Index : constant SAL.Peek_Type     := 
Syntax_Trees.Child_Index
+              (Parent => Old_Parent.all, Child => Old_Child);
+         begin
+            Temp := Copy_Node
+              (Tree, Old_Parent,
+               Parent        =>
+                 (if Tree.Parents_Set
+                  then Old_Parent.Parent
+                  else Invalid_Node_Access),
+               User_Data              => User_Data,
+               Copy_Children          => False,
+               Copy_Following_Deleted => True);
+            Temp.Children (Child_Index) := New_Child;
+            Old_Child.Parent            := Invalid_Node_Access;
+            if Tree.Parents_Set then
+               New_Child.Parent := Temp;
+            end if;
+            New_Stack.Push (Temp);
+
+            if Ref.Parents.Depth = 0 then
+               if Old_Parent = Tree.Root then
+                  Tree.Root := Temp;
+               end if;
+               exit;
+            end if;
+
+            Old_Child := Old_Parent;
+         end;
+      end loop;
+
+      Replace_Node (Ref.Ref.Element, New_Stack.Peek);
+
+      loop
+         Ref.Parents.Push (New_Stack.Pop);
+         exit when New_Stack.Depth = 0;
+      end loop;
+   end Copy_Ancestors;
+
+   function Copy_Augmented
+     (User_Data : in User_Data_Type;
+      Augmented : in Augmented_Class_Access)
+     return Augmented_Class_Access
+   is begin
+      raise SAL.Programmer_Error;
+      return null;
+   end Copy_Augmented;
+
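+   --  Copy Node, appending the copy to Tree.Nodes. If Copy_Children,
+   --  recursively copy the subtree; otherwise the copy shares Node's
+   --  children and (when Copy_Following_Deleted) its Following_Deleted
+   --  list.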
+   function Copy_Node
+     (Tree                   : in out Syntax_Trees.Tree;
+      Node                   : in     Valid_Node_Access;
+      Parent                 : in     Node_Access;
+      User_Data              : in     User_Data_Access_Constant;
+      Copy_Children          : in     Boolean;
+      Copy_Following_Deleted : in     Boolean;
+      New_Error_List         : in     Error_List_Access := null;
+      Set_Error_List         : in     Boolean           := False;
+      Set_Copied_Node        : in     Boolean           := False)
+     return Valid_Node_Access
+   is
+      use all type Error_Data_Lists.List;
+      New_Node : Node_Access;
+   begin
+      case Node.Label is
+      when Source_Terminal =>
+         New_Node := new Syntax_Trees.Node'
+           (Label          => Source_Terminal,
+            Copied_Node    => Invalid_Node_Access,
+            Child_Count    => 0,
+            ID             => Node.ID,
+            Node_Index     => Tree.Next_Terminal_Node_Index,
+            Byte_Region    => Node.Byte_Region,
+            Char_Region    => Node.Char_Region,
+            New_Line_Count => Node.New_Line_Count,
+            Parent         => Parent,
+            Augmented      =>
+              (if Node.Augmented = null or User_Data = null
+               then null
+               else Copy_Augmented (User_Data.all, Node.Augmented)),
+            Error_List        =>
+              (if Set_Error_List
+               then New_Error_List
+               else
+                 (if Node.Error_List = null
+                  then null
+                  else new Error_Data_Lists.List'(Node.Error_List.all))),
+            Non_Grammar       => Node.Non_Grammar,
+            Sequential_Index  => Node.Sequential_Index,
+            Following_Deleted => Valid_Node_Access_Lists.Empty_List);
+
+         if Copy_Following_Deleted then
+            if Copy_Children then
+               for Deleted_Node of Node.Following_Deleted loop
+                  New_Node.Following_Deleted.Append
+                    (Copy_Node (Tree, Deleted_Node, New_Node, User_Data, 
Copy_Children => False,
+                                Copy_Following_Deleted => False));
+               end loop;
+            else
+               New_Node.Following_Deleted := Node.Following_Deleted;
+            end if;
+         end if;
+
+         Tree.Next_Terminal_Node_Index := @ + 1;
+
+      when Virtual_Terminal =>
+         New_Node := new Syntax_Trees.Node'
+           (Label           => Virtual_Terminal,
+            Copied_Node     => Invalid_Node_Access,
+            Child_Count     => 0,
+            ID              => Node.ID,
+            Node_Index      => -(Tree.Nodes.Last_Index + 1),
+            Parent          => Parent,
+            Augmented       =>
+              (if Node.Augmented = null or User_Data = null
+               then null
+               else Copy_Augmented (User_Data.all, Node.Augmented)),
+            Error_List       => New_Error_List,
+            Non_Grammar      => Node.Non_Grammar,
+            Sequential_Index => Node.Sequential_Index,
+            Insert_Location  => Node.Insert_Location);
+
+      when Virtual_Identifier =>
+
+         New_Node := new Syntax_Trees.Node'
+           (Label       => Virtual_Identifier,
+            Copied_Node => Invalid_Node_Access,
+            Child_Count => 0,
+            ID          => Node.ID,
+            Node_Index  => -(Tree.Nodes.Last_Index + 1),
+            Parent      => Parent,
+            Augmented   =>
+              (if Node.Augmented = null or User_Data = null
+               then null
+               else Copy_Augmented (User_Data.all, Node.Augmented)),
+            Error_List       => New_Error_List,
+            Non_Grammar      => Node.Non_Grammar,
+            Sequential_Index => Node.Sequential_Index,
+            Identifier       => Node.Identifier,
+            Insert_Location  => Node.Insert_Location);
+
+      when Nonterm =>
+         --  Copy children first to preserve Node_Index order = parse order
+         --  in a batch parsed tree.
+         declare
+            New_Children : Node_Access_Array (Node.Children'Range);
+         begin
+            if Copy_Children then
+               for I in New_Children'Range loop
+                  New_Children (I) := Copy_Node
+                    (Tree, Node.Children (I), Invalid_Node_Access, User_Data, 
Copy_Children,
+                     Copy_Following_Deleted => True);
+               end loop;
+            else
+               New_Children := Node.Children;
+            end if;
+
+            New_Node := new Syntax_Trees.Node'
+              (Label       => Nonterm,
+               Copied_Node => Invalid_Node_Access,
+               Child_Count => Node.Child_Count,
+               ID          => Node.ID,
+               Node_Index  => -(Tree.Nodes.Last_Index + 1),
+               Parent      => Parent,
+               Augmented   =>
+                 (if Node.Augmented = null or User_Data = null
+                  then null
+                  else Copy_Augmented (User_Data.all, Node.Augmented)),
+               Error_List       => New_Error_List,
+               Virtual          => Node.Virtual,
+               Recover_Conflict => Node.Recover_Conflict,
+               RHS_Index        => Node.RHS_Index,
+               Name_Offset      => Node.Name_Offset,
+               Name_Length      => Node.Name_Length,
+               Children         => New_Children);
+
+            if Copy_Children then
+               pragma Assert (Tree.Parents_Set);
+               for Child of New_Node.Children loop
+                  Child.Parent := New_Node;
+               end loop;
+
+            else
+               if Tree.Parents_Set then
+                  Set_Children (Tree, New_Node, Node.Children);
+               else
+                  New_Node.Children := Node.Children;
+               end if;
+            end if;
+         end;
+      end case;
+
+      Tree.Nodes.Append (New_Node);
+
+      if Set_Copied_Node then
+         Node.Copied_Node := New_Node;
+      end if;
+
+      return New_Node;
+   end Copy_Node;
+
+   function Copy_Subtree
+     (Tree      : in out Syntax_Trees.Tree;
+      Root      : in     Node_Access;
+      User_Data : in     User_Data_Access_Constant)
+     return Node_Access
+   is begin
+      if Root = Invalid_Node_Access then
+         return Invalid_Node_Access;
+      else
+         return Copy_Node (Tree, Root, Invalid_Node_Access, User_Data, 
Copy_Children => True,
+                           Copy_Following_Deleted => True);
+      end if;
+   end Copy_Subtree;
+
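+   --  Deep copy Source into Destination, copying error lists and setting
+   --  parent links; Copied_Node is set on each copied source node during
+   --  the copy, and cleared when the copy is complete.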
+   procedure Copy_Tree
+     (Source      : in     Tree;
+      Destination :    out Tree;
+      User_Data   : in     User_Data_Access_Constant)
+   is
+      Next_Terminal_Node_Index : Node_Index := 0;
+
+      function Copy_Node
+        (Source_Node : in Valid_Node_Access;
+         Dest_Parent : in Node_Access)
+        return Valid_Node_Access
+      is
+         New_Dest_Node : Node_Access;
+
+         function Copy_Errors return Error_List_Access
+         is (if Source_Node.Error_List = null
+             then null
+             else new Error_Data_Lists.List'(Source_Node.Error_List.all));
+
+      begin
+         case Source_Node.Label is
+         when Source_Terminal =>
+
+            if Next_Terminal_Node_Index = 0 and Source_Node.ID /= 
Source.Lexer.Descriptor.SOI_ID then
+               --  SOI is normally the first terminal seen.
+               Next_Terminal_Node_Index := @ + 1;
+            end if;
+
+            New_Dest_Node := new Syntax_Trees.Node'
+              (Label       => Source_Terminal,
+               Copied_Node => Invalid_Node_Access,
+               Child_Count => 0,
+               ID          => Source_Node.ID,
+               Node_Index  => Next_Terminal_Node_Index,
+               Parent      => Dest_Parent,
+               Augmented   =>
+                 (if Source_Node.Augmented = null or User_Data = null
+                  then null
+                  else Copy_Augmented (User_Data.all, Source_Node.Augmented)),
+               Error_List        => Copy_Errors,
+               Byte_Region       => Source_Node.Byte_Region,
+               Char_Region       => Source_Node.Char_Region,
+               New_Line_Count    => Source_Node.New_Line_Count,
+               Non_Grammar       => Source_Node.Non_Grammar,
+               Sequential_Index  => Source_Node.Sequential_Index,
+               Following_Deleted => Valid_Node_Access_Lists.Empty_List);
+
+            Next_Terminal_Node_Index := @ + 1;
+
+            if New_Dest_Node.ID = Source.Lexer.Descriptor.SOI_ID then
+               Destination.SOI := New_Dest_Node;
+            elsif New_Dest_Node.ID = Source.Lexer.Descriptor.EOI_ID then
+               Destination.EOI  := New_Dest_Node;
+            end if;
+
+            for Deleted of Source_Node.Following_Deleted loop
+               New_Dest_Node.Following_Deleted.Append (Copy_Node (Deleted, 
New_Dest_Node));
+            end loop;
+
+         when Virtual_Terminal =>
+
+            New_Dest_Node := new Syntax_Trees.Node'
+              (Label       => Virtual_Terminal,
+               Copied_Node => Invalid_Node_Access,
+               Child_Count => 0,
+               ID          => Source_Node.ID,
+               Node_Index  => -(Destination.Nodes.Last_Index + 1),
+               Parent      => Dest_Parent,
+               Augmented   =>
+                 (if Source_Node.Augmented = null or User_Data = null
+                  then null
+                  else Copy_Augmented (User_Data.all, Source_Node.Augmented)),
+               Error_List       => Copy_Errors,
+               Non_Grammar      => Source_Node.Non_Grammar,
+               Sequential_Index => Source_Node.Sequential_Index,
+               Insert_Location  => Source_Node.Insert_Location);
+
+         when Virtual_Identifier =>
+
+            New_Dest_Node := new Syntax_Trees.Node'
+              (Label       => Virtual_Identifier,
+               Copied_Node => Invalid_Node_Access,
+               Child_Count => 0,
+               ID          => Source_Node.ID,
+               Node_Index  => -(Destination.Nodes.Last_Index + 1),
+               Parent      => Dest_Parent,
+               Augmented   =>
+                 (if Source_Node.Augmented = null or User_Data = null
+                  then null
+                  else Copy_Augmented (User_Data.all, Source_Node.Augmented)),
+               Error_List       => Copy_Errors,
+               Non_Grammar      => Source_Node.Non_Grammar,
+               Sequential_Index => Source_Node.Sequential_Index,
+               Identifier       => Source_Node.Identifier,
+               Insert_Location  => Source_Node.Insert_Location);
+
+         when Nonterm =>
+            --  Copy children first to preserve Node_Index order = parse order
+            --  in a batch parsed tree.
+            declare
+               New_Children : Node_Access_Array (Source_Node.Children'Range);
+            begin
+               for I in New_Children'Range loop
+                  New_Children (I) := Copy_Node (Source_Node.Children (I), 
Dummy_Node);
+               end loop;
+
+               New_Dest_Node := new Syntax_Trees.Node'
+                 (Label       => Nonterm,
+                  Copied_Node => Invalid_Node_Access,
+                  Child_Count => Source_Node.Child_Count,
+                  ID          => Source_Node.ID,
+                  Node_Index  => -(Destination.Nodes.Last_Index + 1),
+                  Parent      => Dest_Parent,
+                  Augmented   =>
+                    (if Source_Node.Augmented = null or User_Data = null
+                     then null
+                     else Copy_Augmented (User_Data.all, 
Source_Node.Augmented)),
+                  Error_List       => Copy_Errors,
+                  Virtual          => Source_Node.Virtual,
+                  Recover_Conflict => Source_Node.Recover_Conflict,
+                  RHS_Index        => Source_Node.RHS_Index,
+                  Name_Offset      => Source_Node.Name_Offset,
+                  Name_Length      => Source_Node.Name_Length,
+                  Children         => New_Children);
+
+               for Child of New_Dest_Node.Children loop
+                  Child.Parent := New_Dest_Node;
+               end loop;
+            end;
+
+         end case;
+         Source_Node.Copied_Node := New_Dest_Node;
+         Destination.Nodes.Append (New_Dest_Node);
+         return New_Dest_Node;
+      end Copy_Node;
+   begin
+      Destination.Clear (Free_Memory => False);
+      Destination.Lexer                    := Source.Lexer;
+      Destination.Next_Terminal_Node_Index := Source.Next_Terminal_Node_Index;
+      Destination.Traversing               := False;
+      Destination.Parents_Set              := True;
+
+      Destination.Root := Copy_Node (Source.Root, Invalid_Node_Access);
+
+      for Err in Destination.Error_Iterate loop
+         Error_Data_Lists.Variable_Ref (Err.Error).Adjust_Copy;
+      end loop;
+
+      for Node of Source.Nodes loop
+         Node.Copied_Node := Invalid_Node_Access;
+      end loop;
+   end Copy_Tree;
+
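+   --  True if Ref is Invalid_Stream_Node_Ref, or if Ref.Element is valid
+   --  and Ref.Node (when set, and parents are set) is in the subtree
+   --  rooted at Ref.Element's node.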
+   function Correct_Stream_Node
+     (Tree : in Syntax_Trees.Tree;
+      Ref  : in Stream_Node_Ref)
+     return Boolean
+   is begin
+      return Ref = Invalid_Stream_Node_Ref or else
+        (Ref.Element /= Invalid_Stream_Index and then
+           (Ref.Node = Invalid_Node_Access or else
+              (not Tree.Parents_Set or else
+                 Tree.Subtree_Root (Ref.Node) = Tree.Get_Node (Ref.Stream, 
Ref.Element))));
+   end Correct_Stream_Node;
+
+   function Count_IDs
+     (Tree : in Syntax_Trees.Tree;
+      Node : in Valid_Node_Access;
+      ID   : in Token_ID)
+     return SAL.Base_Peek_Type
+   is begin
+      return Result : SAL.Base_Peek_Type := 0 do
+         if Node.ID = ID then
+            Result := 1;
+         end if;
+         case Node.Label is
+         when Nonterm =>
+            for I of Node.Children loop
+               --  We don't check for deleted child here; encountering one
+               --  indicates an error in the user algorithm.
+               Result := @ + Count_IDs (Tree, I, ID);
+            end loop;
+         when others =>
+            null;
+         end case;
+      end return;
+   end Count_IDs;
+
+   function Count_Terminals
+     (Tree : in Syntax_Trees.Tree;
+      Node : in Valid_Node_Access)
+     return Natural
+   --  Count_Terminals must return Integer for Get_Terminals,
+   --  Positive_Index_Type for Get_Terminal_IDs.
+   is begin
+      case Node.Label is
+      when Source_Terminal | Virtual_Terminal | Virtual_Identifier =>
+         return 1;
+
+      when Nonterm =>
+         return Result : Natural := 0 do
+            for C of Node.Children loop
+               --  This can be called to build a debugging image while
+               --  editing the tree.
+               if C /= null then
+                  Result := Result + Count_Terminals (Tree, C);
+               end if;
+            end loop;
+         end return;
+      end case;
+   end Count_Terminals;
+
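+   --  Return a reference to the current error, checking in order: the
+   --  first element of the parse stream input, the shared stream input,
+   --  then the stack top (an in-parse error on the last reduce). Raise
+   --  SAL.Programmer_Error if no matching error is found.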
+   function Current_Error_Ref_No_Search
+     (Tree               : in Syntax_Trees.Tree;
+      Stream             : in Stream_ID;
+      Terminal_Predicate : in Error_Predicate;
+      Nonterm_Predicate  : in Error_Predicate)
+     return Stream_Error_Ref
+   is
+      --  IMPROVEME: move Parse_Error etc into syntax_trees so we don't have
+      --  to pass predicates here.
+
+      use Stream_Element_Lists;
+      use all type Error_Data_Lists.Cursor;
+
+      Parse_Stream : Syntax_Trees.Parse_Stream renames Tree.Streams 
(Stream.Cur);
+   begin
+      return Result : Stream_Error_Ref do
+
+         --  First look in Parse_Stream input.
+         Result.Ref.Ref.Stream  := Stream;
+         Result.Ref.Ref.Element := (Cur => Next (Parse_Stream.Stack_Top));
+
+         if Result.Ref.Ref.Element.Cur /= No_Element then
+            Result.Ref.Ref.Node := Element (Result.Ref.Ref.Element.Cur).Node;
+            if Result.Ref.Ref.Node.Label = Nonterm then
+               Tree.First_Terminal (Result.Ref, Following => False);
+            end if;
+            Result.Error := Find_Match (Result.Ref.Ref.Node.Error_List, 
Terminal_Predicate);
+         end if;
+
+         if Result.Error = Error_Data_Lists.No_Element then
+            --  Try Shared_Stream
+            Result.Ref.Ref :=
+              (Stream  => Tree.Shared_Stream,
+               Element => (Cur => Parse_Stream.Shared_Link),
+               Node    => Element (Parse_Stream.Shared_Link).Node);
+
+            if Result.Ref.Ref.Node.Label = Nonterm then
+               Tree.First_Terminal (Result.Ref, Following => False);
+            end if;
+            Result.Error := Find_Match (Result.Ref.Ref.Node.Error_List, 
Terminal_Predicate);
+         end if;
+
+         if Result.Error = Error_Data_Lists.No_Element then
+            --  Must be an In_Parse_Error on the last reduce.
+            Result.Ref.Ref := (Stream, (Cur => Parse_Stream.Stack_Top), 
Element (Parse_Stream.Stack_Top).Node);
+
+            if Result.Ref.Ref.Node /= Syntax_Trees.Invalid_Node_Access then
+               Result.Error := Find_Match (Result.Ref.Ref.Node.Error_List, 
Nonterm_Predicate);
+            end if;
+         end if;
+
+         if Result.Error = Error_Data_Lists.No_Element then
+            raise SAL.Programmer_Error with "current_error_ref: no matching error found";
+         end if;
+      end return;
+   end Current_Error_Ref_No_Search;
+
+   function Current_Error_Ref
+     (Tree                : in Syntax_Trees.Tree;
+      Stream              : in Stream_ID;
+      Terminal_Predicate  : in Error_Predicate;
+      Nonterm_Predicate   : in Error_Predicate;
+      Error_Node_Features : in Syntax_Trees.Error_Node_Features := (others => 
<>))
+     return Stream_Error_Ref
+   is
+      --  IMPROVEME: move Parse_Error etc into syntax_trees so we don't have
+      --  to pass predicates here.
+
+      use Stream_Element_Lists;
+      use all type Error_Data_Lists.Cursor;
+
+      Parse_Stream : Syntax_Trees.Parse_Stream renames Tree.Streams 
(Stream.Cur);
+
+      Last_Terminal  : Stream_Node_Parents;
+      First_Terminal : Stream_Node_Parents;
+      --  Of the current search node
+
+      Target_Seq_Index : constant Base_Sequential_Index :=
+        (if Error_Node_Features.Deleted
+         then Error_Node_Features.Prev_Term_Seq_Index
+         else Error_Node_Features.Seq_Index);
+
+      Last_Step_Towards_EOI : Boolean;
+
+      procedure Step (Ref : in out Stream_Node_Parents; Towards_EOI : in Boolean)
+      --  Step Ref.Ref.Element in direction given by Towards_EOI, skipping
+      --  empty nonterms.
+      is begin
+         Last_Step_Towards_EOI := Towards_EOI;
+         loop
+            if Towards_EOI then
+               Next (Ref.Ref.Element.Cur);
+
+               if Ref.Ref.Element.Cur = Stream_Element_Lists.No_Element then
+                  if Ref.Ref.Stream /= Tree.Shared_Stream then
+                     --  Try Shared_Stream
+                     Ref.Ref.Stream  := Tree.Shared_Stream;
+                     Ref.Ref.Element := (Cur => Parse_Stream.Shared_Link);
+                  else
+                     raise SAL.Programmer_Error with
+                       "current_error_ref: no matching error found";
+                  end if;
+               end if;
+            else
+               Previous (Ref.Ref.Element.Cur);
+               if Ref.Ref.Element.Cur = Stream_Element_Lists.No_Element then
+                  raise SAL.Programmer_Error with
+                    "current_error_ref: no matching error found";
+               end if;
+            end if;
+
+            Ref.Ref.Node := Element (Ref.Ref.Element.Cur).Node;
+            Ref.Parents  := Node_Stacks.Empty_Stack;
+
+            Last_Terminal := Ref;
+            Tree.Last_Sequential_Terminal (Last_Terminal, Stream, Preceding => 
False);
+            exit when Last_Terminal.Ref.Node /= Invalid_Node_Access;
+         end loop;
+         First_Terminal := Ref;
+         Tree.First_Sequential_Terminal (First_Terminal, Following => False);
+      end Step;
+
+      function Handle_Deleted (Result : in out Stream_Error_Ref) return Boolean
+      is begin
+         if Result.Ref.Ref.Node.Label = Source_Terminal and then
+           Result.Ref.Ref.Node.Sequential_Index = 
Error_Node_Features.Prev_Term_Seq_Index
+         then
+            for Cur in Result.Ref.Ref.Node.Following_Deleted.Iterate loop
+               declare
+                  Node : Valid_Node_Access renames 
Result.Ref.Ref.Node.Following_Deleted (Cur);
+               begin
+                  if Node.Sequential_Index = Error_Node_Features.Seq_Index then
+                     Result.Error   := Find_Match (Node.Error_List, 
Terminal_Predicate);
+                     Result.Deleted := Cur;
+                     return True;
+                  end if;
+               end;
+            end loop;
+            return True;
+         else
+            return False;
+         end if;
+      end Handle_Deleted;
+
+      procedure Set_First_Last (Ref : in Stream_Node_Parents)
+      --  Set First_, Last_Terminal; null if Ref.Ref.Node is an empty nonterm.
+      is begin
+         Last_Terminal := Ref;
+         Tree.Last_Sequential_Terminal (Last_Terminal, Stream, Preceding => 
False);
+         First_Terminal := Ref;
+         Tree.First_Sequential_Terminal (First_Terminal, Following => False);
+      end Set_First_Last;
+
+      function First_Child (Ref : in out Stream_Node_Parents) return Boolean
+      with Pre => Ref.Ref.Node.Label = Nonterm
+      --  If Ref.Ref.Node is an empty nonterm, return False. Otherwise,
+      --  update Ref to the first child of Ref.Ref.Node and return True.
+      is begin
+         if Ref.Ref.Node.Child_Count = 0 then
+            return False;
+         end if;
+
+         Ref.Parents.Push (Ref.Ref.Node);
+         Ref.Ref.Node := Ref.Ref.Node.Children (1);
+         return True;
+      end First_Child;
+
+      function Next_Sibling (Ref : in out Stream_Node_Parents) return Boolean
+      --  Step Ref to next sibling of Ref.Ref.Node. If Ref.Ref.Node is last
+      --  child of parent, leave Ref at next sibling of parent. Do not step
+      --  Ref.Ref.Element. Return True if there is such a sibling, False
+      --  when not (i.e. Ref.Parents is or becomes empty).
+      is begin
+         loop
+            if Ref.Parents.Is_Empty then
+               return False;
+            end if;
+
+            declare
+               Child_Index : constant SAL.Peek_Type := Syntax_Trees.Child_Index
+                 (Parent => Ref.Parents.Peek.all, Child => Ref.Ref.Node);
+            begin
+               if Child_Index = Ref.Parents.Peek.Child_Count then
+                  Ref.Ref.Node := Ref.Parents.Pop;
+               else
+                  Ref.Ref.Node := Ref.Parents.Peek.Children (Child_Index + 1);
+                  return True;
+               end if;
+            end;
+         end loop;
+      end Next_Sibling;
+
+   begin
+      if Error_Node_Features.Seq_Index = Invalid_Sequential_Index then
+         return Current_Error_Ref_No_Search (Tree, Stream, Terminal_Predicate, 
Nonterm_Predicate);
+      end if;
+
+      return Result : Stream_Error_Ref do
+         --  In the worst case, the error node can be anywhere in the stack or
+         --  stream input. In the stack, it can be on an arbitrary descendant
+         --  of the element root. In the stream input, it can be on the first
+         --  terminal of a nonterm.
+         --
+         --  So we assume the worst case, and use first/last terminal
+         --  sequential_index to guide the search when descending a subtree.
+
+         Result.Ref.Ref := (Stream, (Cur => Parse_Stream.Stack_Top), Element 
(Parse_Stream.Stack_Top).Node);
+         Last_Terminal  := Result.Ref;
+         Tree.Last_Sequential_Terminal (Last_Terminal, Stream, Preceding => 
False);
+         if Last_Terminal.Ref.Node = Invalid_Node_Access then
+            Step (Result.Ref, Towards_EOI => False);
+         else
+            First_Terminal := Result.Ref;
+            Tree.First_Sequential_Terminal (First_Terminal, Following => 
False);
+         end if;
+
+         Search_Stream :
+         loop
+            if Target_Seq_Index < First_Terminal.Ref.Node.Sequential_Index then
+               Step (Result.Ref, Towards_EOI => False);
+
+            elsif Target_Seq_Index > Last_Terminal.Ref.Node.Sequential_Index 
then
+               Step (Result.Ref, Towards_EOI => True);
+
+            else
+               Search_Subtree :
+               loop
+                  case Error_Node_Features.Label is
+                  when Terminal_Label =>
+                     exit Search_Stream when Error_Node_Features.Deleted and 
then Handle_Deleted (Result);
+
+                     if Result.Ref.Ref.Node.Label = Error_Node_Features.Label 
and then
+                       Result.Ref.Ref.Node.Sequential_Index = Target_Seq_Index
+                     then
+                        Result.Error := Find_Match 
(Result.Ref.Ref.Node.Error_List, Terminal_Predicate);
+                        exit Search_Stream;
+                     end if;
+
+                  when Nonterm =>
+                     if Result.Ref.Ref.Node.Label = Error_Node_Features.Label 
and then
+                       First_Terminal.Ref.Node.Sequential_Index = 
Target_Seq_Index
+                     then
+                        Result.Error := Find_Match 
(Result.Ref.Ref.Node.Error_List, Nonterm_Predicate);
+                        exit Search_Stream when Result.Error /= 
Error_Data_Lists.No_Element;
+                     end if;
+                  end case;
+
+                  --  Continue searching
+                  case Result.Ref.Ref.Node.Label is
+                  when Terminal_Label =>
+                     exit Search_Subtree when not Next_Sibling (Result.Ref);
+
+                  when Nonterm =>
+                     if First_Child (Result.Ref) then
+
+                        Find_Child :
+                        loop
+                           Set_First_Last (Result.Ref);
+
+                           if Last_Terminal.Ref.Node /= Invalid_Node_Access 
and then
+                             Target_Seq_Index <= 
Last_Terminal.Ref.Node.Sequential_Index
+                           then
+                              --  Search this child
+                              exit Find_Child;
+                           else
+                              exit Search_Subtree when not Next_Sibling 
(Result.Ref);
+                           end if;
+                        end loop Find_Child;
+
+                     else
+                        exit Search_Subtree when not Next_Sibling (Result.Ref);
+                     end if;
+                  end case;
+               end loop Search_Subtree;
+
+               Step (Result.Ref, Last_Step_Towards_EOI);
+            end if;
+
+         end loop Search_Stream;
+
+         if Result.Error = Error_Data_Lists.No_Element then
+            raise SAL.Programmer_Error with "current_error_ref: no matching error found";
+         end if;
+      end return;
+   end Current_Error_Ref;
+
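+   --  Return the current input token: the shared stream token at
+   --  Shared_Link when the parse stream input is empty, otherwise the
+   --  parse stream element after Stack_Top.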
+   function Current_Token
+     (Tree   : in Syntax_Trees.Tree;
+      Stream : in Stream_ID)
+     return Rooted_Ref
+   is
+      Parse_Stream : Syntax_Trees.Parse_Stream renames Tree.Streams 
(Stream.Cur);
+   begin
+      if Parse_Stream.Stack_Top = Parse_Stream.Elements.Last then
+         return
+           (Stream  => Tree.Shared_Stream,
+            Element => (Cur => Parse_Stream.Shared_Link),
+            Node    => Stream_Element_Lists.Element 
(Parse_Stream.Shared_Link).Node);
+      else
+         declare
+            El : constant Stream_Element_Lists.Cursor := 
Stream_Element_Lists.Next (Parse_Stream.Stack_Top);
+         begin
+            return
+              (Stream  => Stream,
+               Element => (Cur => El),
+               Node    => Stream_Element_Lists.Element (El).Node);
+         end;
+      end if;
+   end Current_Token;
+
+   procedure Delete_Current_Token
+     (Tree   : in out Syntax_Trees.Tree;
+      Stream : in     Stream_ID)
+   is
+      Parse_Stream : Syntax_Trees.Parse_Stream renames Tree.Streams 
(Stream.Cur);
+   begin
+      if Parse_Stream.Stack_Top = Parse_Stream.Elements.Last then
+         --  Input is Shared_Link
+         Stream_Element_Lists.Next (Parse_Stream.Shared_Link);
+      else
+         --  Input is Stream input.
+         declare
+            use Stream_Element_Lists;
+            To_Delete : Cursor := Next (Parse_Stream.Stack_Top);
+         begin
+            --  Any Non_Grammar on To_Delete should be moved in Delete_Token,
+            --  called by Execute_Actions after parse is complete.
+            Parse_Stream.Elements.Delete (To_Delete);
+         end;
+      end if;
+   end Delete_Current_Token;
+
+   procedure Delete_Error
+     (Tree  : in out Syntax_Trees.Tree;
+      Error : in out Error_Ref)
+   is
+      Error_Node : constant Valid_Node_Access :=
+        (if Error.Deleted = Valid_Node_Access_Lists.No_Element
+         then Error.Node
+         else Error.Node.Following_Deleted (Error.Deleted));
+
+      To_Delete : Error_Data_Lists.Cursor := Error.Error;
+   begin
+      Tree.Next_Error (Error);
+      Error_Node.Error_List.Delete (To_Delete);
+      if Error_Node.Error_List.Length = 0 then
+         Free (Error_Node.Error_List);
+      end if;
+   end Delete_Error;
+
+   procedure Delete_Error
+     (Tree  : in out Syntax_Trees.Tree;
+      Error : in out Stream_Error_Ref)
+   is
+      Error_Node : constant Valid_Node_Access :=
+        (if Error.Deleted = Valid_Node_Access_Lists.No_Element
+         then Error.Ref.Ref.Node
+         else Error.Ref.Ref.Node.Following_Deleted (Error.Deleted));
+
+      To_Delete : Error_Data_Lists.Cursor := Error.Error;
+   begin
+      Tree.Next_Error (Error);
+      Error_Node.Error_List.Delete (To_Delete);
+      if Error_Node.Error_List.Length = 0 then
+         Free (Error_Node.Error_List);
+      end if;
+   end Delete_Error;
+
+   procedure Delete_Errors_In_Input
+     (Tree      : in out Syntax_Trees.Tree;
+      Stream    : in     Stream_ID;
+      Predicate : in     Error_Predicate;
+      User_Data : in     User_Data_Access_Constant)
+   is
+      use Stream_Element_Lists;
+
+      Parse_Stream : Syntax_Trees.Parse_Stream renames Tree.Streams 
(Stream.Cur);
+
+      Error_Ref : Stream_Node_Parents := Tree.To_Stream_Node_Parents
+        (if Parse_Stream.Stack_Top = Parse_Stream.Elements.Last then
+           (Stream  => Tree.Shared_Stream,
+            Element => (Cur => Parse_Stream.Shared_Link),
+            Node    => Element (Parse_Stream.Shared_Link).Node)
+         else
+           (Stream  => Stream,
+            Element => (Cur => Next (Parse_Stream.Stack_Top)),
+            Node    => Element (Next (Parse_Stream.Stack_Top)).Node));
+
+      procedure Delete_Errors
+      --  Delete errors matching Predicate from Error_Ref.Ref.Node
+      is
+         function Edit_Error_List return Error_List_Access
+         is begin
+            return Result : Error_List_Access := new Error_Data_Lists.List'(Error_Ref.Ref.Node.Error_List.all)
+            do
+               declare
+                  use Error_Data_Lists;
+                  Cur       : Error_Data_Lists.Cursor := Result.First;
+                  To_Delete : Error_Data_Lists.Cursor;
+               begin
+                  loop
+                     exit when Cur = Error_Data_Lists.No_Element;
+                     if Predicate (Cur) then
+                        To_Delete := Cur;
+                        Next (Cur);
+                        Result.Delete (To_Delete);
+                     else
+                        Next (Cur);
+                     end if;
+                  end loop;
+               end;
+               if Result.Length = 0 then
+                  Free (Result);
+               end if;
+            end return;
+         end Edit_Error_List;
+      begin
+         if Error_Ref.Ref.Node.Error_List /= null and then Error_Ref.Ref.Node.Error_List.Length > 0 then
+            declare
+               New_Node : constant Valid_Node_Access := Copy_Node
+                 (Tree, Error_Ref.Ref.Node,
+                  Parent         =>
+                    (if Tree.Parents_Set
+                     then Error_Ref.Ref.Node.Parent
+                     else Invalid_Node_Access),
+                  User_Data              => User_Data,
+                  Copy_Children          => False,
+                  Copy_Following_Deleted => True,
+                  New_Error_List         => Edit_Error_List,
+                  Set_Error_List         => True);
+            begin
+               Move_Element (Tree, Stream, Error_Ref, New_Node, User_Data);
+            end;
+         end if;
+
+         case Error_Ref.Ref.Node.Label is
+         when Terminal_Label =>
+            null;
+
+         when Nonterm =>
+            declare
+               Children : constant Node_Access_Array := Error_Ref.Ref.Node.Children;
+            begin
+               for Child of Children loop
+                  Error_Ref.Parents.Push (Error_Ref.Ref.Node);
+                  Error_Ref.Ref.Node := Child;
+                  Delete_Errors;
+               end loop;
+            end;
+         end case;
+
+         if Error_Ref.Parents.Depth > 0 then
+            Error_Ref.Ref.Node := Error_Ref.Parents.Pop;
+         end if;
+      end Delete_Errors;
+   begin
+      --  IMPROVEME incremental: This algorithm is not incremental, and a
+      --  waste of time on almost all nonterms; cache Has_Errors Boolean in
+      --  each nonterm.
+      Delete_Errors;
+   end Delete_Errors_In_Input;
+
+   procedure Delete_Stream (Tree : in out Syntax_Trees.Tree; Stream : in out Stream_ID)
+   is
+      use Parse_Stream_Lists;
+   begin
+      declare
+         Parse_Stream : Syntax_Trees.Parse_Stream renames Tree.Streams (Stream.Cur);
+      begin
+         Parse_Stream.Stack_Top   := Stream_Element_Lists.No_Element;
+         Parse_Stream.Shared_Link := Stream_Element_Lists.No_Element;
+      end;
+      Tree.Streams.Delete (Stream.Cur);
+   end Delete_Stream;
+
+   procedure Delete_Subtree
+     (Tree : in out Syntax_Trees.Tree;
+      Root : in out Node_Access)
+   is
+      procedure Delete_Node
+        (Tree : in out Syntax_Trees.Tree;
+         Node : in out Node_Access)
+      is begin
+         Free (Node.Augmented);
+         Free (Node.Error_List);
+
+         case Node.Label is
+         when Source_Terminal =>
+            Node.Following_Deleted.Clear;
+
+         when Virtual_Terminal | Virtual_Identifier =>
+            null;
+
+         when Nonterm =>
+            for I in Node.Children'Range loop
+               Delete_Node (Tree, Node.Children (I));
+            end loop;
+
+         end case;
+         Free (Node);
+      end Delete_Node;
+   begin
+      if Root = Invalid_Node_Access then
+         null;
+      else
+         Delete_Node (Tree, Root);
+      end if;
+   end Delete_Subtree;
+
+   function Editable (Tree : in Syntax_Trees.Tree) return Boolean
+   is begin
+      return Tree.Parents_Set and Tree.Streams.Length = 0 and Tree.Shared_Stream.Cur = Parse_Stream_Lists.No_Element;
+   end Editable;
+
+   function Element_ID
+     (Tree : in Syntax_Trees.Tree;
+      Item : in Recover_Token)
+     return Token_ID
+   is
+      pragma Unreferenced (Tree);
+   begin
+      return
+        (if Item.Virtual
+         then Item.ID
+         else Item.Element_Node.ID);
+   end Element_ID;
+
+   function Element_Is_Terminal (Tree : in Syntax_Trees.Tree; Item : in Recover_Token) return Boolean
+   is begin
+      if Item.Virtual then
+         return Is_Terminal (Item.ID, Tree.Lexer.Descriptor.all);
+      else
+         return Tree.Label (Item.Element_Node) in Terminal_Label;
+      end if;
+   end Element_Is_Terminal;
+
+   function Empty_Line
+     (Tree        : in Syntax_Trees.Tree;
+      Non_Grammar : in Lexer.Token_Arrays.Vector;
+      Line        : in Line_Number_Type)
+     return Boolean
+   with Pre =>
+     (for some Token of Non_Grammar => Contains (Token.Line_Region, Line) and
+        New_Line_Count (Token.Line_Region) > 0)
+   --  Return True if Line in Non_Grammar contains no non_grammar tokens
+   --  other than New_Line or EOI.
+   is begin
+      for I in Non_Grammar.First_Index .. Non_Grammar.Last_Index loop
+         if Contains (Non_Grammar (I).Line_Region, Line) and New_Line_Count (Non_Grammar (I).Line_Region) > 0 then
+            declare
+               Line_Begin_Char_Pos : constant Base_Buffer_Pos := Tree.Lexer.Line_Begin_Char_Pos (Non_Grammar (I), Line);
+            begin
+               if Line_Begin_Char_Pos /= Invalid_Buffer_Pos then
+                  if Non_Grammar (I).Char_Region.Last + 1 > Line_Begin_Char_Pos then
+                     return True;
+
+                  else
+                     pragma Assert (Non_Grammar (I).Char_Region.Last + 1 = Line_Begin_Char_Pos);
+                     return I < Non_Grammar.Last_Index and then
+                       not (Non_Grammar (I + 1).ID in Tree.Lexer.Descriptor.New_Line_ID | Tree.Lexer.Descriptor.EOI_ID);
+                  end if;
+               end if;
+            end;
+         end if;
+      end loop;
+      --  Getting here violates the precondition
+      raise SAL.Programmer_Error;
+   end Empty_Line;
+
+   procedure Enable_Ref_Count_Check (Tree : in out Syntax_Trees.Tree; Stream : in Stream_ID; Enable : in Boolean)
+   is begin
+      Tree.Streams (Stream.Cur).Elements.Enable_Ref_Count_Check (Enable);
+   end Enable_Ref_Count_Check;
+
+   function EOI (Tree : in Syntax_Trees.Tree) return Node_Access
+   is begin
+      return Tree.EOI;
+   end EOI;
+
+   function Error (Item : in Error_Ref) return Error_Data'Class
+   is begin
+      return Error_Data_Lists.Element (Item.Error);
+   end Error;
+
+   function Error (Item : in Stream_Error_Ref) return Error_Data'Class
+   is begin
+      return Error_Data_Lists.Element (Item.Error);
+   end Error;
+
+   function Error (Item : in Stream_Error_Cursor) return Stream_Error_Ref
+   is begin
+      return Item.SER;
+   end Error;
+
+   function Error_Count (Tree : in Syntax_Trees.Tree) return Ada.Containers.Count_Type
+   is
+      Error : Error_Ref := Tree.First_Error;
+   begin
+      return Result : Ada.Containers.Count_Type := 0 do
+         loop
+            exit when Error.Node = Invalid_Node_Access;
+            Result := @ + 1;
+            Tree.Next_Error (Error);
+         end loop;
+      end return;
+   end Error_Count;
+
+   function Error_Count (Tree : in Syntax_Trees.Tree; Stream : in Stream_ID) return Ada.Containers.Count_Type
+   is begin
+      return Result : Ada.Containers.Count_Type := 0 do
+         for Cur in Tree.Stream_Error_Iterate (Stream) loop
+            Result := @ + 1;
+         end loop;
+      end return;
+   end Error_Count;
+
+   function Error_Deleted (Error : in Stream_Error_Ref) return Valid_Node_Access_Lists.Cursor
+   is begin
+      return Error.Deleted;
+   end Error_Deleted;
+
+   function Error_Iterate
+     (Tree : aliased in Syntax_Trees.Tree)
+     return Error_Iterator_Interfaces.Forward_Iterator'Class
+   is begin
+      return Error_Iterator'(Tree => Tree'Access);
+   end Error_Iterate;
+
+   function Error_List (Tree : in Syntax_Trees.Tree; Node : in Valid_Node_Access) return Error_Data_List_Const_Ref
+   is begin
+      return Error_Data_List_Const_Ref'
+        (List  => (if Node.Error_List = null then Empty_Error_List'Access else Node.Error_List),
+         Dummy => 1);
+   end Error_List;
+
+   function Error_Message_1
+     (Tree             : in Syntax_Trees.Tree;
+      Prev_Non_Grammar : in WisiToken.Lexer.Token_Arrays.Vector;
+      First_Terminal   : in Node_Access;
+      Message          : in String)
+     return String
+   is
+      Line   : Line_Number_Type  := Line_Number_Type'First;
+      Column : Ada.Text_IO.Count := Ada.Text_IO.Count'First;
+   begin
+      if Prev_Non_Grammar.Length > 0 then
+         Line := Prev_Non_Grammar (Prev_Non_Grammar.Last_Index).Line_Region.Last;
+
+         if First_Terminal /= Invalid_Node_Access then
+            if First_Terminal.Label = Source_Terminal and then
+              First_Terminal.Char_Region.First /= Invalid_Buffer_Pos
+            then
+               declare
+                  Begin_Char_Pos : constant Buffer_Pos :=
+                    (if Tree.Editable
+                     then Tree.Line_Begin_Char_Pos (Line)
+                     else Tree.Line_Begin_Char_Pos (Line, Tree.Shared_Stream));
+               begin
+                  Column :=
+                    (if Begin_Char_Pos = Invalid_Buffer_Pos or
+                       First_Terminal.Char_Region.First < Begin_Char_Pos
+                     then 0
+                     else Ada.Text_IO.Count (First_Terminal.Char_Region.First - Begin_Char_Pos));
+               end;
+            end if;
+         else
+            --  No char_pos, so no column
+            null;
+         end if;
+
+      else
+         --  No line information, so also no column.
+         null;
+      end if;
+      return WisiToken.Error_Message (Tree.Lexer.File_Name, Line, Column, Message);
+   end Error_Message_1;
+
+   function Error_Message
+     (Tree    : in Syntax_Trees.Tree;
+      Node    : in Valid_Node_Access;
+      Message : in String)
+     return String
+   is
+      Non_Grammar : Node_Access := Invalid_Node_Access;
+      Null_Non_Grammar : WisiToken.Lexer.Token_Arrays.Vector;
+   begin
+      begin
+         --  Tolerate broken trees where Prev_Non_Grammar doesn't find SOI, or
+         --  raises an exception.
+         Non_Grammar := Tree.Prev_Non_Grammar (Node);
+      exception
+      when others =>
+         null;
+      end;
+      return Error_Message_1
+        (Tree,
+         (if Non_Grammar = Invalid_Node_Access then Null_Non_Grammar else Non_Grammar.Non_Grammar),
+         Tree.First_Terminal (Node), Message);
+   end Error_Message;
+
+   function Error_Message
+     (Tree    : in Syntax_Trees.Tree;
+      Ref     : in Stream_Node_Ref;
+      Message : in String)
+     return String
+   is begin
+      if Tree.Parents_Set then
+         declare
+            Non_Grammar    : Stream_Node_Ref := Ref;
+            First_Terminal : Stream_Node_Ref := Ref;
+         begin
+            Tree.Prev_Non_Grammar (Non_Grammar);
+            Tree.First_Terminal (First_Terminal);
+            return Error_Message_1 (Tree, Non_Grammar.Node.Non_Grammar, First_Terminal.Node, Message);
+         end;
+      elsif Rooted (Ref) then
+         declare
+            Non_Grammar    : Stream_Node_Parents := (Ref, Parents => <>);
+            First_Terminal : Stream_Node_Parents := (Ref, Parents => <>);
+         begin
+            Tree.Prev_Non_Grammar (Non_Grammar, Parse_Stream => Invalid_Stream_ID);
+            Tree.First_Terminal (First_Terminal, Following => True);
+            return Error_Message_1 (Tree, Non_Grammar.Ref.Node.Non_Grammar, First_Terminal.Ref.Node, Message);
+         end;
+
+      else
+         declare
+            Non_Grammar    : constant Node_Access := Tree.First_Non_Grammar (Ref.Node);
+            First_Terminal : constant Node_Access := Tree.First_Terminal (Ref.Node);
+         begin
+            return Error_Message_1
+              (Tree,
+               (if Non_Grammar = Invalid_Node_Access
+                then Lexer.Token_Arrays.Empty_Vector
+                else Non_Grammar.Non_Grammar),
+               First_Terminal,
+               Message);
+         end;
+      end if;
+   end Error_Message;
+
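
[Illustrative usage sketch, not part of this commit.  The error accessors above combine into a simple reporting loop.  This assumes Error_Iterate is declared in the package spec and that Error_Iterator_Interfaces is an instance of Ada.Iterator_Interfaces, as the Forward_Iterator profile suggests; the fixed "syntax error" text is a placeholder for whatever a real client renders from the Error_Data'Class value.]

    with Ada.Text_IO;
    with WisiToken.Syntax_Trees;
    procedure Put_Errors (Tree : aliased in WisiToken.Syntax_Trees.Tree) is
    begin
       --  Walk every error in a fully parsed tree and print a
       --  file:line:column message for its error node.
       for Cur in Tree.Error_Iterate loop
          Ada.Text_IO.Put_Line
            (Tree.Error_Message (Tree.Error_Node (Cur), "syntax error"));
       end loop;
    end Put_Errors;
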
+   function Error_Node (Error : in Error_Ref) return Valid_Node_Access
+   is begin
+      if Valid_Node_Access_Lists.Has_Element (Error.Deleted) then
+         pragma Assert (Error.Node.Label = Source_Terminal);
+         return Error.Node.Following_Deleted (Error.Deleted);
+
+      else
+         return Error.Node;
+      end if;
+   end Error_Node;
+
+   function Error_Node (Tree : in Syntax_Trees.Tree; Error : in Error_Ref) return Valid_Node_Access
+   is
+      pragma Unreferenced (Tree);
+   begin
+      return Error_Node (Error);
+   end Error_Node;
+
+   function Error_Node (Error : in Stream_Error_Ref) return Valid_Node_Access
+   is begin
+      return Error_Node
+        (Error_Ref'
+           (Node    => Error.Ref.Ref.Node,
+            Deleted => Error.Deleted,
+            Error   => Error.Error));
+   end Error_Node;
+
+   function Error_Node (Tree : in Syntax_Trees.Tree; Error : in Stream_Error_Ref) return Valid_Node_Access
+   is
+      pragma Unreferenced (Tree);
+   begin
+      return Error_Node (Error);
+   end Error_Node;
+
+   function Error_Stream_Node_Ref (Tree : in Syntax_Trees.Tree; Error : in Stream_Error_Ref) return Stream_Node_Ref
+   is begin
+      return
+        (Stream        => Error.Ref.Ref.Stream,
+         Element       => Error.Ref.Ref.Element,
+         Node          => Tree.Error_Node
+           (Error_Ref'
+              (Node    => Error.Ref.Ref.Node,
+               Deleted => Error.Deleted,
+               Error   => Error.Error)));
+   end Error_Stream_Node_Ref;
+
+   overriding procedure Finalize (Tree : in out Syntax_Trees.Tree)
+   is begin
+      Clear (Tree, Free_Memory => True);
+      --  Tree.* array memory is freed by SAL Vectors Finalize.
+   end Finalize;
+
+   function Find_Ancestor
+     (Tree       : in Syntax_Trees.Tree;
+      Node       : in Valid_Node_Access;
+      ID         : in Token_ID;
+      Max_Parent : in Boolean := False)
+     return Node_Access
+   is
+      N           : Node_Access := Node;
+      Last_Parent : Node_Access := Invalid_Node_Access;
+   begin
+      loop
+         N := N.Parent;
+
+         exit when N = Invalid_Node_Access;
+         Last_Parent := N;
+
+         exit when ID = N.ID;
+      end loop;
+
+      return (if Max_Parent then Last_Parent else N);
+   end Find_Ancestor;
+
+   function Find_Ancestor
+     (Tree       : in Syntax_Trees.Tree;
+      Node       : in Valid_Node_Access;
+      IDs        : in Token_ID_Array;
+      Max_Parent : in Boolean := False)
+     return Node_Access
+   is
+      N           : Node_Access := Node;
+      Last_Parent : Node_Access := Invalid_Node_Access;
+   begin
+      loop
+         N := N.Parent;
+
+         exit when N = Invalid_Node_Access;
+         Last_Parent := N;
+
+         exit when (for some ID of IDs => ID = N.ID);
+      end loop;
+
+      return (if Max_Parent then Last_Parent else N);
+   end Find_Ancestor;
+
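
[Illustrative usage sketch, not part of this commit: a thin wrapper over the Token_ID_Array overload of Find_Ancestor above.  It assumes Token_ID_Array is the array type declared in package WisiToken; the actual IDs would come from a generated grammar package, and the tree must have parent links set since Find_Ancestor walks Node.Parent.]

    with WisiToken.Syntax_Trees;
    function Enclosing_Construct
      (Tree     : in WisiToken.Syntax_Trees.Tree;
       Terminal : in WisiToken.Syntax_Trees.Valid_Node_Access;
       IDs      : in WisiToken.Token_ID_Array)
      return WisiToken.Syntax_Trees.Node_Access
    is begin
       --  Stops at the first ancestor whose ID is in IDs; with
       --  Max_Parent => True it would instead return the outermost
       --  ancestor when no ID matches.
       return Tree.Find_Ancestor (Terminal, IDs);
    end Enclosing_Construct;
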
+   function Find_Byte_Pos
+     (Tree                 : in Syntax_Trees.Tree;
+      Node                 : in Node_Access;
+      Byte_Pos             : in Buffer_Pos;
+      Trailing_Non_Grammar : in Boolean)
+     return Node_Access
+   --  Return terminal node in subtree under Node that contains
+   --  (including non_grammar if Trailing_Non_Grammar) or is after
+   --  Byte_Pos. Invalid_Node_Access if byte_Pos is after all of Node.
+   is begin
+      case Node.Label is
+      when Source_Terminal =>
+         if Byte_Pos <= Node.Byte_Region.Last then
+            return Node;
+         elsif (Trailing_Non_Grammar and Node.Non_Grammar.Length > 0) and then
+           Byte_Pos <= Node.Non_Grammar (Node.Non_Grammar.Last_Index).Byte_Region.Last
+         then
+            return Node;
+         elsif Node.ID = Tree.Lexer.Descriptor.EOI_ID and Byte_Pos = Node.Byte_Region.First then
+            return Node;
+         else
+            return Invalid_Node_Access;
+         end if;
+
+      when Virtual_Terminal | Virtual_Identifier =>
+         if (Trailing_Non_Grammar and Node.Non_Grammar.Length > 0) and then
+           Byte_Pos <= Node.Non_Grammar (Node.Non_Grammar.Last_Index).Byte_Region.Last
+         then
+            return Node;
+         else
+            return Invalid_Node_Access;
+         end if;
+
+      when Nonterm =>
+         for Child of Node.Children loop
+            declare
+               Region : constant Buffer_Region := Tree.Byte_Region (Child, Trailing_Non_Grammar);
+            begin
+               if Region = Null_Buffer_Region then
+                  --  Child is empty or virtual; try next
+                  null;
+               elsif Byte_Pos <= Region.First then
+                  return Tree.First_Terminal (Child);
+               elsif Byte_Pos <= Region.Last then
+                  return Find_Byte_Pos (Tree, Child, Byte_Pos, Trailing_Non_Grammar);
+               else
+                  null; -- try next child
+               end if;
+            end;
+         end loop;
+         --  Byte_Pos is after last child
+         return Invalid_Node_Access;
+      end case;
+   end Find_Byte_Pos;
+
+   function Find_Byte_Pos
+     (Tree                 : in Syntax_Trees.Tree;
+      Byte_Pos             : in Buffer_Pos;
+      Trailing_Non_Grammar : in Boolean)
+     return Node_Access
+   is
+      Node        : constant Node_Access   := Root (Tree);
+      Byte_Region : constant Buffer_Region :=
+        (First => Tree.SOI.Non_Grammar (1).Byte_Region.First,
+         Last  => Tree.EOI.Non_Grammar (1).Byte_Region.First);
+   begin
+      if Byte_Pos <= Byte_Region.First then
+         return Tree.First_Terminal (Node);
+      elsif Byte_Pos > Byte_Region.Last then
+         return Invalid_Node_Access;
+      else
+         return Find_Byte_Pos (Tree, Node, Byte_Pos, Trailing_Non_Grammar);
+      end if;
+   end Find_Byte_Pos;
+
+   function Find_Byte_Pos
+     (Tree                 : in Syntax_Trees.Tree;
+      Byte_Pos             : in Buffer_Pos;
+      Trailing_Non_Grammar : in Boolean;
+      Start_At             : in Terminal_Ref;
+      Stream               : in Stream_ID := Invalid_Stream_ID)
+     return Terminal_Ref
+   is
+      use Stream_Element_Lists;
+
+      Parse_Stream : Syntax_Trees.Parse_Stream renames Tree.Streams
+        (if Start_At = Invalid_Stream_Node_Ref
+         then Stream.Cur
+         else Start_At.Stream.Cur);
+
+      function Find_Parent return Stream_Node_Ref
+      is
+         Node : Node_Access := Start_At.Node;
+      begin
+         loop
+            if Node.Parent = null then
+               declare
+                  Cur : constant Stream_Element_Lists.Cursor := Stream_Element_Lists.Next (Start_At.Element.Cur);
+               begin
+                  if Stream_Element_Lists.Has_Element (Cur) then
+                     return Tree.To_Rooted_Ref (Start_At.Stream, (Cur => Cur));
+                  else
+                     return Invalid_Stream_Node_Ref;
+                  end if;
+               end;
+
+            elsif Contains (Tree.Byte_Region (Node.Parent, Trailing_Non_Grammar => False), Byte_Pos) then
+               return (Start_At.Stream, Start_At.Element, Node.Parent);
+
+            else
+               Node := Node.Parent;
+            end if;
+         end loop;
+      end Find_Parent;
+
+      Result : Stream_Node_Ref :=
+        (if Start_At = Invalid_Stream_Node_Ref
+         then (Stream, (Cur => Parse_Stream.Elements.First), null)
+         else Find_Parent);
+   begin
+      loop
+         exit when Result.Element.Cur = No_Element;
+
+         Result.Node := Find_Byte_Pos
+           (Tree, Stream_Element_Lists.Element (Result.Element.Cur).Node, Byte_Pos, Trailing_Non_Grammar);
+         if Result.Node = Invalid_Node_Access then
+            --  Try next stream element
+            Result.Element.Cur := Next (Result.Element.Cur);
+         else
+            return Result;
+         end if;
+      end loop;
+      --  end of stream reached; Byte_Pos is after all of Stream
+      return Result;
+   end Find_Byte_Pos;
+
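
[Illustrative usage sketch, not part of this commit: mapping a buffer position to the terminal at or after it, via the whole-tree Find_Byte_Pos overload above; this assumes that overload is declared in the package spec.]

    with WisiToken.Syntax_Trees;
    function Terminal_At
      (Tree : in WisiToken.Syntax_Trees.Tree;
       Pos  : in WisiToken.Buffer_Pos)
      return WisiToken.Syntax_Trees.Node_Access
    is begin
       --  Result is Invalid_Node_Access when Pos is past the last
       --  token (and its trailing non-grammar) in the parsed text.
       return Tree.Find_Byte_Pos (Pos, Trailing_Non_Grammar => True);
    end Terminal_At;
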
+   function Find_Char_Pos
+     (Tree                : in Syntax_Trees.Tree;
+      Node                : in Node_Access;
+      Char_Pos            : in Buffer_Pos;
+      Include_Non_Grammar : in Boolean;
+      After               : in Boolean)
+     return Node_Access
+   is begin
+      case Node.Label is
+      when Source_Terminal =>
+         if Contains (Node.Char_Region, Char_Pos) then
+            return Node;
+         elsif After and Char_Pos <= Node.Char_Region.Last then
+            return Node;
+         elsif (Include_Non_Grammar and Node.Non_Grammar.Length > 0) and then
+           Char_Pos <= Node.Non_Grammar (Node.Non_Grammar.Last_Index).Char_Region.Last
+         then
+            return Node;
+         else
+            return Invalid_Node_Access;
+         end if;
+
+      when Virtual_Terminal | Virtual_Identifier =>
+         if (Include_Non_Grammar and Node.Non_Grammar.Length > 0) and then
+           Char_Pos <= Node.Non_Grammar (Node.Non_Grammar.Last_Index).Char_Region.Last
+         then
+            return Node;
+         else
+            return Invalid_Node_Access;
+         end if;
+
+      when Nonterm =>
+         for Child of Node.Children loop
+            declare
+               Region : constant Buffer_Region := Tree.Char_Region (Child, Include_Non_Grammar);
+            begin
+               if Length (Region) = 0 then
+                  --  Child is empty or all virtual; try next
+                  null;
+               elsif Char_Pos <= Region.Last then
+                  declare
+                     Result : constant Node_Access := Find_Char_Pos
+                       (Tree, Child, Char_Pos, Include_Non_Grammar, After);
+                  begin
+                     if Tree.ID (Child) = Tree.Lexer.Descriptor.SOI_ID then
+                        pragma Assert (Child.Label = Source_Terminal);
+                        --  SOI does not have an empty region (see comment in Start_Lex), but
+                        --  we define it to not contain any text. EOI does have an empty
+                        --  region.
+                        if Char_Pos < Child.Char_Region.Last then
+                           if After then
+                              return Child;
+                           else
+                              return Invalid_Node_Access;
+                           end if;
+
+                        elsif (Include_Non_Grammar and Child.Non_Grammar.Length > 1) and then
+                          Char_Pos <= Child.Non_Grammar (Child.Non_Grammar.Last_Index).Char_Region.Last
+                        then
+                           return Child;
+                        else
+                           null; -- try next child
+                        end if;
+
+                     else
+                        return Result;
+                     end if;
+                  end;
+
+               else
+                  null; -- try next child
+               end if;
+            end;
+         end loop;
+         --  Char_Pos is after last child
+         return Invalid_Node_Access;
+      end case;
+   end Find_Char_Pos;
+
+   function Find_Char_Pos
+     (Tree                 : in Syntax_Trees.Tree;
+      Char_Pos             : in Buffer_Pos;
+      Trailing_Non_Grammar : in Boolean;
+      After                : in Boolean := False)
+     return Node_Access
+   is
+      Node : constant Node_Access := Root (Tree);
+      Char_Region : constant Buffer_Region := Tree.Char_Region (Node, Trailing_Non_Grammar);
+   begin
+      if Char_Pos < Char_Region.First then
+         if After then
+            return Tree.First_Terminal (Node);
+         else
+            return Invalid_Node_Access;
+         end if;
+
+      elsif Char_Pos > Char_Region.Last then
+         return Invalid_Node_Access;
+
+      else
+         return Find_Char_Pos (Tree, Node, Char_Pos, Trailing_Non_Grammar, After);
+      end if;
+   end Find_Char_Pos;
+
+   function Find_Child
+     (Tree : in Syntax_Trees.Tree;
+      Node : in Valid_Node_Access;
+      ID   : in Token_ID)
+     return Node_Access
+   is begin
+      case Node.Label is
+      when Source_Terminal | Virtual_Terminal | Virtual_Identifier =>
+         return Invalid_Node_Access;
+      when Nonterm =>
+         for C of Node.Children loop
+            if C /= null then
+               if ID = C.ID then
+                  return C;
+               end if;
+            end if;
+         end loop;
+         return Invalid_Node_Access;
+      end case;
+   end Find_Child;
+
+   function Find_Descendant
+     (Tree : in Syntax_Trees.Tree;
+      Node : in Valid_Node_Access;
+      ID   : in Token_ID)
+     return Node_Access
+   is
+      Found : Node_Access := Invalid_Node_Access;
+
+      function Process (Tree : in Syntax_Trees.Tree; Node : in Valid_Node_Access) return Boolean
+      is
+         pragma Unreferenced (Tree);
+      begin
+         if Node.ID = ID then
+            Found := Node;
+            return False;
+         else
+            return True;
+         end if;
+      end Process;
+
+      Junk : constant Boolean := Process_Tree (Tree, Node, Before, Process'Access);
+      pragma Unreferenced (Junk);
+   begin
+      return Found;
+   end Find_Descendant;
+
+   function Find_Descendant
+     (Tree      : in     Syntax_Trees.Tree;
+      Node      : in     Valid_Node_Access;
+      Predicate : access function (Tree : in Syntax_Trees.Tree; Node : in Valid_Node_Access) return Boolean)
+     return Node_Access
+   is
+      Found : Node_Access := Invalid_Node_Access;
+
+      function Process (Tree : in Syntax_Trees.Tree; Node : in Valid_Node_Access) return Boolean
+      is begin
+         if Predicate (Tree, Node) then
+            Found := Node;
+            return False;
+         else
+            return True;
+         end if;
+      end Process;
+
+      Junk : constant Boolean := Process_Tree (Tree, Node, Before, Process'Access);
+      pragma Unreferenced (Junk);
+   begin
+      return Found;
+   end Find_Descendant;
+
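
[Illustrative usage sketch, not part of this commit: the Predicate overload of Find_Descendant uses the same downward-closure style as Process_Tree; for example, locating the first descendant, in the Before order, whose byte region contains a given position.]

    with WisiToken.Syntax_Trees;
    function Descendant_At
      (Tree : in WisiToken.Syntax_Trees.Tree;
       Node : in WisiToken.Syntax_Trees.Valid_Node_Access;
       Pos  : in WisiToken.Buffer_Pos)
      return WisiToken.Syntax_Trees.Node_Access
    is
       use WisiToken;

       function Contains_Pos
         (Tree : in Syntax_Trees.Tree;
          Node : in Syntax_Trees.Valid_Node_Access)
         return Boolean
       --  True when Node's byte region (without trailing non-grammar)
       --  contains Pos.
       is (Contains (Tree.Byte_Region (Node, Trailing_Non_Grammar => False), Pos));
    begin
       return Tree.Find_Descendant (Node, Contains_Pos'Access);
    end Descendant_At;
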
+   function Find_Match
+     (Error_List : in Error_List_Access;
+      Predicate  : in Error_Predicate)
+     return Error_Data_Lists.Cursor
+   is
+      use Error_Data_Lists;
+   begin
+      if Error_List = null or Predicate = null then
+         return No_Element;
+      else
+         return Result : Cursor := Error_List.First do
+            loop
+               exit when Result = No_Element;
+               exit when Predicate (Result);
+               Next (Result);
+            end loop;
+         end return;
+      end if;
+   end Find_Match;
+
+   function Find_New_Line
+     (Tree     : in     Syntax_Trees.Tree;
+      Line     : in     Line_Number_Type;
+      Node     : in     Node_Access;
+      Char_Pos :    out Buffer_Pos)
+     return Node_Access
+   with Pre => Line > Line_Number_Type'First and Tree.Parents_Set
+   --  Return node under Node that contains the non-grammar containing a
+   --  new_line or EOI that ends Line - 1. Update Char_Pos to the
+   --  position of the first character on Line (or EOI). If not found,
+   --  result is Invalid_Node_Access, Char_Pos is Invalid_Buffer_Pos.
+   is begin
+      Char_Pos := Invalid_Buffer_Pos;
+
+      if Node = Invalid_Node_Access then
+         return Invalid_Node_Access;
+      end if;
+
+      case Node.Label is
+      when Terminal_Label =>
+         if Node.ID = Tree.Lexer.Descriptor.EOI_ID and then
+           --  Handle 'Line' as well as 'Line - 1' to allow returning char_pos
+           --  for the last line.
+           Node.Non_Grammar (Node.Non_Grammar.First_Index).Line_Region.First in Line - 1 | Line
+         then
+            Char_Pos := Node.Char_Region.First;
+            return Node;
+
+         elsif Check_Non_Grammar (Tree, Node, Line, Char_Pos) then
+            return Node;
+
+         elsif Tree.Lexer.Can_Contain_New_Line (Node.ID) and then
+           Check_Multi_Line (Tree, Node, Line, Char_Pos, Start_Line => Tree.Prev_New_Line (Node).Line)
+         then
+            return Node;
+
+         else
+            return Invalid_Node_Access;
+         end if;
+
+      when Nonterm =>
+         declare
+            Node_Line_Region : constant WisiToken.Line_Region := Tree.Line_Region
+              (Node, Trailing_Non_Grammar => True);
+         begin
+            if Node.Child_Count = 0 then
+               --  This must be an empty stream element.
+               return Invalid_Node_Access;
+
+            elsif Line - 1 in Node_Line_Region.First .. Node_Line_Region.Last then
+               if Line - 1 = Node_Line_Region.Last then
+                  --  Faster to check last child first.
+                  for I in reverse Node.Children'Range loop
+                     declare
+                        Temp : constant Node_Access := Find_New_Line
+                          (Tree, Line, Node.Children (I), Char_Pos);
+                     begin
+                        if Temp = Invalid_Node_Access then
+                           if I = Node.Children'First then
+                              return Invalid_Node_Access;
+                           else
+                              --  Check next child
+                              null;
+                           end if;
+                        else
+                           return Temp;
+                        end if;
+                     end;
+                  end loop;
+                  return Invalid_Node_Access;
+               else
+                  for I in Node.Children'Range loop
+                     declare
+                        Temp : constant Node_Access := Find_New_Line (Tree, Line, Node.Children (I), Char_Pos);
+                     begin
+                        if Temp = Invalid_Node_Access then
+                           if I = Node.Children'Last then
+                              return Invalid_Node_Access;
+                           else
+                              --  Check next child
+                              null;
+                           end if;
+                        else
+                           return Temp;
+                        end if;
+                     end;
+                  end loop;
+                  return Invalid_Node_Access;
+               end if;
+            else
+               return Invalid_Node_Access;
+            end if;
+         end;
+      end case;
+   end Find_New_Line;
+
+   function Find_New_Line
+     (Tree : in Syntax_Trees.Tree;
+      Line : in Line_Number_Type)
+     return Node_Access
+   is
+      Char_Pos : Buffer_Pos;
+   begin
+      return Find_New_Line (Tree, Line, Tree.Root, Char_Pos);
+   end Find_New_Line;
+
+   function Find_New_Line
+     (Tree                : in     Syntax_Trees.Tree;
+      Line                : in     Line_Number_Type;
+      Line_Begin_Char_Pos :    out Buffer_Pos)
+     return Node_Access
+   is begin
+      if Tree.SOI.Non_Grammar (Tree.SOI.Non_Grammar.First_Index).Line_Region.Last = Line then
+         Line_Begin_Char_Pos := Tree.SOI.Char_Region.First;
+         return Tree.SOI;
+      else
+         return Find_New_Line (Tree, Line, Tree.Root, Line_Begin_Char_Pos);
+      end if;
+   end Find_New_Line;
+
+   procedure Find_New_Line_1
+     (Tree         : in     Syntax_Trees.Tree;
+      Ref          : in out Stream_Node_Parents;
+      Parse_Stream : in     Stream_ID;
+      Line         : in     Line_Number_Type;
+      Char_Pos     :    out Buffer_Pos)
+   with Pre => Line > Line_Number_Type'First and Ref.Ref.Element /= Invalid_Stream_Index,
+     Post => Ref.Ref.Element = Ref.Ref.Element'Old and
+             (Ref.Ref.Node = Invalid_Node_Access or else
+                (Ref.Ref.Node.Label in Terminal_Label))
+   --  Update Ref to node under Ref.Node in Ref.Stream that contains the
+   --  Non_Grammar that ends Line - 1. Set Char_Pos to the position of
+   --  the first character on Line. If not found, Ref.Ref.Node is
+   --  Invalid_Node_Access, Char_Pos is Invalid_Buffer_Pos.
+   is
+   begin
+      Char_Pos := Invalid_Buffer_Pos;
+
+      if Ref.Ref.Node = Invalid_Node_Access then
+         return;
+      end if;
+
+      case Ref.Ref.Node.Label is
+      when Terminal_Label =>
+         if Ref.Ref.Node.ID = Tree.Lexer.Descriptor.EOI_ID and then
+           --  Handle 'Line' as well as 'Line - 1' to allow returning char_pos
+           --  for the last line.
+           Ref.Ref.Node.Non_Grammar (Ref.Ref.Node.Non_Grammar.First_Index).Line_Region.First in Line - 1 | Line
+         then
+            Char_Pos := Ref.Ref.Node.Char_Region.First;
+            return;
+         elsif Check_Non_Grammar (Tree, Ref.Ref.Node, Line, Char_Pos) then
+            return;
+         else
+            Ref.Ref.Node := Invalid_Node_Access;
+            return;
+         end if;
+
+      when Nonterm =>
+         if Ref.Ref.Node.Child_Count = 0 then
+            --  This must be an empty stream element.
+            Ref.Ref.Node := Invalid_Node_Access;
+            return;
+
+         else
+            declare
+               Node_Line_Region : constant WisiToken.Line_Region := Tree.Line_Region
+                 (Ref, Parse_Stream, Trailing_Non_Grammar => True);
+
+               function Check_Child (I : in SAL.Peek_Type; Forward : in Boolean) return Boolean
+               --  True => return from Find_New_Line; False => check next child.
+               is
+                  Temp : Stream_Node_Parents :=
+                    ((Ref.Ref.Stream, Ref.Ref.Element, Ref.Ref.Node.Children (I)),
+                     Ref.Parents);
+               begin
+                  Temp.Parents.Push (Ref.Ref.Node);
+                  Find_New_Line_1 (Tree, Temp, Parse_Stream, Line, Char_Pos);
+
+                  if Temp.Ref.Node = Invalid_Node_Access then
+                     if I = (if Forward then Ref.Ref.Node.Children'Last else Ref.Ref.Node.Children'First) then
+                        Ref.Ref.Node := Invalid_Node_Access;
+                        return True;
+                     else
+                        --  Check next child
+                        return False;
+                     end if;
+                  else
+                     Ref := Temp;
+                     return True;
+                  end if;
+               end Check_Child;
+
+            begin
+               if Contains (Node_Line_Region, Line - 1) then
+                  if Line - 1 = Node_Line_Region.Last then
+                     --  Faster to check last child first.
+                     for I in reverse Ref.Ref.Node.Children'Range loop
+                        if Check_Child (I, Forward => False) then
+                           return;
+                        end if;
+                     end loop;
+
+                  else
+                     for I in Ref.Ref.Node.Children'Range loop
+                        if Check_Child (I, Forward => True) then
+                           return;
+                        end if;
+                     end loop;
+                  end if;
+               end if;
+               Ref.Ref.Node := Invalid_Node_Access;
+               return;
+            end;
+         end if;
+      end case;
+   end Find_New_Line_1;
+
+   procedure Find_New_Line
+     (Tree         : in     Syntax_Trees.Tree;
+      Ref          : in out Stream_Node_Parents;
+      Parse_Stream : in     Stream_ID;
+      Line         : in     Line_Number_Type;
+      Char_Pos     :    out Buffer_Pos)
+   with Pre => Line > Line_Number_Type'First and Ref.Parents.Is_Empty and
+               Ref.Ref.Node = Stream_Element_Lists.Element (Ref.Ref.Element.Cur).Node,
+     Post => Ref.Ref = Invalid_Stream_Node_Ref or else
+             (Ref.Ref.Node.Label in Terminal_Label)
+
+   --  On entry, Ref.Ref should be Stream_First (Ref.Stream). Update Ref
+   --  to node in Ref.Stream or Tree.Shared_Stream that ends Line - 1 (or
+   --  EOI). Set Char_Pos to the position of the first character on Line
+   --  (or EOI). If not found (ie Line < SOI.Line or Line > EOI.Line),
+   --  Ref.Ref is Invalid_Stream_Node_Ref, Char_Pos is
+   --  Invalid_Buffer_Pos.
+   is
+      Start_Stream : constant Stream_ID := Ref.Ref.Stream;
+   begin
+      loop
+         Find_New_Line_1 (Tree, Ref, Parse_Stream, Line, Char_Pos);
+         if Ref.Ref = Invalid_Stream_Node_Ref then
+            return;
+
+         elsif Ref.Ref.Node = Invalid_Node_Access then
+            Stream_Next (Tree, Ref, Rooted => True);
+
+            if Ref.Ref = Invalid_Stream_Node_Ref then
+               if Start_Stream /= Tree.Shared_Stream then
+                  declare
+                     Parse_Stream : Syntax_Trees.Parse_Stream renames Tree.Streams (Start_Stream.Cur);
+                  begin
+                     Ref :=
+                       (Ref => (Tree.Shared_Stream, (Cur => Parse_Stream.Shared_Link), Invalid_Node_Access),
+                        Parents => <>);
+                     First_Terminal (Tree, Ref, Following => True);
+                  end;
+               else
+                  return;
+               end if;
+            end if;
+         else
+            return;
+         end if;
+      end loop;
+   end Find_New_Line;
+
+   procedure Find_Node (Tree : in Syntax_Trees.Tree; Node : in Valid_Node_Access)
+   is begin
+      for I in Tree.Nodes.First_Index .. Tree.Nodes.Last_Index loop
+         if Tree.Nodes (I) = Node then
+            Tree.Lexer.Trace.Put_Line (I'Image);
+            exit;
+         end if;
+      end loop;
+   end Find_Node;
+
+   function Find_Sibling
+     (Tree : in Syntax_Trees.Tree;
+      Node : in Valid_Node_Access;
+      ID   : in Token_ID)
+     return Node_Access
+   is begin
+      if Node.Parent = Invalid_Node_Access then
+         return Invalid_Node_Access;
+
+      else
+         case Node.Parent.Label is
+         when Source_Terminal | Virtual_Terminal | Virtual_Identifier =>
+            return Invalid_Node_Access;
+
+         when Nonterm =>
+            for C of Node.Parent.Children loop
+               if C /= null then
+                  if ID = C.ID then
+                     return C;
+                  end if;
+               end if;
+            end loop;
+            return Invalid_Node_Access;
+         end case;
+      end if;
+   end Find_Sibling;
+
+   overriding function First (Object : Error_Iterator) return Error_Ref
+   is begin
+      return First_Error (Object.Tree.all);
+   end First;
+
+   overriding function First (Object : Stream_Error_Iterator) return Stream_Error_Cursor
+   is begin
+      return (SER => First_Error (Object.Tree.all, (Cur => Object.Stream)));
+   end First;
+
+   procedure First_Error (Error : in out Error_Ref)
+   with Pre => Error.Node /= Invalid_Node_Access
+   --  Update Error to first error on or following Deleted in Node.
+   is
+      use Valid_Node_Access_Lists;
+   begin
+      loop
+         if not Has_Element (Error.Deleted) and then Error.Node.Error_List /= null then
+            Error.Error := Error.Node.Error_List.First;
+            return;
+         end if;
+
+         loop
+            exit when not Has_Element (Error.Deleted);
+
+            declare
+               Deleted_Node : Valid_Node_Access renames Error.Node.Following_Deleted (Error.Deleted);
+            begin
+               if Deleted_Node.Label = Source_Terminal and then
+                 Deleted_Node.Error_List /= null
+               then
+                  Error.Error := Deleted_Node.Error_List.First;
+                  return;
+               end if;
+            end;
+            Next (Error.Deleted);
+         end loop;
+
+         Next_Node (Error.Node);
+
+         exit when Error.Node = Invalid_Node_Access;
+
+         if Error.Node.Error_List /= null then
+            Error.Error   := Error.Node.Error_List.First;
+            Error.Deleted := No_Element;
+            return;
+         end if;
+
+         Error.Deleted :=
+           (if Error.Node.Label = Source_Terminal
+            then Error.Node.Following_Deleted.First
+            else No_Element);
+      end loop;
+   end First_Error;
+
+   procedure First_Error
+     (Tree  : in     Syntax_Trees.Tree;
+      Error : in out Stream_Error_Ref)
+   with Pre => Error.Ref.Ref.Node /= Invalid_Node_Access
+   --  Update Error to first error on or following
+   --  Error.Ref. On enter, Error.Ref is normally stream SOI.
+   is
+      use Valid_Node_Access_Lists;
+   begin
+      loop
+         if not Has_Element (Error.Deleted) and then Error.Ref.Ref.Node.Error_List /= null then
+            Error.Error := Error.Ref.Ref.Node.Error_List.First;
+            return;
+         end if;
+
+         loop
+            exit when not Has_Element (Error.Deleted);
+
+            declare
+               Deleted_Node : Valid_Node_Access renames Error.Ref.Ref.Node.Following_Deleted (Error.Deleted);
+            begin
+               if Deleted_Node.Error_List /= null then
+                  Error.Error := Deleted_Node.Error_List.First;
+                  return;
+               end if;
+            end;
+            Next (Error.Deleted);
+         end loop;
+
+         Next_Node (Tree, Error.Ref);
+         if Error.Ref.Ref.Node = Invalid_Node_Access then
+            --  No errors in tree
+            return;
+         end if;
+
+         Error.Deleted :=
+           (if Error.Ref.Ref.Node.Label = Source_Terminal
+            then Error.Ref.Ref.Node.Following_Deleted.First
+            else No_Element);
+      end loop;
+   end First_Error;
+
+   function First_Error (Tree : in Syntax_Trees.Tree) return Error_Ref
+   is begin
+      return Result : Error_Ref := (Tree.SOI, Tree.SOI.Following_Deleted.First, Error_Data_Lists.No_Element) do
+         First_Error (Result);
+      end return;
+   end First_Error;
+
+   function First_Error (Tree : in Syntax_Trees.Tree; Stream : in Stream_ID) return Stream_Error_Ref
+   is begin
+      return Result : Stream_Error_Ref :=
+        (Ref     => Tree.To_Stream_Node_Parents
+           (Tree.To_Rooted_Ref (Stream, Tree.Stream_First (Stream, Skip_SOI => True))),
+         Deleted => Valid_Node_Access_Lists.No_Element,
+         Error   => Error_Data_Lists.No_Element)
+      do
+         if Result.Ref.Ref.Node.Label = Source_Terminal then
+            Result.Deleted := Result.Ref.Ref.Node.Following_Deleted.First;
+         end if;
+         First_Error (Tree, Result);
+      end return;
+   end First_Error;
+
+   function First_Input
+     (Tree   : in Syntax_Trees.Tree;
+      Stream : in Stream_ID)
+     return Rooted_Ref
+   is
+      use Stream_Element_Lists;
+      Parse_Stream : Syntax_Trees.Parse_Stream renames Tree.Streams (Stream.Cur);
+      Cur : constant Cursor := Next (Parse_Stream.Stack_Top);
+   begin
+      return (Stream, (Cur => Cur), Element (Cur).Node);
+   end First_Input;
+
+   function First_Non_Grammar
+     (Tree    : in     Syntax_Trees.Tree;
+      Node    : in     Valid_Node_Access;
+      Parents : in out Node_Stacks.Stack)
+     return Node_Access
+   is
+      Result : Node_Access := First_Terminal (Tree, Node, Parents);
+   begin
+      loop
+         exit when Result = Invalid_Node_Access;
+         exit when Result.Non_Grammar.Length > 0;
+         Next_Terminal (Tree, Result, Parents);
+      end loop;
+      return Result;
+   end First_Non_Grammar;
+
+   function First_Non_Grammar
+     (Tree : in Syntax_Trees.Tree;
+      Node : in Valid_Node_Access)
+     return Node_Access
+   is
+      Parents : Node_Stacks.Stack;
+   begin
+      return First_Non_Grammar (Tree, Node, Parents);
+   end First_Non_Grammar;
+
+   function First_Parse_Stream (Tree : in Syntax_Trees.Tree) return Stream_ID
+   is begin
+      return (Cur => Parse_Stream_Lists.Next (Tree.Shared_Stream.Cur));
+   end First_Parse_Stream;
+
+   procedure First_Recover_Conflict (Tree : in Syntax_Trees.Tree; Ref : in out Stream_Node_Ref)
+   is begin
+      loop
+         exit when Ref = Invalid_Stream_Node_Ref;
+         exit when Ref.Node.Label = Nonterm and then Ref.Node.Recover_Conflict;
+
+         Next_Nonterm (Tree, Ref);
+      end loop;
+   end First_Recover_Conflict;
+
+   function First_Recover_Conflict (Tree : in Syntax_Trees.Tree) return Stream_Node_Ref
+   is begin
+      return Result : Stream_Node_Ref :=
+        (Stream  => Tree.Shared_Stream,
+         Element => (Cur => Tree.Streams (Tree.Shared_Stream.Cur).Elements.First),
+         Node    => Tree.SOI)
+      do
+         First_Recover_Conflict (Tree, Result);
+      end return;
+   end First_Recover_Conflict;
+
+   function First_Source_Terminal
+     (Tree                 : in Syntax_Trees.Tree;
+      Node                 : in Valid_Node_Access;
+      Trailing_Non_Grammar : in Boolean;
+      Following            : in Boolean)
+     return Node_Access
+   is
+      --  If not Following, we use a Parents stack to limit Next_Terminal to
+      --  descendants of Node.
+      Parents : Node_Stacks.Stack;
+      Result  : Node_Access :=
+        (if Following
+         then First_Terminal (Tree, Node)
+         else First_Terminal (Tree, Node, Parents));
+   begin
+      loop
+         exit when Result = Invalid_Node_Access;
+         exit when
+           (if Trailing_Non_Grammar
+            then (case Terminal_Label'(Result.Label) is
+                  when Source_Terminal => True,
+                  when Virtual_Terminal | Virtual_Identifier =>
+                     Result.Non_Grammar.Length > 0)
+            else Result.Label = Source_Terminal);
+
+         if Following then
+            Next_Terminal (Tree, Result);
+         else
+            Next_Terminal (Tree, Result, Parents);
+         end if;
+      end loop;
+      return Result;
+   end First_Source_Terminal;
+
+   procedure First_Source_Terminal
+     (Tree                 : in     Syntax_Trees.Tree;
+      Ref                  : in out Stream_Node_Parents;
+      Trailing_Non_Grammar : in     Boolean)
+   is begin
+      Ref.Ref.Node := First_Terminal (Tree, Ref.Ref.Node, Ref.Parents);
+      loop
+         exit when Ref.Ref.Node = Invalid_Node_Access;
+         exit when
+           (if Trailing_Non_Grammar
+            then (case Terminal_Label'(Ref.Ref.Node.Label) is
+                  when Source_Terminal => True,
+                  when Virtual_Terminal => Ref.Ref.Node.Non_Grammar.Length > 0,
+                  when Virtual_Identifier => Ref.Ref.Node.Non_Grammar.Length > 0)
+            else Ref.Ref.Node.Label = Source_Terminal);
+
+         Next_Terminal (Tree, Ref.Ref.Node, Ref.Parents);
+      end loop;
+   end First_Source_Terminal;
+
+   function First_Terminal (Tree : in Syntax_Trees.Tree; Item : in Recover_Token) return Node_Access
+   is begin
+      return
+        (if Item.Virtual
+         then Item.First_Terminal
+         else First_Terminal (Tree, Item.Element_Node));
+   end First_Terminal;
+
+   function First_Terminal (Tree : in Syntax_Trees.Tree; Node : in Valid_Node_Access) return Node_Access
+   is begin
+      case Node.Label is
+      when Source_Terminal | Virtual_Terminal | Virtual_Identifier =>
+         return Node;
+      when Nonterm =>
+         for C of Node.Children loop
+            --  This is called from Error_Message; tolerate deleted children
+            if C /= Invalid_Node_Access then
+               declare
+                  Term : constant Node_Access := First_Terminal (Tree, C);
+               begin
+                  if Term /= Invalid_Node_Access then
+                     return Term;
+                  end if;
+               end;
+            end if;
+         end loop;
+         return Invalid_Node_Access;
+      end case;
+   end First_Terminal;
+
+   function First_Terminal
+     (Tree    : in     Syntax_Trees.Tree;
+      Node    : in     Valid_Node_Access;
+      Parents : in out Node_Stacks.Stack)
+     return Node_Access
+   is
+      Parent_Depth : constant SAL.Base_Peek_Type := Parents.Depth;
+   begin
+      case Node.Label is
+      when Terminal_Label =>
+         return Node;
+
+      when Nonterm =>
+         for C of Node.Children loop
+            --  We tolerate null C here because this function is called while
+            --  printing a tree for debug.
+            if C /= Invalid_Node_Access then
+               Parents.Push (Node);
+               declare
+                  First_Term : constant Node_Access := First_Terminal (Tree, C, Parents);
+               begin
+                  if First_Term /= Invalid_Node_Access then
+                     return First_Term;
+                  else
+                     Parents.Pop (Parents.Depth - Parent_Depth); -- discard parents from call to First_Terminal.
+                  end if;
+               end;
+            end if;
+         end loop;
+
+         --  All children are empty
+         return Invalid_Node_Access;
+      end case;
+   end First_Terminal;
+
+   procedure First_Terminal
+     (Tree : in     Syntax_Trees.Tree;
+      Ref  : in out Stream_Node_Ref)
+   is
+      use Stream_Element_Lists;
+   begin
+      Ref.Node := First_Terminal (Tree, Element (Ref.Element.Cur).Node);
+      loop
+         exit when Ref.Node /= Invalid_Node_Access;
+         Stream_Next (Tree, Ref, Rooted => False);
+         exit when not Has_Element (Ref.Element.Cur);
+      end loop;
+   end First_Terminal;
+
+   function First_Terminal
+     (Tree : in Syntax_Trees.Tree;
+      Ref  : in Stream_Node_Ref)
+     return Terminal_Ref
+   is
+      Result : Stream_Node_Ref := Ref;
+   begin
+      First_Terminal (Tree, Result);
+      return Result;
+   end First_Terminal;
+
+   function First_Terminal_In_Node
+     (Tree : in Syntax_Trees.Tree;
+      Ref  : in Stream_Node_Ref)
+     return Terminal_Ref
+   is begin
+      return
+        (Ref.Stream,
+         Ref.Element,
+         First_Terminal (Tree, Ref.Node));
+   end First_Terminal_In_Node;
+
+   procedure First_Terminal
+     (Tree      : in     Syntax_Trees.Tree;
+      Ref       : in out Stream_Node_Parents;
+      Following : in     Boolean)
+   is
+      use Stream_Element_Lists;
+   begin
+      if Ref.Ref.Node = Invalid_Node_Access then
+         --  Ref is an empty nonterm.
+         null;
+      else
+         Ref.Ref.Node := First_Terminal (Tree, Ref.Ref.Node, Ref.Parents);
+      end if;
+
+      if Following then
+         loop
+            exit when Ref.Ref.Node /= Invalid_Node_Access;
+            Next_Terminal (Tree, Ref, Following);
+            exit when not Has_Element (Ref.Ref.Element.Cur);
+         end loop;
+      elsif Ref.Ref.Node = Invalid_Node_Access then
+         Ref.Parents.Clear;
+      end if;
+   end First_Terminal;
+
+   function First_Terminal
+     (Tree : in Syntax_Trees.Tree;
+      Ref  : in Rooted_Ref)
+     return Stream_Node_Parents
+   is
+      use Stream_Element_Lists;
+   begin
+      --  We'd like the result type to be Terminal_Stream_Node_Parents, but
+      --  the dynamic predicate is checked on assigning the initial value of
+      --  Result, and it fails if Ref is not a terminal.
+      return Result : Stream_Node_Parents := (Ref, Parents => <>) do
+         Result.Ref.Node := First_Terminal (Tree, Element (Result.Ref.Element.Cur).Node, Result.Parents);
+         loop
+            exit when Result.Ref.Node /= Invalid_Node_Access;
+            Next_Terminal (Tree, Result, Following => True);
+            exit when not Has_Element (Result.Ref.Element.Cur);
+         end loop;
+      end return;
+   end First_Terminal;
+
+   --  FIXME: alphabetize these.
+   procedure First_Sequential_Terminal
+     (Tree    : in     Syntax_Trees.Tree;
+      Node    : in out Node_Access;
+      Parents : in out Node_Stacks.Stack)
+   is begin
+      if Node = Invalid_Node_Access then
+         return;
+      end if;
+
+      Node := First_Terminal (Tree, Node, Parents);
+      loop
+         exit when Node = Invalid_Node_Access;
+         exit when Node.Sequential_Index /= Invalid_Sequential_Index;
+
+         Tree.Next_Terminal (Node, Parents);
+      end loop;
+   end First_Sequential_Terminal;
+
+   function First_Sequential_Terminal
+     (Tree    : in     Syntax_Trees.Tree;
+      Node    : in     Node_Access;
+      Parents : in out Node_Stacks.Stack)
+     return Node_Access
+   is
+      Result : Node_Access := Node;
+   begin
+      First_Sequential_Terminal (Tree, Result, Parents);
+      return Result;
+   end First_Sequential_Terminal;
+
+   function First_Sequential_Terminal
+     (Tree : in Syntax_Trees.Tree;
+      Node : in Node_Access)
+     return Node_Access
+   is
+      Parents : Node_Stacks.Stack;
+   begin
+      return Tree.First_Sequential_Terminal (Node, Parents);
+   end First_Sequential_Terminal;
+
+   procedure First_Sequential_Terminal
+     (Tree : in     Syntax_Trees.Tree;
+      Ref  : in out Syntax_Trees.Stream_Node_Ref)
+   is begin
+      Tree.First_Terminal (Ref);
+      loop
+         exit when Ref = Invalid_Stream_Node_Ref;
+
+         exit when Ref.Node /= Invalid_Node_Access and then
+           Tree.Get_Sequential_Index (Ref.Node) /= Invalid_Sequential_Index;
+
+         Tree.Next_Terminal (Ref);
+      end loop;
+   end First_Sequential_Terminal;
+
+   function First_Sequential_Terminal
+     (Tree : in Syntax_Trees.Tree;
+      Ref  : in Syntax_Trees.Rooted_Ref)
+     return Terminal_Ref
+   is
+      Ref_Parents : Stream_Node_Parents := (Ref, Parents => <>);
+   begin
+      Tree.First_Sequential_Terminal (Ref_Parents, Following => True);
+      return Ref_Parents.Ref;
+   end First_Sequential_Terminal;
+
+   procedure First_Sequential_Terminal
+     (Tree      : in     Syntax_Trees.Tree;
+      Ref       : in out Syntax_Trees.Stream_Node_Parents;
+      Following : in     Boolean)
+   is begin
+      Tree.First_Terminal (Ref, Following);
+      loop
+         exit when Ref.Ref = Invalid_Stream_Node_Ref;
+
+         exit when Ref.Ref.Node /= Invalid_Node_Access and then
+           Tree.Get_Sequential_Index (Ref.Ref.Node) /= Invalid_Sequential_Index;
+
+         exit when not Following and Ref.Ref.Node = Invalid_Node_Access;
+
+         Tree.Next_Terminal (Ref, Following);
+      end loop;
+   end First_Sequential_Terminal;
+
+   procedure Free_Augmented (Tree : in Syntax_Trees.Tree)
+   is begin
+      for Node of Tree.Nodes loop
+         Free (Node.Augmented);
+      end loop;
+   end Free_Augmented;
+
+   function Following_Deleted
+     (Tree : in out Syntax_Trees.Tree;
+      Node : in     Valid_Node_Access)
+     return Valid_Node_Access_List_Var_Ref
+   is begin
+      return
+        (List  => Node.Following_Deleted'Access,
+         Dummy => 1);
+   end Following_Deleted;
+
+   function Fully_Parsed (Tree : in Syntax_Trees.Tree) return Boolean
+   is begin
+      return Tree.Streams.Length = 2 and then Tree.Stream_Length ((Cur => Tree.Streams.Last)) in 2 .. 3;
+   end Fully_Parsed;
+
+   procedure Get_IDs
+     (Tree   : in     Syntax_Trees.Tree;
+      Node   : in     Valid_Node_Access;
+      ID     : in     Token_ID;
+      Result : in out Valid_Node_Access_Array;
+      Last   : in out SAL.Base_Peek_Type)
+   is begin
+      if Node.ID = ID then
+         Last := Last + 1;
+         Result (Last) := Node;
+      end if;
+      case Node.Label is
+      when Source_Terminal | Virtual_Terminal | Virtual_Identifier =>
+         null;
+      when Nonterm =>
+         for I of Node.Children loop
+            --  Encountering a deleted child here is an error in the user algorithm.
+            Get_IDs (Tree, I, ID, Result, Last);
+         end loop;
+      end case;
+   end Get_IDs;
+
+   function Get_IDs
+     (Tree : in Syntax_Trees.Tree;
+      Node : in Valid_Node_Access;
+      ID   : in Token_ID)
+     return Valid_Node_Access_Array
+   is
+      Last : SAL.Base_Peek_Type := 0;
+   begin
+      return Result : Valid_Node_Access_Array (1 .. Count_IDs (Tree, Node, ID)) := (others => Dummy_Node) do
+         Get_IDs (Tree, Node, ID, Result, Last);
+      end return;
+   end Get_IDs;
+
+   function Get_Node
+     (Element : in Stream_Index)
+     return Valid_Node_Access
+   is begin
+      return Stream_Element_Lists.Element (Element.Cur).Node;
+   end Get_Node;
+
+   function Get_Node
+     (Tree    : in Syntax_Trees.Tree;
+      Stream  : in Stream_ID;
+      Element : in Stream_Index)
+     return Valid_Node_Access
+   is begin
+      return Stream_Element_Lists.Element (Element.Cur).Node;
+   end Get_Node;
+
+   function Get_Node_Index (Node : in Node_Access) return Node_Index
+   is begin
+      return (if Node = Invalid_Node_Access then Invalid_Node_Index else Node.Node_Index);
+   end Get_Node_Index;
+
+   function Get_Node_Index (Tree : in Syntax_Trees.Tree; Node : in Node_Access) return Node_Index
+   is begin
+      return (if Node = Invalid_Node_Access then Invalid_Node_Index else Node.Node_Index);
+   end Get_Node_Index;
+
+   function Get_Node_Index (Element : in Stream_Index) return Node_Index
+   is begin
+      return
+        (if Stream_Element_Lists.Has_Element (Element.Cur)
+         then Stream_Element_Lists.Element (Element.Cur).Node.Node_Index
+         else Invalid_Node_Index);
+   end Get_Node_Index;
+
+   function Get_Node_Index
+     (Tree    : in Syntax_Trees.Tree;
+      Stream  : in Stream_ID;
+      Element : in Stream_Index)
+     return Node_Index
+   is begin
+      return Get_Node_Index (Element);
+   end Get_Node_Index;
+
+   function Get_Recover_Token
+     (Tree : in Syntax_Trees.Tree;
+      Ref  : in Stream_Node_Ref)
+     return Recover_Token
+   is begin
+      return
+        (Virtual      => False,
+         Element_Node =>
+           (if Ref.Element = Invalid_Stream_Index
+            then Invalid_Node_Access
+            else Stream_Element_Lists.Element (Ref.Element.Cur).Node),
+         Node         => Ref.Node);
+   end Get_Recover_Token;
+
+   function Get_Recover_Token
+     (Tree : in Syntax_Trees.Tree;
+      Node : in Valid_Node_Access)
+     return Recover_Token
+   is begin
+      --  Used in McKenzie_Recover.Undo_Reduce, so same value as in Tree.Push (Node)
+      return
+        (Virtual      => False,
+         Element_Node => Node,
+         Node         => Node);
+   end Get_Recover_Token;
+
+   function Get_Sequential_Index (Tree : in Syntax_Trees.Tree; Node : in Node_Access) return Base_Sequential_Index
+   is
+      pragma Unreferenced (Tree);
+   begin
+      return
+        (if Node = Invalid_Node_Access
+         then Invalid_Sequential_Index
+         else Node.Sequential_Index);
+   end Get_Sequential_Index;
+
+   procedure Get_Terminals
+     (Tree   : in     Syntax_Trees.Tree;
+      Node   : in     Valid_Node_Access;
+      Result : in out Valid_Node_Access_Array;
+      Last   : in out SAL.Base_Peek_Type)
+   is begin
+      case Node.Label is
+      when Source_Terminal | Virtual_Terminal | Virtual_Identifier =>
+         Last := Last + 1;
+         Result (Last) := Node;
+
+      when Nonterm =>
+         for C of Node.Children loop
+            --  This is called to build an edited source image while editing the tree
+            if C /= null then
+               Get_Terminals (Tree, C, Result, Last);
+            end if;
+         end loop;
+      end case;
+   end Get_Terminals;
+
+   function Get_Terminals (Tree : in Syntax_Trees.Tree; Node : in Valid_Node_Access) return Valid_Node_Access_Array
+   is
+      Last : SAL.Base_Peek_Type := 0;
+   begin
+      return Result : Valid_Node_Access_Array (1 .. SAL.Base_Peek_Type (Count_Terminals (Tree, Node))) :=
+        (others => Dummy_Node)
+      do
+         Get_Terminals (Tree, Node, Result, Last);
+      end return;
+   end Get_Terminals;
+
+   procedure Get_Terminal_IDs
+     (Tree   : in     Syntax_Trees.Tree;
+      Node   : in     Valid_Node_Access;
+      Result : in out Token_ID_Array;
       Last   : in out SAL.Base_Peek_Type)
+   is begin
+      case Node.Label is
+      when Source_Terminal | Virtual_Terminal | Virtual_Identifier =>
+         Last := Last + 1;
+         Result (Integer (Last)) := Node.ID;
+
+      when Nonterm =>
+         for I of Node.Children loop
+            --  Encountering Deleted_Child here is an error in the user algorithm.
+            Get_Terminal_IDs (Tree, I, Result, Last);
+         end loop;
+      end case;
+   end Get_Terminal_IDs;
+
+   function Get_Terminal_IDs (Tree : in Syntax_Trees.Tree; Node : in Valid_Node_Access) return Token_ID_Array
+   is
+      Last : SAL.Base_Peek_Type := 0;
+   begin
+      return Result : Token_ID_Array (1 .. Count_Terminals (Tree, Node))  do
+         Get_Terminal_IDs (Tree, Node, Result, Last);
+      end return;
+   end Get_Terminal_IDs;
+
+   procedure Get_Tree
+     (Tree      : in out Syntax_Trees.Tree;
+      File_Name : in     String)
+   is
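+      --  Read a tree from File_Name: a node count, then one entry per node
+      --  (children appear before their parents), then stream data (not yet
+      --  implemented here), and finally the root node index.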
+      use Ada.Streams.Stream_IO;
+
+      Delims : constant Ada.Strings.Maps.Character_Set := Ada.Strings.Maps.To_Set (" ()," & ASCII.LF);
+
+      File   : File_Type;
+      Stream : Stream_Access;
+
+      Node_Index_Map : Node_Index_Array_Node_Access.Vector;
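+      --  Maps node indices read from the file to the nodes allocated for
+      --  them, so child and following-deleted references can be resolved.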
+
+      package Node_Index_Lists is new SAL.Gen_Definite_Doubly_Linked_Lists
+        (Node_Index);
+
+      type Delayed_Following_Deleted_Type is record
+         Prev_Terminal : Node_Access;
+         List : Node_Index_Lists.List;
+      end record;
+
+      package Delayed_Following_Deleted_Lists is new SAL.Gen_Definite_Doubly_Linked_Lists
+        (Delayed_Following_Deleted_Type);
+
+      Delayed_Following_Deleted : Delayed_Following_Deleted_Lists.List;
+
+      function Next_Value return String
+      is begin
+         return WisiToken.Next_Value (Stream, Delims);
+      end Next_Value;
+
+      function Input_Buffer_Region return Buffer_Region
+      is begin
+         return Result : Buffer_Region do
+
+            Result.First := Base_Buffer_Pos'Value (Next_Value);
+            Result.Last  := Base_Buffer_Pos'Value (Next_Value);
+         end return;
+      end Input_Buffer_Region;
+
+      function Input_Line_Region return WisiToken.Line_Region
+      is begin
+         return Result : WisiToken.Line_Region do
+
+            Result.First := Line_Number_Type'Value (Next_Value);
+            Result.Last  := Line_Number_Type'Value (Next_Value);
+         end return;
+      end Input_Line_Region;
+
+      function Input_Token return WisiToken.Lexer.Token
+      is begin
+         return Result : WisiToken.Lexer.Token do
+
+            Result.ID          := Token_ID'Value (Next_Value);
+            Result.Byte_Region := Input_Buffer_Region;
+            Result.Char_Region := Input_Buffer_Region;
+            Result.Line_Region := Input_Line_Region;
+         end return;
+      end Input_Token;
+
+      function Input_Non_Grammar return WisiToken.Lexer.Token_Arrays.Vector
+      is
+         Length : constant SAL.Base_Peek_Type := SAL.Base_Peek_Type'Value (Next_Value);
+      begin
+         return Result : WisiToken.Lexer.Token_Arrays.Vector do
+            if Length > 0 then
+               Result.Set_First_Last (1, Length);
+               for I in 1 .. Length loop
+                  Result (I) := Input_Token;
+               end loop;
+            end if;
+         end return;
+      end Input_Non_Grammar;
+
+      function Get_Node_Access return Node_Access
+      is
+         Index : constant Node_Index := Node_Index'Value (Next_Value);
+      begin
+         if Index = Invalid_Node_Index then
+            return Invalid_Node_Access;
+         end if;
+
+         if Index in Node_Index_Map.First_Index .. Node_Index_Map.Last_Index then
+            return Node_Index_Map (Index);
+         else
+            raise SAL.Programmer_Error;
+         end if;
+      end Get_Node_Access;
+
+      function Input_Error_List return Error_List_Access
+      is
+         Length : constant Ada.Containers.Count_Type := Ada.Containers.Count_Type'Value (Next_Value);
+      begin
+         if Length = 0 then
+            return null;
+         else
+            return Result : constant Error_List_Access := new Error_Data_Lists.List do
+               for I in 1 .. Length loop
+                  Result.Append (Error_Data'Class'Input (Stream));
+               end loop;
+            end return;
+         end if;
+      end Input_Error_List;
+
+      procedure Input_Following_Deleted (Node : in Valid_Node_Access)
+      is
+         Length : constant Ada.Containers.Count_Type := Ada.Containers.Count_Type'Value (Next_Value);
+      begin
+         if Length = 0 then
+            return;
+         else
+            --  Following nodes have not been created yet. Need to read node_index
+            --  list, cache it somewhere, finish this after they are read.
+            declare
+               Item : Delayed_Following_Deleted_Type;
+            begin
+               Item.Prev_Terminal := Node;
+               for I in 1 .. Length loop
+                  Item.List.Append (Node_Index'Value (Next_Value));
+               end loop;
+               --  Queue Item so the node indices can be resolved after all
+               --  nodes have been read; see the Delayed_Following_Deleted loop below.
+               Delayed_Following_Deleted.Append (Item);
+            end;
+         end if;
+      end Input_Following_Deleted;
+
+      function Input_Children (Child_Count : in SAL.Base_Peek_Type) return Node_Access_Array
+      is begin
+         --  Child nodes are always created before parent nodes, so
+         --  Get_Node_Access will succeed.
+         return Result : Node_Access_Array (1 .. Child_Count) do
+            for I in Result'Range loop
+               Result (I) := Get_Node_Access;
+            end loop;
+         end return;
+      end Input_Children;
+
+      procedure Input_Node
+      --  Read one node from Stream
+      is begin
+         declare
+            Label       : constant Node_Label              := Node_Label'Value (Next_Value);
+            Child_Count : constant SAL.Base_Peek_Type      := SAL.Base_Peek_Type'Value (Next_Value);
+            ID          : constant Token_ID                := Token_ID'Value (Next_Value);
+            Node_Index  : constant Syntax_Trees.Node_Index := Syntax_Trees.Node_Index'Value (Next_Value);
+            Error_List  : constant Error_List_Access       := Input_Error_List;
+         begin
+            case Label is
+            when Terminal_Label =>
+               declare
+                  Non_Grammar      : constant Lexer.Token_Arrays.Vector := Input_Non_Grammar;
+                  Sequential_Index : constant Base_Sequential_Index     := Base_Sequential_Index'Value (Next_Value);
+               begin
+                  case Terminal_Label'(Label) is
+                  when Source_Terminal =>
+                     pragma Assert
+                       (Child_Count = 0 and
+                          (Node_Index > 0 or else ID = Tree.Lexer.Descriptor.SOI_ID));
+                     declare
+                        Byte_Region    : constant Buffer_Region         := Input_Buffer_Region;
+                        Char_Region    : constant Buffer_Region         := Input_Buffer_Region;
+                        New_Line_Count : constant Base_Line_Number_Type := Base_Line_Number_Type'Value
+                          (Next_Value);
+
+                        New_Node : constant Valid_Node_Access := new Node'
+                          (Label             => Source_Terminal,
+                           Copied_Node       => Invalid_Node_Access,
+                           Parent            => Invalid_Node_Access,
+                           Augmented         => null,
+                           Child_Count       => 0,
+                           ID                => ID,
+                           Node_Index        => Node_Index,
+                           Error_List        => Error_List,
+                           Non_Grammar       => Non_Grammar,
+                           Sequential_Index  => Sequential_Index,
+                           Byte_Region       => Byte_Region,
+                           Char_Region       => Char_Region,
+                           New_Line_Count    => New_Line_Count,
+                           Following_Deleted => Valid_Node_Access_Lists.Empty_List);
+                     begin
+                        Input_Following_Deleted (New_Node);
+
+                        Tree.Nodes.Append (New_Node);
+                        Node_Index_Map.Extend (Node_Index);
+                        Node_Index_Map (Node_Index) := New_Node;
+                        if New_Node.ID = Tree.Lexer.Descriptor.EOI_ID then
+                           Tree.EOI := New_Node;
+                        elsif New_Node.ID = Tree.Lexer.Descriptor.SOI_ID then
+                           Tree.SOI := New_Node;
+                        end if;
+                     end;
+
+                  when Virtual_Terminal =>
+                     pragma Assert (Child_Count = 0 and Node_Index < 0);
+                     declare
+                        Insert_Location : constant WisiToken.Insert_Location := WisiToken.Insert_Location'Value
+                          (Next_Value);
+                        New_Node        : constant Valid_Node_Access         := new Node'
+                          (Label            => Virtual_Terminal,
+                           Copied_Node      => Invalid_Node_Access,
+                           Child_Count      => 0,
+                           ID               => ID,
+                           Node_Index       => Node_Index,
+                           Parent           => Invalid_Node_Access,
+                           Augmented        => null,
+                           Error_List       => Error_List,
+                           Non_Grammar      => Non_Grammar,
+                           Sequential_Index => Sequential_Index,
+                           Insert_Location  => Insert_Location);
+                     begin
+                        Tree.Nodes.Append (New_Node);
+                        Node_Index_Map.Extend (Node_Index);
+                        Node_Index_Map (Node_Index) := New_Node;
+                     end;
+
+                  when Virtual_Identifier =>
+                     raise SAL.Programmer_Error;
+
+                  end case;
+               end;
+
+            when Nonterm =>
+               pragma Assert (Node_Index < 0);
+               declare
+                  Virtual          : constant Boolean           := Boolean'Value (Next_Value);
+                  Recover_Conflict : constant Boolean           := Boolean'Value (Next_Value);
+                  RHS_Index        : constant Natural           := Natural'Value (Next_Value);
+                  Name_Offset      : constant Base_Buffer_Pos   := Base_Buffer_Pos'Value (Next_Value);
+                  Name_Length      : constant Base_Buffer_Pos   := Base_Buffer_Pos'Value (Next_Value);
+                  Children         : constant Node_Access_Array := Input_Children (Child_Count);
+                  New_Node         : constant Valid_Node_Access := new Node'
+                    (Label            => Nonterm,
+                     Copied_Node      => Invalid_Node_Access,
+                     Child_Count      => Child_Count,
+                     ID               => ID,
+                     Node_Index       => Node_Index,
+                     Parent           => Invalid_Node_Access,
+                     Augmented        => null,
+                     Error_List       => Error_List,
+                     Virtual          => Virtual,
+                     Recover_Conflict => Recover_Conflict,
+                     RHS_Index        => RHS_Index,
+                     Name_Offset      => Name_Offset,
+                     Name_Length      => Name_Length,
+                     Children         => Children);
+               begin
+                  Tree.Nodes.Append (New_Node);
+                  Node_Index_Map.Extend (Node_Index);
+                  Node_Index_Map (Node_Index) := New_Node;
+
+                  for Child of Children loop
+                     Child.Parent := New_Node;
+                  end loop;
+               end;
+            end case;
+         end;
+      end Input_Node;
+
+   begin
+      Open (File, In_File, File_Name);
+      Stream := Ada.Streams.Stream_IO.Stream (File);
+
+      declare
+         Node_Count : constant Positive_Node_Index := Positive_Node_Index'Value (Next_Value);
+      begin
+         for I in 1 .. Node_Count loop
+            Input_Node;
+         end loop;
+      end;
+
+      for Item of Delayed_Following_Deleted loop
+         for Index of Item.List loop
+            Item.Prev_Terminal.Following_Deleted.Append (Node_Index_Map (Index));
+         end loop;
+      end loop;
+
+      declare
+         Streams_Length : constant Ada.Containers.Count_Type := Ada.Containers.Count_Type'Value (Next_Value);
+      begin
+         for I in 1 .. Streams_Length loop
+            raise SAL.Not_Implemented with "get_tree: streams";
+         end loop;
+      end;
+
+      Tree.Set_Root (Get_Node_Access);
+
+      Close (File);
+
+      if Tree.Streams.Length > 0 then
+         for Stream_Cur in Tree.Streams.Iterate loop
+            for Err_Cur in Tree.Stream_Error_Iterate ((Cur => Stream_Cur)) loop
+               Error_Data_Lists.Variable_Ref (Err_Cur.SER.Error).Set_Node_Access (Node_Index_Map);
+            end loop;
+         end loop;
+      else
+         Tree.Set_Parents;
+         for Err in Tree.Error_Iterate loop
+            Error_Data_Lists.Variable_Ref (Err.Error).Set_Node_Access (Node_Index_Map);
+         end loop;
+      end if;
+   end Get_Tree;
+
+   function In_Tree
+     (Tree  : in Syntax_Trees.Tree;
+      Node  : in Valid_Node_Access)
+     return Boolean
+   is begin
+      return (for some N of Tree.Nodes => N = Node);
+   end In_Tree;
+
+   function Has_Child
+     (Tree  : in Syntax_Trees.Tree;
+      Node  : in Valid_Node_Access;
+      Child : in Valid_Node_Access)
+     return Boolean
+   is begin
+      for C of Node.Children loop
+         if C = Child then
+            return True;
+         end if;
+      end loop;
+      return False;
+   end Has_Child;
+
+   function Has_Children (Tree : in Syntax_Trees.Tree; Node : in Valid_Node_Access) return Boolean
+   is begin
+      return Node.Children'Length > 0;
+   end Has_Children;
+
+   function Has_Error (Node : in Valid_Node_Access) return Boolean
+   is begin
+      return Node.Error_List /= null;
+   end Has_Error;
+
+   function Has_Error (Error : in Error_Ref) return Boolean
+   is begin
+      if Error.Node = Invalid_Node_Access then
+         return False;
+
+      elsif Valid_Node_Access_Lists.Has_Element (Error.Deleted) then
+         return True;
+
+      else
+         return Error.Node.Error_List /= null;
+      end if;
+   end Has_Error;
+
+   function Has_Error (Error : in Stream_Error_Ref) return Boolean
+   is begin
+      if Error.Ref.Ref.Node = Invalid_Node_Access then
+         return False;
+
+      elsif Valid_Node_Access_Lists.Has_Element (Error.Deleted) then
+         return True;
+
+      else
+         return Error.Ref.Ref.Node.Error_List /= null;
+      end if;
+   end Has_Error;
+
+   function Has_Error (Position : in Stream_Error_Cursor) return Boolean
+   is begin
+      return Has_Error (Position.SER);
+   end Has_Error;
+
+   function Has_Error (Tree : in Syntax_Trees.Tree; Node : in Valid_Node_Access) return Boolean
+   is
+      pragma Unreferenced (Tree);
+   begin
+      return Node.Error_List /= null;
+   end Has_Error;
+
+   function Has_Error_Class
+     (Tree        : in Syntax_Trees.Tree;
+      Node        : in Valid_Node_Access;
+      Error_Class : in Error_Data'Class)
+     return Error_Ref
+   is
+      use all type Error_Data_Lists.Cursor;
+      use all type Ada.Tags.Tag;
+   begin
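+      --  Return a reference to the first error on Node whose specific type
+      --  matches Error_Class; Invalid_Error_Ref if Node has no such error.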
+      if Node.Error_List /= null then
+         declare
+            Result : Error_Ref :=
+              (Node    => Node,
+               Deleted => Valid_Node_Access_Lists.No_Element,
+               Error   => Node.Error_List.First);
+         begin
+            loop
+               if Node.Error_List.Constant_Reference (Result.Error).Element.all'Tag = Error_Class'Tag then
+                  return Result;
+               end if;
+               Error_Data_Lists.Next (Result.Error);
+               exit when Result.Error = Error_Data_Lists.No_Element;
+            end loop;
+         end;
+      end if;
+      return Invalid_Error_Ref;
+   end Has_Error_Class;
+
+   function Has_Errors (Tree : in Syntax_Trees.Tree) return Boolean
+   is begin
+      if Tree.Parents_Set then
+         return Tree.First_Error /= Invalid_Error_Ref;
+      else
+         for Cur in Tree.Streams.Iterate loop
+            if Tree.First_Error ((Cur => Cur)) /= Invalid_Stream_Error_Ref then
+               return True;
+            end if;
+         end loop;
+         return False;
+      end if;
+   end Has_Errors;
+
+   function Has_Following_Deleted
+     (Tree : in Syntax_Trees.Tree;
+      Node : in Valid_Node_Access)
+     return Boolean
+   is begin
+      return Node.Following_Deleted.Length > 0;
+   end Has_Following_Deleted;
+
+   function Has_Input
+     (Tree   : in Syntax_Trees.Tree;
+      Stream : in Stream_ID)
+     return Boolean
+   is
+      Parse_Stream : Syntax_Trees.Parse_Stream renames Tree.Streams (Stream.Cur);
+   begin
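+      --  True when there are elements after Stack_Top; that is, Stream has
+      --  pending input that has not yet been shifted.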
+      return Parse_Stream.Stack_Top /= Parse_Stream.Elements.Last;
+   end Has_Input;
+
+   function Has_Non_Grammar
+     (Tree     : in Syntax_Trees.Tree;
+      Terminal : in Valid_Node_Access)
+     return Boolean
+   is
+      pragma Unreferenced (Tree);
+   begin
+      return Terminal.Non_Grammar.Length > 0;
+   end Has_Non_Grammar;
+
+   function Has_Parent (Tree : in Syntax_Trees.Tree; Child : in Valid_Node_Access) return Boolean
+   is
+      pragma Unreferenced (Tree);
+   begin
+      return Child.Parent /= Invalid_Node_Access;
+   end Has_Parent;
+
+   function ID
+     (Tree : in Syntax_Trees.Tree;
+      Node : in Valid_Node_Access)
+     return Token_ID
+   is
+      pragma Unreferenced (Tree);
+   begin
+      return Node.ID;
+   end ID;
+
+   function ID
+     (Tree    : in Syntax_Trees.Tree;
+      Stream  : in Stream_ID;
+      Element : in Stream_Index)
+     return Token_ID
+   is
+      pragma Unreferenced (Tree, Stream);
+   begin
+      return Stream_Element_Lists.Element (Element.Cur).Node.ID;
+   end ID;
+
+   function ID
+     (Tree : in Syntax_Trees.Tree;
+      Ref  : in Stream_Node_Ref)
+     return WisiToken.Token_ID
+   is
+      pragma Unreferenced (Tree);
+   begin
+      if Ref.Node /= Invalid_Node_Access then
+         return Ref.Node.ID;
+      elsif Ref.Element /= Invalid_Stream_Index then
+         return Stream_Element_Lists.Element (Ref.Element.Cur).Node.ID;
+      else
+         return Invalid_Token_ID;
+      end if;
+   end ID;
+
+   function Identifier (Tree : in Syntax_Trees.Tree; Node : in Valid_Node_Access) return Base_Identifier_Index
+   is
+      pragma Unreferenced (Tree);
+   begin
+      return Node.Identifier;
+   end Identifier;
+
+   function Image
+     (Tree : in Syntax_Trees.Tree;
+      Item : in Recover_Token)
+     return String
+   is begin
+      if Item.Virtual then
+         return "(" & Image (Item.ID, Tree.Lexer.Descriptor.all) & ")";
+      else
+         return "(" & Image (Tree, Item.Element_Node, Node_Numbers => True) &
+           (if Item.Element_Node = Item.Node
+            then ""
+            else ", " & Image (Tree, Item.Node, Terminal_Node_Numbers => True)) & ")";
+      end if;
+   end Image;
+
+   function Image
+     (Tree          : in Syntax_Trees.Tree;
+      Stream        : in Parse_Stream;
+      Stack         : in Boolean := True;
+      Input         : in Boolean := True;
+      Shared        : in Boolean := False;
+      Children      : in Boolean := False;
+      Node_Numbers  : in Boolean := False;
+      Non_Grammar   : in Boolean := False;
+      Augmented     : in Boolean := False;
+      Line_Numbers  : in Boolean := False;
+      State_Numbers : in Boolean := True)
+     return String
+   is
+      --  stack ^ stack_top input / shared
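+      --  That is: the stack elements with '^' marking the element at
+      --  Stack_Top, then the input elements, then '/' followed by the shared
+      --  stream elements when Shared is True.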
+      use Ada.Strings.Unbounded;
+      use Stream_Element_Lists;
+      Result     : Unbounded_String := +"(" & Trimmed_Image (Stream.Label) & ", ";
+      Element    : Cursor           :=
+        (if Stack or Stream.Stack_Top = No_Element
+         then Stream.Elements.First
+         else Next (Stream.Stack_Top));
+      Need_Comma           : Boolean      := False;
+      Current_Stream_Label : Stream_Label := Stream.Label;
+   begin
+      loop
+         if not Has_Element (Element) then
+            if Shared and Current_Stream_Label /= Shared_Stream_Label then
+               if not Has_Element (Stream.Shared_Link) then
+                  exit;
+               else
+                  Current_Stream_Label := Shared_Stream_Label;
+                  Element              := Stream.Shared_Link;
+
+                  Result := @ & "/";
+               end if;
+            else
+               exit;
+            end if;
+         end if;
+
+         if Need_Comma then
+            Result := @ & (if Children then "," & ASCII.LF else ", ");
+         else
+            Need_Comma := True;
+         end if;
+
+         Result := @ &
+           (if Stream.Stack_Top = Element then "^(" else "(") &
+           (if State_Numbers
+            then Trimmed_Image (Stream_Element_Lists.Element (Element).State)
+            else "--") & ", " &
+           (if Children
+            then Tree.Subtree_Image
+              (Stream_Element_Lists.Element (Element).Node, Node_Numbers, Non_Grammar, Augmented, Line_Numbers)
+            else Tree.Image
+              (Stream_Element_Lists.Element (Element).Node,
+               Children              => False,
+               RHS_Index             => False,
+               Node_Numbers          => Node_Numbers,
+               Terminal_Node_Numbers => True,
+               Non_Grammar           => Non_Grammar,
+               Augmented             => Augmented,
+               Line_Numbers          => Line_Numbers))
+           & ")";
+
+         if not Input then
+            exit when Element = Stream.Stack_Top;
+         end if;
+
+         Element := Next (Element);
+      end loop;
+      Result := @ & ")";
+      return -Result;
+   end Image;
+
+   function Image
+     (Tree         : in Syntax_Trees.Tree;
+      Children     : in Boolean     := False;
+      Non_Grammar  : in Boolean     := False;
+      Augmented    : in Boolean     := False;
+      Line_Numbers : in Boolean     := False;
+      Root         : in Node_Access := Invalid_Node_Access)
+     return String
+   is begin
+      if Root /= Invalid_Node_Access then
+         --  Assuming children = true in this case.
+         return Subtree_Image (Tree, Root, Non_Grammar, Augmented, Line_Numbers);
+
+      elsif Tree.Streams.Length = 0 then
+         if Tree.Root = Invalid_Node_Access then
+            return "invalid_tree: no streams, Tree.Root not set";
+         else
+            --  Assuming children = true in this case.
+            return Subtree_Image
+              (Tree, Tree.Root,
+               Non_Grammar  => Non_Grammar,
+               Augmented    => Augmented,
+               Line_Numbers => Line_Numbers);
+         end if;
+      else
+         declare
+            use Ada.Strings.Unbounded;
+            Result : Unbounded_String;
+            Need_New_Line : Boolean := False;
+         begin
+            for Stream of Tree.Streams loop
+               if Need_New_Line then
+                  Result := @ & ASCII.LF;
+               else
+                  Need_New_Line := True;
+               end if;
+               Result := @ & Image
+                 (Tree, Stream, Children,
+                  Non_Grammar  => Non_Grammar,
+                  Augmented    => Augmented,
+                  Line_Numbers => Line_Numbers);
+            end loop;
+            return -Result;
+         end;
+      end if;
+   end Image;
+
+   function Image
+     (Tree          : in Syntax_Trees.Tree;
+      Stream        : in Stream_ID;
+      Stack         : in Boolean := True;
+      Input         : in Boolean := True;
+      Shared        : in Boolean := False;
+      Children      : in Boolean := False;
+      Node_Numbers  : in Boolean := True;
+      Non_Grammar   : in Boolean := False;
+      Augmented     : in Boolean := False;
+      Line_Numbers  : in Boolean := False;
+      State_Numbers : in Boolean := True)
+     return String
+   is begin
+      return Image
+        (Tree, Tree.Streams (Stream.Cur), Stack, Input, Shared,
+         Children, Node_Numbers, Non_Grammar, Augmented, Line_Numbers, State_Numbers);
+   end Image;
+
+   function Image
+     (Tree                  : in Syntax_Trees.Tree;
+      Element               : in Stream_Index;
+      State                 : in Boolean := False;
+      Children              : in Boolean := False;
+      RHS_Index             : in Boolean := False;
+      Node_Numbers          : in Boolean := False;
+      Terminal_Node_Numbers : in Boolean := False;
+      Line_Numbers          : in Boolean := False;
+      Non_Grammar           : in Boolean := False;
+      Augmented             : in Boolean := False;
+      Expecting             : in Boolean := False)
+     return String
+   is begin
+      if Element.Cur = Stream_Element_Lists.No_Element then
+         return "<null>";
+      else
+         declare
+            El : Stream_Element renames Stream_Element_Lists.Element (Element.Cur);
+         begin
+            return
+              (if State
+               then "(" & Trimmed_Image (El.State) & ", "
+               else "") &
+              Image
+                (Tree, El.Node, Children,
+                 RHS_Index, Node_Numbers, Terminal_Node_Numbers,
+                 Line_Numbers => Line_Numbers,
+                 Non_Grammar  => Non_Grammar,
+                 Augmented    => Augmented,
+                 Expecting    => Expecting) &
+              (if State then ")" else "");
+         end;
+      end if;
+   end Image;
+
+   function Image
+     (Tree                  : in Syntax_Trees.Tree;
+      Node                  : in Node_Access;
+      Children              : in Boolean := False;
+      RHS_Index             : in Boolean := False;
+      Node_Numbers          : in Boolean := False;
+      Terminal_Node_Numbers : in Boolean := False;
+      Line_Numbers          : in Boolean := False;
+      Non_Grammar           : in Boolean := False;
+      Augmented             : in Boolean := False;
+      Expecting             : in Boolean := False;
+      Safe_Only             : in Boolean := False)
+     return String
+   is
+      use Ada.Strings.Unbounded;
+   begin
+      if Node = null then
+         return "<null>";
+      else
+         declare
+            Result : Unbounded_String;
+            Node_Byte_Region : constant Buffer_Region :=
+              (if Safe_Only
+               then Null_Buffer_Region
+               else Tree.Byte_Region (Node, Trailing_Non_Grammar => False));
+         begin
+            if Node.Label in Terminal_Label and then Node.Sequential_Index /= Invalid_Sequential_Index then
+               Append (Result, Trimmed_Image (Node.Sequential_Index) & ";");
+            end if;
+
+            if Node_Numbers then
+               Append (Result, Trimmed_Image (Node.Node_Index) & ":");
+
+            elsif Terminal_Node_Numbers then
+               Append
+                 (Result,
+                  (case Node.Label is
+                   when Source_Terminal    => Trimmed_Image (Node.Node_Index) & ":",
+                   when Virtual_Terminal   => Trimmed_Image (Node.Node_Index) & ":",
+                   when Virtual_Identifier => Trimmed_Image (Node.Identifier) & ":",
+                   when Nonterm            => ""));
+            end if;
+
+            Append (Result, "(" & Image (Node.ID, Tree.Lexer.Descriptor.all));
+            Append (Result, (if RHS_Index and Node.Label = Nonterm then "_" & Trimmed_Image (Node.RHS_Index) else ""));
+
+            if Node_Byte_Region /= Null_Buffer_Region then
+               Append (Result, ", " & Image (Node_Byte_Region));
+            end if;
+
+            if (Line_Numbers and Tree.Editable and not Safe_Only) and then
+              Tree.Line_Region (Node, Trailing_Non_Grammar => True) /=
+              Null_Line_Region
+            then
+               Append (Result, ", " & Image (Tree.Line_Region (Node, Trailing_Non_Grammar => True)));
+            end if;
+
+            if not Safe_Only and Children and Node.Label = Nonterm then
+               Result := @ & " <= " & Image
+                 (Tree, Node.Children, RHS_Index, Node_Numbers, Terminal_Node_Numbers, Non_Grammar, Augmented);
+            end if;
+
+            if (Non_Grammar and Node.Label in Terminal_Label) and then Node.Non_Grammar.Length > 0 then
+               Result := @ & "(";
+               for Token of Node.Non_Grammar loop
+                  Result := @ & "(";
+                  Result := @ & Image (Token.ID, Tree.Lexer.Descriptor.all) & ", ";
+                  Result := @ & Image (Token.Byte_Region) & ", ";
+                  Result := @ & Image (Token.Line_Region);
+                  Result := @ & ")";
+               end loop;
+               Result := @ & ")";
+            end if;
+
+            if Node.Augmented /= null and Augmented then
+               Result := @ & Image_Augmented (Node.Augmented.all);
+            end if;
+
+            if (Node_Numbers and Node.Label = Nonterm) and then Node.Recover_Conflict then
+               Append (Result, " recover_conflict");
+            end if;
+
+            if Node.Error_List /= null then
+               if Expecting then
+                  for Err of Node.Error_List.all loop
+                     Append (Result, ASCII.LF & "   ERROR: " & Err.Image (Tree, Node));
+                  end loop;
+               else
+                  for Err of Node.Error_List.all loop
+                     Append (Result, ", " & Err.Class_Image & " ERROR");
+                  end loop;
+               end if;
+            end if;
+
+            if Node.Label = Source_Terminal and then Node.Following_Deleted.Length > 0 then
+               Append
+                 (Result,
+                  (if Children then ASCII.LF & "  " else "") & " deleted: " &
+                    Tree.Image (Node.Following_Deleted));
+            end if;
+
+            Append (Result, ")");
+
+            return -Result;
+         end;
+      end if;
+   exception
+   when E : others =>
+      --  Tolerate corrupt tree, for debugging.
+      if Debug_Mode then
+         Tree.Lexer.Trace.Put_Line
+           ("corrupt tree; " & Ada.Exceptions.Exception_Name (E) & ":" &
+              Ada.Exceptions.Exception_Message (E));
+         Tree.Lexer.Trace.Put_Line (GNAT.Traceback.Symbolic.Symbolic_Traceback (E));
+      end if;
+      return Node.Node_Index'Image & ": corrupt tree; " & Ada.Exceptions.Exception_Name (E) & ":" &
+        Ada.Exceptions.Exception_Message (E);
+   end Image;
+
+   function Image
+     (Tree                  : in Syntax_Trees.Tree;
+      Nodes                 : in Node_Access_Array;
+      RHS_Index             : in Boolean := False;
+      Node_Numbers          : in Boolean := False;
+      Terminal_Node_Numbers : in Boolean := False;
+      Line_Numbers          : in Boolean := False;
+      Non_Grammar           : in Boolean := False;
+      Augmented             : in Boolean := False)
+     return String
+   is
+      use Ada.Strings.Unbounded;
+      Result     : Unbounded_String := +"(";
+      Need_Comma : Boolean := False;
+   begin
+      for I in Nodes'Range loop
+         Result := Result & (if Need_Comma then ", " else "") &
+           (if Nodes (I) = null then " - "
+            else Tree.Image
+              (Nodes (I),
+               RHS_Index             => RHS_Index,
+               Node_Numbers          => Node_Numbers,
+               Terminal_Node_Numbers => Terminal_Node_Numbers,
+               Line_Numbers          => Line_Numbers,
+               Non_Grammar           => Non_Grammar,
+               Augmented             => Augmented));
+         Need_Comma := True;
+      end loop;
+      Result := Result & ")";
+      return -Result;
+   end Image;
+
+   function Image
+     (Tree                  : in Syntax_Trees.Tree;
+      Ref                   : in Stream_Node_Ref;
+      First_Terminal        : in Boolean := False;
+      Node_Numbers          : in Boolean := False;
+      Terminal_Node_Numbers : in Boolean := False;
+      Line_Numbers          : in Boolean := False;
+      Non_Grammar           : in Boolean := False;
+      Augmented             : in Boolean := False;
+      Expecting             : in Boolean := False)
+     return String
+   is
+      use Stream_Element_Lists;
+   begin
+      if Ref.Element.Cur /= No_Element then
+         declare
+            Element_Node : constant Valid_Node_Access := Element (Ref.Element.Cur).Node;
+         begin
+            return "(" & Trimmed_Image (Tree.Streams (Ref.Stream.Cur).Label) & ", " &
+              Image
+                (Tree, Ref.Element,
+                 Node_Numbers           => Node_Numbers,
+                 Terminal_Node_Numbers  => Terminal_Node_Numbers,
+                 Line_Numbers           => Line_Numbers,
+                 Non_Grammar            => Non_Grammar,
+                 Augmented              => Augmented,
+                 Expecting              => Expecting) &
+              (if Ref.Node = Invalid_Node_Access or Element_Node.Label in Terminal_Label
+               then ""
+               elsif Element_Node.Label = Nonterm and Element_Node = Ref.Node and First_Terminal
+               then ", " & Image
+                 (Tree,
+                  Tree.First_Terminal (Ref.Node),
+                  Node_Numbers          => Node_Numbers,
+                  Terminal_Node_Numbers => Terminal_Node_Numbers,
+                  Line_Numbers          => Line_Numbers,
+                  Non_Grammar           => Non_Grammar,
+                  Augmented             => Augmented,
+                  Expecting             => Expecting)
+
+               else ", " & Image
+                 (Tree,
+                  Ref.Node,
+                  Node_Numbers          => Node_Numbers,
+                  Terminal_Node_Numbers => True,
+                  Line_Numbers          => Line_Numbers,
+                  Non_Grammar           => Non_Grammar,
+                  Augmented             => Augmented)) & ")";
+         end;
+      elsif Ref.Node /= Invalid_Node_Access then
+         return "(" & Image
+           (Tree, Ref.Node,
+            Terminal_Node_Numbers => True,
+            Line_Numbers          => Line_Numbers,
+            Non_Grammar           => Non_Grammar) & ")";
+      else
+         return "()";
+      end if;
+   end Image;
+
+   function Input_Has_Matching_Error
+     (Tree   : in Syntax_Trees.Tree;
+      Stream : in Stream_ID;
+      Data   : in Error_Data'Class)
+     return Boolean
+   is
+      use Stream_Element_Lists;
+
+      Parse_Stream : Syntax_Trees.Parse_Stream renames Tree.Streams (Stream.Cur);
+      Error_Ref : Stream_Node_Parents := Tree.To_Stream_Node_Parents
+        (if Parse_Stream.Stack_Top = Parse_Stream.Elements.Last
+         then (Tree.Shared_Stream,
+               (Cur => Parse_Stream.Shared_Link),
+               Element (Parse_Stream.Shared_Link).Node)
+         else (Stream,
+               (Cur => Next (Parse_Stream.Stack_Top)),
+               Element (Next (Parse_Stream.Stack_Top)).Node));
+   begin
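+      --  True if the first terminal of the next input element on Stream (or
+      --  of the next shared stream element, when all input has been shifted)
+      --  already carries an error equal to Data.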
+      Tree.First_Terminal (Error_Ref, Following => False);
+
+      if Error_Ref.Ref.Node = Invalid_Node_Access then
+         --  Empty nonterm
+         return False;
+      end if;
+
+      if Error_Ref.Ref.Node.Error_List = null then
+         return False;
+      else
+         return (for some Err of Error_Ref.Ref.Node.Error_List.all => Dispatch_Equal (Err, Data));
+      end if;
+   end Input_Has_Matching_Error;
+
+   function Insert_After
+     (User_Data           : in out User_Data_Type;
+      Tree                : in     Syntax_Trees.Tree'Class;
+      Insert_Token        : in     Valid_Node_Access;
+      Insert_Before_Token : in     Valid_Node_Access;
+      Comment_Present     : in     Boolean;
+      Blank_Line_Present  : in     Boolean)
+     return Insert_Location
+   is
+      pragma Unreferenced (User_Data, Tree, Insert_Token, Insert_Before_Token, Comment_Present, Blank_Line_Present);
+   begin
+      return Before_Next;
+   end Insert_After;
+
+   procedure Insert_Source_Terminal
+     (Tree     : in out Syntax_Trees.Tree;
+      Stream   : in     Stream_ID;
+      Terminal : in     WisiToken.Lexer.Token;
+      Before   : in     Stream_Index;
+      Errors   : in     Error_Data_Lists.List)
+   is
+      New_Node : constant Valid_Node_Access := Add_Source_Terminal_1
+        (Tree, Terminal,
+         In_Shared_Stream => Stream = Tree.Shared_Stream,
+         Errors => Errors);
+   begin
+      Insert_Stream_Element (Tree, Stream, New_Node, Before => Before.Cur);
+   end Insert_Source_Terminal;
+
+   function Insert_Source_Terminal
+     (Tree     : in out Syntax_Trees.Tree;
+      Stream   : in     Stream_ID;
+      Terminal : in     WisiToken.Lexer.Token;
+      Before   : in     Stream_Index;
+      Errors   : in     Error_Data_Lists.List)
+     return Single_Terminal_Ref
+   is
+      New_Node : constant Valid_Node_Access := Add_Source_Terminal_1
+        (Tree, Terminal,
+         In_Shared_Stream => Stream = Tree.Shared_Stream,
+         Errors => Errors);
+   begin
+      return Insert_Stream_Element (Tree, Stream, New_Node, Before => Before.Cur);
+   end Insert_Source_Terminal;
+
+   procedure Insert_Stream_Element
+     (Tree   : in out Syntax_Trees.Tree;
+      Stream : in     Stream_ID;
+      Node   : in     Valid_Node_Access;
+      Before : in     Stream_Element_Lists.Cursor := Stream_Element_Lists.No_Element)
+   is
+      use Stream_Element_Lists;
+
+      Parse_Stream : Syntax_Trees.Parse_Stream renames Tree.Streams (Stream.Cur);
+   begin
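+      --  If Before is not given, insert just after Stack_Top, so the new
+      --  node becomes the next input element on Stream.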
+      Parse_Stream.Elements.Insert
+        (Element  =>
+           (Node  => Node,
+            State => Unknown_State),
+         Before   =>
+           (if Before /= No_Element
+            then Before
+            else
+              (if Parse_Stream.Stack_Top = No_Element
+               then No_Element
+               else Next (Parse_Stream.Stack_Top))));
+   end Insert_Stream_Element;
+
+   function Insert_Stream_Element
+     (Tree   : in out Syntax_Trees.Tree;
+      Stream : in     Stream_ID;
+      Node   : in     Valid_Node_Access;
+      Before : in     Stream_Element_Lists.Cursor := Stream_Element_Lists.No_Element)
+     return Rooted_Ref
+   is
+      use Stream_Element_Lists;
+
+      Parse_Stream : Syntax_Trees.Parse_Stream renames Tree.Streams (Stream.Cur);
+      New_Element  : constant Cursor := Parse_Stream.Elements.Insert
+        (Element  =>
+           (Node  => Node,
+            State => Unknown_State),
+         Before   =>
+           (if Before /= No_Element
+            then Before
+            else
+              (if Parse_Stream.Stack_Top = No_Element
+               then No_Element
+               else Next (Parse_Stream.Stack_Top))));
+   begin
+      return (Stream, (Cur => New_Element), Node);
+   end Insert_Stream_Element;
+
+   function Insert_Virtual_Terminal
+     (Tree     : in out Syntax_Trees.Tree;
+      Stream   : in     Stream_ID;
+      Terminal : in     Token_ID)
+     return Single_Terminal_Ref
+   is
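+      --  Virtual nodes get negative node indices, so they are never confused
+      --  with source terminals from the lexer.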
+      New_Node : constant Node_Access := new Node'
+        (Label       => Virtual_Terminal,
+         Child_Count => 0,
+         ID          => Terminal,
+         Node_Index  => -(Tree.Nodes.Last_Index + 1),
+         others      => <>);
+   begin
+      Tree.Nodes.Append (New_Node);
+      return Insert_Stream_Element (Tree, Stream, New_Node);
+   end Insert_Virtual_Terminal;
+
+   function Is_Descendant_Of
+     (Tree       : in Syntax_Trees.Tree;
+      Root       : in Valid_Node_Access;
+      Descendant : in Valid_Node_Access)
+     return Boolean
+   is
+      Node : Node_Access := Descendant;
+   begin
+      loop
+         exit when Node = Invalid_Node_Access;
+         if Node = Root then
+            return True;
+         end if;
+
+         Node := Tree.Parent (Node);
+      end loop;
+      return False;
+   end Is_Descendant_Of;
+
+   function Is_Empty (Tree : in Syntax_Trees.Tree) return Boolean
+   is begin
+      return Tree.Streams.Length = 0 and Tree.Root = Invalid_Node_Access;
+   end Is_Empty;
+
+   function Is_Empty_Nonterm
+     (Tree : in Syntax_Trees.Tree;
+      Item : in Recover_Token)
+     return Boolean
+   is begin
+      return
+        (case Item.Virtual is
+         when True => Is_Nonterminal (Item.ID, Tree.Lexer.Descriptor.all) and
+           Item.First_Terminal = Invalid_Node_Access,
+         when False => Item.Node /= Invalid_Node_Access and then
+           Tree.Is_Empty_Nonterm (Item.Node));
+   end Is_Empty_Nonterm;
+
+   function Is_Empty_Nonterm
+     (Tree : in Syntax_Trees.Tree;
+      Node : in Valid_Node_Access)
+     return Boolean
+   is begin
+      return Node.Label = Nonterm and then Tree.First_Terminal (Node) = Invalid_Node_Access;
+   end Is_Empty_Nonterm;
+
+   function Is_Empty_Or_Virtual_Nonterm
+     (Tree : in Syntax_Trees.Tree;
+      Node : in Valid_Node_Access)
+     return Boolean
+   is begin
+      return Node.Label = Nonterm and then
+        (Tree.First_Terminal (Node) = Invalid_Node_Access or else -- no terminals
+           Length (Tree.Byte_Region (Node, Trailing_Non_Grammar => False)) = 0 -- all terminals are virtual
+        );
+   end Is_Empty_Or_Virtual_Nonterm;
+
+   function Is_Nonterm (Tree : in Syntax_Trees.Tree; Node : in Valid_Node_Access) return Boolean
+   is
+      pragma Unreferenced (Tree);
+   begin
+      return Node.Label = Nonterm;
+   end Is_Nonterm;
+
+   function Is_Optimized_List
+     (Productions : in Production_Info_Trees.Vector;
+      ID          : in Token_ID)
+     return Boolean
+   is begin
+      if Productions.Is_Empty then
+         return False;
+      else
+         return Productions (ID).Optimized_List;
+      end if;
+   end Is_Optimized_List;
+
+   function Is_Source_Terminal (Tree : in Syntax_Trees.Tree; Node : in Valid_Node_Access) return Boolean
+   is
+      pragma Unreferenced (Tree);
+   begin
+      return Node.Label = Source_Terminal;
+   end Is_Source_Terminal;
+
+   function Is_Terminal (Tree : in Syntax_Trees.Tree; Node : in Valid_Node_Access) return Boolean
+   is begin
+      return Tree.Label (Node) in Terminal_Label;
+   end Is_Terminal;
+
+   function Is_Valid (Tree : in Syntax_Trees.Tree; Stream : in Stream_ID) return Boolean
+   is
+      pragma Unreferenced (Tree);
+   begin
+      return Parse_Stream_Lists.Has_Element (Stream.Cur);
+   end Is_Valid;
+
+   function Is_Virtual_Terminal (Tree : in Syntax_Trees.Tree; Node : in Valid_Node_Access) return Boolean
+   is
+      pragma Unreferenced (Tree);
+   begin
+      return Node.Label = Virtual_Terminal;
+   end Is_Virtual_Terminal;
+
+   function Is_Virtual_Identifier (Tree : in Syntax_Trees.Tree; Node : in Valid_Node_Access) return Boolean
+   is
+      pragma Unreferenced (Tree);
+   begin
+      return Node.Label = Virtual_Identifier;
+   end Is_Virtual_Identifier;
+
+   function Label (Node : in Valid_Node_Access) return Node_Label
+   is begin
+      return Node.Label;
+   end Label;
+
+   function Label (Tree : in Syntax_Trees.Tree; Node : in Valid_Node_Access) return Node_Label
+   is
+      pragma Unreferenced (Tree);
+   begin
+      return Node.Label;
+   end Label;
+
+   function Label (Tree : in Syntax_Trees.Tree; Element : in Stream_Index) return Node_Label
+   is
+      pragma Unreferenced (Tree);
+   begin
+      return Stream_Element_Lists.Element (Element.Cur).Node.Label;
+   end Label;
+
+   function Last_Non_Grammar
+     (Tree : in Syntax_Trees.Tree;
+      Node : in Valid_Node_Access)
+     return Node_Access
+   is
+      --  We always use a Parents stack, to limit Prev_Terminal to
+      --  descendants of Node.
+      Parents : Node_Stacks.Stack;
+      Result  : Node_Access := Last_Terminal (Tree, Node, Parents);
+   begin
+      loop
+         exit when Result = Invalid_Node_Access;
+         exit when Result.Non_Grammar.Length > 0;
+         Prev_Terminal (Tree, Result, Parents);
+      end loop;
+      return Result;
+   end Last_Non_Grammar;
+
+   function Last_Parse_Stream (Tree : in Syntax_Trees.Tree) return Stream_ID
+   is begin
+      return (Cur => Parse_Stream_Lists.Last (Tree.Streams));
+   end Last_Parse_Stream;
+
+   function Last_Sequential_Terminal
+     (Tree    : in     Syntax_Trees.Tree;
+      Node    : in     Node_Access;
+      Parents : in out Node_Stacks.Stack)
+     return Node_Access
+   is
+      Result : Node_Access := Tree.Last_Terminal (Node, Parents);
+   begin
+      loop
+         exit when Result = Invalid_Node_Access;
+         exit when Result.Sequential_Index /= Invalid_Sequential_Index;
+         Tree.Prev_Terminal (Result, Parents);
+      end loop;
+      return Result;
+   end Last_Sequential_Terminal;
+
+   function Last_Sequential_Terminal
+     (Tree : in Syntax_Trees.Tree;
+      Node : in Node_Access)
+     return Node_Access
+   is
+      Parents : Node_Stacks.Stack;
+   begin
+      return Tree.Last_Sequential_Terminal (Node, Parents);
+   end Last_Sequential_Terminal;
+
+   procedure Last_Sequential_Terminal
+     (Tree         : in     Syntax_Trees.Tree;
+      Ref          : in out Syntax_Trees.Stream_Node_Parents;
+      Parse_Stream : in     Stream_ID;
+      Preceding    : in     Boolean)
+   is begin
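+      --  Move Ref to the last terminal at or before Ref that has a
+      --  sequential index; when Preceding is False, do not search preceding
+      --  stream elements.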
+      Tree.Last_Terminal (Ref, Parse_Stream, Preceding);
+      loop
+         exit when Ref.Ref = Invalid_Stream_Node_Ref;
+
+         exit when Ref.Ref.Node /= Invalid_Node_Access and then
+           Tree.Get_Sequential_Index (Ref.Ref.Node) /= Invalid_Sequential_Index;
+
+         Tree.Prev_Terminal (Ref, Parse_Stream, Preceding);
+         exit when not Preceding and Ref.Ref.Node = Invalid_Node_Access;
+      end loop;
+   end Last_Sequential_Terminal;
+
+   function Last_Source_Terminal
+     (Tree                 : in Syntax_Trees.Tree;
+      Node                 : in Valid_Node_Access;
+      Trailing_Non_Grammar : in Boolean)
+     return Node_Access
+   is
+      --  We always use a Parents stack, to limit Prev_Terminal to
+      --  descendants of Node.
+      Parents : Node_Stacks.Stack;
+      Result  : Node_Access := Last_Terminal (Tree, Node, Parents);
+   begin
+      loop
+         exit when Result = Invalid_Node_Access;
+         exit when
+           (if Trailing_Non_Grammar
+            then (case Terminal_Label'(Result.Label) is
+                  when Source_Terminal => True,
+                  when Virtual_Terminal => Result.Non_Grammar.Length > 0,
+                  when Virtual_Identifier => Result.Non_Grammar.Length > 0)
+            else Result.Label = Source_Terminal);
+
+         Prev_Terminal (Tree, Result, Parents);
+      end loop;
+      return Result;
+   end Last_Source_Terminal;
+
+   procedure Last_Source_Terminal
+     (Tree                 : in     Syntax_Trees.Tree;
+      Ref                  : in out Stream_Node_Parents;
+      Trailing_Non_Grammar : in     Boolean)
+   is begin
+      Ref.Ref.Node := Last_Terminal (Tree, Ref.Ref.Node, Ref.Parents);
+      loop
+         exit when Ref.Ref.Node = Invalid_Node_Access;
+         exit when
+           (if Trailing_Non_Grammar
+            then (case Terminal_Label'(Ref.Ref.Node.Label) is
+                  when Source_Terminal => True,
+                  when Virtual_Terminal => Ref.Ref.Node.Non_Grammar.Length > 0,
+                  when Virtual_Identifier => Ref.Ref.Node.Non_Grammar.Length > 0)
+            else Ref.Ref.Node.Label = Source_Terminal);
+
+         Prev_Terminal (Tree, Ref.Ref.Node, Ref.Parents);
+      end loop;
+   end Last_Source_Terminal;
+
+   function Last_Terminal (Tree : in Syntax_Trees.Tree; Item : in Recover_Token) return Node_Access
+   is begin
+      return
+        (if Item.Virtual
+         then Item.Last_Terminal
+         else Last_Terminal (Tree, Item.Element_Node));
+   end Last_Terminal;
+
+   function Last_Terminal (Tree : in Syntax_Trees.Tree; Node : in Valid_Node_Access) return Node_Access
+   is begin
+      case Node.Label is
+      when Terminal_Label =>
+         return Node;
+      when Nonterm =>
+         for C of reverse Node.Children loop
+            --  Encountering a deleted child here is an error in the user algorithm.
+            declare
+               Term : constant Node_Access := Last_Terminal (Tree, C);
+            begin
+               if Term /= Invalid_Node_Access then
+                  return Term;
+               end if;
+            end;
+         end loop;
+         return Invalid_Node_Access;
+      end case;
+   end Last_Terminal;
+
+   function Last_Terminal
+     (Tree    : in     Syntax_Trees.Tree;
+      Node    : in     Valid_Node_Access;
+      Parents : in out Node_Stacks.Stack)
+     return Node_Access
+   is
+      Parent_Depth : constant SAL.Base_Peek_Type := Parents.Depth;
+   begin
+      case Node.Label is
+      when Terminal_Label =>
+         return Node;
+
+      when Nonterm =>
+         for C of reverse Node.Children loop
+            --  We tolerate null C here because this function is called while
+            --  printing a tree for debug.
+            if C /= Invalid_Node_Access then
+               Parents.Push (Node);
+               declare
+                  Last_Term : constant Node_Access := Last_Terminal (Tree, C, Parents);
+               begin
+                  if Last_Term /= Invalid_Node_Access then
+                     return Last_Term;
+                  else
+                     Parents.Pop (Parents.Depth - Parent_Depth); -- discard parents from call to Last_Terminal.
+                  end if;
+               end;
+            end if;
+         end loop;
+         --  All children are empty
+         return Invalid_Node_Access;
+      end case;
+   end Last_Terminal;
+
+   procedure Last_Terminal
+     (Tree         : in     Syntax_Trees.Tree;
+      Ref          : in out Stream_Node_Parents;
+      Parse_Stream : in     Stream_ID;
+      Preceding    : in     Boolean)
+   is
+      use Stream_Element_Lists;
+   begin
+      if Ref.Ref.Node = Invalid_Node_Access then
+         --  Ref is an empty nonterm.
+         null;
+      else
+         Ref.Ref.Node := Last_Terminal (Tree, Ref.Ref.Node, Ref.Parents);
+      end if;
+
+      if Preceding then
+         loop
+            exit when Ref.Ref.Node /= Invalid_Node_Access;
+            Prev_Terminal (Tree, Ref, Parse_Stream, Preceding);
+            exit when not Has_Element (Ref.Ref.Element.Cur);
+         end loop;
+      elsif Ref.Ref.Node = Invalid_Node_Access then
+         Ref.Parents.Clear;
+      end if;
+   end Last_Terminal;
+
+   procedure Left_Breakdown
+     (Tree      : in out Syntax_Trees.Tree;
+      Ref       : in out Stream_Node_Ref;
+      User_Data : in     Syntax_Trees.User_Data_Access_Constant)
+   is
+      --  [Wagner Graham 1998] doesn't modify the tree structure for
+      --  Left_Breakdown; it just moves the Current_Token pointer around.
+      --  That means the rest of the parser must understand that.
+      --
+      --  Here we actually decompose the tree, as in [Lahav 2008]. Empty
+      --  nonterms are handled by caller.
+      --
+      --  Any errors on stream elements that are deleted along the way are
+      --  moved to their First_Terminal, which is always the same node: the
+      --  last node promoted to the parse stream.
+      use Stream_Element_Lists;
+
+      Parse_Stream : Syntax_Trees.Parse_Stream renames Tree.Streams (Ref.Stream.Cur);
+
+      Cur         : Cursor                       := Ref.Element.Cur;
+      To_Delete   : Cursor                       := Cur;
+      Node        : Valid_Node_Access            := Stream_Element_Lists.Element (Cur).Node;
+      Next_I      : Positive_Index_Type;
+      First_Child : constant Positive_Index_Type := 1; -- preserve symmetry with Right_Breakdown
+      New_Errors  : Error_Data_Lists.List;
+
+   begin
+      Ref.Element.Cur := No_Element; --  Allow deleting via To_Delete.
+
+      loop
+         Next_I := Positive_Index_Type'Last;
+
+         for I in reverse 2 .. Node.Child_Count loop
+            if Node.Children (I).Child_Count > 0 or Node.Children (I).Label in Terminal_Label then
+               Next_I := I;
+            end if;
+
+            Cur := Parse_Stream.Elements.Insert
+              (Element  =>
+                 (Node  => Node.Children (I),
+                  State => Unknown_State),
+               Before   => Cur);
+
+            Node.Children (I).Parent := Invalid_Node_Access;
+
+            --  We don't set node.children (I) to invalid here, because we need it
+            --  if Next_I /= First_Child.
+         end loop;
+
+         Node.Children (First_Child).Parent := Invalid_Node_Access;
+
+         if Node.Children (First_Child).Child_Count > 0 or Node.Children (First_Child).Label in Terminal_Label then
+            Next_I := First_Child;
+         else
+            --  Node.Children (First_Child) is an empty nonterm; it has not
+            --  been added to stream.
+            if Next_I = Positive_Index_Type'Last then
+               --  Node is an empty nonterm; move to first sibling below.
+               null;
+            else
+               --  First non_empty is in Node.Children (Next_I); delete leading empty
+               --  nonterms that were added to the stream.
+               for I in First_Child + 1 .. Next_I - 1 loop
+                  declare
+                     To_Delete_2 : Cursor := Cur;
+                  begin
+                     Next (Cur);
+                     --  We do not set errors on empty nonterms.
+                     pragma Assert (Stream_Element_Lists.Element (To_Delete_2).Node.Error_List = null);
+                     Parse_Stream.Elements.Delete (To_Delete_2);
+                  end;
+               end loop;
+               pragma Assert (Element (Cur).Node = Node.Children (Next_I));
+
+               --  Delete the nonterm that we were breaking down, and record the one
+               --  we are now breaking down for deletion.
+               declare
+                  Node : constant Valid_Node_Access := Stream_Element_Lists.Element (To_Delete).Node;
+               begin
+                  if Node.Error_List /= null then
+                     for Err of Tree.Error_List (Node) loop
+                        New_Errors.Append (To_Message (Err, Tree, Node));
+                     end loop;
+                  end if;
+               end;
+               Parse_Stream.Elements.Delete (To_Delete);
+               To_Delete := Cur;
+            end if;
+         end if;
+
+         declare
+            Temp : constant Node_Access := Node;
+         begin
+            if Next_I = Positive_Index_Type'Last then
+               --  Node is an empty nonterm; move to first sibling. Possibly similar
+               --  to test_incremental.adb Recover_04.
+               raise SAL.Not_Implemented with "FIXME: Syntax_Trees.Left_Breakdown move to next sibling.";
+            else
+               Node := Node.Children (Next_I);
+            end if;
+
+            --  Now we can clear the children of Temp (was Node).
+            if Tree.Parents_Set then
+               Temp.Children := (others => Invalid_Node_Access);
+            end if;
+         end;
+
+         if Node.Label in Terminal_Label then
+            if To_Delete /= Cur and Next_I /= First_Child then
+               Ref.Element.Cur := Cur;
+
+            else
+               Ref.Element.Cur := Parse_Stream.Elements.Insert
+                 (Element  =>
+                    (Node  => Node,
+                     State => Unknown_State),
+                  Before   => Cur);
+
+               Cur := No_Element; --  allow delete via To_Delete
+            end if;
+
+            Ref.Node := Node;
+
+            declare
+               Node : constant Valid_Node_Access := Stream_Element_Lists.Element (To_Delete).Node;
+            begin
+               if Node.Error_List /= null then
+                  for Err of Tree.Error_List (Node) loop
+                     New_Errors.Append (To_Message (Err, Tree, Node));
+                  end loop;
+               end if;
+            end;
+            Parse_Stream.Elements.Delete (To_Delete);
+            exit;
+         end if;
+      end loop;
+
+      if New_Errors.Length > 0 then
+         Ref.Node := Add_Errors (Tree, Ref.Node, New_Errors, User_Data);
+         Replace_Node (Ref.Element, Ref.Node);
+      end if;
+   end Left_Breakdown;
+
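+   --  Lexable: the shared stream is the only stream, and it holds exactly
+   --  one element (presumably just SOI).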
+   function Lexable (Tree : in Syntax_Trees.Tree) return Boolean
+   is begin
+      return Tree.Streams.Length = 1 and
+        (Tree.Shared_Stream.Cur /= Parse_Stream_Lists.No_Element and then
+           Tree.Streams (Tree.Shared_Stream.Cur).Elements.Length = 1);
+   end Lexable;
+
+   function Line_At_Byte_Pos
+     (Tree     : in Syntax_Trees.Tree;
+      Byte_Pos : in Buffer_Pos)
+     return Base_Line_Number_Type
+   is
+      function Line_At_Byte_Pos
+        (Node       : in Valid_Node_Access;
+         Start_Line : in WisiToken.Line_Number_Type)
+        return Base_Line_Number_Type
+      is
+         Prev_Term : constant Valid_Node_Access :=
+           (if Node = Tree.Root
+            then Tree.SOI
+            elsif Node = Tree.SOI
+            then Tree.SOI
+            else Tree.Prev_Source_Terminal (Node, Trailing_Non_Grammar => True));
+      begin
+         --  Byte_Pos can be in whitespace or non_grammar, so we construct the
+         --  region to check from two successive terminals. We need line number
+         --  information, so we find the prev/next terminals with new_lines in
+         --  the token or non_grammar.
+         case Node.Label is
+         when Terminal_Label =>
+            declare
+               Prev_New_Line : constant New_Line_Ref := Tree.Prev_New_Line (Node, Start_Line);
+               Check_Region : constant Buffer_Region :=
+                 (First =>
+                    (if Node = Tree.SOI
+                     then Tree.Byte_Region (Tree.SOI, Trailing_Non_Grammar => True).First
+                     else Tree.Byte_Region (Prev_Term, Trailing_Non_Grammar => True).Last + 1),
+                  Last  =>
+                    (if Node = Tree.EOI
+                     then Node.Non_Grammar (1).Byte_Region.First
+                     else Tree.Byte_Region (Node, Trailing_Non_Grammar => True).Last));
+
+               function Check_Non_Grammar return Base_Line_Number_Type
+               --  Return Invalid_Line_Number if Byte_Pos not in Node.Non_Grammar
+               is begin
+                  if Node.Non_Grammar.Length > 0 and then
+                    Byte_Pos <= Node.Non_Grammar (Node.Non_Grammar.Last_Index).Byte_Region.Last
+                  then
+                     for Token of Node.Non_Grammar loop
+                        if Byte_Pos <= Token.Byte_Region.First then
+                           return Token.Line_Region.First;
+
+                        elsif Byte_Pos <= Token.Byte_Region.Last then
+                           return Tree.Lexer.Line_At_Byte_Pos (Token, Byte_Pos);
+
+                        end if;
+                     end loop;
+                  end if;
+                  return Invalid_Line_Number;
+               end Check_Non_Grammar;
+
+            begin
+               pragma Assert (Prev_New_Line.Pos /= Invalid_Buffer_Pos); --  SOI if nothing else
+
+               if not Contains (Check_Region, Byte_Pos) then
+                  return Invalid_Line_Number;
+
+               else
+                  case Terminal_Label'(Node.Label) is
+                  when Virtual_Identifier | Virtual_Terminal =>
+                     --  ada_mode-partial_parse.adb
+                     declare
+                        Temp : constant Base_Line_Number_Type := Check_Non_Grammar;
+                     begin
+                        if Temp = Invalid_Line_Number then
+                           return Prev_New_Line.Line;
+                        else
+                           return Temp;
+                        end if;
+                     end;
+
+                  when Source_Terminal =>
+                     if Node = Tree.EOI then
+                        return Node.Non_Grammar (1).Line_Region.First;
+
+                     elsif Byte_Pos < Node.Byte_Region.First then
+                        --  In whitespace before token
+                        return Prev_New_Line.Line;
+
+                     elsif Byte_Pos <= Node.Byte_Region.Last then
+                        return Tree.Lexer.Line_At_Byte_Pos
+                          (Node.Byte_Region, Byte_Pos,
+                           First_Line => Prev_New_Line.Line);
+
+                     else
+                        declare
+                           Temp : constant Base_Line_Number_Type := Check_Non_Grammar;
+                        begin
+                           if Temp = Invalid_Line_Number then
+                              raise SAL.Programmer_Error;
+                           else
+                              return Temp;
+                           end if;
+                        end;
+                     end if;
+                  end case;
+               end if;
+            end;
+
+         when Nonterm =>
+            declare
+               First_Term : constant Node_Access  := Tree.First_Source_Terminal
+                 (Node, Trailing_Non_Grammar => True, Following => False);
+               Last_Term  : constant Node_Access  := Tree.Last_Source_Terminal (Node, Trailing_Non_Grammar  => True);
+            begin
+               if First_Term = Invalid_Node_Access then
+                  --  Empty or all virtual
+                  return Invalid_Line_Number;
+
+               elsif not Contains
+                 ((First =>
+                     (if Node = Tree.Root
+                      then Tree.Byte_Region (Tree.SOI, Trailing_Non_Grammar => True).First
+                      else Tree.Byte_Region (Prev_Term, Trailing_Non_Grammar => True).Last + 1),
+                   Last  =>
+                    (if Last_Term = Tree.EOI
+                     then Last_Term.Non_Grammar (1).Byte_Region.First
+                     else Tree.Byte_Region (Last_Term, Trailing_Non_Grammar => True).Last)),
+                  Byte_Pos)
+               then
+                  return Invalid_Line_Number;
+
+               else
+                  for Child of Node.Children loop
+                     declare
+                        Temp : constant Base_Line_Number_Type := Line_At_Byte_Pos
+                          (Child, Start_Line => Tree.Prev_New_Line (Child, Start_Line).Line);
+                     begin
+                        if Temp = Invalid_Line_Number then
+                           --  Check next child
+                           null;
+                        else
+                           return Temp;
+                        end if;
+                     end;
+                  end loop;
+                  raise SAL.Programmer_Error; --  Contains said we'd find it.
+               end if;
+            end;
+         end case;
+      end Line_At_Byte_Pos;
+   begin
+      return Line_At_Byte_Pos (Tree.Root, Start_Line => Tree.SOI.Non_Grammar (1).Line_Region.First);
+   end Line_At_Byte_Pos;
+
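+   --  Return the character position of the first character on Line, as
+   --  found by Find_New_Line starting from Tree.Root; Buffer_Pos'First
+   --  for the first line.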
+   function Line_Begin_Char_Pos
+     (Tree : in Syntax_Trees.Tree;
+      Line : in Line_Number_Type)
+     return Buffer_Pos
+   is
+      Node           : Node_Access := Tree.Root;
+      Begin_Char_Pos : Buffer_Pos  := Invalid_Buffer_Pos;
+   begin
+      if Line = Line_Number_Type'First then
+         return Buffer_Pos'First;
+      end if;
+
+      Node := Find_New_Line (Tree, Line, Node, Begin_Char_Pos);
+      return Begin_Char_Pos;
+   end Line_Begin_Char_Pos;
+
+   function Line_Begin_Char_Pos
+     (Tree   : in Syntax_Trees.Tree;
+      Line   : in Line_Number_Type;
+      Stream : in Stream_ID)
+     return Buffer_Pos
+   is
+      Begin_Char_Pos : Buffer_Pos := Invalid_Buffer_Pos;
+   begin
+      if Line = Line_Number_Type'First then
+         return Buffer_Pos'First;
+      end if;
+
+      declare
+         Ref : Stream_Node_Parents;
+      begin
+         Ref.Ref := Tree.Stream_First (Stream, Skip_SOI => False);
+         Find_New_Line (Tree, Ref, Stream, Line, Begin_Char_Pos);
+         if Ref.Ref.Node = Invalid_Node_Access then
+            return Invalid_Buffer_Pos;
+         else
+            return Begin_Char_Pos;
+         end if;
+      end;
+   end Line_Begin_Char_Pos;
+
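+   --  Return the first terminal on Line; Invalid_Node_Access if Line is
+   --  empty or lies outside the text covered by the tree.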
+   function Line_Begin_Token
+     (Tree : in Syntax_Trees.Tree;
+      Line : in Line_Number_Type)
+     return Node_Access
+   is begin
+      declare
+         Node : constant Node_Access := Tree.First_Non_Grammar (Root (Tree));
+      begin
+         if Node = Invalid_Node_Access then
+            --  Tree has no tokens with a Line_Region. Note that during LR parse, EOI
+            --  is not in the tree, only in the parse stream.
+            return Invalid_Node_Access;
+         end if;
+
+         if Line = Tree.Line_Region (Node, Trailing_Non_Grammar => True).First then
+            return Node;
+         elsif Line < Tree.Line_Region (Node, Trailing_Non_Grammar => True).First then
+            return Invalid_Node_Access;
+         end if;
+      end;
+
+      declare
+         Begin_Char_Pos : Buffer_Pos;
+         Node           : Node_Access := Find_New_Line (Tree, Line, Root (Tree), Begin_Char_Pos);
+      begin
+         if Node = Invalid_Node_Access then
+            --  Line is after EOI.
+            return Invalid_Node_Access;
+
+         elsif Node.ID = Tree.Lexer.Descriptor.EOI_ID then
+            --  Find_New_Line allows both Line, Line - 1.
+            if Node.Non_Grammar (Node.Non_Grammar.First_Index).Line_Region.First = Line then
+               return Node;
+            else
+               return Invalid_Node_Access;
+            end if;
+
+         else
+            --  Node now contains the non-grammar that ends Line - 1
+            if Empty_Line (Tree, Node.Non_Grammar, Line) then
+               return Invalid_Node_Access;
+            else
+               Next_Terminal (Tree, Node);
+               return Node;
+            end if;
+         end if;
+      end;
+   end Line_Begin_Token;
+
+   function Line_Begin_Token
+     (Tree                      : in Syntax_Trees.Tree;
+      Line                      : in Line_Number_Type;
+      Stream                    : in Stream_ID;
+      Following_Source_Terminal : in Boolean)
+     return Node_Access
+   is
+      Ref             : Stream_Node_Parents;
+      Begin_Char_Pos  : Buffer_Pos;
+
+      EOI_Line : constant Line_Number_Type := Tree.EOI.Non_Grammar (Tree.EOI.Non_Grammar.First).Line_Region.First;
+   begin
+      Ref.Ref := Stream_First (Tree, Stream, Skip_SOI => True);
+
+      if Line = Line_Number_Type'First then
+         if Line = Tree.Line_Region (Ref, Stream).First then
+            return Tree.First_Terminal (Ref.Ref.Node);
+         else
+            if Following_Source_Terminal then
+               Next_Source_Terminal (Tree, Ref, Trailing_Non_Grammar => False);
+               return Ref.Ref.Node;
+            else
+               return Invalid_Node_Access;
+            end if;
+         end if;
+
+      elsif Line = EOI_Line + 1 then
+         return Tree.EOI;
+
+      elsif Line > EOI_Line + 1 then
+         return Invalid_Node_Access;
+      end if;
+
+      Find_New_Line (Tree, Ref, Stream, Line, Begin_Char_Pos);
+
+      if Ref.Ref = Invalid_Stream_Node_Ref then
+         return Invalid_Node_Access;
+      else
+         --  Ref now contains the non-grammar that ends Line - 1, or EOI.
+
+         if Ref.Ref.Node.ID = Tree.Lexer.Descriptor.EOI_ID then
+            --  test_incremental.adb Edit_String_10
+            return Invalid_Node_Access;
+
+         elsif Empty_Line (Tree, Ref.Ref.Node.Non_Grammar, Line) then
+            if Following_Source_Terminal then
+               Next_Source_Terminal (Tree, Ref, Trailing_Non_Grammar => False);
+               return Ref.Ref.Node;
+            else
+               return Invalid_Node_Access;
+            end if;
+         else
+            Next_Terminal (Tree, Ref, Following => True);
+            return Ref.Ref.Node;
+         end if;
+      end if;
+   end Line_Begin_Token;
+
+   procedure Line_Region_Internal_1
+     (Tree                    : in     Syntax_Trees.Tree;
+      Node                    : in     Node_Access;
+      Prev_Non_Grammar        : in     Valid_Node_Access;
+      Next_Non_Grammar        : in     Valid_Node_Access;
+      Trailing_Non_Grammar    : in     Boolean;
+      First_Non_Grammar_Token :    out WisiToken.Lexer.Token;
+      Last_Non_Grammar_Token  :    out WisiToken.Lexer.Token)
+   is
+      --  Since all non_grammar have line_region, we don't have to look for
+      --  a new_line, just any non_grammar.
+      --
+      --  We always have to find a previous and next non_grammar, to allow
+      --  for multi-line tokens.
+      --
+      --  The last few tokens in a nonterm may have no non_grammar; then we
+      --  have to find the following Non_Grammar.
+
+      Last_Non_Grammar : constant Syntax_Trees.Node_Access :=
+        (if Node = Invalid_Node_Access
+         then Invalid_Node_Access
+         else Tree.Last_Non_Grammar (Node));
+
+      Last_Terminal : constant Syntax_Trees.Node_Access :=
+        (if Node = Invalid_Node_Access
+         then Invalid_Node_Access
+         else Tree.Last_Terminal (Node));
+
+      Actual_Last_Non_Grammar : constant Syntax_Trees.Valid_Node_Access :=
+        (if Last_Non_Grammar = Invalid_Node_Access
+         then Next_Non_Grammar
+         elsif Last_Non_Grammar = Last_Terminal
+         then Last_Non_Grammar
+         else Next_Non_Grammar);
+   begin
+      First_Non_Grammar_Token :=
+        (if (Node = Tree.Root and Prev_Non_Grammar.ID = Tree.Lexer.Descriptor.SOI_ID)
+           --  We are finding the line_region of wisi_accept in an Editable
+           --  tree; we want to include the leading non_grammar in SOI.
+           or Node.ID = Tree.Lexer.Descriptor.SOI_ID
+           --  We are finding the line_region of SOI.
+         then
+            Prev_Non_Grammar.Non_Grammar (Prev_Non_Grammar.Non_Grammar.First_Index)
+         else
+            --  We are finding the line_region of a leading non_terminal; we don't
+            --  want to include the leading non_grammar in SOI.
+            Prev_Non_Grammar.Non_Grammar (Prev_Non_Grammar.Non_Grammar.Last_Index));
+
+      Last_Non_Grammar_Token := Actual_Last_Non_Grammar.Non_Grammar
+        (if Trailing_Non_Grammar and Actual_Last_Non_Grammar = Last_Non_Grammar
+         then Actual_Last_Non_Grammar.Non_Grammar.Last_Index
+         else Actual_Last_Non_Grammar.Non_Grammar.First_Index);
+   end Line_Region_Internal_1;
+
+   function Line_Region_Internal
+     (Tree                 : in Syntax_Trees.Tree;
+      Node                 : in Node_Access;
+      Prev_Non_Grammar     : in Valid_Node_Access;
+      Next_Non_Grammar     : in Valid_Node_Access;
+      Trailing_Non_Grammar : in Boolean)
+     return WisiToken.Line_Region
+   is
+      First_Non_Grammar_Token : WisiToken.Lexer.Token;
+      Last_Non_Grammar_Token  : WisiToken.Lexer.Token;
+   begin
+      Line_Region_Internal_1
+        (Tree, Node, Prev_Non_Grammar, Next_Non_Grammar, Trailing_Non_Grammar,
+         First_Non_Grammar_Token, Last_Non_Grammar_Token);
+
+      return
+        (First => First_Non_Grammar_Token.Line_Region.Last,
+         Last => Last_Non_Grammar_Token.Line_Region.First);
+   end Line_Region_Internal;
+
+   function Byte_Region_Of_Line_Region_Internal
+     (Tree                : in Syntax_Trees.Tree;
+      Node                 : in Node_Access;
+      Prev_Non_Grammar     : in Valid_Node_Access;
+      Next_Non_Grammar     : in Valid_Node_Access;
+      Trailing_Non_Grammar : in     Boolean)
+     return WisiToken.Buffer_Region
+   is
+      First_Non_Grammar_Token : WisiToken.Lexer.Token;
+      Last_Non_Grammar_Token  : WisiToken.Lexer.Token;
+   begin
+      Line_Region_Internal_1
+        (Tree, Node, Prev_Non_Grammar, Next_Non_Grammar, Trailing_Non_Grammar,
+         First_Non_Grammar_Token, Last_Non_Grammar_Token);
+
+      return
+        (First => First_Non_Grammar_Token.Byte_Region.Last,
+         Last => Last_Non_Grammar_Token.Byte_Region.First);
+   end Byte_Region_Of_Line_Region_Internal;
+
+   function Line_Region (Tree : in Syntax_Trees.Tree) return WisiToken.Line_Region
+   is begin
+      return
+        (First => Tree.SOI.Non_Grammar (1).Line_Region.First,
+         Last  => Tree.EOI.Non_Grammar (1).Line_Region.Last);
+   end Line_Region;
+
+   function Line_Region
+     (Tree                 : in Syntax_Trees.Tree;
+      Node                 : in Valid_Node_Access;
+      Trailing_Non_Grammar : in Boolean)
+     return WisiToken.Line_Region
+   is
+      Prev_Non_Grammar     : constant Node_Access := Tree.Prev_Non_Grammar (Node);
+      Next_Non_Grammar     : constant Node_Access := Tree.Next_Non_Grammar (Node);
+   begin
+      if Prev_Non_Grammar = Invalid_Node_Access or Next_Non_Grammar = Invalid_Node_Access then
+         --  Tolerate this because used in error messages.
+         return Null_Line_Region;
+      else
+         return Line_Region_Internal
+           (Tree, Node,
+            Prev_Non_Grammar     => Tree.Prev_Non_Grammar (Node),
+            Next_Non_Grammar     => Tree.Next_Non_Grammar (Node),
+            Trailing_Non_Grammar => Trailing_Non_Grammar);
+      end if;
+   end Line_Region;
+
+   function Line_Region
+     (Tree                 : in Syntax_Trees.Tree;
+      Ref                  : in Stream_Node_Ref;
+      Trailing_Non_Grammar : in Boolean)
+     return WisiToken.Line_Region
+   is begin
+      if Tree.Parents_Set then
+         declare
+            Prev_Non_Grammar : Stream_Node_Ref := Ref;
+            Next_Non_Grammar : Stream_Node_Ref := Ref;
+         begin
+            Tree.Prev_Non_Grammar (Prev_Non_Grammar);
+            Tree.Next_Non_Grammar (Next_Non_Grammar);
+            return Line_Region_Internal
+              (Tree, Ref.Node, Prev_Non_Grammar.Node, Next_Non_Grammar.Node, Trailing_Non_Grammar);
+         end;
+
+      else
+         return Line_Region (Tree, To_Stream_Node_Parents (Tree, Ref), Ref.Stream, Trailing_Non_Grammar);
+      end if;
+   end Line_Region;
+
+   function Byte_Region_Of_Line_Region
+     (Tree : in Syntax_Trees.Tree;
+      Ref  : in Stream_Node_Ref)
+     return WisiToken.Buffer_Region
+   is
+      Prev_Non_Grammar : Stream_Node_Ref := Ref;
+      Next_Non_Grammar : Stream_Node_Ref := Ref;
+   begin
+      Tree.Prev_Non_Grammar (Prev_Non_Grammar);
+      Tree.Next_Non_Grammar (Next_Non_Grammar);
+      return Byte_Region_Of_Line_Region_Internal
+        (Tree, Ref.Node, Prev_Non_Grammar.Node, Next_Non_Grammar.Node, Trailing_Non_Grammar => True);
+   end Byte_Region_Of_Line_Region;
+
+   function Line_Region
+     (Tree                 : in Syntax_Trees.Tree;
+      Ref                  : in Stream_Node_Parents;
+      Parse_Stream         : in Stream_ID;
+      Trailing_Non_Grammar : in Boolean := True)
+     return WisiToken.Line_Region
+   is
+      Prev_Non_Grammar : Stream_Node_Parents := Ref;
+      Next_Non_Grammar : Stream_Node_Parents := Ref;
+   begin
+      Tree.Prev_Non_Grammar (Prev_Non_Grammar, Parse_Stream);
+      Tree.Next_Non_Grammar (Next_Non_Grammar);
+      return Line_Region_Internal
+        (Tree, Ref.Ref.Node, Prev_Non_Grammar.Ref.Node, Next_Non_Grammar.Ref.Node, Trailing_Non_Grammar);
+   end Line_Region;
+
+   function Line_Region
+     (Tree   : in Syntax_Trees.Tree;
+      Stream : in Stream_ID;
+      Ref    : in Real_Recover_Token)
+     return WisiToken.Line_Region
+   is
+      function Find_Element return Stream_Index
+      is
+         use Stream_Element_Lists;
+         Parse_Stream : Syntax_Trees.Parse_Stream renames Tree.Streams (Stream.Cur);
+         Cur : Cursor := Parse_Stream.Stack_Top;
+      begin
+         loop
+            exit when not Has_Element (Cur);
+            if Stream_Element_Lists.Element (Cur).Node = Ref.Element_Node then
+               return (Cur => Cur);
+            end if;
+            Next (Cur);
+         end loop;
+
+         --  Not found in stream input; search stack.
+         Cur := Parse_Stream.Stack_Top;
+         loop
+            Previous (Cur);
+            exit when not Has_Element (Cur);
+            if Stream_Element_Lists.Element (Cur).Node = Ref.Element_Node then
+               return (Cur => Cur);
+            end if;
+         end loop;
+         return Invalid_Stream_Index;
+      end Find_Element;
+
+      Element : constant Stream_Index := Find_Element;
+   begin
+      if Element = Invalid_Stream_Index then
+         return Null_Line_Region;
+      else
+         declare
+            Ref_Parents : Stream_Node_Parents :=
+              (Ref     => (Stream, Element, Get_Node (Element)),
+               Parents => <>);
+         begin
+            Tree.First_Terminal (Ref_Parents, Following => True);
+            return Line_Region (Tree, Ref_Parents, Stream, Trailing_Non_Grammar => True);
+         end;
+      end if;
+   end Line_Region;
+
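+   --  Make_Rooted: return Item with Node set to Element_Node, so the
+   --  result refers to the root node of its stream element; virtual
+   --  tokens are returned unchanged.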
+   function Make_Rooted (Item : in Recover_Token) return Recover_Token
+   is begin
+      if Item.Virtual then
+         return Item;
+      elsif Item.Element_Node = Item.Node then
+         return Item;
+      else
+         return
+           (Virtual => False,
+            Element_Node => Item.Element_Node,
+            Node => Item.Element_Node);
+      end if;
+   end Make_Rooted;
+
+   type Augmented_In_Tree is new Base_Augmented with record I : Integer; end record;
+   Aug_In_Tree : constant Augmented_In_Tree := (Base_Augmented with 1);
+
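+   --  Mark_In_Tree: set Node.Augmented (and the Augmented of any terminals
+   --  in Node.Following_Deleted) to a sentinel value; raise
+   --  Programmer_Error if Node.Augmented is already set.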
+   procedure Mark_In_Tree
+     (Tree                : in     Syntax_Trees.Tree;
+      Node                : in     Valid_Node_Access;
+      Data                : in out User_Data_Type'Class;
+      Node_Error_Reported : in out Boolean)
+   is
+      pragma Unreferenced (Data, Node_Error_Reported);
+   begin
+      if Node.Augmented /= null then
+         raise SAL.Programmer_Error with
+           (if Node.Augmented.all in Augmented_In_Tree
+            then "Mark_In_Tree called twice on node " & Tree.Image (Node, Node_Numbers => True)
+            else "Mark_In_Tree called with Augmented already set");
+      end if;
+      Node.Augmented := new Augmented_In_Tree'(Aug_In_Tree);
+
+      if Node.Label = Source_Terminal then
+         for N of Node.Following_Deleted loop
+            N.Augmented := new Augmented_In_Tree'(Aug_In_Tree);
+         end loop;
+      end if;
+   end Mark_In_Tree;
+
+   procedure Move_Element
+     (Tree      : in out Syntax_Trees.Tree;
+      Stream    : in     Stream_ID;
+      Ref       : in out Stream_Node_Parents;
+      New_Node  : in     Valid_Node_Access;
+      User_Data : in     User_Data_Access_Constant)
+   --  Move Ref to Stream, replacing Ref.Node with New_Node,
+   --  copying all ancestors. Update Ref to point to new stream element
+   --  with copied nodes.
+   is
+      --  We don't use Move_Shared_To_Input, because that doesn't deep copy the
+      --  node.
+      Orig_Element_Node : constant Valid_Node_Access := Get_Node (Ref.Ref.Element);
+
+      Parse_Stream : Syntax_Trees.Parse_Stream renames Tree.Streams (Stream.Cur);
+   begin
+      if Ref.Ref.Stream /= Stream then
+         declare
+            New_Element : constant Stream_Element_Lists.Cursor :=
+              Parse_Stream.Elements.Insert
+                (Element  =>
+                   (Node  =>
+                      (if Orig_Element_Node = Ref.Ref.Node
+                       then New_Node
+                       else Copy_Node
+                         (Tree, Orig_Element_Node,
+                          Parent                 => Invalid_Node_Access,
+                          User_Data              => User_Data,
+                          Copy_Children          => False,
+                          Copy_Following_Deleted => True)),
+
+                    State => Unknown_State),
+                 Before   => Stream_Element_Lists.Next (Parse_Stream.Stack_Top));
+         begin
+            if Ref.Ref.Stream = Tree.Shared_Stream and then
+              Ref.Ref.Element.Cur = Parse_Stream.Shared_Link
+            then
+               Stream_Element_Lists.Next (Parse_Stream.Shared_Link);
+            end if;
+
+            --  Don't set Ref.Ref.Node yet; needed by Copy_Ancestors below.
+            Ref.Ref.Stream      := Stream;
+            Ref.Ref.Element.Cur := New_Element;
+
+            if Ref.Parents.Depth > 0 then
+               Ref.Parents.Set
+                 (Index   => Ref.Parents.Depth,
+                  Depth   => Ref.Parents.Depth,
+                  Element => Get_Node (Ref.Ref.Element));
+            end if;
+         end;
+
+      elsif Orig_Element_Node = Ref.Ref.Node then
+         Replace_Node (Ref.Ref.Element, New_Node);
+      end if;
+
+      if Orig_Element_Node = Ref.Ref.Node then
+         Ref.Ref.Node := New_Node;
+      else
+         --  Edit child links in ancestors, update Ref.Parents to match.
+         Copy_Ancestors (Tree, Ref, New_Node, User_Data);
+      end if;
+   end Move_Element;
+
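+   --  Move_Shared_To_Input: copy the element at Stream's Shared_Link into
+   --  Stream's input, just after Stack_Top, then advance Shared_Link.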
+   procedure Move_Shared_To_Input
+     (Tree   : in out Syntax_Trees.Tree;
+      Stream : in     Stream_ID)
+   is
+      use Stream_Element_Lists;
+      Parse_Stream : Syntax_Trees.Parse_Stream renames Tree.Streams (Stream.Cur);
+      Before       : constant Cursor := Next (Parse_Stream.Stack_Top);
+   begin
+      Parse_Stream.Elements.Insert
+        (Element  =>
+           (Node  => Element (Parse_Stream.Shared_Link).Node,
+            State => Unknown_State),
+         Before => Before);
+      Next (Parse_Stream.Shared_Link);
+   end Move_Shared_To_Input;
+
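+   --  Move_Shared_To_Input: copy the shared stream elements First .. Last
+   --  into Stream's input, then set Stream's Shared_Link to the element
+   --  after Last.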
+   procedure Move_Shared_To_Input
+     (Tree   : in out Syntax_Trees.Tree;
+      First  : in     Stream_Node_Ref;
+      Last   : in     Stream_Node_Ref;
+      Stream : in     Stream_ID)
+   is
+      use Stream_Element_Lists;
+      Temp         : Stream_Node_Ref := First;
+      Parse_Stream : Syntax_Trees.Parse_Stream renames Tree.Streams (Stream.Cur);
+      Before       : constant Cursor := Next (Parse_Stream.Stack_Top);
+   begin
+      loop
+         Parse_Stream.Elements.Insert
+           (Element  =>
+              (Node  => Element (Temp.Element.Cur).Node,
+               State => Unknown_State),
+            Before => Before);
+
+         exit when Temp.Element = Last.Element;
+         Tree.Stream_Next (Temp, Rooted => True);
+      end loop;
+
+      Tree.Stream_Next (Temp, Rooted => True);
+      Parse_Stream.Shared_Link := Temp.Element.Cur;
+   end Move_Shared_To_Input;
+
+   function Name (Tree : in Syntax_Trees.Tree; Item : in Recover_Token) return Buffer_Region
+   is begin
+      if Item.Virtual then
+         if Item.Name = Null_Buffer_Region then
+            if Item.First_Terminal = Invalid_Node_Access or else
+              Tree.Byte_Region (Item.First_Terminal, Trailing_Non_Grammar => False) = Null_Buffer_Region
+            then
+               return Null_Buffer_Region;
+            else
+               if Item.Last_Terminal = Invalid_Node_Access or else
+                 Tree.Byte_Region (Item.Last_Terminal, Trailing_Non_Grammar => False) = Null_Buffer_Region
+               then
+                  return Null_Buffer_Region;
+               else
+                  return
+                    (Tree.Byte_Region (Item.First_Terminal, Trailing_Non_Grammar => False).First,
+                     Tree.Byte_Region (Item.Last_Terminal, Trailing_Non_Grammar => False).Last);
+               end if;
+            end if;
+         else
+            return Item.Name;
+         end if;
+      else
+         return Tree.Name (Item.Element_Node);
+      end if;
+   end Name;
+
+   function Name (Tree : in Syntax_Trees.Tree; Node : in Valid_Node_Access) return Buffer_Region
+   is begin
+      case Node.Label is
+      when Nonterm =>
+         if Node.Name_Length = 0 then
+            return Tree.Byte_Region (Node, Trailing_Non_Grammar => False);
+         else
+            declare
+               First_Terminal : constant Node_Access := Tree.First_Terminal (Node);
+               Byte_First     : constant Buffer_Pos  := Tree.Byte_Region
+                 (First_Terminal, Trailing_Non_Grammar => False).First;
+            begin
+               return
+                 (Byte_First + Node.Name_Offset,
+                  Byte_First + Node.Name_Offset + Node.Name_Length - 1);
+            end;
+         end if;
+
+      when Source_Terminal =>
+         return Node.Byte_Region;
+
+      when Virtual_Terminal_Label =>
+         return Null_Buffer_Region;
+      end case;
+   end Name;
+
+   function Name (Tree : in Syntax_Trees.Tree; Ref : in Stream_Node_Ref) return Buffer_Region
+   is begin
+      --  We use the Element node because the nonterminal has the most valid Name.
+      return Tree.Name (Stream_Element_Lists.Element (Ref.Element.Cur).Node);
+   end Name;
+
+   overriding function Next
+     (Object   : Error_Iterator;
+      Position : Error_Ref)
+     return Error_Ref
+   is begin
+      return Result : Error_Ref := Position do
+         Object.Tree.Next_Error (Result);
+      end return;
+   end Next;
+
+   overriding function Next
+     (Object   : Stream_Error_Iterator;
+      Position : Stream_Error_Cursor)
+     return Stream_Error_Cursor
+   is begin
+      return Result : Stream_Error_Cursor := Position do
+         Object.Tree.Next_Error (Result.SER);
+      end return;
+   end Next;
+
+   procedure Next_Error (Tree : in Syntax_Trees.Tree; Error : in out Error_Ref)
+   is
+      pragma Unreferenced (Tree);
+      use Valid_Node_Access_Lists;
+      use Error_Data_Lists;
+   begin
+      if Has_Element (Error.Error) then
+         Next (Error.Error);
+      end if;
+      if Has_Element (Error.Error) then
+         return;
+      end if;
+      if Has_Element (Error.Deleted) then
+         Next (Error.Deleted);
+         if not Has_Element (Error.Deleted) then
+            Next_Node (Error.Node);
+         end if;
+         First_Error (Error);
+      else
+         if Error.Node.Error_List = null then
+            if Error.Node.Label = Source_Terminal then
+               Error.Deleted := Error.Node.Following_Deleted.First;
+            else
+               Next_Node (Error.Node);
+               if Error.Node = Invalid_Node_Access then
+                  --  No more errors.
+                  return;
+               end if;
+            end if;
+
+            First_Error (Error);
+         else
+            if Has_Element (Error.Error) then
+               Next (Error.Error);
+            end if;
+            if not Has_Element (Error.Error) then
+               if Error.Node.Label = Source_Terminal then
+                  Error.Deleted := Error.Node.Following_Deleted.First;
+                  if not Has_Element (Error.Deleted) then
+                     Next_Node (Error.Node);
+                  end if;
+               else
+                  Next_Node (Error.Node);
+               end if;
+
+               if Error.Node = Invalid_Node_Access then
+                  --  No more errors.
+                  return;
+               end if;
+               First_Error (Error);
+            end if;
+         end if;
+      end if;
+   end Next_Error;
+
+   procedure Next_Error (Tree : in Syntax_Trees.Tree; Error : in out Stream_Error_Ref)
+   is
+      use Valid_Node_Access_Lists;
+      use Error_Data_Lists;
+   begin
+      if Has_Element (Error.Error) then
+         Next (Error.Error);
+      end if;
+      if Has_Element (Error.Error) then
+         return;
+      end if;
+      if Has_Element (Error.Deleted) then
+         Next (Error.Deleted);
+         if not Has_Element (Error.Deleted) then
+            Next_Node (Tree, Error.Ref);
+         end if;
+         First_Error (Tree, Error);
+      else
+         if Error.Ref.Ref.Node.Error_List = null then
+            if Error.Ref.Ref.Node.Label = Source_Terminal then
+               Error.Deleted := Error.Ref.Ref.Node.Following_Deleted.First;
+            else
+               Next_Node (Tree, Error.Ref);
+               if Error.Ref.Ref.Node = Invalid_Node_Access then
+                  --  No more errors.
+                  return;
+               end if;
+            end if;
+
+            First_Error (Tree, Error);
+         else
+            if Has_Element (Error.Error) then
+               Next (Error.Error);
+            end if;
+            if not Has_Element (Error.Error) then
+               if Error.Ref.Ref.Node.Label = Source_Terminal then
+                  Error.Deleted := Error.Ref.Ref.Node.Following_Deleted.First;
+                  if not Has_Element (Error.Deleted) then
+                     Next_Node (Tree, Error.Ref);
+                  end if;
+               else
+                  Next_Node (Tree, Error.Ref);
+               end if;
+
+               if Error.Ref.Ref.Node = Invalid_Node_Access then
+                  --  No more errors.
+                  return;
+               end if;
+               First_Error (Tree, Error);
+            end if;
+         end if;
+      end if;
+   end Next_Error;
+
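+   --  Next_New_Line: starting after non_grammar index After_Non_Grammar in
+   --  Start_Ref, advance to the next non_grammar token that contains a
+   --  new_line, or to EOI; return the containing terminal in Result_Ref
+   --  and the token index in Result_Non_Grammar.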
+   procedure Next_New_Line
+     (Tree               : in     Syntax_Trees.Tree;
+      Start_Ref          : in     Terminal_Ref;
+      After_Non_Grammar  : in     Positive_Index_Type;
+      Result_Ref         :    out Terminal_Ref;
+      Result_Non_Grammar :    out Positive_Index_Type)
+   is
+      Index : Lexer.Token_Arrays.Extended_Index := After_Non_Grammar;
+   begin
+      Result_Ref := Start_Ref;
+
+      loop
+         Next_Non_Grammar :
+         loop
+            if Index /= Lexer.Token_Arrays.No_Index and
+              Index < Result_Ref.Node.Non_Grammar.Last_Index
+            then
+               Index := @ + 1;
+               exit Next_Non_Grammar;
+            else
+               Next_Terminal (Tree, Result_Ref);
+               if Result_Ref.Node.Non_Grammar.Length > 0 then
+                  Index := Result_Ref.Node.Non_Grammar.First_Index;
+                  exit Next_Non_Grammar;
+               else
+                  Index := Lexer.Token_Arrays.No_Index;
+               end if;
+            end if;
+         end loop Next_Non_Grammar;
+
+         exit when Result_Ref.Node.ID = Tree.Lexer.Descriptor.EOI_ID or
+           Contains_New_Line (Result_Ref.Node.Non_Grammar (Index).Line_Region);
+      end loop;
+      Result_Non_Grammar := Index;
+   end Next_New_Line;
+
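+   --  Next_Node: advance Node in pre-order over the whole tree: the first
+   --  child of a nonterm, otherwise the next sibling of the nearest
+   --  ancestor that has one; Invalid_Node_Access after the last node.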
+   procedure Next_Node (Node : in out Node_Access)
+   is
+      procedure Next_Sibling
+      is begin
+         loop
+            if Node.Parent = Invalid_Node_Access then
+               Node := Invalid_Node_Access;
+               return;
+            else
+               declare
+                  Child_Index : constant Positive_Index_Type := Syntax_Trees.Child_Index (Node.Parent.all, Node);
+               begin
+                  if Child_Index = Node.Parent.Child_Count then
+                     Node := Node.Parent;
+                  else
+                     Node := Node.Parent.Children (Child_Index + 1);
+                     exit;
+                  end if;
+               end;
+            end if;
+         end loop;
+      end Next_Sibling;
+
+   begin
+      case Node.Label is
+      when Terminal_Label =>
+         Next_Sibling;
+
+      when Nonterm =>
+         if Node.Child_Count > 0 then
+            Node := Node.Children (1);
+         else
+            Next_Sibling;
+         end if;
+      end case;
+   end Next_Node;
+
+   procedure Next_Node (Tree : in Syntax_Trees.Tree; Node : in out Stream_Node_Parents)
+   is
+      procedure Next_Sibling
+      is begin
+         loop
+            if Node.Parents.Depth = 0 then
+               if Node.Ref.Node.ID = Tree.Lexer.Descriptor.EOI_ID then
+                  Node.Ref := Invalid_Stream_Node_Ref;
+                  return;
+               else
+                  Stream_Next (Tree, Node.Ref, Rooted => True);
+                  return;
+               end if;
+            else
+               declare
+                  Child_Index : constant Positive_Index_Type := Syntax_Trees.Child_Index
+                    (Node.Parents.Peek.all, Node.Ref.Node);
+               begin
+                  if Child_Index = Node.Parents.Peek.Child_Count then
+                     Node.Ref.Node := Node.Parents.Pop;
+                  else
+                     Node.Ref.Node := Node.Parents.Peek.Children (Child_Index + 1);
+                     exit;
+                  end if;
+               end;
+            end if;
+         end loop;
+      end Next_Sibling;
+
+   begin
+      case Node.Ref.Node.Label is
+      when Terminal_Label =>
+         Next_Sibling;
+
+      when Nonterm =>
+         if Node.Ref.Node.Child_Count > 0 then
+            Node.Parents.Push (Node.Ref.Node);
+            Node.Ref.Node := Node.Ref.Node.Children (1);
+         else
+            Next_Sibling;
+         end if;
+      end case;
+   end Next_Node;
+
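+   --  Next_Non_Grammar: return the next terminal after Node that carries
+   --  non_grammar tokens; Node itself if it is EOI, Tree.EOI if Node is
+   --  the tree root.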
+   function Next_Non_Grammar
+     (Tree : in Syntax_Trees.Tree;
+      Node : in Valid_Node_Access)
+     return Valid_Node_Access
+   is
+      Result : Node_Access := Node;
+   begin
+      if Node.ID = Tree.Lexer.Descriptor.EOI_ID then
+         return Node;
+
+      elsif Node = Tree.Root then
+         return Tree.EOI;
+      end if;
+
+      loop
+         Result := Next_Terminal (Tree, Result);
+         exit when Result.Non_Grammar.Length > 0;
+      end loop;
+      return Result;
+   end Next_Non_Grammar;
+
+   procedure Next_Non_Grammar
+     (Tree    : in     Syntax_Trees.Tree;
+      Ref     : in out Stream_Node_Ref)
+   is begin
+      if Ref.Node /= Invalid_Node_Access and then Ref.Node.ID = Tree.Lexer.Descriptor.EOI_ID then
+         return;
+      end if;
+      loop
+         Next_Terminal (Tree, Ref);
+         exit when Ref.Node = Invalid_Node_Access;
+         exit when Ref.Node.Non_Grammar.Length > 0;
+      end loop;
+   end Next_Non_Grammar;
+
+   procedure Next_Non_Grammar
+     (Tree    : in     Syntax_Trees.Tree;
+      Ref     : in out Stream_Node_Parents)
+   is begin
+      if Ref.Ref.Node /= Invalid_Node_Access and then Ref.Ref.Node.ID = Tree.Lexer.Descriptor.EOI_ID then
+         return;
+      end if;
+      loop
+         Next_Terminal (Tree, Ref, Following => True);
+         exit when Ref.Ref.Node = Invalid_Node_Access;
+         exit when Ref.Ref.Node.Non_Grammar.Length > 0;
+      end loop;
+   end Next_Non_Grammar;
+
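+   --  Next_Nonterm: advance Ref to the next nonterm, descending into
+   --  children and then moving to following siblings and stream elements;
+   --  Invalid_Stream_Node_Ref when EOI is reached.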
+   procedure Next_Nonterm (Tree : in Syntax_Trees.Tree; Ref : in out Stream_Node_Ref)
+   is
+      procedure Next_Sibling
+      is begin
+         loop
+            if Ref.Node.Parent = Invalid_Node_Access then
+               loop
+                  Stream_Next (Tree, Ref, Rooted => True);
+                  if Ref = Invalid_Stream_Node_Ref then
+                     return;
+
+                  elsif Ref.Node.ID = Tree.Lexer.Descriptor.EOI_ID then
+                     Ref := Invalid_Stream_Node_Ref;
+                     return;
+                  elsif Ref.Node.Label = Nonterm then
+                     return;
+                  else
+                     null;
+                  end if;
+               end loop;
+            else
+               declare
+                  Child_Index : constant Positive_Index_Type := Syntax_Trees.Child_Index
+                    (Ref.Node.Parent.all, Ref.Node);
+               begin
+                  if Child_Index = Ref.Node.Parent.Child_Count then
+                     Ref.Node := Ref.Node.Parent;
+                  else
+                     for I in Child_Index + 1 .. Ref.Node.Parent.Child_Count loop
+                        if Ref.Node.Parent.Children (I).ID = Tree.Lexer.Descriptor.EOI_ID then
+                           Ref := Invalid_Stream_Node_Ref;
+                           return;
+                        elsif Ref.Node.Parent.Children (I).Label = Nonterm then
+                           Ref.Node := Ref.Node.Parent.Children (I);
+                           return;
+                        end if;
+                     end loop;
+                     Ref.Node := Ref.Node.Parent;
+                  end if;
+               end;
+            end if;
+         end loop;
+      end Next_Sibling;
+
+   begin
+      case Ref.Node.Label is
+      when Terminal_Label =>
+         Next_Sibling;
+
+      when Nonterm =>
+         if Ref.Node.Child_Count > 0 then
+            for N of Ref.Node.Children loop
+               if N.ID = Tree.Lexer.Descriptor.EOI_ID then
+                  Ref := Invalid_Stream_Node_Ref;
+                  return;
+               elsif N.Label = Nonterm then
+                  Ref.Node := N;
+                  return;
+               end if;
+            end loop;
+         end if;
+         Next_Sibling;
+      end case;
+   end Next_Nonterm;
+
+   procedure Next_Parse_Stream (Tree : in Syntax_Trees.Tree; Stream : in out Stream_ID)
+   is begin
+      Parse_Stream_Lists.Next (Stream.Cur);
+   end Next_Parse_Stream;
+
+   procedure Next_Sequential_Terminal
+     (Tree    : in     Syntax_Trees.Tree;
+      Node    : in out Node_Access;
+      Parents : in out Node_Stacks.Stack)
+   is begin
+      loop
+         Next_Terminal (Tree, Node, Parents);
+         exit when Node = Invalid_Node_Access;
+         exit when Node.Sequential_Index /= Invalid_Sequential_Index;
+      end loop;
+   end Next_Sequential_Terminal;
+
+   procedure Next_Sequential_Terminal
+     (Tree : in     Syntax_Trees.Tree;
+      Ref  : in out Syntax_Trees.Stream_Node_Ref)
+   is begin
+      loop
+         Next_Terminal (Tree, Ref);
+         exit when Ref.Node = Invalid_Node_Access;
+         exit when Ref.Node.Sequential_Index /= Invalid_Sequential_Index;
+      end loop;
+   end Next_Sequential_Terminal;
+
+   procedure Next_Sequential_Terminal
+     (Tree      : in     Syntax_Trees.Tree;
+      Ref       : in out Syntax_Trees.Stream_Node_Parents;
+      Following : in     Boolean)
+   is begin
+      loop
+         Next_Terminal (Tree, Ref, Following);
+         exit when Ref.Ref.Node = Invalid_Node_Access;
+         exit when Ref.Ref.Node.Sequential_Index /= Invalid_Sequential_Index;
+      end loop;
+   end Next_Sequential_Terminal;
+
+   function Next_Source_Terminal
+     (Tree                 : in Syntax_Trees.Tree;
+      Node                 : in Valid_Node_Access;
+      Trailing_Non_Grammar : in Boolean)
+     return Node_Access
+   is
+      Result : Node_Access := Next_Terminal (Tree, Node);
+   begin
+      loop
+         exit when Result = Invalid_Node_Access;
+         exit when
+           (if Trailing_Non_Grammar
+            then (case Terminal_Label'(Result.Label) is
+                  when Source_Terminal => True,
+                  when Virtual_Terminal | Virtual_Identifier =>
+                     Result.Non_Grammar.Length > 0)
+            else Result.Label = Source_Terminal);
+
+         Next_Terminal (Tree, Result);
+      end loop;
+      return Result;
+   end Next_Source_Terminal;
+
+   function Next_Source_Terminal
+     (Tree                 : in Syntax_Trees.Tree;
+      Ref                  : in Stream_Node_Ref;
+      Trailing_Non_Grammar : in Boolean)
+     return Stream_Node_Ref
+   is begin
+      return Result : Stream_Node_Ref := Next_Terminal (Tree, Ref) do
+         loop
+            exit when Result = Invalid_Stream_Node_Ref;
+            exit when
+              (if Trailing_Non_Grammar
+               then (case Terminal_Label'(Result.Node.Label) is
+                     when Source_Terminal => True,
+                     when Virtual_Terminal | Virtual_Identifier =>
+                        Result.Node.Non_Grammar.Length > 0)
+               else Result.Node.Label = Source_Terminal);
+
+            Next_Terminal (Tree, Result);
+         end loop;
+      end return;
+   end Next_Source_Terminal;
+
+   procedure Next_Source_Terminal
+     (Tree                 : in     Syntax_Trees.Tree;
+      Ref                  : in out Stream_Node_Ref;
+      Trailing_Non_Grammar : in     Boolean)
+   is begin
+      loop
+         Next_Terminal (Tree, Ref);
+         exit when Ref = Invalid_Stream_Node_Ref;
+         exit when
+           (if Trailing_Non_Grammar
+            then (case Terminal_Label'(Ref.Node.Label) is
+                  when Source_Terminal => True,
+                  when Virtual_Terminal | Virtual_Identifier =>
+                     Ref.Node.Non_Grammar.Length > 0)
+            else Ref.Node.Label = Source_Terminal);
+      end loop;
+   end Next_Source_Terminal;
+
+   procedure Next_Source_Terminal
+     (Tree                 : in     Syntax_Trees.Tree;
+      Ref                  : in out Stream_Node_Parents;
+      Trailing_Non_Grammar : in     Boolean)
+   is begin
+      loop
+         Next_Terminal (Tree, Ref, Following => True);
+         exit when Ref.Ref = Invalid_Stream_Node_Ref;
+         exit when
+           (if Trailing_Non_Grammar
+            then (case Terminal_Label'(Ref.Ref.Node.Label) is
+                  when Source_Terminal => True,
+                  when Virtual_Terminal | Virtual_Identifier =>
+                     Ref.Ref.Node.Non_Grammar.Length > 0)
+            else Ref.Ref.Node.Label = Source_Terminal);
+      end loop;
+   end Next_Source_Terminal;
+
+   function Next_Stream_ID_Trimmed_Image (Tree : in Syntax_Trees.Tree) return String
+   is begin
+      return Trimmed_Image (Tree.Next_Stream_Label);
+   end Next_Stream_ID_Trimmed_Image;
+
+   procedure Next_Terminal (Tree : in Syntax_Trees.Tree; Node : in out Node_Access)
+   is begin
+      Node := Next_Terminal (Tree, Node);
+   end Next_Terminal;
+
+   function Next_Terminal
+     (Tree : in Syntax_Trees.Tree;
+      Node : in Valid_Node_Access)
+     return Node_Access
+   is
+      pragma Unreferenced (Tree);
+
+      function First_Child (Node : in Valid_Node_Access) return Node_Access
+      is
+      begin
+         case Node.Label is
+         when Source_Terminal | Virtual_Terminal | Virtual_Identifier =>
+            return Node;
+         when Nonterm =>
+            --  Use first non-empty
+            for J of Node.Children loop
+               --  We tolerate deleted children here for edited trees.
+               if J /= Invalid_Node_Access then
+                  declare
+                     Result : constant Node_Access := First_Child (J);
+                  begin
+                     if Result /= Invalid_Node_Access then
+                        return Result;
+                     end if;
+                  end;
+               end if;
+            end loop;
+            --  All Children are empty
+            return Invalid_Node_Access;
+         end case;
+      end First_Child;
+
+      function Next_Child (Child : in Valid_Node_Access; Parent : in Node_Access) return Node_Access
+      is begin
+         --  Parent is parent of Child; return node immediately after Child.
+         if Parent = Invalid_Node_Access then
+            return Invalid_Node_Access;
+         else
+            case Parent.Label is
+            when Source_Terminal =>
+               --  Child is in Parent.Following_Deleted.
+               return Next_Child (Parent, Parent.Parent);
+
+            when Nonterm =>
+               --  Normal tree node
+               for I in Parent.Children'Range loop
+                  if Parent.Children (I) = Child then
+                     --  Use first non-empty next from I + 1.
+                     for J in I + 1 .. Parent.Children'Last loop
+                        --  We tolerate deleted children here for edited trees.
+                        if Parent.Children (J) /= Invalid_Node_Access then
+                           declare
+                              Result : constant Node_Access := First_Child (Parent.Children (J));
+                           begin
+                              if Result /= Invalid_Node_Access then
+                                 return Result;
+                              end if;
+                           end;
+                        end if;
+                     end loop;
+                     --  All next Children are empty
+                     return Next_Child (Parent, Parent.Parent);
+                  end if;
+               end loop;
+               raise SAL.Programmer_Error; -- Child not found in Node.Children
+            when others =>
+               raise SAL.Programmer_Error;
+            end case;
+         end if;
+      end Next_Child;
+   begin
+      return Next_Child (Node, Node.Parent);
+   end Next_Terminal;
+
+   procedure Next_Terminal
+     (Tree    : in     Syntax_Trees.Tree;
+      Node    : in out Node_Access;
+      Parents : in out Node_Stacks.Stack)
+   is
+      pragma Unreferenced (Tree);
+
+      function First_Child (Node : in Valid_Node_Access) return Node_Access
+      is
+      begin
+         case Node.Label is
+         when Terminal_Label =>
+            return Node;
+         when Nonterm =>
+            --  Use first non-empty
+            Parents.Push (Node);
+            for J of Node.Children loop
+               --  Encountering a deleted child here is an error in the user
+               --  algorithm.
+               declare
+                  Result : constant Node_Access := First_Child (J);
+               begin
+                  if Result /= Invalid_Node_Access then
+                     return Result;
+                  end if;
+               end;
+            end loop;
+            --  All Children are empty
+            Parents.Pop;
+            return Invalid_Node_Access;
+         end case;
+      end First_Child;
+
+      function Next_Child (Child : in Valid_Node_Access; Parent : in Valid_Node_Access) return Node_Access
+      is
+         Parent_Depth : constant SAL.Base_Peek_Type := Parents.Depth;
+      begin
+         --  Parent is Parent of Child; return node immediately after Child.
+         pragma Assert (Parent.Label = Nonterm);
+         for I in Parent.Children'Range loop
+            --  Encountering a deleted child here is an error in the user
+            --  algorithm.
+            if Parent.Children (I) = Child then
+               --  Use first non-empty from I + 1.
+               for J in I + 1 .. Parent.Children'Last loop
+                  Parents.Push (Parent);
+                  declare
+                     Result : constant Node_Access := First_Child (Parent.Children (J));
+                  begin
+                     if Result /= Invalid_Node_Access then
+                        return Result;
+                     else
+                        Parents.Pop (Parents.Depth - Parent_Depth); -- discard parents from call to First_Child.
+                     end if;
+                  end;
+               end loop;
+               --  All next Children are empty (or there are none); move to
+               --  next cousin.
+               if Parents.Is_Empty then
+                  return Invalid_Node_Access;
+               else
+                  return Next_Child (Parent, Parents.Pop);
+               end if;
+            end if;
+         end loop;
+         raise SAL.Programmer_Error; -- Child not found in Node.Children
+      end Next_Child;
+   begin
+      if Parents.Is_Empty then
+         Node := Invalid_Node_Access;
+      else
+         Node := Next_Child (Node, Parents.Pop);
+      end if;
+   end Next_Terminal;
+
+   procedure Next_Terminal
+     (Tree : in     Syntax_Trees.Tree;
+      Ref  : in out Terminal_Ref)
+   is
+      use Stream_Element_Lists;
+   begin
+      loop -- Handle empty Elements
+
+         if Element (Ref.Element.Cur).Node.Label in Terminal_Label or else
+           Ref.Node = Invalid_Node_Access
+           --  A previous Stream_Next arrived at an empty nonterm, or
+           --  Next_Terminal reached the end of an element node.
+         then
+            Stream_Next (Tree, Ref, Rooted => False);
+            exit when Ref.Element = Invalid_Stream_Index;
+
+         else
+            Ref.Node := Next_Terminal (Tree, Ref.Node);
+         end if;
+
+         exit when Ref.Node /= Invalid_Node_Access;
+      end loop;
+   end Next_Terminal;
+
+   function Next_Terminal
+     (Tree : in Syntax_Trees.Tree;
+      Ref  : in Terminal_Ref)
+     return Terminal_Ref
+   is begin
+      return Result : Terminal_Ref := Ref do
+         Next_Terminal (Tree, Result);
+      end return;
+   end Next_Terminal;
+
+   procedure Next_Terminal
+     (Tree      : in     Syntax_Trees.Tree;
+      Ref       : in out Stream_Node_Parents;
+      Following : in     Boolean)
+   is
+      use Stream_Element_Lists;
+   begin
+      loop -- Handle empty Elements
+
+         if Element (Ref.Ref.Element.Cur).Node.Label in Terminal_Label or else
+           --  Can only be true on the first loop
+
+           Ref.Ref.Node = Invalid_Node_Access
+           --  The previous loop reached the end of an element node.
+
+         then
+            if Following then
+               Stream_Next (Tree, Ref, Rooted => False);
+            else
+               Ref.Ref.Node := Invalid_Node_Access;
+               Ref.Parents.Clear;
+               exit;
+            end if;
+
+            exit when Ref.Ref.Element = Invalid_Stream_Index; -- end of stream
+
+            exit when Ref.Ref.Node /= Invalid_Node_Access and then Ref.Ref.Node.Label in Terminal_Label;
+
+         else
+            Next_Terminal (Tree, Ref.Ref.Node, Ref.Parents);
+            exit when Ref.Ref.Node /= Invalid_Node_Access;
+         end if;
+
+      end loop;
+   end Next_Terminal;
+
+   function New_Stream (Tree : in out Syntax_Trees.Tree) return Stream_ID
+   is begin
+      return Result : constant Stream_ID :=
+        (Cur             => Tree.Streams.Append
+           ((Label       => Tree.Next_Stream_Label,
+             Stack_Top   => Invalid_Stream_Index.Cur,
+             Shared_Link => Tree.Stream_First (Tree.Shared_Stream, Skip_SOI => True).Element.Cur,
+             Elements    => <>)))
+      do
+         Tree.Next_Stream_Label := @ + 1;
+      end return;
+   end New_Stream;
+
+   function New_Stream
+     (Tree       : in out Syntax_Trees.Tree;
+      Old_Stream : in     Stream_ID)
+     return Stream_ID
+   is begin
+      if Old_Stream = Invalid_Stream_ID then
+         return New_Stream (Tree);
+      else
+         declare
+            Old_Parse_Stream : Parse_Stream renames Tree.Streams (Old_Stream.Cur);
+            Old_Stack_Top    : constant Stream_Element_Lists.Cursor := Old_Parse_Stream.Stack_Top;
+
+            Result_Cur : constant Parse_Stream_Lists.Cursor := Tree.Streams.Append
+              ((Label       => Tree.Next_Stream_Label,
+                Stack_Top   => Invalid_Stream_Index.Cur,
+                Shared_Link => Old_Parse_Stream.Shared_Link,
+                Elements    => <>));
+
+            New_Stream : Parse_Stream renames Tree.Streams (Result_Cur);
+
+            New_Cur : Stream_Element_Lists.Cursor;
+            Old_Cur : Stream_Element_Lists.Cursor := Old_Parse_Stream.Elements.First;
+            use Stream_Element_Lists;
+         begin
+            loop
+               exit when not Has_Element (Old_Cur);
+               declare
+                  Old_Element : constant Stream_Element := Element (Old_Cur);
+                  New_Node    : constant Node_Access    := Old_Element.Node;
+                  --  We do not deep copy any nodes for the new stream; they are all
+                  --  shared with other streams.
+               begin
+                  New_Cur := New_Stream.Elements.Append
+                    ((Node  => New_Node,
+                      State => Old_Element.State));
+               end;
+
+               if Old_Cur = Old_Stack_Top then
+                  New_Stream.Stack_Top := New_Cur;
+               end if;
+               Next (Old_Cur);
+            end loop;
+
+            Tree.Next_Stream_Label := @ + 1;
+
+            return (Cur => Result_Cur);
+         end;
+      end if;
+   end New_Stream;
+
+   function Node_Access_Compare (Left, Right : in Node_Access) return SAL.Compare_Result
+   is
+     --  Within one batch parsed subtree, positive and negative
+     --  Node_Indices are separately unique. Positive Node_Index first, abs
+     --  value for wisitoken_grammar_editing.translate_EBNF_to_BNF.
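+     --  For example, indices 1, 5, -2 and -7 compare as ascending in that
+     --  order: all positive indices sort before all non-positive ones, and
+     --  within each group the order is by absolute value.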
+     (if Left.Node_Index > 0 and Right.Node_Index <= 0 then SAL.Less
+      elsif Left.Node_Index <= 0 and Right.Node_Index > 0 then SAL.Greater
+      elsif abs Left.Node_Index > abs Right.Node_Index then SAL.Greater
+      elsif abs Left.Node_Index < abs Right.Node_Index then SAL.Less
+      else SAL.Equal);
+
+   function Node_ID
+     (Tree   : in Syntax_Trees.Tree;
+      Item : in Recover_Token)
+     return Token_ID
+   is
+      pragma Unreferenced (Tree);
+   begin
+      return Item.Node.ID;
+   end Node_ID;
+
+   function Non_Grammar_Var
+     (Tree     : in Syntax_Trees.Tree;
+      Terminal : in     Valid_Node_Access)
+     return Token_Array_Var_Ref
+   is
+      pragma Unreferenced (Tree);
+   begin
+      return
+        (Element =>
+           (case Terminal.Label is
+            when Terminal_Label => Terminal.Non_Grammar'Access,
+            when others         => raise SAL.Programmer_Error),
+         Dummy => 0);
+   end Non_Grammar_Var;
+
+   function Non_Grammar_Const (Terminal : in Valid_Node_Access) return Token_Array_Const_Ref
+   is begin
+      return
+        (Element =>
+           (case Terminal.Label is
+            when Terminal_Label => Terminal.Non_Grammar'Access,
+            when others         => raise SAL.Programmer_Error),
+         Dummy => 0);
+   end Non_Grammar_Const;
+
+   function Non_Grammar_Const
+     (Tree     : in Syntax_Trees.Tree;
+      Terminal : in Valid_Node_Access)
+     return Token_Array_Const_Ref
+   is
+      pragma Unreferenced (Tree);
+   begin
+      return Non_Grammar_Const (Terminal);
+   end Non_Grammar_Const;
+
+   function Parent
+     (Tree  : in Syntax_Trees.Tree;
+      Node  : in Valid_Node_Access;
+      Count : in Positive := 1)
+     return Node_Access
+   is
+      pragma Unreferenced (Tree);
+
+      Result : Node_Access := Node;
+      N      : Natural    := 0;
+   begin
+      loop
+         Result := Result.Parent;
+         N := N + 1;
+         exit when N = Count or Result = Invalid_Node_Access;
+      end loop;
+      return Result;
+   end Parent;
+
+   function Parents_Set (Tree : in Syntax_Trees.Tree) return Boolean
+   is begin
+      return Tree.Parents_Set;
+   end Parents_Set;
+
+   function Parents_Valid (Ref : in Stream_Node_Parents) return Boolean
+   is begin
+      return
+        (Ref.Ref.Element = Invalid_Stream_Index and Ref.Ref.Node = Invalid_Node_Access) or else
+        ((Stream_Element_Lists.Element (Ref.Ref.Element.Cur).Node = Ref.Ref.Node or
+            Ref.Ref.Node = Invalid_Node_Access) and Ref.Parents.Is_Empty) or else
+        (Ref.Parents.Depth > 0 and then
+           (for all Item of Ref.Parents => Item /= Invalid_Node_Access and then Item.Label = Nonterm) and then
+           (Ref.Parents.Peek (Ref.Parents.Depth) = Stream_Element_Lists.Element (Ref.Ref.Element.Cur).Node and
+              --  we don't check the intervening parent items.
+              (for some Child of Ref.Parents.Peek.Children => Child = Ref.Ref.Node)));
+   end Parents_Valid;
+
+   function Parseable (Tree : in Syntax_Trees.Tree) return Boolean
+   is begin
+      return Tree.Streams.Length = 1;
+   end Parseable;
+
+   function Peek
+     (Tree   : in Syntax_Trees.Tree;
+      Stream : in Stream_ID;
+      Count  : in SAL.Peek_Type := 1)
+     return Stream_Index
+   is
+      use Stream_Element_Lists;
+
+      Result : Cursor := Tree.Streams (Stream.Cur).Stack_Top;
+   begin
+      for I in 1 .. Count - 1 loop
+         Result := Previous (@);
+      end loop;
+      return (Cur => Result);
+   end Peek;
+
+   function Pop (Parse_Stream : in out Syntax_Trees.Parse_Stream) return Valid_Node_Access
+   is
+      use Stream_Element_Lists;
+      Temp : Cursor := Parse_Stream.Stack_Top;
+   begin
+      return Result : constant Valid_Node_Access := Element (Parse_Stream.Stack_Top).Node do
+         Previous (Parse_Stream.Stack_Top);
+         Parse_Stream.Elements.Delete (Temp);
+         --  This does not change Parse_Stream.Shared_Link
+      end return;
+   end Pop;
+
+   function Pop
+     (Tree      : in out Syntax_Trees.Tree;
+      Stream    : in     Stream_ID)
+     return Valid_Node_Access
+   is begin
+      return Pop (Tree.Streams (Stream.Cur));
+   end Pop;
+
+   function Prev_New_Line
+     (Tree       : in Syntax_Trees.Tree;
+      Node       : in Valid_Node_Access;
+      Start_Line : in Base_Line_Number_Type := Invalid_Line_Number)
+     return New_Line_Ref
+   is
+      Prev              : Node_Access           := Tree.Prev_Terminal (Node);
+      Actual_Start_Line : Line_Number_Type;
+
+      function SOI_New_Line_Ref return New_Line_Ref
+      is begin
+         return
+           (Node              => Tree.SOI,
+            Non_Grammar_Index => 1,
+            First             => False,
+            Pos               => Tree.SOI.Non_Grammar (1).Byte_Region.First,
+            Line              => Tree.SOI.Non_Grammar (1).Line_Region.First);
+      end SOI_New_Line_Ref;
+
+      function Prev_New_Line_Ref (Index : in SAL.Peek_Type) return New_Line_Ref
+      is begin
+         return
+           (Node              => Prev,
+            Non_Grammar_Index => Index,
+            First             => False,
+            Pos               => Tree.Lexer.Contains_New_Line (Prev.Non_Grammar (Index).Byte_Region, First => False),
+            Line              => Prev.Non_Grammar (Index).Line_Region.Last);
+      end Prev_New_Line_Ref;
+
+   begin
+      if Start_Line = Invalid_Line_Number then
+         declare
+            New_Line_Count : Base_Line_Number_Type := 0;
+         begin
+            if Prev = Invalid_Node_Access then
+               return SOI_New_Line_Ref;
+            end if;
+
+            loop
+               exit when Prev.Non_Grammar.Length > 0;
+               New_Line_Count := @ + Prev.New_Line_Count;
+               Prev_Terminal (Tree, Prev);
+            end loop;
+
+            Actual_Start_Line := Prev.Non_Grammar (Prev.Non_Grammar.Last_Index).Line_Region.Last + New_Line_Count;
+            Prev              := Tree.Prev_Terminal (Node);
+         end;
+      else
+         Actual_Start_Line := Start_Line;
+      end if;
+
+      loop
+         if Prev = Invalid_Node_Access then
+            return SOI_New_Line_Ref;
+
+         elsif Prev = Tree.SOI then
+            for I in reverse Prev.Non_Grammar.First_Index + 1 .. Prev.Non_Grammar.Last_Index loop
+               if New_Line_Count (Prev.Non_Grammar (I).Line_Region) > 0 then
+                  return Prev_New_Line_Ref (I);
+               end if;
+            end loop;
+            return SOI_New_Line_Ref;
+
+         else
+            case Terminal_Label'(Prev.Label) is
+            when Virtual_Identifier | Virtual_Terminal =>
+               for I in reverse Prev.Non_Grammar.First_Index .. Prev.Non_Grammar.Last_Index loop
+                  --  ada_mode-interactive_01.adb
+                  if Prev.Non_Grammar (I).ID = Tree.Lexer.Descriptor.New_Line_ID then
+                     return Prev_New_Line_Ref (I);
+                  end if;
+               end loop;
+               Prev_Terminal (Tree, Prev);
+
+            when Source_Terminal =>
+               if Prev.Non_Grammar.Length > 0 then
+                  for I in reverse Prev.Non_Grammar.First_Index .. Prev.Non_Grammar.Last_Index loop
+                     if New_Line_Count (Prev.Non_Grammar (I).Line_Region) > 0 then
+                        return Prev_New_Line_Ref (I);
+                     end if;
+                  end loop;
+                  Prev_Terminal (Tree, Prev);
+
+               else
+                  declare
+                     Result : New_Line_Ref;
+                  begin
+                     Result.Pos := Tree.Lexer.Contains_New_Line (Prev.ID, Prev.Byte_Region, First => False);
+                     if Result.Pos /= Invalid_Buffer_Pos then
+                        Result.Node  := Prev;
+                        Result.First := True;
+                        Result.Line  := Actual_Start_Line;
+                        return Result;
+
+                     else
+                        Prev_Terminal (Tree, Prev);
+                     end if;
+                  end;
+               end if;
+            end case;
+         end if;
+      end loop;
+   end Prev_New_Line;
+
+   function Prev_Non_Grammar
+     (Tree : in Syntax_Trees.Tree;
+      Node : in Valid_Node_Access)
+     return Node_Access
+   is
+      Result : Node_Access := Node;
+   begin
+      if Node = Tree.Root then
+         return Tree.SOI;
+
+      elsif Node.ID = Tree.Lexer.Descriptor.SOI_ID then
+         return Node;
+      end if;
+
+      loop
+         Result := Prev_Terminal (Tree, Result);
+         exit when Result = Invalid_Node_Access;
+         exit when Result.Non_Grammar.Length > 0;
+      end loop;
+      return Result;
+   end Prev_Non_Grammar;
+
+   procedure Prev_Non_Grammar
+     (Tree : in     Syntax_Trees.Tree;
+      Ref  : in out Stream_Node_Ref)
+   is begin
+      if Ref.Node /= Invalid_Node_Access and then Ref.Node.ID = Tree.Lexer.Descriptor.SOI_ID then
+         return;
+      end if;
+
+      loop
+         Prev_Terminal (Tree, Ref);
+         exit when Ref.Node = Invalid_Node_Access;
+         exit when Ref.Node.Non_Grammar.Length > 0;
+      end loop;
+   end Prev_Non_Grammar;
+
+   procedure Prev_Non_Grammar
+     (Tree         : in     Syntax_Trees.Tree;
+      Ref          : in out Stream_Node_Parents;
+      Parse_Stream : in     Stream_ID)
+   is begin
+      if Ref.Ref.Node /= Invalid_Node_Access and then Ref.Ref.Node.ID = Tree.Lexer.Descriptor.SOI_ID then
+         return;
+      end if;
+      loop
+         Prev_Terminal (Tree, Ref, Parse_Stream, Preceding => True);
+         exit when Ref.Ref.Node = Invalid_Node_Access;
+         exit when Ref.Ref.Node.Non_Grammar.Length > 0;
+      end loop;
+   end Prev_Non_Grammar;
+
+   procedure Prev_Sequential_Terminal
+     (Tree    : in     Syntax_Trees.Tree;
+      Node    : in out Node_Access;
+      Parents : in out Node_Stacks.Stack)
+   is begin
+      loop
+         Prev_Terminal (Tree, Node, Parents);
+         exit when Node = Invalid_Node_Access;
+         exit when Node.Sequential_Index /= Invalid_Sequential_Index;
+      end loop;
+   end Prev_Sequential_Terminal;
+
+   procedure Prev_Sequential_Terminal
+     (Tree         : in     Syntax_Trees.Tree;
+      Ref          : in out Syntax_Trees.Stream_Node_Parents;
+      Parse_Stream : in     Stream_ID;
+      Preceding    : in     Boolean)
+   is begin
+      loop
+         Prev_Terminal (Tree, Ref, Parse_Stream, Preceding);
+         exit when not Preceding and Ref.Ref.Node = Invalid_Node_Access;
+         exit when Ref.Ref = Invalid_Stream_Node_Ref;
+         exit when Ref.Ref.Node.Sequential_Index /= Invalid_Sequential_Index;
+      end loop;
+   end Prev_Sequential_Terminal;
+
+   function Prev_Source_Terminal
+     (Tree                 : in Syntax_Trees.Tree;
+      Node                 : in Node_Access;
+      Trailing_Non_Grammar : in Boolean)
+     return Node_Access
+   is begin
+      return Result : Node_Access := Node do
+         loop
+            Result := Prev_Terminal (Tree, Result);
+            exit when Result = Invalid_Node_Access;
+            exit when
+              (if Trailing_Non_Grammar
+               then (case Terminal_Label'(Result.Label) is
+                     when Source_Terminal => True,
+                     when Virtual_Terminal | Virtual_Identifier =>
+                        Result.Non_Grammar.Length > 0)
+               else Result.Label = Source_Terminal);
+         end loop;
+      end return;
+   end Prev_Source_Terminal;
+
+   function Prev_Source_Terminal
+     (Tree                 : in Syntax_Trees.Tree;
+      Ref                  : in Stream_Node_Ref;
+      Trailing_Non_Grammar : in Boolean)
+     return Stream_Node_Ref
+   is begin
+      return Result : Stream_Node_Ref := Ref do
+         loop
+            Prev_Terminal (Tree, Result);
+            exit when Result = Invalid_Stream_Node_Ref;
+            exit when
+              (if Trailing_Non_Grammar
+               then (case Terminal_Label'(Result.Node.Label) is
+                     when Source_Terminal => True,
+                     when Virtual_Terminal | Virtual_Identifier =>
+                        Result.Node.Non_Grammar.Length > 0)
+               else Result.Node.Label = Source_Terminal);
+         end loop;
+      end return;
+   end Prev_Source_Terminal;
+
+   procedure Prev_Source_Terminal
+     (Tree                 : in     Syntax_Trees.Tree;
+      Ref                  : in out Stream_Node_Parents;
+      Parse_Stream         : in     Stream_ID;
+      Trailing_Non_Grammar : in     Boolean)
+   is begin
+      loop
+         Prev_Terminal (Tree, Ref, Parse_Stream, Preceding => True);
+         exit when Ref.Ref = Invalid_Stream_Node_Ref;
+         exit when
+           (if Trailing_Non_Grammar
+            then (case Terminal_Label'(Ref.Ref.Node.Label) is
+                  when Source_Terminal => True,
+                  when Virtual_Terminal | Virtual_Identifier =>
+                     Ref.Ref.Node.Non_Grammar.Length > 0)
+            else Ref.Ref.Node.Label = Source_Terminal);
+      end loop;
+   end Prev_Source_Terminal;
+
+   function Prev_Terminal (Tree : in Syntax_Trees.Tree; Node : in Valid_Node_Access) return Node_Access
+   is
+      pragma Unreferenced (Tree);
+
+      function Last_Child (Node : in Valid_Node_Access) return Node_Access
+      is begin
+         case Node.Label is
+         when Source_Terminal | Virtual_Terminal | Virtual_Identifier =>
+            return Node;
+         when Nonterm =>
+            --  Use first non-empty from end.
+            for J of reverse Node.Children loop
+               --  We tolerate deleted children here for edited trees.
+               if J /= Invalid_Node_Access then
+                  declare
+                     Result : constant Node_Access := Last_Child (J);
+                  begin
+                     if Result /= Invalid_Node_Access then
+                        return Result;
+                     end if;
+                  end;
+               end if;
+            end loop;
+            --  All Children are empty
+            return Invalid_Node_Access;
+         end case;
+      end Last_Child;
+
+      function Prev_Child (Child : in Valid_Node_Access; Parent : in Node_Access) return Node_Access
+      is begin
+         --  Parent is Parent of Child; return terminal node immediately previous to Child.
+         if Parent = Invalid_Node_Access then
+            return Invalid_Node_Access;
+         else
+            case Parent.Label is
+            when Source_Terminal =>
+               --  Child is in Parent.Following_Deleted
+               return Parent;
+
+            when Nonterm =>
+               --  Normal tree entry
+               for I in reverse Parent.Children'Range loop
+                  if Parent.Children (I) = Child then
+                     --  Use first non-empty from I - 1.
+                     for J in reverse Parent.Children'First .. I - 1 loop
+                        --  We tolerate deleted children here for edited trees.
+                        if Parent.Children (J) /= Invalid_Node_Access then
+                           declare
+                              Result : constant Node_Access := Last_Child (Parent.Children (J));
+                           begin
+                              if Result /= Invalid_Node_Access then
+                                 return Result;
+                              end if;
+                           end;
+                        end if;
+                     end loop;
+                     --  All previous Children are empty
+                     return Prev_Child (Parent, Parent.Parent);
+                  end if;
+               end loop;
+               raise SAL.Programmer_Error; -- Child not found in Parent.Children
+            when others =>
+               raise SAL.Programmer_Error;
+            end case;
+         end if;
+      end Prev_Child;
+   begin
+      return Prev_Child (Node, Node.Parent);
+   end Prev_Terminal;
+
+   procedure Prev_Terminal (Tree : in Syntax_Trees.Tree; Node : in out Node_Access)
+   is begin
+      Node := Prev_Terminal (Tree, Node);
+   end Prev_Terminal;
+
+   procedure Prev_Terminal
+     (Tree    : in     Syntax_Trees.Tree;
+      Node    : in out Node_Access;
+      Parents : in out Node_Stacks.Stack)
+   is
+      pragma Unreferenced (Tree);
+
+      function Last_Child (Node : in Valid_Node_Access) return Node_Access
+      is begin
+         case Node.Label is
+         when Terminal_Label =>
+            return Node;
+         when Nonterm =>
+            --  Use first non-empty from end.
+            Parents.Push (Node);
+            for J of reverse Node.Children loop
+               --  Encountering a deleted child here is an error in the user
+               --  algorithm.
+               declare
+                  Result : constant Node_Access := Last_Child (J);
+               begin
+                  if Result /= Invalid_Node_Access then
+                     return Result;
+                  end if;
+               end;
+            end loop;
+            --  All Children are empty
+            Parents.Pop;
+            return Invalid_Node_Access;
+         end case;
+      end Last_Child;
+
+      function Prev_Child (Child : in Valid_Node_Access; Parent : in Valid_Node_Access) return Node_Access
+      is
+         Parent_Depth : constant SAL.Base_Peek_Type := Parents.Depth;
+      begin
+         --  Parent is parent of Child; return node immediately previous to Child.
+         pragma Assert (Parent.Label = Nonterm);
+         for I in reverse Parent.Children'Range loop
+            --  Encountering a deleted child here is an error in the user
+            --  algorithm.
+            if Parent.Children (I) = Child then
+               --  Use first non-empty from I - 1.
+               for J in reverse Parent.Children'First .. I - 1 loop
+                  Parents.Push (Parent);
+                  declare
+                     Result : constant Node_Access := Last_Child (Parent.Children (J));
+                  begin
+                     if Result /= Invalid_Node_Access then
+                        return Result;
+                     else
+                        Parents.Pop (Parents.Depth - Parent_Depth); -- discard parents from call to Last_Child.
+                     end if;
+                  end;
+               end loop;
+
+               --  All previous Parent.Children are empty (or there are none); move to
+               --  prev cousin.
+               if Parents.Is_Empty then
+                  return Invalid_Node_Access;
+               else
+                  return Prev_Child (Parent, Parents.Pop);
+               end if;
+            end if;
+         end loop;
+         raise SAL.Programmer_Error; -- Child not found in Parent.Children
+      end Prev_Child;
+   begin
+      if Parents.Is_Empty then
+         Node := Invalid_Node_Access;
+
+      else
+         Node := Prev_Child (Node, Parents.Pop);
+      end if;
+   end Prev_Terminal;
+
+   procedure Prev_Terminal
+     (Tree : in     Syntax_Trees.Tree;
+      Ref  : in out Terminal_Ref)
+   is
+      use Stream_Element_Lists;
+   begin
+      loop -- Handle empty Elements
+
+         if Element (Ref.Element.Cur).Node.Label in Terminal_Label or else
+           Ref.Node = Invalid_Node_Access
+           --  A previous Prev_Element arrived at an empty nonterm, or
+           --  Prev_Terminal reached the beginning of an element node.
+         then
+            Stream_Prev (Tree, Ref, Rooted => False);
+            exit when Ref.Element = Invalid_Stream_Index;
+
+         else
+            Ref.Node := Prev_Terminal (Tree, Ref.Node);
+         end if;
+         exit when Ref.Node /= Invalid_Node_Access;
+      end loop;
+   end Prev_Terminal;
+
+   function Prev_Terminal
+     (Tree : in Syntax_Trees.Tree;
+      Ref  : in Terminal_Ref)
+     return Terminal_Ref
+   is begin
+      return Result : Terminal_Ref := Ref do
+         Prev_Terminal (Tree, Result);
+      end return;
+   end Prev_Terminal;
+
+   procedure Prev_Terminal
+     (Tree         : in     Syntax_Trees.Tree;
+      Ref          : in out Stream_Node_Parents;
+      Parse_Stream : in     Stream_ID;
+      Preceding    : in     Boolean)
+   is
+      use Stream_Element_Lists;
+   begin
+      loop -- Handle empty Elements
+         if Element (Ref.Ref.Element.Cur).Node.Label in Terminal_Label or else
+           Ref.Ref.Node = Invalid_Node_Access
+           --  Ref is at a terminal element or an empty nonterm, or was at a
+           --  first terminal; move to previous stream element.
+         then
+            if not Preceding then
+               Ref.Ref.Node := Invalid_Node_Access;
+               Ref.Parents.Clear;
+               exit;
+            end if;
+            if Parse_Stream /= Invalid_Stream_ID and Ref.Ref.Stream = Tree.Shared_Stream then
+               declare
+                  P_Stream : Syntax_Trees.Parse_Stream renames Tree.Streams (Parse_Stream.Cur);
+               begin
+                  if Ref.Ref.Element.Cur = P_Stream.Shared_Link then
+                     Ref :=
+                       (Ref     =>
+                          (Parse_Stream,
+                           (Cur => P_Stream.Elements.Last),
+                           Element (P_Stream.Elements.Last).Node),
+                        Parents => <>);
+                     Tree.Last_Terminal (Ref, Parse_Stream, Preceding => False);
+                  else
+                     Stream_Prev (Tree, Ref, Rooted => False);
+                  end if;
+               end;
+            else
+               Stream_Prev (Tree, Ref, Rooted => False);
+            end if;
+
+            exit when Ref.Ref.Element = Invalid_Stream_Index;
+         else
+            Prev_Terminal (Tree, Ref.Ref.Node, Ref.Parents);
+         end if;
+         exit when Ref.Ref.Node /= Invalid_Node_Access;
+      end loop;
+   end Prev_Terminal;
+
+   procedure Print_Ref_Counts (Tree : in Syntax_Trees.Tree)
+   is begin
+      for Stream of Tree.Streams loop
+         Tree.Lexer.Trace.Put (Trimmed_Image (Stream.Label) & ":");
+         declare
+            use Stream_Element_Lists;
+            Cur : Cursor := Stream.Elements.First;
+         begin
+            loop
+               exit when Cur = No_Element;
+               Tree.Lexer.Trace.Put (Integer'Image (Ref_Count (Cur) - 1));
+               Next (Cur);
+            end loop;
+         end;
+         Tree.Lexer.Trace.New_Line;
+      end loop;
+   end Print_Ref_Counts;
+
+   procedure Print_Streams
+     (Tree        : in     Syntax_Trees.Tree;
+      Children    : in     Boolean := False;
+      Non_Grammar : in     Boolean := False)
+   is begin
+      for Stream of Tree.Streams loop
+         Tree.Lexer.Trace.Put_Line
+           (Tree.Image
+              (Stream, Shared => True, Children => Children, Node_Numbers => True, Non_Grammar => Non_Grammar));
+         Tree.Lexer.Trace.New_Line;
+      end loop;
+   end Print_Streams;
+
+   procedure Print_Tree
+     (Tree         : in Syntax_Trees.Tree;
+      Root         : in Node_Access := Invalid_Node_Access;
+      Line_Numbers : in Boolean     := False;
+      Non_Grammar  : in Boolean     := False)
+   is
+      procedure Print_Node (Node : in Valid_Node_Access; Level : in Integer)
+      is begin
+         for I in 1 .. Level loop
+            Tree.Lexer.Trace.Put ("| ", Prefix => False);
+         end loop;
+         Tree.Lexer.Trace.Put (Image (Tree, Node, Children => False, RHS_Index => True, Node_Numbers => True,
+                           Line_Numbers => Line_Numbers, Non_Grammar => Non_Grammar),
+                    Prefix => False);
+
+         if Node.Augmented /= null then
+            Tree.Lexer.Trace.Put (Image_Augmented (Node.Augmented.all), Prefix => False);
+         end if;
+
+         Tree.Lexer.Trace.New_Line;
+         if Node.Label = Nonterm then
+            for Child of Node.Children loop
+               if Child = null then
+                  Tree.Lexer.Trace.Put ("    : ", Prefix => True);
+                  for I in 1 .. Level + 1 loop
+                     Tree.Lexer.Trace.Put ("| ", Prefix => False);
+                  end loop;
+                  Tree.Lexer.Trace.Put ("<null>", Prefix => False);
+                  Tree.Lexer.Trace.New_Line;
+               else
+                  Print_Node (Child, Level + 1);
+               end if;
+            end loop;
+         end if;
+      end Print_Node;
+
+      Print_Root : constant Node_Access := (if Root = Invalid_Node_Access then Syntax_Trees.Root (Tree) else Root);
+
+      Print_SOI_EOI : constant Boolean := Root = Invalid_Node_Access and Print_Root /= Tree.Root and
+        Tree.Streams.Length > 0;
+   begin
+      if Print_Root = Invalid_Node_Access then
+         Tree.Lexer.Trace.Put_Line ("<empty tree>");
+      else
+         if Print_SOI_EOI then
+            declare
+               --  Get SOI, EOI from same stream as Print_Root
+               Stream : Parse_Stream renames Tree.Streams (Tree.Streams.Last);
+            begin
+               Print_Node (Stream_Element_Lists.Element (Stream.Elements.First).Node, 0);
+            end;
+         end if;
+         Print_Node (Print_Root, 0);
+         if Print_SOI_EOI then
+            declare
+               --  Get SOI, EOI from same stream as Print_Root
+               Stream : Parse_Stream renames Tree.Streams (Tree.Streams.Last);
+            begin
+               Print_Node (Stream_Element_Lists.Element (Stream.Elements.Last).Node, 0);
+            end;
+         end if;
+      end if;
+   end Print_Tree;
+
+   function Process_Tree
+     (Tree         : in Syntax_Trees.Tree;
+      Node         : in Valid_Node_Access;
+      Visit_Parent : in Visit_Parent_Mode;
+      Process_Node : access function
+        (Tree : in Syntax_Trees.Tree;
+         Node : in Valid_Node_Access)
+        return Boolean)
+     return Boolean
+   is
+   begin
+      if Visit_Parent = Before then
+         if not Process_Node (Tree, Node) then
+            return False;
+         end if;
+      end if;
+
+      if Node.Label = Nonterm then
+         for Child of Node.Children loop
+            if Child /= null then
+               if not Process_Tree (Tree, Child, Visit_Parent, Process_Node) then
+                  return False;
+               end if;
+            end if;
+         end loop;
+      end if;
+
+      if Visit_Parent = After then
+         return Process_Node (Tree, Node);
+      else
+         return True;
+      end if;
+   end Process_Tree;
+
+   procedure Process_Tree
+     (Tree         : in out Syntax_Trees.Tree;
+      Node         : in     Valid_Node_Access;
+      Process_Node : access procedure
+        (Tree : in out Syntax_Trees.Tree;
+         Node : in     Valid_Node_Access))
+   is begin
+      if Node.Label = Nonterm then
+         for Child of Node.Children loop
+            if Child /= null then
+               Process_Tree (Tree, Child, Process_Node);
+            end if;
+         end loop;
+      end if;
+
+      Process_Node (Tree, Node);
+   end Process_Tree;
+
+   procedure Process_Tree
+     (Tree         : in out Syntax_Trees.Tree;
+      Process_Node : access procedure
+        (Tree : in out Syntax_Trees.Tree;
+         Node : in     Valid_Node_Access);
+      Root         : in     Node_Access := Invalid_Node_Access)
+   is begin
+      Tree.Traversing := True;
+      Process_Tree (Tree, (if Root = Invalid_Node_Access then Syntax_Trees.Root (Tree) else Root), Process_Node);
+      Tree.Traversing := False;
+   exception
+   when others =>
+      Tree.Traversing := False;
+      raise;
+   end Process_Tree;
+
+   function Production_ID
+     (Tree : in Syntax_Trees.Tree;
+      Node : in Valid_Node_Access)
+     return WisiToken.Production_ID
+   is
+      pragma Unreferenced (Tree);
+   begin
+      return (Node.ID, Node.RHS_Index);
+   end Production_ID;
+
+   function Push
+     (Parse_Stream : in out Syntax_Trees.Parse_Stream;
+      Stream_ID    : in     Syntax_Trees.Stream_ID;
+      Node         : in     Valid_Node_Access;
+      State        : in     State_Index)
+     return Rooted_Ref
+   is
+      use Stream_Element_Lists;
+      New_Element : constant Cursor := Parse_Stream.Elements.Insert
+        (Element  => (Node, State),
+         Before   => Next (Parse_Stream.Stack_Top));
+   begin
+      Parse_Stream.Stack_Top := New_Element;
+      --  caller must change Parse_Stream.Shared_Link if needed.
+      return (Stream_ID, (Cur => New_Element), Node);
+   end Push;
+
+   procedure Push
+     (Parse_Stream : in out Syntax_Trees.Parse_Stream;
+      Stream_ID    : in     Syntax_Trees.Stream_ID;
+      Node         : in     Valid_Node_Access;
+      State        : in     State_Index)
+   is
+      Junk : Stream_Node_Ref := Push (Parse_Stream, Stream_ID, Node, State);
+      pragma Unreferenced (Junk);
+   begin
+      null;
+   end Push;
+
+   procedure Push
+     (Tree   : in out Syntax_Trees.Tree;
+      Stream : in     Stream_ID;
+      Node   : in     Valid_Node_Access;
+      State  : in     State_Index)
+   is begin
+      Push (Tree.Streams (Stream.Cur), Stream, Node, State);
+   end Push;
+
+   procedure Push_Back
+     (Tree   : in out Syntax_Trees.Tree;
+      Stream : in     Stream_ID)
+   is
+      Parse_Stream : Syntax_Trees.Parse_Stream renames Tree.Streams (Stream.Cur);
+   begin
+      Parse_Stream.Stack_Top := Stream_Element_Lists.Previous (Parse_Stream.Stack_Top);
+      --  This does not change Parse_Stream.Shared_Link
+   end Push_Back;
+
+   procedure Put_Tree
+     (Tree      : in Syntax_Trees.Tree;
+      File_Name : in String)
    is
-      use all type SAL.Base_Peek_Type;
+      --  The format uses parens and spaces; no commas. One node per line,
+      --  then one stream per line.
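+      --
+      --  As an illustration only (see Put_Node below for the exact fields
+      --  written), a nonterm node line has roughly the shape
+      --  (NONTERM 2 35 7 0() FALSE FALSE 1 0 0( 3 5))
+      --  where the final parenthesized list holds the node indices of the
+      --  children.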
+      use Ada.Streams.Stream_IO;
+      File   : File_Type;
+      Stream : Stream_Access;
+
+      --  SAL.Gen_Unbounded_Definite_Vectors uses SAL.Peek_Type
+      --  internally to index the vector; that has a range 1 ..
+      --  Integer'Last, smaller than Node_Index'Range. So we must use a
+      --  smaller type, and hope the actual tree does not have that many
+      --  nodes.
+      subtype Half_Node_Index is Node_Index range Node_Index'First / 2 .. Node_Index'Last / 2;
+      package Node_Index_Sets is new SAL.Gen_Unbounded_Definite_Vectors
+        (Half_Node_Index, Boolean, Default_Element => False);
+      Node_Index_Seen : Node_Index_Sets.Vector;
+
+      procedure Seen (I : in Node_Index)
+      is begin
+         if Node_Index_Seen.Is_Empty then
+            Node_Index_Seen.Set_First_Last (I, I);
+         elsif I < Node_Index_Seen.First_Index then
+            Node_Index_Seen.Set_First_Last (I, Node_Index_Seen.Last_Index);
+
+         elsif I > Node_Index_Seen.Last_Index then
+            Node_Index_Seen.Set_First_Last (Node_Index_Seen.First_Index, I);
+
+         else
+            if Node_Index_Seen (I) then
+               raise SAL.Programmer_Error with "duplicate node_index; use Copy_Tree first";
+            end if;
+         end if;
+         Node_Index_Seen (I) := True;
+      end Seen;
+
+      procedure Put_Error_List (Error_List : in Error_List_Access)
+      is begin
+         if Error_List = null then
+            String'Write (Stream, " 0()");
+         else
+            String'Write (Stream, Error_List.Length'Image & '(');
+            for Error of Error_List.all loop
+               Error_Data'Class'Output (Stream, Error);
+            end loop;
+            Character'Write (Stream, ')');
+         end if;
+      end Put_Error_List;
+
+      function Buffer_Region_Image (Region : in Buffer_Region) return String
+      is begin
+         return "(" & Trimmed_Image (Region.First) & Region.Last'Image & ")";
+      end Buffer_Region_Image;
+
+      function Line_Region_Image (Region : in WisiToken.Line_Region) return String
+      is begin
+         return "(" & Trimmed_Image (Region.First) & Region.Last'Image & ")";
+      end Line_Region_Image;
+
+      function Token_Image (Token : in WisiToken.Lexer.Token) return String
+      is begin
+         return "(" & Trimmed_Image (Token.ID) &
+           Buffer_Region_Image (Token.Byte_Region) &
+           Buffer_Region_Image (Token.Char_Region) &
+           Line_Region_Image (Token.Line_Region) & ")";
+      end Token_Image;
 
-      procedure Compute (N : in Syntax_Trees.Node)
+      function Token_Array_Image is new WisiToken.Lexer.Token_Arrays.Gen_Image (Token_Image);
+
+      procedure Put_Node (Node : in Valid_Node_Access)
       is begin
-         if N.ID = ID then
-            Last := Last + 1;
-            Result (Last) := Node;
+         String'Write
+           (Stream,
+            "(" & Node.Label'Image &
+              Node.Child_Count'Image &
+              Node.ID'Image & " " &
+              Node.Node_Index'Image);
+
+         --  We don't output Node.Parent; redundant with node.Children.
+
+         if Node.Augmented /= null then
+            raise SAL.Not_Implemented with "put_tree augmented";
          end if;
-         case N.Label is
-         when Shared_Terminal | Virtual_Terminal | Virtual_Identifier =>
-            null;
-         when Nonterm =>
-            for I of N.Children loop
-               --  Encountering Deleted_Child here is an error in the user algorithm.
-               Get_IDs (Tree, I, ID, Result, Last);
-            end loop;
-         end case;
-      end Compute;
-   begin
-      Compute
-        ((if Node <= Tree.Last_Shared_Node
-          then Tree.Shared_Tree.Nodes (Node)
-          else Tree.Branched_Nodes (Node)));
-   end Get_IDs;
 
-   function Get_IDs
-     (Tree : in Syntax_Trees.Tree;
-      Node : in Valid_Node_Index;
-      ID   : in Token_ID)
-     return Valid_Node_Index_Array
-   is
-      Last : SAL.Base_Peek_Type := 0;
-   begin
-      Tree.Shared_Tree.Traversing := True;
-      return Result : Valid_Node_Index_Array (1 .. Count_IDs (Tree, Node, ID)) do
-         Get_IDs (Tree, Node, ID, Result, Last);
-         Tree.Shared_Tree.Traversing := False;
-      end return;
-   end Get_IDs;
+         Put_Error_List (Node.Error_List);
 
-   procedure Get_Terminals
-     (Tree   : in     Syntax_Trees.Tree;
-      Node   : in     Valid_Node_Index;
-      Result : in out Valid_Node_Index_Array;
-      Last   : in out SAL.Base_Peek_Type)
-   is
-      use all type SAL.Base_Peek_Type;
+         case Node.Label is
+         when Terminal_Label =>
 
-      procedure Compute (N : in Syntax_Trees.Node)
-      is begin
-         case N.Label is
-         when Shared_Terminal | Virtual_Terminal | Virtual_Identifier =>
-            Last := Last + 1;
-            Result (Last) := Node;
+            String'Write (Stream, Node.Non_Grammar.Length'Image & Token_Array_Image (Node.Non_Grammar));
+            String'Write (Stream, Node.Sequential_Index'Image);
+
+            case Terminal_Label'(Node.Label) is
+            when Source_Terminal =>
+               String'Write
+                 (Stream,
+                  Buffer_Region_Image (Node.Byte_Region) & Buffer_Region_Image (Node.Char_Region) &
+                    Node.New_Line_Count'Image);
+               String'Write (Stream, Node.Following_Deleted.Length'Image & "(");
+               for Del of Node.Following_Deleted loop
+                  String'Write (Stream, Del.Node_Index'Image);
+               end loop;
+               String'Write (Stream, ")");
+
+            when Virtual_Terminal =>
+               String'Write (Stream, " " & Node.Insert_Location'Image);
+
+            when Virtual_Identifier =>
+               raise SAL.Not_Implemented with "put_tree virtual_identifier";
+            end case;
 
          when Nonterm =>
-            for C of N.Children loop
-               --  This is called to build an edited source image while editing the tree
-               if C /= Deleted_Child then
-                  Get_Terminals (Tree, C, Result, Last);
+            String'Write
+              (Stream, " " & Node.Virtual'Image & " " & Node.Recover_Conflict'Image & Node.RHS_Index'Image &
+                 Node.Name_Offset'Image & Node.Name_Length'Image);
+            String'Write (Stream, "(");
+            for Child of Node.Children loop
+               if Child = Invalid_Node_Access then
+                  String'Write (Stream, Invalid_Node_Index'Image);
+               else
+                  String'Write (Stream, " " & Child.Node_Index'Image);
                end if;
             end loop;
+            String'Write (Stream, ")");
          end case;
-      end Compute;
-   begin
-      Compute
-        ((if Node <= Tree.Last_Shared_Node
-          then Tree.Shared_Tree.Nodes (Node)
-          else Tree.Branched_Nodes (Node)));
-   end Get_Terminals;
+         String'Write (Stream, ")" & ASCII.LF);
+      end Put_Node;
 
-   function Get_Terminals (Tree : in Syntax_Trees.Tree; Node : in Valid_Node_Index) return Valid_Node_Index_Array
-   is
-      Last : SAL.Base_Peek_Type := 0;
    begin
-      Tree.Shared_Tree.Traversing := True;
-      return Result : Valid_Node_Index_Array (1 .. SAL.Base_Peek_Type (Count_Terminals (Tree, Node))) do
-         Get_Terminals (Tree, Node, Result, Last);
-         Tree.Shared_Tree.Traversing := False;
-      end return;
-   end Get_Terminals;
+      Create (File, Out_File, File_Name);
+      Stream := Ada.Streams.Stream_IO.Stream (File);
+      String'Write (Stream, Tree.Nodes.Last_Index'Image & ASCII.LF);
+      for Node of Tree.Nodes loop
+         Seen (Node.Node_Index);
+         Put_Node (Node);
+      end loop;
+
+      String'Write (Stream, Tree.Streams.Length'Image & ASCII.LF);
+      for Stream of Tree.Streams loop
+         raise SAL.Not_Implemented with "put_tree stream";
+      end loop;
+      if Tree.Root /= Invalid_Node_Access then
+         String'Write (Stream, Tree.Root.Node_Index'Image);
+      end if;
+      Close (File);
+   end Put_Tree;
 
-   function First_Terminal (Tree : in Syntax_Trees.Tree; Node : in Valid_Node_Index) return Node_Index
+   function Recover_Conflict (Tree : in Syntax_Trees.Tree; Node : in Valid_Node_Access) return Boolean
+   is begin
+      return Node.Recover_Conflict;
+   end Recover_Conflict;
+
+   function Reduce
+     (Tree             : in out Syntax_Trees.Tree;
+      Stream           : in     Stream_ID;
+      Production       : in     WisiToken.Production_ID;
+      Child_Count      : in     Ada.Containers.Count_Type;
+      State            : in     State_Index;
+      Recover_Conflict : in     Boolean)
+     return Rooted_Ref
    is
-      function Compute (Index : in Valid_Node_Index; N : in Syntax_Trees.Node) return Node_Index
+      Parse_Stream : Syntax_Trees.Parse_Stream renames Tree.Streams (Stream.Cur);
+
+      function Pop_Children return Valid_Node_Access_Array
       is begin
-         case N.Label is
-         when Shared_Terminal | Virtual_Terminal | Virtual_Identifier =>
-            return Index;
-         when Nonterm =>
-            for C of N.Children loop
-               --  Encountering Deleted_Child here is an error in the user algorithm.
-               declare
-                  Term : constant Node_Index := First_Terminal (Tree, C);
-               begin
-                  if Term /= Invalid_Node_Index then
-                     return Term;
-                  end if;
-               end;
+         return Result : Valid_Node_Access_Array (1 .. SAL.Base_Peek_Type (Child_Count)) := (others => Dummy_Node) do
+            --  IMPROVEME: use iterated_component_association to avoid bogus init. Waiting on compiler support.
+            for I in reverse Result'Range loop
+               Result (I) := Pop (Parse_Stream);
             end loop;
-            return Invalid_Node_Index;
-         end case;
-      end Compute;
+         end return;
+      end Pop_Children;
+
+      New_Node : constant Node_Access := Tree.Add_Nonterm_1
+        (Production, Pop_Children, Clear_Parents => False, Recover_Conflict => Recover_Conflict);
    begin
-      return Compute
-        (Node,
-         (if Node <= Tree.Last_Shared_Node
-          then Tree.Shared_Tree.Nodes (Node)
-          else Tree.Branched_Nodes (Node)));
-   end First_Terminal;
+      return Push (Parse_Stream, Stream, New_Node, State);
+   end Reduce;
 
-   procedure Get_Terminal_IDs
-     (Tree   : in     Syntax_Trees.Tree;
-      Node   : in     Valid_Node_Index;
-      Result : in out Token_ID_Array;
-      Last   : in out SAL.Base_Peek_Type)
+   procedure Replace_Child
+     (Tree                 : in out Syntax_Trees.Tree;
+      Parent               : in     Valid_Node_Access;
+      Child_Index          : in     SAL.Peek_Type;
+      Old_Child            : in     Node_Access;
+      New_Child            : in     Node_Access;
+      Old_Child_New_Parent : in     Node_Access := Invalid_Node_Access)
    is
-      procedure Compute (N : in Syntax_Trees.Node)
-      is
-         use all type SAL.Base_Peek_Type;
-      begin
-         case N.Label is
-         when Shared_Terminal | Virtual_Terminal | Virtual_Identifier =>
-            Last := Last + 1;
-            Result (Integer (Last)) := N.ID;
-
-         when Nonterm =>
-            for I of N.Children loop
-               --  Encountering Deleted_Child here is an error in the user algorithm.
-               Get_Terminal_IDs (Tree, I, Result, Last);
-            end loop;
-         end case;
-      end Compute;
+      pragma Unreferenced (Tree);
    begin
-      Compute
-        ((if Node <= Tree.Last_Shared_Node
-          then Tree.Shared_Tree.Nodes (Node)
-          else Tree.Branched_Nodes (Node)));
-   end Get_Terminal_IDs;
+      Parent.Children (Child_Index) := New_Child;
+
+      if Old_Child /= null then
+         Old_Child.Parent := Old_Child_New_Parent;
+      end if;
 
-   function Get_Terminal_IDs (Tree : in Syntax_Trees.Tree; Node : in Valid_Node_Index) return Token_ID_Array
+      New_Child.Parent := Parent;
+   end Replace_Child;
+
+   procedure Replace_Node (Element : in Stream_Index; New_Node : in Valid_Node_Access)
    is
-      Last : SAL.Base_Peek_Type := 0;
+      Orig : Stream_Element := Stream_Element_Lists.Element (Element.Cur);
    begin
-      Tree.Shared_Tree.Traversing := True;
-      return Result : Token_ID_Array (1 .. Count_Terminals (Tree, Node))  do
-         Get_Terminal_IDs (Tree, Node, Result, Last);
-         Tree.Shared_Tree.Traversing := False;
-      end return;
-   end Get_Terminal_IDs;
+      Orig.Node := New_Node;
+      Stream_Element_Lists.Replace_Element (Element.Cur, Orig);
+   end Replace_Node;
 
-   function First_Shared_Terminal (Tree : in Syntax_Trees.Tree; Node : in Valid_Node_Index) return Base_Token_Index
+   procedure Replace_State (Cur : in Stream_Element_Lists.Cursor; New_State : in State_Index)
    is
-      function Compute (N : in Syntax_Trees.Node) return Base_Token_Index
-      is begin
-         return
-           (case N.Label is
-            when Shared_Terminal => N.Terminal,
-            when Virtual_Terminal |
-              Virtual_Identifier => Invalid_Token_Index,
-            when Nonterm         => N.Min_Terminal_Index);
-      end Compute;
-
+      Orig : Stream_Element := Stream_Element_Lists.Element (Cur);
    begin
-      if Node <= Tree.Last_Shared_Node then
-         return Compute (Tree.Shared_Tree.Nodes (Node));
-      else
-         return Compute (Tree.Branched_Nodes (Node));
-      end if;
-   end First_Shared_Terminal;
+      Orig.State := New_State;
+      Stream_Element_Lists.Replace_Element (Cur, Orig);
+   end Replace_State;
 
-   function First_Terminal_ID (Tree : in Syntax_Trees.Tree; Node : in Valid_Node_Index) return Token_ID
+   function RHS_Index
+     (Tree : in Syntax_Trees.Tree;
+      Node : in Valid_Node_Access)
+     return Natural
    is
-      function Compute (N : in Syntax_Trees.Node) return Token_ID
-      is begin
-         case N.Label is
-         when Shared_Terminal | Virtual_Terminal | Virtual_Identifier =>
-            return N.ID;
-
-         when Nonterm =>
-            for C of N.Children loop
-               --  Encountering Deleted_Child here is an error in the user algorithm.
-               declare
-                  ID : constant Token_ID := First_Terminal_ID (Tree, C);
-               begin
-                  if ID /= Invalid_Token_ID then
-                     return ID;
-                  end if;
-               end;
-            end loop;
-            return Invalid_Token_ID;
-         end case;
-      end Compute;
+      pragma Unreferenced (Tree);
    begin
-      return Compute
-        ((if Node <= Tree.Last_Shared_Node
-          then Tree.Shared_Tree.Nodes (Node)
-          else Tree.Branched_Nodes (Node)));
-   end First_Terminal_ID;
+      return Node.RHS_Index;
+   end RHS_Index;
 
-   function Has_Branched_Nodes (Tree : in Syntax_Trees.Tree) return Boolean
+   function Root (Tree : in Syntax_Trees.Tree) return Node_Access
    is begin
-      return Tree.Branched_Nodes.Length > 0;
-   end Has_Branched_Nodes;
+      if Tree.Root = Invalid_Node_Access then
+         if Tree.Streams.Length = 0 then
+            return Invalid_Node_Access;
+         else
+            declare
+               use Stream_Element_Lists;
+               Stream : Parse_Stream renames Tree.Streams (Tree.Streams.Last);
+               --  parse stream from Parse or shared_stream from Edit_Tree with no changes
 
-   function Has_Child
-     (Tree  : in Syntax_Trees.Tree;
-      Node  : in Valid_Node_Index;
-      Child : in Valid_Node_Index)
-     return Boolean
-   is begin
-      for C of Tree.Get_Node_Const_Ref (Node).Children loop
-         if C = Child then
-            return True;
+               Cur : Cursor := Stream.Elements.First; -- SOI
+            begin
+               if Has_Element (Cur) then
+                  Cur := Next (Cur); -- wisitoken_accept
+                  if Has_Element (Cur) then
+                     return Element (Cur).Node;
+                  end if;
+               end if;
+               return Invalid_Node_Access;
+            end;
          end if;
-      end loop;
-      return False;
-   end Has_Child;
-
-   function Has_Children (Tree : in Syntax_Trees.Tree; Node : in Valid_Node_Index) return Boolean
-   is begin
-      if Node <= Tree.Last_Shared_Node then
-         return Tree.Shared_Tree.Nodes (Node).Children.Length > 0;
       else
-         return Tree.Branched_Nodes (Node).Children.Length > 0;
+         return Tree.Root;
       end if;
-   end Has_Children;
-
-   function Has_Parent (Tree : in Syntax_Trees.Tree; Child : in Valid_Node_Index) return Boolean
-   is begin
-      return
-        (if Child <= Tree.Last_Shared_Node
-         then Tree.Shared_Tree.Nodes (Child).Parent /= Invalid_Node_Index
-         else Tree.Branched_Nodes (Child).Parent /= Invalid_Node_Index);
-   end Has_Parent;
+   end Root;
 
-   function Has_Parent (Tree : in Syntax_Trees.Tree; Children : in Valid_Node_Index_Array) return Boolean
+   function Rooted (Ref : in Stream_Node_Ref) return Boolean
    is begin
-      return
-        (for some Child of Children =>
-           (if Child <= Tree.Last_Shared_Node
-            then Tree.Shared_Tree.Nodes (Child).Parent /= Invalid_Node_Index
-            else Tree.Branched_Nodes (Child).Parent /= Invalid_Node_Index));
-   end Has_Parent;
+      return Stream_Element_Lists.Has_Element (Ref.Element.Cur) and then
+        Stream_Element_Lists.Element (Ref.Element.Cur).Node = Ref.Node;
+   end Rooted;
 
-   function ID
-     (Tree : in Syntax_Trees.Tree;
-      Node : in Valid_Node_Index)
-     return Token_ID
-   is begin
-      return
-        (if Node <= Tree.Last_Shared_Node
-         then Tree.Shared_Tree.Nodes (Node).ID
-         else Tree.Branched_Nodes (Node).ID);
-   end ID;
+   procedure Set_Augmented
+     (Tree  : in Syntax_Trees.Tree;
+      Node  : in Valid_Node_Access;
+      Value : in Augmented_Class_Access)
+   is
+      pragma Unreferenced (Tree);
+   begin
+      Node.Augmented := Value;
+   end Set_Augmented;
 
-   function Identifier (Tree : in Syntax_Trees.Tree; Node : in Valid_Node_Index) return Base_Identifier_Index
+   procedure Set_Children
+     (Tree     : in out Syntax_Trees.Tree;
+      Parent   : in out Valid_Node_Access;
+      Children : in     Node_Access_Array)
    is begin
-      return
-        (if Node <= Tree.Last_Shared_Node
-         then Tree.Shared_Tree.Nodes (Node).Identifier
-         else Tree.Branched_Nodes (Node).Identifier);
-   end Identifier;
+      --  See Design note in spec about Parents, Parent_Set.
 
-   function Image
-     (Tree         : in Syntax_Trees.Tree;
-      Children     : in Valid_Node_Index_Arrays.Vector;
-      Descriptor   : in WisiToken.Descriptor;
-      Node_Numbers : in Boolean)
-     return String
-   is
-      use Ada.Strings.Unbounded;
-      Result     : Unbounded_String := +"(";
-      Need_Comma : Boolean := False;
-   begin
-      for I of Children loop
-         Result := Result & (if Need_Comma then ", " else "") &
-           (if I = Deleted_Child
-            then "-"
-            else Tree.Image (I, Descriptor, Include_Children => False, Node_Numbers => Node_Numbers));
-         Need_Comma := True;
+      --  Clear current Children.Parent first, in case some are also in new
+      --  children.
+      for C of Parent.Children loop
+         if C /= null then
+            C.Parent := Invalid_Node_Access;
+         end if;
       end loop;
-      Result := Result & ")";
-      return -Result;
-   end Image;
-
-   function Image
-     (Tree              : in Syntax_Trees.Tree;
-      N                 : in Syntax_Trees.Node;
-      Node_Index        : in Valid_Node_Index;
-      Descriptor        : in WisiToken.Descriptor;
-      Include_Children  : in Boolean;
-      Include_RHS_Index : in Boolean := False;
-      Node_Numbers      : in Boolean := False)
-     return String
-   is
-      use Ada.Strings.Unbounded;
-      Result : Unbounded_String := +(if Node_Numbers then Image (Node_Index) & ":" else "");
-   begin
-      case N.Label is
-      when Shared_Terminal =>
-         Result := Result & Trimmed_Image (N.Terminal) & ":";
 
-      when Virtual_Identifier =>
-         Result := Result & Trimmed_Image (N.Identifier) & ";";
+      if Parent.Children'Length = Children'Length then
+         --  reuse current node
+         Parent.Virtual  := False;
+         Parent.Children := Children;
 
-      when others =>
-         null;
-      end case;
+      else
+         --  reallocate node with new child_count
+         declare
+            Realloc_Parent : constant Node_Access := new Node'
+              (Label       => Nonterm,
+               Copied_Node => Invalid_Node_Access,
+               Child_Count => Children'Last,
+               ID          => Parent.ID,
+               Node_Index  => -(Tree.Nodes.Last_Index + 1),
+               Parent      => Parent.Parent,
+               Augmented   => Parent.Augmented,
+               Error_List  =>
+                 (if Parent.Error_List = null
+                  then null
+                  else new Error_Data_Lists.List'(Parent.Error_List.all)),
+               Virtual          => False,
+               Recover_Conflict => Parent.Recover_Conflict,
+               RHS_Index        => Parent.RHS_Index,
+               Name_Offset      => Parent.Name_Offset,
+               Name_Length      => Parent.Name_Length,
+               Children         => Children);
+         begin
+            Tree.Nodes.Append (Realloc_Parent);
 
-      Result := Result & "(" & Image (N.ID, Descriptor) &
-        (if Include_RHS_Index and N.Label = Nonterm then "_" & Trimmed_Image (N.RHS_Index) else "") &
-        (if N.Byte_Region = Null_Buffer_Region then "" else ", " & Image (N.Byte_Region)) & ")";
+            if Parent.Parent /= null then
+               Parent.Parent.Children (Child_Index (Parent.Parent.all, 
Parent)) := Realloc_Parent;
+            end if;
 
-      if Include_Children and N.Label = Nonterm then
-         Result := Result & " <= " & Image (Tree, N.Children, Descriptor, Node_Numbers);
+            Parent := Realloc_Parent;
+         end;
       end if;
 
-      return -Result;
-   end Image;
-
-   function Image
-     (Tree              : in Syntax_Trees.Tree;
-      Node              : in Valid_Node_Index;
-      Descriptor        : in WisiToken.Descriptor;
-      Include_Children  : in Boolean := False;
-      Include_RHS_Index : in Boolean := False;
-      Node_Numbers      : in Boolean := False)
-     return String
-   is begin
-      return Tree.Image
-        ((if Node <= Tree.Last_Shared_Node
-          then Tree.Shared_Tree.Nodes (Node)
-          else Tree.Branched_Nodes (Node)),
-         Node, Descriptor, Include_Children, Include_RHS_Index, Node_Numbers);
-   end Image;
+      for Child of Children loop
+         if Child.Parent /= Invalid_Node_Access then
+            declare
+               Other_Parent : constant Node_Access := Child.Parent;
+               Child_Index  : constant SAL.Base_Peek_Type := Syntax_Trees.Child_Index
+                 (Other_Parent.all, Child);
+            begin
+               Other_Parent.Children (Child_Index) := null;
+            end;
+         end if;
 
-   function Image
-     (Tree       : in Syntax_Trees.Tree;
-      Nodes      : in Valid_Node_Index_Array;
-      Descriptor : in WisiToken.Descriptor)
-     return String
-   is
-      use Ada.Strings.Unbounded;
-      Result     : Unbounded_String := +"(";
-      Need_Comma : Boolean := False;
-   begin
-      for I in Nodes'Range loop
-         Result := Result & (if Need_Comma then ", " else "") &
-           Tree.Image (Nodes (I), Descriptor);
-         Need_Comma := True;
+         Child.Parent := Parent;
       end loop;
-      Result := Result & ")";
-      return -Result;
-   end Image;
+   end Set_Children;
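
A minimal sketch of the detach-and-adopt step above, using simplified, hypothetical node types rather than WisiToken's: each child's slot in its old parent is nulled before the child's Parent link is repointed at the new parent.

   --  Sketch only: hypothetical simplified types, not WisiToken's.
   with Ada.Text_IO;
   procedure Adopt_Children_Sketch is
      type Node;
      type Node_Access is access all Node;
      type Node_Array is array (Positive range <>) of Node_Access;
      type Node_Array_Access is access Node_Array;
      type Node is record
         Name     : Character;
         Parent   : Node_Access;
         Children : Node_Array_Access;
      end record;

      procedure Adopt (New_Parent : Node_Access; Children : Node_Array) is
      begin
         for Child of Children loop
            if Child.Parent /= null then
               --  Null the old parent's slot, as Set_Children does above.
               for Slot of Child.Parent.Children.all loop
                  if Slot = Child then
                     Slot := null;
                  end if;
               end loop;
            end if;
            Child.Parent := New_Parent;
         end loop;
         New_Parent.Children := new Node_Array'(Children);
      end Adopt;

      A : constant Node_Access := new Node'('A', null, null);
      B : constant Node_Access := new Node'('B', null, null);
      C : constant Node_Access := new Node'('C', A, null);
   begin
      A.Children := new Node_Array'(1 => C);
      Adopt (B, (1 => C));
      Ada.Text_IO.Put_Line ("C now under " & C.Parent.Name);  --  "B"
      Ada.Text_IO.Put_Line
        ("A's old slot " &
         (if A.Children (1) = null then "nulled" else "still set"));  --  "nulled"
   end Adopt_Children_Sketch;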
 
-   function Image
-     (Item     : in Node_Sets.Vector;
-      Inverted : in Boolean := False)
-     return String
+   procedure Set_Children
+     (Tree     : in out Syntax_Trees.Tree;
+      Node     : in out Valid_Node_Access;
+      New_ID   : in     WisiToken.Production_ID;
+      Children : in     Node_Access_Array)
    is
-      use Ada.Strings.Unbounded;
-      Result : Unbounded_String;
    begin
-      for I in Item.First_Index .. Item.Last_Index loop
-         if (if Inverted then not Item (I) else Item (I)) then
-            Result := Result & Node_Index'Image (I);
-         end if;
-      end loop;
-      return -Result;
-   end Image;
-
-   procedure Initialize
-     (Branched_Tree : in out Syntax_Trees.Tree;
-      Shared_Tree   : in     Base_Tree_Access;
-      Flush         : in     Boolean;
-      Set_Parents   : in     Boolean := False)
-   is begin
-      Branched_Tree :=
-        (Ada.Finalization.Controlled with
-         Shared_Tree      => Shared_Tree,
-         Last_Shared_Node => Shared_Tree.Nodes.Last_Index,
-         Branched_Nodes   => <>,
-         Flush            => Flush,
-         Root             => <>);
+      Node.ID        := New_ID.LHS;
+      Node.RHS_Index := New_ID.RHS;
 
-      Branched_Tree.Shared_Tree.Parents_Set := Set_Parents;
-   end Initialize;
+      Set_Children (Tree, Node, Children);
+   end Set_Children;
 
-   function Is_Descendant_Of
-     (Tree       : in Syntax_Trees.Tree;
-      Root       : in Valid_Node_Index;
-      Descendant : in Valid_Node_Index)
-     return Boolean
+   procedure Set_Insert_Location
+     (Tree            : in Syntax_Trees.Tree;
+      Node            : in Valid_Node_Access;
+      Insert_Location : in WisiToken.Insert_Location)
    is
-      Node : Node_Index := Descendant;
+      pragma Unreferenced (Tree);
    begin
-      loop
-         exit when Node = Invalid_Node_Index;
-         if Node = Root then
-            return True;
-         end if;
-
-         Node := Tree.Parent (Node);
-      end loop;
-      return False;
-   end Is_Descendant_Of;
+      Node.Insert_Location := Insert_Location;
+   end Set_Insert_Location;
 
-   function Is_Nonterm (Tree : in Syntax_Trees.Tree; Node : in Valid_Node_Index) return Boolean
+   procedure Set_Name
+     (Tree : in     Syntax_Trees.Tree;
+      Item : in out Recover_Token;
+      Name : in     Buffer_Region)
    is begin
-      if Node <= Tree.Last_Shared_Node then
-         return Tree.Shared_Tree.Nodes (Node).Label = Nonterm;
+      if Item.Virtual then
+         Item.Name := Name;
       else
-         return Tree.Branched_Nodes (Node).Label = Nonterm;
+         Tree.Set_Name (Item.Element_Node, Name);
       end if;
-   end Is_Nonterm;
+   end Set_Name;
 
-   function Is_Shared_Terminal (Tree : in Syntax_Trees.Tree; Node : in Valid_Node_Index) return Boolean
-   is begin
-      if Node <= Tree.Last_Shared_Node then
-         return Tree.Shared_Tree.Nodes (Node).Label = Shared_Terminal;
+   procedure Set_Name
+     (Tree   : in Syntax_Trees.Tree;
+      Node   : in Valid_Node_Access;
+      Region : in Buffer_Region)
+   is
+      First_Terminal : constant Node_Access := Tree.First_Terminal (Node);
+      Byte_First     : constant Buffer_Pos  := Tree.Byte_Region (First_Terminal, Trailing_Non_Grammar => False).First;
+   begin
+      if Region = Null_Buffer_Region or else -- Not a valid name
+        (First_Terminal.Label = Virtual_Terminal and not Tree.Parents_Set) --  Can't trust Byte_First
+      then
+         null;
       else
-         return Tree.Branched_Nodes (Node).Label = Shared_Terminal;
+         Node.Name_Offset := Region.First - Byte_First;
+         Node.Name_Length := Region.Last - Region.First + 1;
       end if;
-   end Is_Shared_Terminal;
+   end Set_Name;
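
As a concrete reading of the offset arithmetic above (illustrative numbers only): if the nonterm's first terminal starts at byte 100 and the requested name region is bytes 112 .. 118, Set_Name stores Name_Offset = 112 - 100 = 12 and Name_Length = 118 - 112 + 1 = 7.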
 
-   function Is_Virtual_Terminal (Tree : in Syntax_Trees.Tree; Node : in Valid_Node_Index) return Boolean
-   is begin
-      if Node <= Tree.Last_Shared_Node then
-         return Tree.Shared_Tree.Nodes (Node).Label = Virtual_Terminal;
-      else
-         return Tree.Branched_Nodes (Node).Label = Virtual_Terminal;
-      end if;
-   end Is_Virtual_Terminal;
+   procedure Set_Node_Index
+     (Tree       : in Syntax_Trees.Tree;
+      Node       : in Valid_Node_Access;
+      Node_Index : in Syntax_Trees.Node_Index)
+   is
+      pragma Unreferenced (Tree);
+   begin
+      Node.Node_Index := Node_Index;
+   end Set_Node_Index;
 
-   function Is_Virtual (Tree : in Syntax_Trees.Tree; Node : in Valid_Node_Index) return Boolean
+   procedure Set_Parents
+     (Tree   : in out Syntax_Trees.Tree;
+      Stream : in     Stream_ID := Invalid_Stream_ID)
    is
-      function Compute (N : in Syntax_Trees.Node) return Boolean
-      is begin
-         return N.Label = Virtual_Terminal or (N.Label = Nonterm and then N.Virtual);
-      end Compute;
+      procedure Set_Parents
+        (Tree   : in out Syntax_Trees.Tree;
+         Node   : in     Valid_Node_Access;
+         Parent : in     Node_Access)
+      is
+      begin
+         Node.Parent := Parent;
+         case Node.Label is
+         when Source_Terminal =>
+            for N of Node.Following_Deleted loop
+               N.Parent := Node;
+            end loop;
+
+         when Virtual_Terminal | Virtual_Identifier =>
+            null;
+
+         when Nonterm =>
+            for C of Node.Children loop
+               if C = null then
+                  --  This can only happen if someone calls Set_Parents after parents
+                  --  are already set and the tree is edited.
+                  raise SAL.Programmer_Error with "encountered deleted child";
+               end if;
+               Set_Parents (Tree, C, Node);
+            end loop;
+         end case;
+      end Set_Parents;
    begin
-      if Node <= Tree.Last_Shared_Node then
-         return Compute (Tree.Shared_Tree.Nodes (Node));
+      --  IMPROVEME incremental: only need to handle a fully parsed tree, no
+      --  streams. Use an incremental algorithm; if a parent link is already
+      --  set, assume the subtree under that node has its parent links set?
+      --  That requires everything that sets "node.parent = null" to do so
+      --  for all ancestors as well.
+      if Stream = Invalid_Stream_ID then
+         if Tree.Streams.Length = 0 then
+            if Tree.Root = Invalid_Node_Access then
+               raise SAL.Parameter_Error with "invalid_tree: no streams, Tree.Root not set";
+            else
+               Set_Parents (Tree, Tree.Root, Invalid_Node_Access);
+            end if;
+         else
+            declare
+               use Stream_Element_Lists;
+               Cur : Cursor := Tree.Streams (Tree.Shared_Stream.Cur).Elements.First;
+            begin
+               loop
+                  exit when Cur = No_Element;
+                  Set_Parents (Tree, Element (Cur).Node, Invalid_Node_Access);
+                  Next (Cur);
+               end loop;
+            end;
+         end if;
       else
-         return Compute (Tree.Branched_Nodes (Node));
+         declare
+            use Stream_Element_Lists;
+            Cur : Cursor := Tree.Streams (Stream.Cur).Elements.First;
+         begin
+            loop
+               exit when Cur = No_Element;
+               Set_Parents (Tree, Element (Cur).Node, Invalid_Node_Access);
+               Next (Cur);
+            end loop;
+         end;
       end if;
-   end Is_Virtual;
-
-   function Is_Virtual_Identifier (Tree : in Syntax_Trees.Tree; Node : in Valid_Node_Index) return Boolean
-   is begin
-      return
-        (if Node <= Tree.Last_Shared_Node
-         then Tree.Shared_Tree.Nodes (Node).Label = Virtual_Identifier
-         else Tree.Branched_Nodes (Node).Label = Virtual_Identifier);
-   end Is_Virtual_Identifier;
+      Tree.Parents_Set := True;
+   end Set_Parents;
 
-   function Label (Tree : in Syntax_Trees.Tree; Node : in Valid_Node_Index) return Node_Label
+   procedure Set_Root (Tree : in out Syntax_Trees.Tree; New_Root : in Valid_Node_Access)
    is begin
-      if Node <= Tree.Last_Shared_Node then
-         return Tree.Shared_Tree.Nodes (Node).Label;
+      if New_Root.Children (1).ID = Tree.Lexer.Descriptor.SOI_ID and
+        New_Root.Children (New_Root.Children'Last).ID = Tree.Lexer.Descriptor.EOI_ID
+      then
+         Tree.Root := New_Root;
       else
-         return Tree.Branched_Nodes (Node).Label;
-      end if;
-   end Label;
+         declare
+            function Create_New_Children return Node_Access_Array
+            is
+               Last : Positive_Index_Type := New_Root.Children'Last;
+               New_Children : Node_Access_Array (1 .. New_Root.Children'Last + 2);
+            begin
+               if New_Root.Children (1) /= Tree.SOI then
+                  New_Children (1) := Tree.SOI;
+                  Last := 1 + New_Root.Children'Length;
+                  New_Children (2 .. Last) := New_Root.Children;
+               end if;
 
-   function Last_Index (Tree : in Syntax_Trees.Tree) return Node_Index
-   is begin
-      return
-        (if Tree.Flush
-         then Tree.Shared_Tree.Nodes.Last_Index
-         else Tree.Branched_Nodes.Last_Index);
-   end Last_Index;
+               if New_Root.Children (New_Root.Children'Last) /= Tree.EOI then
+                  Last := @ + 1;
+                  New_Children (Last) := Tree.EOI;
+               end if;
+               return New_Children (1 .. Last);
+            end Create_New_Children;
 
-   function Last_Shared_Terminal (Tree : in Syntax_Trees.Tree; Node : in Valid_Node_Index) return Base_Token_Index
-   is
-      --  Max_Terminal_Index is not cached, because it is not needed in recover.
+            New_Children : constant Node_Access_Array := Create_New_Children;
+         begin
+            Tree.Root := new Node'
+              (Label       => Nonterm,
+               Copied_Node => Invalid_Node_Access,
+               Child_Count => New_Children'Last,
+               ID          => New_Root.ID,
+               Node_Index  => New_Root.Node_Index,
+               Parent      => null,
+               Augmented   => New_Root.Augmented,
+               Error_List  =>
+                 (if New_Root.Error_List = null
+                  then null
+                  else new Error_Data_Lists.List'(New_Root.Error_List.all)),
+               Virtual          => New_Root.Virtual,
+               Recover_Conflict => New_Root.Recover_Conflict,
+               RHS_Index        => New_Root.RHS_Index,
+               Name_Offset      => New_Root.Name_Offset,
+               Name_Length      => New_Root.Name_Length,
+               Children         => New_Children);
+
+            for Child of New_Children loop
+               Child.Parent := Tree.Root;
+            end loop;
 
-      function Compute (N : in Syntax_Trees.Node) return Base_Token_Index
-      is begin
-         case N.Label is
-         when Shared_Terminal =>
-            return N.Terminal;
+            Tree.Nodes.Append (Tree.Root);
+         end;
+      end if;
+   end Set_Root;
 
-         when Virtual_Terminal | Virtual_Identifier =>
-            return Invalid_Token_Index;
+   procedure Set_Sequential_Index
+     (Tree  : in Syntax_Trees.Tree;
+      Node  : in Valid_Node_Access;
+      Index : in Base_Sequential_Index)
+   is
+      pragma Unreferenced (Tree);
+   begin
+      Node.Sequential_Index := Index;
+   end Set_Sequential_Index;
 
-         when Nonterm =>
-            for C of reverse N.Children loop
-               --  Encountering Deleted_Child here is an error in the user algorithm.
-               declare
-                  Last_Term : constant Base_Token_Index := Tree.Last_Shared_Terminal (C);
-               begin
-                  if Last_Term /= Invalid_Token_Index then
-                     return Last_Term;
-                  end if;
-               end;
-            end loop;
-            return Invalid_Token_Index;
-         end case;
-      end Compute;
+   function Shared_Stream (Tree : in Syntax_Trees.Tree) return Stream_ID
+   is begin
+      return Tree.Shared_Stream;
+   end Shared_Stream;
 
+   function Shared_Token
+     (Tree   : in Syntax_Trees.Tree;
+      Stream : in Stream_ID)
+     return Rooted_Ref
+   is
+      Parse_Stream : Syntax_Trees.Parse_Stream renames Tree.Streams (Stream.Cur);
    begin
-      if Node <= Tree.Last_Shared_Node then
-         return Compute (Tree.Shared_Tree.Nodes (Node));
+      if Stream_Element_Lists.Has_Element (Parse_Stream.Shared_Link) then
+         return
+           (Stream  => Tree.Shared_Stream,
+            Element => (Cur => Parse_Stream.Shared_Link),
+            Node    => Stream_Element_Lists.Element (Parse_Stream.Shared_Link).Node);
       else
-         return Compute (Tree.Branched_Nodes (Node));
+         --  Shared_Link was EOI, then EOI was copied to parse stream to add an
+         --  error. test_incremental.adb Preserve_parse_Errors_1.
+         return Invalid_Stream_Node_Ref;
       end if;
-   end Last_Shared_Terminal;
+   end Shared_Token;
 
-   function Last_Terminal (Tree : in Syntax_Trees.Tree; Node : in Valid_Node_Index) return Node_Index
+   procedure Shift
+     (Tree   : in out Syntax_Trees.Tree;
+      Stream : in     Stream_ID;
+      State  : in     State_Index)
    is
-      N : constant Node_Const_Ref := Tree.Get_Node_Const_Ref (Node);
+      Parse_Stream : Syntax_Trees.Parse_Stream renames Tree.Streams (Stream.Cur);
    begin
-      case N.Label is
-      when Shared_Terminal | Virtual_Terminal | Virtual_Identifier =>
-         return Node;
-      when Nonterm =>
-         for C of reverse N.Children loop
-            --  Encountering Deleted_Child here is an error in the user algorithm.
-            declare
-               Term : constant Node_Index := Last_Terminal (Tree, C);
-            begin
-               if Term /= Invalid_Node_Index then
-                  return Term;
-               end if;
-            end;
-         end loop;
-         return Invalid_Node_Index;
+      if Parse_Stream.Stack_Top = Parse_Stream.Elements.Last then
+         --  Current input token is Stream.Shared_Link.
+         Push (Parse_Stream, Stream, Stream_Element_Lists.Element (Parse_Stream.Shared_Link).Node, State);
+         Stream_Element_Lists.Next (Parse_Stream.Shared_Link);
+      else
+         --  Current input token is Stream input.
+         Stream_Element_Lists.Next (Parse_Stream.Stack_Top);
+         Replace_State (Parse_Stream.Stack_Top, State);
+      end if;
+   end Shift;
+
+   procedure Shift
+     (Tree             : in     Syntax_Trees.Tree;
+      Node             : in     Valid_Node_Access;
+      Shift_Bytes      : in     Base_Buffer_Pos;
+      Shift_Chars      : in     Base_Buffer_Pos;
+      Shift_Lines      : in     Base_Line_Number_Type;
+      Last_Stable_Byte : in     Base_Buffer_Pos;
+      Non_Grammar_Next : in out Lexer.Token_Arrays.Extended_Index)
+   is begin
+      case Terminal_Label'(Node.Label) is
+      when Source_Terminal =>
+         pragma Assert (if Node.ID = Tree.Lexer.Descriptor.SOI_ID then Shift_Bytes = 0 and Shift_Chars = 0);
+         if Node.Byte_Region /= Null_Buffer_Region then
+            Node.Byte_Region := @ + Shift_Bytes;
+         end if;
+         if Node.Char_Region /= Null_Buffer_Region then
+            Node.Char_Region := @ + Shift_Chars;
+         end if;
+      when Virtual_Terminal | Virtual_Identifier =>
+         null;
       end case;
-   end Last_Terminal;
-
-   function Min_Descendant (Nodes : in Node_Arrays.Vector; Node : in Valid_Node_Index) return Valid_Node_Index
-   is
-      N : Syntax_Trees.Node renames Nodes (Node);
-   begin
-      case N.Label is
-      when Shared_Terminal | Virtual_Terminal | Virtual_Identifier =>
-         return Node;
 
-      when Nonterm =>
+      for I in Node.Non_Grammar.First_Index .. Node.Non_Grammar.Last_Index loop
          declare
-            Min : Node_Index := Node;
+            Token : Lexer.Token renames Node.Non_Grammar (I);
          begin
-            for C of N.Children loop
-               --  Encountering Deleted_Child here is an error in the user algorithm.
-               Min := Node_Index'Min (Min, Min_Descendant (Nodes, C));
-            end loop;
-            return Min;
-         end;
-      end case;
-   end Min_Descendant;
+            if Token.ID = Tree.Lexer.Descriptor.SOI_ID then
+               null;
 
-   procedure Move_Branch_Point (Tree : in out Syntax_Trees.Tree; Required_Node : in Valid_Node_Index)
-   is begin
-      --  Note that this preserves all stored indices in Branched_Nodes.
-      Tree.Branched_Nodes.Prepend (Tree.Shared_Tree.Nodes, Required_Node, Tree.Last_Shared_Node);
-      Tree.Last_Shared_Node := Required_Node - 1;
-   end Move_Branch_Point;
+            elsif Token.Byte_Region.Last < Last_Stable_Byte then
+               Token.Byte_Region := @ + Shift_Bytes;
+               Token.Char_Region := @ + Shift_Chars;
+               Token.Line_Region := @ + Shift_Lines;
 
-   function Next_Terminal (Tree : in Syntax_Trees.Tree; Node : in Valid_Node_Index) return Node_Index
-   is
-      use Valid_Node_Index_Arrays;
-      use all type SAL.Base_Peek_Type;
+            else
+               Non_Grammar_Next := I;
+               exit;
+            end if;
+         end;
+      end loop;
 
-      function First_Child (Node : in Valid_Node_Index) return Node_Index
-      is
-         N : Node_Const_Ref renames Tree.Get_Node_Const_Ref (Node);
-      begin
-         case N.Label is
-         when Shared_Terminal | Virtual_Terminal | Virtual_Identifier =>
-            return Node;
-         when Nonterm =>
-            --  Use first non-empty
-            for J in N.Children.First_Index .. N.Children.Last_Index loop
-               --  Encountering Deleted_Child here is an error in the user algorithm.
-               declare
-                  Result : constant Node_Index := First_Child (N.Children (J));
-               begin
-                  if Result /= Invalid_Node_Index then
-                     return Result;
-                  end if;
-               end;
-            end loop;
-            --  All Children are empty
-            return Invalid_Node_Index;
-         end case;
-      end First_Child;
+      if Node.Augmented /= null then
+         Shift (Node.Augmented.all, Shift_Bytes, Shift_Chars, Shift_Lines, Last_Stable_Byte);
+      end if;
+   end Shift;
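
The second Shift above relocates a node's byte/char regions (and its non-grammar tokens up to Last_Stable_Byte) by the net size change of an edit earlier in the buffer. A minimal sketch of that relocation arithmetic, using a simplified Region record rather than WisiToken's Buffer_Region:

   --  Sketch only: simplified types, not WisiToken's.
   with Ada.Text_IO;
   procedure Shift_Region_Sketch is
      type Region is record
         First, Last : Integer;
      end record;

      Token_Region : Region := (First => 120, Last => 127);

      --  Net size change of an edit that ended before byte 120; here the
      --  edit deleted 3 bytes, so everything after it moves left by 3.
      Shift_Bytes : constant Integer := -3;
   begin
      Token_Region :=
        (Token_Region.First + Shift_Bytes, Token_Region.Last + Shift_Bytes);
      Ada.Text_IO.Put_Line
        ("shifted region:" & Integer'Image (Token_Region.First) &
         " .." & Integer'Image (Token_Region.Last));
      --  prints: shifted region: 117 .. 124
   end Shift_Region_Sketch;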
 
-      function Next_Child (Child : in Valid_Node_Index; Node : in Node_Index) return Node_Index
-      is begin
-         --  Node is Parent of Child; return node immediately after Child.
-         if Node = Invalid_Node_Index then
-            return Invalid_Node_Index;
-         else
-            declare
-               N : Node_Const_Ref renames Tree.Get_Node_Const_Ref (Node);
-            begin
-               pragma Assert (N.Label = Nonterm);
-               for I in N.Children.First_Index .. N.Children.Last_Index loop
-                  --  Encountering Deleted_Child here is an error in the user algorithm.
-                  if N.Children (I) = Child then
-                     --  Use first non-empty next from I + 1.
-                     for J in I + 1 .. N.Children.Last_Index loop
-                        declare
-                           Result : constant Node_Index := First_Child (N.Children (J));
-                        begin
-                           if Result /= Invalid_Node_Index then
-                              return Result;
-                           end if;
-                        end;
-                     end loop;
-                     --  All next Children are empty
-                     return Next_Child (Node, N.Parent);
-                  end if;
-               end loop;
-               raise SAL.Programmer_Error;
-            end;
-         end if;
-      end Next_Child;
+   function Single_Terminal (Ref : in Stream_Node_Ref) return Boolean
+   is begin
+      return Stream_Element_Lists.Element (Ref.Element.Cur).Node = Ref.Node and Ref.Node.Label in Terminal_Label;
+   end Single_Terminal;
 
-      N : Node_Const_Ref renames Get_Node_Const_Ref (Tree, Node);
-   begin
-      return Next_Child (Node, N.Parent);
-   end Next_Terminal;
+   function SOI (Tree : in Syntax_Trees.Tree) return Node_Access
+   is begin
+      return Tree.SOI;
+   end SOI;
 
-   function Parent
-     (Tree  : in Syntax_Trees.Tree;
-      Node  : in Valid_Node_Index;
-      Count : in Positive := 1)
-     return Node_Index
+   function Stack_Depth (Tree : in Syntax_Trees.Tree; Stream : in Stream_ID) return SAL.Base_Peek_Type
    is
-      Result : Node_Index := Node;
-      N      : Natural    := 0;
+      use Stream_Element_Lists;
+
+      Parse_Stream : Syntax_Trees.Parse_Stream renames Tree.Streams (Stream.Cur);
+
+      Element : Cursor             := Parse_Stream.Stack_Top;
+      Result  : SAL.Base_Peek_Type := 0;
    begin
       loop
-         if Result <= Tree.Last_Shared_Node then
-            Result := Tree.Shared_Tree.Nodes (Result).Parent;
-         else
-            Result := Tree.Branched_Nodes (Result).Parent;
-         end if;
-         N := N + 1;
-         exit when N = Count or Result = Invalid_Node_Index;
+         exit when not Has_Element (Element);
+         Result := @ + 1;
+         Element := Previous (Element);
       end loop;
       return Result;
-   end Parent;
+   end Stack_Depth;
 
-   function Prev_Terminal (Tree : in Syntax_Trees.Tree; Node : in Valid_Node_Index) return Node_Index
-   is
-      use Valid_Node_Index_Arrays;
-      use all type SAL.Base_Peek_Type;
+   function Stack_Top
+     (Tree   : in Syntax_Trees.Tree;
+      Stream : in Stream_ID)
+     return Stream_Index
+   is begin
+      return (Cur => Tree.Streams (Stream.Cur).Stack_Top);
+   end Stack_Top;
 
-      function Last_Child (Node : in Valid_Node_Index) return Node_Index
-      is
-         N : Node_Const_Ref renames Tree.Get_Node_Const_Ref (Node);
+   procedure Start_Edit (Tree : in out Syntax_Trees.Tree)
+   is
+      use Stream_Element_Lists;
+   begin
+      Tree.Shared_Stream :=
+        (Cur             => Tree.Streams.Append
+           ((Label       => Shared_Stream_Label,
+             Stack_Top   => Invalid_Stream_Index.Cur,
+             Shared_Link => Invalid_Stream_Index.Cur,
+             Elements    => <>)));
+
+      Tree.Streams (Tree.Shared_Stream.Cur).Elements.Append
+        ((Node  => Tree.SOI,
+          State => Unknown_State));
+
+      Tree.SOI.Parent := Invalid_Node_Access;
+
+      --  Delete SOI, EOI from root children (added in Clear_Parse_Streams)
+      declare
+         New_Children : Node_Access_Array (1 .. Tree.Root.Child_Count - 2);
       begin
-         case N.Label is
-         when Shared_Terminal | Virtual_Terminal | Virtual_Identifier =>
-            return Node;
-         when Nonterm =>
-            --  Use first non-empty from end.
-            for J in reverse N.Children.First_Index .. N.Children.Last_Index loop
-               --  Encountering Deleted_Child here is an error in the user algorithm.
-               declare
-                  Result : constant Node_Index := Last_Child (N.Children (J));
-               begin
-                  if Result /= Invalid_Node_Index then
-                     return Result;
-                  end if;
-               end;
-            end loop;
-            --  All Children are empty
-            return Invalid_Node_Index;
-         end case;
-      end Last_Child;
-
-      function Prev_Child (Child : in Valid_Node_Index; Node : in Node_Index) return Node_Index
-      is begin
-         --  Node is Parent of Child; return node immediately previous to Child.
-         if Node = Invalid_Node_Index then
-            return Invalid_Node_Index;
-         else
-            declare
-               N : Node_Const_Ref renames Tree.Get_Node_Const_Ref (Node);
-            begin
-               pragma Assert (N.Label = Nonterm);
-               for I in reverse N.Children.First_Index .. N.Children.Last_Index loop
-                  --  Encountering Deleted_Child here is an error in the user algorithm.
-                  if N.Children (I) = Child then
-                     --  Use first non-empty from I - 1.
-                     for J in reverse N.Children.First_Index .. I - 1 loop
-                        declare
-                           Result : constant Node_Index := Last_Child (N.Children (J));
-                        begin
-                           if Result /= Invalid_Node_Index then
-                              return Result;
-                           end if;
-                        end;
-                     end loop;
-                     --  All previous Children are empty
-                     return Prev_Child (Node, N.Parent);
-                  end if;
-               end loop;
-               raise SAL.Programmer_Error;
-            end;
-         end if;
-      end Prev_Child;
+         New_Children := Tree.Root.Children (2 .. Tree.Root.Children'Last - 1);
+
+         Tree.Root := new Node'
+           (Label       => Nonterm,
+            Copied_Node => Invalid_Node_Access,
+            Child_Count => Tree.Root.Child_Count - 2,
+            ID          => Tree.Root.ID,
+            Node_Index  => Tree.Root.Node_Index,
+            Parent      => null,
+            Augmented   => Tree.Root.Augmented,
+            Error_List  =>
+              (if Tree.Root.Error_List = null
+               then null
+               else new Error_Data_Lists.List'(Tree.Root.Error_List.all)),
+            Virtual          => Tree.Root.Virtual,
+            Recover_Conflict => Tree.Root.Recover_Conflict,
+            RHS_Index        => Tree.Root.RHS_Index,
+            Name_Offset      => Tree.Root.Name_Offset,
+            Name_Length      => Tree.Root.Name_Length,
+            Children         => New_Children);
+
+         for Child of New_Children loop
+            Child.Parent := Tree.Root;
+         end loop;
 
-      N : Node_Const_Ref renames Get_Node_Const_Ref (Tree, Node);
-   begin
-      return Prev_Child (Node, N.Parent);
-   end Prev_Terminal;
+         Tree.Nodes.Append (Tree.Root);
+      end;
 
-   procedure Print_Tree
-     (Tree            : in Syntax_Trees.Tree;
-      Descriptor      : in WisiToken.Descriptor;
-      Root            : in Node_Index                   := Invalid_Node_Index;
-      Image_Augmented : in Syntax_Trees.Image_Augmented := null;
-      Image_Action    : in Syntax_Trees.Image_Action    := null)
-   is
-      use Ada.Text_IO;
+      Tree.Streams (Tree.Shared_Stream.Cur).Elements.Append
+        ((Node  => Tree.Root,
+          State => Unknown_State));
 
-      Node_Printed : Node_Sets.Vector;
+      Tree.Streams (Tree.Shared_Stream.Cur).Elements.Append
+        ((Node  => Tree.EOI,
+          State => Unknown_State));
 
-      procedure Print_Node (Node : in Valid_Node_Index; Level : in Integer)
-      is
-         function Image is new SAL.Generic_Decimal_Image (Node_Index);
+      Tree.EOI.Parent := Invalid_Node_Access;
 
-         N : Syntax_Trees.Node renames Tree.Shared_Tree.Nodes (Node);
-      begin
-         if Node_Printed (Node) then
-            --  This does not catch all possible tree edit errors, but it does
-            --  catch circles.
-            raise SAL.Programmer_Error with "Print_Tree: invalid tree; loop:" & Node_Index'Image (Node);
-         else
-            Node_Printed (Node) := True;
-         end if;
+      Tree.Root := Invalid_Node_Access;
+   end Start_Edit;
 
-         Put (Image (Node, Width => 4) & ": ");
-         for I in 1 .. Level loop
-            Put ("| ");
-         end loop;
-         Put (Image (Tree, N, Node, Descriptor, Include_Children => False, Include_RHS_Index => True));
-         if Image_Augmented /= null and N.Augmented /= null then
-            Put (" - " & Image_Augmented (N.Augmented));
-         end if;
-         if N.Label = Nonterm and then (Image_Action /= null and N.Action /= null) then
-            Put (" - " & Image_Action (N.Action));
-         end if;
+   procedure Start_Lex
+     (Tree           : in out Syntax_Trees.Tree)
+   is
+      Begin_Byte_Pos : Buffer_Pos;
+      Begin_Char_Pos : Buffer_Pos;
+      Begin_Line     : Line_Number_Type;
+   begin
+      Tree.Lexer.Begin_Pos (Begin_Byte_Pos, Begin_Char_Pos, Begin_Line);
+      declare
+         Token : constant Lexer.Token :=
+           (ID          => Tree.Lexer.Descriptor.SOI_ID,
+            Byte_Region => (Begin_Byte_Pos, Begin_Byte_Pos),
+            Char_Region => (Begin_Char_Pos, Begin_Char_Pos),
+            Line_Region => (Begin_Line, Begin_Line));
+         --  Tree.*_Region uses SOI.*_Region.First as first * in text,
+         --  .Last as first * in next token.
 
-         New_Line;
-         if N.Label = Nonterm then
-            for Child of N.Children loop
-               if Child = Deleted_Child then
-                  Put ("    : ");
-                  for I in 1 .. Level + 1 loop
-                     Put ("| ");
-                  end loop;
-                  Put_Line (" <deleted>");
-               else
-                  Print_Node (Child, Level + 1);
-               end if;
-            end loop;
+      begin
+         if Tree.SOI = null then
+            Tree.SOI := new Node'
+              (Label       => Source_Terminal,
+               Child_Count => 0,
+               others => <>);
+            Tree.Nodes.Append (Tree.SOI);
          end if;
-      end Print_Node;
 
-      Print_Root : constant Node_Index := (if Root = Invalid_Node_Index then Tree.Root else Root);
+         Tree.SOI.all :=
+           (Label             => Source_Terminal,
+            Copied_Node       => Invalid_Node_Access,
+            Child_Count       => 0,
+            ID                => Tree.Lexer.Descriptor.SOI_ID,
+            Node_Index        => 0,
+            Parent            => Invalid_Node_Access,
+            Augmented         => null,
+            Error_List        => null,
+            Non_Grammar       => Lexer.Token_Arrays.To_Vector (Token),
+            Sequential_Index  => Invalid_Sequential_Index,
+            Byte_Region       => Token.Byte_Region,
+            Char_Region       => Token.Char_Region,
+            New_Line_Count    => New_Line_Count (Token.Line_Region),
+            Following_Deleted => Valid_Node_Access_Lists.Empty_List);
+
+         Tree.Shared_Stream :=
+           (Cur             => Tree.Streams.Append
+              ((Label       => Shared_Stream_Label,
+                Stack_Top   => Stream_Element_Lists.No_Element,
+                Shared_Link => Stream_Element_Lists.No_Element,
+                Elements    => <>)));
+
+         Tree.Streams (Tree.Shared_Stream.Cur).Elements.Append
+           ((Node => Tree.SOI,
+             State => Unknown_State));
+      end;
+   end Start_Lex;
+
+   procedure Start_Parse
+     (Tree   : in out Syntax_Trees.Tree;
+      Stream : in     Stream_ID;
+      State  : in     State_Index)
+   is
+      Junk : Terminal_Ref := Append_Stream_Element (Tree, Stream, Tree.SOI, State);
+      pragma Unreferenced (Junk);
    begin
-      Node_Printed.Set_First_Last (Tree.First_Index, Tree.Last_Index);
-      if Print_Root = Invalid_Node_Index then
-         Put_Line ("<empty tree>");
-      else
-         Print_Node (Print_Root, 0);
-      end if;
-   end Print_Tree;
+      Tree.Parents_Set := False;
+   end Start_Parse;
 
-   function Process_Tree
-     (Tree         : in Syntax_Trees.Tree;
-      Node         : in Valid_Node_Index;
-      Visit_Parent : in Visit_Parent_Mode;
-      Process_Node : access function
-        (Tree : in Syntax_Trees.Tree;
-         Node : in Valid_Node_Index)
-        return Boolean)
-     return Boolean
+   function State
+     (Tree    : in Syntax_Trees.Tree;
+      Stream  : in Stream_ID;
+      Element : in Stream_Index)
+     return Unknown_State_Index
    is
-      function Compute (N : in Syntax_Trees.Node) return Boolean
-      is begin
-         if Visit_Parent = Before then
-            if not Process_Node (Tree, Node) then
-               return False;
-            end if;
-         end if;
+      pragma Unreferenced (Tree, Stream);
+   begin
+      return Stream_Element_Lists.Element (Element.Cur).State;
+   end State;
 
-         if N.Label = Nonterm then
-            for Child of N.Children loop
-               if Child /= Deleted_Child then
-                  if not Process_Tree (Tree, Child, Visit_Parent, Process_Node) then
-                     return False;
-                  end if;
-               end if;
-            end loop;
-         end if;
+   function State (Tree : in Syntax_Trees.Tree; Stream : in Stream_ID) return State_Index
+   is begin
+      return Stream_Element_Lists.Element (Tree.Streams (Stream.Cur).Stack_Top).State;
+   end State;
 
-         if Visit_Parent = After then
-            return Process_Node (Tree, Node);
-         else
-            return True;
-         end if;
-      end Compute;
-   begin
-      if Node <= Tree.Last_Shared_Node then
-         return Compute (Tree.Shared_Tree.Nodes (Node));
-      else
-         return Compute (Tree.Branched_Nodes (Node));
-      end if;
-   end Process_Tree;
+   function Stream_Count (Tree : in Syntax_Trees.Tree) return Natural
+   is begin
+      return Natural (Tree.Streams.Length);
+   end Stream_Count;
 
-   procedure Process_Tree
-     (Tree         : in out Syntax_Trees.Tree;
-      Node         : in     Valid_Node_Index;
-      Process_Node : access procedure
-        (Tree : in out Syntax_Trees.Tree;
-         Node : in     Valid_Node_Index))
+   procedure Stream_Delete
+     (Tree    : in out Syntax_Trees.Tree;
+      Stream  : in     Stream_ID;
+      Element : in out Stream_Index)
    is
-      procedure Compute (N : in Syntax_Trees.Node)
-      is begin
-         if N.Label = Nonterm then
-            for Child of N.Children loop
-               if Child /= Deleted_Child then
-                  Process_Tree (Tree, Child, Process_Node);
-               end if;
-            end loop;
-         end if;
-
-         Process_Node (Tree, Node);
-      end Compute;
+      Parse_Stream : Syntax_Trees.Parse_Stream renames Tree.Streams (Stream.Cur);
    begin
-      if Node <= Tree.Last_Shared_Node then
-         Compute (Tree.Shared_Tree.Nodes (Node));
-      else
-         Compute (Tree.Branched_Nodes (Node));
+      if Parse_Stream.Stack_Top = Element.Cur then
+         Parse_Stream.Stack_Top := Stream_Element_Lists.No_Element;
       end if;
-   end Process_Tree;
 
-   procedure Process_Tree
-     (Tree         : in out Syntax_Trees.Tree;
-      Process_Node : access procedure
-        (Tree : in out Syntax_Trees.Tree;
-         Node : in     Valid_Node_Index);
-      Root         : in     Node_Index := Invalid_Node_Index)
-   is begin
-      Tree.Shared_Tree.Traversing := True;
-      Process_Tree (Tree, (if Root = Invalid_Node_Index then Tree.Root else 
Root), Process_Node);
-      Tree.Shared_Tree.Traversing := False;
-   exception
-   when others =>
-      Tree.Shared_Tree.Traversing := False;
-      raise;
-   end Process_Tree;
+      Parse_Stream.Elements.Delete (Element.Cur);
+   end Stream_Delete;
 
-   function Production_ID
-     (Tree : in Syntax_Trees.Tree;
-      Node : in Valid_Node_Index)
-     return WisiToken.Production_ID
+   function Stream_Error_Iterate
+     (Tree   : aliased in Syntax_Trees.Tree;
+      Stream :         in Stream_ID)
+     return Stream_Error_Iterator_Interfaces.Forward_Iterator'Class
    is begin
-      return
-        (if Node <= Tree.Last_Shared_Node
-         then (Tree.Shared_Tree.Nodes (Node).ID, Tree.Shared_Tree.Nodes (Node).RHS_Index)
-         else (Tree.Branched_Nodes (Node).ID, Tree.Branched_Nodes (Node).RHS_Index));
-   end Production_ID;
+      return Stream_Error_Iterator'(Tree => Tree'Access, Stream => Stream.Cur);
+   end Stream_Error_Iterate;
+
+   function Stream_First
+     (Tree     : in Syntax_Trees.Tree;
+      Stream   : in Stream_ID;
+      Skip_SOI : in Boolean)
+     return Stream_Index
+   is begin
+      return Result : Stream_Index := (Cur => Tree.Streams (Stream.Cur).Elements.First) do
+         if Skip_SOI then
+            Stream_Element_Lists.Next (Result.Cur);
+         end if;
+      end return;
+   end Stream_First;
 
-   procedure Replace_Child
-     (Tree                 : in out Syntax_Trees.Tree;
-      Parent               : in     Valid_Node_Index;
-      Child_Index          : in     SAL.Peek_Type;
-      Old_Child            : in     Valid_Node_Index;
-      New_Child            : in     Valid_Node_Index;
-      Old_Child_New_Parent : in     Node_Index := Invalid_Node_Index)
+   function Stream_First
+     (Tree     : in Syntax_Trees.Tree;
+      Stream   : in Stream_ID;
+      Skip_SOI : in Boolean)
+     return Rooted_Ref
    is
-      N : Syntax_Trees.Node renames Tree.Shared_Tree.Nodes (Parent);
+      Cur : constant Stream_Element_Lists.Cursor := Tree.Streams (Stream.Cur).Elements.First;
    begin
-      N.Children (Child_Index) := New_Child;
+      return Result : Rooted_Ref := (Stream, (Cur => Cur), Stream_Element_Lists.Element (Cur).Node) do
+         if Skip_SOI then
+            Tree.Stream_Next (Result, Rooted => True);
+         end if;
+      end return;
+   end Stream_First;
 
-      if Old_Child /= Deleted_Child then
-         Tree.Shared_Tree.Nodes (Old_Child).Parent := Old_Child_New_Parent;
-      end if;
+   function Stream_Input_Length (Tree : in Syntax_Trees.Tree; Stream : in Stream_ID) return SAL.Base_Peek_Type
+   is
+      use Stream_Element_Lists;
+      use SAL;
+      Parse_Stream : Syntax_Trees.Parse_Stream renames Tree.Streams (Stream.Cur);
 
-      Tree.Shared_Tree.Nodes (New_Child).Parent := Parent;
-   end Replace_Child;
+      Element : Cursor         := Next (Parse_Stream.Stack_Top);
+      Result  : Base_Peek_Type := 0;
+   begin
+      loop
+         exit when not Has_Element (Element);
+         Result := @ + 1;
+         Element := Next (Element);
+      end loop;
+      return Result;
+   end Stream_Input_Length;
+
+   procedure Stream_Insert
+     (Tree   : in out Syntax_Trees.Tree;
+      Stream : in     Stream_ID;
+      Node   : in     Valid_Node_Access;
+      Before : in     Stream_Index)
+   is
+      Parse_Stream : Syntax_Trees.Parse_Stream renames Tree.Streams (Stream.Cur);
+   begin
+      Node.Parent := Invalid_Node_Access;
+      Parse_Stream.Elements.Insert
+        (Element  =>
+           (Node  => Node,
+            State => Unknown_State),
+         Before   => Before.Cur);
+   end Stream_Insert;
+
+   function Stream_Insert
+     (Tree   : in out Syntax_Trees.Tree;
+      Stream : in     Stream_ID;
+      Node   : in     Valid_Node_Access;
+      Before : in     Stream_Index)
+     return Stream_Node_Ref
+   is
+      Parse_Stream : Syntax_Trees.Parse_Stream renames Tree.Streams (Stream.Cur);
+   begin
+      Node.Parent := Invalid_Node_Access;
+      return Result : constant Stream_Node_Ref :=
+        (Stream         => Stream,
+         Node           => Node,
+         Element        =>
+           (Cur         => Parse_Stream.Elements.Insert
+              (Element  =>
+                 (Node  => Node,
+                  State => Unknown_State),
+               Before   => Before.Cur)));
+   end Stream_Insert;
+
+   function Stream_Last
+     (Tree     : in Syntax_Trees.Tree;
+      Stream   : in Stream_ID;
+      Skip_EOI : in Boolean)
+     return Stream_Index
+   is begin
+      return Result : Stream_Index := (Cur => Tree.Streams (Stream.Cur).Elements.Last) do
+         if Skip_EOI then
+            Stream_Element_Lists.Previous (Result.Cur);
+         end if;
+      end return;
+   end Stream_Last;
 
-   function RHS_Index
-     (Tree : in Syntax_Trees.Tree;
-      Node : in Valid_Node_Index)
-     return Natural
+   function Stream_Last
+     (Tree     : in Syntax_Trees.Tree;
+      Stream   : in Stream_ID;
+      Skip_EOI : in Boolean)
+     return Rooted_Ref
    is begin
-      return
-        (if Node <= Tree.Last_Shared_Node
-         then Tree.Shared_Tree.Nodes (Node).RHS_Index
-         else Tree.Branched_Nodes (Node).RHS_Index);
-   end RHS_Index;
+      return To_Rooted_Ref (Tree, Stream, Stream_Last (Tree, Stream, Skip_EOI));
+   end Stream_Last;
 
-   function Root (Tree : in Syntax_Trees.Tree) return Node_Index
+   function Stream_Length (Tree : in Syntax_Trees.Tree; Stream : in Stream_ID) return SAL.Base_Peek_Type
    is begin
-      return Tree.Root;
-   end Root;
+      return SAL.Base_Peek_Type (Tree.Streams (Stream.Cur).Elements.Length);
+   end Stream_Length;
+
+   function Stream_Next
+     (Tree    : in Syntax_Trees.Tree;
+      Stream  : in Stream_ID;
+      Element : in Stream_Index)
+     return Stream_Index
+   is begin
+      return
+        (if Element = Invalid_Stream_Index
+         then (Cur => Tree.Streams (Stream.Cur).Elements.First)
+         else (Cur => Stream_Element_Lists.Next (Element.Cur)));
+   end Stream_Next;
 
-   procedure Set_Node_Identifier
-     (Tree       : in Syntax_Trees.Tree;
-      Node       : in Valid_Node_Index;
-      ID         : in Token_ID;
-      Identifier : in Identifier_Index)
+   function Stream_Next
+     (Tree : in Syntax_Trees.Tree;
+      Ref  : in Stream_Node_Ref)
+     return Rooted_Ref
    is
-      Current : constant Syntax_Trees.Node := Tree.Shared_Tree.Nodes (Node);
+      Result : Stream_Node_Ref := Ref;
    begin
-      for C of Current.Children loop
-         if C /= Deleted_Child then
-            Tree.Shared_Tree.Nodes (C).Parent := Invalid_Node_Index;
-         end if;
-      end loop;
-
-      Tree.Shared_Tree.Nodes.Replace_Element
-        (Node,
-         (Label       => Virtual_Identifier,
-          ID          => ID,
-          Identifier  => Identifier,
-          Byte_Region => Current.Byte_Region,
-          Parent      => Current.Parent,
-          State       => Unknown_State,
-          Augmented   => null));
-   end Set_Node_Identifier;
+      Stream_Next (Tree, Result, Rooted => True);
+      return Result;
+   end Stream_Next;
 
-   procedure Set_Parents (Tree : in out Syntax_Trees.Tree)
+   procedure Stream_Next
+     (Tree   : in     Syntax_Trees.Tree;
+      Ref    : in out Stream_Node_Ref;
+      Rooted : in     Boolean)
    is
-      procedure Set_Parents
-        (Tree   : in out Syntax_Trees.Tree;
-         Node   : in     Valid_Node_Index;
-         Parent : in     Node_Index)
-      is
-         N : Node_Var_Ref renames Tree.Get_Node_Var_Ref (Node);
-      begin
-         N.Parent := Parent;
-         case N.Label is
-         when Shared_Terminal | Virtual_Terminal | Virtual_Identifier =>
-            null;
-
-         when Nonterm =>
-            for C of N.Children loop
-               if C = Deleted_Child then
-                  --  This can only happen if someone calls Set_Parents after parents
-                  --  are already set.
-                  raise SAL.Programmer_Error with "encountered Deleted_Child";
-               end if;
-               Set_Parents (Tree, C, Node);
-            end loop;
-         end case;
-      end Set_Parents;
+      use Stream_Element_Lists;
    begin
-      Set_Parents (Tree, Root (Tree), Invalid_Node_Index);
-      Tree.Shared_Tree.Parents_Set := True;
-   end Set_Parents;
+      Ref.Element := (Cur => Next (Ref.Element.Cur));
 
-   procedure Set_Root (Tree : in out Syntax_Trees.Tree; Root : in Valid_Node_Index)
-   is begin
-      Tree.Root := Root;
-   end Set_Root;
+      if not Has_Element (Ref.Element.Cur) then
+         if Ref.Stream = Tree.Shared_Stream then
+            Ref.Stream := Invalid_Stream_ID;
 
-   function Same_Token
-     (Tree_1  : in Syntax_Trees.Tree'Class;
-      Index_1 : in Valid_Node_Index;
-      Tree_2  : in Syntax_Trees.Tree'Class;
-      Index_2 : in Valid_Node_Index)
-     return Boolean
-   is
-      function Compute (N_1, N_2 : in Syntax_Trees.Node) return Boolean
-      is begin
-         return N_1.Label = N_2.Label and
-           N_1.ID = N_2.ID and
-           N_1.Byte_Region = N_2.Byte_Region;
-      end Compute;
-   begin
-      return Compute
-        ((if Index_1 <= Tree_1.Last_Shared_Node
-          then Tree_1.Shared_Tree.Nodes (Index_1)
-          else Tree_1.Branched_Nodes (Index_1)),
-         (if Index_2 <= Tree_2.Last_Shared_Node
-          then Tree_2.Shared_Tree.Nodes (Index_2)
-          else Tree_2.Branched_Nodes (Index_2)));
-   end Same_Token;
+         elsif not Has_Element (Tree.Streams (Ref.Stream.Cur).Shared_Link) then
+            Ref.Stream := Invalid_Stream_ID;
 
-   procedure Set_Augmented
-     (Tree  : in out Syntax_Trees.Tree;
-      Node  : in     Valid_Node_Index;
-      Value : in     Base_Token_Class_Access)
-   is begin
-      if Node <= Tree.Last_Shared_Node then
-         Tree.Shared_Tree.Nodes (Node).Augmented := Value;
-      else
-         Tree.Branched_Nodes (Node).Augmented := Value;
+         else
+            Ref.Element.Cur := Tree.Streams (Ref.Stream.Cur).Shared_Link;
+            Ref.Stream      := Tree.Shared_Stream;
+         end if;
       end if;
-      Tree.Shared_Tree.Augmented_Present := True;
-   end Set_Augmented;
 
-   procedure Set_Children
-     (Tree     : in out Syntax_Trees.Tree;
-      Parent   : in     Valid_Node_Index;
-      Children : in     Valid_Node_Index_Array)
-   is
-      N : Node_Var_Ref renames Tree.Get_Node_Var_Ref (Parent);
+      Ref.Node :=
+        (if Has_Element (Ref.Element.Cur)
+         then (if Rooted
+               then Element (Ref.Element.Cur).Node
+               else First_Terminal (Tree, Element (Ref.Element.Cur).Node))
+         else Invalid_Node_Access);
+   end Stream_Next;
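
Stream_Next above falls through from the end of a parse stream to the stream's Shared_Link, so iteration continues into the shared stream. A minimal sketch of that fall-through, using Ada.Containers.Doubly_Linked_Lists in place of WisiToken's stream element lists (all names here are illustrative):

   --  Sketch only: standard containers, not WisiToken's stream lists.
   with Ada.Containers.Doubly_Linked_Lists;
   with Ada.Text_IO;
   procedure Stream_Next_Sketch is
      package Lists is new Ada.Containers.Doubly_Linked_Lists (Integer);
      use Lists;

      Parse_Stream  : List;   --  elements already owned by this parser
      Shared_Stream : List;   --  elements still shared by all parsers
      Shared_Link   : Cursor; --  next shared element for this parse stream

      Cur       : Cursor;
      In_Shared : Boolean := False;
   begin
      Parse_Stream.Append (1);
      Parse_Stream.Append (2);
      Shared_Stream.Append (3);
      Shared_Stream.Append (4);
      Shared_Link := Shared_Stream.First;

      Cur := Parse_Stream.First;
      while Has_Element (Cur) loop
         Ada.Text_IO.Put (Integer'Image (Element (Cur)));

         Next (Cur);
         if not Has_Element (Cur) and not In_Shared then
            Cur       := Shared_Link;  --  fall through to the shared stream
            In_Shared := True;
         end if;
      end loop;
      Ada.Text_IO.New_Line;  --  prints " 1 2 3 4"
   end Stream_Next_Sketch;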
 
-      Min_Terminal_Index_Set : Boolean := False;
+   procedure Stream_Next
+     (Tree   : in     Syntax_Trees.Tree;
+      Ref    : in out Stream_Node_Parents;
+      Rooted : in     Boolean)
+   is
+      use Stream_Element_Lists;
    begin
-      --  See Design note in spec about Parents, Parent_Set.
+      Ref.Parents.Clear;
+      Stream_Next (Tree, Ref.Ref, Rooted => True);
 
-      if Tree.Parents_Set then
-         --  Clear current Children.Parent first, in case some are also in new
-         --  children.
-         for C of N.Children loop
-            if C /= WisiToken.Deleted_Child then
-               Tree.Shared_Tree.Nodes (C).Parent := Invalid_Node_Index;
-            end if;
-         end loop;
+      if Ref.Ref.Element.Cur /= No_Element and not Rooted then
+         Ref.Ref.Node := First_Terminal
+           (Tree, Stream_Element_Lists.Element (Ref.Ref.Element.Cur).Node, Ref.Parents);
       end if;
+   end Stream_Next;
 
-      N.Children.Set_First_Last (Children'First, Children'Last);
-
-      for I in Children'Range loop
+   function Stream_Prev
+     (Tree    : in Syntax_Trees.Tree;
+      Stream  : in Stream_ID;
+      Element : in Stream_Index)
+     return Stream_Index
+   is
+      pragma Unreferenced (Tree, Stream);
+   begin
+      return (Cur => Stream_Element_Lists.Previous (Element.Cur));
+   end Stream_Prev;
 
-         N.Children (I) := Children (I);
+   function Stream_Prev
+     (Tree : in Syntax_Trees.Tree;
+      Ref  : in Stream_Node_Ref)
+     return Rooted_Ref
+   is
+      Result : Stream_Node_Ref := Ref;
+   begin
+      Stream_Prev (Tree, Result);
+      return Result;
+   end Stream_Prev;
 
-         if Tree.Parents_Set then
-            declare
-               Child_Node : Node renames Tree.Shared_Tree.Nodes (Children (I));
-            begin
-               if Child_Node.Parent /= Invalid_Node_Index then
-                  declare
-                     Other_Parent : Node renames Tree.Shared_Tree.Nodes (Child_Node.Parent);
-                     Child_Index  : constant SAL.Base_Peek_Type := Syntax_Trees.Child_Index
-                       (Other_Parent, Children (I));
-                  begin
-                     Other_Parent.Children (Child_Index) := WisiToken.Deleted_Child;
-                  end;
-               end if;
+   procedure Stream_Prev
+     (Tree   : in     Syntax_Trees.Tree;
+      Ref    : in out Stream_Node_Ref;
+      Rooted : in     Boolean := True)
+   is
+      use Stream_Element_Lists;
+   begin
+      Ref.Element := (Cur => Previous (Ref.Element.Cur));
 
-               Child_Node.Parent := Parent;
-            end;
-         end if;
+      if not Has_Element (Ref.Element.Cur) then
+         Ref.Stream := Invalid_Stream_ID;
+      end if;
 
-         declare
-            K : Node_Const_Ref renames Tree.Get_Node_Const_Ref (Children (I));
-         begin
-            N.Virtual := N.Virtual or
-              (case K.Label is
-               when Shared_Terminal                       => False,
-               when Virtual_Terminal | Virtual_Identifier => True,
-               when Nonterm                               => K.Virtual);
+      Ref.Node :=
+        (if Has_Element (Ref.Element.Cur)
+         then (if Rooted
+               then Element (Ref.Element.Cur).Node
+               else Tree.Last_Terminal (Element (Ref.Element.Cur).Node))
+         else Invalid_Node_Access);
+   end Stream_Prev;
 
-            if N.Byte_Region.First > K.Byte_Region.First then
-               N.Byte_Region.First := K.Byte_Region.First;
-            end if;
+   procedure Stream_Prev
+     (Tree   : in     Syntax_Trees.Tree;
+      Ref    : in out Stream_Node_Parents;
+      Rooted : in     Boolean)
+   is
+      use Stream_Element_Lists;
+   begin
+      Ref.Parents.Clear;
+      Stream_Prev (Tree, Ref.Ref, Rooted => True);
 
-            if N.Byte_Region.Last < K.Byte_Region.Last then
-               N.Byte_Region.Last := K.Byte_Region.Last;
-            end if;
+      if Ref.Ref.Element.Cur /= No_Element and not Rooted then
+         Ref.Ref.Node := Last_Terminal
+           (Tree, Stream_Element_Lists.Element (Ref.Ref.Element.Cur).Node, Ref.Parents);
+      end if;
+   end Stream_Prev;
 
-            if not Min_Terminal_Index_Set then
-               case K.Label is
-               when Shared_Terminal =>
-                  Min_Terminal_Index_Set := True;
-                  N.Min_Terminal_Index   := K.Terminal;
+   function Subtree_Image
+     (Tree         : in Syntax_Trees.Tree;
+      Node         : in Node_Access;
+      Node_Numbers : in Boolean := True;
+      Non_Grammar  : in Boolean := False;
+      Augmented    : in Boolean := False;
+      Line_Numbers : in Boolean := False;
+      Level        : in Integer := 0)
+     return String
+   is
+      use Ada.Strings.Unbounded;
+      Result : Unbounded_String := +"" & ASCII.LF;
+   begin
+      for I in 1 .. Level loop
+         Result := @ & "| ";
+      end loop;
+      Result := @ &
+        (if Node = Invalid_Node_Access
+         then "<null>"
+         else Image
+           (Tree, Node,
+            Children              => False,
+            Node_Numbers          => Node_Numbers,
+            RHS_Index             => True,
+            Terminal_Node_Numbers => True,
+            Non_Grammar           => Non_Grammar,
+            Line_Numbers          => Line_Numbers,
+            Augmented             => Augmented));
+
+      if Node /= Invalid_Node_Access and then Node.Label = Nonterm then
+         for Child of Node.Children loop
+            Result := @ & Subtree_Image
+              (Tree, Child, Node_Numbers, Non_Grammar, Augmented, Line_Numbers, Level + 1);
+         end loop;
+      end if;
 
-               when Virtual_Terminal | Virtual_Identifier =>
-                  null;
+      return -Result;
+   end Subtree_Image;
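
Subtree_Image prefixes each level with "| " and recurses over Nonterm children. A stand-alone sketch of the same layout, with a hypothetical node type in place of WisiToken's:

   --  Sketch only: hypothetical node type, not WisiToken's.
   with Ada.Text_IO;
   procedure Subtree_Image_Sketch is
      type Node;
      type Node_Access is access Node;
      type Node_Array is array (Positive range <>) of Node_Access;
      type Node (Child_Count : Natural) is record
         Name     : Character;
         Children : Node_Array (1 .. Child_Count);
      end record;

      procedure Put_Subtree (N : Node_Access; Level : Natural := 0) is
      begin
         for I in 1 .. Level loop
            Ada.Text_IO.Put ("| ");
         end loop;
         Ada.Text_IO.Put_Line ("" & N.Name);
         for Child of N.Children loop
            Put_Subtree (Child, Level + 1);
         end loop;
      end Put_Subtree;

      Leaf_1 : constant Node_Access := new Node'(0, 'b', (1 .. 0 => null));
      Leaf_2 : constant Node_Access := new Node'(0, 'c', (1 .. 0 => null));
      Root   : constant Node_Access := new Node'(2, 'a', (Leaf_1, Leaf_2));
   begin
      Put_Subtree (Root);
      --  prints:
      --  a
      --  | b
      --  | c
   end Subtree_Image_Sketch;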
 
-               when Nonterm =>
-                  if K.Min_Terminal_Index /= Invalid_Token_Index then
-                     --  not an empty nonterm
-                     Min_Terminal_Index_Set := True;
-                     N.Min_Terminal_Index   := K.Min_Terminal_Index;
-                  end if;
-               end case;
-            end if;
-         end;
+   function Subtree_Root (Tree : in Syntax_Trees.Tree; Node : in Valid_Node_Access) return Valid_Node_Access
+   is
+      pragma Unreferenced (Tree);
+      N : Valid_Node_Access := Node;
+   begin
+      loop
+         exit when N.Parent = Invalid_Node_Access;
+         N := N.Parent;
       end loop;
-   end Set_Children;
+      return N;
+   end Subtree_Root;
 
-   procedure Set_Children
-     (Tree     : in out Syntax_Trees.Tree;
-      Node     : in     Valid_Node_Index;
-      New_ID   : in     WisiToken.Production_ID;
-      Children : in     Valid_Node_Index_Array)
+   function To_Node_Access (Item : in Valid_Node_Access_Array) return Node_Access_Array
+   is begin
+      return (for I in Item'Range => Item (I));
+   end To_Node_Access;
+
+   function To_Real_Recover_Token (Item : in Stream_Node_Ref) return Real_Recover_Token
+   is begin
+      return
+        (Virtual      => False,
+         Element_Node => Stream_Element_Lists.Element (Item.Element.Cur).Node,
+         Node         => Item.Node);
+   end To_Real_Recover_Token;
+
+   function To_Rooted_Ref
+     (Tree    : in Syntax_Trees.Tree;
+      Stream  : in Stream_ID;
+      Element : in Stream_Index)
+     return Rooted_Ref
    is
-      Parent_Node  : Syntax_Trees.Node renames Tree.Shared_Tree.Nodes (Node);
+      pragma Unreferenced (Tree);
    begin
-      if New_ID /= (Parent_Node.ID, Parent_Node.RHS_Index) then
-         Parent_Node.Action := null;
-      end if;
+      return (Stream, Element, Stream_Element_Lists.Element (Element.Cur).Node);
+   end To_Rooted_Ref;
+
+   function To_Stream_Node_Ref
+     (Tree   : in Syntax_Trees.Tree;
+      Stream : in Stream_ID;
+      Node   : in Valid_Node_Access)
+     return Stream_Node_Ref
+   is
+      use Stream_Element_Lists;
+      Parse_Stream : Syntax_Trees.Parse_Stream renames Tree.Streams (Stream.Cur);
 
-      Parent_Node.ID        := New_ID.LHS;
-      Parent_Node.RHS_Index := New_ID.RHS;
+      Root : constant Valid_Node_Access := Tree.Subtree_Root (Node);
+      Cur  : Cursor                     := Parse_Stream.Elements.First;
+   begin
+      loop
+         exit when Element (Cur).Node = Root;
+         Next (Cur);
+      end loop;
 
-      Set_Children (Tree, Node, Children);
-   end Set_Children;
+      return
+        (Stream  => Stream,
+         Element => (Cur => Cur),
+         Node    => Node);
+   end To_Stream_Node_Ref;
 
-   procedure Set_State
-     (Tree  : in out Syntax_Trees.Tree;
-      Node  : in     Valid_Node_Index;
-      State : in     State_Index)
+   function To_Stream_Node_Parents (Tree : in Syntax_Trees.Tree; Ref : in Stream_Node_Ref) return Stream_Node_Parents
    is begin
-      if Tree.Flush then
-         Tree.Shared_Tree.Nodes (Node).State := State;
-      else
-         if Node <= Tree.Last_Shared_Node then
-            Tree.Shared_Tree.Nodes (Node).State := State;
+      return Result : Stream_Node_Parents := (Ref, Parents => <>) do
+         if Ref = Invalid_Stream_Node_Ref or else Rooted (Ref) then
+            null;
+
+         elsif Tree.Parents_Set then
+            declare
+               Inverted_Parents : Node_Stacks.Stack;
+               Node             : Valid_Node_Access          := Ref.Node.Parent;
+               Root_Node        : constant Valid_Node_Access :=
+                 Stream_Element_Lists.Element (Ref.Element.Cur).Node;
+            begin
+               loop
+                  Inverted_Parents.Push (Node);
+                  exit when Node = Root_Node;
+                  Node := Node.Parent;
+               end loop;
+               Result.Parents := Inverted_Parents.Invert;
+            end;
          else
-            Tree.Branched_Nodes (Node).State := State;
+            declare
+               use Stream_Element_Lists;
+            begin
+               Result.Ref.Node := Tree.First_Terminal (Element (Ref.Element.Cur).Node, Result.Parents);
+               loop
+                  exit when Result.Ref.Node = Ref.Node;
+                  Tree.Next_Terminal (Result, Following => True);
+               end loop;
+            end;
          end if;
-      end if;
-   end Set_State;
+      end return;
+   end To_Stream_Node_Parents;
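
When parent links are set, To_Stream_Node_Parents collects the ancestors from Ref.Node up to the element's root node and then inverts that stack. A small sketch of the collect-then-invert step, using Ada.Containers.Vectors and a hypothetical Parent_Of relation (the stack orientation WisiToken expects is not shown here):

   --  Sketch only: standard containers, not WisiToken's Node_Stacks.
   with Ada.Containers.Vectors;
   with Ada.Text_IO;
   procedure Parent_Stack_Sketch is
      package Stacks is new Ada.Containers.Vectors (Positive, Character);

      --  Hypothetical parent relation: 'd' -> 'c' -> 'b' -> 'a' (root).
      function Parent_Of (C : Character) return Character
      is (Character'Pred (C));

      Inverted : Stacks.Vector;
      Result   : Stacks.Vector;
      Node     : Character := 'd';
   begin
      loop
         Node := Parent_Of (Node);
         Inverted.Append (Node);   --  collected child-to-root: c, b, a
         exit when Node = 'a';
      end loop;

      for C of reverse Inverted loop
         Result.Append (C);        --  inverted: root-to-child order
      end loop;

      for C of Result loop
         Ada.Text_IO.Put (C);      --  prints "abc"
      end loop;
      Ada.Text_IO.New_Line;
   end Parent_Stack_Sketch;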
 
-   procedure Set_Flush_False (Tree : in out Syntax_Trees.Tree)
+   function To_Valid_Node_Access (Item : in Node_Access_Array) return Valid_Node_Access_Array
    is begin
-      if Tree.Flush then
-         Tree.Flush := False;
-         Tree.Branched_Nodes.Set_First_Last (Tree.Last_Shared_Node + 1, Tree.Last_Shared_Node);
-      end if;
-   end Set_Flush_False;
+      return (for I in Item'Range => Item (I));
+   end To_Valid_Node_Access;
 
-   procedure Set_Name_Region
-     (Tree   : in out Syntax_Trees.Tree;
-      Node   : in     Valid_Node_Index;
-      Region : in     Buffer_Region)
+   function Traversing (Tree : in Syntax_Trees.Tree) return Boolean
    is begin
-      if Tree.Flush then
-         Tree.Shared_Tree.Nodes (Node).Name := Region;
+      return Tree.Traversing;
+   end Traversing;
 
-      else
-         if Node <= Tree.Last_Shared_Node then
-            Move_Branch_Point (Tree, Node);
-         end if;
+   function Trimmed_Image (Tree : in Syntax_Trees.Tree; Item : in Stream_ID) return String
+   is begin
+      return Trimmed_Image (Tree.Streams (Item.Cur).Label);
+   end Trimmed_Image;
 
-         Tree.Branched_Nodes (Node).Name := Region;
-      end if;
-   end Set_Name_Region;
+   function Trimmed_Image (Item : in Stream_Index) return String
+   is begin
+      return
+        (if Item = Invalid_Stream_Index
+         then "-"
+         else Trimmed_Image (Stream_Element_Lists.Element (Item.Cur).Node.Node_Index));
+   end Trimmed_Image;
+
+   function Trimmed_Image (Node : in Node_Access) return String
+   is begin
+      return
+        (if Node = Invalid_Node_Access
+         then "-"
+         else Trimmed_Image (Node.Node_Index));
+   end Trimmed_Image;
+
+   function Tree_Size_Image (Tree : in Syntax_Trees.Tree) return String
+   is begin
+      return Node_Index'(Tree.Nodes.Last_Index)'Image;
+   end Tree_Size_Image;
 
-   function Sub_Tree_Root (Tree : in Syntax_Trees.Tree; Node : in Valid_Node_Index) return Valid_Node_Index
+   procedure Update_Error
+     (Tree      : in out Syntax_Trees.Tree;
+      Stream    : in     Stream_ID;
+      Error_Ref : in     Stream_Error_Ref;
+      Data      : in     Error_Data'Class;
+      User_Data : in     User_Data_Access_Constant)
    is
-      N : Valid_Node_Index := Node;
+      use Error_Data_Lists;
+      Error_Node : constant Valid_Node_Access := Syntax_Trees.Error_Node (Error_Ref);
+
+      New_Error_List : constant Error_List_Access := new List'(Error_Node.Error_List.all);
+      Cur            : Cursor                     := New_Error_List.First;
+      Moved_Ref      : Stream_Node_Parents        := Error_Ref.Ref;
    begin
       loop
-         exit when Tree.Shared_Tree.Nodes (N).Parent = Invalid_Node_Index;
-         N := Tree.Shared_Tree.Nodes (N).Parent;
+         exit when Dispatch_Equal (Data, New_Error_List (Cur));
+         Next (Cur);
       end loop;
-      return N;
-   end Sub_Tree_Root;
 
-   function Terminal (Tree : in Syntax_Trees.Tree; Node : in Valid_Node_Index) return Base_Token_Index
-   is begin
-      if Node <= Tree.Last_Shared_Node then
-         return Tree.Shared_Tree.Nodes (Node).Terminal;
-      else
-         return Tree.Branched_Nodes (Node).Terminal;
-      end if;
-   end Terminal;
+      New_Error_List (Cur) := Data;
 
-   function Traversing (Tree : in Syntax_Trees.Tree) return Boolean
-   is begin
-      return Tree.Shared_Tree.Traversing;
-   end Traversing;
+      if Error_Ref.Deleted /= Valid_Node_Access_Lists.No_Element then
+         declare
+            Old_Following_Deleted : Valid_Node_Access_Lists.List renames Error_Ref.Ref.Ref.Node.Following_Deleted;
 
-   function Recover_Token
-     (Tree : in Syntax_Trees.Tree;
-      Node : in Valid_Node_Index)
-     return WisiToken.Recover_Token
-   is
-      function Compute (N : Syntax_Trees.Node) return WisiToken.Recover_Token
-      is begin
-         case N.Label is
-         when Shared_Terminal =>
-            return
-              (ID                 => N.ID,
-               Byte_Region        => N.Byte_Region,
-               Min_Terminal_Index => N.Terminal,
-               Name               => Null_Buffer_Region,
-               Virtual            => False);
+            pragma Assert (Error_Node.Following_Deleted.Length = 0);
 
-         when Virtual_Terminal | Virtual_Identifier =>
-            return
-              (ID                 => N.ID,
-               Byte_Region        => Null_Buffer_Region,
-               Min_Terminal_Index => Invalid_Token_Index,
-               Name               => Null_Buffer_Region,
-               Virtual            => True);
+            New_Error_Node : constant Valid_Node_Access := Copy_Node
+              (Tree, Error_Node,
+               Parent                 => Invalid_Node_Access,
+               Copy_Children          => False,
+               Copy_Following_Deleted => False,
+               User_Data              => User_Data,
+               New_Error_List         => New_Error_List,
+               Set_Error_List         => True,
+               Set_Copied_Node        => True);
 
-         when Nonterm =>
-            return
-              (ID                 => N.ID,
-               Byte_Region        => N.Byte_Region,
-               Min_Terminal_Index => N.Min_Terminal_Index,
-               Name               => N.Name,
-               Virtual            => N.Virtual);
-         end case;
-      end Compute;
-   begin
-      return Compute
-        ((if Node <= Tree.Last_Shared_Node
-          then Tree.Shared_Tree.Nodes (Node)
-          else Tree.Branched_Nodes (Node)));
-   end Recover_Token;
+         begin
+            Move_Element
+              (Tree, Stream, Moved_Ref,
+               New_Node => Copy_Node
+                 (Tree, Error_Ref.Ref.Ref.Node,
+                  Parent =>
+                    (if Tree.Parents_Set
+                     then Error_Ref.Ref.Ref.Node.Parent
+                     else Invalid_Node_Access),
+                  Copy_Children          => False,
+                  Copy_Following_Deleted => False,
+                  User_Data              => User_Data),
+               User_Data                 => User_Data);
+
+            for Cur in Old_Following_Deleted.Iterate loop
+               Moved_Ref.Ref.Node.Following_Deleted.Append
+                 ((if Cur = Error_Ref.Deleted
+                   then New_Error_Node
+                   else Copy_Node
+                     (Tree, Old_Following_Deleted (Cur),
+                      Parent                 => Invalid_Node_Access,
+                      User_Data              => User_Data,
+                      Copy_Children          => False,
+                      Copy_Following_Deleted => False,
+                      Set_Copied_Node        => True)));
+            end loop;
 
-   function Recover_Token_Array
-     (Tree  : in Syntax_Trees.Tree;
-      Nodes : in Valid_Node_Index_Array)
-     return WisiToken.Recover_Token_Array
-   is begin
-      return Result : WisiToken.Recover_Token_Array (Nodes'First .. Nodes'Last) do
-         for I in Result'Range loop
-            Result (I) := Tree.Recover_Token (Nodes (I));
-         end loop;
-      end return;
-   end Recover_Token_Array;
+            New_Error_List (Cur).Adjust_Copy;
+            New_Error_Node.Copied_Node := Invalid_Node_Access;
+            for Cur in Old_Following_Deleted.Iterate loop
+               Old_Following_Deleted (Cur).Copied_Node := Invalid_Node_Access;
+            end loop;
+         end;
 
-   function State (Tree : in Syntax_Trees.Tree; Node : in Valid_Node_Index) return Unknown_State_Index
-   is begin
-      if Node <= Tree.Last_Shared_Node then
-         return Tree.Shared_Tree.Nodes (Node).State;
       else
-         return Tree.Branched_Nodes (Node).State;
+         Move_Element
+           (Tree, Stream, Moved_Ref,
+            New_Node                  => Copy_Node
+              (Tree, Error_Ref.Ref.Ref.Node,
+               Parent                 => Invalid_Node_Access,
+               Copy_Children          => False,
+               Copy_Following_Deleted => True,
+               New_Error_List         => New_Error_List,
+               Set_Error_List         => True,
+               User_Data              => User_Data),
+            User_Data                 => User_Data);
       end if;
-   end State;
+   end Update_Error;
+
+   function Valid_Root (Tree : in Syntax_Trees.Tree) return Boolean
+   is begin
+      return Tree.Root /= Invalid_Node_Access or Tree.Stream_Count > 0;
+   end Valid_Root;
+
+   function Valid_Stream_Node
+     (Tree : in Syntax_Trees.Tree;
+      Ref  : in Stream_Node_Ref)
+     return Boolean
+   is begin
+      return
+        Tree.Contains (Ref.Stream, Ref.Element) and then
+        (if Ref.Node = Invalid_Node_Access
+         then Ref /= Invalid_Stream_Node_Ref
+         else
+           (not Tree.Parents_Set or else
+              Tree.Is_Descendant_Of (Root => Tree.Get_Node (Ref.Stream, Ref.Element), Descendant => Ref.Node)));
+   end Valid_Stream_Node;
 
    procedure Validate_Tree
-     (Tree          : in out Syntax_Trees.Tree;
-      Terminals     : in     Base_Token_Array_Access_Constant;
-      Descriptor    : in     WisiToken.Descriptor;
-      File_Name     : in     String;
-      Root          : in     Node_Index                 := Invalid_Node_Index;
-      Validate_Node : in     Syntax_Trees.Validate_Node := null)
+     (Tree              : in out Syntax_Trees.Tree;
+      User_Data         : in out User_Data_Type'Class;
+      Error_Reported    : in out Node_Sets.Set;
+      Node_Index_Order  : in     Boolean;
+      Byte_Region_Order : in     Boolean                    := True;
+      Root              : in     Node_Access                := Invalid_Node_Access;
+      Validate_Node     : in     Syntax_Trees.Validate_Node := null)
    is
+
+      Real_Root : Node_Access;
+
+      Last_Source_Terminal_Pos : Base_Buffer_Pos := Buffer_Pos'First;
+
       procedure Process_Node
         (Tree : in out Syntax_Trees.Tree;
-         Node : in     Valid_Node_Index)
+         Node : in     Valid_Node_Access)
       is
-         use Ada.Text_IO;
-         N : Syntax_Trees.Node renames Tree.Shared_Tree.Nodes (Node);
          Node_Image_Output : Boolean := False;
+
+         procedure Put_Error (Msg : in String)
+         is begin
+            --  Tell the caller that some error happened, even if not
+            --  Node_Index_Order.
+            Error_Reported.Insert (Node);
+
+            if not Node_Image_Output then
+               Tree.Lexer.Trace.Put_Line
+                 (Tree.Error_Message
+                    (Node,
+                     Image (Tree, Node,
+                            Children     => False,
+                            Node_Numbers => True)));
+
+               Node_Image_Output := True;
+            end if;
+
+            Tree.Lexer.Trace.Put_Line (Tree.Error_Message (Node, "... invalid_tree: " & Msg));
+         end Put_Error;
+
       begin
-         if N.Label = Nonterm then
-            for I in N.Children.First_Index .. N.Children.Last_Index loop
-               if N.Children (I) = Deleted_Child then
-                  if not Node_Image_Output then
-                     Put_Line
-                       (Current_Error,
-                        Tree.Error_Message
-                          (Terminals, Node, File_Name,
-                           Image (Tree, N, Node, Descriptor,
-                                  Include_Children => False,
-                                  Node_Numbers     => True)));
+         --  Node_Index checked in Nonterm below; no check needed for
+         --  Source_Terminal since that is set by lexer. Node_Index on Virtual
+         --  terminals not checked.
+
+         if Node = Real_Root then
+            if Node.Parent /= null then
+               Put_Error ("root parent set expecting null");
+            end if;
+         elsif Node.Parent = null then
+            Put_Error ("parent null expecting set");
+            --  Node in Parent.Children checked below.
+         end if;
+
+         --  Augmented handled by Validate_Node below.
+
+         if Node.Error_List /= null then
+            for Err of Node.Error_List.all loop
+               declare
+                  Node_Error_Reported : Boolean :=
+                    (if Node_Index_Order
+                     then Error_Reported.Contains (Node)
+                     else False);
+               begin
+                  Validate_Error (Err, Tree, Node, Node_Error_Reported);
+                  if Node_Error_Reported then
+                     Error_Reported.Insert (Node);
                      Node_Image_Output := True;
                   end if;
-                  Put_Line
-                    (Current_Error, Tree.Error_Message
-                       (Terminals, Node, File_Name, "... child" & I'Image & " 
deleted"));
+               end;
+            end loop;
+         end if;
+
+         case Node.Label is
+         when Terminal_Label =>
+            if Node.Sequential_Index /= Invalid_Sequential_Index then
+               Put_Error ("invalid Sequential_Index:" & 
Node.Sequential_Index'Image);
+            end if;
+            case Terminal_Label'(Node.Label) is
+            when Source_Terminal =>
+               if Byte_Region_Order and then Node.Byte_Region.First < Last_Source_Terminal_Pos then
+                  Put_Error ("byte_region out of order");
+               end if;
+               if Node.Non_Grammar.Length > 0 then
+                  Last_Source_Terminal_Pos := Node.Non_Grammar (Node.Non_Grammar.Last_Index).Byte_Region.Last;
+               else
+                  Last_Source_Terminal_Pos := Node.Byte_Region.Last;
+               end if;
+
+               for Deleted of Node.Following_Deleted loop
+                  if Deleted.Parent /= Node then
+                     Put_Error ("deleted.parent wrong");
+                  end if;
+                  if not Tree.In_Tree (Deleted) then
+                     Put_Error ("deleted not in Tree.Nodes");
+                  end if;
+                  if Deleted.Error_List /= null then
+                     for Err of Deleted.Error_List.all loop
+                        declare
+                           Node_Error_Reported : Boolean :=
+                             (if Node_Index_Order
+                              then Error_Reported.Contains (Deleted)
+                              else False);
+                        begin
+                           Validate_Error (Err, Tree, Deleted, Node_Error_Reported);
+                           if Node_Error_Reported then
+                              Error_Reported.Insert (Deleted);
+                           end if;
+                        end;
+                     end loop;
+                  end if;
+                  if Deleted.Following_Deleted.Length > 0 then
+                     Put_Error ("deleted has following_deleted");
+                  end if;
+               end loop;
+
+            when Virtual_Terminal | Virtual_Identifier =>
+               if Node.Non_Grammar.Length > 0 then
+                  Last_Source_Terminal_Pos := Node.Non_Grammar (Node.Non_Grammar.Last_Index).Byte_Region.Last;
+               end if;
+            end case;
+
+         when Nonterm =>
+            for I in Node.Children'Range loop
+               if Node.Children (I) = null then
+                  Put_Error ("child" & I'Image & " deleted");
 
                else
+                  if Node_Index_Order and then
+                    abs Node.Children (I).Node_Index >= abs Node.Node_Index
+                  then
+                     Put_Error
+                       ("child.node_index" & Node_Index'Image (abs 
Node.Children (I).Node_Index) &
+                          " >= parent.node_index" & Node_Index'Image (abs 
Node.Node_Index));
+                  end if;
                   declare
-                     Child_Parent : constant Node_Index := Tree.Shared_Tree.Nodes (N.Children (I)).Parent;
+                     Child_Parent : constant Node_Access := Node.Children (I).Parent;
                   begin
                      if Child_Parent /= Node then
-                        if not Node_Image_Output then
-                           Put_Line
-                             (Current_Error,
-                              Tree.Error_Message
-                                (Terminals, Node, File_Name,
-                                 Image (Tree, N, Node, Descriptor,
-                                        Include_Children => False,
-                                        Node_Numbers     => True)));
-                           Node_Image_Output := True;
-                        end if;
-                        if Child_Parent = Invalid_Node_Index then
-                           Put_Line
-                             (Current_Error, Tree.Error_Message
-                                (Terminals, Node, File_Name, "... child.parent 
invalid"));
-                        else
-                           Put_Line
-                             (Current_Error, Tree.Error_Message
-                                (Terminals, Node, File_Name, "... 
child.parent" & Child_Parent'Image & " incorrect"));
-                        end if;
+                        Put_Error
+                          ((if Child_Parent = Invalid_Node_Access
+                            then "child.parent invalid"
+                            else "child.parent incorrect"));
                      end if;
                   end;
                end if;
             end loop;
-         end if;
+         end case;
 
          if Validate_Node /= null then
-            Validate_Node (Tree, Node, Node_Image_Output);
+            declare
+               Node_Error_Reported : Boolean :=
+                 (if Node_Index_Order
+                  then Error_Reported.Contains (Node)
+                  else False);
+            begin
+               Validate_Node (Tree, Node, User_Data, Node_Error_Reported);
+               if Node_Error_Reported then
+                  Error_Reported.Insert (Node);
+                  Node_Image_Output := True;
+               end if;
+            end;
          end if;
       end Process_Node;
 
    begin
-      Process_Tree (Tree, (if Root = Invalid_Node_Index then Tree.Root else Root), Process_Node'Access);
+      if Root /= Invalid_Node_Access then
+         Real_Root := Root;
+         Process_Tree (Tree, Root, Process_Node'Access);
+      else
+         if Tree.Streams.Length = 0 then
+            if Tree.Root = Invalid_Node_Access then
+               Tree.Lexer.Trace.Put_Line
+                 (Error_Message
+                    (Tree.Lexer.File_Name, 1, 1, "... invalid_tree: Tree.Root not set"));
+            else
+               Real_Root := Tree.Root;
+               Process_Tree (Tree, Tree.Root, Process_Node'Access);
+
+               if Validate_Node = Mark_In_Tree'Access then
+                  for Node of Tree.Nodes loop
+                     if Node.Augmented = null then
+                        Error_Reported.Insert (Node);
+
+                        --  Node is not in tree, so can't use Tree.Error_Message
+                        Tree.Lexer.Trace.Put_Line
+                          (Image
+                             (Tree, Node,
+                              Children     => False,
+                              Node_Numbers => True,
+                              Safe_Only    => True));
+                        Tree.Lexer.Trace.Put_Line
+                          ("... invalid_tree: node in Tree.Nodes but not in 
tree (has parent, should not)");
+                     end if;
+                  end loop;
+               end if;
+            end if;
+         else
+            for Stream of Tree.Streams loop
+               declare
+                  use Stream_Element_Lists;
+                  Cur : Cursor := Stream.Elements.First;
+               begin
+                  loop
+                     exit when Cur = No_Element;
+                     Real_Root := Element (Cur).Node;
+                     Process_Tree (Tree, Real_Root, Process_Node'Access);
+                     Next (Cur);
+                  end loop;
+               end;
+            end loop;
+         end if;
+      end if;
    end Validate_Tree;
 
+   procedure Sequential_Index_Cleared (Tree : in Syntax_Trees.Tree)
+   is
+      Stream : Stream_ID := Tree.Shared_Stream;
+   begin
+      loop
+         declare
+            Terminal : Stream_Node_Parents := Tree.To_Stream_Node_Parents
+              (Tree.To_Rooted_Ref (Stream, Tree.Stream_First (Stream, Skip_SOI => False)));
+         begin
+            loop
+               if Terminal.Ref.Node.Sequential_Index /= Invalid_Sequential_Index then
+                  raise SAL.Programmer_Error with "sequential_index not cleared: " & Tree.Image (Terminal.Ref);
+               end if;
+               Tree.Next_Terminal (Terminal, Following => True);
+               exit when Terminal.Ref = Invalid_Stream_Node_Ref;
+            end loop;
+         end;
+         Tree.Next_Parse_Stream (Stream);
+         exit when Stream = Invalid_Stream_ID;
+      end loop;
+   end Sequential_Index_Cleared;
+
 end WisiToken.Syntax_Trees;
diff --git a/wisitoken-syntax_trees.ads b/wisitoken-syntax_trees.ads
index db29bacbc3..8e89007b24 100644
--- a/wisitoken-syntax_trees.ads
+++ b/wisitoken-syntax_trees.ads
@@ -1,44 +1,88 @@
 --  Abstract :
 --
---  Syntax tree type and operations.
+--  Syntax tree type and operations, providing parse streams as
+--  described in [1], but generalized for parallel parsers. Data
+--  structures and operations are optimized for incremental parsing.
+--
+--  References :
+--
+--  [1] Elad Lahav 2004, Efficient Semantic Analysis for Text Editors
 --
 --  Design :
 --
---  There is one syntax tree for each parallel parser. There is one
---  shared Terminals array (provided by the master parser), matching
---  the actual input text.
+--  There is one syntax tree; parallel parsers all add nodes to the
+--  same tree, maintaining different roots via Stream_IDs.
+--
+--  During parsing, the Shared_Stream contains all of the input source
+--  text, either as terminal tokens from the lexer in batch parse, or
+--  a mix of terminal and nonterminal tokens from Parse.Edit_Tree in
+--  incremental parse.
+--
+--  Node_Index is used only for debugging. Node_Index on nonterms is
+--  negative. Node_Index on terminal nodes created by the lexer in the
+--  shared stream is positive; Node_Index on virtual nodes inserted by
+--  error recover is negative.
+--
+--  During a batch parse, Node_Index on terminals is sequential, as a
+--  consequence of lexing the source code first; Node_Index on
+--  nonterms is unique within the tree, and abs Node_Index of a
+--  nonterm is greater than abs Node_Index of any of its children.
+--  Error recover inserts and deletes terminals, with non-sequential
+--  Node_Index.
+--
+--  In incremental parse, Node_Index on terminals is not sequential,
+--  and Node_Index is not unique within the tree.
+--
+--  Error recover uses Sequential_Index to determine success, and to
+--  control where terminals are inserted and deleted. To be
+--  incremental, Sequential_Index is only set in a small portion of
+--  the shared stream at error recover start, extended as needed
+--  during error recover, and cleared when error recover completes.
+--
+--  During and after parsing, the sequence of terminals in the parse
+--  stream or syntax tree is given by Next_Terminal/Prev_Terminal.
 --
---  Node contains a Parent component, to make it easy to traverse the
---  tree in any direction. However, we do not set the Parent nodes
---  while parsing, to simplify branching the syntax tree for parallel
---  parsing. When a new nonterm is added to a branched tree, if it set
---  the parent component of its children, it would first have to move
---  those children, and all intervening nodes, into the branched tree.
---  Since Shared_Terminals nodes are created before all other nodes
---  (when the lexer is run, to allow Lexer_To_Augmented to store info
---  in the node), that would mean every branched tree is a practically
---  complete copy of the entire tree, significantly slowing down
---  parsing (by a factor of 250 on ada-mode wisi.adb when we did this
---  by mistake!).
+--  Each parallel parser uses one stream as the parse stack and
+--  auxiliary input stream. The auxiliary input stream contains tokens
+--  that are pushed back in error recovery, or broken down from
+--  Shared_Stream in incremental parse.
 --
---  The parent components are set by Set_Parents, which is called by
---  Parser.Execute_Actions before the actions are executed.
---  Fortunately, we don't need the parent components during error
---  recover. After calling Set_Parents (ie, while editing the syntax
---  tree after parse), any functions that modify children or parents
---  update the corresponding links, setting them to Invalid_Node_Index
---  or Deleted_Child as appropriate.
+--  Nodes that are deleted from the parse stream during error recover
+--  are referenced from the preceding terminal node or SOI, so they
+--  may be restored on the next incremental parse if appropriate.
+--  Similarly, parse errors are referenced from the error node. In
+--  order to avoid editing shared nodes, any nodes that are edited to
+--  add deleted or error references are copied to the parse stream
+--  first.
 --
---  We provide Base_Tree and Tree in one package, because only Tree
---  needs an API; the only way Base_Tree is accessed is via Tree.
+--  Each node contains a Parent link, to make it easy to traverse the
+--  tree in any direction after parsing is done. We do not set the
+--  Parent links while parsing, to avoid having to copy nodes. During
+--  batch parsing, parent links are not set; error recover must use
+--  explicit Parent stack versions of tree routines. All Parent links
+--  are set when parse completes; condition Tree.Editable ensures that
+--  there is a single fully parsed tree with all parent links set. At
+--  the start of incremental parse (during and after Edit_Tree), the
+--  shared stream has complete parent links. While editing the syntax
+--  tree after parse, any functions that modify children or parent
+--  relationships update the corresponding links, setting them to
+--  Invalid_Node_Access as appropriate.
 --
---  Base_Tree and Tree are not limited to allow
---  wisitoken-parse-lr-parser_lists.ads Prepend_Copy to copy them. No
---  Adjust is needed; Shared_Tree is shared between parsers, and
---  Augmented pointers are also shared, since during parse they are
---  set only for Shared_Terminals.
+--  We don't store the parse State in syntax tree nodes, to avoid
+--  having to copy nodes during parsing. State is stored in the parse
+--  stream elements. This means Parse.LR.Undo_Reduce has to call
+--  Action_For to compute the state for the child nodes.
 --
---  Copyright (C) 2018 - 2020 Free Software Foundation, Inc.
+--  Type Tree is limited because a bit-copy is not a good start on copy
+--  for assign; use Copy_Tree.
+--
+--  We can't traverse Tree.Streams to deallocate tree Nodes, either
+--  when streams are terminated or during Finalize; in general Nodes
+--  are referenced multiple times in multiple streams. So we keep
+--  track of nodes to deallocate in Tree.Nodes. Nodes are deallocated
+--  in Clear_Parse_Streams and when the entire tree is Finalized.
+--
+--  Copyright (C) 2018 - 2021 Free Software Foundation, Inc.
 --
 --  This library is free software;  you can redistribute it and/or modify it
 --  under terms of the  GNU General Public License  as published by the Free
@@ -54,20 +98,73 @@
 pragma License (Modified_GPL);
 
 with Ada.Finalization;
+with Ada.Iterator_Interfaces;
+with Ada.Unchecked_Deallocation;
+with SAL.Gen_Definite_Doubly_Linked_Lists.Gen_Image_Aux;
+with SAL.Gen_Definite_Doubly_Linked_Lists_Ref_Count;
+with SAL.Gen_Indefinite_Doubly_Linked_Lists;
+with SAL.Gen_Trimmed_Image;
+with SAL.Gen_Unbounded_Definite_Stacks;
 with SAL.Gen_Unbounded_Definite_Vectors;
+with SAL.Gen_Unbounded_Sparse_Ordered_Sets;
+with SAL.Generic_Decimal_Image;
 with WisiToken.Lexer;
 package WisiToken.Syntax_Trees is
+   use all type SAL.Base_Peek_Type;
 
-   type Base_Tree is new Ada.Finalization.Controlled with private;
+   type Node (<>) is private;
+   type Node_Access is access all Node;
+   subtype Valid_Node_Access is not null Node_Access;
 
-   type Base_Tree_Access is access all Base_Tree;
+   Invalid_Node_Access : constant Node_Access := null;
+   Dummy_Node : constant Valid_Node_Access;
+   --  Use when you must initialize a Valid_Node_Access before overwriting it.
 
-   overriding procedure Finalize (Tree : in out Base_Tree);
-   --  Free any allocated storage.
+   type Node_Access_Array is array (Positive_Index_Type range <>) of Node_Access;
+   type Valid_Node_Access_Array is array (Positive_Index_Type range <>) of Valid_Node_Access;
+
+   function To_Node_Access (Item : in Valid_Node_Access_Array) return Node_Access_Array;
+   function To_Valid_Node_Access (Item : in Node_Access_Array) return Valid_Node_Access_Array;
+
+   package Valid_Node_Access_Lists is new SAL.Gen_Definite_Doubly_Linked_Lists (Valid_Node_Access);
+   use all type Valid_Node_Access_Lists.Cursor;
+
+   package Node_Stacks is new SAL.Gen_Unbounded_Definite_Stacks (Valid_Node_Access);
+
+   type Stream_ID is private;
+   Invalid_Stream_ID : constant Stream_ID;
+
+   type Stream_ID_Array is array (Positive_Index_Type range <>) of Stream_ID;
 
-   function Is_Empty (Tree : in Base_Tree) return Boolean;
+   type Stream_Index is private;
+   Invalid_Stream_Index : constant Stream_Index;
 
-   type Tree is new Ada.Finalization.Controlled with private;
+   type Node_Index is new Integer range Integer'First + 1 .. Integer'Last;
+   subtype Positive_Node_Index is Node_Index range 1 .. Node_Index'Last;
+   Invalid_Node_Index : constant Node_Index := Node_Index'Last;
+
+   type Base_Sequential_Index is new Integer;
+   Invalid_Sequential_Index : constant Base_Sequential_Index := Base_Sequential_Index'Last;
+   subtype Sequential_Index is Base_Sequential_Index range Base_Sequential_Index'First .. Invalid_Sequential_Index - 1;
+   --  Identifies a sequence of tokens in Shared_Stream during error
+   --  recovery. Index 1 is the error token (set in
+   --  wisitoken-parse-lr-mckenzie_recover-base.adb Initialize).
+   --
+   --  We need arbitrarily large negative index for Push_Back and
+   --  Undo_Reduce error recover operations, and arbitrarily large
+   --  positive index for handling unterminated strings.
+
+   type Base_Tree is new Ada.Finalization.Limited_Controlled with record
+      --  Visible components of Tree.
+
+      Lexer : WisiToken.Lexer.Handle;
+      --  Declared here because it provides access to the source text; any
+      --  code that needs access to Tree most likely also needs access to
+      --  the source text.
+   end record;
+
+   type Tree is new Base_Tree with private;
+   --  Use Copy_Tree to get a copy.
 
    type Tree_Variable_Reference (Element : not null access Tree) is null record with
      Implicit_Dereference => Element;
@@ -75,37 +172,395 @@ package WisiToken.Syntax_Trees is
    type Tree_Constant_Reference (Element : not null access constant Tree) is null record with
      Implicit_Dereference => Element;
 
+   overriding procedure Finalize (Tree : in out Syntax_Trees.Tree);
+   --  Free any allocated storage.
+
+   procedure Free_Augmented (Tree : in Syntax_Trees.Tree);
+   --  Free Augmented in all nodes.
+
+   procedure Clear
+     (Tree        : in out Syntax_Trees.Tree;
+      Free_Memory : in     Boolean := False)
+   with Post => Tree.Cleared;
+   --  Delete all nodes in all streams, reset for new lex and parse.
+   --  Free_Memory applies to internal bookkeeping; leaving it False may
+   --  slightly speed parsing a similar sized file as the previous one.
+
    function Is_Empty (Tree : in Syntax_Trees.Tree) return Boolean;
 
-   procedure Initialize
-     (Branched_Tree : in out Tree;
-      Shared_Tree   : in     Base_Tree_Access;
-      Flush         : in     Boolean;
-      Set_Parents   : in     Boolean := False)
-   with Pre => Branched_Tree.Is_Empty and Shared_Tree.Is_Empty;
-   --  Set Branched_Tree to refer to Shared_Tree.
+   type Error_Data is abstract tagged null record;
+   --  A node that has error data is called an "error node".
+   --
+   --  Since nodes can be shared between parse streams, an error node
+   --  must be copied to the parse stream when any change is made to the
+   --  error data; different parsers can make different changes.
+   --  Update_Error handles this. There are no variable references to
+   --  errors.
+   --
+   --  Errors can contain references to nodes, which must be updated
+   --  properly by Copy_Tree; Adjust_Copy must do this. We don't
+   --  use "limited" for this to avoid the finalization overhead.
+
+   procedure Adjust_Copy (Data : in out Error_Data)
+   is abstract;
+   --  Some or all of Data has been copied by Copy_Tree or Copy_Node;
+   --  update any Node_Access values using Copied_Node.
+
+   function Dispatch_Equal (Left : in Error_Data; Right : in Error_Data'Class) return Boolean
+   is abstract;
+   --  True if Left matches Right for purposes of Update_Error and
+   --  Delete_Errors_In_Input, below.
+   --
+   --  Not named "=" because that's always ambiguous with the predefined "=".
 
-   overriding procedure Finalize (Tree : in out Syntax_Trees.Tree);
-   --  Free any allocated storage.
+   function To_Message
+     (Data       : in Error_Data;
+      Tree       : in Syntax_Trees.Tree'Class;
+      Error_Node : in Valid_Node_Access)
+     return Error_Data'Class
+   is abstract;
+   --  Convert Data to a simple message; it is being moved to another
+   --  node (see wisitoken-parse-lr.adb Undo_Reduce).
+
+   function Image
+     (Data       : in Error_Data;
+      Tree       : in Syntax_Trees.Tree'Class;
+      Error_Node : in Valid_Node_Access)
+     return String
+   is abstract;
+   --  Should not include file name, line number; a higher level will add
+   --  that if desired.
+
+   function Class_Image (Data : in Error_Data) return String
+   is abstract;
+   --  Return image of Data'Class; ie "parser" or "lexer". For Tree.Image (Node).
+
+   --  See comment in Put_Tree body about Half_Node_Index
+   subtype Half_Node_Index is Node_Index range Node_Index'First / 2 .. Node_Index'Last / 2;
+   package Node_Index_Array_Node_Access is new SAL.Gen_Unbounded_Definite_Vectors
+     (Half_Node_Index, Node_Access, Default_Element => Invalid_Node_Access);
+
+   procedure Set_Node_Access
+     (Data           : in out Error_Data;
+      Node_Index_Map : in     Node_Index_Array_Node_Access.Vector)
+   is abstract;
+   --  Called from Get_Tree to convert Data.Input_Node_Index to *_Node.
+   --  Required because some errors reference later nodes.
+
+   procedure Validate_Error
+     (Data                : in     Error_Data;
+      Tree                : in     Syntax_Trees.Tree'Class;
+      Error_Node          : in     Valid_Node_Access;
+      Node_Error_Reported : in out Boolean)
+   is abstract;
+   --  Called by Validate_Tree for each error; validate that any node
+   --  links are in Tree.Nodes, and anything else that needs validating.
+   --  Node_Error_Reported is described in the
+   --  comment for Validate_Node below.
+
+   type Null_Error_Data is new Error_Data with null record;
+   --  For Error_Data parameters when there is no error.
+
+   overriding procedure Adjust_Copy (Data : in out Null_Error_Data) is null;
+   overriding function Dispatch_Equal
+     (Left  : in Null_Error_Data;
+      Right : in Error_Data'Class)
+     return Boolean
+   is (Right in Null_Error_Data);
+   overriding function To_Message
+     (Data       : in Null_Error_Data;
+      Tree       : in Syntax_Trees.Tree'Class;
+      Error_Node : in Valid_Node_Access)
+     return Error_Data'Class
+   is (raise SAL.Programmer_Error);
+
+   overriding function Image
+     (Data       : in Null_Error_Data;
+      Tree       : in Syntax_Trees.Tree'Class;
+      Error_Node : in Valid_Node_Access)
+     return String
+   is ("null");
+
+   overriding function Class_Image (Data : in Null_Error_Data) return String is ("null");
+
+   overriding procedure Set_Node_Access
+     (Data           : in out Null_Error_Data;
+      Node_Index_Map : in     Node_Index_Array_Node_Access.Vector)
+   is null;
+
+   overriding
+   procedure Validate_Error
+     (Data                : in     Null_Error_Data;
+      Tree                : in     Syntax_Trees.Tree'Class;
+      Error_Node          : in     Valid_Node_Access;
+      Node_Error_Reported : in out Boolean)
+   is null;
+
+   pragma Warnings (Off, """others"" choice is redundant");
+   No_Error : constant Null_Error_Data := (others => <>);
+   pragma Warnings (On);
+
+   No_Error_Classwide : constant Error_Data'Class := Error_Data'Class (No_Error);
+
+   package Error_Data_Lists is new SAL.Gen_Indefinite_Doubly_Linked_Lists (Error_Data'Class);
+
+   Null_Error_List : Error_Data_Lists.List renames Error_Data_Lists.Empty_List;
+
+   type Error_Data_List_Const_Ref (List : not null access constant Error_Data_Lists.List) is private
+   with Implicit_Dereference => List;
+   --  There is no "_var_ref"; error nodes must be copied on any change.
+   --  See note at Error_Data declaration.
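A concrete error type must override each abstract operation declared above. A minimal sketch, illustrative only (Code_Error and its component are hypothetical names, not part of this package):

   type Code_Error is new Error_Data with record
      Code : Natural := 0;  --  hypothetical payload
   end record;

   overriding procedure Adjust_Copy (Data : in out Code_Error) is null;
   --  Nothing to fix up; this sketch holds no node references.

   overriding function Dispatch_Equal
     (Left : in Code_Error; Right : in Error_Data'Class) return Boolean
   is (Right in Code_Error and then Code_Error (Right).Code = Left.Code);

   overriding function Class_Image (Data : in Code_Error) return String is ("code");

   --  To_Message, Image, Set_Node_Access and Validate_Error are overridden
   --  similarly; Null_Error_Data above shows the trivial versions.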
 
    type Node_Label is
-     (Shared_Terminal,    -- text is user input, accessed via Parser.Terminals
+     (Source_Terminal,    -- text is user input, accessed via Lexer
       Virtual_Terminal,   -- no text; inserted during error recovery
       Virtual_Identifier, -- text in user data, created during tree rewrite
-      Nonterm             -- contains terminals/nonterminals/identifiers
-     );
+      Nonterm);           -- nonterminal node.
+   subtype Terminal_Label is Node_Label range Source_Terminal .. Virtual_Identifier;
+   subtype Virtual_Terminal_Label is Node_Label range Virtual_Terminal .. Virtual_Identifier;
+
+   function Label (Node : in Valid_Node_Access) return Node_Label;
+
+   function Shared_Stream (Tree : in Syntax_Trees.Tree) return Stream_ID;
+
+   type Stream_Node_Ref is record
+      Stream  : Stream_ID;
+      Element : Stream_Index;
+      Node    : Node_Access  := Invalid_Node_Access;
+      --  If both valid, Element contains Node in Stream. In some cases,
+      --  Element is valid but Node is Invalid_Node_Access (for example, if
+      --  the ref is the First_Terminal in an empty nonterm). In post-parse
+      --  actions, the parse stream is deleted, so Stream is
+      --  Invalid_Stream_Index, Element is Invalid_Stream_Index, but Node is
+      --  valid.
+   end record;
+
+   Invalid_Stream_Node_Ref : constant Stream_Node_Ref;
+
+   function Correct_Stream_Node
+     (Tree : in Syntax_Trees.Tree;
+      Ref  : in Stream_Node_Ref)
+     return Boolean;
+   --  True if Ref = Invalid_Stream_Node_Ref or Ref.Node =
+   --  Invalid_Node_Access or Stream contains Element, which contains
+   --  Node.
+   --
+   --  We allow Ref.Node = Invalid_Node_Access so a Stream_Node_Ref can
+   --  be First_Terminal of an empty nonterm, while still allowing
+   --  Next_Terminal (Ref).
+   --
+   --  Note that this is False in post-parse actions; there are no
+   --  streams, so Element is Invalid_Stream_Index.
+
+   function Valid_Stream_Node
+     (Tree : in Syntax_Trees.Tree;
+      Ref  : in Stream_Node_Ref)
+     return Boolean;
+   --  True if Ref refers to a node (possibly an empty nonterm).
+
+   function To_Stream_Node_Ref
+     (Tree    : in Syntax_Trees.Tree;
+      Stream  : in Stream_ID;
+      Node    : in Valid_Node_Access)
+     return Stream_Node_Ref
+   with Pre => Tree.Parents_Set,
+     Post => Tree.Valid_Stream_Node (To_Stream_Node_Ref'Result);
+
+   subtype Terminal_Ref is Stream_Node_Ref
+   with Dynamic_Predicate =>
+     Terminal_Ref.Node = Invalid_Node_Access or else
+     Label (Terminal_Ref.Node) in Terminal_Label;
+
+   function Valid_Terminal
+     (Tree : in Syntax_Trees.Tree;
+      Ref  : in Terminal_Ref)
+     return Boolean
+   is (Valid_Stream_Node (Tree, Ref) and Ref.Node /= Invalid_Node_Access);
+   --  True if Ref refers to a Terminal node.
+
+   function Single_Terminal (Ref : in Stream_Node_Ref) return Boolean;
+   --  True if Ref contains a single terminal node.
+
+   function Valid_Single_Terminal
+     (Tree : in Syntax_Trees.Tree;
+      Ref  : in Stream_Node_Ref)
+     return Boolean
+   is (Valid_Stream_Node (Tree, Ref) and then Single_Terminal (Ref));
+
+   subtype Single_Terminal_Ref is Stream_Node_Ref
+   with Dynamic_Predicate =>
+     Single_Terminal_Ref.Node = Invalid_Node_Access or else
+     Single_Terminal (Single_Terminal_Ref);
+
+   function Rooted (Ref : in Stream_Node_Ref) return Boolean;
+   --  True if Ref.Element.Node = Ref.Node.
+
+   subtype Rooted_Ref is Stream_Node_Ref
+   with Dynamic_Predicate =>
+     (Rooted_Ref = Invalid_Stream_Node_Ref --  allows initialization
+        or else Rooted (Rooted_Ref));
+
+   function To_Rooted_Ref
+     (Tree    : in Syntax_Trees.Tree;
+      Stream  : in Stream_ID;
+      Element : in Stream_Index)
+     return Rooted_Ref;
+
+   type Stream_Node_Parents is record
+      Ref     : Stream_Node_Ref;
+      Parents : Node_Stacks.Stack;
+      --  Parents stores the path from Ref.Element.Node to Ref.Node. Parents
+      --  is empty if Ref is rooted (ie Ref.Element.Node = Ref.Node). If not
+      --  rooted, Parents.Peek (1) is Ref.Node parent, and Parents.Peek
+      --  (Parents.Depth) is Ref.Element.Node.
+   end record;
+
+   Invalid_Stream_Node_Parents : constant Stream_Node_Parents;
+
+   type Stream_Node_Parents_Array is array (Positive_Index_Type range <>) of Stream_Node_Parents;
+
+   function Parents_Valid (Ref : in Stream_Node_Parents) return Boolean;
+   --  True if Parents gives the path from Element.Node to Node, or Element or Node is invalid.
+
+   function To_Stream_Node_Parents (Tree : in Syntax_Trees.Tree; Ref : in Stream_Node_Ref) return Stream_Node_Parents
+   with Pre => Ref = Invalid_Stream_Node_Ref or else Tree.Parents_Set or else
+               (Rooted (Ref) or Ref.Node = Tree.First_Terminal (Get_Node (Ref.Element))),
+     Post => Parents_Valid (To_Stream_Node_Parents'Result);
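For example, a sketch (illustrative only; Ref is assumed to be a valid, non-rooted Stream_Node_Ref in a tree with parent links set):

   Deep : constant Stream_Node_Parents := Tree.To_Stream_Node_Parents (Ref);
   --  Deep.Parents.Peek (1) is Ref.Node's parent, and Deep.Parents.Peek
   --  (Deep.Parents.Depth) is the element root, Tree.Get_Node (Ref.Stream, Ref.Element).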
+
+   type Recover_Token (Virtual : Boolean := True) is record
+      --  Virtual is True if there is no node in the syntax tree that is
+      --  this token; it was created by error recover.
+
+      --  Declared here because it needs Node_Access
+
+      --  Maintaining a syntax tree during error recovery is too slow, so we
+      --  store enough information in the recover stack to perform
+      --  In_Parse_Actions, Language_Fixes, Push_Back and Undo_Reduce
+      --  operations, and to apply the solution to the main parser state. We
+      --  make thousands of copies of the parse stack during recover, so
+      --  minimizing size and compute time for this is critical.
+
+      case Virtual is
+      when True =>
+         ID : Token_ID := Invalid_Token_ID;
+
+         First_Terminal : Node_Access := Invalid_Node_Access;
+         Last_Terminal  : Node_Access := Invalid_Node_Access;
+         --  For ID in Nonterminals, first and last terminal of this token in
+         --  the Tree shared or parse stream, cached from children;
+         --  Invalid_Node_Access if the children are virtual. For terminals,
+         --  Invalid_Node_Access. Used to detect empty nonterm and compute
+         --  Name.
+
+         Name : Buffer_Region := Null_Buffer_Region;
+         --  Set and used by In_Parse_Actions.
+
+         Contains_Virtual_Terminal : Boolean := False;
+         --  True if any contained terminal is Virtual.
+
+      when False =>
+         Element_Node : Node_Access := Invalid_Node_Access;
+         Node         : Node_Access := Invalid_Node_Access;
+         --  This token in the Tree shared or parse stream.
+         --
+         --  This implements a variant of Stream_Node_Ref for recover.
+         --
+         --  For terminals, Element_Node = Node.
+         --
+         --  For nonterminals, Node = some descendant of Element_Node (similar
+         --  to a Stream_Node_Ref).
+         --
+         --  This token can be virtual, if from Shared_Stream of an edited
+         --  tree.
+         --
+         --  In a non-default Recover_Token, Element_Node cannot be
+         --  Invalid_Node_Access. Node can be Invalid_Node_Access when it is
+         --  nominally a terminal and Element_Node is an empty nonterm.
+      end case;
+   end record;
+
+   subtype Virtual_Recover_Token is Recover_Token (Virtual => True);
+   subtype Real_Recover_Token is Recover_Token (Virtual => False);
+
+   Invalid_Recover_Token : constant Recover_Token := (Virtual => True, ID => Invalid_Token_ID, others => <>);
+
+   function Node_ID (Tree : in Syntax_Trees.Tree; Item : in Recover_Token) return Token_ID
+   with Pre => Item.Virtual = False;
+   function Element_ID (Tree : in Syntax_Trees.Tree; Item : in Recover_Token) return Token_ID;
+
+   function Element_Is_Terminal (Tree : in Syntax_Trees.Tree; Item : in Recover_Token) return Boolean;
+   --  Virtual ID or Element_Node.
+
+   function Byte_Region (Tree : in Syntax_Trees.Tree; Item : in Recover_Token) return Buffer_Region;
+
+   function Name (Tree : in Syntax_Trees.Tree; Item : in Recover_Token) return Buffer_Region;
+   --  If Node.Name = Null_Buffer_Region and Is_Terminal (Node.ID),
+   --  return Node.Byte_Region; else return Node.Name.
+
+   procedure Set_Name
+     (Tree : in     Syntax_Trees.Tree;
+      Item : in out Recover_Token;
+      Name : in     Buffer_Region);
+
+   function Contains_Virtual_Terminal (Tree : in Syntax_Trees.Tree; Item : in Recover_Token) return Boolean;
+   function Contains_Virtual_Terminal
+     (Tree : in Syntax_Trees.Tree;
+      Node : in Valid_Node_Access)
+     return Boolean;
+
+   function Is_Empty_Nonterm
+     (Tree : in Syntax_Trees.Tree;
+      Item : in Recover_Token)
+     return Boolean;
+   --  True if node contains no terminals.
+
+   function First_Terminal (Tree : in Syntax_Trees.Tree; Item : in Recover_Token) return Node_Access;
+   function Last_Terminal (Tree : in Syntax_Trees.Tree; Item : in Recover_Token) return Node_Access;
+
+   function To_Real_Recover_Token (Item : in Stream_Node_Ref) return Real_Recover_Token
+   with Pre => Item.Element /= Invalid_Stream_Index;
+
+   function Make_Rooted (Item : in Recover_Token) return Recover_Token;
+
+   function Image
+     (Tree : in Syntax_Trees.Tree;
+      Item : in Recover_Token)
+     return String;
+
+   type Recover_Token_Array is array (Positive_Index_Type range <>) of Recover_Token;
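Client code typically branches on the Virtual discriminant. A minimal sketch (Token_ID_Of is a hypothetical helper, similar in intent to Element_ID above; assumes "use WisiToken; use WisiToken.Syntax_Trees;" in the enclosing unit):

   function Token_ID_Of
     (Tree : in Syntax_Trees.Tree; Item : in Recover_Token) return Token_ID
   is (if Item.Virtual then Item.ID else Tree.Node_ID (Item));
   --  The if-expression guarantees the precondition of Node_ID (Item.Virtual = False).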
+
+   ----------
+   --  User_Data_Type
+
+   type Base_Augmented is tagged null record;
+
+   function Image_Augmented (Aug : in Base_Augmented) return String
+   is ("");
+
+   type Augmented_Class_Access is access all Base_Augmented'Class;
+   type Augmented_Class_Access_Constant is access constant Base_Augmented'Class;
+
+   procedure Shift
+     (Augmented        : in out Base_Augmented;
+      Shift_Bytes      : in     Base_Buffer_Pos;
+      Shift_Chars      : in     Base_Buffer_Pos;
+      Shift_Line       : in     Base_Line_Number_Type;
+      Last_Stable_Byte : in     Buffer_Pos)
+   is null;
+   --  Add Shift_* to Augmented positions.
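A client extends Base_Augmented to attach per-node data, and overrides Shift so cached buffer positions survive an incremental edit. An illustrative sketch only (Cached_Augmented and its component are hypothetical; assumes "use WisiToken;" in the enclosing client unit, and that Buffer_Pos is a subtype of Base_Buffer_Pos as used in the Shift profile above):

   type Cached_Augmented is new Base_Augmented with record
      First_Byte : Buffer_Pos := Buffer_Pos'First;  --  hypothetical cached position
   end record;

   overriding procedure Shift
     (Augmented        : in out Cached_Augmented;
      Shift_Bytes      : in     Base_Buffer_Pos;
      Shift_Chars      : in     Base_Buffer_Pos;
      Shift_Line       : in     Base_Line_Number_Type;
      Last_Stable_Byte : in     Buffer_Pos)
   is
      pragma Unreferenced (Shift_Chars, Shift_Line, Last_Stable_Byte);
   begin
      --  Only the byte position is cached in this sketch.
      Augmented.First_Byte := Augmented.First_Byte + Shift_Bytes;
   end Shift;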
+
+   procedure Free is new Ada.Unchecked_Deallocation (Base_Augmented'Class, Augmented_Class_Access);
 
    type User_Data_Type is tagged limited null record;
    --  Many test languages don't need this, so we default the procedures
    --  to null.
 
    type User_Data_Access is access all User_Data_Type'Class;
+   type User_Data_Access_Constant is access constant User_Data_Type'Class;
 
-   procedure Set_Lexer_Terminals
-     (User_Data : in out User_Data_Type;
-      Lexer     : in     WisiToken.Lexer.Handle;
-      Terminals : in     Base_Token_Array_Access_Constant)
-   is null;
+   function New_User_Data (Template : in User_Data_Type) return User_Data_Access
+   is (null);
+   --  Return a new empty object with the same type as Template.
 
    procedure Reset (User_Data : in out User_Data_Type) is null;
    --  Reset to start a new parse.
@@ -113,428 +568,1022 @@ package WisiToken.Syntax_Trees is
    procedure Initialize_Actions
      (User_Data : in out User_Data_Type;
       Tree      : in     Syntax_Trees.Tree'Class)
-     is null;
-   --  Called by Execute_Actions, before processing the tree.
+   is null;
+   --  Called by Execute_Actions, before processing the tree, after
+   --  Insert_Token/Delete_Token.
 
    procedure Lexer_To_Augmented
-     (User_Data : in out          User_Data_Type;
-      Tree      : in out          Syntax_Trees.Tree'Class;
-      Token     : in              Base_Token;
-      Lexer     : not null access WisiToken.Lexer.Instance'Class)
-     is null;
-   --  Read auxiliary data from Lexer, do something useful with it.
-   --  Called before parsing, once for each token in the input stream. If
-   --  Token is a grammar token, client can use Tree.Set_Augmented
-   --  (Token.Tree_Node).
+     (User_Data     : in out User_Data_Type;
+      Tree          : in out Syntax_Trees.Tree'Class;
+      Token         : in     Lexer.Token;
+      Grammar_Token : in     Valid_Node_Access)
+   is null;
+   --  Token is a grammar or non-grammar token that was just returned by
+   --  User_Data.Lexer. If grammar, it is Grammar_Token; if non-grammar,
+   --  it has already been added to Grammar_Token (which is SOI if before
+   --  first grammar token in input). Called before parsing, once for
+   --  each non-grammar token in the input stream.
+
+   function Copy_Augmented
+     (User_Data : in User_Data_Type;
+      Augmented : in Augmented_Class_Access)
+     return Augmented_Class_Access
+   with Pre => Augmented /= null;
+   --  Default implementation raises SAL.Programmer_Error.
 
    function Insert_After
-     (User_Data            : in out User_Data_Type;
-      Tree                 : in     Syntax_Trees.Tree'Class;
-      Token                : in     Valid_Node_Index;
-      Insert_On_Blank_Line : in     Boolean)
-     return Boolean;
-   --  Return True if ID should be treated as if inserted after the
-   --  previous shared terminal, rather than before the next (which is
-   --  the default). This can affect which line it appears on, which
-   --  affects indentation. Called from Insert_Token.
+     (User_Data           : in out User_Data_Type;
+      Tree                : in     Syntax_Trees.Tree'Class;
+      Insert_Token        : in     Valid_Node_Access;
+      Insert_Before_Token : in     Valid_Node_Access;
+      Comment_Present     : in     Boolean;
+      Blank_Line_Present  : in     Boolean)
+     return WisiToken.Insert_Location
+   with Post'Class => (if not (Blank_Line_Present or Comment_Present) then Insert_After'Result /= Between);
+   --  Return an insert location for Insert_Token. This can affect which
+   --  line it appears on, which affects indentation. Should be called from
+   --  user-overridden Insert_Token.
+   --
+   --  If Comment_Present, there is a comment between Tree.Prev_Terminal
+   --  (Insert_Before_Token) and Insert_Before_Token.
    --
-   --  The default implementation always returns False.
+   --  If Blank_Line_Present, there is at least one blank line
+   --  immediately after Tree.Prev_Terminal (Insert_Before_Token) (before
+   --  any comment).
+   --
+   --  The default implementation always returns Before_Next.
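A minimal sketch of an override (illustrative only; My_User_Data is a hypothetical extension of User_Data_Type, and "use all type WisiToken.Insert_Location;" is assumed) that returns Between whenever a comment or blank line is present, and Before_Next otherwise:

   overriding function Insert_After
     (User_Data           : in out My_User_Data;
      Tree                : in     Syntax_Trees.Tree'Class;
      Insert_Token        : in     Valid_Node_Access;
      Insert_Before_Token : in     Valid_Node_Access;
      Comment_Present     : in     Boolean;
      Blank_Line_Present  : in     Boolean)
     return WisiToken.Insert_Location
   is (if Comment_Present or Blank_Line_Present then Between else Before_Next);
   --  Satisfies the class-wide postcondition above: Between is returned
   --  only when a comment or blank line is present.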
 
    procedure Insert_Token
-     (User_Data : in out User_Data_Type;
-      Tree      : in out Syntax_Trees.Tree'Class;
-      Token     : in     Valid_Node_Index)
-   is null;
-   --  Token was inserted in error recovery; update other tokens and Tree
-   --  as needed. Called from Execute_Actions for each inserted token,
-   --  before processing the syntax tree.
-
-   procedure Delete_Token
-     (User_Data   : in out User_Data_Type;
-      Tree        : in out Syntax_Trees.Tree'Class;
-      Token_Index : in     WisiToken.Token_Index)
-   is null;
-   --  Token at Token_Index was deleted in error recovery; update
-   --  remaining tokens as needed. Called from Execute_Actions for each
-   --  deleted token, before processing the syntax tree.
+     (User_Data      : in out User_Data_Type;
+      Tree           : in out Syntax_Trees.Tree'Class;
+      Inserted_Token : in     Syntax_Trees.Valid_Node_Access)
+   is null
+   with Pre'Class => Tree.Parents_Set and Tree.Is_Virtual_Terminal (Inserted_Token);
+   --  Inserted_Token was inserted in error recovery. Move
+   --  Inserted_Token.Non_Grammar as needed to control which line the
+   --  token is on.
+   --
+   --  Called from Execute_Actions for each inserted token, before
+   --  Initialize_Actions.
+
+   procedure Delete_Tokens
+     (User_Data  : in out User_Data_Type;
+      Tree       : in     Syntax_Trees.Tree'Class;
+      Prev_Token : in     Valid_Node_Access)
+   is null
+   with Pre'Class =>
+     Tree.Parents_Set and
+     Tree.Has_Following_Deleted (Prev_Token);
+   --  Prev_Token.Following_Deleted contains tokens that were deleted in
+   --  error recovery; Prev_Token is the non-deleted terminal token
+   --  before the deleted tokens in the parse stream.
+   --
+   --  Any Non_Grammar that were on the deleted tokens have been moved to
+   --  Prev_Token.Non_Grammar during error recover; user code
+   --  may now move them somewhere else if desired.
+   --
+   --  Called from Execute_Actions for each node with deleted tokens,
+   --  before Initialize_Actions.
 
    procedure Reduce
      (User_Data : in out User_Data_Type;
-      Tree      : in out Syntax_Trees.Tree'Class;
-      Nonterm   : in     Valid_Node_Index;
-      Tokens    : in     Valid_Node_Index_Array)
+      Tree      : in     Syntax_Trees.Tree'Class;
+      Nonterm   : in     Valid_Node_Access)
    is null;
-   --  Reduce Tokens to Nonterm. Nonterm.Byte_Region is computed by
-   --  caller.
+   --  Called by Parser.Execute_Actions, just before processing Nonterm;
+   --  Nonterm was created by a 'reduce' parse action.
+
+   ----------
+   --  In_, Post_ Parse_Actions
+   --
+   --  Declared here because Breakdown needs Optimized_List, and the
+   --  actions need Tree and Valid_Node_Access.
 
-   type Semantic_Action is access procedure
+   type Post_Parse_Action is access procedure
      (User_Data : in out User_Data_Type'Class;
       Tree      : in out Syntax_Trees.Tree;
-      Nonterm   : in     Valid_Node_Index;
-      Tokens    : in     Valid_Node_Index_Array);
+      Nonterm   : in     Valid_Node_Access);
    --  Routines of this type are called by
    --  WisiToken.LR.Parser.Execute_Actions when it processes a Nonterm
-   --  node in the syntax tree. Tokens are the children of Nonterm.
+   --  node in the syntax tree.
+
+   Null_Action : constant Post_Parse_Action := null;
+
+   package In_Parse_Actions is
+      type Status_Label is
+        (Ok,
+         Missing_Name_Error, -- block start has name, required block end name missing
+         Extra_Name_Error,   -- block start has no name, end has one
+         Match_Names_Error); -- both names present, but don't match
+
+      subtype Error is Status_Label range Status_Label'Succ (Ok) .. Status_Label'Last;
+
+      type Status (Label : Status_Label := Ok) is record
+         case Label is
+         when Ok =>
+            null;
+
+         when Error =>
+            Begin_Name : Positive_Index_Type;
+            End_Name   : Positive_Index_Type;
+         end case;
+      end record;
+
+      subtype Error_Status is Status
+      with Dynamic_Predicate => Error_Status.Label /= Ok;
+
+      type In_Parse_Action is access function
+        (Tree           : in     Syntax_Trees.Tree;
+         Nonterm        : in out Recover_Token;
+         Tokens         : in     Recover_Token_Array;
+         Recover_Active : in     Boolean)
+        return Status;
+      --  Called during parsing and error recovery to implement higher level
+      --  checks, such as block name matching in Ada.
+   end In_Parse_Actions;
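As an illustrative sketch (not from the wisi sources), an In_Parse_Action for block name matching might look like the following in a client package body. The token indices here are hypothetical and depend on the production being checked, and the body is assumed to have "use WisiToken; use WisiToken.Syntax_Trees;" in effect:

   function Check_Block_Name
     (Tree           : in     Syntax_Trees.Tree;
      Nonterm        : in out Recover_Token;
      Tokens         : in     Recover_Token_Array;
      Recover_Active : in     Boolean)
     return In_Parse_Actions.Status
   is
      pragma Unreferenced (Nonterm, Recover_Active);
      --  Hypothetical positions: token 2 holds the block start name,
      --  the last token holds the block end name.
      Begin_Name_Region : constant Buffer_Region := Tree.Name (Tokens (2));
      End_Name_Region   : constant Buffer_Region := Tree.Name (Tokens (Tokens'Last));
   begin
      if Begin_Name_Region /= Null_Buffer_Region and End_Name_Region = Null_Buffer_Region then
         return
           (Label      => In_Parse_Actions.Missing_Name_Error,
            Begin_Name => 2,
            End_Name   => Tokens'Last);
      else
         return (Label => In_Parse_Actions.Ok);
      end if;
   end Check_Block_Name;

A pointer to such a function is stored in the In_Parse_Action component of RHS_Info below, so the parser can run the check on each reduce.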
+
+   type RHS_Info is record
+      In_Parse_Action   : In_Parse_Actions.In_Parse_Action := null;
+      Post_Parse_Action : Syntax_Trees.Post_Parse_Action   := null;
+   end record;
 
-   Null_Action : constant Semantic_Action := null;
+   package RHS_Info_Arrays is new SAL.Gen_Unbounded_Definite_Vectors
+     (Natural, RHS_Info, Default_Element => (others => <>));
 
-   procedure Clear (Tree : in out Syntax_Trees.Base_Tree);
-   procedure Clear (Tree : in out Syntax_Trees.Tree);
-   --  Delete all Elements and free associated memory; keep results of
-   --  Initialize.
+   type Production_Info is record
+      Optimized_List : Boolean := False;
+      RHSs           : RHS_Info_Arrays.Vector;
+   end record;
 
-   procedure Flush (Tree : in out Syntax_Trees.Tree);
-   --  Move all nodes in branched part to shared tree, set Flush mode
-   --  True.
+   package Production_Info_Trees is new SAL.Gen_Unbounded_Definite_Vectors
+     (Token_ID, Production_Info, Default_Element => (others => <>));
+   --  Indexed by Production_ID.
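   --  Editorial sketch, not part of this commit: the generated grammar code
   --  is assumed to fill one Production_Info per left-hand-side Token_ID,
   --  with one RHS_Info per right hand side. Check_Names and Note_Nonterm
   --  refer to the hypothetical actions sketched above; Append is assumed
   --  from the SAL vector package:
   --
   --     Info : Production_Info;
   --     ...
   --     Info.Optimized_List := False;
   --     Info.RHSs.Append
   --       ((In_Parse_Action   => Check_Names'Access,
   --         Post_Parse_Action => Note_Nonterm'Access));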
 
-   procedure Set_Flush_False (Tree : in out Syntax_Trees.Tree);
-   --  Set Flush mode False; use Flush to set True.
+   ----------
+   --  Parsing operations (including error recovery and incremental
+   --  parse), Tree and Node attributes.
 
-   function Flushed (Tree : in Syntax_Trees.Tree) return Boolean;
+   function New_Stream
+     (Tree       : in out Syntax_Trees.Tree;
+      Old_Stream : in     Stream_ID)
+     return Stream_ID
+   with
+     Pre => Old_Stream = Invalid_Stream_ID or else
+            (not Tree.Parents_Set and Tree.Stream_Count > 1 and Tree.Is_Valid (Old_Stream)),
+     Post => Tree.Is_Valid (New_Stream'Result);
+   --  Create a new parse stream, initially copied from Old_Stream.
 
-   function Copy_Subtree
-     (Tree : in out Syntax_Trees.Tree;
-      Root : in     Valid_Node_Index)
-     return Valid_Node_Index
-   with Pre => Tree.Flushed and Tree.Parents_Set;
-   --  Deep copy (into Tree) subtree of Tree rooted at Root. Return root
-   --  of new subtree; it has no parent.
-   --
-   --  Parents of new child nodes are set. Node index order is preserved.
-   --  References to objects external to tree are shallow copied
-   --  (Terminals, Augmented, Action).
+   function Stream_Count (Tree : in Syntax_Trees.Tree) return Natural;
+   --  Number of active streams.
 
-   function Add_Nonterm
-     (Tree            : in out Syntax_Trees.Tree;
-      Production      : in     Production_ID;
-      Children        : in     Valid_Node_Index_Array;
-      Action          : in     Semantic_Action := null;
-      Default_Virtual : in     Boolean         := False)
-     return Valid_Node_Index
-   with Pre => not Tree.Traversing and
-               (for all C of Children => C /= Deleted_Child);
-   --  Add a new Nonterm node, which can be empty. Result points to the
-   --  added node. If Children'Length = 0, set Nonterm.Virtual :=
-   --  Default_Virtual.
-   --
-   --  If Tree.Parents_Set, then Children.Parent are set to the new node,
-   --  and in previous parents of those children (if any), the
-   --  corresponding entry in Children is set to Deleted_Child.
+   function First_Parse_Stream (Tree : in Syntax_Trees.Tree) return Stream_ID
+   with Pre => Tree.Stream_Count >= 2;
 
-   function Add_Terminal
-     (Tree      : in out Syntax_Trees.Tree;
-      Terminal  : in     Token_Index;
-      Terminals : in     Base_Token_Arrays.Vector)
-     return Valid_Node_Index
-   with Pre => not Tree.Traversing;
-   --  Add a new Terminal node. Terminal must be an index into Terminals.
-   --  Result points to the added node.
+   function Last_Parse_Stream (Tree : in Syntax_Trees.Tree) return Stream_ID
+   with Pre => Tree.Stream_Count >= 2;
 
-   function Add_Terminal
-     (Tree     : in out Syntax_Trees.Tree;
-      Terminal : in     Token_ID;
-      Before   : in     Base_Token_Index := Invalid_Token_Index)
-     return Valid_Node_Index
-   with Pre => not Tree.Traversing;
-   --  Add a new Virtual_Terminal node with no parent. Before is the
-   --  index of the terminal in Terminals that this virtual is inserted
-   --  before during error correction; if Invalid_Token_Index, it is
-   --  inserted during EBNF translation, and there is no such terminal.
-   --  Result points to the added node.
+   procedure Next_Parse_Stream (Tree : in Syntax_Trees.Tree; Stream : in out Stream_ID)
+   with Pre => Stream /= Invalid_Stream_ID;
 
-   function Before
-     (Tree             : in Syntax_Trees.Tree;
-      Virtual_Terminal : in Valid_Node_Index)
-     return Base_Token_Index
-   with Pre => Tree.Is_Virtual_Terminal (Virtual_Terminal);
+   function Stream_Length (Tree : in Syntax_Trees.Tree; Stream : in Stream_ID) return SAL.Base_Peek_Type
+   with Pre => Tree.Is_Valid (Stream);
+   --  Stack + input
 
-   function Add_Identifier
-     (Tree        : in out Syntax_Trees.Tree;
-      ID          : in     Token_ID;
-      Identifier  : in     Identifier_Index;
-      Byte_Region : in     WisiToken.Buffer_Region)
-     return Valid_Node_Index
-   with Pre => Tree.Flushed and (not Tree.Traversing);
-   --  Add a new Virtual_Identifier node with no parent. Byte_Region
-   --  should point to an area in the source buffer related to the new
-   --  identifier, to aid debugging. Result points to the added node.
-
-   procedure Add_Child
+   function Stream_Input_Length (Tree : in Syntax_Trees.Tree; Stream : in Stream_ID) return SAL.Base_Peek_Type
+   with Pre => Tree.Is_Valid (Stream);
+
+   function Stack_Depth (Tree : in Syntax_Trees.Tree; Stream : in Stream_ID) return SAL.Base_Peek_Type
+   with Pre => Tree.Is_Valid (Stream);
+
+   procedure Delete_Stream (Tree : in out Syntax_Trees.Tree; Stream : in out Stream_ID)
+   with Pre => Tree.Is_Valid (Stream);
+   --  Delete the stream
+
+   function Is_Valid (Tree : in Syntax_Trees.Tree; Stream : in Stream_ID) return Boolean;
+   --  Stream is available for parsing operations.
+
+   function Contains
+     (Tree   : in Syntax_Trees.Tree;
+      Stream : in Stream_ID;
+      Token  : in Stream_Index)
+     return Boolean;
+   --  True if Stream and Token are valid, and Token is an element of Stream.
+
+   function Trimmed_Image (Item : in Stream_Index) return String;
+   --  Trimmed_Image of item.node_index.
+
+   function Trimmed_Image (Node : in Node_Access) return String;
+   --  Trimmed_Image of item.node_index.
+
+   function Get_Node
+     (Tree    : in Syntax_Trees.Tree;
+      Stream  : in Stream_ID;
+      Element : in Stream_Index)
+     return Valid_Node_Access
+   with Pre => Tree.Contains (Stream, Element) or Tree.Contains (Tree.Shared_Stream, Element);
+   --  Parser.Current_Token may be from either stream.
+
+   function Get_Node
+     (Element : in Stream_Index)
+     return Valid_Node_Access
+   with Pre => Element /= Invalid_Stream_Index;
+   --  When we don't have the stream.
+
+   procedure Start_Lex
+     (Tree           : in out Syntax_Trees.Tree)
+   with Pre => Tree.Cleared,
+     Post => Tree.Lexable;
+   --  Create empty Tree.Shared_Stream, add SOI node containing
+   --  Tree.Lexer.Begin_Pos values to it.
+
+   procedure Start_Parse
      (Tree   : in out Syntax_Trees.Tree;
-      Parent : in     Valid_Node_Index;
-      Child  : in     Valid_Node_Index)
-   with
-     Pre => Tree.Flushed and Tree.Parents_Set and (not Tree.Traversing) and
-            Tree.Is_Nonterm (Parent);
-   --  Sets Child.Parent.
+      Stream : in     Stream_ID;
+      State  : in     State_Index)
+   with Pre => (not Tree.Traversing and Stream /= Tree.Shared_Stream and Tree.Is_Valid (Stream)) and then
+               Tree.Stream_Length (Stream) = 0;
+   --  State must be the parser start state; it is stored in Tree.SOI.
+   --  Sets Tree.Parents_Set False.
+
+   procedure Start_Edit (Tree : in out Syntax_Trees.Tree)
+   with Pre => Tree.Editable,
+     Post => Tree.Parseable;
+   --  Construct Tree.Shared_Stream from Tree.SOI, Tree.Root, Tree.EOI.
+   --
+   --  On return, Tree is ready for Parse.Edit_Tree.
+   --
+   --  Parents_Set remains True. Parse.Edit_Tree calls Breakdown on
+   --  Shared_Stream Elements, which removes some parent links. However,
+   --  the remaining stream elements all have complete parent links; the
+   --  links removed point to nodes that are not accessible from the
+   --  shared stream.
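   --  Editorial sketch, not part of this commit: based on the pre/post
   --  conditions above, the assumed order of operations for a batch parse
   --  is roughly as follows; Lex_All, First_State and the shift/reduce
   --  loop stand in for code elsewhere in WisiToken:
   --
   --     Tree.Start_Lex;                               --  Cleared => Lexable
   --     Lex_All (Tree);                               --  fill Tree.Shared_Stream
   --     Stream := Tree.New_Stream (Invalid_Stream_ID);
   --     Tree.Start_Parse (Stream, First_State);
   --     ...                                           --  Shift/Reduce until accept
   --
   --  Start_Edit is the corresponding entry point for incremental parse,
   --  turning an Editable tree back into a Parseable one.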
+
+   function Reduce
+     (Tree             : in out Syntax_Trees.Tree;
+      Stream           : in     Stream_ID;
+      Production       : in     WisiToken.Production_ID;
+      Child_Count      : in     Ada.Containers.Count_Type;
+      State            : in     State_Index;
+      Recover_Conflict : in     Boolean)
+     return Rooted_Ref
+   with Pre => not Tree.Traversing and not Tree.Parents_Set and Tree.Is_Valid (Stream) and Stream /= Tree.Shared_Stream,
+     Post => Reduce'Result.Stream = Stream and Tree.Valid_Stream_Node (Reduce'Result);
+   --  Reduce Child_Count tokens on Stream top of stack to a new Nonterm
+   --  node on Stream top of stack. Result points to the new Nonterm
+   --  node.
+   --
+   --  Set Result byte_region, char_region, line, column,
+   --  first_terminal to min/max of children.
+   --
+   --  See comment in 'type Node' below for Recover_Conflict.
 
-   function Child_Index
+   procedure Shift
      (Tree   : in out Syntax_Trees.Tree;
-      Parent : in     Valid_Node_Index;
-      Child  : in     Valid_Node_Index)
-     return SAL.Peek_Type
-   with Pre => Tree.Has_Child (Parent, Child);
+      Stream : in     Stream_ID;
+      State  : in     State_Index);
+   --  Shift Stream current input token onto Stream stack. Then set State
+   --  in the Stream element.
+   --
+   --  Does _not_ clear shifted node Error_List; this shift may be part
+   --  of error recovery.
 
-   procedure Replace_Child
-     (Tree                 : in out Syntax_Trees.Tree;
-      Parent               : in     Valid_Node_Index;
-      Child_Index          : in     SAL.Peek_Type;
-      Old_Child            : in     Valid_Node_Index;
-      New_Child            : in     Valid_Node_Index;
-      Old_Child_New_Parent : in     Node_Index := Invalid_Node_Index)
-   with
-     Pre => Tree.Flushed and Tree.Parents_Set and (not Tree.Traversing) and
-            (Tree.Is_Nonterm (Parent) and then
-             (Tree.Child (Parent, Child_Index) = Old_Child and
-              (Old_Child = Deleted_Child or else
-               Tree.Parent (Old_Child) = Parent)));
-   --  In Parent.Children, replace child at Child_Index with New_Child.
-   --  Unless Old_Child is Deleted_Child, set Old_Child.Parent to
-   --  Old_Child_New_Parent (may be Invalid_Node_Index). Unless New_Child
-   --  is Deleted_Child, set New_Child.Parent to Parent.
+   function Pop
+     (Tree      : in out Syntax_Trees.Tree;
+      Stream    : in     Stream_ID)
+     return Valid_Node_Access
+   with Pre => not Tree.Traversing and Tree.Is_Valid (Stream);
+   --  Delete Stream stack top, returning its node.
+
+   procedure Push
+     (Tree   : in out Syntax_Trees.Tree;
+      Stream : in     Stream_ID;
+      Node   : in     Valid_Node_Access;
+      State  : in     State_Index)
+   with Pre => not Tree.Traversing and Tree.Is_Valid (Stream);
+   --  State, Node become Stream stack top.
+
+   procedure Push_Back
+     (Tree   : in out Syntax_Trees.Tree;
+      Stream : in     Stream_ID);
+   --  Move Stream.Stack_Top to Stream input.
+
+   procedure Move_Shared_To_Input
+     (Tree   : in out Syntax_Trees.Tree;
+      Stream : in     Stream_ID);
+   --  Insert Stream.Shared_Link into Parse_Stream input, increment
+   --  Stream.Shared_Link.
    --
-   --  If Old_Child is Deleted_Child, Old_Child_New_Parent should be left
-   --  to default.
+   --  This is often needed before Left_Breakdown while parsing.
 
-   procedure Set_Children
-     (Tree     : in out Syntax_Trees.Tree;
-      Node     : in     Valid_Node_Index;
-      New_ID   : in     WisiToken.Production_ID;
-      Children : in     Valid_Node_Index_Array)
-   with
-     Pre => Tree.Flushed and Tree.Parents_Set and (not Tree.Traversing) and
-            Tree.Is_Nonterm (Node) and
-            (for all C of Children => C /= Deleted_Child);
-   --  If parents of current Node.Children are not Invalid_Node_Index,
-   --  set corresponding entry in those parents to Deleted_Child, then
-   --  set Parent to Invalid_Node_Index.
+   procedure Move_Shared_To_Input
+     (Tree   : in out Syntax_Trees.Tree;
+      First  : in     Stream_Node_Ref;
+      Last   : in     Stream_Node_Ref;
+      Stream : in     Stream_ID)
+   with Pre => Valid_Stream_Node (Tree, First) and Valid_Stream_Node (Tree, Last) and
+               First.Stream = Tree.Shared_Stream and Last.Stream = Tree.Shared_Stream and
+               Stream /= Tree.Shared_Stream;
+   --  Insert Shared_Stream elements First .. Last into Stream input.
+   --  Update Parse_Stream.Shared_Link to next Shared_Stream element
+   --  after Last.
+
+   procedure Breakdown
+     (Tree           : in out Syntax_Trees.Tree;
+      Ref            : in out Stream_Node_Parents;
+      Productions    : in     Production_Info_Trees.Vector;
+      User_Data      : in     Syntax_Trees.User_Data_Access_Constant;
+      First_Terminal : in     Boolean)
+   with Pre => Valid_Stream_Node (Tree, Ref.Ref) and Parents_Valid (Ref) and
+               Tree.Label (Ref.Ref.Element) = Nonterm and
+               (if First_Terminal
+                then not Tree.Is_Empty_Nonterm (Ref.Ref.Node)
+                else Get_Node (Ref.Ref.Element) /= Invalid_Node_Access) and
+               Tree.Stack_Top (Ref.Ref.Stream) /= Ref.Ref.Element,
+     Post => Parents_Valid (Ref) and
+             (if First_Terminal
+              then Tree.Valid_Terminal (Ref.Ref)
+              else Valid_Stream_Node (Tree, Ref.Ref))
+             and Ref.Ref.Node =
+             (if First_Terminal
+              then Tree.First_Terminal (Get_Node (Ref.Ref.Element))
+              else Get_Node (Ref.Ref.Element));
+   --  Bring descendants of Ref.Element to the parse stream. If
+   --  First_Terminal, stop when First_Terminal of one of the parse
+   --  stream elements = Ref.Node; otherwise stop when one of the element
+   --  nodes = Ref.Node. Ref.Element is updated to the terminating
+   --  element.
    --
-   --  Then set ID of Node to New_ID, and Node.Children to Children; set
-   --  parents of Children to Node.
+   --  If Ref.Parents nonterms contain any errors, the errors are moved to
+   --  the first terminal of that nonterm, copying ancestor nodes, and
+   --  Ref.Parents is updated to match. If Ref.Node is one of those first
+   --  terminals, it will be copied.
    --
-   --  If New_ID /= Tree.Production_ID (Node), Node.Action is set
-   --  to null, because the old Action probably no longer applies.
-
-   procedure Delete_Parent
-     (Tree : in out Syntax_Trees.Tree;
-      Node : in     Valid_Node_Index)
-   with
-     Pre => Tree.Flushed and Tree.Parents_Set and (not Tree.Traversing) and
-            Tree.Parent (Node) /= Invalid_Node_Index;
-   --  Set child in Node.Parent to Deleted_Child. If Node.Parent =
-   --  Tree.Root, set Tree.Root to Node. Set Node.Parent to
-   --  Invalid_Node_Index.
+   --  If one of the nodes brought to the parse stream is an optimized
+   --  list, the list is split at the immediate ancestor of Ref.Node.
+   --
+   --  The stack top is unchanged.
+   --
+   --  Parent links are set to Invalid_Node_Access as appropriate, but
+   --  child links are not, since Breakdown is called from main parse,
+   --  where nodes are shared.
+
+   procedure Breakdown
+     (Tree           : in out Syntax_Trees.Tree;
+      Ref            : in out Stream_Node_Ref;
+      Productions    : in     Production_Info_Trees.Vector;
+      User_Data      : in     User_Data_Access_Constant;
+      First_Terminal : in     Boolean)
+   with Pre => Valid_Stream_Node (Tree, Ref) and Tree.Label (Ref.Element) = Nonterm and
+               (if First_Terminal
+                then not Tree.Is_Empty_Nonterm (Ref.Node)
+                else Get_Node (Ref.Element) /= Invalid_Node_Access) and
+               Tree.Stack_Top (Ref.Stream) /= Ref.Element,
+     Post => Valid_Stream_Node (Tree, Ref);
+   --  Same as Breakdown (Stream_Node_Parents), but also works when
+   --  Tree.Parents_Set is False, using an internal Stream_Node_Parents.
+
+   procedure Left_Breakdown
+     (Tree      : in out Syntax_Trees.Tree;
+      Ref       : in out Stream_Node_Ref;
+      User_Data : in     User_Data_Access_Constant)
+   with Pre =>
+     Valid_Stream_Node (Tree, Ref) and then
+     (Tree.Label (Ref.Element) = Nonterm and
+        Tree.First_Terminal (Ref).Node /= Invalid_Node_Access and
+        Tree.Stack_Top (Ref.Stream) /= Ref.Element),
+     Post => Valid_Single_Terminal (Tree, Ref);
+   --  Similar to Breakdown; bring first terminal of Ref.Element to
+   --  the stream Ref.Stream. Ref.Node is ignored on input.
+
+   function State (Tree : in Syntax_Trees.Tree; Stream : in Stream_ID) return State_Index
+   with Pre => Tree.Is_Valid (Stream);
+   --  Return State from Stream.Stack_Top.
+
+   function State
+     (Tree    : in Syntax_Trees.Tree;
+      Stream  : in Stream_ID;
+      Element : in Stream_Index)
+     return Unknown_State_Index
+   with Pre => Tree.Contains (Stream, Element);
+   --  If Element is in input, state may be Unknown_State.
+
+   function Stream_First
+     (Tree     : in Syntax_Trees.Tree;
+      Stream   : in Stream_ID;
+      Skip_SOI : in Boolean)
+     return Stream_Index
+   with Pre => Tree.Is_Valid (Stream);
+
+   function Stream_First
+     (Tree     : in Syntax_Trees.Tree;
+      Stream   : in Stream_ID;
+      Skip_SOI : in Boolean)
+     return Rooted_Ref
+   with Pre => Tree.Is_Valid (Stream);
+
+   function Stream_Last
+     (Tree     : in Syntax_Trees.Tree;
+      Stream   : in Stream_ID;
+      Skip_EOI : in Boolean)
+     return Stream_Index
+   with Pre => Tree.Is_Valid (Stream);
+
+   function Stream_Last
+     (Tree     : in Syntax_Trees.Tree;
+      Stream   : in Stream_ID;
+      Skip_EOI : in Boolean)
+     return Rooted_Ref
+   with Pre => Tree.Is_Valid (Stream);
+
+   function Stack_Top
+     (Tree   : in Syntax_Trees.Tree;
+      Stream : in Stream_ID)
+     return Stream_Index
+   with Pre => Tree.Is_Valid (Stream);
+
+   function Has_Input
+     (Tree   : in Syntax_Trees.Tree;
+      Stream : in Stream_ID)
+     return Boolean
+   with Pre => Stream /= Tree.Shared_Stream and Tree.Is_Valid (Stream);
+   --  Return True if there is a stream element after Stack_Top.
+
+   function First_Input
+     (Tree   : in Syntax_Trees.Tree;
+      Stream : in Stream_ID)
+     return Rooted_Ref
+   with Pre => Tree.Has_Input (Stream),
+     Post => Correct_Stream_Node (Tree, First_Input'Result);
+   --  Return first stream element after Stack_Top.
+
+   function Current_Token
+     (Tree   : in Syntax_Trees.Tree;
+      Stream : in Stream_ID)
+     return Rooted_Ref
+   with Post => Correct_Stream_Node (Tree, Current_Token'Result);
+   --  If Stream has input, then first input of Stream; otherwise
+   --  Stream.Shared_Link.
+
+   function Shared_Token
+     (Tree   : in Syntax_Trees.Tree;
+      Stream : in Stream_ID)
+     return Rooted_Ref
+   with Post => Correct_Stream_Node (Tree, Shared_Token'Result);
+   --  Stream.Shared_Link.
+
+   procedure Delete_Current_Token
+     (Tree   : in out Syntax_Trees.Tree;
+      Stream : in     Stream_ID);
+   --  User must call Add_Deleted with Deleted_Ref => Tree.Current_Input
+   --  before calling Delete_Current_Token.
+
+   function Stream_Next
+     (Tree    : in Syntax_Trees.Tree;
+      Stream  : in Stream_ID;
+      Element : in Stream_Index)
+     return Stream_Index
+   with Pre => Element = Invalid_Stream_Index or else
+               (Tree.Contains (Stream, Element) or Tree.Contains (Tree.Shared_Stream, Element));
+   --  If Element is Invalid_Stream_Index, result is Stream_First (= SOI).
+
+   function Stream_Next
+     (Tree : in Syntax_Trees.Tree;
+      Ref  : in Stream_Node_Ref)
+     return Rooted_Ref
+   with Pre => Valid_Stream_Node (Tree, Ref),
+     Post => Correct_Stream_Node (Tree, Stream_Next'Result);
+   --  Return stream element after Ref.Element.
+
+   procedure Stream_Next
+     (Tree   : in     Syntax_Trees.Tree;
+      Ref    : in out Stream_Node_Ref;
+      Rooted : in     Boolean)
+   with Pre => Valid_Stream_Node (Tree, Ref),
+     Post => Correct_Stream_Node (Tree, Ref) and then
+             (Ref = Invalid_Stream_Node_Ref or else
+                (if Rooted
+                 then Tree.Get_Node (Ref.Stream, Ref.Element) = Ref.Node
+                 else Tree.First_Terminal (Tree.Get_Node (Ref.Stream, Ref.Element)) = Ref.Node));
+   --  Update Ref to root or first terminal of next stream element after
+   --  Ref.Element. Follows Shared_Link.
+
+   procedure Stream_Next
+     (Tree   : in     Syntax_Trees.Tree;
+      Ref    : in out Stream_Node_Parents;
+      Rooted : in     Boolean)
+   with Pre => Correct_Stream_Node (Tree, Ref.Ref) and Parents_Valid (Ref),
+     Post => Correct_Stream_Node (Tree, Ref.Ref) and Parents_Valid (Ref) and
+             (Ref.Ref = Invalid_Stream_Node_Ref or else
+                (if Rooted
+                 then Tree.Get_Node (Ref.Ref.Stream, Ref.Ref.Element) = Ref.Ref.Node
+                 else Tree.First_Terminal (Tree.Get_Node (Ref.Ref.Stream, Ref.Ref.Element)) = Ref.Ref.Node));
+   --  Update Ref to root or first terminal of next stream element after
+   --  Ref.Element.
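   --  Editorial sketch, not part of this commit: walking the elements of a
   --  stream with Stream_First/Stream_Next (both declared in this package).
   --  Process is hypothetical, and Stream_Next is assumed to return
   --  Invalid_Stream_Index after the last element:
   --
   --     declare
   --        Element : Stream_Index := Tree.Stream_First (Stream, Skip_SOI => True);
   --     begin
   --        while Element /= Invalid_Stream_Index loop
   --           Process (Tree.Get_Node (Stream, Element));
   --           Element := Tree.Stream_Next (Stream, Element);
   --        end loop;
   --     end;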
+
+   function Stream_Prev
+     (Tree    : in Syntax_Trees.Tree;
+      Stream  : in Stream_ID;
+      Element : in Stream_Index)
+     return Stream_Index
+   with Pre => Tree.Contains (Stream, Element);
+
+   function Stream_Prev
+     (Tree : in Syntax_Trees.Tree;
+      Ref  : in Stream_Node_Ref)
+     return Rooted_Ref
+   with Pre => Valid_Stream_Node (Tree, Ref),
+     Post => Correct_Stream_Node (Tree, Stream_Prev'Result);
+   --  Return stream element before Ref.Element.
+
+   procedure Stream_Prev
+     (Tree   : in     Syntax_Trees.Tree;
+      Ref    : in out Stream_Node_Ref;
+      Rooted : in     Boolean := True)
+   with Pre => Valid_Stream_Node (Tree, Ref),
+     Post => Correct_Stream_Node (Tree, Ref) and
+             (Ref = Invalid_Stream_Node_Ref or else
+                (if Rooted
+                 then Tree.Get_Node (Ref.Stream, Ref.Element) = Ref.Node
+                 else Tree.Last_Terminal (Tree.Get_Node (Ref.Stream, Ref.Element)) = Ref.Node));
+   --  Update Ref to root or last terminal of stream element before Ref.Element.
+
+   procedure Stream_Prev
+     (Tree   : in     Syntax_Trees.Tree;
+      Ref    : in out Stream_Node_Parents;
+      Rooted : in     Boolean)
+   with Pre => Valid_Stream_Node (Tree, Ref.Ref),
+     Post => Correct_Stream_Node (Tree, Ref.Ref) and
+             (Ref.Ref = Invalid_Stream_Node_Ref or else
+                (if Rooted
+                 then Tree.Get_Node (Ref.Ref.Stream, Ref.Ref.Element) = Ref.Ref.Node
+                 else Tree.Last_Terminal (Tree.Get_Node (Ref.Ref.Stream, Ref.Ref.Element)) = Ref.Ref.Node));
+
+   procedure Stream_Insert
+     (Tree   : in out Syntax_Trees.Tree;
+      Stream : in     Stream_ID;
+      Node   : in     Valid_Node_Access;
+      Before : in     Stream_Index)
+   with Pre => Tree.Contains (Stream, Before);
+   --  Insert a new stream element on Stream containing Node, before
+   --  Before. Clears Node.Parent.
+
+   function Stream_Insert
+     (Tree   : in out Syntax_Trees.Tree;
+      Stream : in     Stream_ID;
+      Node   : in     Valid_Node_Access;
+      Before : in     Stream_Index)
+     return Stream_Node_Ref
+   with Pre => Tree.Contains (Stream, Before);
+   --  Insert a new stream element on Stream containing Node, before Before.
+   --  Result references new element.
+
+   function Peek
+     (Tree   : in Syntax_Trees.Tree;
+      Stream : in Stream_ID;
+      Count  : in SAL.Peek_Type := 1)
+     return Stream_Index
+   with Pre => Tree.Is_Valid (Stream);
+   --  Return the Count'th element on the stack in Stream; Count = 1
+   --  returns the stack top.
 
-   procedure Set_Node_Identifier
-     (Tree       : in Syntax_Trees.Tree;
-      Node       : in Valid_Node_Index;
-      ID         : in Token_ID;
-      Identifier : in Identifier_Index)
-   with Pre => Tree.Flushed and Tree.Parents_Set and (not Tree.Traversing) and
-               Tree.Is_Nonterm (Node);
-   --  Set parents of current Node.Children to Invalid_Node_Index.
-   --  Then change Node to a Virtual_Identifier.
+   function Add_Terminal
+     (Tree     : in out Syntax_Trees.Tree;
+      Stream   : in     Stream_ID;
+      Terminal : in     Lexer.Token;
+      Errors   : in     Error_Data_Lists.List)
+     return Single_Terminal_Ref
+   with Pre => not Tree.Traversing and Stream = Tree.Shared_Stream,
+     Post => Tree.Label (Add_Terminal'Result.Node) = Source_Terminal;
+   --  Append a new Source_Terminal element to Stream. Result points to the added
+   --  node.
+
+   type Token_Array_Var_Ref (Element : not null access WisiToken.Lexer.Token_Arrays.Vector) is private
+   with Implicit_Dereference => Element;
+
+   type Token_Array_Const_Ref (Element : not null access constant WisiToken.Lexer.Token_Arrays.Vector) is private
+   with Implicit_Dereference => Element;
+
+   function Has_Non_Grammar
+     (Tree     : in Syntax_Trees.Tree;
+      Terminal : in Valid_Node_Access)
+     return Boolean
+   with Pre => Tree.Label (Terminal) in Terminal_Label;
 
-   procedure Set_State
-     (Tree  : in out Syntax_Trees.Tree;
-      Node  : in     Valid_Node_Index;
-      State : in     State_Index);
+   function Non_Grammar_Var
+     (Tree     : in Syntax_Trees.Tree;
+      Terminal : in Valid_Node_Access)
+     return Token_Array_Var_Ref
+   with Pre => Tree.Label (Terminal) in Terminal_Label;
 
-   function State (Tree : in Syntax_Trees.Tree; Node : in Valid_Node_Index) return Unknown_State_Index;
+   function Non_Grammar_Const
+     (Tree     : in Syntax_Trees.Tree;
+      Terminal : in Valid_Node_Access)
+     return Token_Array_Const_Ref
+   with Pre => Tree.Label (Terminal) in Terminal_Label;
 
-   function Label (Tree : in Syntax_Trees.Tree; Node : in Valid_Node_Index) return Node_Label;
+   procedure Insert_Source_Terminal
+     (Tree     : in out Syntax_Trees.Tree;
+      Stream   : in     Stream_ID;
+      Terminal : in     Lexer.Token;
+      Before   : in     Stream_Index;
+      Errors   : in     Error_Data_Lists.List)
+   with Pre => not Tree.Traversing and (Before = Invalid_Stream_Index or else Tree.Contains (Stream, Before));
+   --  Insert a new Source_Terminal element on Stream, before Before.
+
+   function Insert_Source_Terminal
+     (Tree     : in out Syntax_Trees.Tree;
+      Stream   : in     Stream_ID;
+      Terminal : in     Lexer.Token;
+      Before   : in     Stream_Index;
+      Errors   : in     Error_Data_Lists.List)
+     return Single_Terminal_Ref
+   with Pre => not Tree.Traversing and (Before = Invalid_Stream_Index or else Tree.Contains (Stream, Before)),
+     Post => Tree.Label (Insert_Source_Terminal'Result.Node) = Source_Terminal;
+   --  Insert a new Source_Terminal element on Stream, before Before.
+   --  Result points to the added element.
+
+   function Insert_Virtual_Terminal
+     (Tree     : in out Syntax_Trees.Tree;
+      Stream   : in     Stream_ID;
+      Terminal : in     Token_ID)
+     return Single_Terminal_Ref
+   with
+     Pre  => not Tree.Traversing and Tree.Is_Valid (Stream) and Stream /= Tree.Shared_Stream,
+     Post => Tree.Label (Insert_Virtual_Terminal'Result.Node) = Virtual_Terminal;
+   --  Insert a new Virtual_Terminal element into Stream, after
+   --  Stack_Top. Result refers to the added node.
+
+   procedure Shift
+     (Tree             : in     Syntax_Trees.Tree;
+      Node             : in     Valid_Node_Access;
+      Shift_Bytes      : in     Base_Buffer_Pos;
+      Shift_Chars      : in     Base_Buffer_Pos;
+      Shift_Lines      : in     Base_Line_Number_Type;
+      Last_Stable_Byte : in     Base_Buffer_Pos;
+      Non_Grammar_Next : in out Lexer.Token_Arrays.Extended_Index)
+   with Pre => Tree.Label (Node) in Terminal_Label;
+   --  Add Shift_* to the corresponding regions of the token, its
+   --  non_grammar, and its augmented data. If a non_grammar token is
+   --  adjacent to or after Last_Stable_Byte, set Non_Grammar_Next to it
+   --  without shifting it, and skip the rest of the non_grammar.
+
+   procedure Set_Node_Index
+     (Tree       : in Syntax_Trees.Tree;
+      Node       : in Valid_Node_Access;
+      Node_Index : in Syntax_Trees.Node_Index);
 
-   function Child_Count (Tree : in Syntax_Trees.Tree; Node : in Valid_Node_Index) return Ada.Containers.Count_Type
-   with Pre => Tree.Is_Nonterm (Node);
+   procedure Stream_Delete
+     (Tree    : in out Syntax_Trees.Tree;
+      Stream  : in     Stream_ID;
+      Element : in out Stream_Index)
+   with
+     Pre  => Tree.Contains (Stream, Element),
+     Post => Element = Invalid_Stream_Index;
+   --  Delete Element from Stream. If Element = Stream.Stack_Top,
+   --  Stack_Top is set to Invalid_Stream_Index.
 
-   function Children (Tree : in Syntax_Trees.Tree; Node : in Valid_Node_Index) return Valid_Node_Index_Array
+   function ID
+     (Tree    : in Syntax_Trees.Tree;
+      Stream  : in Stream_ID;
+      Element : in Stream_Index)
+     return Token_ID
+   with Pre => Tree.Contains (Stream, Element) or Tree.Contains (Tree.Shared_Stream, Element);
+   --  The precondition allows either stream; Current_Token is
+   --  either a Source_Terminal from Shared_Stream or a Virtual_Terminal
+   --  in Stream input from error recovery; in incremental parse, it
+   --  could be a Source_Terminal in Stream input from push_back.
+
+   function Label (Tree : in Syntax_Trees.Tree; Node : in Valid_Node_Access) return Node_Label;
+   function Label (Tree : in Syntax_Trees.Tree; Element : in Stream_Index) return Node_Label
+   with Pre => Element /= Invalid_Stream_Index;
+
+   function Child_Count (Tree : in Syntax_Trees.Tree; Node : in Valid_Node_Access) return SAL.Base_Peek_Type;
+
+   function Children (Tree : in Syntax_Trees.Tree; Node : in Valid_Node_Access) return Node_Access_Array
    with Pre => Tree.Is_Nonterm (Node);
-   --  Any children that were cleared by Add_Nonterm are returned as
-   --  Deleted_Child.
+   --  Any children that were deleted by tree editing are returned as
+   --  Invalid_Node_Access.
 
    function Child
      (Tree        : in Syntax_Trees.Tree;
-      Node        : in Valid_Node_Index;
+      Node        : in Valid_Node_Access;
       Child_Index : in Positive_Index_Type)
-     return Node_Index
+     return Node_Access
    with Pre => Tree.Is_Nonterm (Node);
 
-   function Has_Branched_Nodes (Tree : in Syntax_Trees.Tree) return Boolean;
-   function Has_Children (Tree : in Syntax_Trees.Tree; Node : in Valid_Node_Index) return Boolean
+   function Has_Children (Tree : in Syntax_Trees.Tree; Node : in Valid_Node_Access) return Boolean
    with Pre => Tree.Is_Nonterm (Node);
+
    function Has_Child
      (Tree  : in Syntax_Trees.Tree;
-      Node  : in Valid_Node_Index;
-      Child : in Valid_Node_Index)
+      Node  : in Valid_Node_Access;
+      Child : in Valid_Node_Access)
      return Boolean
    with Pre => Tree.Is_Nonterm (Node);
-   function Has_Parent (Tree : in Syntax_Trees.Tree; Child : in Valid_Node_Index) return Boolean;
-   function Has_Parent (Tree : in Syntax_Trees.Tree; Children : in Valid_Node_Index_Array) return Boolean;
 
-   function Buffer_Region_Is_Empty (Tree : in Syntax_Trees.Tree; Node : in Valid_Node_Index) return Boolean;
-   --  True if contained buffer region is empty; always the case for
-   --  virtual tokens, and for most copied tokens. Use Has_Children or
-   --  Child_Count to see if Node has children.
+   function Has_Parent (Tree : in Syntax_Trees.Tree; Child : in Valid_Node_Access) return Boolean;
 
-   function Is_Nonterm (Tree : in Syntax_Trees.Tree; Node : in Valid_Node_Index) return Boolean;
-   function Is_Shared_Terminal (Tree : in Syntax_Trees.Tree; Node : in Valid_Node_Index) return Boolean;
-   function Is_Virtual_Terminal (Tree : in Syntax_Trees.Tree; Node : in Valid_Node_Index) return Boolean;
+   function Is_Nonterm (Tree : in Syntax_Trees.Tree; Node : in Valid_Node_Access) return Boolean;
+   function Is_Source_Terminal (Tree : in Syntax_Trees.Tree; Node : in Valid_Node_Access) return Boolean;
+   function Is_Virtual_Terminal (Tree : in Syntax_Trees.Tree; Node : in Valid_Node_Access) return Boolean;
+   function Is_Terminal (Tree : in Syntax_Trees.Tree; Node : in Valid_Node_Access) return Boolean;
 
-   function Is_Virtual (Tree : in Syntax_Trees.Tree; Node : in Valid_Node_Index) return Boolean;
-   --  Virtual_Terminal, Virtual_Identifier, or Nonterm that contains some Virtual tokens.
+   function Is_Empty_Nonterm (Tree : in Syntax_Trees.Tree; Node : in Valid_Node_Access) return Boolean;
+   --  True if Node contains no terminals.
 
-   function Is_Virtual_Identifier (Tree : in Syntax_Trees.Tree; Node : in Valid_Node_Index) return Boolean;
-   function Traversing (Tree : in Syntax_Trees.Tree) return Boolean;
+   function Is_Empty_Or_Virtual_Nonterm
+     (Tree : in Syntax_Trees.Tree;
+      Node : in Valid_Node_Access)
+     return Boolean;
+   --  True if Node contains no terminals, or all terminals are virtual,
+   --  and thus have an empty Buffer_Region for Byte_ and Char_Region.
 
-   function Parents_Set (Tree : in Syntax_Trees.Tree) return Boolean;
-   procedure Set_Parents (Tree : in out Syntax_Trees.Tree)
-   with Pre => Tree.Flushed and Tree.Root /= Invalid_Node_Index;
+   function Is_Virtual_Identifier (Tree : in Syntax_Trees.Tree; Node : in Valid_Node_Access) return Boolean;
+   function Recover_Conflict (Tree : in Syntax_Trees.Tree; Node : in Valid_Node_Access) return Boolean
+   with Pre => Tree.Label (Node) = Nonterm;
 
-   function Parent
-     (Tree  : in Syntax_Trees.Tree;
-      Node  : in Valid_Node_Index;
-      Count : in Positive := 1)
-     return Node_Index
-   with Pre => Tree.Parents_Set;
-   --  Return Count parent of Node.
+   function Traversing (Tree : in Syntax_Trees.Tree) return Boolean;
 
-   procedure Set_Name_Region
-     (Tree   : in out Syntax_Trees.Tree;
-      Node   : in     Valid_Node_Index;
-      Region : in     Buffer_Region)
+   procedure Set_Insert_Location
+     (Tree            : in Syntax_Trees.Tree;
+      Node            : in Valid_Node_Access;
+      Insert_Location : in WisiToken.Insert_Location)
+   with Pre => Tree.Is_Virtual_Terminal (Node);
+
+   procedure Set_Name
+     (Tree   : in Syntax_Trees.Tree;
+      Node   : in Valid_Node_Access;
+      Region : in Buffer_Region)
    with Pre => Tree.Is_Nonterm (Node);
 
    function ID
      (Tree : in Syntax_Trees.Tree;
-      Node : in Valid_Node_Index)
+      Node : in Valid_Node_Access)
+     return WisiToken.Token_ID;
+
+   function ID
+     (Tree : in Syntax_Trees.Tree;
+      Ref  : in Stream_Node_Ref)
      return WisiToken.Token_ID;
+   --  One of Ref.Node.ID, Ref.Element.Node.ID, Invalid_Token_ID
 
    function Production_ID
      (Tree : in Syntax_Trees.Tree;
-      Node : in Valid_Node_Index)
+      Node : in Valid_Node_Access)
      return WisiToken.Production_ID
    with Pre => Tree.Is_Nonterm (Node);
 
+   function Byte_Region (Tree : in Syntax_Trees.Tree; Index : in Stream_Index) return WisiToken.Buffer_Region
+   with Pre => Index /= Invalid_Stream_Index;
+
    function Byte_Region
-     (Tree : in Syntax_Trees.Tree;
-      Node : in Valid_Node_Index)
+     (Tree                 : in Syntax_Trees.Tree;
+      Node                 : in Valid_Node_Access;
+      Trailing_Non_Grammar : in Boolean)
+     return WisiToken.Buffer_Region;
+   --  If Trailing_Non_Grammar, any non_grammar attached to last terminal
+   --  in Node is included in region.
+   --
+   --  If Tree.Parents_Set:
+   --
+   --  Byte_Region of Virtual_Terminal is an empty region with .First
+   --  determined by Insert_Location, using previous or next
+   --  source_terminal or non_grammar.
+   --
+   --  Byte_Region of an empty nonterm with Trailing_Non_Grammar False is
+   --  an empty region; .First gives nominal location in source text,
+   --  using previous or next source_terminal or non_grammar.
+   --
+   --  If not Tree.Parents_Set, does as much of the above as possible,
+   --  returning Null_Buffer_Region when the computation would need Parents_Set.
+
+   function Byte_Region
+     (Tree                 : in Syntax_Trees.Tree;
+      Ref                  : in Stream_Node_Ref;
+      Trailing_Non_Grammar : in Boolean := False)
+     return WisiToken.Buffer_Region
+   with Pre => Tree.Parents_Set and Valid_Stream_Node (Tree, Ref);
+   --  Return Byte_Region of Ref.Node, using stream to find prev, next
+   --  non_grammar if needed.
+
+   function Byte_Region
+     (Tree                 : in Syntax_Trees.Tree;
+      Ref                  : in Stream_Node_Parents;
+      Parse_Stream         : in Stream_ID;
+      Trailing_Non_Grammar : in Boolean := False)
+     return WisiToken.Buffer_Region
+   with Pre => Valid_Stream_Node (Tree, Ref.Ref) and Parents_Valid (Ref);
+   --  Same as Byte_Region (Stream_Node_Ref), for use when parents are
+   --  not set. See Prev_Terminal (tree, stream_node_parents) for meaning
+   --  of Parse_Stream.
+
+   function Name (Tree : in Syntax_Trees.Tree; Node : in Valid_Node_Access) return Buffer_Region;
+   --  If Node.Label in Terminal_Label, return Node.Byte_Region; else if
+   --  Node.Name is not Null_Buffer_Region, return Node.Name; else return
+   --  Node.Byte_Region.
+
+   function Name (Tree : in Syntax_Trees.Tree; Ref : in Stream_Node_Ref) return Buffer_Region
+   with Pre => Valid_Stream_Node (Tree, Ref);
+   --  Call Name with Ref.Element.Node.
+
+   function Char_Region
+     (Tree                 : in Syntax_Trees.Tree;
+      Node                 : in Valid_Node_Access;
+      Trailing_Non_Grammar : in Boolean)
      return WisiToken.Buffer_Region;
+   --  Similar to Byte_Region.
+
+   function Line_At_Byte_Pos
+     (Tree     : in Syntax_Trees.Tree;
+      Byte_Pos : in Buffer_Pos)
+     return Base_Line_Number_Type
+   with Pre => Tree.Editable;
+   --  Return the line that contains Byte_Pos; Invalid_Line_Number if
+   --  Byte_Pos is outside the range of text spanned by Tree. If Byte_Pos
+   --  is on a New_Line, the result is the line that the New_Line
+   --  character ends.
+
+   function Line_Region (Tree : in Syntax_Trees.Tree) return WisiToken.Line_Region
+   with Pre => Tree.SOI /= Invalid_Node_Access and Tree.EOI /= Invalid_Node_Access;
+   --  Region spanned by entire tree; from SOI and EOI.
+
+   function Line_Region
+     (Tree                 : in Syntax_Trees.Tree;
+      Node                 : in Valid_Node_Access;
+      Trailing_Non_Grammar : in Boolean)
+     return WisiToken.Line_Region
+   with Pre => Tree.Editable;
+   --  Lines of tokens in Node. First is the line started by the first
+   --  New_Line or SOI (start of input) before the first terminal in
+   --  Node. If Trailing_Non_Grammar, Last is the line ended by the last
+   --  New_Line in the first non_grammar array after the last terminal of
+   --  Node, or EOI (end of input); if not Trailing_Non_Grammar, Last is
+   --  the line ended by the first New_Line or EOI after the last
+   --  terminal of Node.
+   --
+   --  Trailing_Non_Grammar => False is used to get the line_region of a
+   --  multi-line token.
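   --  Editorial sketch, not part of this commit: counting the lines spanned
   --  by a node, assuming an editable tree and that WisiToken.Line_Region
   --  has First and Last components:
   --
   --     Lines : constant WisiToken.Line_Region :=
   --       Tree.Line_Region (Node, Trailing_Non_Grammar => False);
   --     Count : constant Integer := Integer (Lines.Last - Lines.First) + 1;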
+
+   function Line_Region
+     (Tree                 : in Syntax_Trees.Tree;
+      Ref                  : in Stream_Node_Ref;
+      Trailing_Non_Grammar : in Boolean)
+     return WisiToken.Line_Region
+   with Pre => Tree.Valid_Stream_Node (Ref) and
+               (Tree.Parents_Set or else
+                  Rooted (Ref) or else
+                  Ref.Node = Tree.First_Terminal (Get_Node (Ref.Element)));
+   --  Same as Line_Region (Ref.Node), using Ref.Stream to find
+   --  prev/next non_grammar.
+   --
+   --  If not Tree.Parents_Set, constructs a Stream_Node_Parents
+   --  internally.
+
+   function Byte_Region_Of_Line_Region
+     (Tree : in Syntax_Trees.Tree;
+      Ref  : in Stream_Node_Ref)
+     return WisiToken.Buffer_Region
+   with Pre => Tree.Valid_Stream_Node (Ref) and Tree.Parents_Set;
+
+   function Line_Region
+     (Tree                 : in Syntax_Trees.Tree;
+      Ref                  : in Stream_Node_Parents;
+      Parse_Stream         : in Stream_ID;
+      Trailing_Non_Grammar : in Boolean := True)
+     return WisiToken.Line_Region
+   with Pre => Tree.Valid_Stream_Node (Ref.Ref) and Parents_Valid (Ref);
+   --  Same as Line_Region (Stream_Node_Ref), for use when parents are
+   --  not set. See Prev_Terminal (tree, stream_node_parents) for meaning
+   --  of Parse_Stream.
+
+   function Line_Region
+     (Tree   : in Syntax_Trees.Tree;
+      Stream : in Stream_ID;
+      Ref    : in Real_Recover_Token)
+     return WisiToken.Line_Region
+   with Pre => Ref.Element_Node = Ref.Node or Ref.Node = Tree.First_Terminal (Ref.Element_Node);
+   --  Constructs a Stream_Node_Parents from Stream, Ref. Assumes
+   --  Trailing_Non_Grammar => True. For use in error recovery.
+
+   function Column (Tree : in Syntax_Trees.Tree; Node : in Valid_Node_Access) return Ada.Text_IO.Count
+   with Pre => Tree.Editable and Tree.Subtree_Root (Node) = Tree.Root;
+   --  Column of first char of Node; offset from first character on line,
+   --  origin 0 (WisiToken and Emacs standard). If Node is empty or
+   --  Virtual, result is 0.
+
+   function Column
+     (Tree   : in Syntax_Trees.Tree;
+      Node   : in Valid_Node_Access;
+      Stream : in Stream_ID)
+     return Ada.Text_IO.Count;
+   --  Same as Column, but Node must be in Stream or Shared_Stream.
 
    function RHS_Index
      (Tree : in Syntax_Trees.Tree;
-      Node : in Valid_Node_Index)
+      Node : in Valid_Node_Access)
      return Natural
    with Pre => Tree.Is_Nonterm (Node);
 
-   function Same_Token
-     (Tree_1  : in Syntax_Trees.Tree'Class;
-      Index_1 : in Valid_Node_Index;
-      Tree_2  : in Syntax_Trees.Tree'Class;
-      Index_2 : in Valid_Node_Index)
-     return Boolean;
-   --  True if the two tokens have the same ID and Byte_Region.
+   function Get_Recover_Token
+     (Tree : in Syntax_Trees.Tree;
+      Ref  : in Stream_Node_Ref)
+     return Recover_Token;
 
-   function Recover_Token
+   function Get_Recover_Token
      (Tree : in Syntax_Trees.Tree;
-      Node : in Valid_Node_Index)
-     return WisiToken.Recover_Token;
+      Node : in Valid_Node_Access)
+     return Recover_Token;
+   --  Treat Node as a stream element.
 
-   function Recover_Token_Array
-     (Tree  : in Syntax_Trees.Tree;
-      Nodes : in Valid_Node_Index_Array)
-     return WisiToken.Recover_Token_Array;
+   function Children_Recover_Tokens
+     (Tree    : in Syntax_Trees.Tree;
+      Stream  : in Stream_ID;
+      Element : in Stream_Index)
+     return Recover_Token_Array
+   with Pre => Tree.Contains (Stream, Element) and Tree.Label (Element) = Nonterm;
 
    procedure Set_Augmented
-     (Tree  : in out Syntax_Trees.Tree;
-      Node  : in     Valid_Node_Index;
-      Value : in     Base_Token_Class_Access);
+     (Tree  : in Syntax_Trees.Tree;
+      Node  : in Valid_Node_Access;
+      Value : in Augmented_Class_Access);
    --  Value will be deallocated when Tree is finalized.
 
    function Augmented
      (Tree : in Syntax_Trees.Tree;
-      Node : in Valid_Node_Index)
-     return Base_Token_Class_Access;
+      Node : in Valid_Node_Access)
+     return Augmented_Class_Access;
    --  Returns result of Set_Augmented.
 
    function Augmented_Const
      (Tree : in Syntax_Trees.Tree;
-      Node : in Valid_Node_Index)
-     return Base_Token_Class_Access_Constant;
+      Node : in Valid_Node_Access)
+     return Augmented_Class_Access_Constant;
 
-   function Action
-     (Tree : in Syntax_Trees.Tree;
-      Node : in Valid_Node_Index)
-     return Semantic_Action
-   with Pre => Tree.Is_Nonterm (Node);
+   procedure Clear_Augmented (Tree : in Syntax_Trees.Tree);
+   --  Free all Augmented in Tree.
 
    function Find_Ancestor
      (Tree       : in Syntax_Trees.Tree;
-      Node       : in Valid_Node_Index;
+      Node       : in Valid_Node_Access;
       ID         : in Token_ID;
       Max_Parent : in Boolean := False)
-     return Node_Index
+     return Node_Access
    with Pre => Tree.Parents_Set;
    function Find_Ancestor
      (Tree       : in Syntax_Trees.Tree;
-      Node       : in Valid_Node_Index;
+      Node       : in Valid_Node_Access;
       IDs        : in Token_ID_Array;
       Max_Parent : in Boolean := False)
-     return Node_Index
+     return Node_Access
    with Pre => Tree.Parents_Set;
-   --  Return the ancestor of Node that contains ID (starting search with
-   --  Node.Parent), or Invalid_Node_Index if none match.
+   --  Return the ancestor of Node that contains one of IDs (starting
+   --  search with Node.Parent), or Invalid_Node_Access if none match.
    --
    --  If Max_Parent, return max parent found if none match; this will be
-   --  Invalid_Node_Index if Node has no parent.
+   --  Invalid_Node_Access if Node has no parent.
 
    function Find_Sibling
      (Tree : in Syntax_Trees.Tree;
-      Node : in Valid_Node_Index;
+      Node : in Valid_Node_Access;
       ID   : in Token_ID)
-     return Node_Index
-   with Pre => Tree.Parents_Set and then Tree.Has_Parent (Node);
-   --  Return the sibling of Node that contains ID, or Invalid_Node_Index if
+     return Node_Access
+   with Pre => Tree.Parents_Set and Tree.Has_Parent (Node);
+   --  Return the sibling of Node that contains ID, or Invalid_Node_Access if
    --  none match.
 
    function Find_Child
      (Tree : in Syntax_Trees.Tree;
-      Node : in Valid_Node_Index;
+      Node : in Valid_Node_Access;
       ID   : in Token_ID)
-     return Node_Index
+     return Node_Access
    with Pre => Tree.Is_Nonterm (Node);
-   --  Return the child of Node whose ID is ID, or Invalid_Node_Index if
+   --  Return the child of Node whose ID is ID, or Invalid_Node_Access if
    --  none match.
 
    function Find_Descendant
      (Tree : in Syntax_Trees.Tree;
-      Node : in Valid_Node_Index;
+      Node : in Valid_Node_Access;
       ID   : in Token_ID)
-     return Node_Index;
+     return Node_Access;
    --  Return the descendant of Node (may be Node) whose ID is ID, or
-   --  Invalid_Node_Index if none match.
+   --  Invalid_Node_Access if none match.
 
    function Find_Descendant
      (Tree      : in     Syntax_Trees.Tree;
-      Node      : in     Valid_Node_Index;
-      Predicate : access function (Tree : in Syntax_Trees.Tree; Node : in Valid_Node_Index) return Boolean)
-     return Node_Index;
-   --  Return the descendant of Node (may be Node) for which Predicate
-   --  returns True, or Invalid_Node_Index if none do.
+      Node      : in     Valid_Node_Access;
+      Predicate : access function (Tree : in Syntax_Trees.Tree; Node : in Valid_Node_Access) return Boolean)
+     return Node_Access;
+   --  Return the first descendant of Node (may be Node; breadth first
+   --  search) for which Predicate returns True, or Invalid_Node_Access
+   --  if none do.
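   --  Editorial sketch, not part of this commit: using the predicate form
   --  of Find_Descendant to locate the first virtual terminal under a
   --  (hypothetical) node Some_Node:
   --
   --     function Is_Virtual
   --       (Tree : in Syntax_Trees.Tree;
   --        Node : in Valid_Node_Access)
   --       return Boolean
   --     is (Tree.Is_Virtual_Terminal (Node));
   --
   --     Virtual : constant Node_Access :=
   --       Tree.Find_Descendant (Some_Node, Is_Virtual'Access);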
 
    function Is_Descendant_Of
      (Tree       : in Syntax_Trees.Tree;
-      Root       : in Valid_Node_Index;
-      Descendant : in Valid_Node_Index)
+      Root       : in Valid_Node_Access;
+      Descendant : in Valid_Node_Access)
      return Boolean
-   with Pre => Tree.Parents_Set and Tree.Is_Nonterm (Root);
-
-   procedure Set_Root (Tree : in out Syntax_Trees.Tree; Root : in Valid_Node_Index);
-
-   function Root (Tree : in Syntax_Trees.Tree) return Node_Index;
-   --  Return value set by Set_Root.
-   --  returns Invalid_Node_Index if Tree is empty.
+   with Pre => Tree.Parents_Set;
 
-   function Sub_Tree_Root (Tree : in Syntax_Trees.Tree; Node : in Valid_Node_Index) return Valid_Node_Index
+   function Subtree_Root (Tree : in Syntax_Trees.Tree; Node : in Valid_Node_Access) return Valid_Node_Access
    with Pre => Tree.Parents_Set;
    --  Return top ancestor of Node.
 
@@ -542,203 +1591,1572 @@ package WisiToken.Syntax_Trees is
      (Tree         : in out Syntax_Trees.Tree;
       Process_Node : access procedure
         (Tree : in out Syntax_Trees.Tree;
-         Node : in     Valid_Node_Index);
-      Root         : in     Node_Index := Invalid_Node_Index)
-   with Pre => Root /= Invalid_Node_Index or Tree.Root /= Invalid_Node_Index;
-   --  Traverse subtree of Tree rooted at Root (default Tree.Root) in
-   --  depth-first order, calling Process_Node on each node.
-
-   function Identifier (Tree : in Syntax_Trees.Tree; Node : in Valid_Node_Index) return Base_Identifier_Index
+         Node : in     Valid_Node_Access);
+      Root         : in     Node_Access := Invalid_Node_Access)
+   with Pre => Root /= Invalid_Node_Access or else Tree.Root /= Invalid_Node_Access;
+   --  Traverse subtree of Tree rooted at Root (default single remaining
+   --  stream element) in depth-first order, calling Process_Node on each
+   --  node.
+
+   function Identifier (Tree : in Syntax_Trees.Tree; Node : in Valid_Node_Access) return Base_Identifier_Index
    with Pre => Tree.Is_Virtual_Identifier (Node);
 
-   function Terminal (Tree : in Syntax_Trees.Tree; Node : in Valid_Node_Index) return Base_Token_Index
-   with Pre => Tree.Is_Shared_Terminal (Node);
+   function Prev_Non_Grammar
+     (Tree : in Syntax_Trees.Tree;
+      Node : in Valid_Node_Access)
+     return Node_Access
+   with Pre => Tree.Parents_Set;
+   --  Return first node before Node that has a non-empty Non_Grammar.
+   --  If Node = Tree.Root or Tree.SOI, return Tree.SOI.
+   --
+   --  Returns Invalid_Node_Access only in broken trees; we tolerate this
+   --  here so we can use this in Error_Message.
 
-   function First_Shared_Terminal (Tree : in Syntax_Trees.Tree; Node : in Valid_Node_Index) return Base_Token_Index;
-   --  Returns first shared terminal in subtree under Node
-   --  (ignoring virtual terminals). If result is Invalid_Token_Index,
-   --  all terminals are virtual, or a nonterm is empty.
+   procedure Prev_Non_Grammar
+     (Tree : in     Syntax_Trees.Tree;
+      Ref  : in out Stream_Node_Ref);
 
-   function Last_Shared_Terminal (Tree : in Syntax_Trees.Tree; Node : in Valid_Node_Index) return Base_Token_Index;
-   --  Returns last shared terminal in subtree under Node (ignoring
-   --  virtual terminals). If result is Invalid_Token_Index, all
-   --  terminals are virtual, or a nonterm is empty.
+   procedure Prev_Non_Grammar
+     (Tree         : in     Syntax_Trees.Tree;
+      Ref          : in out Stream_Node_Parents;
+      Parse_Stream : in     Stream_ID)
+   with Pre => Tree.Valid_Stream_Node (Ref.Ref) and Parents_Valid (Ref),
+     Post => Tree.Correct_Stream_Node (Ref.Ref) and Parents_Valid (Ref);
 
-   function Get_Terminals (Tree : in Syntax_Trees.Tree; Node : in Valid_Node_Index) return Valid_Node_Index_Array;
-   --  Return sequence of terminals in Node.
-   --
-   --  "Terminals" can be Shared_Terminal, Virtual_Terminal,
-   --  Virtual_Identifier.
+   function First_Non_Grammar
+     (Tree : in Syntax_Trees.Tree;
+      Node : in Valid_Node_Access)
+     return Node_Access;
+   --  Return first node in subtree under Node that has a non-empty
+   --  Non_Grammar.
+   --  Invalid_Node_Access if none.
 
-   function First_Terminal (Tree : in Syntax_Trees.Tree; Node : in Valid_Node_Index) return Node_Index;
-   --  First of Get_Terminals. Invalid_Node_Index if Node is an empty nonterminal.
+   function Last_Non_Grammar
+     (Tree : in Syntax_Trees.Tree;
+      Node : in Valid_Node_Access)
+     return Node_Access;
+   --  Return last node in subtree under Node that has a non-empty
+   --  Non_Grammar.
+   --  Invalid_Node_Access if none.
 
-   function Last_Terminal (Tree : in Syntax_Trees.Tree; Node : in Valid_Node_Index) return Node_Index;
-   --  Last of Get_Terminals. Invalid_Node_Index if Node is an empty nonterminal.
+   function Next_Non_Grammar
+     (Tree : in Syntax_Trees.Tree;
+      Node : in Valid_Node_Access)
+     return Valid_Node_Access
+   with Pre => Tree.Parents_Set;
+   --  Return first node after Last_Terminal (Node) that has a non-empty
+   --  Non_Grammar. If Node = Tree.Root or Tree.EOI, return Tree.EOI.
+
+   procedure Next_Non_Grammar
+     (Tree : in     Syntax_Trees.Tree;
+      Ref  : in out Stream_Node_Parents)
+   with Pre => Valid_Stream_Node (Tree, Ref.Ref) and Parents_Valid (Ref),
+     Post => Tree.Correct_Stream_Node (Ref.Ref) and Parents_Valid (Ref);
+
+   type New_Line_Ref is record
+      Node : Node_Access;
+      --  Node is Invalid_Node_Access, or Node.Label is in Source_Terminal |
+      --  Virtual_Terminal.
+
+      Non_Grammar_Index : SAL.Base_Peek_Type := 0;
+      --  If Node is Invalid_Node_Access, then there is no new_line. If
+      --  Non_Grammar_Index is 0, Node is a Source_Terminal containing at
+      --  least one new_line. Otherwise Non_Grammar_Index is the index in
+      --  Node.Non_Grammar containing a new_line.
+
+      First : Boolean := True;
+      --  If First, the new_line is the first in the token or
+      --  non_grammar; it was found by Next_New_Line. If not First, the
+      --  new_line is the last; it was found by Prev_New_Line.
+
+      Pos : Base_Buffer_Pos := Invalid_Buffer_Pos;
+      --  The buffer position of the new_line.
+
+      Line : Base_Line_Number_Type := Invalid_Line_Number;
+      --  The line number after the new_line.
+   end record;
 
-   function Prev_Terminal (Tree : in Syntax_Trees.Tree; Node : in Valid_Node_Index) return Node_Index
-   with Pre => Tree.Parents_Set and Tree.Label (Node) in Shared_Terminal | Virtual_Terminal | Virtual_Identifier;
-   --  Return the terminal that is immediately before Node in Tree;
-   --  Invalid_Node_Index if Node is the first terminal in Tree.
+   Invalid_New_Line_Ref : constant New_Line_Ref := (others => <>);
 
-   function Next_Terminal (Tree : in Syntax_Trees.Tree; Node : in Valid_Node_Index) return Node_Index
-   with Pre => Tree.Parents_Set and Tree.Label (Node) in Shared_Terminal | Virtual_Terminal | Virtual_Identifier;
-   --  Return the terminal that is immediately after Node in Tree;
-   --  Invalid_Node_Index if Node is the last terminal in Tree.
+   function Prev_New_Line
+     (Tree       : in Syntax_Trees.Tree;
+      Node       : in Valid_Node_Access;
+      Start_Line : in Base_Line_Number_Type := Invalid_Line_Number)
+     return New_Line_Ref
+   with Pre => Tree.Parents_Set;
+   --  If Node is SOI, returns reference to SOI.Non_Grammar (1).
+   --  Otherwise, return a reference to the first New_Line preceding
+   --  First_Terminal (Node).Byte_Region.First.
+   --
+   --  If Start_Line is not Invalid_Line_Number, it must be the line
+   --  number at the beginning of Node. If Start_Line is
+   --  Invalid_Line_Number, first searches towards SOI for a non_grammar
+   --  giving a line number, computes Start_Line, and continues as
+   --  above.
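   --  Editorial sketch, not part of this commit: interpreting the
   --  New_Line_Ref returned by Prev_New_Line, following the component
   --  comments above:
   --
   --     NL : constant New_Line_Ref := Tree.Prev_New_Line (Node);
   --     ...
   --     if NL.Node = Invalid_Node_Access then
   --        null;  --  no new_line found
   --     elsif NL.Non_Grammar_Index = 0 then
   --        null;  --  new_line is inside a multi-line Source_Terminal, at NL.Pos
   --     else
   --        null;  --  new_line is in NL.Node.Non_Grammar (NL.Non_Grammar_Index)
   --     end if;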
+
+   function First_Source_Terminal
+     (Tree                 : in Syntax_Trees.Tree;
+      Node                 : in Valid_Node_Access;
+      Trailing_Non_Grammar : in Boolean;
+      Following            : in Boolean)
+     return Node_Access
+   with Pre => (if Following then Tree.Parents_Set else True);
+   --  Return a terminal node that can give byte or char pos.
+   --
+   --  If Trailing_Non_Grammar, return first terminal in Node that is a
+   --  Source_Terminal, or a virtual terminal with non-empty non_grammar.
+   --  If not Trailing_Non_Grammar, only return a Source_Terminal.
+   --
+   --  If Following, return a matching terminal following Node if none
+   --  found in Node.
+
+   function Next_Source_Terminal
+     (Tree                 : in Syntax_Trees.Tree;
+      Node                 : in Valid_Node_Access;
+      Trailing_Non_Grammar : in Boolean)
+     return Node_Access
+   with Pre => Tree.Parents_Set;
+   --  Return the next terminal node after Node that can give byte or
+   --  char pos; Invalid_Node_Access if there is no such node.
+   --
+   --  If Trailing_Non_Grammar, return the next terminal after Node
+   --  that is a Source_Terminal, or a virtual terminal with non-empty
+   --  non_grammar. If not Trailing_Non_Grammar, only return a
+   --  Source_Terminal.
+
+   procedure Next_Source_Terminal
+     (Tree                 : in     Syntax_Trees.Tree;
+      Ref                  : in out Stream_Node_Ref;
+      Trailing_Non_Grammar : in     Boolean)
+   with Pre => Valid_Stream_Node (Tree, Ref) and Tree.Parents_Set,
+     Post => Tree.Correct_Stream_Node (Ref);
+   --  Update Ref to the next terminal node that can give byte or char
+   --  pos.
+   --
+   --  If Trailing_Non_Grammar, return next terminal after Ref.Node
+   --  that is a Source_Terminal, or a virtual terminal with non-empty
+   --  non_grammar. If not Trailing_Non_Grammar, only return a
+   --  Source_Terminal.
+
+   function Prev_Source_Terminal
+     (Tree                 : in Syntax_Trees.Tree;
+      Ref                  : in Stream_Node_Ref;
+      Trailing_Non_Grammar : in Boolean)
+     return Stream_Node_Ref
+   with Pre => Valid_Stream_Node (Tree, Ref) and Tree.Parents_Set,
+     Post => Tree.Correct_Stream_Node (Prev_Source_Terminal'Result);
+   --  Return the previous terminal node that can give byte or char pos.
+   --
+   --  If Trailing_Non_Grammar, return prev terminal before Ref.Node
+   --  that is a Source_Terminal, or a virtual terminal with non-empty
+   --  non_grammar. If not Trailing_Non_Grammar, only return a
+   --  Source_Terminal.
+
+   procedure Prev_Source_Terminal
+     (Tree                 : in     Syntax_Trees.Tree;
+      Ref                  : in out Stream_Node_Parents;
+      Parse_Stream         : in     Stream_ID;
+      Trailing_Non_Grammar : in     Boolean)
+   with Pre => Valid_Stream_Node (Tree, Ref.Ref) and not Tree.Parents_Set,
+     Post   => Tree.Correct_Stream_Node (Ref.Ref);
+
+   function Count_Terminals (Tree : in Syntax_Trees.Tree; Node : in 
Valid_Node_Access) return Natural;
+
+   function Get_Terminals (Tree : in Syntax_Trees.Tree; Node : in 
Valid_Node_Access) return Valid_Node_Access_Array
+   with Post => (for all Node of Get_Terminals'Result => Tree.Label (Node) in 
Terminal_Label);
+   --  Return sequence of terminals in Node.
 
-   function Get_Terminal_IDs (Tree : in Syntax_Trees.Tree; Node : in 
Valid_Node_Index) return Token_ID_Array;
+   function Get_Terminal_IDs (Tree : in Syntax_Trees.Tree; Node : in 
Valid_Node_Access) return Token_ID_Array;
    --  Same as Get_Terminals, but return the IDs.
 
-   function First_Terminal_ID (Tree : in Syntax_Trees.Tree; Node : in 
Valid_Node_Index) return Token_ID;
-   --  First of Get_Terminal_IDs; Invalid_Token_ID if Node is empty.
+   function First_Terminal (Tree : in Syntax_Trees.Tree; Node : in 
Valid_Node_Access) return Node_Access;
+   --  First of Get_Terminals. Invalid_Node_Access if Node is an empty 
nonterminal.
+
+   function First_Terminal
+     (Tree    : in     Syntax_Trees.Tree;
+      Node    : in     Valid_Node_Access;
+      Parents : in out Node_Stacks.Stack)
+     return Node_Access;
+   --  Same as First_Terminal (Tree, Node), also initializes Parents to
+   --  store path from Node to the first terminal, for Next_Terminal in
+   --  nodes that have unset parent links, or to limit Next_Terminal to
+   --  descendants of Node.
+   --
+   --  We don't have "Pre => Parents.Is_Empty" or "Post => Parents_Valid
+   --  (Parents, First_Terminal'Result)", because we call this function
+   --  recursively to create Parents.
+   --
+   --  Visible for use with error recovery Configuration input stream.
+
+   procedure First_Terminal
+     (Tree : in     Syntax_Trees.Tree;
+      Ref  : in out Stream_Node_Ref)
+   with Pre => Valid_Stream_Node (Tree, Ref),
+     Post => Valid_Stream_Node (Tree, Ref);
+   --  Update Ref to first terminal of Ref.Element.Node or a following
+   --  stream element. Continues search in Shared_Stream at
+   --  Stream.Shared_Link; will always find EOI.
+
+   function First_Terminal
+     (Tree : in Syntax_Trees.Tree;
+      Ref  : in Stream_Node_Ref)
+     return Terminal_Ref
+   with Pre => Valid_Stream_Node (Tree, Ref),
+     Post => Valid_Stream_Node (Tree, First_Terminal'Result);
+   --  Return first terminal in Ref.Element.Node or a following stream element.
+   --  Continues search in Shared_Stream; will always find EOI, so never
+   --  Invalid_Stream_Element.
+   --
+   --  Use First_Terminal_In_Node to not look in following stream
+   --  elements.
+
+   function First_Terminal_In_Node
+     (Tree : in Syntax_Trees.Tree;
+      Ref  : in Stream_Node_Ref)
+     return Terminal_Ref
+   with Pre => Valid_Stream_Node (Tree, Ref);
+   --  Return first terminal in Ref.Node; Invalid_Node_Access if none.
+
+   procedure First_Terminal
+     (Tree      : in     Syntax_Trees.Tree;
+      Ref       : in out Stream_Node_Parents;
+      Following : in     Boolean)
+   with Pre => Valid_Stream_Node (Tree, Ref.Ref) and Parents_Valid (Ref),
+     Post => Parents_Valid (Ref);
+   --  Update Ref to first terminal in Ref.Ref.Node or, if Following, a
+   --  following stream element - continues search in Shared_Stream.
+
+   function First_Terminal
+     (Tree : in Syntax_Trees.Tree;
+      Ref  : in Rooted_Ref)
+     return Stream_Node_Parents
+   with Pre => Valid_Stream_Node (Tree, Ref),
+     Post => Valid_Stream_Node (Tree, First_Terminal'Result.Ref) and
+             Label (First_Terminal'Result.Ref.Node) in Terminal_Label and
+             Parents_Valid (First_Terminal'Result);
+   --  Return first terminal in Ref.Node or a following element.
+
+   function Last_Terminal (Tree : in Syntax_Trees.Tree; Node : in 
Valid_Node_Access) return Node_Access;
+   --  Last terminal in subtree under Node. Invalid_Node_Access if none.
+
+   function Last_Terminal
+     (Tree    : in     Syntax_Trees.Tree;
+      Node    : in     Valid_Node_Access;
+      Parents : in out Node_Stacks.Stack)
+     return Node_Access;
+   --  Same as Last_Terminal, also initializes Parents.
+   --
+   --  We don't have "Pre => Parents.Is_Empty" or "Post => Parents_Valid
+   --  (Parents, Last_Terminal'Result)", because we call this function
+   --  recursively to build Parents.
+   --
+   --  Visible for use with error recovery Configuration input stream.
+
+   procedure Last_Terminal
+     (Tree         : in     Syntax_Trees.Tree;
+      Ref          : in out Stream_Node_Parents;
+      Parse_Stream : in     Stream_ID;
+      Preceding    : in     Boolean)
+   with Pre => Valid_Stream_Node (Tree, Ref.Ref),
+     Post => Parents_Valid (Ref);
+   --  Update Ref to last terminal of Ref.Ref.Element.Node or, if
+   --  Preceding, preceding element.
+
+   procedure Prev_Terminal (Tree : in Syntax_Trees.Tree; Node : in out 
Node_Access)
+   with Pre => Tree.Parents_Set,
+     Post => Node = Invalid_Node_Access or else
+             Tree.Label (Node) in Terminal_Label;
+
+   function Prev_Terminal (Tree : in Syntax_Trees.Tree; Node : in 
Valid_Node_Access) return Node_Access
+   with Pre => Tree.Parents_Set,
+     Post => Prev_Terminal'Result = Invalid_Node_Access or else
+             Tree.Label (Prev_Terminal'Result) in Terminal_Label;
+   --  Return the terminal that is immediately before Node in subtree
+   --  containing Node; Invalid_Node_Access if Node is the first terminal
+   --  in that subtree.
+
+   function Prev_Terminal
+     (Tree : in Syntax_Trees.Tree;
+      Ref  : in Terminal_Ref)
+     return Terminal_Ref
+   with Pre => Tree.Parents_Set and Tree.Valid_Terminal (Ref),
+     Post => Tree.Correct_Stream_Node (Prev_Terminal'Result);
+
+   procedure Prev_Terminal
+     (Tree : in     Syntax_Trees.Tree;
+      Ref  : in out Terminal_Ref)
+   with Pre => Tree.Parents_Set and Tree.Valid_Terminal (Ref),
+     Post => Tree.Correct_Stream_Node (Ref);
+
+   procedure Prev_Terminal
+     (Tree         : in     Syntax_Trees.Tree;
+      Ref          : in out Stream_Node_Parents;
+      Parse_Stream : in     Stream_ID;
+      Preceding    : in     Boolean)
+   with Pre => Correct_Stream_Node (Tree, Ref.Ref) and Parents_Valid (Ref),
+     Post => Tree.Correct_Stream_Node (Ref.Ref) and Parents_Valid (Ref);
+   --  If Parse_Stream is not Invalid_Stream_ID and Ref.Stream is
+   --  Shared_Stream, switches from Shared_Stream to Parse_Stream at
+   --  Parse_Stream.Shared_Link. If not Preceding and there is no
+   --  previous terminal in Ref.Ref.Element, set Ref.Ref.Node to
+   --  Invalid_Node_Access.
+
+   procedure Next_Terminal (Tree : in Syntax_Trees.Tree; Node : in out 
Node_Access)
+   with Pre => Tree.Parents_Set,
+     Post => Node = Invalid_Node_Access or else
+             Tree.Label (Node) in Terminal_Label;
+
+   function Next_Terminal (Tree : in Syntax_Trees.Tree; Node : in 
Valid_Node_Access) return Node_Access
+   with Pre => Tree.Parents_Set,
+     Post => Next_Terminal'Result = Invalid_Node_Access or else
+             Tree.Label (Next_Terminal'Result) in Terminal_Label;
+   --  Return the terminal that is immediately after Node in subtree
+   --  containing Node; Invalid_Node_Access if Node is the last terminal
+   --  in that subtree.
+
+   procedure Next_Terminal
+     (Tree : in     Syntax_Trees.Tree;
+      Ref  : in out Terminal_Ref)
+   with Pre => Tree.Parents_Set and Valid_Terminal (Tree, Ref),
+     Post => Correct_Stream_Node (Tree, Ref);
+   --  Update Ref to the next terminal that is after Ref.Node in Stream.
+   --  Continues search in Shared_Stream; will always find EOI. Result is
+   --  Invalid_Stream_Node_Ref if Ref.Node is EOI.
+
+   procedure Next_Terminal
+     (Tree      : in     Syntax_Trees.Tree;
+      Ref       : in out Stream_Node_Parents;
+      Following : in     Boolean)
+   with Pre => Correct_Stream_Node (Tree, Ref.Ref) and Parents_Valid (Ref),
+     Post => Correct_Stream_Node (Tree, Ref.Ref) and Parents_Valid (Ref);
+   --  Same as Next_Terminal (Tree, Ref), using Ref.Parents for parent
+   --  links. Ref.Parents is initialized by First_Terminal. If not
+   --  Following, do not step out of Ref.Ref.Element; set Ref.Ref.Node to
+   --  Invalid_Node_Access if there is no next terminal in
+   --  Ref.Ref.Element.
+
+   function Next_Terminal
+     (Tree : in Syntax_Trees.Tree;
+      Ref  : in Terminal_Ref)
+     return Terminal_Ref
+   with Pre => Tree.Parents_Set and Valid_Terminal (Tree, Ref),
+     Post => Correct_Stream_Node (Tree, Next_Terminal'Result);
+   --  Same as procedure Next_Terminal, but return result.
+
+   procedure Next_Terminal
+     (Tree    : in     Syntax_Trees.Tree;
+      Node    : in out Node_Access;
+      Parents : in out Node_Stacks.Stack);
+   --  Same as Next_Terminal, using Parents instead of node parent links.
+   --  Parent is initialized by First_Terminal.
+   --
+   --  Visible for use with error recovery Configuration input stream.
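+   --
+   --  An illustrative sketch of the Parents stack pattern (N is a
+   --  hypothetical node whose subtree has no parent links set):
+   --
+   --     declare
+   --        Parents  : Node_Stacks.Stack;
+   --        Terminal : Node_Access := Tree.First_Terminal (N, Parents);
+   --     begin
+   --        while Terminal /= Invalid_Node_Access loop
+   --           --  ... use Terminal ...
+   --           Tree.Next_Terminal (Terminal, Parents);
+   --        end loop;
+   --     end;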
+
+   function Get_Sequential_Index (Tree : in Syntax_Trees.Tree; Node : in 
Node_Access) return Base_Sequential_Index
+   with Pre => Node = Invalid_Node_Access or else Tree.Label (Node) in 
Terminal_Label;
+   --  For convenience, returns Invalid_Sequential_Index if Node =
+   --  Invalid_Node_Access.
+   --
+   --  Not named "Sequential_Index" because that conflicts with the
+   --  subtype.
+
+   procedure Set_Sequential_Index
+     (Tree  : in Syntax_Trees.Tree;
+      Node  : in Valid_Node_Access;
+      Index : in Base_Sequential_Index)
+   with Pre => Tree.Label (Node) in Terminal_Label;
+
+   function First_Sequential_Terminal
+     (Tree : in Syntax_Trees.Tree;
+      Node : in Node_Access)
+     return Node_Access;
+   --  Return first terminal with valid Sequential_Index in Node;
+   --  Invalid_Node_Access if none.
+
+   function First_Sequential_Terminal
+     (Tree    : in     Syntax_Trees.Tree;
+      Node    : in     Node_Access;
+      Parents : in out Node_Stacks.Stack)
+     return Node_Access;
+   --  Same as Tree.First_Sequential_Terminal (Node), also initialize
+   --  Parents.
+
+   procedure First_Sequential_Terminal
+     (Tree      : in     Syntax_Trees.Tree;
+      Node      : in out Node_Access;
+      Parents   : in out Node_Stacks.Stack);
+   --  Update Node to first terminal with valid Sequential_Index in Node;
+   --  Invalid_Node_Access if none. Also initialize Parents.
+
+   procedure First_Sequential_Terminal
+     (Tree      : in     Syntax_Trees.Tree;
+      Ref       : in out Syntax_Trees.Stream_Node_Parents;
+      Following : in     Boolean)
+   with Pre => Valid_Stream_Node (Tree, Ref.Ref) and Parents_Valid (Ref),
+     Post => Correct_Stream_Node (Tree, Ref.Ref) and Parents_Valid (Ref);
+   --  Update Ref to the first terminal with valid Sequential_Index in
+   --  Ref.Node or, if Following, a following stream element; continues
+   --  search in Tree.Shared_Stream. Invalid_Stream_Node_Parents if none
+   --  found.
+
+   procedure First_Sequential_Terminal
+     (Tree : in     Syntax_Trees.Tree;
+      Ref  : in out Stream_Node_Ref)
+   with Pre => Valid_Stream_Node (Tree, Ref) and Tree.Parents_Set,
+     Post => Correct_Stream_Node (Tree, Ref);
+   --  Update Ref to the first terminal with valid Sequential_Index in
+   --  Ref.Node or a following stream element; continues search in
+   --  Tree.Shared_Stream. Invalid_Node_Access if none found.
+
+   function First_Sequential_Terminal
+     (Tree : in Syntax_Trees.Tree;
+      Ref  : in Syntax_Trees.Rooted_Ref)
+     return Terminal_Ref
+   with Pre => Valid_Stream_Node (Tree, Ref),
+     Post => Correct_Stream_Node (Tree, First_Sequential_Terminal'Result);
+   --  Same as First_Sequential_Terminal, but does not require Parents_Set.
+
+   function Last_Sequential_Terminal
+     (Tree    : in     Syntax_Trees.Tree;
+      Node    : in     Node_Access;
+      Parents : in out Node_Stacks.Stack)
+     return Node_Access;
+   --  Return last terminal in Node that has a valid Sequential_Index,
+   --  also initialize Parents.
+
+   function Last_Sequential_Terminal
+     (Tree    : in     Syntax_Trees.Tree;
+      Node    : in     Node_Access)
+     return Node_Access;
+   --  Return last terminal in Node that has a valid Sequential_Index.
+   --  Uses an internal parents stack.
+
+   procedure Last_Sequential_Terminal
+     (Tree         : in     Syntax_Trees.Tree;
+      Ref          : in out Syntax_Trees.Stream_Node_Parents;
+      Parse_Stream : in     Stream_ID;
+      Preceding    : in     Boolean)
+   with Pre => Valid_Stream_Node (Tree, Ref.Ref) and Parents_Valid (Ref),
+     Post => Correct_Stream_Node (Tree, Ref.Ref) and Parents_Valid (Ref);
+   --  Update Ref to last terminal with valid Sequential_Index in
+   --  Ref.Node or, if Preceding, a preceding stream element; if
+   --  Ref.Stream is Tree.Shared_Stream, switches to Parse_Stream at
+   --  Parse_Stream.Shared_Link. Invalid_Node_Access if none found.
+
+   procedure Next_Sequential_Terminal
+     (Tree    : in     Syntax_Trees.Tree;
+      Node    : in out Node_Access;
+      Parents : in out Node_Stacks.Stack)
+   with Pre => Tree.Label (Node) in Terminal_Label;
+   --  Update Node to the first terminal with valid Sequential_Index
+   --  following Node.
+
+   procedure Next_Sequential_Terminal
+     (Tree : in     Syntax_Trees.Tree;
+      Ref  : in out Syntax_Trees.Stream_Node_Ref)
+   with Pre => Valid_Stream_Node (Tree, Ref) and Tree.Parents_Set,
+     Post => Correct_Stream_Node (Tree, Ref);
+
+   procedure Next_Sequential_Terminal
+     (Tree      : in     Syntax_Trees.Tree;
+      Ref       : in out Syntax_Trees.Stream_Node_Parents;
+      Following : in     Boolean)
+   with Pre => Valid_Stream_Node (Tree, Ref.Ref) and Parents_Valid (Ref),
+     Post => Correct_Stream_Node (Tree, Ref.Ref) and Parents_Valid (Ref);
+   --  Update Ref to the first terminal with valid Sequential_Index
+   --  following Ref.Ref.Node. Can step past EOI. If not Following, do
+   --  not step out of Ref.Ref.Element.
+
+   procedure Prev_Sequential_Terminal
+     (Tree      : in     Syntax_Trees.Tree;
+      Node      : in out Node_Access;
+      Parents   : in out Node_Stacks.Stack)
+   with Pre => Tree.Label (Node) in Terminal_Label;
+   --  Update Node to the last terminal with valid Sequential_Index
+   --  preceding Node. Can step past SOI.
+
+   procedure Prev_Sequential_Terminal
+     (Tree         : in     Syntax_Trees.Tree;
+      Ref          : in out Stream_Node_Parents;
+      Parse_Stream : in     Stream_ID;
+      Preceding    : in     Boolean)
+   with Pre => Valid_Stream_Node (Tree, Ref.Ref) and Parents_Valid (Ref),
+     Post => Correct_Stream_Node (Tree, Ref.Ref) and Parents_Valid (Ref);
 
    function Get_IDs
      (Tree : in Syntax_Trees.Tree;
-      Node : in Valid_Node_Index;
+      Node : in Valid_Node_Access;
       ID   : in Token_ID)
-     return Valid_Node_Index_Array;
+     return Valid_Node_Access_Array;
    --  Return all descendants of Node matching ID.
 
+   ----------
+   --  Post-parsing operations; editing the tree. The tree has one or
+   --  zero streams, so these subprograms have no stream argument.
+   --
+   --  Some of these are also used for Packrat parsing, and don't have a
+   --  precondition of Fully_Parsed.
+
+   function Cleared (Tree : in Syntax_Trees.Tree) return Boolean;
+   --  True if there are no streams and no nodes.
+
+   function Lexable (Tree : in Syntax_Trees.Tree) return Boolean;
+   --  True if there is a shared stream that contains only SOI.
+
+   function Parseable (Tree : in Syntax_Trees.Tree) return Boolean;
+   --  True if there are no parse streams and
+   --  Shared_Stream holds a lexed or edited stream.
+
+   function Fully_Parsed (Tree : in Syntax_Trees.Tree) return Boolean;
+   --  True if there is only one parse stream, and it has only two
+   --  elements; SOI with the start state and the tree root (EOI is only
+   --  in the shared stream).
+
+   function Editable (Tree : in Syntax_Trees.Tree) return Boolean;
+   --  True if Clear_Parse_Streams and Set_Parents have been called; the
+   --  remaining tree may be arbitrarily edited.
+
+   function Copy_Subtree
+     (Tree      : in out Syntax_Trees.Tree;
+      Root      : in     Node_Access;
+      User_Data : in     User_Data_Access_Constant)
+     return Node_Access
+   with Pre => Editable (Tree);
+   --  Deep copy (into Tree) subtree of Tree rooted at Root. Return root
+   --  of new subtree; it has no parent.
+   --
+   --  If Root is Invalid_Node_Access, returns Invalid_Node_Access.
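+   --
+   --  For example (a sketch; Old_Sub is a hypothetical node, Data a
+   --  hypothetical User_Data_Access_Constant):
+   --
+   --     New_Sub : constant Node_Access :=
+   --       Tree.Copy_Subtree (Old_Sub, User_Data => Data);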
+
+   procedure Copy_Tree
+     (Source      : in     Tree;
+      Destination :    out Tree;
+      User_Data   : in     User_Data_Access_Constant)
+   with Pre => Editable (Source);
+   --  The subtree at Source.Root is copied. Destination parents are set.
+   --  All references are deep copied; Source may be finalized after this
+   --  operation.
+   --
+   --  All Node_Index values in Destination are reset to be sequential;
+   --  useful for Put_Tree.
+
+   function Copied_Node (Node : in Valid_Node_Access) return Node_Access;
+   --  Only valid during Copy_Tree; Tree must be the original tree.
+   --  Returns the copy of Node in the copied tree.
+
+   procedure Put_Tree
+     (Tree      : in Syntax_Trees.Tree;
+      File_Name : in String);
+   --  Output to File_Name a text representation of Tree that can be read
+   --  by Get_Tree.
+   --
+   --  File_Name must not exist.
+   --
+   --  The representation uses Node_Index to identify each node; it must
+   --  be unique. Non-unique Node_Index is detected, and raises
+   --  Programmer_Error. If Tree has non-unique Node_Index because of
+   --  editing or incremental parse, use Copy_Tree first to normalize
+   --  Node_Index.
+
+   procedure Get_Tree
+     (Tree      : in out Syntax_Trees.Tree;
+      File_Name : in     String);
+   --  Read the output of Put_Tree from File_Name, populate Tree.
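+   --
+   --  An illustrative round trip (a sketch; Parsed, Restored and the
+   --  file name are hypothetical, and the file must not already exist):
+   --
+   --     Parsed.Put_Tree ("tree.text_rep");
+   --     Restored.Get_Tree ("tree.text_rep");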
+
+   procedure Clear_Parse_Streams
+     (Tree       : in out Syntax_Trees.Tree;
+      Keep_Nodes : in     Valid_Node_Access_Lists.List := 
Valid_Node_Access_Lists.Empty_List)
+   with Pre => Tree.Fully_Parsed or Tree.Stream_Count = 1,
+     Post => Tree.Editable;
+   --  If Tree.Root is not set, set it to the root of the single
+   --  remaining parse stream. Delete the parse stream and shared stream.
+   --  Delete all nodes not reachable from the root, and not Tree.SOI,
+   --  Tree.EOI, or in Keep_Nodes. Also call Set_Parents if not
+   --  Tree.Parents_Set.
+   --
+   --  Keep_Nodes should be set to nodes that occur in errors, or are
+   --  deleted by error recovery; they may be referenced by post-parse
+   --  actions.
+   --
+   --  No precondition for Packrat parser.
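+   --
+   --  An illustrative post-parse sequence (a sketch, not a requirement):
+   --
+   --     if Tree.Fully_Parsed then
+   --        Tree.Clear_Parse_Streams;  --  also sets parents if needed
+   --        pragma Assert (Tree.Editable);
+   --     end if;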
+
+   function Parents_Set (Tree : in Syntax_Trees.Tree) return Boolean;
+
+   procedure Set_Parents
+     (Tree   : in out Syntax_Trees.Tree;
+      Stream : in     Stream_ID := Invalid_Stream_ID);
+   --  If Stream is not Invalid_Stream_ID, set parents in all elements of
+   --  Stream. Otherwise, if Tree.Root is set, sets parents in tree
+   --  rooted at Tree.Root.
+   --
+   --  No precondition for packrat.
+
+   function Valid_Root (Tree : in Syntax_Trees.Tree) return Boolean;
+   --  True if Tree has a single root.
+
+   function Root (Tree : in Syntax_Trees.Tree) return Node_Access
+   with Pre => Tree.Valid_Root;
+   --  Tree.Root, or the root in the last parse stream if Tree.Root is
+   --  not set. Can be Invalid_Node_Access if input syntax does not allow
+   --  parsing to succeed.
+
+   procedure Set_Root (Tree : in out Syntax_Trees.Tree; New_Root : in 
Valid_Node_Access)
+   with Pre => Tree.Label (New_Root) = Nonterm and then Tree.Child_Count 
(New_Root) > 0;
+   --  Set Tree.Root to New_Root. If New_Root.Children does not start with
+   --  Tree.SOI, prepend it. If New_Root.Children does not end with
+   --  Tree.EOI, append it.
+   --
+   --  Precondition matches Packrat parser conditions at end of parse.
+
+   function SOI (Tree : in Syntax_Trees.Tree) return Node_Access;
+   --  Return node representing start of input in the shared stream. It
+   --  has non_grammar giving the first line number, and all non_grammar
+   --  before the first grammar node.
+   --
+   --  Note that SOI may be copied in a parse stream, when it has
+   --  Following_Deleted.
+
+   function EOI (Tree : in Syntax_Trees.Tree) return Node_Access;
+   --  Return node representing end of input in the shared stream. It has
+   --  non_grammar giving the last line number. Invalid_Node_Access if it
+   --  has not yet been seen by the lexer.
+   --
+   --  Note that EOI may be copied in a parse stream, when it has an error.
+
+   function Parent
+     (Tree  : in Syntax_Trees.Tree;
+      Node  : in Valid_Node_Access;
+      Count : in Positive := 1)
+     return Node_Access
+   with Pre => Tree.Parents_Set;
+   --  Return the Count'th parent of Node.
+
+   function Find_Byte_Pos
+     (Tree                 : in Syntax_Trees.Tree;
+      Byte_Pos             : in Buffer_Pos;
+      Trailing_Non_Grammar : in Boolean)
+     return Node_Access;
+   --  Return the terminal that contains (including non_grammar if
+   --  Trailing_Non_Grammar) or is first after Byte_Pos.
+   --  Invalid_Node_Access if Byte_Pos is after text spanned by Tree.
+
+   function Find_Byte_Pos
+     (Tree                 : in Syntax_Trees.Tree;
+      Byte_Pos             : in Buffer_Pos;
+      Trailing_Non_Grammar : in Boolean;
+      Start_At             : in Terminal_Ref;
+      Stream               : in Stream_ID := Invalid_Stream_ID)
+     return Terminal_Ref
+   with Pre =>
+     Tree.Parents_Set and
+     (Start_At /= Invalid_Stream_Node_Ref or Stream /= Invalid_Stream_ID);
+   --  Return the terminal that contains (including non_grammar if
+   --  Trailing_Non_Grammar) or is first after Byte_Pos.
+   --  Invalid_Stream_Node_Ref if Byte_Pos is after text spanned by
+   --  Tree.Stream.
+   --
+   --  If Start_At is not Invalid_Stream_Node_Ref, start search there,
+   --  move forward. If Start_At is Invalid_Stream_Node_Ref, start search
+   --  at SOI in Stream.
+
+   function Find_Char_Pos
+     (Tree                 : in Syntax_Trees.Tree;
+      Char_Pos             : in Buffer_Pos;
+      Trailing_Non_Grammar : in Boolean;
+      After                : in Boolean := False)
+     return Node_Access;
+   --  If After, return the first terminal after or containing
+   --  Char_Pos. Otherwise return the terminal containing Char_Pos.
+   --  If Trailing_Non_Grammar, non_grammar is included in token
+   --  char_region. Invalid_Node_Access if none.
+   --
+   --  Adding a "Before" option here would significantly complicate the
+   --  logic, and would only make a difference when Char_Pos is in the
+   --  whitespace immediately before a token; calling code can move
+   --  Char_Pos out of such whitespace if it matters.
+
+   function Find_New_Line
+     (Tree : in Syntax_Trees.Tree;
+      Line : in Line_Number_Type)
+     return Node_Access
+   with Pre => Tree.Editable,
+     Post => Find_New_Line'Result = Invalid_Node_Access or else
+             (Tree.Is_Terminal (Find_New_Line'Result));
+   --  Return the terminal node containing a non_grammar that ends Line -
+   --  1, or the multi-line terminal node that contains the new_line that
+   --  ends Line - 1. Result is Invalid_Node_Access if Line is outside
+   --  range spanned by Tree.
+
+   function Find_New_Line
+     (Tree                : in     Syntax_Trees.Tree;
+      Line                : in     Line_Number_Type;
+      Line_Begin_Char_Pos :    out Buffer_Pos)
+     return Node_Access
+   with Pre => Tree.Editable,
+     Post => Find_New_Line'Result = Invalid_Node_Access or else
+             (Tree.Is_Terminal (Find_New_Line'Result));
+   --  Same as Find_New_Line, also updates Line_Begin_Char_Pos to first
+   --  char pos on Line.
+
+   procedure Next_New_Line
+     (Tree               : in     Syntax_Trees.Tree;
+      Start_Ref          : in     Terminal_Ref;
+      After_Non_Grammar  : in     Positive_Index_Type;
+      Result_Ref         :    out Terminal_Ref;
+      Result_Non_Grammar :    out Positive_Index_Type)
+   with Pre => Tree.Non_Grammar_Const (Start_Ref.Node).Last_Index >= 
After_Non_Grammar,
+     Post => Tree.Non_Grammar_Const (Result_Ref.Node).Last_Index >= 
Result_Non_Grammar;
+   --  Return next New_Line or EOI.
+
+   function Line_Begin_Char_Pos
+     (Tree : in Syntax_Trees.Tree;
+      Line : in Line_Number_Type)
+     return Buffer_Pos
+   with Pre => Tree.Editable;
+   --  First character on Line in text spanned by tree under Tree.Root;
+   --  it may be in no token, or in a grammar or non-grammar token.
+   --  Result is Invalid_Buffer_Pos if Line is not in the text spanned by
+   --  Tree, or if Line is inside a multi-line token.
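+   --
+   --  For example (a sketch; T is a hypothetical Editable tree):
+   --
+   --     Pos : constant Buffer_Pos := T.Line_Begin_Char_Pos (Line => 10);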
+
+   function Line_Begin_Char_Pos
+     (Tree : in Syntax_Trees.Tree;
+      Line : in Line_Number_Type;
+      Stream : in Stream_ID)
+     return Buffer_Pos;
+   --  Same as other Line_Begin_Char_Pos, but searches in Stream instead of
+   --  Tree.Root. If not found there, continues searching input in
+   --  Shared_Stream.
+
+   function Line_Begin_Token
+     (Tree : in Syntax_Trees.Tree;
+      Line : in Line_Number_Type)
+     return Node_Access
+   with Pre => Tree.Parents_Set;
+   --  Return the node under Tree.Root of the first terminal token on
+   --  line Line; Invalid_Node_Access if there are no grammar tokens on
+   --  the line (i.e. only comment or whitespace), or the line is outside
+   --  the text spanned by Tree.
+
+   function Line_Begin_Token
+     (Tree                      : in Syntax_Trees.Tree;
+      Line                      : in Line_Number_Type;
+      Stream                    : in Stream_ID;
+      Following_Source_Terminal : in Boolean)
+     return Node_Access;
+   --  Same as other Line_Begin_Token, but searches in Stream instead of
+   --  Tree.Root. If not found there, continues searching input in
+   --  Shared_Stream.
+   --
+   --  If Following_Source_Terminal, returns next Source_Terminal in
+   --  stream if there are no grammar tokens on Line.
+
+   function Add_Nonterm
+     (Tree          : in out Syntax_Trees.Tree;
+      Production    : in     WisiToken.Production_ID;
+      Children      : in     Valid_Node_Access_Array;
+      Clear_Parents : in     Boolean)
+     return Valid_Node_Access
+   with Pre => not Tree.Traversing and Children'First = 1;
+   --  Add a new Nonterm node (not on any stream), containing
+   --  Children, with no parent. Result points to the added node.
+   --
+   --  If Parents_Set, Children.Parent are set to the new node. If a
+   --  child has a previous parent, then if Clear_Parents, the
+   --  corresponding entry in the parent's Children is set to null; if
+   --  not Clear_Parents and assertions are enabled, Assertion_Error is
+   --  raised.
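+   --
+   --  An illustrative sketch (Prod, Child_1, Child_2 are hypothetical):
+   --
+   --     New_Node : constant Valid_Node_Access := Tree.Add_Nonterm
+   --       (Production    => Prod,  --  a WisiToken.Production_ID
+   --        Children      => (1 => Child_1, 2 => Child_2),
+   --        Clear_Parents => True);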
+
+   function Add_Terminal
+     (Tree     : in out Syntax_Trees.Tree;
+      Terminal : in     Lexer.Token;
+      Errors   : in     Error_Data_Lists.List)
+     return Valid_Node_Access
+   with Pre => not Tree.Traversing and Tree.Editable;
+   --  Add a new Terminal node with no parent, on no stream. Result
+   --  points to the added node.
+
+   function Add_Terminal
+     (Tree       : in out Syntax_Trees.Tree;
+      Terminal   : in     Token_ID)
+     return Valid_Node_Access
+   with Pre => not Tree.Traversing and Tree.Editable;
+   --  Add a new Virtual_Terminal node with no parent, on no stream.
+   --  Result points to the added node.
+
+   procedure Add_Deleted
+     (Tree          : in out Syntax_Trees.Tree;
+      Deleted_Node  : in     Valid_Node_Access;
+      Prev_Terminal : in out Stream_Node_Parents;
+      User_Data     : in     User_Data_Access_Constant)
+   with Pre =>
+     Tree.Label (Deleted_Node) in Terminal_Label and
+     Tree.Valid_Stream_Node (Prev_Terminal.Ref) and
+     Parents_Valid (Prev_Terminal) and
+     Prev_Terminal.Ref.Stream /= Tree.Shared_Stream and
+     Tree.Label (Prev_Terminal.Ref.Node) = Source_Terminal;
+   --  Copy Prev_Terminal.Ref.Node, add Deleted_Node to
+   --  Prev_Terminal.Ref.Node.Following_Deleted. Update Prev_Terminal to
+   --  point to copied node. Move any non_grammar from Deleted_Node to
+   --  Prev_Terminal.Ref.Node.
+   --
+   --  Note that this does _not_ delete Deleted_Node from the input; use
+   --  Delete_Current_Token for that.
+
+   function Has_Following_Deleted
+     (Tree : in Syntax_Trees.Tree;
+      Node : in Valid_Node_Access)
+     return Boolean
+   with Pre => Tree.Label (Node) = Source_Terminal;
+
+   type Valid_Node_Access_List_Var_Ref (List : not null access 
Valid_Node_Access_Lists.List) is private
+   with Implicit_Dereference => List;
+
+   function Following_Deleted
+     (Tree : in out Syntax_Trees.Tree;
+      Node : in     Valid_Node_Access)
+     return Valid_Node_Access_List_Var_Ref
+   with Pre => Tree.Label (Node) = Source_Terminal;
+
+   procedure Delete_Subtree
+     (Tree : in out Syntax_Trees.Tree;
+      Root : in out Node_Access);
+   --  Free all nodes under Root
+   --
+   --  No precondition; called from Finalize.
+
+   function Add_Identifier
+     (Tree        : in out Syntax_Trees.Tree;
+      ID          : in     Token_ID;
+      Identifier  : in     Identifier_Index)
+     return Valid_Node_Access
+   with Pre => not Tree.Traversing and Tree.Editable;
+   --  Add a new Virtual_Identifier node with no parent, on no stream.
+   --  Result points to the added node.
+
+   function Child_Index
+     (Tree   : in Syntax_Trees.Tree;
+      Parent : in Valid_Node_Access;
+      Child  : in Valid_Node_Access)
+     return SAL.Peek_Type
+   with Pre => Tree.Has_Child (Parent, Child);
+
+   procedure Replace_Child
+     (Tree                 : in out Syntax_Trees.Tree;
+      Parent               : in     Valid_Node_Access;
+      Child_Index          : in     SAL.Peek_Type;
+      Old_Child            : in     Node_Access;
+      New_Child            : in     Node_Access;
+      Old_Child_New_Parent : in     Node_Access := Invalid_Node_Access)
+   with
+     Pre => not Tree.Traversing and Tree.Editable and
+            (Tree.Is_Nonterm (Parent) and then
+               (Tree.Child (Parent, Child_Index) = Old_Child and
+                  (Old_Child = Invalid_Node_Access or else
+                     Tree.Parent (Old_Child) = Parent)));
+   --  In Parent.Children, replace child at Child_Index with New_Child.
+   --  Unless Old_Child is Invalid_Node_Access, set Old_Child.Parent to
+   --  Old_Child_New_Parent (may be Invalid_Node_Access). Unless New_Child
+   --  is Invalid_Node_Access, set New_Child.Parent to Parent.
+
+   procedure Set_Children
+     (Tree     : in out Syntax_Trees.Tree;
+      Node     : in out Valid_Node_Access;
+      New_ID   : in     WisiToken.Production_ID;
+      Children : in     Node_Access_Array)
+   with
+     Pre => not Tree.Traversing and Tree.Editable and
+            Tree.Is_Nonterm (Node) and (for all C of Children => C /= 
Invalid_Node_Access);
+   --  If parents of current Node.Children are not Invalid_Node_Access,
+   --  set corresponding entry in those parents to Invalid_Node_Access,
+   --  then set the child parent to Invalid_Node_Access.
+   --
+   --  Then set ID of Node to New_ID, and Node.Children to Children; set
+   --  parents of Children to Node.
+   --
+   --  We use a precondition on Children, rather than
+   --  Valid_Node_Access_Array, so constructs like:
+   --
+   --     Tree.Set_Children (node, new_id, tree.children ())
+   --
+   --  are legal.
+   --
+   --  Node is 'in out' because it must be reallocated if Children'length
+   --  /= Node.Children'length. If it is reallocated,
+   --  Node.Parent.Children is updated; the caller must update any other
+   --  copies of Node.
+
+   procedure Clear_Parent
+     (Tree           : in out Syntax_Trees.Tree;
+      Node           : in     Valid_Node_Access;
+      Clear_Children : in     Boolean)
+   with Pre => not Tree.Traversing;
+   --  If Clear_Children and Node.Parent /= Invalid_Node_Access, set
+   --  child in Node.Parent to Invalid_Node_Access, and if Node.Parent =
+   --  Tree.Root, set Tree.Root to Node. Finally, set Node.Parent to
+   --  Invalid_Node_Access.
+   --
+   --  Clear_Children should be False unless Tree is Editable or Node is
+   --  in Shared_Stream.
+
+   function First_Recover_Conflict (Tree : in Syntax_Trees.Tree) return 
Stream_Node_Ref
+   with
+     Pre  => Tree.Parents_Set,
+     Post => First_Recover_Conflict'Result = Invalid_Stream_Node_Ref or else
+             Tree.Recover_Conflict (First_Recover_Conflict'Result.Node);
+   --  First recover conflict node in Tree; Invalid_Stream_Node_Ref if none.
+
+   procedure First_Recover_Conflict (Tree : in Syntax_Trees.Tree; Ref : in out 
Stream_Node_Ref)
+   with Pre => Tree.Parents_Set;
+   --  First recover conflict node at or after Ref;
+   --  Invalid_Stream_Node_Ref if none.
+
+   ----------
+   --  Accessing parse errors
+
+   function Contains_Error
+     (Tree       : in Syntax_Trees.Tree;
+      Error_Node : in Valid_Node_Access;
+      Data       : in Error_Data'Class)
+     return Boolean;
+   --  True if Error_Node's error list contains an element matching Data.
+
+   procedure Add_Errors
+     (Tree   : in out Syntax_Trees.Tree;
+      Stream : in     Stream_ID;
+      Node   : in     Valid_Node_Access;
+      Errors : in     Error_Data_Lists.List)
+   with Pre => Stream = Tree.Shared_Stream and Tree.Label (Node) = 
Source_Terminal;
+   --  Add Errors to Node; Node is not copied first.
+   --
+   --  This should only be used when lexing new text, Errors are lexer
+   --  errors which occurred while lexing a non_grammar token, and Node is
+   --  the previously lexed grammar token.
+
+   procedure Add_Error_To_Input
+     (Tree      : in out Syntax_Trees.Tree;
+      Stream    : in     Stream_ID;
+      Data      : in     Error_Data'Class;
+      User_Data : in     User_Data_Access_Constant);
+   --  Add Data to the error list of the First_Terminal of the current
+   --  input token. If the current input is in Shared_Stream, copy to
+   --  Stream first.
+
+   procedure Add_Error_To_Stack_Top
+     (Tree      : in out Syntax_Trees.Tree;
+      Stream    : in     Stream_ID;
+      Data      : in     Error_Data'Class;
+      User_Data : in     User_Data_Access_Constant);
+   --  Copy Stream.Stack_Top.Node, add Data to its error list.
+
+   procedure Add_Errors
+     (Tree      : in out Syntax_Trees.Tree;
+      Error_Ref : in out Stream_Node_Parents;
+      Errors    : in     Error_Data_Lists.List;
+      User_Data : in     User_Data_Access_Constant)
+   with Pre => Parents_Valid (Error_Ref) and
+     (for all Err of Errors => not Tree.Contains_Error (Error_Ref.Ref.Node, 
Err));
+   --  Copy Error_Ref.Node and parents, add Errors to its error list.
+   --  Update Error_Ref to point to copied node.
+
+   type Error_Predicate is access function (Cur : in Error_Data_Lists.Cursor) 
return Boolean;
+
+   procedure Delete_Errors_In_Input
+     (Tree      : in out Syntax_Trees.Tree;
+      Stream    : in     Stream_ID;
+      Predicate : in     Error_Predicate;
+      User_Data : in     User_Data_Access_Constant);
+   --  Delete errors in Current_Token where Predicate returns True.
+   --
+   --  If Current_Token is a nonterm, deletes errors from the entire
+   --  subtree.
+
+   function Input_Has_Matching_Error
+     (Tree   : in Syntax_Trees.Tree;
+      Stream : in Stream_ID;
+      Data   : in Error_Data'Class)
+     return Boolean;
+   --  Return True if Data matches (according to Dispatching_Equal) an
+   --  error on the current input node.
+
+   function Error_List (Tree : in Syntax_Trees.Tree; Node : in 
Valid_Node_Access) return Error_Data_List_Const_Ref;
+   --  To change the error data, use Update_Error; see note at
+   --  declaration of Error_Data.
+   --
+   --  Returns an empty list if Has_Error (Node) is false, so users can
+   --  just use 'for Err of Tree.Error_List (Node) loop'
+
+   type Error_Ref is private;
+   --  Used when tree is fully parsed.
+
+   Invalid_Error_Ref : constant Error_Ref;
+
+   procedure Delete_Error
+     (Tree  : in out Syntax_Trees.Tree;
+      Error : in out Error_Ref)
+   with Pre => Tree.Parents_Set and Has_Error (Error);
+   --  Delete Error from its containing node. Error is updated to next
+   --  error (Invalid_Error_Ref if none).
+
+   type Stream_Error_Ref is private;
+   --  Used while parsing
+
+   Invalid_Stream_Error_Ref : constant Stream_Error_Ref;
+
+   type Error_Node_Features is record
+      Label     : Node_Label       := Node_Label'First;
+      --  Also determines which predicate applies.
+
+      Seq_Index : Base_Sequential_Index := Invalid_Sequential_Index;
+      --  of First_Terminal
+
+      Terminal_Predicate  : Error_Predicate := null;
+      --  Either Parse_Error or Error_Message.
+
+      Deleted : Boolean := False;
+      --  Node is deleted by an error recover op; Label must be in 
Terminal_Label.
+
+      Prev_Term_Seq_Index : Base_Sequential_Index := Invalid_Sequential_Index;
+      --  When Deleted; sequential index of terminal containing error node
+      --  in Following_Deleted.
+   end record;
+
+   function Current_Error_Ref
+     (Tree                : in Syntax_Trees.Tree;
+      Stream              : in Stream_ID;
+      Terminal_Predicate  : in Error_Predicate;
+      Nonterm_Predicate   : in Error_Predicate;
+      Error_Node_Features : in Syntax_Trees.Error_Node_Features := (others => 
<>))
+     return Stream_Error_Ref
+   with Post => Has_Error (Current_Error_Ref'Result);
+   --  Return error matching Predicate from Stream input or stack.
+   --
+   --  If Error_Node_Features.Seq_Index is Invalid_Sequential_Index, looks
+   --  only in First_Terminal (Current_Token (Stream)) with
+   --  Terminal_Predicate and Peek (Stream) with Nonterm_Predicate.
+   --
+   --  Otherwise, Error_Node_Features gives information needed to find
+   --  the error node in Stream input and stack; error recover may have
+   --  pushed elements before the error node in the input, or after the
+   --  error node on the stack, and the error node may have been deleted.
+
+   procedure Delete_Error
+     (Tree  : in out Syntax_Trees.Tree;
+      Error : in out Stream_Error_Ref)
+   with Pre => Tree.Parents_Set and Has_Error (Error);
+   --  Delete Error from its containing node. Error is updated to next
+   --  error (Invalid_Stream_Error_Ref if none).
+
+   procedure Update_Error
+     (Tree      : in out Syntax_Trees.Tree;
+      Stream    : in     Stream_ID;
+      Error_Ref : in     Stream_Error_Ref;
+      Data      : in     Error_Data'Class;
+      User_Data : in     User_Data_Access_Constant)
+   with
+     Pre => not Tree.Parents_Set and Tree.Contains_Error (Error_Node 
(Error_Ref), Data),
+     Post => Tree.Contains_Error (Error_Node (Error_Ref), Data);
+   --  Update error list element matching Data.
+   --
+   --  First copy Error_Ref.Node and all ancestors; Update Error_Ref to
+   --  point to new stream element with copied nodes.
+
+   type Stream_Error_Cursor is private;
+   --  We need an extra layer of indirection for Stream_Error_Ref
+   --  iterators because Stream_ID used in Stream_Node_Parents is
+   --  private.
+
+   function Error (Item : in Stream_Error_Cursor) return Stream_Error_Ref;
+
+   function Error (Item : in Error_Ref) return Error_Data'Class;
+   function Error (Item : in Stream_Error_Ref) return Error_Data'Class;
+
+   function Error_Node (Tree : in Syntax_Trees.Tree; Error : in Error_Ref) 
return Valid_Node_Access
+   with Pre => Error /= Invalid_Error_Ref;
+
+   function Error_Node (Error : in Stream_Error_Ref) return Valid_Node_Access
+   with Pre => Error /= Invalid_Stream_Error_Ref;
+
+   function Error_Node (Tree : in Syntax_Trees.Tree; Error : in 
Stream_Error_Ref) return Valid_Node_Access
+   with Pre => Error /= Invalid_Stream_Error_Ref;
+
+   function Error_Stream_Node_Ref (Tree : in Syntax_Trees.Tree; Error : in 
Stream_Error_Ref) return Stream_Node_Ref
+   with Pre => Error_Deleted (Error) = Valid_Node_Access_Lists.No_Element;
+
+   function Error_Deleted (Error : in Stream_Error_Ref) return 
Valid_Node_Access_Lists.Cursor;
+
+   function First_Error (Tree : in Syntax_Trees.Tree) return Error_Ref
+   with Pre => Tree.Editable;
+   --  Return first error node in Tree.
+
+   function First_Error (Tree : in Syntax_Trees.Tree; Stream : in Stream_ID) 
return Stream_Error_Ref;
+   --  Return first error node in Stream.
+
+   procedure Next_Error (Tree : in Syntax_Trees.Tree; Error : in out Error_Ref)
+   with Pre => Tree.Parents_Set and Error /= Invalid_Error_Ref;
+   --  Update Error to next error node.
+
+   procedure Next_Error (Tree : in Syntax_Trees.Tree; Error : in out 
Stream_Error_Ref)
+   with Pre => Error /= Invalid_Stream_Error_Ref;
+   --  Update Error to next error node.
+
+   function Error_Count (Tree : in Syntax_Trees.Tree) return 
Ada.Containers.Count_Type
+   with Pre => Tree.Parents_Set;
+   function Error_Count (Tree : in Syntax_Trees.Tree; Stream : in Stream_ID) 
return Ada.Containers.Count_Type;
+
+   function Has_Errors (Tree : in Syntax_Trees.Tree) return Boolean;
+   --  If there are errors, faster than Error_Count; also independent of
+   --  Tree.Parents_Set.
+
+   function Has_Error (Error : in Error_Ref) return Boolean;
+   function Has_Error (Error : in Stream_Error_Ref) return Boolean;
+   function Has_Error (Position : in Stream_Error_Cursor) return Boolean;
+   function Has_Error (Node : in Valid_Node_Access) return Boolean;
+   function Has_Error (Tree : in Syntax_Trees.Tree; Node : in 
Valid_Node_Access) return Boolean;
+
+   function Has_Error_Class
+     (Tree        : in Syntax_Trees.Tree;
+      Node        : in Valid_Node_Access;
+      Error_Class : in Error_Data'Class)
+     return Error_Ref;
+   --  Return a reference to the first error in Node that is in Error_Class;
+   --  Invalid_Error_Ref if none.
+
+   package Error_Iterator_Interfaces is new Ada.Iterator_Interfaces
+     (Cursor      => Error_Ref,
+      Has_Element => Has_Error);
+
+   function Error_Iterate
+     (Tree : aliased in Syntax_Trees.Tree)
+     return Error_Iterator_Interfaces.Forward_Iterator'Class;
+   --  Iterates over errors.
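+   --
+   --  An illustrative sketch (not part of this spec): assuming a
+   --  hypothetical Editable tree T, all errors can be listed with
+   --
+   --     for Err in T.Error_Iterate loop
+   --        Ada.Text_IO.Put_Line
+   --          (T.Error_Message (T.Error_Node (Err), "syntax error"));
+   --     end loop;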
+
+   package Stream_Error_Iterator_Interfaces is new Ada.Iterator_Interfaces
+     (Cursor      => Stream_Error_Cursor,
+      Has_Element => Has_Error);
+
+   function Stream_Error_Iterate
+     (Tree   : aliased in Syntax_Trees.Tree;
+      Stream :         in Stream_ID)
+     return Stream_Error_Iterator_Interfaces.Forward_Iterator'Class;
+   --  Iterates over errors in Stream.
+
+   ----------
+   --  Debug and error message utils.
+   --
+   --  Typically no preconditions so they help with debugging errors
+   --  detected by other preconditions.
+
+   function Trimmed_Image (Tree : in Syntax_Trees.Tree; Item : in Stream_ID) 
return String;
+   function Next_Stream_ID_Trimmed_Image (Tree : in Syntax_Trees.Tree) return 
String;
+   --  Trimmed integer.
+
    function Image
-     (Tree              : in Syntax_Trees.Tree;
-      Node              : in Valid_Node_Index;
-      Descriptor        : in WisiToken.Descriptor;
-      Include_Children  : in Boolean := False;
-      Include_RHS_Index : in Boolean := False;
-      Node_Numbers      : in Boolean := False)
+     (Tree         : in Syntax_Trees.Tree;
+      Children     : in Boolean     := False;
+      Non_Grammar  : in Boolean     := False;
+      Augmented    : in Boolean     := False;
+      Line_Numbers : in Boolean     := False;
+      Root         : in Node_Access := Invalid_Node_Access)
      return String;
+   --  Image of all streams, or root node if no streams.
+   --  If Children, subtree of each stream element is included.
+
    function Image
-     (Tree       : in Syntax_Trees.Tree;
-      Nodes      : in Valid_Node_Index_Array;
-      Descriptor : in WisiToken.Descriptor)
+     (Tree          : in Syntax_Trees.Tree;
+      Stream        : in Stream_ID;
+      Stack         : in Boolean := True;
+      Input         : in Boolean := True;
+      Shared        : in Boolean := False;
+      Children      : in Boolean := False;
+      Node_Numbers  : in Boolean := True;
+      Non_Grammar   : in Boolean := False;
+      Augmented     : in Boolean := False;
+      Line_Numbers  : in Boolean := False;
+      State_Numbers : in Boolean := True)
      return String;
-   --  For debug and error messages.
+   --  Image of each node. If Stack, includes stack; if Input, includes
+   --  input; if Shared, includes continuation in Shared_Stream. If
+   --  Children, each entire subtree is included, with newlines, as in
+   --  Print_Tree.
 
-   function First_Index (Tree : in Syntax_Trees.Tree) return Node_Index;
-   function Last_Index (Tree : in Syntax_Trees.Tree) return Node_Index;
+   function Image
+     (Tree                  : in Syntax_Trees.Tree;
+      Element               : in Stream_Index;
+      State                 : in Boolean := False;
+      Children              : in Boolean := False;
+      RHS_Index             : in Boolean := False;
+      Node_Numbers          : in Boolean := False;
+      Terminal_Node_Numbers : in Boolean := False;
+      Line_Numbers          : in Boolean := False;
+      Non_Grammar           : in Boolean := False;
+      Augmented             : in Boolean := False;
+      Expecting             : in Boolean := False)
+     return String;
+   --  Element can be from any stream, or Invalid_Stream_Index
 
-   package Node_Sets is new SAL.Gen_Unbounded_Definite_Vectors 
(Valid_Node_Index, Boolean, Default_Element => False);
+   function Image
+     (Tree                  : in Syntax_Trees.Tree;
+      Node                  : in Node_Access;
+      Children              : in Boolean := False;
+      RHS_Index             : in Boolean := False;
+      Node_Numbers          : in Boolean := False;
+      Terminal_Node_Numbers : in Boolean := False;
+      Line_Numbers          : in Boolean := False;
+      Non_Grammar           : in Boolean := False;
+      Augmented             : in Boolean := False;
+      Expecting             : in Boolean := False;
+      Safe_Only             : in Boolean := False)
+     return String;
+   --  If Safe_Only, assume Node is not in tree, so can't use Prev_/Next_ 
anything.
 
    function Image
-     (Item     : in Node_Sets.Vector;
-      Inverted : in Boolean := False)
+     (Tree                  : in Syntax_Trees.Tree;
+      Nodes                 : in Node_Access_Array;
+      RHS_Index             : in Boolean := False;
+      Node_Numbers          : in Boolean := False;
+      Terminal_Node_Numbers : in Boolean := False;
+      Line_Numbers          : in Boolean := False;
+      Non_Grammar           : in Boolean := False;
+      Augmented             : in Boolean := False)
      return String;
-   --  Simple list of numbers, for debugging
+
+   function Image
+     (Tree                  : in Syntax_Trees.Tree;
+      Ref                   : in Stream_Node_Ref;
+      First_Terminal        : in Boolean := False;
+      Node_Numbers          : in Boolean := False;
+      Terminal_Node_Numbers : in Boolean := False;
+      Line_Numbers          : in Boolean := False;
+      Non_Grammar           : in Boolean := False;
+      Augmented             : in Boolean := False;
+      Expecting             : in Boolean := False)
+     return String;
+   --  If First_Terminal, show First_Terminal of Ref.Node if Ref is rooted.
+
+   function Image
+     (Tree : in Syntax_Trees.Tree;
+      List : in Valid_Node_Access_Lists.List)
+     return String;
+
+   function Decimal_Image is new SAL.Generic_Decimal_Image (Node_Index);
+   function Trimmed_Image is new SAL.Gen_Trimmed_Image (Node_Index);
+   function Trimmed_Image is new SAL.Gen_Trimmed_Image (Base_Sequential_Index);
+
+   function Get_Node_Index (Node : in Node_Access) return Node_Index;
+   function Get_Node_Index (Tree : in Syntax_Trees.Tree; Node : in 
Node_Access) return Node_Index;
+   function Get_Node_Index (Element : in Stream_Index) return Node_Index;
+   function Get_Node_Index
+     (Tree    : in Syntax_Trees.Tree;
+      Stream  : in Stream_ID;
+      Element : in Stream_Index)
+     return Node_Index
+   with Pre => Element = Invalid_Stream_Index or else Tree.Contains (Stream, 
Element);
+   --  Version without Tree requires Syntax_Trees.Get_Node_Index. Returns
+   --  Invalid_Node_Index for Invalid_Node_Access.
+
+   procedure Enable_Ref_Count_Check (Tree : in out Syntax_Trees.Tree; Stream : 
in Stream_ID; Enable : in Boolean)
+   with Pre => Stream /= Invalid_Stream_ID;
+   --  Default is enabled.
+   --
+   --  Disabling is useful when there are bugs you want to ignore.
+
+   function Node_Access_Compare (Left, Right : in Node_Access) return 
SAL.Compare_Result
+   with Pre => Left /= Invalid_Node_Access and Right /= Invalid_Node_Access;
+   --  Only valid on a batch parsed tree, where Node_Index is ordered and
+   --  unique.
+   --
+   --  Left, Right can't be Valid_Node_Access because
+   --  SAL.Gen_Unbounded_Sparse_Ordered_Sets requires a valid default
+   --  initialization.
+
+   package Node_Sets is new SAL.Gen_Unbounded_Sparse_Ordered_Sets 
(Node_Access, Node_Access_Compare);
 
    function Error_Message
-     (Tree      : in Syntax_Trees.Tree;
-      Terminals : in Base_Token_Array_Access_Constant;
-      Node      : in Valid_Node_Index;
-      File_Name : in String;
-      Message   : in String)
+     (Tree    : in Syntax_Trees.Tree;
+      Node    : in Valid_Node_Access;
+      Message : in String)
+     return String
+   with Pre => Tree.Parents_Set;
+   --  File_Name is from Tree.Lexer; line and column are from Node.
+
+   function Error_Message
+     (Tree    : in Syntax_Trees.Tree;
+      Ref     : in Stream_Node_Ref;
+      Message : in String)
      return String;
-   --  Get Line, column from Node.
+   --  File_Name is from Tree.Lexer; line and column are from Node.
 
    type Validate_Node is access procedure
-     (Tree              : in     Syntax_Trees.Tree;
-      Node              : in     Valid_Node_Index;
-      Node_Image_Output : in out Boolean);
-   --  Called by Validate_Tree for each node visited; perform other
-   --  checks, output to Text_IO.Current_Error. If Node_Image_Output is
-   --  False, output Image (Tree, Node, Descriptor, Node_Numbers => True) once
-   --  before any error messages.
+     (Tree                : in     Syntax_Trees.Tree;
+      Node                : in     Valid_Node_Access;
+      Data                : in out User_Data_Type'Class;
+      Node_Error_Reported : in out Boolean);
+   --  Called by Validate_Tree for each node visited; perform checks
+   --  other than parent/child, output error messages to
+   --  Tree.Lexer.Trace.
+   --
+   --  Set Node_Error_Reported True if any errors are reported.
+   --
+   --  Unless Node_Error_Reported is already True on entry, output Image
+   --  (Tree, Node, Node_Numbers => True) once before any error messages.
+
+   procedure Mark_In_Tree
+     (Tree                : in     Syntax_Trees.Tree;
+      Node                : in     Valid_Node_Access;
+      Data                : in out User_Data_Type'Class;
+      Node_Error_Reported : in out Boolean);
+   --  Mark Node as being "in tree".
+   --
+   --  If this is provided to Validate_Tree as the Validate_Node
+   --  argument, Validate_Tree will check that all nodes in Tree.Nodes
+   --  are marked as "in tree".
+
+   function In_Tree (Tree : in Syntax_Trees.Tree; Node : in Valid_Node_Access) 
return Boolean;
+   --  True if Node is in Tree.Nodes. Useful for Validate_Node, Validate_Error.
 
    procedure Validate_Tree
-     (Tree          : in out Syntax_Trees.Tree;
-      Terminals     : in     Base_Token_Array_Access_Constant;
-      Descriptor    : in     WisiToken.Descriptor;
-      File_Name     : in     String;
-      Root          : in     Node_Index                 := Invalid_Node_Index;
-      Validate_Node : in     Syntax_Trees.Validate_Node := null)
-   with Pre => Tree.Flushed and Tree.Parents_Set;
-   --  Verify child/parent links, and that no children are Deleted_Child.
+     (Tree              : in out Syntax_Trees.Tree;
+      User_Data         : in out User_Data_Type'Class;
+      Error_Reported    : in out Node_Sets.Set;
+      Node_Index_Order  : in     Boolean;
+      Byte_Region_Order : in     Boolean                    := True;
+      Root              : in     Node_Access                := 
Invalid_Node_Access;
+      Validate_Node     : in     Syntax_Trees.Validate_Node := null);
+   --  Verify that no children are Invalid_Node_Access. Verify
+   --  child/parent links. If Node_Index_Order, verify that
+   --  Node.Node_Index > Node.Children.Node_Index (which is true in a
+   --  batch parse tree). Call Validate_Node for each visited node.
    --  Violations output a message to Text_IO.Current_Error.
+   --  Error_Reported is used to avoid outputting an error for a node more
+   --  than once.
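+   --
+   --  An illustrative call (a sketch; Data is a hypothetical
+   --  User_Data_Type'Class object):
+   --
+   --     declare
+   --        Reported : Node_Sets.Set;
+   --     begin
+   --        Tree.Validate_Tree
+   --          (Data, Reported,
+   --           Node_Index_Order => False,
+   --           Validate_Node    => Mark_In_Tree'Access);
+   --     end;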
 
-   type Image_Augmented is access function (Aug : in Base_Token_Class_Access) 
return String;
-   type Image_Action is access function (Action : in Semantic_Action) return 
String;
+   procedure Sequential_Index_Cleared (Tree : in Syntax_Trees.Tree);
+   --  Raises SAL.Programmer_Error if any node in Tree or Tree.Streams
+   --  has Sequential_Index /= Invalid_Sequential_Index.
 
    procedure Print_Tree
-     (Tree            : in Syntax_Trees.Tree;
-      Descriptor      : in WisiToken.Descriptor;
-      Root            : in Node_Index                   := Invalid_Node_Index;
-      Image_Augmented : in Syntax_Trees.Image_Augmented := null;
-      Image_Action    : in Syntax_Trees.Image_Action    := null)
-   with Pre => Tree.Flushed;
+     (Tree         : in Syntax_Trees.Tree;
+      Root         : in Node_Access := Invalid_Node_Access;
+      Line_Numbers : in Boolean     := False;
+      Non_Grammar  : in Boolean     := False);
    --  Print tree rooted at Root (default Tree.Root) to
-   --  Text_IO.Current_Output, for debugging. For each node,
-   --  Image_Augmented is called if it is not null and node.augmented is
-   --  not null.
+   --  Tree.Lexer.Trace, for debugging.
+   --
+   --  This is the same as Trace.Put_Line (Tree.Image (..., Children =>
+   --  True)), but avoids storing the entire trace image on the stack;
+   --  required for large trees.
+
+   procedure Print_Streams
+     (Tree        : in     Syntax_Trees.Tree;
+      Children    : in     Boolean := False;
+      Non_Grammar : in     Boolean := False);
+
+   function Tree_Size_Image (Tree : in Syntax_Trees.Tree) return String;
+   --  For debugging; node counts.
+
+   procedure Print_Ref_Counts (Tree : in Syntax_Trees.Tree);
+
+   procedure Find_Node (Tree : in Syntax_Trees.Tree; Node : in 
Valid_Node_Access);
+   --  Print to Tree.Lexer.Trace the index of Node in Tree.Nodes; useful
+   --  in debugger for a stable way to access Node.
 
 private
    use all type Ada.Containers.Count_Type;
 
-   type Node (Label : Node_Label := Virtual_Terminal) is
-   --  Label has a default to allow changing the label during tree editing.
-   record
-      ID : WisiToken.Token_ID := Invalid_Token_ID;
+   type Error_List_Access is access all Error_Data_Lists.List;
+   procedure Free is new Ada.Unchecked_Deallocation (Error_Data_Lists.List, 
Error_List_Access);
 
-      Byte_Region : Buffer_Region := Null_Buffer_Region;
-      --  Computed by Set_Children, used in Semantic_Check actions and debug
-      --  messages.
+   type Node
+     (Label       : Node_Label;
+      Child_Count : SAL.Base_Peek_Type)
+   --  Discriminants have no default because allocated nodes are
+   --  constrained anyway (ARM 4.8 6/3).
+   is record
+      Copied_Node : Node_Access;
+      --  Only set during Copy_Tree
 
-      Parent : Node_Index := Invalid_Node_Index;
+      ID : WisiToken.Token_ID := Invalid_Token_ID;
 
-      State : Unknown_State_Index := Unknown_State;
-      --  Parse state that was on stack with this token, to allow undoing a
-      --  reduce.
+      Node_Index : Syntax_Trees.Node_Index := Invalid_Node_Index;
+      --  If Terminal_Label, positive, and corresponds to text order after
+      --  initial lex. If Nonterm, negative, arbitrary. After a batch parse,
+      --  node indices are unique within the tree, but after incremental
+      --  editing, they are reused because nodes created for unsuccessful
+      --  parse streams are deleted.
 
-      Augmented : Base_Token_Class_Access := null;
+      Parent : Node_Access := Invalid_Node_Access;
 
-      case Label is
-      when Shared_Terminal =>
-         Terminal : Token_Index; -- into Parser.Terminals
+      Augmented : Augmented_Class_Access := null;
 
-      when Virtual_Terminal =>
-         Before : Base_Token_Index := Invalid_Token_Index; -- into 
Parser.Terminals
+      Error_List : Error_List_Access;
+      --  We store an access to an error list object in each node, rather
+      --  than a list object, to reduce the size of a node; almost all nodes
+      --  have no errors.
 
-      when Virtual_Identifier =>
-         Identifier : Identifier_Index; -- into user data
+      case Label is
+      when Terminal_Label =>
+         Non_Grammar : aliased Lexer.Token_Arrays.Vector;
+         --  Immediately following Node. In initial lex, this can only be in a
+         --  Source_Terminal node. User Insert_Terminal can move it to a
+         --  Virtual_Terminal node, editing the tree can copy it to a
+         --  Virtual_Identifier node.
+         --
+         --  Not a pointer, because many nodes have non_grammar, and to
+         --  simplify using Tree.Non_Grammar_Var.
+
+         Sequential_Index : Syntax_Trees.Base_Sequential_Index := 
Invalid_Sequential_Index;
+
+         case Label is
+         when Source_Terminal =>
+            Byte_Region    : Buffer_Region         := Null_Buffer_Region;
+            Char_Region    : Buffer_Region         := Null_Buffer_Region;
+            New_Line_Count : Base_Line_Number_Type := 0;
+            --  Data from lexer. We store the absolute buffer region here to 
avoid
+            --  storing all whitespace in the tree. Edit_Tree shifts these for
+            --  incremental parse. We don't store Line_Region here, because it
+            --  changes when Insert_Terminal moves Non_Grammar; Non_Grammars 
all
+            --  store Line_Region. We store New_Line_Count to allow computing 
line
+            --  numbers from previous or following Non_Grammar across 
multi-line
+            --  tokens.
+
+            Following_Deleted : aliased Valid_Node_Access_Lists.List;
+            --  Nodes that follow this terminal that were deleted by error
+            --  recovery.
+            --  FIXME: change to ptr like error_list, for space saving?
+
+         when Virtual_Terminal_Label =>
+            Insert_Location : WisiToken.Insert_Location := Before_Next;
+            --  Overridden Insert_Token can change the default.
+            --  If the node has non_grammar tokens, Insert_Location must be
+            --  Between.
+
+            case Label is
+            when Virtual_Terminal =>
+               null;
+            when Virtual_Identifier =>
+               Identifier : Identifier_Index; -- index into user data
+            when Source_Terminal | Nonterm =>
+               null;
+            end case;
+
+         when Nonterm =>
+            null;
+         end case;
 
       when Nonterm =>
          Virtual : Boolean := False;
          --  True if any child node is Virtual_Terminal or Nonterm with Virtual
-         --  set. Used by Semantic_Check actions.
+         --  set. Used by In_Parse_Actions and error recover, via
+         --  Contains_Virtual_Terminal.
+
+         Recover_Conflict : Boolean := False;
+         --  True if this node is created during error recover Resume when more
+         --  than one parser is active, due to a grammar conflict. Edit_Tree
+         --  must break down this node to redo the conflict resolution; the
+         --  correct resolution may require a different branch.
+         --  test_incremental.adb Undo_Conflict_01.
 
          RHS_Index : Natural;
          --  With ID, index into Productions.
-         --  Used for debug output, keep for future use.
 
-         Action : Semantic_Action := null;
+         Name_Offset : Base_Buffer_Pos := 0;
+         Name_Length : Base_Buffer_Pos := 0;
+         --  Name_* are set and checked by In_Parse_Actions. We use an offset
+         --  from First_Terminal (Node).Byte_Region.First, rather than a
+         --  Buffer_Region, to avoid needing to shift it during Edit_Tree for
+         --  incremental parse. IMPROVEME: generalize for other actions;
+         --  post_parse_augmented, in_parse_augmented.
+
+         Children : Node_Access_Array (1 .. Child_Count);
+         --  We use an explicit array, rather than a pointer to the first
+         --  child, to preserve child indices while editing the tree.
+      end case;
+   end record;
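   --  Editorial sketch (not part of this patch): allocating a Nonterm
   --  node with two children, in the same style as Dummy_Node below.
   --  Child_1, Child_2 and Expr_ID are hypothetical values.
   --
   --     Example : constant Node_Access := new Node'
   --       (Label       => Nonterm,
   --        Child_Count => 2,
   --        ID          => Expr_ID,
   --        RHS_Index   => 0,
   --        Children    => (Child_1, Child_2),
   --        others      => <>);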
+
+   type Stream_Label is range -2 .. Integer'Last;
+   --  First parser has label 0, for compatibility with tests, and for
+   --  general sanity. There is no practical upper limit; parsing a large
+   --  file spawns and terminates thousands of parsers.
 
-         Name : Buffer_Region := Null_Buffer_Region;
-         --  Name is set and checked by Semantic_Check actions.
+   Invalid_Stream_Label : constant Stream_Label := -2;
+   Shared_Stream_Label  : constant Stream_Label := -1;
 
-         Children : Valid_Node_Index_Arrays.Vector;
+   function Trimmed_Image is new SAL.Gen_Trimmed_Image (Stream_Label);
 
-         Min_Terminal_Index : Base_Token_Index := Invalid_Token_Index;
-         --  Cached for push_back of nonterminals during recovery
-      end case;
+   type Stream_Element is record
+      --  We use separate stream pointers, rather than reusing the nonterm
+      --  child pointers as in [1], to allow each parallel parser to have
+      --  its own stream. This also preserves Child_Index when children are
+      --  deleted during editing.
+      Node : Node_Access  := Invalid_Node_Access;
+
+      State : Unknown_State_Index := Unknown_State;
+      --  Parse state that is on the parse stack with this token.
+      --  Unknown_State in Shared_Stream or a parse stream input.
    end record;
 
-   subtype Nonterm_Node is Node (Nonterm);
+   --  We often hold copies of Stream_Element_Lists cursors while editing
+   --  a tree, so we use a ref_count list to detect dangling references.
+   --  This is not measurably slower according to the ada-mode timing
+   --  benchmarks.
+   package Stream_Element_Lists is new 
SAL.Gen_Definite_Doubly_Linked_Lists_Ref_Count (Stream_Element);
+   use all type Stream_Element_Lists.Cursor;
+
+   type Stream_Index is record
+      Cur : Stream_Element_Lists.Cursor;
+   end record;
+
+   Invalid_Stream_Index : constant Stream_Index := (Cur => 
Stream_Element_Lists.No_Element);
+
+   type Parse_Stream is record
+      Label : Stream_Label := Invalid_Stream_Label;
+
+      Stack_Top : Stream_Element_Lists.Cursor := 
Stream_Element_Lists.No_Element;
+      --  The top of the parse stack. The stack is Stack_Top and previous
+      --  elements; the input stream is the following elements, or
+      --  Shared_Stream if Stack_Top.Next is Invalid_Stream_Index. In
+      --  batch parsing with no error correction, this is always Last. In
+      --  Shared_Stream, always Invalid_Stream_Index.
+
+      Shared_Link : Stream_Element_Lists.Cursor := 
Stream_Element_Lists.No_Element;
+      --  A complete parse stream consists of elements in
+      --  Parse_Stream.Elements, followed by elements in
+      --  Shared_Stream.Elements starting at Shared_Link, terminating in an
+      --  EOI element. EOI is never shifted to the parse stream, but it can
+      --  be copied to the parse stream to add an error. Then Shared_Link is
+      --  No_Element.
+
+      Elements : Stream_Element_Lists.List;
+   end record;
+
+   package Parse_Stream_Lists is new SAL.Gen_Definite_Doubly_Linked_lists 
(Parse_Stream);
+   use all type Parse_Stream_Lists.Cursor;
+
+   type Stream_ID is record
+      Cur : Parse_Stream_Lists.Cursor;
+   end record;
+
+   Invalid_Stream_ID : constant Stream_ID := (Cur => 
Parse_Stream_Lists.No_Element);
+
+   Invalid_Stream_Node_Ref : constant Stream_Node_Ref :=
+     (Invalid_Stream_ID, Invalid_Stream_Index, Invalid_Node_Access);
 
-   package Node_Arrays is new SAL.Gen_Unbounded_Definite_Vectors
-     (Valid_Node_Index, Node, Default_Element => (others => <>));
+   Invalid_Stream_Node_Parents : constant Stream_Node_Parents := 
(Invalid_Stream_Node_Ref, Parents => <>);
 
-   type Base_Tree is new Ada.Finalization.Controlled with record
-      Nodes : Node_Arrays.Vector;
-      --  During normal parsing, tokens are added to Nodes by "parallel"
-      --  LALR parsers, but they are all run from one Ada task, so there's
-      --  no need for Nodes to be Protected. Packrat parsing also has a
-      --  single Ada task.
-      --
-      --  During McKenzie_Recover, which has multiple Ada tasks, the syntax
-      --  tree is read but not modified.
+   package Node_Access_Arrays is new SAL.Gen_Unbounded_Definite_Vectors 
(Positive_Node_Index, Node_Access, null);
 
-      Augmented_Present : Boolean := False;
-      --  True if Set_Augmented has been called on any node. Declared in
-      --  Base_Tree so it can be checked by Finalize (Base_Tree) and
-      --  Finalize (Tree).
+   type Tree is new Base_Tree with record
+      Next_Stream_Label : Stream_Label := Shared_Stream_Label + 1;
+
+      Next_Terminal_Node_Index : Node_Index := 1;
+
+      Root : Node_Access := Invalid_Node_Access;
+      SOI  : Node_Access := Invalid_Node_Access;
+      EOI  : Node_Access := Invalid_Node_Access;
+
+      Streams : Parse_Stream_Lists.List;
+
+      Shared_Stream : Stream_ID;
+
+      Nodes : Node_Access_Arrays.Vector;
+      --  Stores ref to all nodes, for Finalize.
 
       Traversing : Boolean := False;
       --  True while traversing tree in Process_Tree.
@@ -749,57 +3167,104 @@ private
       --  note above.
    end record;
 
-   function Is_Empty (Tree : in Base_Tree) return Boolean
-   is (Tree.Nodes.Length = 0);
-
-   type Tree is new Ada.Finalization.Controlled with record
-      Shared_Tree : Base_Tree_Access;
-      --  If we need to set anything (ie parent) in Shared_Tree, we move the
-      --  branch point instead, unless Flush = True.
-
-      Last_Shared_Node : Node_Index := Invalid_Node_Index;
-      Branched_Nodes   : Node_Arrays.Vector;
-      Flush            : Boolean    := False;
-      --  If Flush is True, all nodes are in Shared_Tree. Otherwise, all
-      --  greater than Last_Shared_Node are in Branched_Nodes.
-      --
-      --  We maintain Last_Shared_Node when Flush is True or False, so
-      --  subprograms that have no reason to check Flush can rely on
-      --  Last_Shared_Node.
-
-      Root : Node_Index := Invalid_Node_Index;
-   end record with
-     Type_Invariant =>
-       (Shared_Tree = null or else
-        (if Tree.Flush
-         then Last_Shared_Node = Shared_Tree.Nodes.Last_Index and
-            Branched_Nodes.Length = 0
-         else Last_Shared_Node <= Shared_Tree.Nodes.Last_Index and
-            Last_Shared_Node < Branched_Nodes.First_Index));
-
-   subtype Node_Const_Ref is Node_Arrays.Constant_Reference_Type;
-   subtype Node_Var_Ref is Node_Arrays.Variable_Reference_Type;
-
-   function Get_Node_Const_Ref
-     (Tree : in Syntax_Trees.Tree;
-      Node : in Valid_Node_Index)
-     return Node_Const_Ref
-   is (if Node <= Tree.Last_Shared_Node
-         then Tree.Shared_Tree.Nodes.Constant_Ref (Node)
-         else Tree.Branched_Nodes.Constant_Ref (Node));
-
-   function Get_Node_Var_Ref
-     (Tree : in Syntax_Trees.Tree;
-      Node : in Valid_Node_Index)
-     return Node_Var_Ref
-   is (if Node <= Tree.Last_Shared_Node
-         then Tree.Shared_Tree.Nodes.Variable_Ref (Node)
-         else Tree.Branched_Nodes.Variable_Ref (Node));
-
-   function Is_Empty (Tree : in Syntax_Trees.Tree) return Boolean
-   is (Tree.Branched_Nodes.Length = 0 and (Tree.Shared_Tree = null or else 
Tree.Shared_Tree.Is_Empty));
-
-   function Parents_Set (Tree : in Syntax_Trees.Tree) return Boolean
-   is (Tree.Shared_Tree.Parents_Set);
+   procedure Free is new Ada.Unchecked_Deallocation (Node, Node_Access);
+
+   Dummy_Node : constant Node_Access := new Node'(Label => Virtual_Identifier, 
Child_Count => 0, others => <>);
+
+   type Token_Array_Var_Ref (Element : not null access 
WisiToken.Lexer.Token_Arrays.Vector) is record
+      Dummy : Integer := raise Program_Error with "uninitialized reference";
+   end record;
+
+   type Token_Array_Const_Ref (Element : not null access constant 
WisiToken.Lexer.Token_Arrays.Vector) is record
+      Dummy : Integer := raise Program_Error with "uninitialized reference";
+   end record;
+
+   type Valid_Node_Access_List_Var_Ref (List : not null access 
Valid_Node_Access_Lists.List) is record
+      Dummy : Integer := raise Program_Error with "uninitialized reference";
+   end record;
+
+   function Node_Image
+     (Node : in Node_Access;
+      Tree : in Syntax_Trees.Tree'Class)
+     return String
+   is (Tree.Image (Node, Node_Numbers => True));
+
+   function Node_List_Image is new Valid_Node_Access_Lists.Gen_Image_Aux 
(Tree'Class, Node_Image);
+
+   function Image
+     (Tree : in Syntax_Trees.Tree;
+      List : in Valid_Node_Access_Lists.List)
+     return String
+   is (Node_List_Image (List, Tree));
+
+   ----------
+   --  Errors
+
+   type Error_Data_List_Const_Ref (List : not null access constant 
Error_Data_Lists.List) is record
+      Dummy : Integer := raise Program_Error;
+   end record;
+
+   Empty_Error_List : aliased constant Error_Data_Lists.List := 
Error_Data_Lists.Empty_List;
+   --  WORKAROUND: with GNAT Community 2021, adding 'aliased' in
+   --  sal-gen_indefinite_doubly_linked_lists.ads doesn't work.
+
+   type Error_Ref is record
+      --  Used when tree is fully parsed.
+      Node    : Node_Access;
+      Deleted : Valid_Node_Access_Lists.Cursor;
+      --  If Node = Invalid_Node_Access, no error. If Deleted = No_Element,
+      --  the error node is Node. If Deleted /= No_Element, any error on
+      --  Node has already been visited, the error node is Element
+      --  (Deleted), which is a Source_Terminal, and Element
+      --  (Deleted).Parent = Node.
+
+      Error : Error_Data_Lists.Cursor;
+      --  Element in error node Error_Data_List.
+   end record;
+
+   Invalid_Error_Ref : constant Error_Ref :=
+     (Invalid_Node_Access, Valid_Node_Access_Lists.No_Element, 
Error_Data_Lists.No_Element);
+
+   type Error_Iterator (Tree : not null access constant Syntax_Trees.Tree)
+     is new Error_Iterator_Interfaces.Forward_Iterator with
+     null record;
+
+   overriding function First (Object : Error_Iterator) return Error_Ref;
+
+   overriding function Next
+     (Object   : Error_Iterator;
+      Position : Error_Ref)
+     return Error_Ref;
+
+   type Stream_Error_Ref is record
+      --  Used while parsing
+      Ref     : Stream_Node_Parents;
+      --  Same meaning as Node in Error_Ref.
+
+      Deleted : Valid_Node_Access_Lists.Cursor;
+      --  Same as Deleted in Error_Ref.
+
+      Error   : Error_Data_Lists.Cursor;
+      --  Same as Error in Error_Ref.
+   end record;
+
+   Invalid_Stream_Error_Ref : constant Stream_Error_Ref :=
+     (Invalid_Stream_Node_Parents, Valid_Node_Access_Lists.No_Element, 
Error_Data_Lists.No_Element);
+
+   type Stream_Error_Cursor is record
+      SER : Stream_Error_Ref;
+   end record;
+
+   type Stream_Error_Iterator (Tree : not null access constant 
Syntax_Trees.Tree)
+   is new Stream_Error_Iterator_Interfaces.Forward_Iterator with record
+      Stream : Parse_Stream_Lists.Cursor;
+   end record;
+
+   overriding function First (Object : Stream_Error_Iterator) return 
Stream_Error_Cursor;
+
+   overriding function Next
+     (Object   : Stream_Error_Iterator;
+      Position : Stream_Error_Cursor)
+     return Stream_Error_Cursor;
 
 end WisiToken.Syntax_Trees;
diff --git a/wisitoken-text_io_trace.adb b/wisitoken-text_io_trace.adb
index 77fb3319de..080be91a32 100644
--- a/wisitoken-text_io_trace.adb
+++ b/wisitoken-text_io_trace.adb
@@ -2,7 +2,7 @@
 --
 --  See spec.
 --
---  Copyright (C) 2017, 2019 Free Software Foundation, Inc.
+--  Copyright (C) 2017, 2019, 2021 - 2022 Free Software Foundation, Inc.
 --
 --  This library is free software;  you can redistribute it and/or modify it
 --  under terms of the  GNU General Public License  as published by the Free
@@ -70,12 +70,13 @@ package body WisiToken.Text_IO_Trace is
          then Insert_Prefix_At_Newlines (Trace, Item)
          else Item);
    begin
-
       if Trace.File /= null and then Is_Open (Trace.File.all) then
-         Ada.Text_IO.Put_Line (Trace.File.all, -Trace.Prefix & Temp);
+         Ada.Text_IO.Put (Trace.File.all, -Trace.Prefix);
+         Ada.Text_IO.Put_Line (Trace.File.all, Temp);
          Ada.Text_IO.Flush (Trace.File.all);
       else
-         Ada.Text_IO.Put_Line (-Trace.Prefix & Temp);
+         Ada.Text_IO.Put (-Trace.Prefix);
+         Ada.Text_IO.Put_Line (Temp);
          Ada.Text_IO.Flush;
       end if;
    end Put_Line;
diff --git a/wisitoken-text_io_trace.ads b/wisitoken-text_io_trace.ads
index b400c23858..2167449f15 100644
--- a/wisitoken-text_io_trace.ads
+++ b/wisitoken-text_io_trace.ads
@@ -2,7 +2,7 @@
 --
 --  Trace output to Ada.Text_IO
 --
---  Copyright (C) 2017, 2019, 2020 Free Software Foundation, Inc.
+--  Copyright (C) 2017, 2019 - 2021 Free Software Foundation, Inc.
 --
 --  This library is free software;  you can redistribute it and/or modify it
 --  under terms of the  GNU General Public License  as published by the Free
@@ -31,7 +31,8 @@ package WisiToken.Text_IO_Trace is
 
    overriding
    procedure Put_Line (Trace : in out Text_IO_Trace.Trace; Item : in String);
-   --  If Item contains ASCII.LF, Prefix is output after each one.
+   --  Prepend Prefix. If Item contains ASCII.LF, Prefix is also output
+   --  after each one.
 
    overriding
    procedure New_Line (Trace : in out Text_IO_Trace.Trace);
diff --git a/wisitoken-to_tree_sitter.adb b/wisitoken-to_tree_sitter.adb
deleted file mode 100644
index 2213414007..0000000000
--- a/wisitoken-to_tree_sitter.adb
+++ /dev/null
@@ -1,528 +0,0 @@
---  Abstract :
---
---  Translate a wisitoken grammar file to a tree-sitter grammar file.
---
---  References:
---
---  [1] tree-sitter grammar: 
https://tree-sitter.github.io/tree-sitter/creating-parsers#the-grammar-dsl
---
---  Copyright (C) 2020 Stephen Leake All Rights Reserved.
---
---  This library is free software;  you can redistribute it and/or modify it
---  under terms of the  GNU General Public License  as published by the Free
---  Software  Foundation;  either version 3,  or (at your  option) any later
---  version. This library is distributed in the hope that it will be useful,
---  but WITHOUT ANY WARRANTY;  without even the implied warranty of MERCHAN-
---  TABILITY or FITNESS FOR A PARTICULAR PURPOSE.
-
-pragma License (GPL);
-
-with Ada.Command_Line;
-with Ada.Directories;
-with Ada.Exceptions;
-with Ada.Strings.Fixed;
-with Ada.Text_IO; use Ada.Text_IO;
-with GNAT.Traceback.Symbolic;
-with WisiToken.Syntax_Trees.LR_Utils;
-with WisiToken.Parse.LR.Parser_No_Recover;
-with WisiToken.Syntax_Trees;
-with WisiToken.Text_IO_Trace;
-with WisiToken_Grammar_Runtime;
-with Wisitoken_Grammar_Actions; use Wisitoken_Grammar_Actions;
-with Wisitoken_Grammar_Main;
-procedure WisiToken.To_Tree_Sitter
-is
-   procedure Put_Usage
-   is begin
-      Put_Line ("wisitoken-to_tree_sitter [--verbosity <level] <wisitoken 
grammar file> <language_name>");
-   end Put_Usage;
-
-   procedure Print_Tree_Sitter
-     (Data             : in WisiToken_Grammar_Runtime.User_Data_Type;
-      Tree             : in Syntax_Trees.Tree;
-      Output_File_Name : in String;
-      Language_Name    : in String)
-   is
-      use WisiToken.Syntax_Trees;
-
-      File : File_Type;
-
-      --  Local specs
-
-      procedure Put_RHS_Item_List (Node : in Valid_Node_Index; First : in 
Boolean)
-      with Pre => Tree.ID (Node) = +rhs_item_list_ID;
-
-      --  Local bodies
-
-      function Get_Text (Tree_Index : in Valid_Node_Index) return String
-      is
-         function Strip_Delimiters (Tree_Index : in Valid_Node_Index) return 
String
-         is
-            Region : Buffer_Region renames Data.Terminals.all (Tree.Terminal 
(Tree_Index)).Byte_Region;
-         begin
-            if -Tree.ID (Tree_Index) in RAW_CODE_ID | REGEXP_ID | ACTION_ID 
then
-               --  Strip delimiters. We don't strip leading/trailing spaces to 
preserve indent.
-               return Data.Grammar_Lexer.Buffer_Text ((Region.First + 2, 
Region.Last - 2));
-
-               --  We don't strip string delimiters; tree-setter can use the 
same ones.
-            else
-               return Data.Grammar_Lexer.Buffer_Text (Region);
-            end if;
-         end Strip_Delimiters;
-
-      begin
-         case Tree.Label (Tree_Index) is
-         when Shared_Terminal =>
-            return Strip_Delimiters (Tree_Index);
-
-         when Virtual_Terminal =>
-            --  Terminal keyword inserted during tree edit. We could check for
-            --  Identifier, but that will be caught later.
-            return Image (Tree.ID (Tree_Index), 
Wisitoken_Grammar_Actions.Descriptor);
-
-         when Virtual_Identifier =>
-            raise SAL.Programmer_Error;
-
-         when Nonterm =>
-            declare
-               use all type Ada.Strings.Unbounded.Unbounded_String;
-               Result       : Ada.Strings.Unbounded.Unbounded_String;
-               Tree_Indices : constant Valid_Node_Index_Array := 
Tree.Get_Terminals (Tree_Index);
-               Need_Space   : Boolean                                      := 
False;
-            begin
-               for Tree_Index of Tree_Indices loop
-                  Result := Result & (if Need_Space then " " else "") &
-                    Get_Text (Tree_Index);
-                  Need_Space := True;
-               end loop;
-               return -Result;
-            end;
-         end case;
-      end Get_Text;
-
-      procedure Not_Translated (Label : in String; Node : in Valid_Node_Index)
-      is begin
-         New_Line (File);
-         Put (File, "// " & Label & ": not translated: " & Node_Index'Image 
(Node) & ":" &
-                Tree.Image (Node, Wisitoken_Grammar_Actions.Descriptor, 
Include_Children => True));
-      end Not_Translated;
-
-      procedure Put_RHS_Alternative_List (Node : in Valid_Node_Index; First : 
in Boolean)
-      with Pre => Tree.ID (Node) = +rhs_alternative_list_ID
-      is begin
-         case Tree.RHS_Index (Node) is
-         when 0 =>
-            --  If only alternative, don't need "choice()".
-            Put_RHS_Item_List (Tree.Child (Node, 1), First => True);
-
-         when 1 =>
-            if First then
-               Put (File, "choice(");
-            end if;
-
-            Put_RHS_Alternative_List (Tree.Child (Node, 1), First => False);
-            Put (File, ", ");
-            Put_RHS_Item_List (Tree.Child (Node, 3), First => True);
-
-            if First then
-               Put (File, ")");
-            end if;
-
-         when others =>
-            Not_Translated ("Put_RHS_Alternative_List", Node);
-         end case;
-      end Put_RHS_Alternative_List;
-
-      procedure Put_RHS_Optional_Item (Node : in Valid_Node_Index)
-      with Pre => Tree.ID (Node) = +rhs_optional_item_ID
-      is begin
-         Put (File, "optional(");
-
-         case Tree.RHS_Index (Node) is
-         when 0 | 1 =>
-            Put_RHS_Alternative_List (Tree.Child (Node, 2), First => True);
-         when 2 =>
-            Put (File, "$." & Get_Text (Tree.Child (Node, 1)));
-         when 3 =>
-            --  STRING_LITERAL_2
-            Put (File, Get_Text (Tree.Child (Node, 1)));
-         when others =>
-            Not_Translated ("Put_RHS_Optional_Item", Node);
-         end case;
-
-         Put (File, ")");
-      end Put_RHS_Optional_Item;
-
-      procedure Put_RHS_Multiple_Item (Node : in Valid_Node_Index)
-      with Pre => Tree.ID (Node) = +rhs_multiple_item_ID
-      is begin
-         case Tree.RHS_Index (Node) is
-         when 0 | 3 =>
-            Put (File, "repeat(");
-            Put_RHS_Alternative_List (Tree.Child (Node, 2), First => True);
-            Put (File, ")");
-
-         when 1 | 2 =>
-            Put (File, "repeat1(");
-            Put_RHS_Alternative_List (Tree.Child (Node, 2), First => True);
-            Put (File, ")");
-
-         when 4 =>
-            Put (File, "repeat1(");
-            Put (File, "$." & Get_Text (Tree.Child (Node, 1)));
-            Put (File, ")");
-
-         when 5 =>
-            Put (File, "repeat(");
-            Put (File, "$." & Get_Text (Tree.Child (Node, 1)));
-            Put (File, ")");
-
-         when others =>
-            Not_Translated ("Put_RHS_Multiple_Item", Node);
-         end case;
-      end Put_RHS_Multiple_Item;
-
-      procedure Put_RHS_Group_Item (Node : in Valid_Node_Index)
-      with Pre => Tree.ID (Node) = +rhs_group_item_ID
-      is begin
-         Not_Translated ("Put_RHS_Group_Item", Node); -- maybe just plain ()?
-      end Put_RHS_Group_Item;
-
-      procedure Put_RHS_Item (Node : in Valid_Node_Index)
-      with Pre => Tree.ID (Node) = +rhs_item_ID
-      is begin
-         case Tree.RHS_Index (Node) is
-         when 0 =>
-            declare
-               use WisiToken_Grammar_Runtime;
-
-               Ident : constant String     := Get_Text (Node);
-               Decl  : constant Node_Index := Find_Declaration (Data, Tree, 
Ident);
-            begin
-               if Decl = Invalid_Node_Index then
-                  Raise_Programmer_Error ("decl for '" & Ident & "' not 
found", Data, Tree, Node);
-
-               elsif Tree.ID (Decl) = +nonterminal_ID then
-                  Put (File, "$." & Get_Text (Tree.Child (Decl, 1)));
-
-               else
-                  case Tree.RHS_Index (Decl) is
-                  when 0 =>
-                     case To_Token_Enum (Tree.ID (Tree.Child (Tree.Child 
(Decl, 2), 1))) is
-                     when KEYWORD_ID =>
-                        Put (File, Get_Text (Tree.Child (Decl, 4)));
-
-                     when NON_GRAMMAR_ID =>
-                        Not_Translated ("put_rhs_item", Node);
-
-                     when Wisitoken_Grammar_Actions.TOKEN_ID =>
-                        declare
-                           use WisiToken.Syntax_Trees.LR_Utils;
-                           Iter : constant Syntax_Trees.LR_Utils.Iterator :=
-                             Iterate (Data, Tree, Tree.Child (Decl, 4), 
+declaration_item_ID);
-                           Item : constant Valid_Node_Index :=
-                             Tree.Child (Syntax_Trees.LR_Utils.Node (First 
(Iter)), 1);
-                        begin
-                           case To_Token_Enum (Tree.ID (Item)) is
-                           when REGEXP_ID =>
-                              Put (File, "$." & Ident);
-
-                           when STRING_LITERAL_1_ID | STRING_LITERAL_2_ID =>
-                              --  FIXME: case insensitive?
-                              Put (File, Get_Text (Item));
-
-                           when others =>
-                              Not_Translated ("put_rhs_item ident token", 
Node);
-                           end case;
-                        end;
-
-                     when others =>
-                        Not_Translated ("put_rhs_item ident", Node);
-                     end case;
-
-                  when others =>
-                     Not_Translated ("put_rhs_item 0", Node);
-                  end case;
-               end if;
-            end;
-
-         when 1 =>
-            --  STRING_LITERAL_2
-            Put (File, Get_Text (Node));
-
-         when 2 =>
-            --  ignore attribute
-            null;
-
-         when 3 =>
-            Put_RHS_Optional_Item (Tree.Child (Node, 1));
-
-         when 4 =>
-            Put_RHS_Multiple_Item (Tree.Child (Node, 1));
-
-         when 5 =>
-            Put_RHS_Group_Item (Tree.Child (Node, 1));
-
-         when others =>
-            Not_Translated ("Put_RHS_Item", Node);
-         end case;
-      end Put_RHS_Item;
-
-      procedure Put_RHS_Element (Node : in Valid_Node_Index)
-      with Pre => Tree.ID (Node) = +rhs_element_ID
-      is begin
-         case Tree.RHS_Index (Node) is
-         when 0 =>
-            Put_RHS_Item (Tree.Child (Node, 1));
-
-         when 1 =>
-            --  Ignore the label
-            Put_RHS_Item (Tree.Child (Node, 3));
-
-         when others =>
-            Not_Translated ("Put_RHS_Element", Node);
-         end case;
-      end Put_RHS_Element;
-
-      procedure Put_RHS_Item_List (Node : in Valid_Node_Index; First : in 
Boolean)
-      is
-         Children : constant Valid_Node_Index_Array := Tree.Children (Node);
-      begin
-         if Children'Length = 1 then
-            Put_RHS_Element (Children (1));
-         else
-            if First then
-               Put (File, "seq(");
-            end if;
-            Put_RHS_Item_List (Children (1), First => False);
-            Put (File, ", ");
-            Put_RHS_Element (Children (2));
-
-            if First then
-               Put (File, ")");
-            end if;
-         end if;
-      end Put_RHS_Item_List;
-
-      procedure Put_RHS (Node : in Valid_Node_Index)
-      with Pre => Tree.ID (Node) = +rhs_ID
-      is begin
-         case Tree.RHS_Index (Node) is
-         when 0 =>
-            Put (File, "/* empty */,");
-
-         when 1 .. 3 =>
-            Put_RHS_Item_List (Tree.Child (Node, 1), First => True);
-            --  ignore actions
-
-         when others =>
-            Not_Translated ("put_rhs", Node);
-         end case;
-      end Put_RHS;
-
-      procedure Put_RHS_List (Node : in Valid_Node_Index; First : in Boolean)
-      with Pre => Tree.ID (Node) = +rhs_list_ID
-      is
-         Children : constant Valid_Node_Index_Array := Tree.Children (Node);
-      begin
-         case Tree.RHS_Index (Node) is
-         when 0 =>
-            Put_RHS (Children (1));
-
-         when 1 =>
-            if First then
-               Put (File, "choice(");
-            end if;
-
-            Put_RHS_List (Children (1), First => False);
-            Put (File, ",");
-            Put_RHS (Children (3));
-
-            if First then
-               Put (File, ")");
-            end if;
-
-         when others =>
-            Not_Translated ("Put_RHS_List", Node);
-         end case;
-      end Put_RHS_List;
-
-      procedure Process_Node (Node : in Valid_Node_Index)
-      is begin
-         case To_Token_Enum (Tree.ID (Node)) is
-         --  Enum_Token_ID alphabetical order
-         when compilation_unit_ID =>
-            Process_Node (Tree.Child (Node, 1));
-
-         when compilation_unit_list_ID =>
-            declare
-               Children : constant Valid_Node_Index_Array := Tree.Children 
(Node);
-            begin
-               case To_Token_Enum (Tree.ID (Children (1))) is
-               when compilation_unit_list_ID =>
-                  Process_Node (Children (1));
-                  Process_Node (Children (2));
-               when compilation_unit_ID =>
-                  Process_Node (Children (1));
-               when others =>
-                  raise SAL.Programmer_Error;
-               end case;
-            end;
-
-         when declaration_ID =>
-            case Tree.RHS_Index (Node) is
-            when 0 =>
-               if Tree.ID (Tree.Child (Tree.Child (Node, 2), 1)) = 
+Wisitoken_Grammar_Actions.TOKEN_ID then
-                  declare
-                     use Ada.Strings;
-                     use Ada.Strings.Fixed;
-                     use WisiToken.Syntax_Trees.LR_Utils;
-                     Name : constant String := Get_Text (Tree.Child (Node, 3));
-                     Iter : constant Syntax_Trees.LR_Utils.Iterator :=
-                       WisiToken_Grammar_Runtime.Iterate (Data, Tree, 
Tree.Child (Node, 4), +declaration_item_ID);
-                     Item : constant Valid_Node_Index :=
-                       Tree.Child (Syntax_Trees.LR_Utils.Node (First (Iter)), 
1);
-                  begin
-                     case To_Token_Enum (Tree.ID (Item)) is
-                     when REGEXP_ID =>
-                        Put_Line (File, Name & ": $ => /" & Trim (Get_Text 
(Item), Both) & "/,");
-
-                     when others =>
-                        null;
-                     end case;
-                  end;
-               end if;
-
-            when others =>
-               null;
-            end case;
-
-         when nonterminal_ID =>
-            declare
-               Children : constant Valid_Node_Index_Array := Tree.Children 
(Node);
-            begin
-               Put (File, Get_Text (Children (1)) & ": $ => ");
-
-               Put_RHS_List (Children (3), First => True);
-
-               Put_Line (File, ",");
-            end;
-
-         when wisitoken_accept_ID =>
-            Process_Node (Tree.Child (Node, 1));
-
-         when others =>
-            raise SAL.Not_Implemented with Image (Tree.ID (Node), 
Wisitoken_Grammar_Actions.Descriptor);
-         end case;
-      end Process_Node;
-   begin
-      Create (File, Out_File, Output_File_Name);
-      Put_Line (File, "// generated from " & Data.Grammar_Lexer.File_Name & " 
-*- buffer-read-only:t -*-");
-
-      --  FIXME: copy copyright, license?
-
-      Put_Line (File, "module.exports = grammar({");
-      Put_Line (File, "  name: '" & Language_Name & "',");
-
-      Put_Line (File, "  rules: {");
-
-      Process_Node (Tree.Root);
-
-      Put_Line (File, "  }");
-      Put_Line (File, "});");
-      Close (File);
-   end Print_Tree_Sitter;
-
-   Trace          : aliased WisiToken.Text_IO_Trace.Trace 
(Wisitoken_Grammar_Actions.Descriptor'Access);
-   Input_Data     : aliased WisiToken_Grammar_Runtime.User_Data_Type;
-   Grammar_Parser : WisiToken.Parse.LR.Parser_No_Recover.Parser;
-
-   Input_File_Name : Ada.Strings.Unbounded.Unbounded_String;
-   Language_Name   : Ada.Strings.Unbounded.Unbounded_String;
-begin
-   Wisitoken_Grammar_Main.Create_Parser
-     (Parser    => Grammar_Parser,
-      Trace     => Trace'Unchecked_Access,
-      User_Data => Input_Data'Unchecked_Access);
-
-   declare
-      use Ada.Command_Line;
-      Arg : Integer := 1;
-   begin
-      if not (Argument_Count in 1 .. 4) then
-         Put_Usage;
-         Set_Exit_Status (Failure);
-         return;
-      end if;
-
-      loop
-         exit when Arg > Argument_Count;
-
-         if Argument (Arg) = "--verbosity" then
-            Arg := Arg + 1;
-            Trace_Generate_EBNF := Integer'Value (Argument (Arg));
-            Arg := Arg + 1;
-
-         else
-            exit;
-         end if;
-      end loop;
-
-      --  no more options
-      Input_File_Name := +Argument (Arg);
-      Arg := Arg + 1;
-      Language_Name := +Argument (Arg);
-   end;
-
-   begin
-      Grammar_Parser.Lexer.Reset_With_File (-Input_File_Name);
-   exception
-   when Ada.Text_IO.Name_Error | Ada.Text_IO.Use_Error =>
-      raise Ada.Text_IO.Name_Error with "input file '" & (-Input_File_Name) & 
"' could not be opened.";
-   end;
-
-   begin
-      Grammar_Parser.Parse;
-   exception
-   when WisiToken.Syntax_Error =>
-      Grammar_Parser.Put_Errors;
-      raise;
-   end;
-
-   Grammar_Parser.Execute_Actions;
-
-   declare
-      use Ada.Directories;
-
-      Output_File_Name : constant String := Base_Name (-Input_File_Name) & 
".js";
-
-      Tree  : WisiToken.Syntax_Trees.Tree renames 
Grammar_Parser.Parsers.First_State_Ref.Tree;
-   begin
-      if Trace_Generate_EBNF > Outline then
-         Put_Line ("'" & (-Input_File_Name) & "' => '" & Output_File_Name & 
"'");
-      end if;
-
-      if Trace_Generate_EBNF > Detail then
-         Put_Line ("wisitoken tree:");
-         Tree.Print_Tree (Wisitoken_Grammar_Actions.Descriptor);
-         Ada.Text_IO.New_Line;
-      end if;
-
-      Print_Tree_Sitter (Input_Data, Tree, Output_File_Name, -Language_Name);
-   end;
-
-exception
-when WisiToken.Syntax_Error | WisiToken.Parse_Error =>
-   --  error message already output
-   Ada.Command_Line.Set_Exit_Status (Ada.Command_Line.Failure);
-
-when E :  others =>
-   declare
-      use Ada.Exceptions;
-      use Ada.Command_Line;
-   begin
-      Put_Line (Standard_Error, Exception_Name (E) & ": " & Exception_Message 
(E));
-      Put_Line (Standard_Error, GNAT.Traceback.Symbolic.Symbolic_Traceback 
(E));
-      Set_Exit_Status (Failure);
-   end;
-end WisiToken.To_Tree_Sitter;
diff --git a/wisitoken-user_guide.texinfo b/wisitoken-user_guide.texinfo
index 0a371a8167..ba59be658d 100644
--- a/wisitoken-user_guide.texinfo
+++ b/wisitoken-user_guide.texinfo
@@ -1,11 +1,11 @@
 \input texinfo
 @c Author : Stephen Leake stephen_leake@stephe-leake.org
 @c Web    : http://stephe-leake.org/ada/opentoken.html
-@setfilename wisitoken-user_guide.info
+@setfilename wisi-generate
 @settitle WisiToken User Guide
 
 @copying
-Copyright @copyright{} 2014-2015, 2017, 2018, 2020, 2021 Stephen Leake.
+Copyright @copyright{} 2014-2015, 2017-2018, 2020-2022 Stephen Leake.
 
 @quotation
 Permission is granted to copy, distribute and/or modify this
@@ -33,7 +33,7 @@ section entitled "GNU Free Documentation License".
 @contents
 
 @node Top
-@top WisiToken User Guide
+@top WisiToken User Guide version 3.0
 
 @ifnottex
 @insertcopying
@@ -43,16 +43,18 @@ section entitled "GNU Free Documentation License".
 * Overview::
 * Common grammar problems::
 * Grammar File Syntax::
+* Language-specific parser runtime functions::
 @end menu
 
 @node Overview
 @chapter Overview
 
-WisiToken is a parser and parser generator toolkit, supporting
+WisiToken is a parser runtime and generator toolkit, supporting
 generalized LR (both LALR and LR1) and packrat parsers; the LR parser
-provides robust error recovery. The grammar can be expressed as either
-Ada source code statements, or in an EBNF file. The parser generator
-generates Ada, either plain or assuming the Emacs wisi package.
+provides robust error recovery and incremental parsing. The grammar
+can be expressed as either Ada source code statements, or in an EBNF
+file. The parser generator generates Ada, either plain or assuming the
+Emacs wisi package.
 
 At one point, ``wisi'' was short for ``Wisent Indentation engine'';
 the Emacs 'wisi' package implements an indentation engine that used to
@@ -73,134 +75,78 @@ re2c, and other lexers can be added.
 
 re2c is available from @url{http://re2c.org/}; it is also packaged in
 Mingw64 and Debian. WisiToken requires at least version 1.3.
-WisiToken assumes the executable @code{re2c} is in
+The WisiToken makefile assumes the executable @code{re2c} is in
 @code{$PATH}.
 
 @node Common grammar problems
 @chapter Common grammar problems
 
-LALR grammars are tricky. Here we describe some common problems people
+LR grammars can be tricky. Here we describe some common problems people
 run into.
 
 @menu
-* Empty choice in list::
+* Too many empty nonterms::
 @end menu
 
-@node Empty choice in list
-@section Empty choice in list
-
-Many programming languages have lists in the grammar. For example, Ada
-has lists of declarations:
-
-@example
-package_body
-  : PACKAGE name IS declaration_list BEGIN statement_list END SEMICOLON
-  ;
-
-declaration_list
-  : declaration
-  | declaration_list declaration
-  ;
-
-declaration
-  : object_declaration
-  | subprogram_declaration
-  ;; ...
-  ;
-@end example
-
-Note that the above grammar fragment does not allow an empty
-declaration_list. But Ada does, so the question is how can we add that
-to the grammar.
-
-There are four choices:
-
-@enumerate
-@item
-Add an empty declaration choice to declaration_list:
+@node Too many empty nonterms
+@section Too many empty nonterms
+If there are too many possibly empty nonterms in a right hand side,
+incremental parse can get confused.
 
+For example, in the original grammar for Emacs wisitoken grammar mode,
+a nonterminal declaration had the syntax:
 @example
-declaration_list
-  : ;; empty list
-  | declaration
-  | declaration_list declaration
-  ;
+nonterminal : IDENTIFIER ':' rhs_list [';']
 @end example
-This is now redundant; since declaration_list can be empty, the second
-choice is not needed:
+where @code{rhs_list} can be empty, because @code{rhs} can be
+empty. However, suppose we have a declaration in our language grammar
+file:
 @example
-declaration_list
-  : ;; empty list
-  | declaration_list declaration
+expression
+  : IDENTIFIER
+  | NUMBER
   ;
 @end example
-
-
-@item
-Add an empty declaration choice to declaration:
-
+and we want to insert an intermediate new nonterminal 'primary'. So we
+start typing:
 @example
-declaration
-  : ;; empty declaration
-  | object_declaration
-  | subprogram_declaration
-  ;; ...
+expression
+  : primary
+  : IDENTIFIER
+  | NUMBER
   ;
 @end example
-
-@item
-Add another rule with the empty production:
-
+At this point, this is parsed as ``nonterminal nonterminal'', where
+the @code{rhs_list} in the first nonterminal is empty, and the
+semicolon is absent. There is no syntax error. Now we type ``;'':
 @example
-package_body
-  : PACKAGE name IS declarative_part BEGIN statement_list END SEMICOLON
-  ;
-
-declarative_part
-  : ;; empty
-  | declaration_list
+expression
+  : primary
   ;
-
-declaration_list
-  : declaration
-  | declaration_list declaration
-  ;
-
-declaration
-  : object_declaration
-  | subprogram_declaration
-  ;; ...
+  : IDENTIFIER
+  | NUMBER
   ;
 @end example
-
-@item
-Add another choice in package_body that leaves out declaration_list:
+Emacs wisitoken grammar mode uses incremental parse; the first step in
+parsing this is to edit the syntax tree to insert the new ``;''. That
+leaves the token stream ``nonterminal IDENTIFIER SEMICOLON
+nonterminal'', which is a syntax error.
+
+One solution is to improve the edit step to delete the empty nonterms,
+which would allow the parser to replace them with the intended
+text. However, the empty nonterms are not adjacent to the edit point;
+they are before ``primary'', while the edit point is after it. So this
+would require deleting all empty nonterms in the entire tree, or
+guessing about what range to delete. The first option is not
+incremental; the second will fail mysteriously on a minor change to a
+complex grammar.
+
+Instead, we improved the wisitoken grammar to avoid the empty
+@code{rhs_list}, since there is no point to one anyway:
 @example
-package_body
-  : PACKAGE name IS declaration_list BEGIN statement_list END SEMICOLON
-  | PACKAGE name IS BEGIN statement_list END SEMICOLON
-  ;
+nonterminal : IDENTIFIER ':' rhs_list ['|'] [';']
 @end example
-@end enumerate
-
-Choice 1 is redundant, giving parse errors at parse time.
-Consider the following statements, where "<empty>" is used to indicate
-an empty declaration:
-
-1) package One is <empty> begin end ;
-2) package One is package One is <empty> begin end ; begin end ;
-3) package One is <empty> package One is <empty declaration> begin end ; begin 
end ;
-
-In parsing 3), the second 'package' causes a shift/reduce conflict;
-shift to start the nested declaration (as in 2), reduce to the empty
-declaration. Both are correct according to the grammar.
-
-Choice 2 leads to a shift/reduce conflict in the production for
-package_body; implementing the wisi parser as a generalized LALR parser
-allows it to handle this option.
-
-Choice 2 is the preferred choice for Ada, since it involves the least
-modifications to the original Ada grammar in the Ada reference manual.
+and @code{rhs_list} cannot be empty. Now incremental parse does not
+get confused.
 
 @node Grammar File Syntax
 @chapter Grammar File Syntax
@@ -220,6 +166,7 @@ Comments are started by @code{;;} and terminated by end of 
line.
 * Declarations::
 * Nonterminals::
 * Conditional code::
+* Optimized lists::
 @end menu
 
 @node Declarations
@@ -246,6 +193,10 @@ Raw code declarations contain arbitrary code, copied 
verbatim into the
 output. The keywords following @code{%code} determine where
 the section is output.
 
+Sometimes the generator cannot tell what context clauses for other
+packages are required in the actions package body; then you must use a
+@verb{|%code|} declaration to add them.
+
 @node Keywords
 @subsection Keywords
 @verbatim
@@ -263,53 +214,130 @@ lexers recognize them by the given string.
 @node Tokens
 @subsection Tokens
 @verbatim
-%token < kind > name regexp
+%token < kind > name regexp repair_image
 @end verbatim
 
 example:
 @verbatim
-%token <symbol> IDENTIFIER %[ ... ]%
+%token <symbol> IDENTIFIER %[ ... ]% "A_Bogus_Identifier"
 %token <punctuation> TICK "'"
 @end verbatim
 
 The syntax of the regular expression is determined by the lexer
-generator. The meaning of @code{kind} is determined by the lexer
-(@code{re2c} ignores this), with the following defined by the
-WisiToken generator. Other token kinds have no effect; they may be
-used for documentation.
+generator.
+
+@code{repair_image} is used in error repair information; it should be
+inserted by an editor at the place of the expected but missing token.
+
+The meanings of the following values of @code{kind} are defined by the
+WisiToken generator. Other token kinds may be used for documentation;
+they just declare a token name and regular expression.
 
 @table @code
+@item <string-double-one-line>
+@verbatim
+%token <string-double-one-line> STRING_LITERAL %[ ... ]%
+@end verbatim
+A string of characters that have string syntax, with double quote
+delimiters; it may not contain a new-line.
+
+The regular expression is assumed to match such a string; this is
+not checked.
+
+The restriction of not containing a new-line allows improving
+incremental parse when inserting/deleting string quotes; the text is
+affected only thru the following new-line. Without this restriction,
+when a string quote is inserted/deleted, the entire following text
+must be scanned by the lexer, and parsed.
+
+If an embedded quote is escaped by doubling it (as for Ada strings),
+include the declaration @code{%escape_delimiter_doubled <token_name>}.
+
+@item <string-single-one-line>
+@verbatim
+%token <string-single-one-line> CHARACTER_LITERAL %[ ... ]%
+@end verbatim
+A string of characters that have string syntax, with single quote
+delimiters; it may not contain a new-line.
+
+The regular expression is assumed to match such a string; this is
+not checked.
+
 @item <string-double>
 @verbatim
 %token <string-double> STRING_LITERAL %[ ... ]%
 @end verbatim
-A string of characters that have string syntax, with double quote delimiters.
+A string of characters that have string syntax, with double quote
+delimiters; it may contain a new-line.
+
+The regular expression is assumed to match such a string; this is
+not checked.
 
 @item <string-single>
 @verbatim
 %token <string-single> CHARACTER_LITERAL %[ ... ]%
 @end verbatim
-A string of characters that have string syntax, with single quote delimiters.
+A string of characters that have string syntax, with single quote
+delimiters; it may contain a new-line.
+
+The regular expression is assumed to match such a string; this is
+not checked.
 
 @item <new-line>
 @verbatim
-%token <new-line> [\n] %[ ... ]%
+%token <new-line> NEW_LINE
 @end verbatim
-Not used by the wisi lexer; required by the Ada lexer. The third
-argument is the regular expression to recognize the entire comment.
+Declares the non-grammar new-line token, used to count lines. It has
+no regexp argument; DOS and Unix line endings are added
+internally. This must be declared if line information is desired.
 
-@item <non-reporting>
+For backward compatibility with previous WisiToken versions, any
+regexp present is ignored.
+
+@item <comment-new-line>
 @verbatim
-%token <non-reporting> WHITESPACE %[ [ \t] ]%
+%token <comment-new-line> COMMENT "--"
 @end verbatim
-A token that is recognized by the lexer, but not returned to the
-parser.
+Declares a non-grammar comment token that is terminated by a line end
+or end of input. The string argument must match only the comment
+start (the example shows the Ada comment start); DOS and Unix line
+endings, and end of input, are added internally. The token includes the
+line end; no separate new-line token is produced after the comment
+token.
+
+@item <comment-one-line>
+@verbatim
+%token <comment-one-line> PLACEHOLDER "{" "}"
+@end verbatim
+Declares a non-grammar comment token that is terminated by the end
+delimiter; it may not contain a new-line. The two arguments are strings,
+and must match only the comment start and end (the example shows a
+template placeholder); DOS and Unix line endings, and end of input,
+are added internally. The delimiters must be different; this is
+checked at grammar generation time.
+
+Does not handle nested delimiters; the token is terminated by the
+first end delimiter.
 
 @item <delimited-text>
 @verbatim
 %token <delimited-text> RAW_CODE "%{" "}%"
 @end verbatim
-A token that contains arbitrary text, delimited by the two strings.
+A token that contains arbitrary text (including new-line), delimited
+by the two strings. The arguments provide the comment start and end -
+the rest of the regular expression is provided by the generator. The
+delimiters must be different; this is checked at grammar generation
+time.
+
+Does not handle nested delimiters; the token is terminated by the
+first end delimiter.
+
+@item <non-reporting>
+@verbatim
+%token <non-reporting> WHITESPACE %[ [ \t] ]%
+@end verbatim
+A token that is recognized by the lexer, but not returned to the
+parser.
 
 @end table
 
@@ -318,18 +346,6 @@ A token that contains arbitrary text, delimited by the two 
strings.
 The parser uses an error recovery algorithm when it encounters a
 syntax error; if a solution is found, the parse continues.
 
-Error recovery uses multiple tasks to take advantage of multiple CPU
-cores.  Unfortunately, this means there is a race condition; the
-solutions found can be delivered in different orders on different
-runs. This matters because each solution results in a successful
-parse, possibly with different actions (different indentation
-computed, for example).  Which solution finally succeeds depends on
-which are terminated due to identical parser stacks, which in turn
-depends on the order they were delivered.
-
-Once the syntax errors are fixed, only ambiguities in the grammar
-itself can cause a similar problem.
-
 Several grammar file declarations set parameters for the error
 recovery. If none of these parameters are present in the grammar file,
 the generated parser does not do error recovery.
@@ -340,12 +356,16 @@ back tokens. Each possible solution is given a cost, and 
enqueued to
 be checked later. Solutions are checked in cost order (lowest first).
 
 @table @code
+@item %no_error_recover
+States that there is no error recovery for this parser. This is the
+default if no error recovery parameters are specified.
+
 @item %mckenzie_check_limit <limit>
 The number of tokens past the error point that must be parsed
 successfully for a solution to be deemed successful. Smaller values
 give faster recovery; larger values give better solutions. Too large a
 value risks encountering another user error, making a solution
-impossible. 3 or 4 works well in practice.
+impossible. 3 or 4 works well in practice; default is 4.
 
 @item mckenzie_check_delta_limit <limit>
 When error recovery is entered with multiple parsers active, once a
@@ -353,6 +373,38 @@ solution has been found for one parser, the other parsers 
are allowed
 to check only @code{mckenzie_check_delta_limit} possible solutions
 before they fail. This prevents long recovery times.
 
+@item %mckenzie_zombie_limit <limit>
+When a parser encounters an error, it is not terminated immediately;
+it becomes a zombie.  The other parsers must advance zombie limit
+tokens past the error point without error before the zombie is
+terminated.
+
+Smaller values give faster parsing, because parallel parsers are
+terminated sooner; in particular, in a language with conflicts that
+occur often, a small value gives the generalized parser a chance to
+get down to 1 parser, which is a significant gain in speed.
+
+Larger values give better error solutions, because they may include
+the original intended code. For example, consider the following Ada
+code:
+@example
+procedure Pattern_1 is new Ada.Containers.Indefinite_Doubly_Linked_Lists 
(Pattern);
+procedure Pattern_2 is Ada : Integer; begin null; end;
+@end example
+Pattern_1 is a generic instantiation; Pattern_2 is a procedure body.
+There is a grammar conflict on ``is'', so a second parser is spawned
+there, with one expecting to see a generic instantiation, the other a
+procedure body. If the ``new'' is missing, the generic instantiation
+parser errors on ``Ada'', but the procedure body parser doesn't error
+until ``.'' (a variable declaration cannot have ``.'' in the name). If
+@code{mckenzie_zombie_limit} is 1, the generic instantiation parser is
+terminated before error recovery is started, so the correct solution
+(insert ``new'') is not found.
+
+Setting @code{mckenzie_zombie_limit} the same as
+@code{mckenzie_check_limit} works well in practice, unless it needs to
+be smaller; default is 4.
+
 @item %mckenzie_cost_default <insert> <delete> <push back> <ignore check fail>
 McKenzie error recovery default costs for insert, delete, push back
 single tokens, and for ignoring a semantic check failure; four
@@ -400,6 +452,7 @@ normally negative.
 @node Other declarations
 @subsection Other declarations
 @table @code
+@c alphabetical by declaration name
 @item %case_insensitive
 If present, keywords are case insensitive in the lexer.
 
@@ -408,17 +461,46 @@ Declare a known conflict.
 
 Example conflict declaration:
 @verbatim
-%conflict REDUCE/REDUCE in state abstract_limited_opt, 
abstract_limited_synchronized_opt on token NEW
+%conflict REDUCE abstract_limited_opt | REDUCE 
abstract_limited_synchronized_opt on token NEW
 @end verbatim
 
 The conflict description is output by @code{wisitoken-bnf-generate}
 when an undeclared conflict is detected. If the user decides to not
 fix the conflict, the description can be copied into the grammar
-source file, so it will be ignored next time around.
+source file, so it will be ignored next time around. Or it can be
+converted to a @code{%conflict_resolution}; see the next item.
+
+If a conflict has more than two branches, it must be declared more
+than once, first with two branches, then with one more, etc. This is
+due to the way conflicts are found during the parse table generation
+process.
 
 Resolving conflicts in the grammar can be difficult, but leaving them
 in can increase parse time and cause ambiguous parses.
 
+In Emacs, @code{wisitoken-parse_table-mode} provides a command
+@code{wisitoken-parse_table-conflict-goto} that will find a conflict
+in the parse table file, which has more information that might help
+resolve the conflict. @code{wisitoken-grammar-mode} binds that command
+to @key{^c .}.
+
+@item %conflict_resolution <conflict description> : <resolution>
+Declare a conflict resolution. The conflict description is the same as
+in a @code{%conflict} declaration; the resolution says which branch of
+the conflict to take.
+
+Only one kind of resolution is supported: a token name, which must
+match one of the token names in the conflict description; the branch
+that contains that token is taken.
+
+Example conflict resolution declaration:
+@verbatim
+%conflict REDUCE abstract_limited_opt
+ | REDUCE abstract_limited_synchronized_opt on token NEW
+ : abstract_limited_opt
+@end verbatim
+This says to always reduce to @code{abstract_limited_opt}.
+
 @item %elisp_face <name>
 Declare a name for an elisp face constant.
 
@@ -426,21 +508,46 @@ When generating Ada code for Emacs, the elisp faces 
applied by
 @code{wisi-face-apply} actions must be declared, so the elisp and Ada
 code aggree on what they mean.
 
-@item %elisp_indent <elisp name> <Ada name>
-Declare elisp and Ada names for an indent variable.
+@item %elisp_indent <elisp name> <Ada name> [<arg_count> [token_arg_index]...]
+Declare elisp and Ada names for an indent variable or function.
+
+When generating Ada code for Emacs, names used in @code{wisi-indent}
+actions that are not recognized are assumed to be elisp and Ada
+variables, with the Ada name derived from elisp name by replacing
+@code{-} with @code{_}, and converting to @code{Mixed_Case}.
 
-When generating Ada code for Emacs, the elisp indent variables used in
-@code{wisi-indent} actions must be declared, so the elisp and Ada code
-aggree on what they mean.
+Indent variables that don't meet that naming convention must be
+declared, so the elisp and Ada code agree on what they mean.
+
+Custom indent functions are implemented in Ada; the @code{elisp name}
+is the name used in grammar file actions. The declaration includes the
+argument count and which arguments are token indices. Token index
+arguments are converted to token labels if automatic labeling is in
+effect.
+
+For example, the Ada grammar declares:
+@verbatim
+%elisp_indent "ada-indent-record*" Ada_Indent_Record_1  3 1 2
+@end verbatim
+The name used in grammar file actions is @code{ada-indent-record*}.
+The Ada function name is @code{Ada_Indent_Record_1}; it must be
+visible in the generated code body, either by being declared in the
+language runtime package spec, or in a package made use-visible by a
+use-clause in a @code{%code} block. It takes three arguments; the
+first two are token indices.
 
 @item %elisp_action <elisp name> <Ada name>
 Declare elisp and Ada names for a custom action subprogram written in
 Ada.
 
-The term ``elisp'' here is historical; the name is not actually used
-by elisp in the current implementation.
+The elisp name is used in grammar actions.
+
+Note that custom Ada functions can also be declared by
+@code{%elisp_indent}; those must appear as an argument to a
+@code{wisi-indent-action} grammar action, while @code{%elisp_action}
+subprograms are themselves grammar actions.
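+
+For illustration only (these names are hypothetical, not taken from
+any shipped grammar), a declaration might look like:
+@verbatim
+%elisp_action my-check-statement My_Check_Statement
+@end verbatim
+@code{my-check-statement} can then be named in a grammar action, and
+the generated code calls @code{My_Check_Statement}.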
 
-@item end_names_optional_option <name>
+@item %end_names_optional_option <name>
 When generating Ada code for Emacs, the name of the Ada variable
 determining whether end block names are optional.
 
@@ -458,27 +565,89 @@ These names are optional in the Ada standard. Making them 
required
 improves error recovery; the recovery algorithm can use matching names
 to isolate the error.
 
-@item generate <generate_algorithm> <output_language> [text_rep]
+@item %escape_delimiter_doubled <token_name>
+The named token escapes embedded delimiters by doubling them, as for
+Ada strings. This is used by incremental parse when editing such tokens.
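+
+For example, assuming the grammar names its string literal token
+@code{STRING_LITERAL} (the token name is illustrative):
+@verbatim
+%escape_delimiter_doubled STRING_LITERAL
+@end verbatim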
 
-@code{<generate_algorithm>} is one of @code{LALR | LR1 | Packrat_Gen | 
Packrat_Proc | External}
+@item %generate <generate_algorithm> <output_language>
 
-@code{<output_language>} is one of @code{Ada | Ada_Emacs}
+@code{<generate_algorithm>} is one of @code{LALR | LR1 | Packrat_Gen |
+Packrat_Proc | External | Tree_Sitter}.
+
+@code{<output_language>} is one of @code{Ada | Ada_Emacs}.
 
 The algorithm/output_language pair declares one output source
 set. Multiple sets can be declared; they are all generated together.
 
-@code{text_rep} determines how the parse table is represented; if
-present, it is in a text file that is loaded at parser run time. If
-absent, it is in the code. For very large parse tables, such as for an
-LR1 parser for a large language like Ada, the text representation may
-be needed, because the Ada compiler can't handle the very large number
-of statements that represent the parser table in the code. The text
-file can take a long time to read at parser startup (a few seconds for
-the Ada language).
+More detail on @code{generate_algorithm}:
+@table @code
+@item LALR | LR1
+Generates a parse table using the LALR or LR1 algorithms (see
+@code{https://en.wikipedia.org/wiki/LR_parser}). At runtime, an
+error correcting generalized LR parser uses the parse table to parse
+the input text.
+
+An additional generate parameter @code{text_rep} determines how the
+parse table is represented; if present, it is in a text file that is
+loaded at parser run time. If absent, it is in the code. For very
+large parse tables, such as for an LR1 parser for a large language
+like Ada, the text representation may be needed, because the Ada
+compiler can't handle the very large number of statements that
+represent the parser table in the code. The text file can take a long
+time to read at parser startup (a few seconds for the Ada language).
+
+@item Packrat_Gen
+Generates Ada code that implements a packrat parser. Left recursive
+grammar productions are not supported. See
+@code{https://en.wikipedia.org/wiki/Parsing_expression_grammar}.
+
+@item Packrat_Proc
+Generates Ada code that interprets the grammar using a packrat
+parser. Left recursive grammar productions are not supported. See
+@code{https://en.wikipedia.org/wiki/Parsing_expression_grammar}.
+
+This uses the same parsing algorithm as @code{Packrat_Gen}; it is
+slower, but easier to debug.
+
+@item External
+Generates code that implements the grammar actions only, for use with
+a parser that is generated by an external program.
+
+@item Tree_Sitter
+Translates the grammar file to a tree-sitter grammar file, and
+generates code that implements the grammar actions.
 
-@item %language_runtime
+@end table
+
+@code{<output_language>} determines both what code is generated, and
+what language is used for the grammar actions. For @code{Ada}, the
+grammar action language is Ada, and it is copied verbatim into the
+generated grammar action code. For @code{Ada_Emacs}, the grammar
+action language is elisp, which is translated to Ada assuming it
+will be used with the GNU ELPA package @code{wisi} (see
+@code{https://elpa.gnu.org/packages/wisi.html}).
+
+When the output language is @code{Ada_Emacs}, an additional parameter is
+required: @code{Process | Module}. For @code{Process}, the generated
+code runs in an Emacs background process. For @code{Module}, the
+generated code runs as an Emacs loadable module (currently not
+supported).
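+
+As an illustrative sketch combining the parameters described above
+(a particular grammar file may also name other parameters, such as
+the lexer), an LR1 parser for an Emacs background process with the
+parse table in a text file might be declared:
+@verbatim
+%generate LR1 Ada_Emacs Process text_rep
+@end verbatim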
+
+@item %language_runtime <string>
 Specify an alternate name for the language runtime package; the
-default is @code{Wisi.<language_name>}.
+default is @code{Wisi.<language_name>}, where @code{<language_name>}
+is the simple name of the language grammar file, without the file
+extension. The value must be enclosed in quotes.
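+
+For example (the package name here is hypothetical):
+@verbatim
+%language_runtime "Wisi.My_Language"
+@end verbatim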
+
+@item %lr1_hash_table_size <integer>
+Specify the size of the hash table used when generating an LR1 parse
+table. The default size is 113; a larger size can decrease generate
+time for large languages, but only by 10%. A prime larger than the
+requested size is used.
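+
+For example, to request a larger table (the value is illustrative):
+@verbatim
+%lr1_hash_table_size 1021
+@end verbatim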
+
+@item %max_parallel <integer>
+Maximum number of parallel parsers during main parsing. Default is 15;
+a language with many conflicts may need more.
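+
+For example, to allow more parallel parsers (the value is
+illustrative):
+@verbatim
+%max_parallel 30
+@end verbatim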
 
 @item %meta_syntax [BNF | EBNF]
 Declares the syntax used by the grammar file. BNF is a minor extension
@@ -506,12 +675,21 @@ hard; @code{%partial_recursion} tells WisiToken to use a 
simpler
 approximate calculation. This will affect the quality of the error
 recovery, but it will still be robust.
 
-@item %start
+@item %start <nonterminal>
 The start token for the grammar.
 
-@item re2c_regexp <name> <value>
-Declare a named regular expression with re2c name and syntax. The name
-may then occur in another re2c regular expression.
+@item %suppress <nonterminal> <warning label>
+Suppress the indicated warning for the nonterminal.
+
+@code{<warning label>} is a copy of part of the text of the warning
+message. The supported warnings are:
+@itemize
+@item may never match; it shares a prefix
+@end itemize
+
+@item %lexer_regexp <name> <value>
+Declare a named regular expression, using the current lexer's regular
+expression syntax. The name may then occur in another lexer regular
+expression.
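+
+For example, with the re2c lexer (the name and pattern are
+illustrative):
+@verbatim
+%lexer_regexp UPPER_CASE_LETTER [A-Z]
+@end verbatim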
 @end table
 
 @node Nonterminals
@@ -545,7 +723,7 @@ The actions are written in output-language code; for 
@code{Ada_Emacs}
 output, this is elisp (a hold-over from when WisiToken only output
 elisp code).
 
-If using BNF:
+If using BNF, the syntax of an @verb{|rhs_item|} is:
 @verbatim
 rhs_item : token ;
 @end verbatim
@@ -599,18 +777,85 @@ more times.
 It is sometimes necessary to include or exclude some declarations
 and portions of rules based on the choice of lexer or parser.
 
-Therefore WisiToken supports @code{%if ... %end if} in the grammar file:
+Therefore WisiToken supports @code{%if ... %elsif ... %end if} in the grammar 
file:
 @verbatim
 %if {lexer | parser} = {<lexer> | <generate_algorithm>}
 ...
+%elsif {lexer | parser} = {<lexer> | <generate_algorithm>}
+...
 %end if
 @end verbatim
 
-The lines between @code{%if} and @code{%end if} are ignored if the
-current lexer or parser is not the one specified in the @code{%if}
-condition.
+The lines between @code{%if}, @code{%elsif}, and @code{%end if} are
+ignored if the current lexer or parser is not the one specified in
+the corresponding @code{%if} or @code{%elsif} condition.
 
 @code{%if ... %end if} cannot be nested.
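+
+For example, a declaration that only applies when generating the LR1
+parser (the declaration and value shown are illustrative):
+@verbatim
+%if parser = LR1
+%lr1_hash_table_size 1021
+%end if
+@end verbatim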
 
-@c FIXME: doc language_fixes etc.
+@c FIXME: overview of error correction, and its role in parsing,
+@c consequences for indent, etc. Clean up error correction paper, add
+@c to elpa :docs, reference here.
+
+@node Optimized lists
+@section Optimized lists
+A list of tokens is often specified as:
+@example
+declarations
+  : declaration
+  | declarations declaration
+  ;
+@end example
+If the input syntax consists of a single long list of declarations,
+the resulting syntax tree is a linear list at the top level. This
+causes performance problems in incremental parse; editing the syntax
+tree requires breaking down the entire list, taking time proportional
+to the input text length.
+
+This is improved by recognizing the following construct:
+@example
+declarations
+  : declaration
+  | declarations declaration
+  | declarations declarations
+  ;
+@end example
+This is called an ``optimized list''. It has conflicts, which are
+resolved by choosing to reduce to declarations; they do not need to be
+declared in the grammar file. In a batch parse, this produces the same
+syntax tree as the first definition of ``declarations''. However, when
+the tree is edited for incremental parse, the list is broken into two
+sublists, rather than breaking down the entire list.
+
+The various EBNF repeats all produce optimized lists.
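+
+For example, a sketch of the same list written with an EBNF repeat
+(assuming the @code{+} ``one or more'' form described earlier):
+@example
+declarations
+  : declaration+
+  ;
+@end example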
+
+@node Language-specific parser runtime functions
+@chapter Language-specific parser runtime functions
+The parser has several hooks for language-specific functions:
+
+@table @code
+@item Language_Fixes
+Called by error correction for each error (either the original error,
+or another encountered while checking a proposed solution); it should
+enqueue new solutions if it recognizes the error.
+
+For example, in Ada, compound statements like @code{if then else end
+if;} or @code{loop end loop;} have a matching keyword after the
+@code{end}. So if the error is an incorrect keyword after @code{end},
+@code{Language_Fixes} can recognize that, push back the @code{end},
+and insert @code{end <keyword>;}.
+
+@item Language_Matching_Begin_Tokens
+Called by error correction for each proposed solution; it should
+return a token sequence to precede the current token.
+
+For example, in Ada, if the current token is @code{end} and the next
+token is @code{if}, @code{Language_Matching_Begin_Tokens} should
+return @code{if}.
+
+@item Language_String_ID_Set
+Called during language initialization; return a set of @code{Token_ID}
+that can contain the given string literal ID. The set is then used by
+error correction when correcting missing string quotes.
+@end table
+
 @bye
diff --git a/wisitoken-wisi_ada.adb b/wisitoken-wisi_ada.adb
index dd5e072a8c..1178ca8789 100644
--- a/wisitoken-wisi_ada.adb
+++ b/wisitoken-wisi_ada.adb
@@ -2,7 +2,7 @@
 --
 --  see spec
 --
---  Copyright (C) 2013, 2014, 2015, 2017 - 2020 Free Software Foundation, Inc.
+--  Copyright (C) 2013, 2014, 2015, 2017 - 2020, 2022 Free Software 
Foundation, Inc.
 --
 --  This file is part of the WisiToken package.
 --
@@ -45,19 +45,19 @@ package body WisiToken.Wisi_Ada is
       end return;
    end "&";
 
-   function "+" (Tokens : in Token_ID_Arrays.Vector; Action : in 
Syntax_Trees.Semantic_Action) return Right_Hand_Side
+   function "+" (Tokens : in Token_ID_Arrays.Vector; Action : in 
Syntax_Trees.Post_Parse_Action) return Right_Hand_Side
    is begin
-      return (Tokens, Recursion => <>, Action => Action, Check => null);
+      return (Tokens, Recursion => <>, Post_Parse_Action => Action, 
In_Parse_Action => null);
    end "+";
 
-   function "+" (Tokens : in Token_ID; Action : in 
Syntax_Trees.Semantic_Action) return Right_Hand_Side
+   function "+" (Tokens : in Token_ID; Action : in 
Syntax_Trees.Post_Parse_Action) return Right_Hand_Side
    is begin
-      return (Only (Tokens), Recursion => <>, Action => Action, Check => null);
+      return (Only (Tokens), Recursion => <>, Post_Parse_Action => Action, 
In_Parse_Action => null);
    end "+";
 
-   function "+" (Action : in Syntax_Trees.Semantic_Action) return 
Right_Hand_Side
+   function "+" (Action : in Syntax_Trees.Post_Parse_Action) return 
Right_Hand_Side
    is begin
-      return (Tokens => <>, Recursion => <>, Action => Action, Check => null);
+      return (Tokens => <>, Recursion => <>, Post_Parse_Action => Action, 
In_Parse_Action => null);
    end "+";
 
    function Only (Item : in WisiToken.Productions.Right_Hand_Side) return 
WisiToken.Productions.RHS_Arrays.Vector
@@ -79,7 +79,7 @@ package body WisiToken.Wisi_Ada is
 
    function "<=" (LHS : in Token_ID; RHSs : in 
WisiToken.Productions.RHS_Arrays.Vector) return Instance
    is begin
-      return (LHS, RHSs);
+      return (LHS, Optimized_List => False, RHSs => RHSs);
    end "<=";
 
    function Only (Subject : in Instance) return Prod_Arrays.Vector
@@ -119,11 +119,7 @@ package body WisiToken.Wisi_Ada is
    function "and" (Left : in Prod_Arrays.Vector; Right : in Instance) return 
Prod_Arrays.Vector
    is begin
       return Result : Prod_Arrays.Vector := Left do
-         if Right.LHS < Result.First_Index then
-            Result.Set_First_Last (Right.LHS, Result.Last_Index);
-         elsif Right.LHS > Result.Last_Index then
-            Result.Set_First_Last (Result.First_Index, Right.LHS);
-         end if;
+         Result.Extend (Right.LHS);
 
          if Result (Right.LHS).LHS = Invalid_Token_ID then
             Result (Right.LHS) := Right;
@@ -136,16 +132,8 @@ package body WisiToken.Wisi_Ada is
    function "and" (Left : in Prod_Arrays.Vector; Right : in 
Prod_Arrays.Vector) return Prod_Arrays.Vector
    is begin
       return Result : Prod_Arrays.Vector := Left do
-         if Right.First_Index < Result.First_Index then
-            Result.Set_First_Last (Right.First_Index, Result.Last_Index);
-         elsif Right.First_Index > Result.Last_Index then
-            Result.Set_First_Last (Result.First_Index, Right.First_Index);
-         end if;
-         if Right.Last_Index < Result.First_Index then
-            Result.Set_First_Last (Right.Last_Index, Result.Last_Index);
-         elsif Right.Last_Index > Result.Last_Index then
-            Result.Set_First_Last (Result.First_Index, Right.Last_Index);
-         end if;
+         Result.Extend (Right.First_Index);
+         Result.Extend (Right.Last_Index);
 
          for P of Right loop
             if Result (P.LHS).LHS = Invalid_Token_ID then
diff --git a/wisitoken-wisi_ada.ads b/wisitoken-wisi_ada.ads
index a78643f33b..0452f1e8c7 100644
--- a/wisitoken-wisi_ada.ads
+++ b/wisitoken-wisi_ada.ads
@@ -2,7 +2,7 @@
 --
 --  Type and operations for building a grammar directly in Ada source.
 --
---  Copyright (C) 2003, 2013 - 2015, 2017, 2018 Free Software Foundation, Inc.
+--  Copyright (C) 2003, 2013 - 2015, 2017, 2018, 2020 Free Software 
Foundation, Inc.
 --
 --  This file is part of the WisiToken package.
 --
@@ -36,13 +36,13 @@ package WisiToken.Wisi_Ada is
 
    function "+"
      (Tokens : in WisiToken.Token_ID_Arrays.Vector;
-      Action : in WisiToken.Syntax_Trees.Semantic_Action)
+      Action : in WisiToken.Syntax_Trees.Post_Parse_Action)
      return WisiToken.Productions.Right_Hand_Side;
    function "+"
      (Tokens : in Token_ID;
-      Action : in WisiToken.Syntax_Trees.Semantic_Action)
+      Action : in WisiToken.Syntax_Trees.Post_Parse_Action)
      return WisiToken.Productions.Right_Hand_Side;
-   function "+" (Action : in WisiToken.Syntax_Trees.Semantic_Action) return 
WisiToken.Productions.Right_Hand_Side;
+   function "+" (Action : in WisiToken.Syntax_Trees.Post_Parse_Action) return 
WisiToken.Productions.Right_Hand_Side;
    --  Create the right hand side of a production.
 
    function Only (Item : in WisiToken.Productions.Right_Hand_Side) return 
WisiToken.Productions.RHS_Arrays.Vector;
diff --git a/wisitoken.adb b/wisitoken.adb
index 9efeb18619..558ce13640 100644
--- a/wisitoken.adb
+++ b/wisitoken.adb
@@ -2,7 +2,7 @@
 --
 --  See spec
 --
---  Copyright (C) 2009, 2014-2015, 2017 - 2020 Free Software Foundation, Inc.
+--  Copyright (C) 2009, 2014-2015, 2017 - 2022 Free Software Foundation, Inc.
 --
 --  This file is part of the WisiToken package.
 --
@@ -26,6 +26,8 @@
 --  executable file might be covered by the GNU Public License.
 -------------------------------------------------------------------------------
 
+with Ada.Characters.Handling;
+with Ada.Streams.Stream_IO;
 with Ada.Strings.Fixed;
 package body WisiToken is
 
@@ -82,6 +84,15 @@ package body WisiToken is
       end return;
    end To_Vector;
 
+   function To_Array (Item : in Token_ID_Arrays.Vector) return Token_ID_Array
+   is begin
+      return Result : Token_ID_Array (Item.First_Index .. Item.Last_Index) do
+         for I in Result'Range loop
+            Result (I) := Item (I);
+         end loop;
+      end return;
+   end To_Array;
+
    function Shared_Prefix (A, B : in Token_ID_Arrays.Vector) return Natural
    is
       use all type Ada.Containers.Count_Type;
@@ -201,6 +212,11 @@ package body WisiToken is
       return '(' & Trimmed_Image (Item.LHS) & ',' & Natural'Image (Item.RHS) & 
')';
    end Image;
 
+   function Image (Item : in Production_ID; Descriptor : in 
WisiToken.Descriptor) return String
+   is begin
+      return Image (Item.LHS, Descriptor) & '_' & Trimmed_Image (Item.RHS);
+   end Image;
+
    function Trimmed_Image (Item : in Production_ID) return String
    is begin
       return Trimmed_Image (Item.LHS) & '.' & Trimmed_Image (Item.RHS);
@@ -300,22 +316,57 @@ package body WisiToken is
       end if;
    end Put;
 
+   procedure Report_Memory (Trace : in out WisiToken.Trace'Class; Prefix : in 
Boolean)
+   is
+      use GNATCOLL.Memory;
+      Memory_Use : constant Watermark_Info := Get_Ada_Allocations;
+
+      procedure Trace_Put_Line (S : in String)
+      is begin
+         Trace.Put (S, Prefix);
+         Trace.New_Line;
+      end Trace_Put_Line;
+
+      procedure Trace_Put (S : in String)
+      is begin
+         Trace.Put (S, Prefix);
+      end Trace_Put;
+
+      procedure Trace_Dump is new Redirectable_Dump
+        (Put_Line => Trace_Put_Line,
+         Put      => Trace_Put);
+   begin
+      if WisiToken.Trace_Memory > 1 then
+         Trace_Dump (4 * Trace_Memory, Report => Memory_Usage);
+      end if;
+
+      Trace.Put
+        ("(message ""memory: high" &
+           Byte_Count'Image (Memory_Use.High) &
+           " current" &
+           Byte_Count'Image (Memory_Use.Current - Memory_Baseline) & """)",
+         Prefix => False);
+      Trace.New_Line;
+   end Report_Memory;
+
    function Error_Message
      (File_Name : in String;
-      Line      : in Line_Number_Type;
+      Line      : in Base_Line_Number_Type;
       Column    : in Ada.Text_IO.Count;
       Message   : in String)
      return String
-   is begin
+   is
+      use all type Ada.Text_IO.Count;
+   begin
       return File_Name & ":" &
         Trimmed_Image (if Line = Invalid_Line_Number then Integer'(0) else 
Integer (Line)) & ":" &
-        Trimmed_Image (Integer (Column)) & ": " &
+        Trimmed_Image (Integer (Column + 1)) & ": " &
         Message;
    end Error_Message;
 
    function Image (Item : in Buffer_Region) return String
    is begin
-      return "(" & Trimmed_Image (Integer (Item.First)) & " ." & 
Buffer_Pos'Image (Item.Last) & ")";
+      return "(" & Trimmed_Image (Item.First) & " ." & Base_Buffer_Pos'Image 
(Item.Last) & ")";
    end Image;
 
    function "and" (Left, Right : in Buffer_Region) return Buffer_Region
@@ -323,45 +374,178 @@ package body WisiToken is
       return (Buffer_Pos'Min (Left.First, Right.First), Buffer_Pos'Max 
(Left.Last, Right.Last));
    end "and";
 
-   function Image
-     (Item       : in Base_Token;
-      Descriptor : in WisiToken.Descriptor)
-     return String
+   function Contains
+     (Outer, Inner   : in Buffer_Region;
+      First_Boundary : in Boundary := Inclusive;
+      Last_Boundary  : in Boundary := Inclusive)
+     return Boolean
    is
-      ID_Image : constant String := WisiToken.Image (Item.ID, Descriptor);
+      Result : Boolean;
    begin
-      if Item.Char_Region = Null_Buffer_Region then
-         return "(" & ID_Image & ")";
+      case First_Boundary is
+      when Inclusive =>
+         Result := Outer.First <= Inner.First;
+      when Exclusive =>
+         Result := Outer.First < Inner.First;
+      end case;
+
+      case Last_Boundary is
+      when Inclusive =>
+         Result := @ and Outer.Last >= Inner.Last;
+      when Exclusive =>
+         Result := @ and Outer.Last > Inner.Last;
+      end case;
+      return Result;
+   end Contains;
 
+   function Overlaps (A, B : in Buffer_Region) return Boolean
+   is begin
+      if Length (A) > 0 and Length (B) > 0 then
+         return Contains (B, A.First) or Contains (B, A.Last) or Contains (A, 
B.First) or Contains (A, B.Last);
       else
-         return "(" & ID_Image & ", " & Image (Item.Char_Region) & ")";
+         return False;
       end if;
-   end Image;
+   end Overlaps;
 
-   function Image
-     (Token      : in Base_Token_Index;
-      Terminals  : in Base_Token_Arrays.Vector;
-      Descriptor : in WisiToken.Descriptor)
-     return String
-   is begin
-      if Token = Invalid_Token_Index then
-         return "<invalid_token_index>";
+   function Trimmed_Image (Item : in Base_Line_Number_Type) return String
+   is
+      function Base_Trimmed_Image is new SAL.Gen_Trimmed_Image 
(Line_Number_Type);
+   begin
+      if Item = Invalid_Line_Number then
+         return "-";
       else
-         return Token_Index'Image (Token) & ":" & Image (Terminals (Token), 
Descriptor);
+         return Base_Trimmed_Image (Item);
       end if;
-   end Image;
+   end Trimmed_Image;
 
-   function Image
-     (Item       : in Recover_Token;
-      Descriptor : in WisiToken.Descriptor)
-     return String
+   function Image (Item : in Line_Region) return String
    is begin
-      return
-        (if Item.Min_Terminal_Index = Invalid_Token_Index
-         then ""
-         else Trimmed_Image (Item.Min_Terminal_Index) & ":") &
-        "(" & Image (Item.ID, Descriptor) &
-        (if Item.Byte_Region = Null_Buffer_Region then "" else ", " & Image 
(Item.Byte_Region)) & ")";
+      return "(" & Trimmed_Image (Item.First) & "," & Item.Last'Image & ")";
    end Image;
 
+   function "+" (Left : in Line_Region; Right : in Base_Line_Number_Type) 
return Line_Region
+   is begin
+      return (Left.First + Right, Left.Last + Right);
+   end "+";
+
+   procedure Enable_Trace (Config : in String)
+   is
+      use Ada.Characters.Handling;
+      use Ada.Strings.Fixed;
+      Name_First : Integer := Config'First;
+      Name_Last  : Integer;
+
+      Value_First : Integer;
+      Value_Last  : Integer;
+   begin
+      loop
+         Name_Last := Index (Config, "=", Name_First);
+         exit when Name_Last = 0;
+
+         Value_First := Name_Last + 1;
+         Name_Last   := Name_Last - 1;
+         Value_Last  := Index (Config, " ", Value_First);
+         if Value_Last = 0 then
+            Value_Last := Config'Last;
+         end if;
+         declare
+            Name : constant String := To_Lower (Config (Name_First .. 
Name_Last));
+
+            function Get_Value return Integer
+            is begin
+               return Integer'Value (Config (Value_First .. Value_Last));
+            exception
+            when Constraint_Error =>
+               raise User_Error with "expecting integer trace value, found '" &
+                 Config (Value_First .. Value_Last) & "'";
+            end Get_Value;
+
+            Value : constant Integer := Get_Value;
+         begin
+            --  Trace var alphabetical order. When modify this, also modify
+            --  Enable_Trace_Help.
+            if Name = "debug" then
+               Debug_Mode := Value > 0;
+            elsif Name = "action" then
+               Trace_Action := Value;
+            elsif Name = "conflicts" or Name = "generate_conflicts" then
+               Trace_Generate_Conflicts := Value;
+            elsif Name = "ebnf" or Name = "generate_ebnf" then
+               Trace_Generate_EBNF := Value;
+            elsif Name = "generate" then
+               Trace_Generate := Value;
+            elsif Name = "minimal_complete" or Name = 
"generate_minimal_complete" then
+               Trace_Generate_Minimal_Complete := Value;
+            elsif Name = "table" or Name = "generate_table" then
+               Trace_Generate_Table := Value;
+            elsif Name = "incremental" or Name = "incremental_parse" then
+               Trace_Incremental_Parse := Value;
+            elsif Name = "lexer" then
+               Trace_Lexer := Value;
+            elsif Name = "mckenzie" then
+               Trace_McKenzie := Value;
+            elsif Name = "memory" then
+               Trace_Memory := Value;
+            elsif Name = "parse" then
+               Trace_Parse := Value;
+            elsif Name = "tests" or Name = "test" then
+               Trace_Tests := Value;
+            elsif Name = "time" then
+               Trace_Time := Value > 0;
+            else
+               raise User_Error with "expecting trace name, found '" & Config 
(Name_First .. Name_Last) & "'";
+            end if;
+         end;
+
+         Name_First := Value_Last + 1;
+         exit when Name_First > Config'Last;
+      end loop;
+   end Enable_Trace;
+
+   procedure Enable_Trace_Help
+   is
+      use Ada.Text_IO;
+   begin
+      Put_Line (Current_Error, "debug=0|1 - show stack trace on exception, 
other debug settings");
+      Put_Line (Current_Error, "action=n - verbosity during parse actions");
+      Put_Line (Current_Error, "ebnf=n generate_ebnf=n - verbosity during 
translate EBNF to BNF");
+      Put_Line (Current_Error, "generate=n - top level verbosity during 
grammar generation");
+      Put_Line (Current_Error, "minimal_complete=n generate_minimal_complete=n 
- verbosity during minimal_complete");
+      Put_Line (Current_Error, "table=n generate_table=n - verbosity during 
generate parse table");
+      Put_Line (Current_Error, "incremental=n incremental_parse=n - verbosity 
during edit_tree");
+      Put_Line (Current_Error, "lexer=n - verbosity during lexing");
+      Put_Line (Current_Error, "mckenzie=n - verbosity during error recover");
+      Put_Line (Current_Error, "parse=n - verbosity during parsing");
+      Put_Line (Current_Error, "test=n - verbosity during unit tests");
+      Put_Line (Current_Error, "time=n - output times of various operations");
+   end Enable_Trace_Help;
+
+   function Next_Value
+     (Stream : not null access Ada.Streams.Root_Stream_Type'Class;
+      Delims : in              Ada.Strings.Maps.Character_Set)
+     return String
+   is
+      use Ada.Strings.Unbounded;
+      Result : Unbounded_String;
+      Char   : Character;
+   begin
+      loop
+         Character'Read (Stream, Char);
+         exit when not Ada.Strings.Maps.Is_In (Char, Delims);
+      end loop;
+      Append (Result, Char);
+
+      loop
+         Character'Read (Stream, Char);
+         if Ada.Strings.Maps.Is_In (Char, Delims) then
+            return To_String (Result);
+         else
+            Append (Result, Char);
+         end if;
+      end loop;
+   exception
+   when Ada.Streams.Stream_IO.End_Error =>
+      return To_String (Result);
+   end Next_Value;
+
 end WisiToken;
diff --git a/wisitoken.ads b/wisitoken.ads
index 85c00cab72..3606c8387d 100644
--- a/wisitoken.ads
+++ b/wisitoken.ads
@@ -16,7 +16,17 @@
 --  Sethi, and Ullman (aka: "The [Red] Dragon Book" due to the dragon
 --  on the cover).
 --
---  Copyright (C) 2009, 2010, 2013 - 2015, 2017 - 2021 Free Software 
Foundation, Inc.
+--  [gnu_coding] https://www.gnu.org/prep/standards/standards.html#Errors
+--
+--  [Lahav 2004] - Elad Lahav. Efficient Semantic Analysis for Text
+--  Editors. final project for CS842 School of Computer Science,
+--  University of Waterloo.
+--
+--  [Wagner Graham 1998] - Tim A. Wagner and Susan L. Graham.
+--  Efficient and flexible incremental parsing. ACM Transactions on
+--  Programming Languages and Systems,20(5):980-1013, 1998
+--
+--  Copyright (C) 2009, 2010, 2013 - 2015, 2017 - 2022 Free Software 
Foundation, Inc.
 --
 --  This file is part of the WisiToken package.
 --
@@ -41,16 +51,15 @@
 pragma License (Modified_GPL);
 
 with Ada.Containers.Doubly_Linked_Lists;
-with Ada.Containers;
+with Ada.Streams;
+with Ada.Strings.Maps;
 with Ada.Strings.Unbounded;
 with Ada.Text_IO;
-with Ada.Unchecked_Deallocation;
+with GNATCOLL.Memory;
 with SAL.Gen_Trimmed_Image;
 with SAL.Gen_Unbounded_Definite_Queues;
 with SAL.Gen_Unbounded_Definite_Vectors.Gen_Image;
 with SAL.Gen_Unbounded_Definite_Vectors.Gen_Image_Aux;
-with SAL.Gen_Unconstrained_Array_Image;
-with SAL.Generic_Decimal_Image;
 package WisiToken is
 
    Partial_Parse : exception; -- a partial parse terminated.
@@ -75,6 +84,7 @@ package WisiToken is
    type Unknown_State_Index is new Integer range -1 .. Integer'Last;
    subtype State_Index is Unknown_State_Index range 0 .. 
Unknown_State_Index'Last;
    Unknown_State : constant Unknown_State_Index := -1;
+   Accept_State  : constant State_Index         := State_Index'Last;
 
    function Trimmed_Image is new SAL.Gen_Trimmed_Image (Unknown_State_Index);
 
@@ -87,7 +97,11 @@ package WisiToken is
    ----------
    --  Token IDs
 
-   type Token_ID is range 0 .. Integer'Last; -- 0 origin to match elisp array
+   type Token_ID is range 0 .. 2**15 - 1;
+   for Token_ID'Size use 16;
+   --  0 origin to match elisp array, 16 bits to reduce storage, signed
+   --  to match generics. Biggest language will have < 500 token ids; Ada
+   --  2020 has 481, Java 19 has 321.
 
    Invalid_Token_ID : constant Token_ID := Token_ID'Last;
 
@@ -100,7 +114,8 @@ package WisiToken is
       Last_Terminal     : Token_ID;
       First_Nonterminal : Token_ID;
       Last_Nonterminal  : Token_ID;
-      EOI_ID            : Token_ID;
+      SOI_ID            : Token_ID; -- start of input
+      EOI_ID            : Token_ID; -- end of input
       Accept_ID         : Token_ID)
    is record
       --  Tokens in the range Token_ID'First .. First_Terminal - 1 are
@@ -110,6 +125,10 @@ package WisiToken is
       --  Tokens in the range Last_Terminal + 1 .. Last_Nonterminal are
       --  the nonterminals of a grammar.
       --
+      --  SOI_ID is not reported by the lexer, and is not present in
+      --  grammar productions; it is hard-coded by the syntax tree
+      --  utilities.
+      --
       --  Components are discriminants if they can be specified statically.
 
       Case_Insensitive : Boolean;  -- keywords and names
@@ -123,8 +142,11 @@ package WisiToken is
       --  have two kinds of string literals, set one or both of these to
       --  Invalid_Token_ID.
 
-      Image : Token_ID_Array_String (Token_ID'First .. Last_Nonterminal);
+      Image : Token_ID_Array_String (Token_ID'First .. SOI_ID);
       --  User names for tokens.
+      --
+      --  See wisitoken-bnf-generate_utils.adb Initialize for actual order
+      --  of tokens.
 
       Terminal_Image_Width : Integer;
       Image_Width          : Integer; --  max width of Image
@@ -137,7 +159,7 @@ package WisiToken is
       --  Last_Lookahead. After the LR table is generated, Last_Lookahead is
       --  no longer used.
    end record;
-   type Descriptor_Access is access Descriptor;
+   type Descriptor_Access is access all Descriptor;
    type Descriptor_Access_Constant is access constant Descriptor;
 
    function Padded_Image (Item : in Token_ID; Desc : in Descriptor) return 
String;
@@ -153,6 +175,12 @@ package WisiToken is
    --  Put user readable token list (token_id'first ..
    --  descriptor.last_nonterminal) to Ada.Text_IO.Current_Output
 
+   function Is_Terminal (ID : in Token_ID; Descriptor : in 
WisiToken.Descriptor) return Boolean
+   is (ID in Descriptor.First_Terminal .. Descriptor.Last_Terminal);
+
+   function Is_Nonterminal (ID : in Token_ID; Descriptor : in 
WisiToken.Descriptor) return Boolean
+   is (ID in Descriptor.First_Nonterminal .. Descriptor.Last_Nonterminal);
+
    function Find_ID (Descriptor : in WisiToken.Descriptor; Name : in String) 
return Token_ID;
    --  Return index of Name in Descriptor.Image. If not found, raise 
Programmer_Error.
 
@@ -171,6 +199,8 @@ package WisiToken is
    procedure To_Vector (Item : in Token_ID_Array; Vector : in out 
Token_ID_Arrays.Vector);
    function To_Vector (Item : in Token_ID_Array) return Token_ID_Arrays.Vector;
 
+   function To_Array (Item : in Token_ID_Arrays.Vector) return Token_ID_Array;
+
    function Shared_Prefix (A, B : in Token_ID_Arrays.Vector) return Natural;
    --  Return last index in A of a prefix shared between A, B; 0 if none.
 
@@ -232,6 +262,9 @@ package WisiToken is
    function Image (Item : in Production_ID) return String;
    --  Ada positional aggregate syntax, for code generation.
 
+   function Image (Item : in Production_ID; Descriptor : in 
WisiToken.Descriptor) return String;
+   --  Nonterm_name_rhs_index, for messages.
+
    function Trimmed_Image (Item : in Production_ID) return String;
    --  Nonterm.rhs_index, both integers, no leading or trailing space;
    --  for parse table output and diagnostics.
@@ -268,11 +301,19 @@ package WisiToken is
    ----------
    --  Tokens
 
-   type Base_Buffer_Pos is range 0 .. Integer'Last;
+   type Base_Buffer_Pos is new Integer;
+   --  Token shift amounts in edited source can be arbitrarily large
+   --  positive or negative.
+
+   subtype Zero_Buffer_Pos is Base_Buffer_Pos range 0 .. Base_Buffer_Pos'Last; 
-- allow 0
    subtype Buffer_Pos is Base_Buffer_Pos range 1 .. Base_Buffer_Pos'Last; -- 
match Emacs buffer origin.
 
+   type Buffer_Pos_Access is access all Buffer_Pos;
+
    package Buffer_Pos_Lists is new Ada.Containers.Doubly_Linked_Lists 
(Buffer_Pos);
 
+   function Trimmed_Image is new SAL.Gen_Trimmed_Image (Base_Buffer_Pos);
+
    type Buffer_Region is record
       First : Buffer_Pos;
       Last  : Base_Buffer_Pos; --  allow representing null range.
@@ -281,145 +322,72 @@ package WisiToken is
    Invalid_Buffer_Pos : constant Buffer_Pos    := Buffer_Pos'Last;
    Null_Buffer_Region : constant Buffer_Region := (Buffer_Pos'Last, 
Buffer_Pos'First);
 
-   function Length (Region : in Buffer_Region) return Natural is (Natural 
(Region.Last - Region.First + 1));
-
-   function Inside (Pos : in Buffer_Pos; Region : in Buffer_Region) return 
Boolean
-     is (Region.First <= Pos and Pos <= Region.Last);
-
-   function Image (Item : in Buffer_Region) return String;
-
-   function "and" (Left, Right : in Buffer_Region) return Buffer_Region;
-   --  Return region enclosing both Left and Right.
-
-   type Line_Number_Type is range 1 .. Natural'Last; -- Match Emacs buffer 
line numbers.
-   function Trimmed_Image is new SAL.Gen_Trimmed_Image (Line_Number_Type);
+   function Length (Region : in Buffer_Region) return Natural is
+     ((if Region.Last >= Region.First
+       then Natural (Region.Last - Region.First + 1)
+       else 0));
 
-   Invalid_Line_Number : constant Line_Number_Type := Line_Number_Type'Last;
+   function Contains (Region : in Buffer_Region; Pos : in Base_Buffer_Pos) 
return Boolean
+   is (Region.First <= Pos and Pos <= Region.Last);
 
-   --  Syntax tree nodes.
-   type Node_Index is range 0 .. Integer'Last;
-   subtype Valid_Node_Index is Node_Index range 1 .. Node_Index'Last;
-   --  Note that Valid_Node_Index includes Deleted_Child.
+   type Boundary is (Inclusive, Exclusive);
 
-   Invalid_Node_Index : constant Node_Index := Node_Index'First;
-   Deleted_Child      : constant Node_Index := Node_Index'Last;
-
-   type Valid_Node_Index_Array is array (Positive_Index_Type range <>) of 
Valid_Node_Index;
-   --  Index matches Base_Token_Array, Augmented_Token_Array
+   function Contains
+     (Outer, Inner   : in Buffer_Region;
+      First_Boundary : in Boundary := Inclusive;
+      Last_Boundary  : in Boundary := Inclusive)
+     return Boolean;
+   --  True if Outer entirely contains Inner, according to Boundaries.
+   --
+   --  Note that any non-null region contains Null_Buffer_Region.
 
-   function Image is new SAL.Generic_Decimal_Image (Valid_Node_Index);
-   --  Has Width parameter
+   function Overlaps (A, B : in Buffer_Region) return Boolean;
+   --  True if A and B have some positions in common.
 
-   function Image (Item : in Valid_Node_Index) return String
-     is (Image (Item, 4));
+   function Image (Item : in Buffer_Region) return String;
 
-   function Image is new SAL.Gen_Unconstrained_Array_Image
-     (Positive_Index_Type, Valid_Node_Index, Valid_Node_Index_Array, Image);
+   function "and" (Left, Right : in Buffer_Region) return Buffer_Region;
+   --  Return region enclosing both Left and Right.
 
-   package Valid_Node_Index_Arrays is new SAL.Gen_Unbounded_Definite_Vectors
-     (Positive_Index_Type, Valid_Node_Index, Default_Element => 
Valid_Node_Index'Last);
-   --  Index matches Valid_Node_Index_Array.
+   function "+" (Left : in Buffer_Region; Right : in Base_Buffer_Pos) return 
Buffer_Region
+     is (Left.First + Right, Left.Last + Right);
 
-   type Base_Token is tagged record
-      --  Base_Token is used in the core parser. The parser only needs ID and 
Tree_Index;
-      --  semantic checks need Byte_Region to compare names. Line, Col, and
-      --  Char_Region are included for error messages.
+   function Adjust (Left : in Buffer_Region; Delta_First : in Integer; 
Delta_Last : in Integer) return Buffer_Region
+   is (Buffer_Pos (Integer'Max (1, Integer (Left.First) + Delta_First)),
+       Base_Buffer_Pos (Integer (Left.Last) + Delta_Last));
 
-      ID         : Token_ID   := Invalid_Token_ID;
-      Tree_Index : Node_Index := Invalid_Node_Index;
+   type Base_Line_Number_Type is new Integer; -- for delta line numbers.
+   subtype Line_Number_Type is Base_Line_Number_Type range 1 .. 
Base_Line_Number_Type'Last - 1;
+   --  Match Emacs buffer line numbers.
 
-      Byte_Region : Buffer_Region := Null_Buffer_Region;
-      --  Index into the Lexer buffer for the token text.
+   Invalid_Line_Number : constant Base_Line_Number_Type := 
Base_Line_Number_Type'Last;
 
-      Line   : Line_Number_Type  := Invalid_Line_Number;
-      Column : Ada.Text_IO.Count := 0;
-      --  At start of token.
+   function Trimmed_Image (Item : in Base_Line_Number_Type) return String;
+   --  '-' if Invalid_Line_Number
 
-      Char_Region : Buffer_Region := Null_Buffer_Region;
-      --  Character position, useful for finding the token location in Emacs
-      --  buffers.
+   type Line_Region is record
+      First, Last : Line_Number_Type;
    end record;
 
-   type Base_Token_Class_Access is access all Base_Token'Class;
-   type Base_Token_Class_Access_Constant is access constant Base_Token'Class;
-
-   function Image
-     (Item       : in Base_Token;
-      Descriptor : in WisiToken.Descriptor)
-     return String;
-   --  For debug/test messages.
-
-   procedure Free is new Ada.Unchecked_Deallocation (Base_Token'Class, 
Base_Token_Class_Access);
-
-   Invalid_Token : constant Base_Token := (others => <>);
-
-   type Base_Token_Index is range 0 .. Integer'Last;
-   subtype Token_Index is Base_Token_Index range 1 .. Base_Token_Index'Last;
+   Null_Line_Region : constant Line_Region := (Line_Number_Type'Last, 
Line_Number_Type'First);
 
-   Invalid_Token_Index : constant Base_Token_Index := Base_Token_Index'First;
+   function New_Line_Count (Region : in Line_Region) return 
Base_Line_Number_Type
+   is ((if Region.Last >= Region.First
+        then Region.Last - Region.First
+        else 0));
 
-   function Trimmed_Image is new SAL.Gen_Trimmed_Image (Base_Token_Index);
+   function Contains_New_Line (Region : in Line_Region) return Boolean
+   is (Region.Last > Region.First);
 
-   type Token_Index_Array is array (Natural range <>) of Token_Index;
+   function Image (Item : in Line_Region) return String;
+   --  Ada positional aggregate.
 
-   package Recover_Token_Index_Arrays is new SAL.Gen_Unbounded_Definite_Vectors
-     (Natural, Base_Token_Index, Default_Element => Invalid_Token_Index);
+   function "+" (Left : in Line_Region; Right : in Base_Line_Number_Type) 
return Line_Region;
 
-   type Base_Token_Array is array (Positive_Index_Type range <>) of Base_Token;
+   function Contains (Region : in Line_Region; Pos : in Base_Line_Number_Type) 
return Boolean
+   is (Region.First <= Pos and Pos <= Region.Last);
 
-   package Base_Token_Arrays is new SAL.Gen_Unbounded_Definite_Vectors
-     (Token_Index, Base_Token, Default_Element => (others => <>));
-   type Base_Token_Array_Access is access all Base_Token_Arrays.Vector;
-   type Base_Token_Array_Access_Constant is access constant 
Base_Token_Arrays.Vector;
-
-   function Image is new Base_Token_Arrays.Gen_Image_Aux 
(WisiToken.Descriptor, Trimmed_Image, Image);
-
-   function Image
-     (Token      : in Base_Token_Index;
-      Terminals  : in Base_Token_Arrays.Vector;
-      Descriptor : in WisiToken.Descriptor)
-     return String;
-
-   package Line_Begin_Token_Vectors is new SAL.Gen_Unbounded_Definite_Vectors
-     (Line_Number_Type, Base_Token_Index, Default_Element => 
Invalid_Token_Index);
-
-   type Recover_Token is record
-      --  Maintaining a syntax tree during error recovery is too slow, so we
-      --  store enough information in the recover stack to perform
-      --  Semantic_Checks, Language_Fixes, and Push_Back operations. and to
-      --  apply the solution to the main parser state. We make thousands of
-      --  copies of the parse stack during recover, so minimizing size and
-      --  compute time for this is critical.
-      ID : Token_ID := Invalid_Token_ID;
-
-      Byte_Region : Buffer_Region := Null_Buffer_Region;
-      --  Byte_Region is used to detect empty tokens, for cost and other 
issues.
-
-      Min_Terminal_Index : Base_Token_Index := Invalid_Token_Index;
-      --  For terminals, index of this token in Shared_Parser.Terminals. For
-      --  nonterminals, minimum of contained tokens (Invalid_Token_Index if
-      --  empty). For virtuals, Invalid_Token_Index. Used for push_back of
-      --  nonterminals.
-
-      Name : Buffer_Region := Null_Buffer_Region;
-      --  Set and used by semantic_checks.
-
-      Virtual : Boolean := True;
-      --  For terminals, True if inserted by recover. For nonterminals, True
-      --  if any contained token has Virtual = True.
-   end record;
-
-   function Image
-     (Item       : in Recover_Token;
-      Descriptor : in WisiToken.Descriptor)
-     return String;
-
-   type Recover_Token_Array is array (Positive_Index_Type range <>) of 
Recover_Token;
-
-   package Recover_Token_Arrays is new SAL.Gen_Unbounded_Definite_Vectors
-     (Token_Index, Recover_Token, Default_Element => (others => <>));
-
-   function Image is new Recover_Token_Arrays.Gen_Image_Aux 
(WisiToken.Descriptor, Trimmed_Image, Image);
+   type Insert_Location is (After_Prev, Between, Before_Next);
 
    type Base_Identifier_Index is range 0 .. Integer'Last;
    subtype Identifier_Index is Base_Identifier_Index range 1 .. 
Base_Identifier_Index'Last;
@@ -441,7 +409,7 @@ package WisiToken is
    Outline     : constant := 0; -- spawn/terminate parallel parsers, error 
recovery enter/exit
    Detail      : constant := 1; -- add each parser cycle
    Extra       : constant := 2; -- add pending semantic state operations
-   Lexer_Debug : constant := 3; -- add lexer debug
+   Extreme     : constant := 3; -- add ?
 
    Trace_McKenzie : Integer  := 0;
    --  If Trace_McKenzie > 0, Parse prints messages helpful for debugging 
error recovery.
@@ -450,24 +418,58 @@ package WisiToken is
    --  Detail  - add each error recovery configuration
    --  Extra   - add error recovery parse actions
 
+   Trace_Lexer : Integer := 0;
+
+   Trace_Incremental_Parse : Integer := 0;
+
    Trace_Action : Integer := 0;
-   --  Output during Execute_Action, and unit tests.
+   --  Output during Execute_Action
+
+   Trace_Tests : Integer := 0;
+   --  Output during unit tests
 
+   Trace_Generate                  : Integer := 0;
    Trace_Generate_EBNF             : Integer := 0;
    Trace_Generate_Table            : Integer := 0;
+   Trace_Generate_Conflicts        : Integer := 0;
    Trace_Generate_Minimal_Complete : Integer := 0;
    --  Output during grammar generation.
 
    Trace_Time : Boolean := False;
    --  Output execution time for various things.
 
+   Trace_Memory : Integer := 0;
+
+   Trace_Parse_No_State_Numbers : Boolean := False;
+   --  For test_lr1_parallel.adb
+
    Debug_Mode : Boolean := False;
-   --  If True, Output stack traces, propagate exceptions to top level.
+   --  If True, output stack traces, propagate exceptions to top level.
    --  Otherwise, be robust to errors, so user does not notice them.
 
-   type Trace (Descriptor : not null access constant WisiToken.Descriptor) is 
abstract tagged limited null record;
-   --  Output for tests/debugging. Descriptor included here because many
-   --  uses of Trace will use Image (Item, Descriptor);
+   Test_McKenzie_Recover : Boolean := False;
+   --  True when running test_mckenzie_recover.adb; error recover stores
+   --  extra info for test.
+
+   procedure Enable_Trace (Config : in String);
+   --  Config has the format:
+   --
+   --  name=value ...
+   --
+   --  where "name" is the suffix of one of the Trace_* variables above
+   --  (or an abbreviation; see body), and "value" is an integer.
+   --
+   --  For Boolean variables, value > 0 is True, 0 is False.
+   --
+   --  In addition, the name "debug" sets Debug_Mode.
+
+   procedure Enable_Trace_Help;
+   --  Output to Text_IO.Current_Error a message describing available
+   --  options for Enable_Trace.
+
+   type Trace is abstract tagged limited null record;
+   type Trace_Access is access all Trace'Class;
+   --  Output for tests/debugging.
 
    procedure Set_Prefix (Trace : in out WisiToken.Trace; Prefix : in String) 
is abstract;
    --  Prepend Prefix to all subsequent messages. Useful for adding
@@ -477,17 +479,28 @@ package WisiToken is
    --  Put Item to the Trace display. If Prefix is True, prepend the stored 
prefix.
 
    procedure Put_Line (Trace : in out WisiToken.Trace; Item : in String) is 
abstract;
-   --  Put Item to the Trace display, followed by a newline.
+   --  Put Item to the Trace display, preceded by the stored prefix, followed 
by a newline.
 
    procedure New_Line (Trace : in out WisiToken.Trace) is abstract;
-   --  Put a newline to the Trace display.
+   --  Put a newline to the Trace display (no prefix).
 
    procedure Put_Clock (Trace : in out WisiToken.Trace; Label : in String) is 
abstract;
-   --  Put Ada.Calendar.Clock to Trace.
+   --  Put Ada.Calendar.Clock to Trace, preceded by stored prefix.
+
+   Memory_Baseline : GNATCOLL.Memory.Byte_Count := 0;
+   --  This is only the Current value; trying to save the high water mark
+   --  for later subtraction does not make sense.
+
+   procedure Report_Memory (Trace : in out WisiToken.Trace'Class; Prefix : in 
Boolean);
+   --  Output data from GNATCOLL.Memory, relative to Memory_Baseline.
 
    ----------
    --  Misc
 
+   type Cache_Version is mod 2**16;
+
+   type Boolean_Access is access all Boolean;
+
    function "+" (Item : in String) return 
Ada.Strings.Unbounded.Unbounded_String
      renames Ada.Strings.Unbounded.To_Unbounded_String;
 
@@ -498,15 +511,25 @@ package WisiToken is
 
    function Error_Message
      (File_Name : in String;
-      Line      : in Line_Number_Type;
+      Line      : in Base_Line_Number_Type;
       Column    : in Ada.Text_IO.Count;
       Message   : in String)
      return String;
-   --  Return Gnu-formatted error message.
+   --  Return Gnu-formatted error message. Column parameter is origin 0
+   --  (WisiToken and Emacs standard); in formatted message it is origin
+   --  1 (Gnu coding standards [gnu_coding])
 
    type Names_Array is array (Integer range <>) of String_Access_Constant;
    type Names_Array_Access is access Names_Array;
    type Names_Array_Array is array (WisiToken.Token_ID range <>) of 
Names_Array_Access;
    type Names_Array_Array_Access is access Names_Array_Array;
 
+   function Next_Value
+     (Stream : not null access Ada.Streams.Root_Stream_Type'Class;
+      Delims : in              Ada.Strings.Maps.Character_Set)
+     return String;
+   --  Return a string from Stream, ending at a member of Delims or EOI
+   --  (ending delim is read from the stream but not included in result).
+   --  Leading Delims are skipped.
+
 end WisiToken;
diff --git a/wisitoken_grammar_actions.adb b/wisitoken_grammar_actions.adb
index 8819828b1e..9c53fba32d 100644
--- a/wisitoken_grammar_actions.adb
+++ b/wisitoken_grammar_actions.adb
@@ -1,8 +1,8 @@
---  generated parser support file.
+--  generated parser support file. -*- buffer-read-only:t  -*-
 --  command line: wisitoken-bnf-generate.exe  --generate LALR Ada re2c 
wisitoken_grammar.wy
 --
 
---  Copyright (C) 2017 - 2019 Free Software Foundation, Inc.
+--  Copyright (C) 2017 - 2022 Free Software Foundation, Inc.
 --
 --  Author: Stephen Leake <stephe-leake@stephe-leake.org>
 --
@@ -28,155 +28,218 @@ package body Wisitoken_Grammar_Actions is
    procedure declaration_0
     (User_Data : in out WisiToken.Syntax_Trees.User_Data_Type'Class;
      Tree      : in out WisiToken.Syntax_Trees.Tree;
-     Nonterm   : in     WisiToken.Valid_Node_Index;
-     Tokens    : in     WisiToken.Valid_Node_Index_Array)
+     Nonterm   : in     WisiToken.Syntax_Trees.Valid_Node_Access)
    is
-      pragma Unreferenced (Nonterm);
    begin
-      Add_Declaration (User_Data, Tree, Tokens);
+      Add_Declaration (User_Data, Tree, Nonterm);
    end declaration_0;
 
    procedure declaration_1
     (User_Data : in out WisiToken.Syntax_Trees.User_Data_Type'Class;
      Tree      : in out WisiToken.Syntax_Trees.Tree;
-     Nonterm   : in     WisiToken.Valid_Node_Index;
-     Tokens    : in     WisiToken.Valid_Node_Index_Array)
+     Nonterm   : in     WisiToken.Syntax_Trees.Valid_Node_Access)
    is
-      pragma Unreferenced (Nonterm);
    begin
-      Add_Declaration (User_Data, Tree, Tokens);
+      Add_Declaration (User_Data, Tree, Nonterm);
    end declaration_1;
 
    procedure declaration_2
     (User_Data : in out WisiToken.Syntax_Trees.User_Data_Type'Class;
      Tree      : in out WisiToken.Syntax_Trees.Tree;
-     Nonterm   : in     WisiToken.Valid_Node_Index;
-     Tokens    : in     WisiToken.Valid_Node_Index_Array)
+     Nonterm   : in     WisiToken.Syntax_Trees.Valid_Node_Access)
    is
-      pragma Unreferenced (Nonterm);
    begin
-      Add_Declaration (User_Data, Tree, Tokens);
+      Add_Declaration (User_Data, Tree, Nonterm);
    end declaration_2;
 
    procedure declaration_3
     (User_Data : in out WisiToken.Syntax_Trees.User_Data_Type'Class;
      Tree      : in out WisiToken.Syntax_Trees.Tree;
-     Nonterm   : in     WisiToken.Valid_Node_Index;
-     Tokens    : in     WisiToken.Valid_Node_Index_Array)
+     Nonterm   : in     WisiToken.Syntax_Trees.Valid_Node_Access)
    is
-      pragma Unreferenced (Nonterm);
    begin
-      Add_Declaration (User_Data, Tree, Tokens);
+      Add_Declaration (User_Data, Tree, Nonterm);
    end declaration_3;
 
    procedure declaration_4
     (User_Data : in out WisiToken.Syntax_Trees.User_Data_Type'Class;
      Tree      : in out WisiToken.Syntax_Trees.Tree;
-     Nonterm   : in     WisiToken.Valid_Node_Index;
-     Tokens    : in     WisiToken.Valid_Node_Index_Array)
+     Nonterm   : in     WisiToken.Syntax_Trees.Valid_Node_Access)
    is
-      pragma Unreferenced (Nonterm);
    begin
-      Start_If (User_Data, Tree, Tokens);
+      Add_Declaration (User_Data, Tree, Nonterm);
    end declaration_4;
 
    procedure declaration_5
     (User_Data : in out WisiToken.Syntax_Trees.User_Data_Type'Class;
      Tree      : in out WisiToken.Syntax_Trees.Tree;
-     Nonterm   : in     WisiToken.Valid_Node_Index;
-     Tokens    : in     WisiToken.Valid_Node_Index_Array)
+     Nonterm   : in     WisiToken.Syntax_Trees.Valid_Node_Access)
    is
-      pragma Unreferenced (Tree, Nonterm, Tokens);
    begin
-      End_If (User_Data);
+      Add_Declaration (User_Data, Tree, Nonterm);
    end declaration_5;
 
+   procedure declaration_6
+    (User_Data : in out WisiToken.Syntax_Trees.User_Data_Type'Class;
+     Tree      : in out WisiToken.Syntax_Trees.Tree;
+     Nonterm   : in     WisiToken.Syntax_Trees.Valid_Node_Access)
+   is
+   begin
+      Add_Declaration (User_Data, Tree, Nonterm);
+   end declaration_6;
+
+   procedure declaration_7
+    (User_Data : in out WisiToken.Syntax_Trees.User_Data_Type'Class;
+     Tree      : in out WisiToken.Syntax_Trees.Tree;
+     Nonterm   : in     WisiToken.Syntax_Trees.Valid_Node_Access)
+   is
+   begin
+      Add_Declaration (User_Data, Tree, Nonterm);
+   end declaration_7;
+
+   procedure declaration_8
+    (User_Data : in out WisiToken.Syntax_Trees.User_Data_Type'Class;
+     Tree      : in out WisiToken.Syntax_Trees.Tree;
+     Nonterm   : in     WisiToken.Syntax_Trees.Valid_Node_Access)
+   is
+   begin
+      Add_Declaration (User_Data, Tree, Nonterm);
+   end declaration_8;
+
+   procedure declaration_9
+    (User_Data : in out WisiToken.Syntax_Trees.User_Data_Type'Class;
+     Tree      : in out WisiToken.Syntax_Trees.Tree;
+     Nonterm   : in     WisiToken.Syntax_Trees.Valid_Node_Access)
+   is
+   begin
+      Add_Declaration (User_Data, Tree, Nonterm);
+   end declaration_9;
+
+   procedure declaration_10
+    (User_Data : in out WisiToken.Syntax_Trees.User_Data_Type'Class;
+     Tree      : in out WisiToken.Syntax_Trees.Tree;
+     Nonterm   : in     WisiToken.Syntax_Trees.Valid_Node_Access)
+   is
+   begin
+      Add_Declaration (User_Data, Tree, Nonterm);
+   end declaration_10;
+
+   procedure declaration_11
+    (User_Data : in out WisiToken.Syntax_Trees.User_Data_Type'Class;
+     Tree      : in out WisiToken.Syntax_Trees.Tree;
+     Nonterm   : in     WisiToken.Syntax_Trees.Valid_Node_Access)
+   is
+   begin
+      Start_If (User_Data, Tree, Nonterm);
+   end declaration_11;
+
+   procedure declaration_12
+    (User_Data : in out WisiToken.Syntax_Trees.User_Data_Type'Class;
+     Tree      : in out WisiToken.Syntax_Trees.Tree;
+     Nonterm   : in     WisiToken.Syntax_Trees.Valid_Node_Access)
+   is
+   begin
+      Start_If (User_Data, Tree, Nonterm);
+   end declaration_12;
+
+   procedure declaration_13
+    (User_Data : in out WisiToken.Syntax_Trees.User_Data_Type'Class;
+     Tree      : in out WisiToken.Syntax_Trees.Tree;
+     Nonterm   : in     WisiToken.Syntax_Trees.Valid_Node_Access)
+   is
+   begin
+      Start_If (User_Data, Tree, Nonterm);
+   end declaration_13;
+
+   procedure declaration_14
+    (User_Data : in out WisiToken.Syntax_Trees.User_Data_Type'Class;
+     Tree      : in out WisiToken.Syntax_Trees.Tree;
+     Nonterm   : in     WisiToken.Syntax_Trees.Valid_Node_Access)
+   is
+   begin
+      Start_If (User_Data, Tree, Nonterm);
+   end declaration_14;
+
+   procedure declaration_15
+    (User_Data : in out WisiToken.Syntax_Trees.User_Data_Type'Class;
+     Tree      : in out WisiToken.Syntax_Trees.Tree;
+     Nonterm   : in     WisiToken.Syntax_Trees.Valid_Node_Access)
+   is
+      pragma Unreferenced (Tree, Nonterm);
+   begin
+      End_If (User_Data);
+   end declaration_15;
+
    procedure nonterminal_0
     (User_Data : in out WisiToken.Syntax_Trees.User_Data_Type'Class;
      Tree      : in out WisiToken.Syntax_Trees.Tree;
-     Nonterm   : in     WisiToken.Valid_Node_Index;
-     Tokens    : in     WisiToken.Valid_Node_Index_Array)
+     Nonterm   : in     WisiToken.Syntax_Trees.Valid_Node_Access)
    is
-      pragma Unreferenced (Nonterm);
    begin
-      Add_Nonterminal (User_Data, Tree, Tokens);
+      Add_Nonterminal (User_Data, Tree, Nonterm);
    end nonterminal_0;
 
    procedure nonterminal_1
     (User_Data : in out WisiToken.Syntax_Trees.User_Data_Type'Class;
      Tree      : in out WisiToken.Syntax_Trees.Tree;
-     Nonterm   : in     WisiToken.Valid_Node_Index;
-     Tokens    : in     WisiToken.Valid_Node_Index_Array)
+     Nonterm   : in     WisiToken.Syntax_Trees.Valid_Node_Access)
    is
-      pragma Unreferenced (Nonterm);
    begin
-      Add_Nonterminal (User_Data, Tree, Tokens);
+      Add_Nonterminal (User_Data, Tree, Nonterm);
    end nonterminal_1;
 
    procedure rhs_item_1
     (User_Data : in out WisiToken.Syntax_Trees.User_Data_Type'Class;
      Tree      : in out WisiToken.Syntax_Trees.Tree;
-     Nonterm   : in     WisiToken.Valid_Node_Index;
-     Tokens    : in     WisiToken.Valid_Node_Index_Array)
+     Nonterm   : in     WisiToken.Syntax_Trees.Valid_Node_Access)
    is
-      pragma Unreferenced (Nonterm);
    begin
-      Check_EBNF (User_Data, Tree, Tokens, 1);
+      Check_EBNF (User_Data, Tree, Nonterm, 1);
    end rhs_item_1;
 
    procedure rhs_item_2
     (User_Data : in out WisiToken.Syntax_Trees.User_Data_Type'Class;
      Tree      : in out WisiToken.Syntax_Trees.Tree;
-     Nonterm   : in     WisiToken.Valid_Node_Index;
-     Tokens    : in     WisiToken.Valid_Node_Index_Array)
+     Nonterm   : in     WisiToken.Syntax_Trees.Valid_Node_Access)
    is
-      pragma Unreferenced (Nonterm);
    begin
-      Check_EBNF (User_Data, Tree, Tokens, 1);
+      Check_EBNF (User_Data, Tree, Nonterm, 1);
    end rhs_item_2;
 
    procedure rhs_item_3
     (User_Data : in out WisiToken.Syntax_Trees.User_Data_Type'Class;
      Tree      : in out WisiToken.Syntax_Trees.Tree;
-     Nonterm   : in     WisiToken.Valid_Node_Index;
-     Tokens    : in     WisiToken.Valid_Node_Index_Array)
+     Nonterm   : in     WisiToken.Syntax_Trees.Valid_Node_Access)
    is
-      pragma Unreferenced (Nonterm);
    begin
-      Check_EBNF (User_Data, Tree, Tokens, 1);
+      Check_EBNF (User_Data, Tree, Nonterm, 1);
    end rhs_item_3;
 
    procedure rhs_item_4
     (User_Data : in out WisiToken.Syntax_Trees.User_Data_Type'Class;
      Tree      : in out WisiToken.Syntax_Trees.Tree;
-     Nonterm   : in     WisiToken.Valid_Node_Index;
-     Tokens    : in     WisiToken.Valid_Node_Index_Array)
+     Nonterm   : in     WisiToken.Syntax_Trees.Valid_Node_Access)
    is
-      pragma Unreferenced (Nonterm);
    begin
-      Check_EBNF (User_Data, Tree, Tokens, 1);
+      Check_EBNF (User_Data, Tree, Nonterm, 1);
    end rhs_item_4;
 
    procedure rhs_item_5
     (User_Data : in out WisiToken.Syntax_Trees.User_Data_Type'Class;
      Tree      : in out WisiToken.Syntax_Trees.Tree;
-     Nonterm   : in     WisiToken.Valid_Node_Index;
-     Tokens    : in     WisiToken.Valid_Node_Index_Array)
+     Nonterm   : in     WisiToken.Syntax_Trees.Valid_Node_Access)
    is
-      pragma Unreferenced (Nonterm);
    begin
-      Check_EBNF (User_Data, Tree, Tokens, 1);
+      Check_EBNF (User_Data, Tree, Nonterm, 1);
    end rhs_item_5;
 
    procedure rhs_optional_item_3
     (User_Data : in out WisiToken.Syntax_Trees.User_Data_Type'Class;
      Tree      : in out WisiToken.Syntax_Trees.Tree;
-     Nonterm   : in     WisiToken.Valid_Node_Index;
-     Tokens    : in     WisiToken.Valid_Node_Index_Array)
+     Nonterm   : in     WisiToken.Syntax_Trees.Valid_Node_Access)
    is
-      pragma Unreferenced (Nonterm);
    begin
-      Check_EBNF (User_Data, Tree, Tokens, 1);
+      Check_EBNF (User_Data, Tree, Nonterm, 1);
    end rhs_optional_item_3;
 
 end Wisitoken_Grammar_Actions;
diff --git a/wisitoken_grammar_actions.ads b/wisitoken_grammar_actions.ads
index 1308267b8e..4c044a5302 100644
--- a/wisitoken_grammar_actions.ads
+++ b/wisitoken_grammar_actions.ads
@@ -1,8 +1,8 @@
---  generated parser support file.
+--  generated parser support file. -*- buffer-read-only:t  -*-
 --  command line: wisitoken-bnf-generate.exe  --generate LALR Ada re2c PROCESS wisitoken_grammar.wy
 --
 
---  Copyright (C) 2017 - 2019 Free Software Foundation, Inc.
+--  Copyright (C) 2017 - 2022 Free Software Foundation, Inc.
 --
 --  Author: Stephen Leake <stephe-leake@stephe-leake.org>
 --
@@ -24,26 +24,35 @@
 with WisiToken.Syntax_Trees;
 package Wisitoken_Grammar_Actions is
 
-   Descriptor : aliased WisiToken.Descriptor :=
+   Descriptor : aliased constant WisiToken.Descriptor :=
      (First_Terminal    => 3,
-      Last_Terminal     => 36,
-      First_Nonterminal => 37,
-      Last_Nonterminal  => 56,
-      EOI_ID            => 36,
-      Accept_ID         => 37,
-      Case_Insensitive  => False,
-      New_Line_ID       => 1,
-      String_1_ID       => 35,
-      String_2_ID       => 34,
-      Image             =>
+      Last_Terminal       => 42,
+      First_Nonterminal   => 43,
+      Last_Nonterminal    => 66,
+      SOI_ID              => 67,
+      EOI_ID              => 42,
+      Accept_ID           => 43,
+      Case_Insensitive    => False,
+      New_Line_ID         => 1,
+      String_1_ID         => 41,
+      String_2_ID         => 40,
+      Image               =>
         (new String'("WHITESPACE"),
          new String'("NEW_LINE"),
          new String'("COMMENT"),
+         new String'("ACCEPT_I"),
          new String'("CODE"),
+         new String'("CONFLICT"),
+         new String'("CONFLICT_RESOLUTION"),
          new String'("END"),
+         new String'("ELSIF"),
          new String'("IF"),
+         new String'("IN"),
          new String'("KEYWORD"),
          new String'("NON_GRAMMAR"),
+         new String'("ON"),
+         new String'("REDUCE_I"),
+         new String'("SHIFT_I"),
          new String'("TOKEN"),
          new String'("RAW_CODE"),
          new String'("REGEXP"),
@@ -51,7 +60,6 @@ package Wisitoken_Grammar_Actions is
          new String'("BAR"),
          new String'("COLON"),
          new String'("COLON_COLON_EQUAL"),
-         new String'("COMMA"),
          new String'("EQUAL"),
          new String'("GREATER"),
          new String'("LEFT_BRACE"),
@@ -66,7 +74,6 @@ package Wisitoken_Grammar_Actions is
          new String'("RIGHT_BRACKET"),
          new String'("RIGHT_PAREN"),
          new String'("SEMICOLON"),
-         new String'("SLASH"),
          new String'("STAR"),
          new String'("NUMERIC_LITERAL"),
          new String'("IDENTIFIER"),
@@ -74,11 +81,15 @@ package Wisitoken_Grammar_Actions is
          new String'("STRING_LITERAL_2"),
          new String'("Wisi_EOI"),
          new String'("wisitoken_accept"),
+         new String'("regexp_string"),
+         new String'("conflict_item"),
+         new String'("conflict_item_list"),
+         new String'("token_name"),
          new String'("declaration"),
-         new String'("token_keyword_non_grammar"),
          new String'("identifier_list"),
-         new String'("declaration_item_list"),
+         new String'("IDENTIFIER_BAR_list"),
          new String'("declaration_item"),
+         new String'("declaration_item_list"),
          new String'("nonterminal"),
          new String'("semicolon_opt"),
          new String'("rhs_list"),
@@ -92,20 +103,29 @@ package Wisitoken_Grammar_Actions is
          new String'("rhs_multiple_item"),
          new String'("rhs_alternative_list"),
          new String'("compilation_unit"),
-         new String'("compilation_unit_list")),
-      Terminal_Image_Width => 17,
-      Image_Width          => 25,
-      Last_Lookahead       => 37);
+         new String'("compilation_unit_list"),
+         new String'("Wisi_SOI")),
+      Terminal_Image_Width => 19,
+      Image_Width          => 21,
+      Last_Lookahead       => 43);
 
    type Token_Enum_ID is
      (WHITESPACE_ID,
       NEW_LINE_ID,
       COMMENT_ID,
+      ACCEPT_I_ID,
       CODE_ID,
+      CONFLICT_ID,
+      CONFLICT_RESOLUTION_ID,
       END_ID,
+      ELSIF_ID,
       IF_ID,
+      IN_ID,
       KEYWORD_ID,
       NON_GRAMMAR_ID,
+      ON_ID,
+      REDUCE_I_ID,
+      SHIFT_I_ID,
       TOKEN_ID,
       RAW_CODE_ID,
       REGEXP_ID,
@@ -113,7 +133,6 @@ package Wisitoken_Grammar_Actions is
       BAR_ID,
       COLON_ID,
       COLON_COLON_EQUAL_ID,
-      COMMA_ID,
       EQUAL_ID,
       GREATER_ID,
       LEFT_BRACE_ID,
@@ -128,7 +147,6 @@ package Wisitoken_Grammar_Actions is
       RIGHT_BRACKET_ID,
       RIGHT_PAREN_ID,
       SEMICOLON_ID,
-      SLASH_ID,
       STAR_ID,
       NUMERIC_LITERAL_ID,
       IDENTIFIER_ID,
@@ -136,11 +154,15 @@ package Wisitoken_Grammar_Actions is
       STRING_LITERAL_2_ID,
       Wisi_EOI_ID,
       wisitoken_accept_ID,
+      regexp_string_ID,
+      conflict_item_ID,
+      conflict_item_list_ID,
+      token_name_ID,
       declaration_ID,
-      token_keyword_non_grammar_ID,
       identifier_list_ID,
-      declaration_item_list_ID,
+      IDENTIFIER_BAR_list_ID,
       declaration_item_ID,
+      declaration_item_list_ID,
       nonterminal_ID,
       semicolon_opt_ID,
       rhs_list_ID,
@@ -154,7 +176,8 @@ package Wisitoken_Grammar_Actions is
       rhs_multiple_item_ID,
       rhs_alternative_list_ID,
       compilation_unit_ID,
-      compilation_unit_list_ID);
+      compilation_unit_list_ID,
+      Wisi_SOI_ID);
 
    type Token_Enum_ID_Array is array (Positive range <>) of Token_Enum_ID;
    use all type WisiToken.Token_ID;
@@ -165,73 +188,99 @@ package Wisitoken_Grammar_Actions is
    function "-" (Item : in WisiToken.Token_ID) return Token_Enum_ID renames To_Token_Enum;
 
    procedure declaration_0
-    (User_Data : in out WisiToken.Syntax_Trees.User_Data_Type'Class;
-     Tree      : in out WisiToken.Syntax_Trees.Tree;
-     Nonterm   : in     WisiToken.Valid_Node_Index;
-     Tokens    : in     WisiToken.Valid_Node_Index_Array);
+     (User_Data : in out WisiToken.Syntax_Trees.User_Data_Type'Class;
+      Tree      : in out WisiToken.Syntax_Trees.Tree;
+      Nonterm   : in     WisiToken.Syntax_Trees.Valid_Node_Access);
    procedure declaration_1
-    (User_Data : in out WisiToken.Syntax_Trees.User_Data_Type'Class;
-     Tree      : in out WisiToken.Syntax_Trees.Tree;
-     Nonterm   : in     WisiToken.Valid_Node_Index;
-     Tokens    : in     WisiToken.Valid_Node_Index_Array);
+     (User_Data : in out WisiToken.Syntax_Trees.User_Data_Type'Class;
+      Tree      : in out WisiToken.Syntax_Trees.Tree;
+      Nonterm   : in     WisiToken.Syntax_Trees.Valid_Node_Access);
    procedure declaration_2
-    (User_Data : in out WisiToken.Syntax_Trees.User_Data_Type'Class;
-     Tree      : in out WisiToken.Syntax_Trees.Tree;
-     Nonterm   : in     WisiToken.Valid_Node_Index;
-     Tokens    : in     WisiToken.Valid_Node_Index_Array);
+     (User_Data : in out WisiToken.Syntax_Trees.User_Data_Type'Class;
+      Tree      : in out WisiToken.Syntax_Trees.Tree;
+      Nonterm   : in     WisiToken.Syntax_Trees.Valid_Node_Access);
    procedure declaration_3
-    (User_Data : in out WisiToken.Syntax_Trees.User_Data_Type'Class;
-     Tree      : in out WisiToken.Syntax_Trees.Tree;
-     Nonterm   : in     WisiToken.Valid_Node_Index;
-     Tokens    : in     WisiToken.Valid_Node_Index_Array);
+     (User_Data : in out WisiToken.Syntax_Trees.User_Data_Type'Class;
+      Tree      : in out WisiToken.Syntax_Trees.Tree;
+      Nonterm   : in     WisiToken.Syntax_Trees.Valid_Node_Access);
    procedure declaration_4
-    (User_Data : in out WisiToken.Syntax_Trees.User_Data_Type'Class;
-     Tree      : in out WisiToken.Syntax_Trees.Tree;
-     Nonterm   : in     WisiToken.Valid_Node_Index;
-     Tokens    : in     WisiToken.Valid_Node_Index_Array);
+     (User_Data : in out WisiToken.Syntax_Trees.User_Data_Type'Class;
+      Tree      : in out WisiToken.Syntax_Trees.Tree;
+      Nonterm   : in     WisiToken.Syntax_Trees.Valid_Node_Access);
    procedure declaration_5
-    (User_Data : in out WisiToken.Syntax_Trees.User_Data_Type'Class;
-     Tree      : in out WisiToken.Syntax_Trees.Tree;
-     Nonterm   : in     WisiToken.Valid_Node_Index;
-     Tokens    : in     WisiToken.Valid_Node_Index_Array);
+     (User_Data : in out WisiToken.Syntax_Trees.User_Data_Type'Class;
+      Tree      : in out WisiToken.Syntax_Trees.Tree;
+      Nonterm   : in     WisiToken.Syntax_Trees.Valid_Node_Access);
+   procedure declaration_6
+     (User_Data : in out WisiToken.Syntax_Trees.User_Data_Type'Class;
+      Tree      : in out WisiToken.Syntax_Trees.Tree;
+      Nonterm   : in     WisiToken.Syntax_Trees.Valid_Node_Access);
+   procedure declaration_7
+     (User_Data : in out WisiToken.Syntax_Trees.User_Data_Type'Class;
+      Tree      : in out WisiToken.Syntax_Trees.Tree;
+      Nonterm   : in     WisiToken.Syntax_Trees.Valid_Node_Access);
+   procedure declaration_8
+     (User_Data : in out WisiToken.Syntax_Trees.User_Data_Type'Class;
+      Tree      : in out WisiToken.Syntax_Trees.Tree;
+      Nonterm   : in     WisiToken.Syntax_Trees.Valid_Node_Access);
+   procedure declaration_9
+     (User_Data : in out WisiToken.Syntax_Trees.User_Data_Type'Class;
+      Tree      : in out WisiToken.Syntax_Trees.Tree;
+      Nonterm   : in     WisiToken.Syntax_Trees.Valid_Node_Access);
+   procedure declaration_10
+     (User_Data : in out WisiToken.Syntax_Trees.User_Data_Type'Class;
+      Tree      : in out WisiToken.Syntax_Trees.Tree;
+      Nonterm   : in     WisiToken.Syntax_Trees.Valid_Node_Access);
+   procedure declaration_11
+     (User_Data : in out WisiToken.Syntax_Trees.User_Data_Type'Class;
+      Tree      : in out WisiToken.Syntax_Trees.Tree;
+      Nonterm   : in     WisiToken.Syntax_Trees.Valid_Node_Access);
+   procedure declaration_12
+     (User_Data : in out WisiToken.Syntax_Trees.User_Data_Type'Class;
+      Tree      : in out WisiToken.Syntax_Trees.Tree;
+      Nonterm   : in     WisiToken.Syntax_Trees.Valid_Node_Access);
+   procedure declaration_13
+     (User_Data : in out WisiToken.Syntax_Trees.User_Data_Type'Class;
+      Tree      : in out WisiToken.Syntax_Trees.Tree;
+      Nonterm   : in     WisiToken.Syntax_Trees.Valid_Node_Access);
+   procedure declaration_14
+     (User_Data : in out WisiToken.Syntax_Trees.User_Data_Type'Class;
+      Tree      : in out WisiToken.Syntax_Trees.Tree;
+      Nonterm   : in     WisiToken.Syntax_Trees.Valid_Node_Access);
+   procedure declaration_15
+     (User_Data : in out WisiToken.Syntax_Trees.User_Data_Type'Class;
+      Tree      : in out WisiToken.Syntax_Trees.Tree;
+      Nonterm   : in     WisiToken.Syntax_Trees.Valid_Node_Access);
    procedure nonterminal_0
-    (User_Data : in out WisiToken.Syntax_Trees.User_Data_Type'Class;
-     Tree      : in out WisiToken.Syntax_Trees.Tree;
-     Nonterm   : in     WisiToken.Valid_Node_Index;
-     Tokens    : in     WisiToken.Valid_Node_Index_Array);
+     (User_Data : in out WisiToken.Syntax_Trees.User_Data_Type'Class;
+      Tree      : in out WisiToken.Syntax_Trees.Tree;
+      Nonterm   : in     WisiToken.Syntax_Trees.Valid_Node_Access);
    procedure nonterminal_1
-    (User_Data : in out WisiToken.Syntax_Trees.User_Data_Type'Class;
-     Tree      : in out WisiToken.Syntax_Trees.Tree;
-     Nonterm   : in     WisiToken.Valid_Node_Index;
-     Tokens    : in     WisiToken.Valid_Node_Index_Array);
+     (User_Data : in out WisiToken.Syntax_Trees.User_Data_Type'Class;
+      Tree      : in out WisiToken.Syntax_Trees.Tree;
+      Nonterm   : in     WisiToken.Syntax_Trees.Valid_Node_Access);
    procedure rhs_item_1
-    (User_Data : in out WisiToken.Syntax_Trees.User_Data_Type'Class;
-     Tree      : in out WisiToken.Syntax_Trees.Tree;
-     Nonterm   : in     WisiToken.Valid_Node_Index;
-     Tokens    : in     WisiToken.Valid_Node_Index_Array);
+     (User_Data : in out WisiToken.Syntax_Trees.User_Data_Type'Class;
+      Tree      : in out WisiToken.Syntax_Trees.Tree;
+      Nonterm   : in     WisiToken.Syntax_Trees.Valid_Node_Access);
    procedure rhs_item_2
-    (User_Data : in out WisiToken.Syntax_Trees.User_Data_Type'Class;
-     Tree      : in out WisiToken.Syntax_Trees.Tree;
-     Nonterm   : in     WisiToken.Valid_Node_Index;
-     Tokens    : in     WisiToken.Valid_Node_Index_Array);
+     (User_Data : in out WisiToken.Syntax_Trees.User_Data_Type'Class;
+      Tree      : in out WisiToken.Syntax_Trees.Tree;
+      Nonterm   : in     WisiToken.Syntax_Trees.Valid_Node_Access);
    procedure rhs_item_3
-    (User_Data : in out WisiToken.Syntax_Trees.User_Data_Type'Class;
-     Tree      : in out WisiToken.Syntax_Trees.Tree;
-     Nonterm   : in     WisiToken.Valid_Node_Index;
-     Tokens    : in     WisiToken.Valid_Node_Index_Array);
+     (User_Data : in out WisiToken.Syntax_Trees.User_Data_Type'Class;
+      Tree      : in out WisiToken.Syntax_Trees.Tree;
+      Nonterm   : in     WisiToken.Syntax_Trees.Valid_Node_Access);
    procedure rhs_item_4
-    (User_Data : in out WisiToken.Syntax_Trees.User_Data_Type'Class;
-     Tree      : in out WisiToken.Syntax_Trees.Tree;
-     Nonterm   : in     WisiToken.Valid_Node_Index;
-     Tokens    : in     WisiToken.Valid_Node_Index_Array);
+     (User_Data : in out WisiToken.Syntax_Trees.User_Data_Type'Class;
+      Tree      : in out WisiToken.Syntax_Trees.Tree;
+      Nonterm   : in     WisiToken.Syntax_Trees.Valid_Node_Access);
    procedure rhs_item_5
-    (User_Data : in out WisiToken.Syntax_Trees.User_Data_Type'Class;
-     Tree      : in out WisiToken.Syntax_Trees.Tree;
-     Nonterm   : in     WisiToken.Valid_Node_Index;
-     Tokens    : in     WisiToken.Valid_Node_Index_Array);
+     (User_Data : in out WisiToken.Syntax_Trees.User_Data_Type'Class;
+      Tree      : in out WisiToken.Syntax_Trees.Tree;
+      Nonterm   : in     WisiToken.Syntax_Trees.Valid_Node_Access);
    procedure rhs_optional_item_3
-    (User_Data : in out WisiToken.Syntax_Trees.User_Data_Type'Class;
-     Tree      : in out WisiToken.Syntax_Trees.Tree;
-     Nonterm   : in     WisiToken.Valid_Node_Index;
-     Tokens    : in     WisiToken.Valid_Node_Index_Array);
+     (User_Data : in out WisiToken.Syntax_Trees.User_Data_Type'Class;
+      Tree      : in out WisiToken.Syntax_Trees.Tree;
+      Nonterm   : in     WisiToken.Syntax_Trees.Valid_Node_Access);
 end Wisitoken_Grammar_Actions;
diff --git a/wisitoken_grammar_editing.adb b/wisitoken_grammar_editing.adb
new file mode 100644
index 0000000000..81b16b1dcc
--- /dev/null
+++ b/wisitoken_grammar_editing.adb
@@ -0,0 +1,3598 @@
+--  Abstract :
+--
+--  See spec.
+--
+--  Copyright (C) 2018 - 2022 Free Software Foundation, Inc.
+--
+--  This library is free software;  you can redistribute it and/or modify it
+--  under terms of the  GNU General Public License  as published by the Free
+--  Software  Foundation;  either version 3,  or (at your  option) any later
+--  version. This library is distributed in the hope that it will be useful,
+--  but WITHOUT ANY WARRANTY;  without even the implied warranty of MERCHAN-
+--  TABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+
+--  As a special exception under Section 7 of GPL version 3, you are granted
+--  additional permissions described in the GCC Runtime Library Exception,
+--  version 3.1, as published by the Free Software Foundation.
+
+pragma License (Modified_GPL);
+
+with Ada.Characters.Handling;
+with Ada.Containers;
+with Ada.Exceptions;
+with Ada.Strings.Fixed;
+with Ada.Text_IO;
+with GNAT.Regexp;
+with SAL.Generic_Decimal_Image;
+with WisiToken.BNF;
+with WisiToken.Generate;
+with WisiToken.Lexer;
+package body WisiToken_Grammar_Editing is
+
+   use WisiToken;
+   use WisiToken.Syntax_Trees;
+   use Wisitoken_Grammar_Actions;
+   use WisiToken_Grammar_Runtime;
+
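+   --  Return an Identifier_Token for the terminal underlying Item, which
+   --  must be an rhs_element, rhs_item, or IDENTIFIER node.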
+   function To_Identifier_Token
+     (Item : in Valid_Node_Access;
+      Tree : in Syntax_Trees.Tree)
+     return Identifier_Token
+   is
+      Node : constant Node_Access :=
+        (case To_Token_Enum (Tree.ID (Item)) is
+         when rhs_element_ID => Tree.First_Terminal (Tree.Find_Descendant (Item, +rhs_item_ID)),
+         when rhs_item_ID    => Tree.First_Terminal (Item),
+         when IDENTIFIER_ID  => Item,
+         when others         => raise SAL.Programmer_Error);
+   begin
+      return
+        (case Tree.Label (Node) is
+         when Source_Terminal    => (Source_Terminal, Node),
+         when Virtual_Terminal   => (Virtual_Terminal, Node),
+         when Virtual_Identifier => (Virtual_Identifier, Tree.ID (Node), Tree.Identifier (Node)),
+         when others             => raise SAL.Programmer_Error);
+   end To_Identifier_Token;
+
+   function Add_RHS_Group_Item
+     (Tree      : in out Syntax_Trees.Tree;
+      RHS_Index : in     Natural;
+      Content   : in     Valid_Node_Access)
+     return Valid_Node_Access
+   is begin
+      return Tree.Add_Nonterm ((+rhs_group_item_ID, RHS_Index), (1 => Content), Clear_Parents => False);
+   end Add_RHS_Group_Item;
+
+   function Add_RHS_Optional_Item
+     (Tree      : in out Syntax_Trees.Tree;
+      RHS_Index : in     Natural;
+      Content   : in     Valid_Node_Access)
+     return Valid_Node_Access
+   is begin
+      return Tree.Add_Nonterm
+        ((+rhs_optional_item_ID, RHS_Index),
+         (case RHS_Index is
+          when 0 =>
+            (1 => Tree.Add_Terminal (+LEFT_BRACKET_ID),
+             2 => Content,
+             3 => Tree.Add_Terminal (+RIGHT_BRACKET_ID)),
+          when 1 =>
+            (1 => Tree.Add_Terminal (+LEFT_PAREN_ID),
+             2 => Content,
+             3 => Tree.Add_Terminal (+RIGHT_PAREN_ID),
+             4 => Tree.Add_Terminal (+QUESTION_ID)),
+          when 2 | 3 =>
+            (1 => Content,
+             2 => Tree.Add_Terminal (+QUESTION_ID)),
+
+          when others => raise SAL.Programmer_Error),
+         Clear_Parents => True);
+   end Add_RHS_Optional_Item;
+
+   function Add_Identifier_Token
+     (Tree : in out Syntax_Trees.Tree;
+      Item : in     Identifier_Token)
+     return Valid_Node_Access
+   is begin
+      case Item.Label is
+      when Source_Terminal =>
+         return Tree.Add_Terminal
+           (WisiToken.Lexer.Token'
+              (ID          => ID (Tree, Item),
+               Byte_Region => Tree.Byte_Region (Item.Node, Trailing_Non_Grammar => False),
+               Char_Region => Tree.Char_Region (Item.Node, Trailing_Non_Grammar => False),
+               Line_Region => Null_Line_Region), --  Line_Region ignored for Source_Terminal
+            Errors => Syntax_Trees.Null_Error_List);
+
+      when Virtual_Terminal =>
+         return Tree.Add_Terminal (ID (Tree, Item));
+
+      when Virtual_Identifier =>
+         return Tree.Add_Identifier (ID (Tree, Item), Item.Identifier);
+      end case;
+   end Add_Identifier_Token;
+
+   function Add_RHS_Item
+     (Tree : in out Syntax_Trees.Tree;
+      Item : in     Valid_Node_Access)
+     return Valid_Node_Access
+   is begin
+      return Tree.Add_Nonterm ((+rhs_item_ID, 0), (1 => Item), Clear_Parents => True);
+   end Add_RHS_Item;
+
+   function Add_RHS_Element
+     (Tree  : in out Syntax_Trees.Tree;
+      Item  : in     Valid_Node_Access;
+      Label : in     Identifier_Token := Invalid_Identifier_Token)
+     return Valid_Node_Access
+   is
+      Label_Node : constant Node_Access :=
+        (if Label = Invalid_Identifier_Token
+         then Invalid_Node_Access
+         else Add_Identifier_Token (Tree, Label));
+   begin
+      return Tree.Add_Nonterm
+        ((+rhs_element_ID, (if Label = Invalid_Identifier_Token then 0 else 1)),
+         (if Label = Invalid_Identifier_Token
+          then (1 => Item)
+          else (Label_Node, Tree.Add_Terminal (+EQUAL_ID), Item)),
+         Clear_Parents => False);
+   end Add_RHS_Element;
+
+   function Empty_RHS_Item_List
+     (Tree : aliased in out Syntax_Trees.Tree)
+     return LR_Utils.List
+   is begin
+      return LR_Utils.Creators.Empty_List
+        (Tree,
+         List_ID           => +rhs_item_list_ID,
+         Multi_Element_RHS => 1,
+         Element_ID        => +rhs_element_ID,
+         Separator_ID      => Invalid_Token_ID);
+   end Empty_RHS_Item_List;
+
+   function To_RHS_Item_List
+     (Tree         : aliased in out Syntax_Trees.Tree;
+      List_Element :         in     Valid_Node_Access)
+     return LR_Utils.List
+   with Pre => Tree.ID (List_Element) = +rhs_element_ID
+   is
+      use LR_Utils;
+      Result : List := Creators.Empty_List
+        (Tree,
+         List_ID           => +rhs_item_list_ID,
+         Multi_Element_RHS => 1,
+         Element_ID        => +rhs_element_ID,
+         Separator_ID      => Invalid_Token_ID);
+   begin
+      Result.Append (List_Element);
+      return Result;
+   end To_RHS_Item_List;
+
+   function Empty_RHS_List
+     (Tree : aliased in out Syntax_Trees.Tree)
+     return LR_Utils.List
+   is begin
+      return LR_Utils.Creators.Empty_List
+        (Tree,
+         List_ID           => +rhs_list_ID,
+         Multi_Element_RHS => 1,
+         Element_ID        => +rhs_ID,
+         Separator_ID      => +BAR_ID);
+   end Empty_RHS_List;
+
+   function Add_RHS
+     (Tree              : in out Syntax_Trees.Tree;
+      Item              : in     Valid_Node_Access;
+      Auto_Token_Labels : in     Boolean;
+      Edited_Token_List : in     Boolean;
+      Post_Parse_Action : in     Node_Access := Invalid_Node_Access;
+      In_Parse_Action   : in     Node_Access := Invalid_Node_Access)
+     return Valid_Node_Access
+   is
+      Aug : constant Augmented_Access := new WisiToken_Grammar_Runtime.Augmented'
+        (EBNF              => False,
+         Auto_Token_Labels => Auto_Token_Labels,
+         Edited_Token_List => Edited_Token_List);
+
+      RHS : constant Valid_Node_Access :=
+        (if In_Parse_Action = Invalid_Node_Access
+         then
+           (if Post_Parse_Action = Invalid_Node_Access
+            then Tree.Add_Nonterm ((+rhs_ID, 1), (1 => Item), Clear_Parents => True)
+            else Tree.Add_Nonterm ((+rhs_ID, 2), (Item, Post_Parse_Action), Clear_Parents => True))
+         else
+           (if Post_Parse_Action = Invalid_Node_Access
+            then Tree.Add_Nonterm
+              ((+rhs_ID, 3), (Item, Tree.Add_Terminal (+ACTION_ID), In_Parse_Action), Clear_Parents => True)
+            else Tree.Add_Nonterm
+              ((+rhs_ID, 3), (Item, Post_Parse_Action, In_Parse_Action), Clear_Parents => True)));
+   begin
+      Tree.Set_Augmented (RHS, WisiToken.Syntax_Trees.Augmented_Class_Access (Aug));
+      return RHS;
+   end Add_RHS;
+
+   function Empty_RHS (Tree : in out Syntax_Trees.Tree) return Valid_Node_Access
+   is begin
+      return Tree.Add_Nonterm ((+rhs_ID, 0), (1 .. 0 => Dummy_Node), Clear_Parents => False);
+   end Empty_RHS;
+
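+   --  Return the declaration or nonterminal node whose name is Name, or
+   --  Invalid_Node_Access if there is none.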
+   function Find_Declaration
+     (Data  : in     WisiToken_Grammar_Runtime.User_Data_Type;
+      Tree  : in out Syntax_Trees.Tree;
+      Name  : in     String)
+     return Node_Access
+   is
+      use LR_Utils;
+      use LR_Utils.Creators;
+
+      function Decl_Name (Decl : in Valid_Node_Access) return String
+      is begin
+         case To_Token_Enum (Tree.ID (Decl)) is
+         when declaration_ID =>
+            case Tree.RHS_Index (Decl) is
+            when 0 =>
+               return Get_Text (Data, Tree, Tree.Child (Decl, 3));
+
+            when 1 =>
+               return Get_Text (Data, Tree, Tree.Child (Decl, 6));
+
+            when 3 | 4 =>
+               return Get_Text (Data, Tree, Tree.Child (Decl, 2));
+
+            when others =>
+               return "";
+            end case;
+
+         when nonterminal_ID =>
+            return Get_Text (Data, Tree, Tree.Child (Decl, 1));
+
+         when others =>
+            return "";
+         end case;
+      end Decl_Name;
+
+      --  Tree.Root is wisitoken_accept, first child is SOI
+      List : constant Constant_List := Create_List
+        (Tree, Tree.Child (Tree.Root, 2), +compilation_unit_list_ID, +compilation_unit_ID);
+   begin
+      for N of List loop
+         declare
+            Decl : constant Valid_Node_Access := Tree.Child (N, 1);
+         begin
+            if Name = Decl_Name (Decl) then
+               return Decl;
+            end if;
+         end;
+      end loop;
+      return Invalid_Node_Access;
+   end Find_Declaration;
+
+   EBNF_Allowed : Boolean := True;
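+   --  Check that the children of Node match the structure expected for
+   --  Node's token ID and RHS index; report violations via Put_Error.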
+   procedure Validate_Node
+     (Tree                : in     Syntax_Trees.Tree;
+      Node                : in     Valid_Node_Access;
+      User_Data           : in out Syntax_Trees.User_Data_Type'Class;
+      Node_Error_Reported : in out Boolean)
+   is
+      use Ada.Text_IO;
+
+      Data : WisiToken_Grammar_Runtime.User_Data_Type renames WisiToken_Grammar_Runtime.User_Data_Type (User_Data);
+
+      procedure Put_Error (Msg : in String)
+      is begin
+         if Data.Error_Reported.Contains (Node) then
+            --  We only output one error per node, because we call Validate_Tree multiple times.
+            return;
+         end if;
+
+         Node_Error_Reported := True;
+
+         Put_Line
+           (Current_Error,
+            Tree.Error_Message
+              (Node, Tree.Image
+                 (Node,
+                  RHS_Index    => True,
+                  Children     => True,
+                  Node_Numbers => True)));
+         Put_Line (Current_Error, Tree.Error_Message (Node, "... invalid tree: " & Msg));
+         WisiToken.Generate.Error := True;
+      end Put_Error;
+
+      procedure Check_EBNF_Allowed
+      is begin
+         if not EBNF_Allowed then
+            Put_Error ("no EBNF allowed");
+         end if;
+      end Check_EBNF_Allowed;
+
+   begin
+      if Tree.Label (Node) /= Nonterm then
+         return;
+      end if;
+
+      declare
+         use all type SAL.Base_Peek_Type;
+         Children  : constant Node_Access_Array := Tree.Children (Node);
+         RHS_Index : constant Natural           := Tree.RHS_Index (Node);
+      begin
+         if (for some Child of Children => Child = null) then
+            Put_Error ("deleted child");
+            return;
+         end if;
+
+         case To_Token_Enum (Tree.ID (Node)) is
+         when nonterminal_ID =>
+            null;
+
+         when rhs_list_ID =>
+            case RHS_Index is
+            when 0 =>
+               if Children'Length /= 1 then
+                  Put_Error ("expected child_count 1");
+               elsif Tree.ID (Children (1)) /= +rhs_ID then
+                  Put_Error ("child 1 not rhs");
+               end if;
+
+            when 1 =>
+               if Tree.Child_Count (Node) /= 3 then
+                  Put_Error ("expected child_count 3");
+               elsif Tree.ID (Children (1)) /= +rhs_list_ID or
+                 Tree.ID (Children (2)) /= +BAR_ID or
+                 Tree.ID (Children (3)) /= +rhs_ID
+               then
+                  Put_Error ("expecting rhs_list BAR rhs");
+               end if;
+
+            when others =>
+               --  The rest are for %if .. %endif, which are supposed to be translated before now.
+               Put_Error ("unexpected RHS_Index");
+            end case;
+
+         when rhs_ID =>
+            case RHS_Index is
+            when 0 =>
+               if Children'Length /= 0 then
+                  Put_Error ("expected child_count 0");
+               end if;
+
+            when 1 =>
+               if Tree.Child_Count (Node) /= 1 then
+                  Put_Error ("expected child_count 1");
+               elsif Tree.ID (Children (1)) /= +rhs_item_list_ID then
+                  Put_Error ("expecting rhs_item_list");
+               end if;
+
+            when 2 =>
+               if Tree.Child_Count (Node) /= 2 then
+                  Put_Error ("expected child_count 2");
+               elsif Tree.ID (Children (1)) /= +rhs_item_list_ID or
+                 Tree.ID (Children (2)) /= +ACTION_ID
+               then
+                  Put_Error ("expecting rhs_item_list ACTION");
+               end if;
+
+            when 3 =>
+               if Tree.Child_Count (Node) /= 3 then
+                  Put_Error ("expected child_count 3");
+               elsif Tree.ID (Children (1)) /= +rhs_item_list_ID or
+                 Tree.ID (Children (2)) /= +ACTION_ID or
+                 Tree.ID (Children (3)) /= +ACTION_ID
+               then
+                  Put_Error ("expecting rhs_item_list ACTION ACTION");
+               end if;
+
+            when others =>
+               Put_Error ("unexpected RHS_Index");
+            end case;
+
+         when rhs_attribute_ID =>
+            Check_EBNF_Allowed;
+
+         when rhs_element_ID =>
+            case RHS_Index is
+            when 0 =>
+               if Tree.Child_Count (Node) /= 1 then
+                  Put_Error ("expected child_count 1");
+               elsif Tree.ID (Children (1)) /= +rhs_item_ID then
+                  Put_Error ("expecting rhs_item");
+               end if;
+
+            when 1 =>
+               if Tree.Child_Count (Node) /= 3 then
+                  Put_Error ("expected child_count 3");
+               elsif Tree.ID (Children (1)) /= +IDENTIFIER_ID or
+                 Tree.ID (Children (2)) /= +EQUAL_ID or
+                 Tree.ID (Children (3)) /= +rhs_item_ID
+               then
+                  Put_Error ("expecting IDENTIFIER EQUAL rhs_item");
+               end if;
+
+            when others =>
+               Put_Error ("unexpected RHS_Index");
+            end case;
+
+         when rhs_item_list_ID =>
+            case RHS_Index is
+            when 0 =>
+               if Tree.Child_Count (Node) /= 1 then
+                  Put_Error ("expected child_count 1");
+               elsif Tree.ID (Children (1)) /= +rhs_element_ID then
+                  Put_Error ("expecting rhs_element");
+               end if;
+
+            when 1 =>
+               if Tree.Child_Count (Node) /= 2 then
+                  Put_Error ("expected child_count 2");
+               elsif Tree.ID (Children (1)) /= +rhs_item_list_ID or
+                 Tree.ID (Children (2)) /= +rhs_element_ID
+               then
+                  Put_Error ("expecting rhs_item_list ELEMENT");
+               end if;
+
+            when others =>
+               Put_Error ("unexpected RHS_Index");
+            end case;
+
+         when rhs_item_ID =>
+            if Tree.Child_Count (Node) /= 1 then
+               Put_Error ("expected child_count 1");
+            end if;
+
+            case RHS_Index is
+            when 0 =>
+               if Tree.ID (Children (1)) /= +IDENTIFIER_ID then
+                  Put_Error ("expecting IDENTIFIER");
+               end if;
+
+            when 1 =>
+               if Tree.ID (Children (1)) /= +STRING_LITERAL_2_ID then
+                  Put_Error ("expecting STRING_LITERAL_2");
+               end if;
+
+            when 2 =>
+               if Tree.ID (Children (1)) /= +rhs_attribute_ID then
+                  Put_Error ("expecting rhs_attribute");
+               end if;
+
+            when 3 =>
+               if Tree.ID (Children (1)) /= +rhs_optional_item_ID then
+                  Put_Error ("expecting rhs_optional_item");
+               end if;
+
+            when 4 =>
+               if Tree.ID (Children (1)) /= +rhs_multiple_item_ID then
+                  Put_Error ("expecting rhs_multiple_item");
+               end if;
+
+            when 5 =>
+               if Tree.ID (Children (1)) /= +rhs_group_item_ID then
+                  Put_Error ("expecting rhs_group_item");
+               end if;
+
+            when others =>
+               Put_Error ("unexpected RHS_Index");
+            end case;
+
+         when rhs_group_item_ID =>
+            Check_EBNF_Allowed;
+            if RHS_Index /= 0 or
+              (Children'Length /= 3 or else
+                 (Tree.ID (Children (1)) /= +LEFT_PAREN_ID or
+                    Tree.ID (Children (2)) /= +rhs_alternative_list_ID or
+                    Tree.ID (Children (3)) /= +RIGHT_PAREN_ID))
+            then
+               Put_Error ("expecting RHS_Index 0, LEFT_PAREN rhs_alternative_list RIGHT_PAREN");
+            end if;
+
+         when rhs_optional_item_ID =>
+            Check_EBNF_Allowed;
+            case RHS_Index is
+            when 0 =>
+               if Children'Length /= 3 or else
+                 (Tree.ID (Children (1)) /= +LEFT_BRACKET_ID or
+                    Tree.ID (Children (2)) /= +rhs_alternative_list_ID or
+                    Tree.ID (Children (3)) /= +RIGHT_BRACKET_ID)
+               then
+                  Put_Error ("expecting LEFT_BRACKET rhs_alternative_list RIGHT_BRACKET");
+               end if;
+
+            when 1 =>
+               if Children'Length /= 4 or else
+                 (Tree.ID (Children (1)) /= +LEFT_PAREN_ID or
+                    Tree.ID (Children (2)) /= +rhs_alternative_list_ID or
+                    Tree.ID (Children (3)) /= +RIGHT_PAREN_ID or
+                    Tree.ID (Children (4)) /= +QUESTION_ID)
+               then
+                  Put_Error ("expecting LEFT_PAREN rhs_alternative_list RIGHT_PAREN QUESTION");
+               end if;
+
+            when 2 =>
+               if Children'Length /= 2 or else
+                 (Tree.ID (Children (1)) /= +IDENTIFIER_ID or
+                    Tree.ID (Children (2)) /= +QUESTION_ID)
+               then
+                  Put_Error ("expecting IDENTIFIER QUESTION");
+               end if;
+
+            when 3 =>
+               if Children'Length /= 2 or else
+                 (Tree.ID (Children (1)) /= +STRING_LITERAL_2_ID or
+                    Tree.ID (Children (2)) /= +QUESTION_ID)
+               then
+                  Put_Error ("expecting STRING_LITERAL_2 QUESTION");
+               end if;
+
+            when others =>
+               Put_Error ("unexpected RHS_Index");
+            end case;
+
+         when rhs_multiple_item_ID =>
+            Check_EBNF_Allowed;
+            case RHS_Index is
+            when 0 =>
+               if Children'Length /= 3 or else
+                 (Tree.ID (Children (1)) /= +LEFT_BRACE_ID or
+                    Tree.ID (Children (2)) /= +rhs_alternative_list_ID or
+                    Tree.ID (Children (3)) /= +RIGHT_BRACE_ID)
+               then
+                  Put_Error ("expecting LEFT_BRACE rhs_alternative_list RIGHT_BRACE");
+               end if;
+
+            when 1 =>
+               if Children'Length /= 4 or else
+                 (Tree.ID (Children (1)) /= +LEFT_BRACE_ID or
+                    Tree.ID (Children (2)) /= +rhs_alternative_list_ID or
+                    Tree.ID (Children (3)) /= +RIGHT_BRACE_ID or
+                    Tree.ID (Children (4)) /= +MINUS_ID)
+               then
+                  Put_Error ("expecting LEFT_BRACE rhs_alternative_list RIGHT_BRACE MINUS");
+               end if;
+
+            when 2 =>
+               if Children'Length /= 4 or else
+                 (Tree.ID (Children (1)) /= +LEFT_PAREN_ID or
+                    Tree.ID (Children (2)) /= +rhs_alternative_list_ID or
+                    Tree.ID (Children (3)) /= +RIGHT_PAREN_ID or
+                    Tree.ID (Children (4)) /= +PLUS_ID)
+               then
+                  Put_Error ("expecting LEFT_PAREN rhs_alternative_list RIGHT_PAREN PLUS");
+               end if;
+
+            when 3 =>
+               if Children'Length /= 4 or else
+                 (Tree.ID (Children (1)) /= +LEFT_PAREN_ID or
+                    Tree.ID (Children (2)) /= +rhs_alternative_list_ID or
+                    Tree.ID (Children (3)) /= +RIGHT_PAREN_ID or
+                    Tree.ID (Children (4)) /= +STAR_ID)
+               then
+                  Put_Error ("expecting LEFT_PAREN rhs_alternative_list RIGHT_PAREN STAR");
+               end if;
+
+            when 4 =>
+               if Children'Length /= 2 or else
+                 (Tree.ID (Children (1)) /= +IDENTIFIER_ID or
+                    Tree.ID (Children (2)) /= +PLUS_ID)
+               then
+                  Put_Error ("expecting IDENTIFIER PLUS");
+               end if;
+
+            when 5 =>
+               if Children'Length /= 2 or else
+                 (Tree.ID (Children (1)) /= +IDENTIFIER_ID or
+                    Tree.ID (Children (2)) /= +STAR_ID)
+               then
+                  Put_Error ("expecting IDENTIFIER STAR");
+               end if;
+
+            when others =>
+               Put_Error ("unexpected RHS_Index");
+            end case;
+
+         when rhs_alternative_list_ID =>
+            Check_EBNF_Allowed;
+            case RHS_Index is
+            when 0 =>
+               if Children'Length /= 1 or else
+                 (Tree.ID (Children (1)) /= +rhs_item_list_ID)
+               then
+                  Put_Error ("expecting rhs_item_list");
+               end if;
+
+            when 1 =>
+               if Children'Length /= 3 or else
+                 (Tree.ID (Children (1)) /= +rhs_alternative_list_ID or
+                    Tree.ID (Children (2)) /= +BAR_ID or
+                    Tree.ID (Children (3)) /= +rhs_item_list_ID)
+               then
+                  Put_Error ("expecting rhs_alternative_list BAR rhs_item_list");
+               end if;
+            when others =>
+               Put_Error ("unexpected RHS_Index");
+            end case;
+
+         when compilation_unit_ID =>
+            null;
+
+         when compilation_unit_list_ID =>
+            null;
+
+         when others =>
+            null;
+         end case;
+      end;
+   end Validate_Node;
+
+   procedure Translate_EBNF_To_BNF
+     (Tree : in out Syntax_Trees.Tree;
+      Data : in out WisiToken_Grammar_Runtime.User_Data_Type)
+   is
+      use all type Ada.Containers.Count_Type;
+      use all type SAL.Base_Peek_Type;
+      use all type Node_Sets.Set;
+
+      Data_Access : constant Syntax_Trees.User_Data_Access_Constant := Data'Unchecked_Access;
+
+      EBNF_Nodes        : Node_Sets.Set;
+      Copied_EBNF_Nodes : Node_Sets.Set;
+
+      Symbol_Regexp : constant GNAT.Regexp.Regexp := GNAT.Regexp.Compile
+        ((if Data.Language_Params.Case_Insensitive
+          then "[A-Z0-9_]+"
+          else "[a-zA-Z0-9_]+"),
+         Case_Sensitive => not Data.Language_Params.Case_Insensitive);
+
+      procedure Clear_EBNF_Node (Node : in Valid_Node_Access)
+      is begin
+         if EBNF_Nodes.Contains (Node) then
+            if Trace_Generate_EBNF > Outline then
+               Ada.Text_IO.Put_Line ("clear translated EBNF node " & Trimmed_Image (Get_Node_Index (Node)));
+            end if;
+            EBNF_Nodes.Delete (Node);
+         else
+            Copied_EBNF_Nodes.Delete (Node);
+         end if;
+      end Clear_EBNF_Node;
+
+      function New_Identifier (Text : in String) return Identifier_Index
+      is
+         ID : constant Identifier_Index := Base_Identifier_Index (Data.Tokens.Virtual_Identifiers.Length) + 1;
+      begin
+         Data.Tokens.Virtual_Identifiers.Append (+Text);
+         return ID;
+      end New_Identifier;
+
+      Keyword_Ident : constant Identifier_Index := New_Identifier ("keyword");
+      Percent_Ident : constant Identifier_Index := New_Identifier ("percent");
+
+      function Next_Nonterm_Name (Prefix : in String := "nonterminal") return Identifier_Index
+      is
+         function Image is new SAL.Generic_Decimal_Image (Identifier_Index);
+         ID : constant Identifier_Index := Identifier_Index (Data.Tokens.Virtual_Identifiers.Length) + 1;
+      begin
+
+         if ID > 999 then
+            --  We assume 3 digits below
+            raise SAL.Programmer_Error with "more than 3 digits needed for virtual identifiers in EBNF translate";
+         end if;
+
+         Data.Tokens.Virtual_Identifiers.Append (+(Prefix & "_" & Image (ID, Width => 3)));
+
+         return ID;
+      end Next_Nonterm_Name;
+
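+      --  True if RHS has an action, contains an EBNF node, and has no
+      --  manually labeled token, so auto token labels are needed.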
+      function Needs_Token_Labels (RHS : in Valid_Node_Access) return Boolean
+      is
+         Has_EBNF         : Boolean := False;
+         Has_Manual_Label : Boolean := False;
+
+         procedure Any_EBNF_Manual_Label
+           (Tree : in out Syntax_Trees.Tree;
+            Node : in     Valid_Node_Access)
+         is begin
+            Has_Manual_Label := Has_Manual_Label or
+              (Tree.ID (Node) = +rhs_element_ID and then Tree.RHS_Index (Node) = 1);
+
+            Has_EBNF := Has_EBNF or EBNF_Nodes.Contains (Node);
+            --  Not every EBNF node requires auto-labels (i.e. literal tokens), but
+            --  it's not easy to tell from here.
+         end Any_EBNF_Manual_Label;
+
+      begin
+         case Tree.RHS_Index (RHS) is
+         when 0 | 1 =>
+            return False;
+
+         when 2 | 3 =>
+            Tree.Process_Tree (Any_EBNF_Manual_Label'Unrestricted_Access, RHS);
+            return Has_EBNF and not Has_Manual_Label;
+
+         when others =>
+            raise SAL.Programmer_Error;
+         end case;
+      end Needs_Token_Labels;
+
+      Last_Token_Index : Integer := 0;
+
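+      --  Return a virtual identifier for the next auto token label
+      --  (Prefix & count), reusing an existing identifier with the same text.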
+      function Next_Token_Label (Prefix : in String := "T") return WisiToken.Identifier_Index
+      is begin
+         Last_Token_Index := @ + 1;
+         declare
+            Label : constant String := Prefix & WisiToken.Trimmed_Image (Last_Token_Index);
+         begin
+            for I in Data.Tokens.Virtual_Identifiers.First_Index .. Data.Tokens.Virtual_Identifiers.Last_Index loop
+               if -Data.Tokens.Virtual_Identifiers (I) = Label then
+                  return I;
+               end if;
+            end loop;
+            return New_Identifier (Label);
+         end;
+      end Next_Token_Label;
+
+      procedure Add_Token_Labels (RHS : in Valid_Node_Access)
+      with Pre => Tree.ID (RHS) = +rhs_ID
+      is
+         use LR_Utils;
+
+         procedure Add_Token_Label (Element : in out Valid_Node_Access)
+         with Pre => Tree.ID (Element) = +rhs_element_ID
+         is
+            Ident : constant Valid_Node_Access := Tree.Add_Identifier (+IDENTIFIER_ID, Next_Token_Label);
+            Equal : constant Valid_Node_Access := Tree.Add_Terminal (+EQUAL_ID);
+         begin
+            Tree.Set_Children (Element, (+rhs_element_ID, 1), (Ident, Equal, Tree.Child (Element, 1)));
+         end Add_Token_Label;
+
+         procedure Add_Token_Labels_1 (Node : in Valid_Node_Access)
+         with Pre => To_Token_Enum (Tree.ID (Node)) in rhs_alternative_list_ID | rhs_item_list_ID
+         is begin
+            case To_Token_Enum (Tree.ID (Node)) is
+            when rhs_alternative_list_ID =>
+               declare
+                  Alt_List : constant Constant_List := Creators.Create_List
+                    (Tree, Node, +rhs_alternative_list_ID, +rhs_item_list_ID);
+               begin
+                  for Item_List of Alt_List loop
+                     Add_Token_Labels_1 (Item_List);
+                  end loop;
+               end;
+
+            when rhs_item_list_ID =>
+               declare
+                  RHS_Item_List : constant Constant_List := Creators.Create_List
+                    (Tree, Node, +rhs_item_list_ID, +rhs_element_ID);
+               begin
+                  for Cur in RHS_Item_List.Iterate_Constant loop
+                     declare
+                        Element : Valid_Node_Access          := WisiToken.Syntax_Trees.LR_Utils.Element (Cur);
+                        Item    : constant Valid_Node_Access := Tree.Child (Element, 1);
+                     begin
+                        case Tree.RHS_Index (Item) is
+                        when 0 | 1 =>
+                           Add_Token_Label (Element);
+
+                        when 2 =>
+                           null;
+
+                        when 3 =>
+                           declare
+                              Opt_Item : constant Valid_Node_Access := Tree.Child (Item, 1);
+                           begin
+                              case Tree.RHS_Index (Opt_Item) is
+                              when 0 | 1 =>
+                                 Add_Token_Labels_1 (Tree.Child (Opt_Item, 2));
+
+                              when 2 | 3 =>
+                                 Add_Token_Label (Element);
+                              when others =>
+                                 raise SAL.Programmer_Error;
+                              end case;
+                           end;
+
+                        when 4 =>
+                           declare
+                              Mult_Item : constant Valid_Node_Access := Tree.Child (Item, 1);
+                           begin
+                              case Tree.RHS_Index (Mult_Item) is
+                              when 0 .. 3 =>
+                                 declare
+                                    Content_List : constant Constant_List := Creators.Create_List
+                                      (Tree, Tree.Child (Mult_Item, 2), +rhs_alternative_list_ID, +rhs_item_list_ID);
+                                 begin
+                                    if Content_List.Count = 1 then
+                                       Add_Token_Label (Element);
+                                    else
+                                       Add_Token_Labels_1 (Tree.Child (Mult_Item, 2));
+                                    end if;
+                                 end;
+
+                              when 4 .. 5 =>
+                                 Add_Token_Label (Element);
+                              when others =>
+                                 raise SAL.Programmer_Error;
+                              end case;
+                           end;
+
+                        when 5 =>
+                           --  rhs_group_item
+                           Add_Token_Labels_1 (Tree.Child (Tree.Child (Item, 1), 2));
+
+                        when others =>
+                           raise SAL.Programmer_Error;
+                        end case;
+                     end;
+                  end loop;
+               end;
+            when others =>
+               raise SAL.Programmer_Error;
+            end case;
+         end Add_Token_Labels_1;
+      begin
+         if Trace_Generate_EBNF > Outline then
+            Ada.Text_IO.Put_Line ("add token labels " & Tree.Image (RHS, Node_Numbers => True));
+         end if;
+
+         case Tree.RHS_Index (RHS) is
+         when 0 | 1 =>
+            null;
+
+         when 2 | 3 =>
+            declare
+               Aug : Augmented_Access := Augmented_Access (Tree.Augmented (RHS));
+            begin
+               if Aug = null then
+                  Aug := new WisiToken_Grammar_Runtime.Augmented'(Auto_Token_Labels => True, others => <>);
+                  Tree.Set_Augmented (RHS, WisiToken.Syntax_Trees.Augmented_Class_Access (Aug));
+               else
+                  Aug.Auto_Token_Labels := True;
+               end if;
+               Add_Token_Labels_1 (Tree.Child (RHS, 1));
+               --  Labels in actions will be applied in wisitoken-bnf-output_ada_emacs.adb
+            end;
+         when others =>
+            raise SAL.Programmer_Error;
+         end case;
+      end Add_Token_Labels;
+
+      function Nonterm_Content_Equal
+        (Target : in String;
+         List   : in LR_Utils.Constant_List'Class;
+         Node   : in Valid_Node_Access)
+        return Boolean
+      is
+         pragma Unreferenced (List);
+      begin
+         if Tree.ID (Tree.Child (Node, 1)) = +nonterminal_ID then
+            --  Target nonterm is:
+            --
+            --  (compilation_unit_1, (111 . 128))
+            --  | (nonterminal_0, (111 . 128))
+            --  | |  7;(IDENTIFIER, (111 . 128))
+            --  | | (COLON)
+            --  | | (rhs_list_1, (111 . 128))
+            --  | | | ...
+            declare
+               RHS_List_1 : constant Node_Access := Tree.Child (Tree.Child (Node, 1), 3);
+            begin
+               if RHS_List_1 /= Invalid_Node_Access and then
+                 Target = Get_Text (Data, Tree, RHS_List_1)
+               then
+                  return True;
+               end if;
+            end;
+         end if;
+         return False;
+      end Nonterm_Content_Equal;
+
+      function Find_Nonterminal
+        (Target : in String;
+         Equal  : in LR_Utils.Find_Equal)
+        return Node_Access
+      is
+         use LR_Utils;
+      begin
+         return Element
+           (Creators.Create_List
+              (Tree, Tree.Child (Tree.Root, 2), +compilation_unit_list_ID, +compilation_unit_ID).Find
+              (Target, Equal));
+      end Find_Nonterminal;
+
+      function Tree_Add_Nonterminal
+        (Child_1 : in Valid_Node_Access;
+         Child_2 : in Valid_Node_Access;
+         Child_3 : in Valid_Node_Access;
+         Child_4 : in Valid_Node_Access)
+        return Valid_Node_Access
+      is begin
+         --  Work around GNAT error about arbitrary evaluation order in
+         --  aggregates (no error about the arbitrary order in subprogram
+         --  parameter_association_lists!).
+         return Tree.Add_Nonterm
+           (Production    => (+nonterminal_ID, 0),
+            Children      => (Child_1, Child_2, Child_3, Child_4),
+            Clear_Parents => False);
+      end Tree_Add_Nonterminal;
+
+      function Duplicate
+        (List        : in LR_Utils.Constant_List'Class;
+         New_Content : in Node_Access)
+        return Boolean
+      is
+         --  We don't require New_Content.ID = List.Element_ID, since we are
+         --  comparing the result of Get_Text.
+         New_Content_Str : constant String :=
+           (if New_Content = Invalid_Node_Access
+            then "" --  Empty RHS
+            else Get_Text (Data, Tree, New_Content));
+      begin
+         for N of List loop
+            if New_Content_Str = Get_Text (Data, Tree, N) then
+               return True;
+            end if;
+         end loop;
+         return False;
+      end Duplicate;
+
+      procedure Insert_Empty_RHS
+        (RHS_List : in out LR_Utils.List;
+         After    : in     Valid_Node_Access)
+      with Pre => RHS_List.List_ID = +rhs_list_ID and RHS_List.Element_ID = +rhs_ID and
+                  Tree.ID (After) = +rhs_ID and RHS_List.Contains (After)
+      is begin
+         RHS_List.Insert
+           (New_Element      => Tree.Add_Nonterm
+              ((+rhs_ID, 0),
+               (1 .. 0       => Invalid_Node_Access),
+               Clear_Parents => False),
+            After            => RHS_List.To_Cursor (After));
+      end Insert_Empty_RHS;
+
+      procedure Insert_RHS
+        (RHS_List          : in out LR_Utils.List;
+         New_RHS_Item_List : in     Valid_Node_Access;
+         After             : in     Valid_Node_Access;
+         Auto_Token_Labels : in     Boolean)
+      with Pre => RHS_List.List_ID = +rhs_list_ID and RHS_List.Element_ID = +rhs_ID and
+                  Tree.ID (New_RHS_Item_List) = +rhs_item_list_ID and
+                  Tree.ID (After) = +rhs_ID and RHS_List.Contains (After)
+      is
+         Child_3 : constant Valid_Node_Access :=
+           (if Tree.RHS_Index (After) = 3
+            then Tree.Copy_Subtree (Tree.Child (After, 3), Data_Access)
+            else Dummy_Node);
+
+         RHS : constant Valid_Node_Access := Tree.Add_Nonterm
+           (Production => (+rhs_ID, Tree.RHS_Index (After)),
+            Children =>
+              (case Tree.RHS_Index (After) is
+               when 1 => (1 => New_RHS_Item_List),
+               when 2 => (New_RHS_Item_List, Tree.Copy_Subtree (Tree.Child (After, 2), Data_Access)),
+               when 3 => (New_RHS_Item_List,
+                          Tree.Copy_Subtree (Tree.Child (After, 2), Data_Access),
+                          Child_3),
+               when others => raise SAL.Programmer_Error),
+            Clear_Parents  => True);
+
+         Aug : constant Augmented_Access := new WisiToken_Grammar_Runtime.Augmented'
+           (EBNF              => False,
+            Auto_Token_Labels => Auto_Token_Labels,
+            Edited_Token_List => True);
+      begin
+         Tree.Set_Augmented (RHS, WisiToken.Syntax_Trees.Augmented_Class_Access (Aug));
+
+         RHS_List.Insert
+           (New_Element   => RHS,
+            After         => RHS_List.To_Cursor (After));
+      end Insert_RHS;
+
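+      --  Record all EBNF nodes in the subtree rooted at Node in
+      --  Copied_EBNF_Nodes.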
+      procedure Record_Copied_EBNF_Nodes (Node : in Valid_Node_Access)
+      is
+         procedure Record_Copied_Node
+           (Tree : in out Syntax_Trees.Tree;
+            Node : in     Valid_Node_Access)
+         is begin
+            if To_Token_Enum (Tree.ID (Node)) in
+              rhs_optional_item_ID |
+              rhs_multiple_item_ID |
+              rhs_group_item_ID |
+              rhs_attribute_ID |
+              STRING_LITERAL_2_ID
+            then
+               if Trace_Generate_EBNF > Outline then
+                  Ada.Text_IO.Put_Line ("new EBNF node " & Tree.Image (Node, Node_Numbers => True));
+               end if;
+               Copied_EBNF_Nodes.Insert (Node);
+            end if;
+         end Record_Copied_Node;
+      begin
+         Tree.Process_Tree (Record_Copied_Node'Access, Node);
+      end Record_Copied_EBNF_Nodes;
+
+      procedure Erase_Deleted_EBNF_Nodes (Node : in Valid_Node_Access)
+      is
+         procedure Erase_Deleted_Node
+           (Tree : in out Syntax_Trees.Tree;
+            Node : in     Valid_Node_Access)
+         is begin
+            if To_Token_Enum (Tree.ID (Node)) in
+              rhs_optional_item_ID |
+              rhs_multiple_item_ID |
+              rhs_group_item_ID |
+              rhs_attribute_ID |
+              STRING_LITERAL_2_ID
+            then
+               if EBNF_Nodes.Contains (Node) then
+                  --  Node is original, not copied
+                  if Trace_Generate_EBNF > Outline then
+                     Ada.Text_IO.Put_Line ("erase original deleted EBNF node" 
& Trimmed_Image (Get_Node_Index (Node)));
+                  end if;
+                  EBNF_Nodes.Delete (Node);
+               else
+                  Copied_EBNF_Nodes.Delete (Node);
+               end if;
+            end if;
+         end Erase_Deleted_Node;
+      begin
+         Tree.Process_Tree (Erase_Deleted_Node'Access, Node);
+      end Erase_Deleted_EBNF_Nodes;
+
+      function Get_RHS_Auto_Token_Labels (Node : in Valid_Node_Access) return 
Boolean
+      is
+         RHS : constant Valid_Node_Access :=
+           (if Tree.ID (Node) = +rhs_ID then Node else Tree.Find_Ancestor 
(Node, +rhs_ID));
+         Aug : constant Augmented_Access := Augmented_Access (Tree.Augmented 
(RHS));
+      begin
+         if Aug = null then
+            return False;
+         else
+            return Aug.Auto_Token_Labels;
+         end if;
+      end Get_RHS_Auto_Token_Labels;
+
+      function Insert_Optional_RHS (B : in Valid_Node_Access) return 
Valid_Node_Access
+      with Pre => Tree.ID (B) in +rhs_multiple_item_ID | +rhs_optional_item_ID 
| +IDENTIFIER_ID
+      is
+         --  B is an optional item in an rhs_item_list:
+         --  | A B? C
+         --
+         --  or B is an rhs_multiple_item that is allowed to be empty:
+         --  | A B* C
+         --
+         --  or B is a virtual identifier naming the new nonterm replacing the
+         --  original
+         --
+         --  A, C can be empty. The containing element may be rhs or
+         --  rhs_alternative_list.
+         --
+         --  Insert either a second rhs, or a second rhs_item_list, after the
+         --  one containing B, without B.
+         --
+         --  Return the List_Root of the edited list.
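+         --
+         --  For example (a hypothetical fragment): when the containing rhs
+         --  is 'A B? C', this inserts a copy of that rhs with B skipped, so
+         --  the rhs_list eventually reads 'A B C | A C'.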
+
+         use LR_Utils;
+         use LR_Utils.Creators;
+
+         function Find_Skips return Skip_Info
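+         --  Return a Skip_Info for copying the nearest enclosing
+         --  rhs_item_list of B that is not emptied by skipping B, omitting
+         --  B and any nested lists that would be empty without it.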
+         is
+            Non_Empty_List : Node_Access := Invalid_Node_Access;
+            --  First (nearest) rhs_item_list ancestor of B that will not be 
empty
+            --  when B is skipped.
+
+            Skip_Last        : Positive_Index_Type'Base := 
Positive_Index_Type'First;
+            Last_Skip_Node   : Valid_Node_Access         := Tree.Find_Ancestor 
(B, +rhs_element_ID);
+            Reset_Search_For : WisiToken.Token_ID       := +rhs_item_list_ID;
+
+            procedure Search (Result : in out Skip_Info)
+            is
+               Skip_Node  : Valid_Node_Access   := Last_Skip_Node;
+               Search_For : WisiToken.Token_ID := Reset_Search_For;
+            begin
+               loop
+                  case To_Token_Enum (Search_For) is
+                  when rhs_item_list_ID =>
+                     Skip_Node := Tree.Find_Ancestor (Skip_Node, 
+rhs_item_list_ID);
+
+                     Skip_Node := List_Root (Tree, Skip_Node, 
+rhs_item_list_ID);
+
+                     Search_For := +rhs_element_ID;
+
+                     if Result.Skips'Length = 0 then
+                        declare
+                           List_Count : constant Ada.Containers.Count_Type := 
Create_List
+                             (Tree, Skip_Node, +rhs_item_list_ID, 
+rhs_element_ID).Count;
+                        begin
+                           if List_Count > 1 then
+                              Non_Empty_List := List_Root (Tree, Skip_Node, 
+rhs_item_list_ID);
+                              exit;
+
+                           elsif Skip_Last = Positive_Index_Type'First and 
List_Count = 1 then
+                              --  This list will be empty; no need to descend 
into it
+                              Last_Skip_Node   := Skip_Node;
+                              Reset_Search_For := Search_For;
+                           else
+                              Skip_Last := Skip_Last + 1;
+                           end if;
+                        end;
+                     else
+                        Result.Skips (Skip_Last) :=
+                          (Label             => Nested,
+                           Element           => Skip_Node,
+                           List_Root         => Skip_Node,
+                           List_ID           => +rhs_item_list_ID,
+                           Element_ID        => +rhs_element_ID,
+                           Separator_ID      => Invalid_Token_ID,
+                           Multi_Element_RHS => 1);
+
+                        Skip_Last := Skip_Last - 1;
+                     end if;
+
+                  when rhs_element_ID =>
+                     declare
+                        List_Node : Valid_Node_Access := Tree.Find_Ancestor
+                          (Skip_Node, (+rhs_ID, +rhs_alternative_list_ID));
+                     begin
+
+                        if Result.Skips'Length = 0 and then
+                          Tree.ID (List_Node) = +rhs_ID
+                        then
+                           Non_Empty_List := List_Root (Tree, Skip_Node, 
+rhs_item_list_ID);
+                           Skip_Last      := Skip_Last - 1;
+                           exit;
+                        end if;
+
+                        List_Node := List_Root (Tree, List_Node, 
+rhs_alternative_list_ID);
+                        Skip_Node := Tree.Find_Ancestor (Skip_Node, 
+rhs_element_ID);
+
+                        Search_For := +rhs_item_list_ID;
+
+                        if Result.Skips'Length = 0 then
+                           if Skip_Last = Positive_Index_Type'First then
+                              --  This list will be empty; no need to descend 
into it
+                              Last_Skip_Node   := Skip_Node;
+                              Reset_Search_For := Search_For;
+                           else
+                              Skip_Last := Skip_Last + 1;
+                           end if;
+                        else
+                           Result.Skips (Skip_Last) :=
+                             (Label             => Nested,
+                              Element           => Skip_Node,
+                              List_Root         => List_Node,
+                              List_ID           => +rhs_alternative_list_ID,
+                              Element_ID        => +rhs_item_list_ID,
+                              Separator_ID      => +BAR_ID,
+                              Multi_Element_RHS => 1);
+
+                           Skip_Last := Skip_Last - 1;
+                        end if;
+                     end;
+                  when others =>
+                     raise SAL.Programmer_Error;
+                  end case;
+
+               end loop;
+            end Search;
+
+            Result_1 : Skip_Info (Skip_Last => Positive_Index_Type'First - 1);
+         begin
+            --  First count the number of Skip_Items we need, and set
+            --  Non_Empty_List.
+            Search (Result_1);
+
+            declare
+               Result : Skip_Info (Skip_Last);
+            begin
+               if Result.Skips'Length = 0 then
+                  return Result;
+               end if;
+
+               Result.Start_List_Root  := Non_Empty_List;
+               Result.Start_List_ID    := +rhs_item_list_ID;
+               Result.Start_Element_ID := +rhs_element_ID;
+
+               Result.Start_Separator_ID      := Invalid_Token_ID;
+               Result.Start_Multi_Element_RHS := 1;
+
+               Result.Skips (Skip_Last) := (Skip, Last_Skip_Node);
+
+               if Result.Skips'Length = 1 then
+                  return Result;
+               end if;
+
+               Search (Result);
+               return Result;
+            end;
+         end Find_Skips;
+
+         Container : Valid_Node_Access := Tree.Find_Ancestor (B, (+rhs_ID, 
+rhs_alternative_list_ID));
+         Container_ID : WisiToken.Token_ID := Tree.ID (Container);
+
+         Container_List : LR_Utils.List :=
+           (if Container_ID = +rhs_ID
+            then Create_From_Element
+              (Tree,
+               Element      => Container,
+               List_ID      => +rhs_list_ID,
+               Element_ID   => +rhs_ID,
+               Separator_ID => +BAR_ID)
+            else Create_List
+              (Tree,
+               Root         => List_Root (Tree, Container, 
+rhs_alternative_list_ID),
+               List_ID      => +rhs_alternative_list_ID,
+               Element_ID   => +rhs_item_list_ID,
+               Separator_ID => +BAR_ID));
+
+      begin
+         if Trace_Generate_EBNF > Extra then
+            Ada.Text_IO.New_Line;
+            Ada.Text_IO.Put_Line ("Insert_Optional_RHS start: " & Get_Text 
(Data, Tree, Container));
+            Tree.Print_Tree (Container);
+         end if;
+
+         declare
+            Skip_List : constant Skip_Info := Find_Skips;
+
+            New_RHS_AC   : Node_Access := Invalid_Node_Access;
+            Is_Duplicate : Boolean    := False;
+         begin
+            if WisiToken.Trace_Generate_EBNF > Extra then
+               Ada.Text_IO.New_Line;
+               Ada.Text_IO.Put_Line ("skip: " & Image (Skip_List, 
Wisitoken_Grammar_Actions.Descriptor));
+            end if;
+
+            if Skip_List.Skips'Length = 0 or else
+              +rhs_ID = Tree.ID (Tree.Parent (Skip_List.Start_List_Root))
+            then
+               --  Insert an edited rhs into the rhs_list.
+               --
+               --  We can't insert an empty rhs_item_list into an
+               --  rhs_alternative_list, so we insert an empty rhs.
+
+               if Container_ID = +rhs_alternative_list_ID then
+
+                  Container := Tree.Find_Ancestor (B, +rhs_ID);
+
+                  Container_ID := +rhs_ID;
+
+                  Container_List := Create_From_Element
+                    (Tree,
+                     Element      => Container,
+                     List_ID      => +rhs_list_ID,
+                     Element_ID   => +rhs_ID,
+                     Separator_ID => +BAR_ID);
+               end if;
+
+               if Skip_List.Skips'Length = 0 then
+                  --  New rhs is empty; no rhs_item_list
+                  null;
+               else
+                  New_RHS_AC := Copy_Skip_Nested (Skip_List, Tree, 
Data_Access);
+               end if;
+
+               if Duplicate (Container_List, New_RHS_AC) then
+                  Is_Duplicate := True;
+               else
+                  if Skip_List.Skips'Length = 0 then
+                     Insert_Empty_RHS (Container_List, Container);
+                  else
+                     Insert_RHS
+                       (Container_List,
+                        New_RHS_AC,
+                        After             => Container,
+                        Auto_Token_Labels => Get_RHS_Auto_Token_Labels (B));
+                  end if;
+               end if;
+
+            else
+               --  Insert an edited rhs_item_list into an rhs_alternative_list
+
+               New_RHS_AC := Copy_Skip_Nested (Skip_List, Tree, Data_Access);
+
+               if Duplicate (Container_List, New_RHS_AC) then
+                  --  IMPROVEME: check for duplicates before doing the copy;
+                  --  requires a version of Get_Text that understands Skip_Info.
+                  Is_Duplicate := True;
+               else
+                  declare
+                     After : Valid_Node_Access := B;
+
+                     Aug : constant Augmented_Access := new 
WisiToken_Grammar_Runtime.Augmented'
+                       (EBNF              => False,
+                        Auto_Token_Labels => Get_RHS_Auto_Token_Labels (B),
+                        Edited_Token_List => True);
+                  begin
+                     loop
+                        After := List_Root (Tree, Tree.Find_Ancestor (After, 
+rhs_item_list_ID), +rhs_item_list_ID);
+                        exit when Container_List.Contains (After);
+                     end loop;
+
+                     Tree.Set_Augmented (New_RHS_AC, 
WisiToken.Syntax_Trees.Augmented_Class_Access (Aug));
+
+                     Container_List.Insert
+                       (New_Element   => New_RHS_AC,
+                        After         => Container_List.To_Cursor (After));
+                  end;
+               end if;
+            end if;
+
+            if Trace_Generate_EBNF > Detail then
+               Ada.Text_IO.New_Line;
+               if Is_Duplicate then
+                  Ada.Text_IO.Put_Line
+                    ("Insert_Optional_RHS duplicate '" &
+                       (if New_RHS_AC = null
+                        then "<empty>"
+                        else Get_Text (Data, Tree, New_RHS_AC)) & "'");
+               else
+                  if Container_ID = +rhs_ID then
+                     Ada.Text_IO.Put_Line
+                       ("Insert_Optional_RHS old rhs, new rhs: " & Get_Text 
(Data, Tree, Container_List.Root));
+                     Tree.Print_Tree (Container_List.Root);
+                  else
+                     Ada.Text_IO.Put_Line
+                       ("Insert_Optional_RHS edited rhs_alternative_list: " & 
Get_Text
+                          (Data, Tree, Tree.Parent (Container_List.Root)));
+                     Tree.Print_Tree (Tree.Parent (Container_List.Root));
+                  end if;
+               end if;
+            end if;
+
+            if not (Skip_List.Skips'Length = 0 or Is_Duplicate) then
+               Record_Copied_EBNF_Nodes (New_RHS_AC);
+            end if;
+         end;
+         return Container_List.Root;
+      end Insert_Optional_RHS;
+
+      procedure Add_Compilation_Unit (Label : in String; Unit : in 
Valid_Node_Access; Prepend : in Boolean := False)
+      with Pre => Tree.ID (Unit) in +declaration_ID | +nonterminal_ID
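+      --  Wrap Unit in a new compilation_unit and add it to the grammar's
+      --  compilation_unit_list; Label is only used in trace messages.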
+      is
+         use LR_Utils;
+
+         List : LR_Utils.List := Creators.Create_List
+           (Tree, Tree.Child (Tree.Root, 2), +compilation_unit_list_ID, 
+compilation_unit_ID, Invalid_Token_ID);
+
+         Comp_Unit : constant Valid_Node_Access := Tree.Add_Nonterm
+           ((+compilation_unit_ID, (if Tree.ID (Unit) = +declaration_ID then 0 
else 1)),
+            (1 => Unit),
+            Clear_Parents => True);
+
+         function Equal
+           (Target    : in String;
+            List      : in LR_Utils.Constant_List'Class;
+            Comp_Unit : in Valid_Node_Access)
+           return Boolean
+         --  Return True if Comp_Unit is a declaration matching Target.
+         is
+            pragma Unreferenced (List);
+            Decl : constant Valid_Node_Access := Tree.Child (Comp_Unit, 1);
+         begin
+            return Tree.ID (Decl) = +declaration_ID and then Target =
+              (case To_Token_Enum (Tree.ID (Tree.Child (Decl, 2))) is
+               when Wisitoken_Grammar_Actions.TOKEN_ID
+                 | NON_GRAMMAR_ID              => Get_Text (Data, Tree, 
Tree.Child (Decl, 4)),
+               when KEYWORD_ID | IDENTIFIER_ID => Get_Text (Data, Tree, 
Tree.Child (Decl, 2)),
+               when others                     => "");
+         end Equal;
+
+      begin
+         if Prepend then
+            --  Prepend is true for keywords, which must be declared before
+            --  they are used. We put them all after the %meta_syntax
+            --  declaration, to more closely match the likely original EBNF
+            --  layout.
+            declare
+               Meta_Syntax : constant Cursor := List.Find ("meta_syntax", 
Equal'Unrestricted_Access);
+            begin
+               List.Insert (Comp_Unit, After => Meta_Syntax);
+            end;
+         else
+            List.Append (Comp_Unit);
+         end if;
+
+         if Trace_Generate_EBNF > Outline then
+            Ada.Text_IO.New_Line;
+            Ada.Text_IO.Put_Line
+              ("new " & Label & ":" & Trimmed_Image (Get_Node_Index 
(Comp_Unit)) & ": '" &
+                 Get_Text (Data, Tree, Unit) & "'");
+            if Trace_Generate_EBNF > Extra then
+               Tree.Print_Tree (Comp_Unit);
+            end if;
+         end if;
+      end Add_Compilation_Unit;
+
+      function To_RHS_List
+        (RHS_Element       : in Valid_Node_Access;
+         Auto_Token_Labels : in Boolean;
+         Post_Parse_Action : in Node_Access := Invalid_Node_Access;
+         In_Parse_Action   : in Node_Access := Invalid_Node_Access)
+        return Valid_Node_Access
+      with Pre => Tree.ID (RHS_Element) = +rhs_element_ID
+      --  Return an rhs_list containing a single RHS, which contains
+      --  RHS_Element.
+      --
+      --  Post_Parse_Action, In_Parse_Action are not copied.
+      is
+         RHS_Item_List : constant Valid_Node_Access := Tree.Add_Nonterm
+           ((+rhs_item_list_ID, 0), (1 => RHS_Element), Clear_Parents => True);
+
+         RHS : constant Valid_Node_Access := Add_RHS
+           (Tree,
+            RHS_Item_List,
+            Auto_Token_Labels,
+            Edited_Token_List => True,
+            Post_Parse_Action => Post_Parse_Action,
+            In_Parse_Action   => In_Parse_Action);
+      begin
+         return Tree.Add_Nonterm ((+rhs_list_ID, 0), (1 => RHS), Clear_Parents 
=> False);
+      end To_RHS_List;
+
+      function Convert_RHS_Alternative
+        (Content           : in Valid_Node_Access;
+         Auto_Token_Labels : in Boolean;
+         Post_Parse_Action : in Node_Access := Invalid_Node_Access;
+         In_Parse_Action   : in Node_Access := Invalid_Node_Access)
+        return Valid_Node_Access
+      with Pre => Tree.ID (Content) = +rhs_alternative_list_ID
+      --  Convert Content to an rhs_list; Content is edited.
+      --
+      --  Post_Parse_Action, In_Parse_Action are not copied for the first
+      --  RHS; they are copied for any others.
+      is
+         Node         : Valid_Node_Access := Content;
+         Copy_Actions : Boolean          := False;
+      begin
+         loop
+            exit when Tree.RHS_Index (Node) = 0;
+
+            --  current tree:
+            --  rhs_alternative_list : Node
+            --  | rhs_alternative_list: Node.Child (1)
+            --  | |  ...
+            --  | BAR: Node.child (2)
+            --  | rhs_item_list: Node.Child (3)
+
+            --  new tree:
+            --  rhs_list: Node
+            --  | rhs_alternative_list: keep Node.Child (1)
+            --  | |  ...
+            --  | BAR: keep
+            --  | rhs: new
+            --  | | rhs_item_list: keep Node.Child (3)
+
+            if not Tree.Has_Children (Tree.Child (Node, 3)) then
+               --  Convert empty rhs_item_list to empty rhs
+               declare
+                  Node_Var       : Node_Access := Node;
+                  Node_Child_Var : Node_Access := Tree.Child (Node, 3);
+               begin
+                  Tree.Set_Children
+                    (Node_Child_Var,
+                     (+rhs_ID, 0),
+                     (1 .. 0 => Invalid_Node_Access));
+
+                  Tree.Set_Children
+                    (Node_Var,
+                     (+rhs_list_ID, 1),
+                     (1 => Tree.Child (Node, 1),
+                      2 => Tree.Child (Node, 2),
+                      3 => Tree.Child (Node, 3)));
+               end;
+            else
+               declare
+                  RHS : constant Valid_Node_Access := Add_RHS
+                    (Tree, Tree.Child (Node, 3), Auto_Token_Labels,
+                     Edited_Token_List => True,
+                     Post_Parse_Action =>
+                       (if Copy_Actions then Tree.Copy_Subtree 
(Post_Parse_Action, Data_Access) else Post_Parse_Action),
+                     In_Parse_Action   =>
+                       (if Copy_Actions then Tree.Copy_Subtree 
(In_Parse_Action, Data_Access) else In_Parse_Action));
+               begin
+                  Tree.Set_Children
+                    (Node,
+                     (+rhs_list_ID, 1),
+                     (1 => Tree.Child (Node, 1),
+                      2 => Tree.Child (Node, 2),
+                      3 => RHS));
+               end;
+
+               Copy_Actions := True;
+            end if;
+
+            Node := Tree.Child (Node, 1);
+         end loop;
+
+         --  current tree:
+         --  rhs_alternative_list : Node
+         --  | rhs_item_list: Node.Child (1)
+
+         --  new tree:
+         --  rhs_list: Node
+         --  | rhs: new
+         --  | | rhs_item_list: Node.Child (1)
+
+         declare
+            RHS : constant Valid_Node_Access := Add_RHS
+              (Tree, Tree.Child (Node, 1), Auto_Token_Labels,
+               Edited_Token_List => True,
+               Post_Parse_Action =>
+                 (if Copy_Actions then Tree.Copy_Subtree (Post_Parse_Action, 
Data_Access) else Post_Parse_Action),
+               In_Parse_Action   =>
+                 (if Copy_Actions then Tree.Copy_Subtree (In_Parse_Action, 
Data_Access) else In_Parse_Action));
+         begin
+            Tree.Set_Children (Node, (+rhs_list_ID, 0), (1 => RHS));
+         end;
+
+         return Content;
+      end Convert_RHS_Alternative;
+
+      procedure New_Nonterminal
+        (Label          : in String;
+         New_Identifier : in Identifier_Index;
+         Content        : in Valid_Node_Access)
+      with Pre => To_Token_Enum (Tree.ID (Content)) in rhs_alternative_list_ID 
| rhs_element_ID
+      --  Convert subtree rooted at Content to an rhs_list contained by a new 
nonterminal
+      --  named New_Identifier.
+      --
+      --  We don't copy actions to a new nonterminal; they will not make sense.
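+      --
+      --  For example (hypothetical input): given Content holding 'A | B',
+      --  this adds a declaration equivalent to 'name : A | B ;', where
+      --  'name' is the identifier designated by New_Identifier.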
+      is
+         Child_1 : constant Valid_Node_Access := Tree.Add_Identifier 
(+IDENTIFIER_ID, New_Identifier);
+
+         Child_2 : constant Valid_Node_Access := Tree.Add_Terminal (+COLON_ID);
+
+         Child_3 : constant Valid_Node_Access :=
+           (case To_Token_Enum (Tree.ID (Content)) is
+            when rhs_element_ID          => To_RHS_List (Content, 
Get_RHS_Auto_Token_Labels (Content)),
+            when rhs_alternative_list_ID => Convert_RHS_Alternative
+              (Content, Get_RHS_Auto_Token_Labels (Content)),
+            when others => raise SAL.Programmer_Error);
+
+         Child_4 : constant Valid_Node_Access := Tree.Add_Nonterm
+           ((+semicolon_opt_ID, 0),
+            (1     => Tree.Add_Terminal (+SEMICOLON_ID)),
+            Clear_Parents => True);
+
+         New_Nonterm : constant Valid_Node_Access := Tree.Add_Nonterm
+           (Production    => (+nonterminal_ID, 0),
+            Children      => (Child_1, Child_2, Child_3, Child_4),
+            Clear_Parents => True); -- Child_3 can be Content
+      begin
+         Add_Compilation_Unit (Label & New_Identifier'Image, New_Nonterm);
+      end New_Nonterminal;
+
+      procedure Maybe_Optimized_List
+        (RHS_List          : in out LR_Utils.List;
+         List_Name         : in     Identifier_Token;
+         Separator         : in     Identifier_Token;
+         Auto_Token_Labels : in     Boolean;
+         Label             : in     Identifier_Token := 
Invalid_Identifier_Token)
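+      --  If generating an LR parser with error recovery enabled, append to
+      --  RHS_List the optimized-list alternative
+      --  'List_Name [Separator] List_Name'.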
+      is
+         use LR_Utils;
+
+         RHS_Item_List_3 : List := Empty_RHS_Item_List (Tree);
+      begin
+         if Data.User_Parser in WisiToken.BNF.LR_Generate_Algorithm and 
Data.Language_Params.Error_Recover then
+            --  FIXME: these should have different labels? need test case
+            RHS_Item_List_3.Append
+              (Add_RHS_Element (Tree, Add_RHS_Item (Tree, Add_Identifier_Token 
(Tree, List_Name)), Label));
+
+            if Separator /= Invalid_Identifier_Token then
+               RHS_Item_List_3.Append
+                 (Add_RHS_Element
+                    (Tree, Add_RHS_Item (Tree, Add_Identifier_Token (Tree, 
Separator)), Label));
+            end if;
+
+            RHS_Item_List_3.Append
+              (Add_RHS_Element (Tree, Add_RHS_Item (Tree, Add_Identifier_Token 
(Tree, List_Name)), Label));
+
+            RHS_List.Append
+              (Add_RHS
+                 (Tree,
+                  RHS_Item_List_3.Root,
+                  Auto_Token_Labels => Auto_Token_Labels,
+                  Edited_Token_List => True,
+                  Post_Parse_Action => Invalid_Node_Access,
+                  In_Parse_Action   => Invalid_Node_Access));
+         end if;
+
+      end Maybe_Optimized_List;
+
+      procedure New_Nonterminal_List
+        (List_Nonterm         : in Identifier_Token;
+         RHS_Item_List_1_Root : in Valid_Node_Access;
+         Separator            : in Identifier_Token;
+         Auto_Token_Labels    : in Boolean)
+      with Pre => Tree.ID (RHS_Item_List_1_Root) = +rhs_item_list_ID
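+      --  Add a nonterminal declaration for a canonical list:
+      --
+      --  List_Nonterm
+      --  : RHS_Item_List_1
+      --  | List_Nonterm Separator RHS_Item_List_1
+      --
+      --  plus the optimized-list alternative when error recovery is enabled.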
+      is
+         use LR_Utils;
+
+         RHS_Item_List_1 : constant Constant_List := Creators.Create_List
+           (Tree, RHS_Item_List_1_Root, +rhs_item_list_ID, +rhs_element_ID);
+
+         RHS_Item_List_2 : List := Empty_RHS_Item_List (Tree);
+         RHS_List        : List := Empty_RHS_List (Tree);
+      begin
+         RHS_Item_List_2.Append
+           (Add_RHS_Element
+              (Tree, Add_RHS_Item (Tree, Add_Identifier_Token (Tree, 
List_Nonterm))));
+
+         if Separator /= Invalid_Identifier_Token then
+            RHS_Item_List_2.Append
+              (Add_RHS_Element
+                 (Tree, Add_RHS_Item (Tree, Add_Identifier_Token (Tree, 
Separator))));
+         end if;
+
+         for Element of RHS_Item_List_1 loop
+            RHS_Item_List_2.Append (Tree.Copy_Subtree (Element, Data_Access));
+         end loop;
+
+         RHS_List.Append (Add_RHS (Tree, RHS_Item_List_1.Root, 
Auto_Token_Labels, Edited_Token_List => True));
+         RHS_List.Append (Add_RHS (Tree, RHS_Item_List_2.Root, 
Auto_Token_Labels, Edited_Token_List => True));
+
+         Maybe_Optimized_List (RHS_List, List_Nonterm, Separator, 
Auto_Token_Labels);
+
+         Add_Compilation_Unit
+           ("canonical list",
+            Tree_Add_Nonterminal
+              (Child_1 => Add_Identifier_Token (Tree, List_Nonterm),
+               Child_2 => Tree.Add_Terminal (+COLON_ID),
+               Child_3 => RHS_List.Root,
+               Child_4 => Tree.Add_Nonterm
+                 ((+semicolon_opt_ID, 0),
+                  (1   => Tree.Add_Terminal (+SEMICOLON_ID)),
+                  Clear_Parents => False)));
+      end New_Nonterminal_List;
+
+      procedure New_Nonterminal_List
+        (List_Nonterm      : in Identifier_Token;
+         List_Element      : in Identifier_Token;
+         Separator         : in Identifier_Token;
+         Auto_Token_Labels : in Boolean)
+      is
+         --  Add a nonterminal declaration for a canonical list:
+         --
+         --  foo_list ;; List_Nonterm
+         --  : foo ;; List_Element
+         --  | foo_list separator foo ;; List_Nonterm Separator List_Element
+
+         use LR_Utils;
+
+         RHS_Item_List_1 : List := Empty_RHS_Item_List (Tree);
+      begin
+         RHS_Item_List_1.Append
+           (Add_RHS_Element
+              (Tree, Add_RHS_Item (Tree, Add_Identifier_Token (Tree, 
List_Element))));
+         New_Nonterminal_List (List_Nonterm, RHS_Item_List_1.Root, Separator, 
Auto_Token_Labels);
+      end New_Nonterminal_List;
+
+      function List_Matches
+        (N                 : in Valid_Node_Access;
+         Separator_Content : in String;
+         Element_Content   : in String)
+        return Node_Access
+      with Pre => Element_Content'Length > 0
+      --  If the declaration at N is a nonterminal for a canonical list
+      --  matching Separator_Content, Element_Content (possibly optimized),
+      --  return its name node; otherwise return Invalid_Node_Access.
+      is
+         use LR_Utils;
+      begin
+         if Tree.ID (N) = +nonterminal_ID then
+            declare
+               --  Target List_Nonterm is:
+               --
+               --  list_nonterm
+               --     : element
+               --     | list_nonterm separator? element
+               --     | list_nonterm list_nonterm
+               --
+               --  nonterminal: N
+               --  | IDENTIFIER : Name_Node
+               --  | COLON
+               --  | rhs_list: RHS_List
+               --  | | rhs_list:
+               --  | | | rhs
+               --  | | | | ... list_element
+               --  | | BAR
+               --  | | rhs: ... list_nonterm separator? list_element
+
+               Name_Node : constant Node_Access    := Tree.Child (N, 1);
+               RHS_List  : constant Constant_List := Creators.Create_List
+                 (Tree, Tree.Child (N, 3), +rhs_list_ID, +rhs_ID);
+               Iter : constant Constant_Iterator := RHS_List.Iterate_Constant;
+            begin
+               if RHS_List.Count in 2 | 3 then
+                  declare
+                     List_Name : constant String := Get_Text (Data, Tree, 
Name_Node);
+                     RHS_1 : constant String := Get_Text (Data, Tree, Element 
(RHS_List.First));
+                     RHS_2 : constant String := Get_Text (Data, Tree, Element 
(Iter.Next (RHS_List.First)));
+                     Expected_RHS_2 : constant String := List_Name & " " &
+                       Separator_Content & (if Separator_Content = "" then "" 
else " ") & Element_Content;
+                  begin
+                     if Element_Content = RHS_1 and RHS_2 = Expected_RHS_2 then
+                        if RHS_List.Count = 3 then
+                           declare
+                              RHS_3 : constant String := Get_Text (Data, Tree, 
Element (RHS_List.Last));
+                              Expected_RHS_3 : constant String := List_Name & 
" " &
+                                Separator_Content & (if Separator_Content = "" 
then "" else " ") & List_Name;
+                           begin
+                              if RHS_3 = Expected_RHS_3 then
+                                 return Name_Node;
+                              end if;
+                           end;
+                        else
+                           return Name_Node;
+                        end if;
+                     end if;
+                  end;
+               end if;
+            end;
+         end if;
+         return Invalid_Node_Access;
+      end List_Matches;
+
+      function Find_List_Nonterminal_1
+        (Separator_Content : in String;
+         Element_Content   : in String)
+        return Identifier_Token
+      with Pre => Element_Content'Length > 0
+      --  Search for a nonterminal declaration (virtual or not) implementing
+      --  a list matching Separator_Content, Element_Content. If found,
+      --  return an identifier_token for it. Otherwise, return
+      --  Invalid_Identifier_Token.
+      is
+         use LR_Utils;
+
+         List : constant Constant_List := Creators.Create_List
+           (Tree, Tree.Child (Tree.Root, 2), +compilation_unit_list_ID, 
+compilation_unit_ID);
+      begin
+         return List_Nonterm_Name : Identifier_Token := 
Invalid_Identifier_Token do
+            for N of List loop
+               declare
+                  Name_Node : constant Node_Access := List_Matches
+                    (Tree.Child (N, 1), Separator_Content, Element_Content);
+               begin
+                  if Name_Node /= Invalid_Node_Access then
+                     List_Nonterm_Name := To_Identifier_Token (Name_Node, 
Tree);
+                     exit;
+                  end if;
+               end;
+            end loop;
+         end return;
+      end Find_List_Nonterminal_1;
+
+      function Maybe_New_Nonterminal_List
+        (List_Nonterm_String : in String;
+         Element             : in Valid_Node_Access;
+         Separator           : in Node_Access;
+         Auto_Token_Labels   : in Boolean)
+        return Identifier_Token
+      with Pre => To_Token_Enum (Tree.ID (Element)) in rhs_item_list_ID | 
rhs_element_ID | rhs_item_ID | IDENTIFIER_ID
+      --  If there is an existing nonterminal matching List_Nonterm_String,
+      --  Element_Content, Separator, return an identifier_token for it.
+      --  Otherwise, create a new list nonterminal, return an
+      --  identifier_token for that.
+      is
+         Existing_Decl : constant Node_Access := Find_Declaration (Data, Tree, 
List_Nonterm_String);
+
+         Element_Content : constant String := Get_Text (Data, Tree, Element); 
--  IMPROVEME: ignore token labels.
+
+         Separator_Content : constant String :=
+           (if Separator = Invalid_Node_Access
+            then ""
+            else Get_Item_Text (Data, Tree, Separator));
+
+         Separator_Ident_Tok : constant Identifier_Token :=
+           (if Separator = Invalid_Node_Access
+            then Invalid_Identifier_Token
+            else To_Identifier_Token (Tree.Find_Descendant (Separator, 
+rhs_item_ID), Tree));
+
+         Name_Node : constant Node_Access :=
+           (if Existing_Decl = Invalid_Node_Access
+            then Invalid_Node_Access
+            else List_Matches (Existing_Decl, Separator_Content, 
Element_Content));
+      begin
+         return List_Nonterm_Name : Identifier_Token do
+            if Name_Node = Invalid_Node_Access then
+               List_Nonterm_Name := Find_List_Nonterminal_1 
(Separator_Content, Element_Content);
+
+               if List_Nonterm_Name = Invalid_Identifier_Token then
+                  List_Nonterm_Name := To_Identifier_Token
+                    (WisiToken.Identifier_Index'
+                       (if Existing_Decl = Invalid_Node_Access
+                        then New_Identifier (List_Nonterm_String)
+                        else Next_Nonterm_Name (List_Nonterm_String)));
+
+                  case To_Token_Enum (Tree.ID (Element)) is
+                  when rhs_item_list_ID =>
+                     pragma Assert (Separator_Ident_Tok = 
Invalid_Identifier_Token);
+
+                     New_Nonterminal_List (List_Nonterm_Name, Element, 
Separator_Ident_Tok, Auto_Token_Labels);
+
+                  when rhs_element_ID =>
+
+                     New_Nonterminal_List
+                       (List_Nonterm      => List_Nonterm_Name,
+                        List_Element      => To_Identifier_Token
+                          (Tree.Find_Descendant (Element, +rhs_item_ID), Tree),
+                        Separator         => Separator_Ident_Tok,
+                        Auto_Token_Labels => Auto_Token_Labels);
+
+                  when rhs_item_ID | IDENTIFIER_ID =>
+
+                     New_Nonterminal_List
+                       (List_Nonterm      => List_Nonterm_Name,
+                        List_Element      => To_Identifier_Token (Element, 
Tree),
+                        Separator         => Separator_Ident_Tok,
+                        Auto_Token_Labels => Auto_Token_Labels);
+
+                  when others =>
+                     raise SAL.Programmer_Error with "unexpected case in Maybe_New_Nonterminal_List: " &
+                       Tree.Image (Element);
+                  end case;
+               end if;
+
+            else
+               List_Nonterm_Name := To_Identifier_Token (Name_Node, Tree);
+
+               Erase_Deleted_EBNF_Nodes (Element);
+
+               if Trace_Generate_EBNF > Extra then
+                  Ada.Text_IO.Put_Line ("use " & Get_Text (Data, Tree, 
Name_Node));
+               end if;
+            end if;
+
+         end return;
+      end Maybe_New_Nonterminal_List;
+
+      procedure Copy_Non_Grammar
+        (From : in Valid_Node_Access;
+         To   : in Valid_Node_Access)
+      is begin
+         Tree.Non_Grammar_Var (To).Element.all := Tree.Non_Grammar_Const 
(From);
+      end Copy_Non_Grammar;
+
+      procedure Translate_RHS_Group_Item (Node : in Valid_Node_Access)
+      is
+         --  Current tree:
+         --
+         --  rhs_element: Parent (Node, 2)
+         --  | rhs_item: Parent (Node, 1)
+         --  | | rhs_group_item: Node
+         --  | | | LEFT_PAREN
+         --  | | | rhs_alternative_list: Child (Node, 2)
+         --  | | | RIGHT_PAREN
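+         --
+         --  For example (hypothetical): in 'x : C (A | B) D ;' the group
+         --  item '(A | B)' is replaced by a reference to a nonterminal
+         --  whose content is 'A | B'; an existing declaration with the same
+         --  content is reused where possible, otherwise a new one is added.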
+
+         RHS              : constant Valid_Node_Access := Tree.Find_Ancestor 
(Node, +rhs_ID);
+         Has_Actions      : constant Boolean          := Tree.RHS_Index (RHS) 
in 2 .. 3;
+         Element_Content  : constant String           := Get_Text (Data, Tree, 
Tree.Child (Node, 2));
+         Right_Paren_Node : constant Valid_Node_Access := Tree.Child (Node, 3);
+         Found_Unit       : constant Node_Access       :=
+           (if Has_Actions then Invalid_Node_Access
+            else Find_Nonterminal
+              (Element_Content, Nonterm_Content_Equal'Unrestricted_Access));
+         New_Ident        : Base_Identifier_Index     := 
Invalid_Identifier_Index;
+      begin
+         if Found_Unit = Invalid_Node_Access then
+            New_Ident := Next_Nonterm_Name;
+            New_Nonterminal ("group item", New_Ident, Tree.Child (Node, 2));
+         else
+            declare
+               Name_Node : constant Node_Access := Tree.Child (Tree.Child 
(Found_Unit, 1), 1);
+            begin
+               case Tree.Label (Name_Node) is
+               when Source_Terminal =>
+                  New_Ident := New_Identifier (Get_Text (Data, Tree, 
Name_Node));
+               when Virtual_Identifier =>
+                  New_Ident := Tree.Identifier (Name_Node);
+               when others =>
+                  WisiToken.Syntax_Trees.LR_Utils.Raise_Programmer_Error 
("translate_rhs_group_item", Tree, Name_Node);
+               end case;
+            end;
+            Erase_Deleted_EBNF_Nodes (Tree.Child (Node, 2));
+         end if;
+
+         declare
+            Ident_Node   : constant Node_Access := Tree.Add_Identifier 
(+IDENTIFIER_ID, New_Ident);
+            Element_Node : Valid_Node_Access    := Tree.Parent (Node, 2);
+            Item_Node    : Valid_Node_Access    := Tree.Parent (Node, 1);
+         begin
+            Tree.Set_Children (Item_Node, (+rhs_item_ID, 0), (1 => 
Ident_Node));
+            Copy_Non_Grammar (Right_Paren_Node, Ident_Node);
+
+            if Get_RHS_Auto_Token_Labels (RHS) then
+               declare
+                  Label        : constant Valid_Node_Access := 
Tree.Add_Identifier
+                    (+IDENTIFIER_ID, Next_Token_Label ("G"));
+                  Equal        : constant Valid_Node_Access := 
Tree.Add_Terminal (+EQUAL_ID);
+               begin
+                  Tree.Set_Children (Element_Node, (+rhs_element_ID, 1), 
(Label, Equal, Item_Node));
+               end;
+            end if;
+         end;
+
+         Clear_EBNF_Node (Node);
+      end Translate_RHS_Group_Item;
+
+      procedure Translate_RHS_Multiple_Item (B : in Valid_Node_Access)
+      is
+         --  We have one of:
+         --
+         --  | a { b }  c
+         --  | a { b } - c
+         --  | a ( b ) + c
+         --  | a ( b ) * c
+         --  | a b+ c
+         --  | a b* c
+         --
+         --  where a and/or c can be empty. Replace it with a new canonical
+         --  list nonterminal:
+         --
+         --  nonterminal_nnn_list
+         --  : b
+         --  | nonterminal_nnn_list b
+         --  | nonterminal_nnn_list nonterminal_nnn_list
+         --
+         --  where the third option makes it an optimized list; only done if
+         --  error recovery is enabled.
+         --
+         --  and a second RHS if it can be empty:
+         --  | a c
+
+         --  Current tree:
+         --
+         --  rhs_element : Parent (B, 2)
+         --  | rhs_item: Parent (B, 1)
+         --  | | rhs_multiple_item: B
+         --  | | | LEFT_BRACE | LEFT_PAREN
+         --  | | | rhs_alternative_list
+         --  | | | ...
+         --  | | | RIGHT_BRACE | RIGHT_PAREN
+         --  | | | [MINUS | PLUS | STAR]
+
+         --  or:
+         --
+         --  rhs_element : Parent (B, 2)
+         --  | rhs_item: Parent (B, 1)
+         --  | | rhs_multiple_item: B
+         --  | | | IDENTIFIER
+         --  | | | PLUS | STAR
+
+         use LR_Utils;
+         use LR_Utils.Creators;
+
+         Canonical_List    : Boolean           := False;
+         Done              : Boolean           := False;
+         Simple_Named      : Boolean           := False;
+         Has_Separator     : Boolean           := False;
+         Separator_Node    : Node_Access       := Invalid_Node_Access;
+         Separator_Token   : Identifier_Token  := Invalid_Identifier_Token;
+         Parent_RHS_Item   : Valid_Node_Access := Tree.Parent (B);
+         List_Nonterm_Name : Identifier_Token  := Invalid_Identifier_Token;
+
+         B_Alt_List_List : constant Constant_List :=
+           (case Tree.RHS_Index (B) is
+            when 0 .. 3 =>
+               Create_List (Tree, Tree.Child (B, 2), +rhs_alternative_list_ID, 
+rhs_item_list_ID),
+            when others => Invalid_List (Tree));
+         --  The rhs_alternative_list of the rhs_multiple_item.
+
+         B_Alt_List_Item_List : List :=
+           (if B_Alt_List_List.Is_Invalid
+            then Invalid_List (Tree)
+            else Create_List
+              (Tree, Element (B_Alt_List_List.First), +rhs_item_list_ID, 
+rhs_element_ID,
+               Separator_ID => Invalid_Token_ID));
+         --  The first rhs_item_list of the rhs_multiple_item.
+
+         Container_List_Root : Node_Access := Invalid_Node_Access;
+         --  Updated by Insert_Optional_RHS.
+
+         procedure Check_Canonical_List
+         is
+            --  In EBNF, a canonical list with a separator looks like:
+            --
+            --  A enumConstant (',' enumConstant)* C
+            --
+            --  or, with no separator:
+            --
+            --  SwitchLabels : SwitchLabel {SwitchLabel}
+            --
+            --  where B is the rhs_multiple_item containing "(','
+            --  enumConstant)*" or "{SwitchLabel}".
+            --
+            --  The tokens may have labels.
+            --
+            --  Handling these cases specially eliminates a conflict between
+            --  reducing to enumConstants and reducing to the introduced 
nonterm
+            --  list.
+            --
+            --  Alternately, the no separator case can be:
+            --
+            --  enumConstants : enumConstant+ ;
+            --
+            --  Handling this no separator case specially does not eliminate 
any
+            --  conflicts, but it does reduce the number of added nonterminals,
+            --  and keeps the names simpler.
+
+            List_Nonterm_Decl : constant Valid_Node_Access := 
Tree.Find_Ancestor (B, +nonterminal_ID);
+            RHS_List_Root     : constant Valid_Node_Access := Tree.Child 
(List_Nonterm_Decl, 3);
+
+            RHS_List : List := Create_List
+              (Tree, RHS_List_Root, +rhs_list_ID, +rhs_ID, Separator_ID => 
+BAR_ID);
+
+            RHS : constant Valid_Node_Access := Tree.Find_Ancestor
+              (B, (+rhs_ID, +rhs_alternative_list_ID));
+            --  If rhs_ID, the RHS containing the canonical list candidate.
+            --  If rhs_alternative_list_ID, an unnamed canonical list candidate
+
+            RHS_Item_List_Root : constant Valid_Node_Access := List_Root
+              (Tree, Tree.Find_Ancestor (B, +rhs_item_list_ID), 
+rhs_item_list_ID);
+
+            RHS_Item_List_List : List := Create_List
+              (Tree, RHS_Item_List_Root, +rhs_item_list_ID, +rhs_element_ID, 
Separator_ID => Invalid_Token_ID);
+            --  The rhs_item_list containing the rhs_multiple_item
+
+            RHS_Item_List_Iter : constant Constant_Iterator := 
RHS_Item_List_List.Iterate_Constant;
+
+            Element_2 : constant Cursor := RHS_Item_List_List.To_Cursor 
(Tree.Parent (B, 2));
+            --  The rhs_element containing the rhs_multiple_item
+
+            Element_1 : constant Node_Access :=
+              (if Tree.RHS_Index (B) in 4 .. 5
+               then Invalid_Node_Access
+               else Element (RHS_Item_List_Iter.Previous (Element_2)));
+            --  The rhs_element containing the first list element
+
+            Can_Be_Empty : constant Boolean := Element_1 = Invalid_Node_Access 
and Tree.RHS_Index (B) in 0 | 3 | 5;
+
+            procedure Do_Simple_Named (List_Elements : in Valid_Node_Access)
+            with Pre => To_Token_Enum (Tree.ID (List_Elements)) in 
rhs_element_ID | rhs_item_list_ID | IDENTIFIER_ID
+            is
+               pragma Assert (Tree.ID (RHS) = +rhs_ID);
+
+               --  The existing nonterminal declaration is one of:
+               --
+               --  1a) list_name
+               --       : list_element {separator list_element}
+               --         %( action? )%
+               --       ;
+               --
+               --  2a) nonterm_name
+               --      : {list_element}
+               --        %( action? )%
+               --      ;
+               --
+               --  3a) nonterm_name
+               --      : list_element+ ;; list_element ID is IDENTIFIER_ID
+               --        %( action? )%
+               --      ;
+               --
+               --  Rewrite 1a) to:
+               --
+               --  1b) list_name
+               --        : list_element
+               --          %( action? )%
+               --        | list_name separator list_element
+               --          %( action? )%
+               --        | list_name separator list_name
+               --        ;
+               --
+               --  If 2a) can be empty, rewrite to:
+               --
+               --  2b) list_element_list
+               --        : list_element
+               --          %( action? )%
+               --        | list_element_list list_element
+               --          %( action? )%
+               --        | list_element_list list_element_list
+               --        ;
+               --
+               --      nonterm_name
+               --        : list_name
+               --          %( action? )%
+               --        | empty
+               --        ;
+               --
+               --  If instead we did the shortcut:
+               --      list_element_list
+               --        : list_element
+               --        | list_element_list list_element
+               --        | empty
+               --        ;
+               --  and list_element starts with a nullable nonterm, then there 
is a
+               --  conflict between reducing 0 tokens to an empty 
list_element_list
+               --  or to the nullable nonterm; see ada_lite_ebnf.wy 
declarative_part.
+               --  We have not computed Nullable yet, so we assume it is true, 
and
+               --  don't use this shortcut. This would also complicate
+               --  recognizing this as an optimized list.
+               --
+               --  otherwise rewrite to:
+               --
+               --  2c) nonterm_name
+               --        : list_element
+               --          %( action? )%
+               --        | nonterm_name list_element
+               --          %( action? )%
+               --        | nonterm_name nonterm_name
+               --        ;
+               --
+               --  3a is similar to 2a.
+
+               RHS_Item_List_1 : List :=
+                 (case To_Token_Enum (Tree.ID (List_Elements)) is
+                  when rhs_element_ID   => To_RHS_Item_List (Tree, 
List_Elements),
+                  when rhs_item_list_ID => Creators.Create_List
+                    (Tree, List_Elements, +rhs_item_list_ID, +rhs_element_ID, 
Invalid_Token_ID),
+                  when IDENTIFIER_ID    => Empty_List (Tree, 
+rhs_item_list_ID, 1, +rhs_element_ID, Invalid_Token_ID),
+                  when others           => raise SAL.Programmer_Error);
+
+               RHS_Item_List_2 : List :=
+                 (if B_Alt_List_Item_List.Is_Invalid
+                  then Empty_List (Tree, +rhs_item_list_ID, 1, 
+rhs_element_ID, Invalid_Token_ID)
+                  else B_Alt_List_Item_List);
+
+               New_RHS_List : List := Empty_RHS_List (Tree);
+
+               Post_Parse_Action : constant Node_Access := Tree.Child (RHS, 
2); --  deleted by first Add_RHS
+               In_Parse_Action   : constant Node_Access := Tree.Child (RHS, 3);
+               Auto_Token_Labels : constant Boolean    := 
Get_RHS_Auto_Token_Labels (RHS);
+
+               Label : constant Identifier_Token :=
+                 (if Auto_Token_Labels
+                  then To_Identifier_Token (Next_Token_Label ("G"))
+                  else Invalid_Identifier_Token);
+
+               List_Name : constant Identifier_Token :=
+                 (if Can_Be_Empty
+                  then To_Identifier_Token
+                    (New_Identifier
+                       (Get_Text
+                          (Data, Tree,
+                           (if Tree.ID (List_Elements) = +IDENTIFIER_ID
+                            then List_Elements
+                            else Tree.Find_Descendant (List_Elements, 
+rhs_item_ID)))
+                          & "_list"))
+                  else To_Identifier_Token (Tree.Child (List_Nonterm_Decl, 1), 
Tree));
+            begin
+               if Tree.ID (List_Elements) = +IDENTIFIER_ID then
+                  RHS_Item_List_1.Append
+                    (Add_RHS_Element
+                       (Tree, Add_RHS_Item (Tree, Tree.Copy_Subtree 
(List_Elements, Data_Access)), Label));
+
+                  RHS_Item_List_2.Append (Add_RHS_Element (Tree, Add_RHS_Item 
(Tree, List_Elements)));
+               end if;
+
+               New_RHS_List.Append
+                 (Add_RHS
+                    (Tree, RHS_Item_List_1.Root,
+                     Auto_Token_Labels => Get_RHS_Auto_Token_Labels (B),
+                     Edited_Token_List => True,
+                     Post_Parse_Action => Post_Parse_Action,
+                     In_Parse_Action   => In_Parse_Action));
+
+               RHS_Item_List_2.Prepend
+                 (Add_RHS_Element (Tree, Add_RHS_Item (Tree, 
Add_Identifier_Token (Tree, List_Name)), Label));
+
+               New_RHS_List.Append
+                 (Add_RHS
+                    (Tree,
+                     RHS_Item_List_2.Root,
+                     Auto_Token_Labels => Get_RHS_Auto_Token_Labels (B),
+                     Edited_Token_List => True,
+                     Post_Parse_Action => Tree.Copy_Subtree 
(Post_Parse_Action, Data_Access),
+                     In_Parse_Action   => Tree.Copy_Subtree (In_Parse_Action, 
Data_Access)));
+
+               Maybe_Optimized_List
+                 (New_RHS_List, List_Name, Separator_Token,
+                  Auto_Token_Labels => Get_RHS_Auto_Token_Labels (B),
+                  Label             => Label);
+
+               if Can_Be_Empty then
+                  Add_Compilation_Unit
+                    ("canonical list",
+                     Tree_Add_Nonterminal
+                       (Child_1 => Add_Identifier_Token (Tree, List_Name),
+                        Child_2 => Tree.Add_Terminal (+COLON_ID),
+                        Child_3 => New_RHS_List.Root,
+                        Child_4 => Tree.Add_Nonterm
+                          ((+semicolon_opt_ID, 0),
+                           (1   => Tree.Add_Terminal (+SEMICOLON_ID)),
+                           Clear_Parents => False)));
+
+                  Tree.Replace_Child
+                    (Parent               => Tree.Find_Descendant (Element 
(RHS_List.First), +rhs_item_list_ID),
+                     Child_Index          => 1,
+                     New_Child            => Add_RHS_Element
+                       (Tree,
+                        Add_RHS_Item
+                          (Tree, Add_Identifier_Token (Tree, List_Name)),
+                        Label),
+                     Old_Child            => Element (Element_2),
+                     Old_Child_New_Parent => Invalid_Node_Access);
+
+                  --  This goes on RHS_List, _not_ New_RHS_List.
+                  RHS_List.Append (Empty_RHS (Tree));
+
+               else
+                  Tree.Replace_Child
+                    (Parent               => List_Nonterm_Decl,
+                     Child_Index          => 3,
+                     Old_Child            => RHS_List_Root,
+                     New_Child            => New_RHS_List.Root,
+                     Old_Child_New_Parent => Invalid_Node_Access);
+               end if;
+
+               Clear_EBNF_Node (B);
+
+               if Trace_Generate_EBNF > Extra then
+                  Ada.Text_IO.New_Line;
+                  Ada.Text_IO.Put_Line ("Simple_Named Canonical_List edited 
nonterm:");
+                  Tree.Print_Tree (List_Nonterm_Decl);
+               end if;
+            end Do_Simple_Named;
+
+         begin
+            if Trace_Generate_EBNF > Detail then
+               Ada.Text_IO.Put_Line ("Check_Canonical_List start RHS: " & 
Get_Text (Data, Tree, RHS_List.Root));
+               Ada.Text_IO.Put_Line (" ...                         B: " & 
Get_Text (Data, Tree, B));
+            end if;
+
+            if not B_Alt_List_List.Is_Invalid and then B_Alt_List_List.Count 
/= 1 then
+               return;
+            end if;
+
+            if Element_1 = Invalid_Node_Access then
+               Has_Separator := False;
+            else
+               if Tree.RHS_Index (B) in 4 .. 5 then
+                  Has_Separator := False;
+
+               elsif B_Alt_List_Item_List.Count in 1 .. 2 and then
+                 Get_Item_Text (Data, Tree, Element_1) =
+                 Get_Item_Text (Data, Tree, Element 
(B_Alt_List_Item_List.Last))
+               then
+                  Has_Separator := B_Alt_List_Item_List.Count = 2;
+                  if Has_Separator then
+                     Separator_Node  := Element (B_Alt_List_Item_List.First);
+                     Separator_Token := To_Identifier_Token 
(Tree.Find_Descendant (Separator_Node, +rhs_item_ID), Tree);
+                  end if;
+               else
+                  return;
+               end if;
+            end if;
+
+            if not B_Alt_List_List.Is_Invalid and then
+              Duplicate
+                (RHS_List,
+                 Tree.Find_Descendant
+                   ((if Element_1 = Invalid_Node_Access
+                     then Element (B_Alt_List_Item_List.Last)
+                     else Element_1),
+                    +rhs_item_ID))
+            then
+               --  See ada_lite_ebnf.wy expression; recognizing this would 
cause
+               --  conflicts between reducing a relation to expression or one 
of the
+               --  lists.
+               return;
+            end if;
+
+            if (RHS_List.Count = 1 and Tree.ID (RHS) = +rhs_ID and 
Tree.RHS_Index (RHS) = 1) and then
+              ((RHS_Item_List_List.Count = 1 and
+                  (B_Alt_List_List.Is_Invalid or else 
B_Alt_List_Item_List.Count = 1)) or
+                 (RHS_Item_List_List.Count = 2 and Element_2 = 
RHS_Item_List_List.Last))
+            then
+               Simple_Named := True;
+            end if;
+
+            if Tree.RHS_Index (B) in 4 .. 5 and not Simple_Named then
+               --   Handled below
+               return;
+            end if;
+
+            Canonical_List := True;
+
+            if Trace_Generate_EBNF > Detail then
+               Ada.Text_IO.Put_Line
+                 ((if Simple_Named then "simple named " else "embedded ") & 
"canonical list" &
+                    (if Has_Separator then " with separator" else ""));
+            end if;
+
+            if Simple_Named then
+               declare
+                  List_Elements : constant Node_Access :=
+                    (if Element_1 = Invalid_Node_Access
+                     then (if B_Alt_List_Item_List.Is_Invalid
+                           then Tree.Child (B, 1)
+                           else Tree.Copy_Subtree (B_Alt_List_Item_List.Root, 
Data_Access))
+                     else Element_1);
+               begin
+                  Do_Simple_Named (List_Elements);
+
+                  if Element_1 = Invalid_Node_Access then
+                     Record_Copied_EBNF_Nodes (List_Elements);
+                  end if;
+                  Done := True;
+                  return;
+               end;
+            elsif Can_Be_Empty then
+               --  use cases for this Insert_Optional_RHS:
+               --  yes: java_types_ch19.wy Dims
+               --  no: ada_lite_ebnf.wy enumeration_type_definition 
simple_expression
+               Container_List_Root := Insert_Optional_RHS (B);
+            end if;
+
+            declare
+               pragma Assert
+                 (Element_1 = Invalid_Node_Access or else Tree.ID
+                    (case Tree.RHS_Index (Element_1) is
+                     when 0      => Tree.Child (Tree.Child (Element_1, 1), 1),
+                     when 1      => Tree.Child (Tree.Child (Element_1, 3), 1),
+                     when others => raise SAL.Programmer_Error)
+                    = +IDENTIFIER_ID);
+               --  So we can use it as a nonterm name. If the source text has a
+               --  terminal literal (see java_ebnf.wy arrayCreatorRest), it should
+               --  have been translated to a token name by now.
+
+               List_Nonterm_String : constant String :=
+                 (if Has_Separator
+                  then Get_Item_Text (Data, Tree, Element_1) & "_" & Get_Item_Text (Data, Tree, Separator_Node)
+                  elsif Element_1 /= Invalid_Node_Access
+                  then Get_Item_Text (Data, Tree, Element_1) & "_" &
+                     Get_Item_Text (Data, Tree, Element (B_Alt_List_Item_List.First))
+                  else Get_Item_Text (Data, Tree, Element (B_Alt_List_Item_List.First)) &
+                    (if B_Alt_List_Item_List.Count = 1
+                     then ""
+                     else "_" & Get_Item_Text
+                       (Data, Tree, Element (B_Alt_List_Item_List.Iterate.Next (B_Alt_List_Item_List.First))))) &
+                 "_list";
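+               --  For example (an illustrative name only, not taken from a
+               --  shipped grammar): for an EBNF item like "term {PLUS term}"
+               --  with separator PLUS, the name built here is "term_PLUS_list".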
+            begin
+               List_Nonterm_Name := Maybe_New_Nonterminal_List
+                 (List_Nonterm_String   => List_Nonterm_String,
+                  Element               =>
+                    (if Element_1 /= Invalid_Node_Access
+                     then Tree.Find_Descendant (Element_1, +rhs_item_ID)
+                     else B_Alt_List_Item_List.Root),
+                  Separator         => Separator_Node,
+                  Auto_Token_Labels => Get_RHS_Auto_Token_Labels (B));
+
+               if Element_1 /= Invalid_Node_Access then
+                  declare
+                     Cur : Cursor := RHS_Item_List_List.To_Cursor (Element_1);
+                  begin
+                     --  Delete element_1; code below will replace element_2 with List_Nonterm_Name
+                     RHS_Item_List_List.Delete (Cur);
+                  end;
+               end if;
+            end;
+         end Check_Canonical_List;
+
+         procedure Find_List_Nonterminal_2 (Separator_Content : in String; Element_Content : in String)
+         is
+            --  Look for a pair of nonterms implementing a list of [Separator_Content] Element_Content.
+            --
+            --  list_element : element ;
+            --
+            --  list
+            --    : list_element
+            --    | list separator list_element
+            --    ;
+            --
+            --  If found, set List_Nonterm_*_Name
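+            --
+            --  For example (an illustrative fragment, not from a shipped
+            --  grammar), with Element_Content = "name" and Separator_Content =
+            --  "COMMA", this matches an existing pair like:
+            --
+            --     name_list_element : name ;
+            --
+            --     name_list
+            --       : name_list_element
+            --       | name_list COMMA name_list_element
+            --       ;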
+            List : constant Constant_List := Creators.Create_List
+              (Tree, Tree.Child (Tree.Root, 2), +compilation_unit_list_ID, +compilation_unit_ID);
+         begin
+            for Comp_Unit of List loop
+               declare
+                  Nonterm : constant Valid_Node_Access := Tree.Child (Comp_Unit, 1);
+               begin
+                  if Tree.ID (Nonterm) = +nonterminal_ID then
+                     if Element_Content = Get_Text (Data, Tree, Tree.Child (Nonterm, 3)) then
+                        --  Found list_element_nonterm : element ;
+                        List_Nonterm_Name := Find_List_Nonterminal_1
+                          (Separator_Content, Get_Text (Data, Tree, Tree.Child (Nonterm, 1)));
+                        exit;
+                     end if;
+                  end if;
+               end;
+            end loop;
+         end Find_List_Nonterminal_2;
+
+      begin
+         --  Check if this is a recognized pattern
+         Check_Canonical_List;
+         if Done then
+            return;
+         end if;
+
+         --  Check to see if there is an already declared nonterminal
+         --  list with the same content; if not, create one.
+         case Tree.RHS_Index (B) is
+         when 0 .. 3 =>
+            --  0: { rhs_alternative_list }
+            --  1: { rhs_alternative_list } -
+            --  2: ( rhs_alternative_list ) +
+            --  3: ( rhs_alternative_list ) *
+
+            if Tree.RHS_Index (B) in 0 | 3 and not Canonical_List then
+               Container_List_Root := Insert_Optional_RHS (B);
+            end if;
+
+            if Canonical_List then
+               --  List_Nonterm_Name set by Check_Canonical_List
+               null;
+
+            elsif 1 = B_Alt_List_List.Count then
+               --  IMPROVEME: this is redundant with check_canonical_list when Element_1 = invalid; simplify.
+               List_Nonterm_Name := Maybe_New_Nonterminal_List
+                 (List_Nonterm_String =>
+                    Get_Item_Text (Data, Tree, Element (B_Alt_List_Item_List.First)) &
+                      (if B_Alt_List_Item_List.Count = 1
+                       then ""
+                       else "_" & Get_Item_Text
+                         (Data, Tree, Element
+                            (B_Alt_List_Item_List.Iterate.Next (B_Alt_List_Item_List.First)))) &
+                      "_list",
+                  Element           => B_Alt_List_Item_List.Root,
+                  Separator         => Invalid_Node_Access,
+                  Auto_Token_Labels => Get_RHS_Auto_Token_Labels (B));
+
+            else
+               --  IMPROVEME: handle separator here? need test case
+               --  IMPROVEME: ignore token labels
+               --  IMPROVEME: extend maybe_new_nonterminal_list to handle this case.
+               Find_List_Nonterminal_2
+                 (Separator_Content => "",
+                  Element_Content   => Get_Text (Data, Tree, Tree.Child (B, 2)));
+
+               if List_Nonterm_Name = Invalid_Identifier_Token then
+                  declare
+                     List_Element_Name_String : constant String :=
+                       Get_Item_Text (Data, Tree, Element (B_Alt_List_Item_List.First)) &
+                       (if B_Alt_List_Item_List.Count > 1
+                        then "_" & Get_Item_Text
+                          (Data, Tree, Element (B_Alt_List_Item_List.Iterate.Next (B_Alt_List_Item_List.First)))
+                        else "_" & Get_Item_Text
+                          (Data, Tree, Element (B_Alt_List_List.Iterate_Constant.Next (B_Alt_List_List.First))));
+
+                     List_Nonterm_Name_String : constant String := List_Element_Name_String & "_list";
+
+                     Existing_Decl : constant Node_Access := Find_Declaration
+                       (Data, Tree, List_Nonterm_Name_String);
+
+                     List_Element_Name : constant Identifier_Index :=
+                       (if Existing_Decl = Invalid_Node_Access
+                        then New_Identifier (List_Element_Name_String)
+                        else Next_Nonterm_Name (List_Element_Name_String));
+                  begin
+                     List_Nonterm_Name := To_Identifier_Token
+                       ((if Existing_Decl = Invalid_Node_Access
+                         then New_Identifier (List_Nonterm_Name_String)
+                         else Next_Nonterm_Name ("list")));
+
+                     New_Nonterminal ("canonical list element", List_Element_Name, Tree.Child (B, 2));
+                     New_Nonterminal_List
+                       (List_Nonterm_Name,
+                        To_Identifier_Token (List_Element_Name),
+                        Separator         => Invalid_Identifier_Token,
+                        Auto_Token_Labels => Get_RHS_Auto_Token_Labels (B));
+                  end;
+               else
+                  Erase_Deleted_EBNF_Nodes (Tree.Child (B, 2));
+               end if;
+            end if;
+
+         when 4 | 5 =>
+            --  IDENTIFIER + | *
+
+            List_Nonterm_Name := Maybe_New_Nonterminal_List
+              (List_Nonterm_String => Get_Text (Data, Tree, Tree.Child (B, 1)) & "_list",
+               Element             => Tree.Child (B, 1),
+               Separator           => Invalid_Node_Access,
+               Auto_Token_Labels   => Get_RHS_Auto_Token_Labels (B));
+
+            if Tree.RHS_Index (B) = 5 then
+               Container_List_Root := Insert_Optional_RHS (B);
+            end if;
+
+         when others =>
+            WisiToken.Syntax_Trees.LR_Utils.Raise_Programmer_Error
+              ("Translate_RHS_Multiple_Item unimplemented", Tree, B);
+         end case;
+
+         --  Edit rhs_item to use list name
+         declare
+            Child : constant Valid_Node_Access := Add_Identifier_Token (Tree, List_Nonterm_Name);
+         begin
+            Tree.Set_Children (Parent_RHS_Item, (+rhs_item_ID, 0), (1 => Child));
+         end;
+
+         Clear_EBNF_Node (B);
+
+         if Trace_Generate_EBNF > Detail then
+            declare
+               Item : constant Valid_Node_Access :=
+                 (if Container_List_Root = Invalid_Node_Access
+                  then Tree.Parent (Parent_RHS_Item)
+                  else Container_List_Root);
+            begin
+               Ada.Text_IO.New_Line;
+               Ada.Text_IO.Put_Line ("Translate_RHS_Multiple_Item edited: " & Get_Text (Data, Tree, Item));
+               if Trace_Generate_EBNF > Extra then
+                  Tree.Print_Tree (Item);
+               end if;
+            end;
+         end if;
+      end Translate_RHS_Multiple_Item;
+
+      procedure Translate_RHS_Optional_Item (B : in Valid_Node_Access)
+      is
+         --  Source looks like:
+         --
+         --  | A [B] C
+         --
+         --  where A, B, C are token sequences. All are contained in one
+         --  rhs_item_list, which may be contained in an rhs or an
+         --  rhs_alternative_list. B contains an rhs_alternative_list.
+         --
+         --  First add a second rhs_item_list without B:
+         --  | A C
+         --
+         --  then for each alternative in B, splice together rhs_item_lists A,
+         --  B_i, C, copying A, C on all after the first:
+         --  | A B_i C
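+         --
+         --  For example (illustrative only, not one of the test grammars):
+         --     decl : name [COLON type_name] SEMICOLON ;
+         --  becomes
+         --     decl
+         --       : name SEMICOLON
+         --       | name COLON type_name SEMICOLON
+         --       ;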
+         --
+         --  See nested_ebnf_optional.wy for an example of nested optional
+         --  items.
+         --
+         --  We don't create a separate nonterminal for B, so token labels stay
+         --  in the same RHS for actions.
+         --
+         --  current tree:
+         --
+         --  rhs_list:
+         --  | rhs | rhs_alternative_list:
+         --  | | rhs_item_list
+         --  | | | rhs_item_list
+         --  | | ...
+         --  | | | | | rhs_element: a.last
+         --  | | | | | | rhs_item:
+         --  | | | | rhs_element:
+         --  | | | | | rhs_item: contains b
+         --  | | | | | | rhs_optional_item: B
+         --  | | | | | | | LEFT_BRACKET: B.Children (1)
+         --  | | | | | | | rhs_alternative_list: B.Children (2) b
+         --  | | | | | | | RIGHT_BRACKET: B.Children (3)
+         --  | | | rhs_element: c.first
+         --  | | | | rhs_item:
+
+         use LR_Utils;
+         use LR_Utils.Creators;
+
+         Container_List_Root : constant Valid_Node_Access := Insert_Optional_RHS (B);
+      begin
+         case Tree.RHS_Index (B) is
+         when 0 | 1 =>
+            --  : LEFT_BRACKET rhs_alternative_list RIGHT_BRACKET
+            --  | LEFT_PAREN rhs_alternative_list RIGHT_PAREN QUESTION
+
+            declare
+               Container_List : LR_Utils.List :=
+                 (if Tree.ID (Container_List_Root) = +rhs_list_ID
+                  then Create_List
+                    (Tree,
+                     Root         => Container_List_Root,
+                     List_ID      => +rhs_list_ID,
+                     Element_ID   => +rhs_ID,
+                     Separator_ID => +BAR_ID)
+                  else Create_List
+                    (Tree,
+                     Root         => Container_List_Root,
+                     List_ID      => +rhs_alternative_list_ID,
+                     Element_ID   => +rhs_item_list_ID,
+                     Separator_ID => +BAR_ID));
+
+               Container_Cur : Cursor := Container_List.Find
+                 (if Container_List.Element_ID = +rhs_ID
+                  then Tree.Find_Ancestor (B, +rhs_ID)
+                  else List_Root (Tree, Tree.Find_Ancestor (B, +rhs_item_list_ID), +rhs_item_list_ID));
+
+               ABC_List : List := Create_From_Element
+                 (Tree, Tree.Parent (B, 2),
+                  List_ID      => +rhs_item_list_ID,
+                  Element_ID   => +rhs_element_ID,
+                  Separator_ID => Invalid_Token_ID);
+
+               ABC_Iter : constant Iterator := ABC_List.Iterate;
+
+               ABC_B_Cur   : constant Cursor := ABC_List.To_Cursor (Tree.Parent (B, 2));
+               ABC_A_Last  : constant Cursor := ABC_Iter.Previous (ABC_B_Cur);
+               ABC_C_First : constant Cursor := ABC_Iter.Next (ABC_B_Cur);
+
+               B_Alternative_List : constant Constant_List := Create_List
+                 (Tree, Tree.Child (B, 2), +rhs_alternative_list_ID, +rhs_item_list_ID);
+
+            begin
+               --  An alternate design would be to splice together the existing A,
+               --  B_i, C; but it's too hard to get all the parent updates right.
+               for Alt of reverse B_Alternative_List loop
+
+                  declare
+                     B_Item_List : constant Constant_List := Create_List
+                       (Tree, Alt, +rhs_item_list_ID, +rhs_element_ID);
+
+                     New_ABC : List := Empty_List (ABC_List);
+                  begin
+                     if Has_Element (ABC_A_Last) then
+                        Copy (Source_List => ABC_List,
+                              Source_Last => ABC_A_Last,
+                              Dest_List   => New_ABC,
+                              User_Data   => Data_Access);
+                     end if;
+
+                     Copy (B_Item_List, Dest_List => New_ABC, User_Data => Data_Access);
+
+                     if Has_Element (ABC_C_First) then
+                        Copy (ABC_List, Source_First => ABC_C_First, Dest_List => New_ABC, User_Data => Data_Access);
+                     end if;
+
+                     if Container_List.Element_ID = +rhs_ID then
+                        Insert_RHS
+                          (Container_List,
+                           New_ABC.Root,
+                           After             => Element (Container_Cur),
+                           Auto_Token_Labels => Get_RHS_Auto_Token_Labels (B));
+                     else
+                        Container_List.Insert (New_ABC.Root, After => Container_Cur);
+                     end if;
+
+                     Record_Copied_EBNF_Nodes (New_ABC.Root);
+                  end;
+               end loop;
+
+               Erase_Deleted_EBNF_Nodes (Element (Container_Cur));
+               --  This includes B, so we don't do 'Clear_EBNF_Node (B)'.
+
+               Container_List.Delete (Container_Cur);
+            end;
+
+         when 2 =>
+            --  | IDENTIFIER QUESTION
+            --
+            --  Current tree:
+            --   rhs_item_3
+            --   | rhs_optional_item_2: B
+            --   | | IDENTIFIER
+            --   | | QUESTION
+            --
+            --  Change to:
+            --   rhs_item_0
+            --   | IDENTIFIER
+
+            declare
+               Parent_Var : Node_Access := Tree.Parent (B);
+            begin
+               Tree.Set_Children (Parent_Var, (+rhs_item_ID, 0), (1 => Tree.Child (B, 1)));
+               Clear_EBNF_Node (B);
+            end;
+
+         when 3 =>
+            --  | STRING_LITERAL_2 QUESTION
+            declare
+               Parent_Var : Node_Access := Tree.Parent (B);
+            begin
+               Tree.Set_Children (Parent_Var, (+rhs_item_ID, 1), (1 => Tree.Child (B, 1)));
+               Clear_EBNF_Node (B);
+            end;
+
+         when others =>
+            WisiToken.Syntax_Trees.LR_Utils.Raise_Programmer_Error
+              ("translate_ebnf_to_bnf rhs_optional_item unimplemented", Tree, B);
+         end case;
+
+         if WisiToken.Trace_Generate_EBNF > Detail then
+            Ada.Text_IO.New_Line;
+            Ada.Text_IO.Put_Line ("Translate_RHS_Optional_Item edited:");
+            Tree.Print_Tree (Container_List_Root);
+         end if;
+      end Translate_RHS_Optional_Item;
+
+      procedure Translate_Token_Literal (Node : in Valid_Node_Access)
+      is
+         use LR_Utils;
+
+         Name_Ident : Identifier_Index;
+
+         function Equal
+           (Target : in String;
+            List   : in Constant_List'Class;
+            N      : in Valid_Node_Access)
+           return Boolean
+         is
+            pragma Unreferenced (List);
+         begin
+            if To_Token_Enum (Tree.ID (Tree.Child (N, 1))) = declaration_ID then
+               declare
+                  Decl : constant Node_Access := Tree.Child (N, 1);
+
+                  Name_Node : constant Node_Access :=
+                    (case To_Token_Enum (Tree.ID (Tree.Child (Decl, 2))) is
+                     when Wisitoken_Grammar_Actions.TOKEN_ID | NON_GRAMMAR_ID => Tree.Child (Decl, 6),
+                     when KEYWORD_ID => Tree.Child (Decl, 3),
+                     when others => Invalid_Node_Access);
+
+                  Regexp_String_Node : constant Node_Access :=
+                    (case To_Token_Enum (Tree.ID (Tree.Child (Decl, 2))) is
+                     when Wisitoken_Grammar_Actions.TOKEN_ID | NON_GRAMMAR_ID => Tree.Child (Decl, 7),
+                     when KEYWORD_ID => Tree.Child (Decl, 4),
+                     when others => Invalid_Node_Access);
+
+                  Value_Node : constant Node_Access :=
+                    (if Regexp_String_Node = Invalid_Node_Access
+                     then Invalid_Node_Access
+                     else Tree.Child (Regexp_String_Node, 1));
+               begin
+                  if Value_Node = Invalid_Node_Access then
+                     return False;
+
+                  elsif To_Token_Enum (Tree.ID (Value_Node)) in
+                    IDENTIFIER_ID | REGEXP_ID | STRING_LITERAL_1_ID | STRING_LITERAL_2_ID and then
+                    Target = Get_Text (Data, Tree, Value_Node, Strip_Quotes => True)
+                  then
+                     case Tree.Label (Name_Node) is
+                     when Source_Terminal =>
+                        Name_Ident := New_Identifier (Get_Text (Data, Tree, Name_Node));
+                     when Virtual_Identifier =>
+                        Name_Ident := Tree.Identifier (Name_Node);
+                     when others =>
+                        raise SAL.Programmer_Error;
+                     end case;
+                     return True;
+                  else
+                     return False;
+                  end if;
+               end;
+            else
+               return False;
+            end if;
+         end Equal;
+
+         Value : constant String      := Get_Text (Data, Tree, Node, Strip_Quotes => True);
+         Found : constant Node_Access := Find_Nonterminal (Value, Equal'Unrestricted_Access);
+         --  Found declares a name for the literal
+      begin
+         if Found = Invalid_Node_Access then
+            if GNAT.Regexp.Match (Value, Symbol_Regexp) then
+               --  Don't need to declare keywords.
+               Name_Ident := New_Identifier (Ada.Characters.Handling.To_Upper (Value));
+            else
+               WisiToken.Generate.Put_Error
+                 (Tree.Error_Message (Node, "punctuation token '" & Value & "' not declared"));
+
+               Clear_EBNF_Node (Node); -- So we don't try again
+               return;
+            end if;
+         end if;
+
+         --  Replace string literal in rhs_item
+         declare
+            Parent    : Valid_Node_Access := Tree.Parent (Node);
+            New_Child : constant Valid_Node_Access := Tree.Add_Identifier (+IDENTIFIER_ID, Name_Ident);
+         begin
+            case To_Token_Enum (Tree.ID (Parent)) is
+            when rhs_item_ID =>
+               Tree.Set_Children
+                 (Parent,
+                  (+rhs_item_ID, 0),
+                  (1 => New_Child));
+
+            when rhs_optional_item_ID =>
+               Tree.Set_Children
+                 (Parent,
+                  (+rhs_optional_item_ID, 2),
+                  (New_Child, Tree.Child (Parent, 2)));
+
+            when others =>
+               WisiToken.Syntax_Trees.LR_Utils.Raise_Programmer_Error
+                 ("translate_ebnf_to_bnf string_literal_2 unimplemented", Tree, Node);
+            end case;
+            Copy_Non_Grammar (Node, New_Child);
+         end;
+
+         Clear_EBNF_Node (Node);
+         if Found /= Invalid_Node_Access then
+            return;
+         end if;
+
+         --  Declare token for keyword string literal
+         declare
+            Keyword       : constant Valid_Node_Access := Tree.Add_Identifier (+KEYWORD_ID, Keyword_Ident);
+            Value_Literal : constant Valid_Node_Access := Tree.Add_Identifier
+              (+STRING_LITERAL_1_ID, New_Identifier ('"' & Value & '"'));
+            Regexp_String : constant Valid_Node_Access := Tree.Add_Nonterm
+              ((+regexp_string_ID, 1),
+               (1            => Value_Literal),
+               Clear_Parents => False);
+
+            Percent : constant Valid_Node_Access := Tree.Add_Identifier (+PERCENT_ID, Percent_Ident);
+            Name    : constant Valid_Node_Access := Tree.Add_Identifier (+IDENTIFIER_ID, Name_Ident);
+            Decl    : constant Valid_Node_Access := Tree.Add_Nonterm
+              ((+declaration_ID, 0), (Percent, Keyword, Name, Regexp_String),
+               Clear_Parents => False);
+         begin
+            Add_Compilation_Unit ("literal token", Decl, Prepend => True);
+         end;
+
+      end Translate_Token_Literal;
+
+      procedure Process_Node (Node : in Valid_Node_Access)
+      is begin
+         case To_Token_Enum (Tree.ID (Node)) is
+         --  Token_Enum_ID alphabetical order
+         when declaration_ID =>
+            --  Must be "%meta_syntax EBNF"; change to BNF
+            declare
+               Decl_Item    : Valid_Node_Access          := Tree.Find_Descendant
+                 (Tree.Child (Node, 3), +declaration_item_ID);
+               Old_Children : constant Node_Access_Array := Tree.Children (Decl_Item);
+               New_Children : constant Node_Access_Array :=
+                 (1 => Tree.Add_Identifier (+IDENTIFIER_ID, New_Identifier ("BNF")));
+            begin
+               Copy_Non_Grammar (Old_Children (1), New_Children (1));
+               Tree.Set_Children (Decl_Item, (+declaration_item_ID, 1), New_Children);
+            end;
+            Clear_EBNF_Node (Node);
+
+         when rhs_alternative_list_ID =>
+            --  All handled by New_Nonterminal*
+            raise SAL.Programmer_Error;
+
+         when rhs_attribute_ID =>
+            --  Just delete it
+            declare
+               use LR_Utils;
+               RHS_Item_List : List := Creators.Create_From_Element
+                 (Tree, Tree.Parent (Node, 2), +rhs_item_list_ID, +rhs_element_ID, Invalid_Token_ID);
+               Element : Cursor := RHS_Item_List.To_Cursor (Tree.Parent (Node, 2));
+            begin
+               RHS_Item_List.Delete (Element);
+            end;
+            Clear_EBNF_Node (Node);
+
+         when rhs_group_item_ID =>
+            Translate_RHS_Group_Item (Node);
+
+         when rhs_multiple_item_ID =>
+            Translate_RHS_Multiple_Item (Node);
+
+         when rhs_optional_item_ID =>
+            Translate_RHS_Optional_Item (Node);
+
+         when STRING_LITERAL_2_ID =>
+            Translate_Token_Literal (Node);
+
+         when others =>
+            WisiToken.Syntax_Trees.LR_Utils.Raise_Programmer_Error ("unimplemented EBNF node", Tree, Node);
+         end case;
+
+      exception
+      when SAL.Programmer_Error =>
+         raise;
+      when E : others =>
+         if Debug_Mode then
+            --  The traceback from a failed precondition is most useful
+            raise;
+         else
+            WisiToken.Syntax_Trees.LR_Utils.Raise_Programmer_Error
+              ("unhandled exception " & Ada.Exceptions.Exception_Name (E) & ": " &
+                 Ada.Exceptions.Exception_Message (E),
+               Tree, Node);
+         end if;
+      end Process_Node;
+
+      procedure Check_Original_Copied_EBNF
+      is
+         use Ada.Text_IO;
+         Subtree_Root  : Node_Access;
+         Error_Present : Boolean := False;
+      begin
+         for N of EBNF_Nodes loop
+            Subtree_Root := Tree.Subtree_Root (N);
+            if Subtree_Root /= Tree.Root then
+               Put_Line (Current_Error, Tree.Error_Message (N, Tree.Image (N, Node_Numbers => True)));
+               Put_Line
+                 (Current_Error,
+                  "... Original_EBNF not in tree; in root " & Trimmed_Image (Get_Node_Index (Subtree_Root)));
+               WisiToken.Generate.Error := True;
+               Error_Present := True;
+            end if;
+         end loop;
+         for N of Copied_EBNF_Nodes loop
+            Subtree_Root := Tree.Subtree_Root (N);
+            if Subtree_Root /= Tree.Root then
+               Put_Line (Current_Error, Tree.Error_Message (N, Tree.Image (N, Node_Numbers => True)));
+               Put_Line
+                 (Current_Error,
+                  "... Copied_EBNF not in tree; in root" & Trimmed_Image (Get_Node_Index (Subtree_Root)));
+               WisiToken.Generate.Error := True;
+               Error_Present := True;
+            end if;
+         end loop;
+         if Error_Present then
+            Ada.Text_IO.New_Line;
+            Ada.Text_IO.Put_Line ("tree:");
+            Tree.Print_Tree;
+         end if;
+      end Check_Original_Copied_EBNF;
+   begin
+      EBNF_Allowed := True;
+
+      if Debug_Mode then
+         Tree.Validate_Tree
+           (Data, Data.Error_Reported,
+            Root              => Tree.Root,
+            Validate_Node     => Validate_Node'Access,
+            Node_Index_Order  => True,
+            Byte_Region_Order => True);
+
+         if Data.Error_Reported.Count > 0 then
+            Ada.Text_IO.New_Line;
+            Ada.Text_IO.Put_Line ("initial invalid tree:");
+            Tree.Print_Tree;
+         end if;
+      end if;
+
+      --  Set EBNF_Nodes
+      declare
+         procedure Process_Node (Tree : in out Syntax_Trees.Tree; Node : in Valid_Node_Access)
+         is begin
+            if Tree.Augmented (Node) = null then
+               null;
+            elsif Augmented_Access (Tree.Augmented (Node)).EBNF then
+               EBNF_Nodes.Insert (Node);
+            end if;
+         end Process_Node;
+      begin
+         Tree.Process_Tree (Process_Node'Access);
+      end;
+
+      --  Apply labels if needed, so they are consistent in copied RHS
+      declare
+         use LR_Utils;
+         use LR_Utils.Creators;
+
+         --  Tree.Root is wisitoken_accept, first child is SOI
+         List : constant Constant_List := Create_List
+           (Tree, Tree.Child (Tree.Root, 2), +compilation_unit_list_ID, +compilation_unit_ID);
+      begin
+         for Unit of List loop
+            declare
+               Nonterm  : constant Valid_Node_Access := Tree.Child (Unit, 1);
+            begin
+               if Tree.ID (Nonterm) = +nonterminal_ID then
+                  declare
+                     RHS_List : constant Constant_List := Creators.Create_List
+                       (Tree, Tree.Child (Nonterm, 3), +rhs_list_ID, +rhs_ID);
+                  begin
+                     for RHS of RHS_List loop
+                        Last_Token_Index := 0;
+
+                        if Needs_Token_Labels (RHS) then
+                           Add_Token_Labels (RHS);
+                        end if;
+                     end loop;
+                  end;
+               end if;
+            end;
+         end loop;
+      end;
+
+      if Debug_Mode then
+         --  We've edited the tree, creating new nodes, so Node_Index_Order is
+         --  no longer valid. We've reused name tokens, so byte_region_order is
+         --  not valid.
+         Tree.Validate_Tree
+           (Data, Data.Error_Reported,
+            Root              => Tree.Root,
+            Validate_Node     => Validate_Node'Access,
+            Node_Index_Order  => False,
+            Byte_Region_Order => False);
+         if Data.Error_Reported.Count /= 0 then
+            Ada.Text_IO.New_Line;
+            Ada.Text_IO.Put_Line ("invalid tree after Add_Token_Labels:");
+            Tree.Print_Tree;
+         end if;
+      end if;
+
+      --  Process nodes in node increasing order, so contained items are
+      --  translated first, so duplicates of the containing item can be found.
+      --
+      --  Process_Node calls EBNF_Nodes.Delete, which is invalid when
+      --  an iterator is active. So first we extract the list of nodes to
+      --  process.
+      declare
+         Nodes_To_Process : Valid_Node_Access_Array (1 .. SAL.Base_Peek_Type (EBNF_Nodes.Count)) :=
+           --  WORKAROUND: GNAT Community 2020 -ada2020 doesn't support 'of' iterator here
+           --  (for Node of EBNF_Nodes => Node);
+           (others => Syntax_Trees.Dummy_Node);
+         I : SAL.Base_Peek_Type := 1;
+      begin
+         for Node of EBNF_Nodes loop
+            Nodes_To_Process (I) := Node;
+            I := I + 1;
+         end loop;
+
+         for Node of Nodes_To_Process loop
+            --  Node may have been deleted from EBNF_Nodes
+            if EBNF_Nodes.Contains (Node) then
+               if Trace_Generate_EBNF > Outline then
+                  Ada.Text_IO.New_Line;
+                  Ada.Text_IO.Put_Line
+                    ("translate original node " & Tree.Image
+                       (Node,
+                        RHS_Index    => True,
+                        Node_Numbers => True));
+               end if;
+
+               Process_Node (Node);
+
+               if Debug_Mode then
+                  Tree.Validate_Tree
+                    (Data, Data.Error_Reported,
+                     Root              => Tree.Root,
+                     Validate_Node     => Validate_Node'Access,
+                     Node_Index_Order  => False,
+                     Byte_Region_Order => False);
+                  if Data.Error_Reported.Count /= 0 then
+                     Ada.Text_IO.New_Line;
+                     Ada.Text_IO.Put_Line ("invalid tree after translate one node:");
+                     Tree.Print_Tree;
+                  end if;
+                  Check_Original_Copied_EBNF;
+               end if;
+            end if;
+         end loop;
+      end;
+
+      declare
+         use Ada.Text_IO;
+      begin
+         for Node of EBNF_Nodes loop
+            Put_Line
+              (Current_Error,
+               Tree.Error_Message
+                 (Node,
+                  Tree.Image
+                    (Node,
+                     RHS_Index    => True,
+                     Children     => Trace_Generate_EBNF > Detail,
+                     Node_Numbers => True)));
+            Put_Line (Current_Error, "... original EBNF node not translated");
+         end loop;
+      end;
+
+      --  Processing copied nodes edits Copied_EBNF_Nodes, so we extract the
+      --  nodes first, and repeat.
+      loop
+         exit when Copied_EBNF_Nodes.Count = 0;
+         declare
+            Nodes_To_Process : Valid_Node_Access_Array (1 .. SAL.Base_Peek_Type (Copied_EBNF_Nodes.Count)) :=
+              (others => Syntax_Trees.Dummy_Node);
+            I : SAL.Base_Peek_Type := 1;
+         begin
+            for Node of Copied_EBNF_Nodes loop
+               Nodes_To_Process (I) := Node;
+               I := I + 1;
+            end loop;
+
+            for Node of Nodes_To_Process loop
+               if Copied_EBNF_Nodes.Contains (Node) then
+                  if Trace_Generate_EBNF > Outline then
+                     Ada.Text_IO.New_Line;
+                     Ada.Text_IO.Put_Line
+                       ("translate copied node " & Tree.Image
+                          (Node,
+                           RHS_Index    => True,
+                           Node_Numbers => True));
+                  end if;
+
+                  Process_Node (Node);
+
+                  if Debug_Mode then
+                     Tree.Validate_Tree
+                       (Data, Data.Error_Reported,
+                        Root              => Tree.Root,
+                        Validate_Node     => Validate_Node'Access,
+                        Node_Index_Order  => False,
+                        Byte_Region_Order => False);
+                     if Data.Error_Reported.Count /= 0 then
+                        Ada.Text_IO.New_Line;
+                        Ada.Text_IO.Put_Line ("invalid tree after translate copied node:");
+                        Tree.Print_Tree;
+                     end if;
+                     Check_Original_Copied_EBNF;
+                  end if;
+               end if;
+            end loop;
+         end;
+      end loop;
+
+      declare
+         use Ada.Text_IO;
+      begin
+         for Node of Copied_EBNF_Nodes loop
+            Put_Line
+              (Current_Error,
+               Tree.Error_Message
+                 (Node,
+                  Tree.Image
+                    (Node,
+                     RHS_Index    => True,
+                     Children     => Trace_Generate_EBNF > Detail,
+                     Node_Numbers => True)));
+            Put_Line (Current_Error, "... copied EBNF node not translated");
+         end loop;
+      end;
+
+      EBNF_Allowed := False;
+      if Debug_Mode then
+         Tree.Validate_Tree
+           (Data, Data.Error_Reported,
+            Root              => Tree.Root,
+            Validate_Node     => Validate_Node'Access,
+            Node_Index_Order  => False,
+            Byte_Region_Order => False);
+         if Data.Error_Reported.Count /= 0 then
+            Ada.Text_IO.New_Line;
+            Ada.Text_IO.Put_Line ("invalid tree after Data.EBNF_Allowed False:");
+            Tree.Print_Tree;
+         end if;
+      end if;
+      Data.Meta_Syntax := BNF_Syntax;
+
+      if Trace_Generate_EBNF > Detail then
+         Ada.Text_IO.New_Line;
+         Ada.Text_IO.Put_Line ("Identifiers:");
+         for I in Data.Tokens.Virtual_Identifiers.First_Index .. Data.Tokens.Virtual_Identifiers.Last_Index loop
+            Ada.Text_IO.Put_Line (Base_Identifier_Index'Image (I) & " " & (-Data.Tokens.Virtual_Identifiers (I)));
+         end loop;
+      end if;
+   end Translate_EBNF_To_BNF;
+
+   procedure Print_Source
+     (File_Name : in String;
+      Tree      : in Syntax_Trees.Tree;
+      Data      : in WisiToken_Grammar_Runtime.User_Data_Type)
+   is
+      use Ada.Text_IO;
+
+      File : File_Type;
+
+      procedure Put_Comments
+        (Node           : in Node_Access;
+         Force_New_Line : in Boolean := False;
+         Force_Comment  : in String  := "")
+      is begin
+         if Node = null then
+            if Force_New_Line then
+               New_Line (File);
+            end if;
+            return;
+         end if;
+         declare
+            use all type Ada.Containers.Count_Type;
+            Last_Term   : constant Node_Access               := Tree.Last_Terminal (Node);
+            Non_Grammar : constant Lexer.Token_Arrays.Vector :=
+              (if Last_Term = Invalid_Node_Access
+               then Lexer.Token_Arrays.Empty_Vector
+               else (case Tree.Label (Last_Term) is
+                     when Terminal_Label => Tree.Non_Grammar_Const (Last_Term),
+                     when others => Lexer.Token_Arrays.Empty_Vector));
+
+            Comments_Include_Newline : Boolean := False;
+         begin
+            if Non_Grammar.Length = 0 then
+               if Force_Comment /= "" then
+                  Put_Line (File, Force_Comment);
+
+               elsif Force_New_Line then
+                  New_Line (File);
+               end if;
+            else
+               for Token of Non_Grammar loop
+                  if Contains_New_Line (Token.Line_Region) then
+                     Comments_Include_Newline := True;
+                  end if;
+                  Put (File, Tree.Lexer.Buffer_Text (Token.Byte_Region));
+               end loop;
+               if Force_New_Line and not Comments_Include_Newline then
+                  New_Line (File);
+               end if;
+            end if;
+         end;
+      end Put_Comments;
+
+      procedure Put_Regexp_String (Node : in Valid_Node_Access)
+      is
+         Children : constant Node_Access_Array := Tree.Children (Node);
+      begin
+         pragma Assert (Children'Length = 1);
+         case To_Token_Enum (Tree.ID (Children (1))) is
+         when STRING_LITERAL_1_ID | STRING_LITERAL_2_ID =>
+            Put (File, ' ' & Get_Text (Data, Tree, Children (1)));
+         when REGEXP_ID =>
+            Put (File, " %[" & Get_Text (Data, Tree, Children (1)) & "]%");
+         when others =>
+            raise SAL.Programmer_Error;
+         end case;
+      end Put_Regexp_String;
+
+      procedure Put_Declaration_Item (Node : in Valid_Node_Access)
+      is
+         Children : constant Node_Access_Array := Tree.Children (Node);
+      begin
+         pragma Assert (Children'Length = 1);
+         case To_Token_Enum (Tree.ID (Children (1))) is
+         when IDENTIFIER_ID | NUMERIC_LITERAL_ID =>
+            Put (File, ' ' & Get_Text (Data, Tree, Children (1)));
+         when regexp_string_ID =>
+            Put_Regexp_String (Children (1));
+         when others =>
+            raise SAL.Programmer_Error;
+         end case;
+      end Put_Declaration_Item;
+
+      procedure Put_Declaration_Item_List (Node : in Valid_Node_Access)
+      is
+         Children : constant Node_Access_Array := Tree.Children (Node);
+      begin
+         if Children'Length = 1 then
+            Put_Declaration_Item (Children (1));
+         else
+            Put_Declaration_Item_List (Children (1));
+            Put_Declaration_Item (Children (2));
+         end if;
+      end Put_Declaration_Item_List;
+
+      procedure Put_Identifier_List (Node : in Valid_Node_Access)
+      is
+         Children : constant Node_Access_Array := Tree.Children (Node);
+      begin
+         if Children'Length = 1 then
+            Put (File, Get_Text (Data, Tree, Children (1)));
+         else
+            Put_Identifier_List (Children (1));
+            Put (File, ' ');
+            Put (File, Get_Text (Data, Tree, Children (2)));
+         end if;
+      end Put_Identifier_List;
+
+      procedure Put_RHS_Element (Node : in Valid_Node_Access)
+      with Pre => Tree.ID (Node) = +rhs_element_ID
+      is begin
+         --  We don't raise an exception for errors here; it's easier to debug from the
+         --  mangled source listing.
+
+         case Tree.RHS_Index (Node) is
+         when 0 =>
+            Put (File, Get_Text (Data, Tree, Node));
+
+         when 1 =>
+            --  Output no spaces around "="
+            declare
+               Children : constant Node_Access_Array := Tree.Children (Node);
+            begin
+               Put
+                 (File, Get_Text (Data, Tree, Children (1)) & "=" & Get_Text (Data, Tree, Children (3)));
+            end;
+
+         when others =>
+            New_Line (File);
+            Put (File, " ;; not translated: " & Trimmed_Image (Get_Node_Index (Node)) & ":" &
+                   Tree.Image
+                     (Node,
+                      Children     => True,
+                      RHS_Index    => True,
+                      Node_Numbers => True));
+         end case;
+      exception
+      when SAL.Programmer_Error =>
+         raise;
+
+      when E : others =>
+         declare
+            use Ada.Exceptions;
+         begin
+            WisiToken.Syntax_Trees.LR_Utils.Raise_Programmer_Error
+              ("Put_RHS_Element: " & Exception_Name (E) & ": " & Exception_Message (E), Tree, Node);
+         end;
+      end Put_RHS_Element;
+
+      procedure Put_RHS_Item_List (Node : in Valid_Node_Access)
+      with Pre => Tree.ID (Node) = +rhs_item_list_ID
+      is
+         Children : constant Node_Access_Array := Tree.Children (Node);
+      begin
+         if Children'Length = 1 then
+            Put_RHS_Element (Children (1));
+         else
+            Put_RHS_Item_List (Children (1));
+            Put (File, ' ');
+            Put_RHS_Element (Children (2));
+         end if;
+      exception
+      when SAL.Programmer_Error =>
+         raise;
+
+      when E : others =>
+         declare
+            use Ada.Exceptions;
+         begin
+            WisiToken.Syntax_Trees.LR_Utils.Raise_Programmer_Error
+              ("Put_RHS_Item_List: " & Exception_Name (E) & ": " & Exception_Message (E), Tree, Node);
+         end;
+      end Put_RHS_Item_List;
+
+      procedure Put_RHS
+        (Node  : in Valid_Node_Access;
+         First : in Boolean)
+      with Pre => Tree.ID (Node) = +rhs_ID
+      is
+         Children : constant Node_Access_Array := Tree.Children (Node);
+      begin
+         Put (File, (if First then "  : " else "  | "));
+         case Tree.RHS_Index (Node) is
+         when 0 =>
+            Put_Comments (Tree.Parent (Node), Force_Comment => ";; empty");
+
+         when 1 .. 3 =>
+            Put_RHS_Item_List (Children (1));
+            Put_Comments (Children (1), Force_New_Line => True);
+
+            if Tree.RHS_Index (Node) > 1 then
+               Put (File, "    %(" & Get_Text (Data, Tree, Children (2)) & ")%"); -- action
+               Put_Comments (Children (2), Force_New_Line => True);
+
+               if Tree.RHS_Index (Node) > 2 then
+                  Put (File, "    %(" & Get_Text (Data, Tree, Children (3)) & ")%"); -- check
+                  Put_Comments (Children (3), Force_New_Line => True);
+               end if;
+            end if;
+
+         when others =>
+            WisiToken.Syntax_Trees.LR_Utils.Raise_Programmer_Error ("Put_RHS", Tree, Node);
+         end case;
+      exception
+      when SAL.Programmer_Error =>
+         raise;
+
+      when E : others =>
+         declare
+            use Ada.Exceptions;
+         begin
+            WisiToken.Syntax_Trees.LR_Utils.Raise_Programmer_Error
+              ("Put_RHS: " & Exception_Name (E) & ": " & Exception_Message (E), Tree, Node);
+         end;
+      end Put_RHS;
+
+      procedure Put_RHS_List
+        (Node    : in     Valid_Node_Access;
+         First   : in out Boolean;
+         Virtual : in     Boolean)
+      with Pre => Tree.ID (Node) = +rhs_list_ID
+      is
+         Children : constant Node_Access_Array := Tree.Children (Node);
+      begin
+         case Tree.RHS_Index (Node) is
+         when 0 =>
+            Put_RHS (Children (1), First);
+            First := False;
+         when 1 =>
+            Put_RHS_List (Children (1), First, Virtual);
+            Put_RHS (Children (3), First => False);
+         when 2 =>
+            Put
+              (File, "%if " & Get_Text (Data, Tree, Children (3)) & " = " & Get_Text
+                 (Data, Tree, Children (4)));
+            Put_Comments (Node);
+
+         when 3 =>
+            Put (File, "%end if");
+            Put_Comments (Node);
+
+         when others =>
+            WisiToken.Syntax_Trees.LR_Utils.Raise_Programmer_Error ("Put_RHS_List", Tree, Node);
+         end case;
+      exception
+      when SAL.Programmer_Error =>
+         raise;
+
+      when E : others =>
+         declare
+            use Ada.Exceptions;
+         begin
+            WisiToken.Syntax_Trees.LR_Utils.Raise_Programmer_Error
+              ("Put_RHS_List: " & Exception_Name (E) & ": " & Exception_Message (E), Tree, Node);
+         end;
+      end Put_RHS_List;
+
+      procedure Process_Node (Node : in Valid_Node_Access)
+      is begin
+         case To_Token_Enum (Tree.ID (Node)) is
+         --  Enum_Token_ID alphabetical order
+         when compilation_unit_ID =>
+            Process_Node (Tree.Child (Node, 1));
+
+         when compilation_unit_list_ID =>
+            declare
+               Children : constant Node_Access_Array := Tree.Children (Node);
+            begin
+               case To_Token_Enum (Tree.ID (Children (1))) is
+               when compilation_unit_list_ID =>
+                  Process_Node (Children (1));
+                  Process_Node (Children (2));
+               when compilation_unit_ID =>
+                  Process_Node (Children (1));
+               when others =>
+                  raise SAL.Programmer_Error;
+               end case;
+            end;
+
+         when declaration_ID =>
+            declare
+               use all type SAL.Base_Peek_Type;
+
+               Children : constant Node_Access_Array := Tree.Children (Node);
+            begin
+               case To_Token_Enum (Tree.ID (Children (2))) is
+               when Wisitoken_Grammar_Actions.TOKEN_ID | NON_GRAMMAR_ID =>
+                  Put (File,
+                       (if To_Token_Enum (Tree.ID (Children (2))) = Wisitoken_Grammar_Actions.TOKEN_ID
+                        then "%token <"
+                        else "%non_grammar <"));
+
+                  Put (File, Get_Text (Data, Tree, Children (4)) & "> " & Get_Text (Data, Tree, Children (6)));
+
+                  if Children'Last >= 7 then
+                     Put_Regexp_String (Children (7));
+                  end if;
+
+                  if Children'Last = 8 then
+                     Put_Regexp_String (Children (8));
+                  end if;
+                  Put_Comments (Node, Force_New_Line => True);
+
+               when KEYWORD_ID =>
+                  Put (File, "%keyword " & Get_Text (Data, Tree, Children (3)));
+                  Put_Regexp_String (Children (4));
+                  Put_Comments (Children (4), Force_New_Line => True);
+
+               when CODE_ID =>
+                  Put (File, "%code ");
+                  Put_Identifier_List (Children (3));
+                  Put (File, " %{" & Get_Text (Data, Tree, Children (4)) & "}%"); -- RAW_CODE
+                  Put_Comments (Node);
+
+               when CONFLICT_ID | CONFLICT_RESOLUTION_ID =>
+                  Put (File,
+                       (if To_Token_Enum (Tree.ID (Children (2))) = CONFLICT_ID
+                        then "%conflict "
+                        else "%conflict_resolution "));
+                  Put (File, Get_Text (Data, Tree, Children (3))); -- conflict_item_list
+                  Put (File, " on token " & Get_Text (Data, Tree, Children (6)));
+                  if Children'Last = 8 then
+                     Put (File, " : " & Get_Text (Data, Tree, Children (8)));
+                     Put_Comments (Children (8), Force_New_Line => True);
+                  else
+                     Put_Comments (Children (6), Force_New_Line => True);
+                  end if;
+
+               when IDENTIFIER_ID =>
+                  Put (File, "%" & Get_Text (Data, Tree, Children (2)));
+                  if Children'Last = 3 then
+                     Put_Declaration_Item_List (Children (3));
+                  end if;
+                  Put_Comments (Node);
+
+               when IF_ID =>
+                  Put (File,
+                       "%if " &
+                         Get_Text (Data, Tree, Children (3)) &
+                         (if To_Token_Enum (Tree.ID (Children (4))) = EQUAL_ID
+                          then " = "
+                          else " in ") &
+                         Get_Text (Data, Tree, Children (5)));
+                  Put_Comments (Children (5));
+
+               when ELSIF_ID =>
+                  Put
+                    (File, "%elsif " & Get_Text (Data, Tree, Children (3)) &
+                       (if To_Token_Enum (Tree.ID (Children (4))) = EQUAL_ID
+                        then " = "
+                        else " in ") &
+                       Get_Text
+                         (Data, Tree, Children (5)));
+                  Put_Comments (Node);
+
+               when END_ID =>
+                  Put (File, "%end if");
+                  Put_Comments (Node);
+
+               when others =>
+                  raise SAL.Programmer_Error;
+               end case;
+            end;
+
+         when nonterminal_ID =>
+            declare
+               Children : constant Node_Access_Array := Tree.Children (Node);
+               Virtual  : constant Boolean           := Tree.Label (Children (1)) = Virtual_Identifier;
+               First    : Boolean                    := True;
+            begin
+               Put (File, Get_Text (Data, Tree, Children (1)));
+               Put_Comments (Children (1), Force_New_Line => True);
+
+               Put_RHS_List (Children (3), First, Virtual);
+
+               --  We force a terminating ";" here, to speed parsing in _bnf.wy files.
+               if Tree.RHS_Index (Children (4)) = 1 then
+                  --  Empty
+                  Put_Line (File, "  ;");
+               else
+                  --  ";" present, including trailing newline, unless virtual.
+                  Put (File, "  ;");
+                  Put_Comments (Children (4), Force_New_Line => True);
+               end if;
+            end;
+
+         when wisitoken_accept_ID =>
+            Process_Node (Tree.Child (Node, 2));
+
+         when others =>
+            raise SAL.Not_Implemented with Image (Tree.ID (Node), Wisitoken_Grammar_Actions.Descriptor);
+         end case;
+      end Process_Node;
+   begin
+      Create (File, Out_File, File_Name);
+      declare
+         use all type Ada.Containers.Count_Type;
+         use Ada.Strings.Fixed;
+         Leading_Non_Grammar : Lexer.Token_Arrays.Vector renames Tree.Non_Grammar_Const (Tree.SOI);
+         First_Comment : constant String :=
+           (if Leading_Non_Grammar.Length > 0
+            then Tree.Lexer.Buffer_Text (Leading_Non_Grammar (1).Byte_Region)
+            else "");
+         Local_Var_Start   : constant Integer := Index (First_Comment, "-*-");
+         Local_Var_End     : constant Integer := Index
+           (First_Comment (Local_Var_Start + 3 .. First_Comment'Last), "-*-");
+         Local_Var_Default : constant String  := "buffer-read-only:t";
+      begin
+         Put_Line
+           (File, ";;; generated from " & Tree.Lexer.File_Name & " -*- " &
+              Local_Var_Default &
+              (if Local_Var_Start > First_Comment'First and Local_Var_End > First_Comment'First
+               then First_Comment (Local_Var_Start + 3 .. Local_Var_End - 1)
+               else "") &
+              " -*-");
+         Put_Line (File, ";;;");
+         for Token of Leading_Non_Grammar loop
+            if Token.ID /= Tree.Lexer.Descriptor.SOI_ID then
+               Put (File, Tree.Lexer.Buffer_Text (Token.Byte_Region));
+            end if;
+         end loop;
+      end;
+
+      Process_Node (Tree.Root);
+
+      Close (File);
+   exception
+   when E : SAL.Not_Implemented =>
+      Close (File);
+      Ada.Text_IO.Put_Line
+        (Ada.Text_IO.Standard_Error, "Print_Source not implemented: " & Ada.Exceptions.Exception_Message (E));
+   end Print_Source;
+
+end WisiToken_Grammar_Editing;
+--  Local Variables:
+--  ada-which-func-parse-size: 50000
+--  ada-case-strict: nil
+--  End:
diff --git a/wisitoken_grammar_editing.ads b/wisitoken_grammar_editing.ads
new file mode 100644
index 0000000000..7dea2a88ef
--- /dev/null
+++ b/wisitoken_grammar_editing.ads
@@ -0,0 +1,175 @@
+--  Abstract :
+--
+--  Utilities for editing wisitoken grammars.
+--
+--  Copyright (C) 2018 - 2022 Free Software Foundation, Inc.
+--
+--  This library is free software;  you can redistribute it and/or modify it
+--  under terms of the  GNU General Public License  as published by the Free
+--  Software  Foundation;  either version 3,  or (at your  option) any later
+--  version. This library is distributed in the hope that it will be useful,
+--  but WITHOUT ANY WARRANTY;  without even the implied warranty of MERCHAN-
+--  TABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+
+--  As a special exception under Section 7 of GPL version 3, you are granted
+--  additional permissions described in the GCC Runtime Library Exception,
+--  version 3.1, as published by the Free Software Foundation.
+
+pragma License (Modified_GPL);
+
+with WisiToken.Syntax_Trees.LR_Utils;
+with WisiToken.Syntax_Trees;
+with WisiToken_Grammar_Runtime;
+with Wisitoken_Grammar_Actions;
+package WisiToken_Grammar_Editing is
+   use all type WisiToken.Production_ID;
+   use all type WisiToken.Token_ID;
+   use all type WisiToken.Base_Identifier_Index;
+   use all type Wisitoken_Grammar_Actions.Token_Enum_ID;
+   use all type WisiToken.Syntax_Trees.Node_Label;
+   use all type WisiToken.Syntax_Trees.Node_Access;
+
+   type Identifier_Token
+     (Label : WisiToken.Syntax_Trees.Terminal_Label := WisiToken.Syntax_Trees.Terminal_Label'First)
+   is record
+      --  Either a syntax tree terminal node, or data to create a syntax
+      --  tree Virtual_Identifier node. Used to represent an identifier in a
+      --  grammar production.
+      case Label is
+      when Source_Terminal | Virtual_Terminal =>
+         Node : WisiToken.Syntax_Trees.Node_Access;
+
+      when Virtual_Identifier =>
+         ID         : WisiToken.Token_ID;
+         Identifier : WisiToken.Identifier_Index;
+      end case;
+   end record;
+
+   function Image (Item : in Identifier_Token; Tree : in WisiToken.Syntax_Trees.Tree) return String
+   is ((case Item.Label is
+        when Source_Terminal | Virtual_Terminal => WisiToken.Syntax_Trees.Trimmed_Image
+          (Tree.Get_Node_Index (Item.Node)) & ":",
+        when Virtual_Identifier => Trimmed_Image (Item.Identifier) & ";") &
+         Image (Item.ID, Wisitoken_Grammar_Actions.Descriptor));
+
+   Invalid_Identifier_Token : constant Identifier_Token :=
+     (Label => WisiToken.Syntax_Trees.Virtual_Terminal,
+      Node  => WisiToken.Syntax_Trees.Invalid_Node_Access);
+
+   function ID (Tree : in WisiToken.Syntax_Trees.Tree; Item : in Identifier_Token) return WisiToken.Token_ID
+   is (case Item.Label is
+       when Source_Terminal | Virtual_Terminal => Tree.ID (Item.Node),
+       when Virtual_Identifier => Item.ID);
+
+   function To_Identifier_Token
+     (Item : in WisiToken.Identifier_Index)
+     return Identifier_Token
+   is ((Virtual_Identifier, +IDENTIFIER_ID, Item));
+
+   function To_Identifier_Token
+     (Item : in WisiToken.Syntax_Trees.Valid_Node_Access;
+      Tree : in WisiToken.Syntax_Trees.Tree)
+     return Identifier_Token
+   with Pre => To_Token_Enum (Tree.ID (Item)) in rhs_element_ID | rhs_item_ID | IDENTIFIER_ID;
+
+   function Add_RHS_Group_Item
+     (Tree      : in out WisiToken.Syntax_Trees.Tree;
+      RHS_Index : in     Natural;
+      Content   : in     WisiToken.Syntax_Trees.Valid_Node_Access)
+     return WisiToken.Syntax_Trees.Valid_Node_Access
+   with Pre => Tree.ID (Content) = +rhs_alternative_list_ID,
+     Post => Tree.ID (Add_RHS_Group_Item'Result) = +rhs_group_item_ID;
+
+   function Add_RHS_Optional_Item
+     (Tree      : in out WisiToken.Syntax_Trees.Tree;
+      RHS_Index : in     Natural;
+      Content   : in     WisiToken.Syntax_Trees.Valid_Node_Access)
+     return WisiToken.Syntax_Trees.Valid_Node_Access
+   with Pre => To_Token_Enum (Tree.ID (Content)) in rhs_alternative_list_ID | IDENTIFIER_ID | STRING_LITERAL_2_ID and
+               RHS_Index <= 3,
+     Post => Tree.ID (Add_RHS_Optional_Item'Result) = +rhs_optional_item_ID;
+
+   function Add_Identifier_Token
+     (Tree : in out WisiToken.Syntax_Trees.Tree;
+      Item : in     Identifier_Token)
+     return WisiToken.Syntax_Trees.Valid_Node_Access;
+
+   function Add_RHS_Item
+     (Tree : in out WisiToken.Syntax_Trees.Tree;
+      Item : in     WisiToken.Syntax_Trees.Valid_Node_Access)
+     return WisiToken.Syntax_Trees.Valid_Node_Access
+   with Pre => To_Token_Enum (Tree.ID (Item)) in IDENTIFIER_ID | STRING_LITERAL_2_ID,
+     Post => Tree.ID (Add_RHS_Item'Result) = +rhs_item_ID;
+
+   function Add_RHS_Element
+     (Tree  : in out WisiToken.Syntax_Trees.Tree;
+      Item  : in     WisiToken.Syntax_Trees.Valid_Node_Access;
+      Label : in     Identifier_Token := Invalid_Identifier_Token)
+     return WisiToken.Syntax_Trees.Valid_Node_Access
+   with Pre => Tree.ID (Item) = +rhs_item_ID,
+     Post => Tree.Production_ID (Add_RHS_Element'Result) =
+             (+rhs_element_ID, (if Label = Invalid_Identifier_Token then 0 else 1));
+
+   function Empty_RHS_Item_List
+     (Tree : aliased in out WisiToken.Syntax_Trees.Tree)
+     return WisiToken.Syntax_Trees.LR_Utils.List;
+
+   function Empty_RHS_List
+     (Tree : aliased in out WisiToken.Syntax_Trees.Tree)
+     return WisiToken.Syntax_Trees.LR_Utils.List;
+
+   function Add_RHS
+     (Tree              : in out WisiToken.Syntax_Trees.Tree;
+      Item              : in     WisiToken.Syntax_Trees.Valid_Node_Access;
+      Auto_Token_Labels : in     Boolean;
+      Edited_Token_List : in     Boolean;
+      Post_Parse_Action : in     WisiToken.Syntax_Trees.Node_Access := WisiToken.Syntax_Trees.Invalid_Node_Access;
+      In_Parse_Action   : in     WisiToken.Syntax_Trees.Node_Access := WisiToken.Syntax_Trees.Invalid_Node_Access)
+     return WisiToken.Syntax_Trees.Valid_Node_Access
+   with Pre => Tree.ID (Item) = +rhs_item_list_ID and
+               (Post_Parse_Action = WisiToken.Syntax_Trees.Invalid_Node_Access or else
+                Tree.ID (Post_Parse_Action) = +ACTION_ID) and
+               (In_Parse_Action = WisiToken.Syntax_Trees.Invalid_Node_Access or else
+                Tree.ID (In_Parse_Action) = +ACTION_ID),
+     Post => Tree.ID (Add_RHS'Result) = +rhs_ID;
+
+   function Find_Declaration
+     (Data  : in     WisiToken_Grammar_Runtime.User_Data_Type;
+      Tree  : in out WisiToken.Syntax_Trees.Tree;
+      Name  : in     String)
+     return WisiToken.Syntax_Trees.Node_Access
+   with Post => Find_Declaration'Result = WisiToken.Syntax_Trees.Invalid_Node_Access or else
+                To_Token_Enum (Tree.ID (Find_Declaration'Result)) in declaration_ID | nonterminal_ID;
+   --  Return the node that declares Name, Invalid_Node_Access if none.
+
+   procedure Validate_Node
+     (Tree                : in     WisiToken.Syntax_Trees.Tree;
+      Node                : in     WisiToken.Syntax_Trees.Valid_Node_Access;
+      User_Data           : in out WisiToken.Syntax_Trees.User_Data_Type'Class;
+      Node_Error_Reported : in out Boolean);
+   --  Verify that all nodes match wisitoken_grammar.wy. Data must be of
+   --  type WisiToken_Grammar_Runtime.User_Data_Type. Uses
+   --  Data.EBNF_Allowed.
+   --
+   --  For use with Syntax_Trees.Validate_Tree.
+
+   procedure Translate_EBNF_To_BNF
+     (Tree : in out WisiToken.Syntax_Trees.Tree;
+      Data : in out WisiToken_Grammar_Runtime.User_Data_Type)
+   with Pre => Tree.Editable;
+   --  Edit EBNF nonterms, adding new nonterms as needed, resulting in
+   --  a BNF tree.
+   --
+   --  Tree.Root is updated as necessary; all streams must be cleared, or
+   --  they might be corrupted.
+   --
+   --  Generator.LR.*_Generate requires a BNF grammar.
+
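To make the rewrite concrete, a schematic example (illustrative only; the exact .wy notation and the names WisiToken generates for the new nonterminals are not taken from this change): an EBNF rule using repetition, such as

    declarations : declaration { declaration } ;

is turned into plain BNF by introducing a list nonterminal:

    declarations : declaration_list ;
    declaration_list
      : declaration
      | declaration_list declaration
      ;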
+   procedure Print_Source
+     (File_Name : in String;
+      Tree      : in WisiToken.Syntax_Trees.Tree;
+      Data      : in WisiToken_Grammar_Runtime.User_Data_Type);
+   --  Print the wisitoken grammar source represented by Tree to a new
+   --  file File_Name.
+
+end WisiToken_Grammar_Editing;
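A minimal usage sketch of the subprograms declared above (hypothetical, not part of this change set). It assumes Tree is a WisiToken.Syntax_Trees.Tree variable already holding a wisitoken_grammar parse, that Add_Identifier_Token yields a node whose ID satisfies Add_RHS_Item's precondition, and that the identifier index 5 is arbitrary.

    declare
       use WisiToken.Syntax_Trees;
       use WisiToken_Grammar_Editing;

       --  A Virtual_Identifier token; 5 is an arbitrary identifier index.
       Ident : constant Identifier_Token := To_Identifier_Token (5);

       Ident_Node : constant Valid_Node_Access := Add_Identifier_Token (Tree, Ident);
       Item       : constant Valid_Node_Access := Add_RHS_Item (Tree, Ident_Node);
       Element    : constant Valid_Node_Access := Add_RHS_Element (Tree, Item);
    begin
       --  Element would then be appended to an rhs_item_list (see
       --  Empty_RHS_Item_List) and the resulting list passed to Add_RHS
       --  to build a complete right hand side.
       null;
    end;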
diff --git a/wisitoken_grammar_main.adb b/wisitoken_grammar_main.adb
index cb91adf83e..d985bb4429 100644
--- a/wisitoken_grammar_main.adb
+++ b/wisitoken_grammar_main.adb
@@ -1,8 +1,8 @@
---  generated parser support file.
+--  generated parser support file. -*- buffer-read-only:t  -*-
 --  command line: wisitoken-bnf-generate.exe  --generate LALR Ada re2c wisitoken_grammar.wy
 --
 
---  Copyright (C) 2017 - 2019 Free Software Foundation, Inc.
+--  Copyright (C) 2017 - 2022 Free Software Foundation, Inc.
 --
 --  Author: Stephen Leake <stephe-leake@stephe-leake.org>
 --
@@ -21,642 +21,1015 @@
 --  You should have received a copy of the GNU General Public License
 --  along with GNU Emacs.  If not, see <http://www.gnu.org/licenses/>.
 
-with Wisitoken_Grammar_Actions; use Wisitoken_Grammar_Actions;
+with SAL;
 with WisiToken.Lexer.re2c;
 with wisitoken_grammar_re2c_c;
+with Wisitoken_Grammar_Actions; use Wisitoken_Grammar_Actions;
 package body Wisitoken_Grammar_Main is
 
+   function Is_Block_Delimited (ID : in WisiToken.Token_ID) return Boolean
+   is begin
+      case To_Token_Enum (ID) is
+      when
+         COMMENT_ID |
+         RAW_CODE_ID |
+         REGEXP_ID |
+         ACTION_ID |
+         STRING_LITERAL_1_ID |
+         STRING_LITERAL_2_ID => return True;
+      when others => return False;
+      end case;
+   end Is_Block_Delimited;
+
+   function Same_Block_Delimiters (ID : in WisiToken.Token_ID) return Boolean
+   is begin
+      case To_Token_Enum (ID) is
+      when COMMENT_ID => return False;
+      when RAW_CODE_ID => return False;
+      when REGEXP_ID => return False;
+      when ACTION_ID => return False;
+      when STRING_LITERAL_1_ID => return True;
+      when STRING_LITERAL_2_ID => return True;
+      when others => return False;
+      end case;
+   end Same_Block_Delimiters;
+
+   function Escape_Delimiter_Doubled (ID : in WisiToken.Token_ID) return Boolean
+   is begin
+      case To_Token_Enum (ID) is
+      when others => return False;
+      end case;
+   end Escape_Delimiter_Doubled;
+
+   function Start_Delimiter_Length (ID : in WisiToken.Token_ID) return Integer
+   is begin
+      case To_Token_Enum (ID) is
+      when COMMENT_ID => return 2;
+      when RAW_CODE_ID => return 2;
+      when REGEXP_ID => return 2;
+      when ACTION_ID => return 2;
+      when STRING_LITERAL_1_ID => return 1;
+      when STRING_LITERAL_2_ID => return 1;
+      when others => raise SAL.Programmer_Error; return 0;
+      end case;
+   end Start_Delimiter_Length;
+
+   function End_Delimiter_Length (ID : in WisiToken.Token_ID) return Integer
+   is begin
+      case To_Token_Enum (ID) is
+      when
+         COMMENT_ID |
+         STRING_LITERAL_1_ID |
+         STRING_LITERAL_2_ID => return 1;
+      when RAW_CODE_ID => return 2;
+      when REGEXP_ID => return 2;
+      when ACTION_ID => return 2;
+      when others => raise SAL.Programmer_Error; return 0;
+      end case;
+   end End_Delimiter_Length;
+
+   function New_Line_Is_End_Delimiter (ID : in WisiToken.Token_ID) return Boolean
+   is begin
+      return
+        (case To_Token_Enum (ID) is
+         when COMMENT_ID => True,
+         when RAW_CODE_ID => False,
+         when REGEXP_ID => False,
+         when ACTION_ID => False,
+         when STRING_LITERAL_1_ID => True,
+         when STRING_LITERAL_2_ID => True,
+         when others => raise SAL.Programmer_Error);
+   end New_Line_Is_End_Delimiter;
+
+   function Find_End_Delimiter
+     (Source      : in WisiToken.Lexer.Source;
+      ID          : in WisiToken.Token_ID;
+      Token_Start : in WisiToken.Buffer_Pos)
+     return WisiToken.Buffer_Pos
+   is begin
+      return
+        (case To_Token_Enum (ID) is
+         when COMMENT_ID => WisiToken.Lexer.Find_New_Line (Source, Token_Start),
+         when RAW_CODE_ID => WisiToken.Lexer.Find_String (Source, Token_Start, "}%"),
+         when REGEXP_ID => WisiToken.Lexer.Find_String (Source, Token_Start, "]%"),
+         when ACTION_ID => WisiToken.Lexer.Find_String (Source, Token_Start, ")%"),
+         when STRING_LITERAL_1_ID => WisiToken.Lexer.Find_String_Or_New_Line (Source, Token_Start, """"),
+         when STRING_LITERAL_2_ID => WisiToken.Lexer.Find_String_Or_New_Line (Source, Token_Start, """"),
+         when others => raise SAL.Programmer_Error);
+   end Find_End_Delimiter;
+
+   function Find_Scan_End
+     (Source   : in WisiToken.Lexer.Source;
+      ID       : in WisiToken.Token_ID;
+      Region   : in WisiToken.Buffer_Region;
+      Inserted : in Boolean;
+      Start    : in Boolean)
+     return WisiToken.Buffer_Pos
+   is
+      use WisiToken;
+   begin
+      return
+        (case To_Token_Enum (ID) is
+         when COMMENT_ID =>
+         (if Inserted then Region.Last
+          elsif Start then Region.Last
+          else Lexer.Find_New_Line (Source, Region.Last)),
+         when RAW_CODE_ID =>
+         (if Inserted then Region.Last
+          elsif Start then Region.Last
+          else Lexer.Find_String (Source, Region.First, "}%")),
+         when REGEXP_ID =>
+         (if Inserted then Region.Last
+          elsif Start then Region.Last
+          else Lexer.Find_String (Source, Region.First, "]%")),
+         when ACTION_ID =>
+         (if Inserted then Region.Last
+          elsif Start then Region.Last
+          else Lexer.Find_String (Source, Region.First, ")%")),
+         when STRING_LITERAL_1_ID => Lexer.Find_New_Line (Source, Region.Last),
+         when STRING_LITERAL_2_ID => Lexer.Find_New_Line (Source, Region.Last),
+         when others => raise SAL.Programmer_Error);
+   end Find_Scan_End;
+
+   function Contains_End_Delimiter
+     (Source : in WisiToken.Lexer.Source;
+      ID     : in WisiToken.Token_ID;
+      Region : in WisiToken.Buffer_Region)
+     return WisiToken.Base_Buffer_Pos
+   is
+      use WisiToken;
+   begin
+      return
+        (case To_Token_Enum (ID) is
+         when COMMENT_ID => Lexer.Find_New_Line (Source, Region),
+         when RAW_CODE_ID => Lexer.Find_String_Or_New_Line (Source, Region, "}%"),
+         when REGEXP_ID => Lexer.Find_String_Or_New_Line (Source, Region, "]%"),
+         when ACTION_ID => Lexer.Find_String_Or_New_Line (Source, Region, ")%"),
+         when STRING_LITERAL_1_ID => Lexer.Find_String_Or_New_Line (Source, Region, """"),
+         when STRING_LITERAL_2_ID => Lexer.Find_String_Or_New_Line (Source, Region, "'"),
+         when others => raise SAL.Programmer_Error);
+   end Contains_End_Delimiter;
+
+   function Line_Begin_Char_Pos
+    (Source : in WisiToken.Lexer.Source;
+     Token  : in WisiToken.Lexer.Token;
+     Line   : in WisiToken.Line_Number_Type)
+   return WisiToken.Buffer_Pos
+   is
+      use all type WisiToken.Base_Buffer_Pos;
+   begin
+      case To_Token_Enum (Token.ID) is
+      when NEW_LINE_ID => return Token.Char_Region.Last + 1;
+      when COMMENT_ID => return Token.Char_Region.Last + 1;
+      when RAW_CODE_ID => return WisiToken.Lexer.Line_Begin_Char_Pos (Source, Token, Line);
+      when REGEXP_ID => return WisiToken.Lexer.Line_Begin_Char_Pos (Source, Token, Line);
+      when ACTION_ID => return WisiToken.Lexer.Line_Begin_Char_Pos (Source, Token, Line);
+      when others => raise SAL.Programmer_Error;
+      end case;
+   end Line_Begin_Char_Pos;
+
+   function Can_Contain_New_Line (ID : in WisiToken.Token_ID) return Boolean
+   is begin
+      case To_Token_Enum (ID) is
+      when NEW_LINE_ID => return True;
+      when COMMENT_ID => return True;
+      when RAW_CODE_ID => return True;
+      when REGEXP_ID => return True;
+      when ACTION_ID => return True;
+      when others => return False;
+      end case;
+   end Can_Contain_New_Line;
+
+   function Terminated_By_New_Line (ID : in WisiToken.Token_ID) return Boolean
+   is begin
+      case To_Token_Enum (ID) is
+      when NEW_LINE_ID => return True;
+      when COMMENT_ID => return True;
+      when STRING_LITERAL_1_ID => return True;
+      when STRING_LITERAL_2_ID => return True;
+      when others => return False;
+      end case;
+   end Terminated_By_New_Line;
+
    package Lexer is new WisiToken.Lexer.re2c
      (wisitoken_grammar_re2c_c.New_Lexer,
       wisitoken_grammar_re2c_c.Free_Lexer,
       wisitoken_grammar_re2c_c.Reset_Lexer,
-      wisitoken_grammar_re2c_c.Next_Token);
+      wisitoken_grammar_re2c_c.Set_Verbosity,
+      wisitoken_grammar_re2c_c.Set_Position,
+      wisitoken_grammar_re2c_c.Next_Token,
+      Is_Block_Delimited,
+      Same_Block_Delimiters,
+      Escape_Delimiter_Doubled,
+      Start_Delimiter_Length,
+      End_Delimiter_Length,
+      New_Line_Is_End_Delimiter,
+      Find_End_Delimiter,
+      Contains_End_Delimiter,
+      Find_Scan_End,
+      Line_Begin_Char_Pos,
+      Can_Contain_New_Line,
+      Terminated_By_New_Line);
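The instantiation above passes the per-token-ID query functions (Is_Block_Delimited through Terminated_By_New_Line) to WisiToken.Lexer.re2c as generic actual subprograms; judging from their names and bodies, the lexer consults them so that rescans of edited text know where block tokens (comments, string literals, and the %{...}%, %[...]%, %(...)% regions) end. Below is a minimal sketch of that generic-formal-subprogram pattern only; Gen_Lexer_Sketch and Describe are invented names, not WisiToken's API.

    --  Hypothetical sketch of the generic-formal-subprogram pattern used by
    --  the instantiation above; not WisiToken's actual lexer interface.
    generic
       type Token_ID is range <>;
       with function Is_Block_Delimited (ID : in Token_ID) return Boolean;
       with function Start_Delimiter_Length (ID : in Token_ID) return Integer;
    package Gen_Lexer_Sketch is
       --  Report how a scanner would treat the given token kind.
       function Describe (ID : in Token_ID) return String;
    end Gen_Lexer_Sketch;

    package body Gen_Lexer_Sketch is
       function Describe (ID : in Token_ID) return String
       is (if Is_Block_Delimited (ID)
           then "block token; start delimiter length" &
              Integer'Image (Start_Delimiter_Length (ID))
           else "single-line token");
    end Gen_Lexer_Sketch;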
 
-   procedure Create_Parser
-     (Parser                         :    out WisiToken.Parse.LR.Parser_No_Recover.Parser;
-      Trace                        : not null access WisiToken.Trace'Class;
-      User_Data                    : in     WisiToken.Syntax_Trees.User_Data_Access)
+   function Create_Parse_Table
+     return WisiToken.Parse.LR.Parse_Table_Ptr
    is
       use WisiToken.Parse.LR;
       Table : constant Parse_Table_Ptr := new Parse_Table
         (State_First       => 0,
-         State_Last        => 102,
+         State_Last        => 145,
          First_Terminal    => 3,
-         Last_Terminal     => 36,
-         First_Nonterminal => 37,
-         Last_Nonterminal  => 56);
+         Last_Terminal     => 42,
+         First_Nonterminal => 43,
+         Last_Nonterminal  => 66);
    begin
       declare
          procedure Subr_1
          is begin
             Table.States (0).Action_List.Set_Capacity (2);
-            Add_Action (Table.States (0), 23, (38, 0), 1);
-            Add_Action (Table.States (0), 33, (43, 0), 2);
+            Add_Action (Table.States (0), 30, (48, 0), 1);
+            Add_Action (Table.States (0), 39, (53, 0), 2);
             Table.States (0).Goto_List.Set_Capacity (4);
-            Add_Goto (Table.States (0), 38, 3);
-            Add_Goto (Table.States (0), 43, 4);
-            Add_Goto (Table.States (0), 55, 5);
-            Add_Goto (Table.States (0), 56, 6);
-            Table.States (1).Action_List.Set_Capacity (7);
-            Add_Action (Table.States (1), 3, (38, 1), 7);
-            Add_Action (Table.States (1), 4, (38, 5), 8);
-            Add_Action (Table.States (1), 5, (38, 4), 9);
-            Add_Action (Table.States (1), 6, (39, 0), 10);
-            Add_Action (Table.States (1), 7, (39, 1), 11);
-            Add_Action (Table.States (1), 8, (39, 2), 12);
-            Add_Action (Table.States (1), 33, (38, 2), 13);
-            Table.States (1).Goto_List.Set_Capacity (1);
-            Add_Goto (Table.States (1), 39, 14);
+            Add_Goto (Table.States (0), 48, 3);
+            Add_Goto (Table.States (0), 53, 4);
+            Add_Goto (Table.States (0), 65, 5);
+            Add_Goto (Table.States (0), 66, 6);
+            Table.States (1).Action_List.Set_Capacity (10);
+            Add_Action (Table.States (1), 4, (48, 6), 7);
+            Add_Action (Table.States (1), 5, (48, 7), 8);
+            Add_Action (Table.States (1), 6, (48, 8), 9);
+            Add_Action (Table.States (1), 7, (48, 15), 10);
+            Add_Action (Table.States (1), 8, (48, 13), 11);
+            Add_Action (Table.States (1), 9, (48, 11), 12);
+            Add_Action (Table.States (1), 11, (48, 5), 13);
+            Add_Action (Table.States (1), 12, (48, 2), 14);
+            Add_Action (Table.States (1), 16, (48, 0), 15);
+            Add_Action (Table.States (1), 39, (48, 9), 16);
             Table.States (2).Action_List.Set_Capacity (2);
-            Add_Action (Table.States (2), 13, (43, 0), 15);
-            Add_Action (Table.States (2), 14, (43, 1), 16);
+            Add_Action (Table.States (2), 21, (53, 0), 17);
+            Add_Action (Table.States (2), 22, (53, 1), 18);
             Table.States (3).Action_List.Set_Capacity (3);
-            Add_Action (Table.States (3), (23, 33, 36), (55, 0),  1, null, null);
+            Add_Action (Table.States (3), (30, 39, 42), (65, 0),  1);
             Table.States (4).Action_List.Set_Capacity (3);
-            Add_Action (Table.States (4), (23, 33, 36), (55, 1),  1, null, null);
+            Add_Action (Table.States (4), (30, 39, 42), (65, 1),  1);
             Table.States (5).Action_List.Set_Capacity (3);
-            Add_Action (Table.States (5), (23, 33, 36), (56, 0),  1, null, null);
+            Add_Action (Table.States (5), (30, 39, 42), (66, 0),  1);
             Table.States (6).Action_List.Set_Capacity (3);
-            Add_Action (Table.States (6), 23, (38, 0), 1);
-            Add_Action (Table.States (6), 33, (43, 0), 2);
-            Add_Action (Table.States (6), 36, Accept_It, (37, 0),  1, null, null);
+            Add_Action (Table.States (6), 30, (48, 0), 1);
+            Add_Action (Table.States (6), 39, (53, 0), 2);
+            Add_Action (Table.States (6), 42, Accept_It, (43, 0),  1);
             Table.States (6).Goto_List.Set_Capacity (3);
-            Add_Goto (Table.States (6), 38, 3);
-            Add_Goto (Table.States (6), 43, 4);
-            Add_Goto (Table.States (6), 55, 17);
+            Add_Goto (Table.States (6), 48, 3);
+            Add_Goto (Table.States (6), 53, 4);
+            Add_Goto (Table.States (6), 65, 19);
             Table.States (7).Action_List.Set_Capacity (1);
-            Add_Action (Table.States (7), 33, (40, 0), 18);
+            Add_Action (Table.States (7), 39, (49, 0), 20);
             Table.States (7).Goto_List.Set_Capacity (1);
-            Add_Goto (Table.States (7), 40, 19);
-            Table.States (8).Action_List.Set_Capacity (1);
-            Add_Action (Table.States (8), 5, (38, 5), 20);
-            Table.States (9).Action_List.Set_Capacity (1);
-            Add_Action (Table.States (9), 33, (38, 4), 21);
+            Add_Goto (Table.States (7), 49, 21);
+            Table.States (8).Action_List.Set_Capacity (4);
+            Add_Action (Table.States (8), 3, (45, 2), 22);
+            Add_Action (Table.States (8), 14, (45, 1), 23);
+            Add_Action (Table.States (8), 15, (45, 0), 24);
+            Add_Action (Table.States (8), 39, (45, 3), 25);
+            Table.States (8).Goto_List.Set_Capacity (2);
+            Add_Goto (Table.States (8), 45, 26);
+            Add_Goto (Table.States (8), 46, 27);
+            Table.States (9).Action_List.Set_Capacity (4);
+            Add_Action (Table.States (9), 3, (45, 2), 22);
+            Add_Action (Table.States (9), 14, (45, 1), 23);
+            Add_Action (Table.States (9), 15, (45, 0), 24);
+            Add_Action (Table.States (9), 39, (45, 3), 25);
+            Table.States (9).Goto_List.Set_Capacity (2);
+            Add_Goto (Table.States (9), 45, 26);
+            Add_Goto (Table.States (9), 46, 28);
             Table.States (10).Action_List.Set_Capacity (1);
-            Add_Action (Table.States (10), (1 =>  33), (39, 0),  1, null, null);
+            Add_Action (Table.States (10), 9, (48, 15), 29);
             Table.States (11).Action_List.Set_Capacity (1);
-            Add_Action (Table.States (11), 21, (39, 1), 22);
+            Add_Action (Table.States (11), 39, (48, 13), 30);
             Table.States (12).Action_List.Set_Capacity (1);
-            Add_Action (Table.States (12), 21, (39, 2), 23);
-            Table.States (13).Action_List.Set_Capacity (13);
-            Add_Action (Table.States (13), 8, (42, 10), 24);
-            Add_Action (Table.States (13), 10, (42, 5), 25);
-            Add_Action (Table.States (13), 15, (42, 0), 26);
-            Add_Action (Table.States (13), 16, (42, 2), 27);
-            Add_Action (Table.States (13), 20, (42, 3), 28);
-            Add_Action (Table.States (13), 23, Reduce, (38, 3),  2, declaration_3'Access, null);
-            Add_Action (Table.States (13), 28, (42, 6), 29);
-            Add_Action (Table.States (13), 30, (42, 7), 30);
-            Add_Action (Table.States (13), 32, (42, 4), 31);
-            Add_Action (Table.States (13), 33, (42, 1), 32);
-            Add_Conflict (Table.States (13), 33, (38, 3),  2, declaration_3'Access, null);
-            Add_Action (Table.States (13), 34, (42, 8), 33);
-            Add_Action (Table.States (13), 35, (42, 9), 34);
-            Add_Action (Table.States (13), 36, Reduce, (38, 3),  2, declaration_3'Access, null);
-            Table.States (13).Goto_List.Set_Capacity (2);
-            Add_Goto (Table.States (13), 41, 35);
-            Add_Goto (Table.States (13), 42, 36);
+            Add_Action (Table.States (12), 39, (48, 11), 31);
+            Table.States (13).Action_List.Set_Capacity (1);
+            Add_Action (Table.States (13), 39, (48, 5), 32);
             Table.States (14).Action_List.Set_Capacity (1);
-            Add_Action (Table.States (14), 33, (38, 0), 37);
-            Table.States (15).Action_List.Set_Capacity (10);
-            Add_Action (Table.States (15), 12, Reduce, (46, 0),  0, null, null);
-            Add_Action (Table.States (15), 18, (53, 0), 38);
-            Add_Action (Table.States (15), 19, (52, 0), 39);
-            Add_Action (Table.States (15), 20, (51, 0), 40);
-            Add_Action (Table.States (15), 21, (47, 0), 41);
-            Add_Action (Table.States (15), 23, Reduce, (46, 0),  0, null, null);
-            Add_Action (Table.States (15), 29, Reduce, (46, 0),  0, null, null);
-            Add_Action (Table.States (15), 33, (48, 1), 42);
-            Add_Conflict (Table.States (15), 33, (46, 0),  0, null, null);
-            Add_Action (Table.States (15), 35, (50, 1), 43);
-            Add_Action (Table.States (15), 36, Reduce, (46, 0),  0, null, null);
-            Table.States (15).Goto_List.Set_Capacity (9);
-            Add_Goto (Table.States (15), 45, 44);
-            Add_Goto (Table.States (15), 46, 45);
-            Add_Goto (Table.States (15), 47, 46);
-            Add_Goto (Table.States (15), 48, 47);
-            Add_Goto (Table.States (15), 49, 48);
-            Add_Goto (Table.States (15), 50, 49);
-            Add_Goto (Table.States (15), 51, 50);
-            Add_Goto (Table.States (15), 52, 51);
-            Add_Goto (Table.States (15), 53, 52);
-            Table.States (16).Action_List.Set_Capacity (10);
-            Add_Action (Table.States (16), 12, Reduce, (46, 0),  0, null, null);
-            Add_Action (Table.States (16), 18, (53, 0), 38);
-            Add_Action (Table.States (16), 19, (52, 0), 39);
-            Add_Action (Table.States (16), 20, (51, 0), 40);
-            Add_Action (Table.States (16), 21, (47, 0), 41);
-            Add_Action (Table.States (16), 23, Reduce, (46, 0),  0, null, null);
-            Add_Action (Table.States (16), 29, Reduce, (46, 0),  0, null, null);
-            Add_Action (Table.States (16), 33, (48, 1), 42);
-            Add_Conflict (Table.States (16), 33, (46, 0),  0, null, null);
-            Add_Action (Table.States (16), 35, (50, 1), 43);
-            Add_Action (Table.States (16), 36, Reduce, (46, 0),  0, null, null);
-            Table.States (16).Goto_List.Set_Capacity (9);
-            Add_Goto (Table.States (16), 45, 53);
-            Add_Goto (Table.States (16), 46, 45);
-            Add_Goto (Table.States (16), 47, 46);
-            Add_Goto (Table.States (16), 48, 47);
-            Add_Goto (Table.States (16), 49, 48);
-            Add_Goto (Table.States (16), 50, 49);
-            Add_Goto (Table.States (16), 51, 50);
-            Add_Goto (Table.States (16), 52, 51);
-            Add_Goto (Table.States (16), 53, 52);
-            Table.States (17).Action_List.Set_Capacity (3);
-            Add_Action (Table.States (17), (23, 33, 36), (56, 1),  2, null, null);
-            Table.States (18).Action_List.Set_Capacity (2);
-            Add_Action (Table.States (18), (9, 33), (40, 0),  1, null, null);
-            Table.States (19).Action_List.Set_Capacity (2);
-            Add_Action (Table.States (19), 9, (38, 1), 54);
-            Add_Action (Table.States (19), 33, (40, 1), 55);
-            Table.States (20).Action_List.Set_Capacity (3);
-            Add_Action (Table.States (20), (23, 33, 36), (38, 5),  3, declaration_5'Access, null);
-            Table.States (21).Action_List.Set_Capacity (1);
-            Add_Action (Table.States (21), 16, (38, 4), 56);
+            Add_Action (Table.States (14), 28, (48, 2), 33);
+            Table.States (15).Action_List.Set_Capacity (1);
+            Add_Action (Table.States (15), 28, (48, 0), 34);
+            Table.States (16).Action_List.Set_Capacity (7);
+            Add_Action (Table.States (16), 18, (44, 0), 35);
+            Add_Action (Table.States (16), 30, Reduce, (48, 10),  2);
+            Add_Action (Table.States (16), 38, (51, 1), 36);
+            Add_Action (Table.States (16), 39, (51, 0), 37);
+            Add_Conflict (Table.States (16), 39, (48, 10),  2);
+            Add_Action (Table.States (16), 40, (44, 1), 38);
+            Add_Action (Table.States (16), 41, (44, 2), 39);
+            Add_Action (Table.States (16), 42, Reduce, (48, 10),  2);
+            Table.States (16).Goto_List.Set_Capacity (3);
+            Add_Goto (Table.States (16), 44, 40);
+            Add_Goto (Table.States (16), 51, 41);
+            Add_Goto (Table.States (16), 52, 42);
+            Table.States (17).Action_List.Set_Capacity (10);
+            Add_Action (Table.States (17), 20, Reduce, (56, 0),  0);
+            Add_Action (Table.States (17), 25, (63, 0), 43);
+            Add_Action (Table.States (17), 26, (62, 0), 44);
+            Add_Action (Table.States (17), 27, (61, 0), 45);
+            Add_Action (Table.States (17), 28, (57, 0), 46);
+            Add_Action (Table.States (17), 30, Reduce, (56, 0),  0);
+            Add_Action (Table.States (17), 36, Reduce, (56, 0),  0);
+            Add_Action (Table.States (17), 39, (58, 1), 47);
+            Add_Conflict (Table.States (17), 39, (56, 0),  0);
+            Add_Action (Table.States (17), 41, (60, 1), 48);
+            Add_Action (Table.States (17), 42, Reduce, (56, 0),  0);
+            Table.States (17).Goto_List.Set_Capacity (9);
+            Add_Goto (Table.States (17), 55, 49);
+            Add_Goto (Table.States (17), 56, 50);
+            Add_Goto (Table.States (17), 57, 51);
+            Add_Goto (Table.States (17), 58, 52);
+            Add_Goto (Table.States (17), 59, 53);
+            Add_Goto (Table.States (17), 60, 54);
+            Add_Goto (Table.States (17), 61, 55);
+            Add_Goto (Table.States (17), 62, 56);
+            Add_Goto (Table.States (17), 63, 57);
+            Table.States (18).Action_List.Set_Capacity (10);
+            Add_Action (Table.States (18), 20, Reduce, (56, 0),  0);
+            Add_Action (Table.States (18), 25, (63, 0), 43);
+            Add_Action (Table.States (18), 26, (62, 0), 44);
+            Add_Action (Table.States (18), 27, (61, 0), 45);
+            Add_Action (Table.States (18), 28, (57, 0), 46);
+            Add_Action (Table.States (18), 30, Reduce, (56, 0),  0);
+            Add_Action (Table.States (18), 36, Reduce, (56, 0),  0);
+            Add_Action (Table.States (18), 39, (58, 1), 47);
+            Add_Conflict (Table.States (18), 39, (56, 0),  0);
+            Add_Action (Table.States (18), 41, (60, 1), 48);
+            Add_Action (Table.States (18), 42, Reduce, (56, 0),  0);
+            Table.States (18).Goto_List.Set_Capacity (9);
+            Add_Goto (Table.States (18), 55, 58);
+            Add_Goto (Table.States (18), 56, 50);
+            Add_Goto (Table.States (18), 57, 51);
+            Add_Goto (Table.States (18), 58, 52);
+            Add_Goto (Table.States (18), 59, 53);
+            Add_Goto (Table.States (18), 60, 54);
+            Add_Goto (Table.States (18), 61, 55);
+            Add_Goto (Table.States (18), 62, 56);
+            Add_Goto (Table.States (18), 63, 57);
+            Table.States (19).Action_List.Set_Capacity (3);
+            Add_Action (Table.States (19), (30, 39, 42), (66, 1),  2);
+            Table.States (20).Action_List.Set_Capacity (2);
+            Add_Action (Table.States (20), (17, 39), (49, 0),  1);
+            Table.States (21).Action_List.Set_Capacity (2);
+            Add_Action (Table.States (21), 17, (48, 6), 59);
+            Add_Action (Table.States (21), 39, (49, 1), 60);
             Table.States (22).Action_List.Set_Capacity (1);
-            Add_Action (Table.States (22), 33, (39, 1), 57);
+            Add_Action (Table.States (22), 39, (45, 2), 61);
             Table.States (23).Action_List.Set_Capacity (1);
-            Add_Action (Table.States (23), 33, (39, 2), 58);
-            Table.States (24).Action_List.Set_Capacity (13);
-            Add_Action (Table.States (24), (8, 10, 15, 16, 20, 23, 28, 30, 32, 33, 34, 35, 36), (42, 10),  1, null,
-            null);
-            Table.States (25).Action_List.Set_Capacity (13);
-            Add_Action (Table.States (25), (8, 10, 15, 16, 20, 23, 28, 30, 32, 33, 34, 35, 36), (42, 5),  1, null,
-            null);
-            Table.States (26).Action_List.Set_Capacity (13);
-            Add_Action (Table.States (26), (8, 10, 15, 16, 20, 23, 28, 30, 32, 33, 34, 35, 36), (42, 0),  1, null,
-            null);
-            Table.States (27).Action_List.Set_Capacity (13);
-            Add_Action (Table.States (27), (8, 10, 15, 16, 20, 23, 28, 30, 32, 33, 34, 35, 36), (42, 2),  1, null,
-            null);
-            Table.States (28).Action_List.Set_Capacity (13);
-            Add_Action (Table.States (28), (8, 10, 15, 16, 20, 23, 28, 30, 32, 33, 34, 35, 36), (42, 3),  1, null,
-            null);
-            Table.States (29).Action_List.Set_Capacity (13);
-            Add_Action (Table.States (29), (8, 10, 15, 16, 20, 23, 28, 30, 32, 33, 34, 35, 36), (42, 6),  1, null,
-            null);
-            Table.States (30).Action_List.Set_Capacity (13);
-            Add_Action (Table.States (30), (8, 10, 15, 16, 20, 23, 28, 30, 32, 33, 34, 35, 36), (42, 7),  1, null,
-            null);
-            Table.States (31).Action_List.Set_Capacity (13);
-            Add_Action (Table.States (31), (8, 10, 15, 16, 20, 23, 28, 30, 32, 33, 34, 35, 36), (42, 4),  1, null,
-            null);
-            Table.States (32).Action_List.Set_Capacity (13);
-            Add_Action (Table.States (32), (8, 10, 15, 16, 20, 23, 28, 30, 32, 33, 34, 35, 36), (42, 1),  1, null,
-            null);
-            Table.States (33).Action_List.Set_Capacity (13);
-            Add_Action (Table.States (33), (8, 10, 15, 16, 20, 23, 28, 30, 32, 33, 34, 35, 36), (42, 8),  1, null,
-            null);
-            Table.States (34).Action_List.Set_Capacity (13);
-            Add_Action (Table.States (34), (8, 10, 15, 16, 20, 23, 28, 30, 32, 33, 34, 35, 36), (42, 9),  1, null,
-            null);
-            Table.States (35).Action_List.Set_Capacity (13);
-            Add_Action (Table.States (35), 8, (42, 10), 24);
-            Add_Action (Table.States (35), 10, (42, 5), 25);
-            Add_Action (Table.States (35), 15, (42, 0), 26);
-            Add_Action (Table.States (35), 16, (42, 2), 27);
-            Add_Action (Table.States (35), 20, (42, 3), 28);
-            Add_Action (Table.States (35), 23, Reduce, (38, 2),  3, declaration_2'Access, null);
-            Add_Action (Table.States (35), 28, (42, 6), 29);
-            Add_Action (Table.States (35), 30, (42, 7), 30);
-            Add_Action (Table.States (35), 32, (42, 4), 31);
-            Add_Action (Table.States (35), 33, (42, 1), 32);
-            Add_Conflict (Table.States (35), 33, (38, 2),  3, declaration_2'Access, null);
-            Add_Action (Table.States (35), 34, (42, 8), 33);
-            Add_Action (Table.States (35), 35, (42, 9), 34);
-            Add_Action (Table.States (35), 36, Reduce, (38, 2),  3, declaration_2'Access, null);
-            Table.States (35).Goto_List.Set_Capacity (1);
-            Add_Goto (Table.States (35), 42, 59);
-            Table.States (36).Action_List.Set_Capacity (13);
-            Add_Action (Table.States (36), (8, 10, 15, 16, 20, 23, 28, 30, 32, 33, 34, 35, 36), (41, 0),  1, null,
-            null);
-            Table.States (37).Action_List.Set_Capacity (11);
-            Add_Action (Table.States (37), 8, (42, 10), 24);
-            Add_Action (Table.States (37), 10, (42, 5), 25);
-            Add_Action (Table.States (37), 15, (42, 0), 26);
-            Add_Action (Table.States (37), 16, (42, 2), 27);
-            Add_Action (Table.States (37), 20, (42, 3), 28);
-            Add_Action (Table.States (37), 28, (42, 6), 29);
-            Add_Action (Table.States (37), 30, (42, 7), 30);
-            Add_Action (Table.States (37), 32, (42, 4), 31);
-            Add_Action (Table.States (37), 33, (42, 1), 32);
-            Add_Action (Table.States (37), 34, (42, 8), 33);
-            Add_Action (Table.States (37), 35, (42, 9), 34);
-            Table.States (37).Goto_List.Set_Capacity (2);
-            Add_Goto (Table.States (37), 41, 60);
-            Add_Goto (Table.States (37), 42, 36);
-            Table.States (38).Action_List.Set_Capacity (6);
-            Add_Action (Table.States (38), 18, (53, 0), 38);
-            Add_Action (Table.States (38), 19, (52, 0), 39);
-            Add_Action (Table.States (38), 20, (51, 0), 40);
-            Add_Action (Table.States (38), 21, (47, 0), 41);
-            Add_Action (Table.States (38), 33, (48, 1), 42);
-            Add_Action (Table.States (38), 35, (50, 1), 43);
-            Table.States (38).Goto_List.Set_Capacity (8);
-            Add_Goto (Table.States (38), 47, 46);
-            Add_Goto (Table.States (38), 48, 47);
-            Add_Goto (Table.States (38), 49, 61);
-            Add_Goto (Table.States (38), 50, 49);
-            Add_Goto (Table.States (38), 51, 50);
-            Add_Goto (Table.States (38), 52, 51);
-            Add_Goto (Table.States (38), 53, 52);
-            Add_Goto (Table.States (38), 54, 62);
-            Table.States (39).Action_List.Set_Capacity (6);
-            Add_Action (Table.States (39), 18, (53, 0), 38);
-            Add_Action (Table.States (39), 19, (52, 0), 39);
-            Add_Action (Table.States (39), 20, (51, 0), 40);
-            Add_Action (Table.States (39), 21, (47, 0), 41);
-            Add_Action (Table.States (39), 33, (48, 1), 42);
-            Add_Action (Table.States (39), 35, (50, 1), 43);
-            Table.States (39).Goto_List.Set_Capacity (8);
-            Add_Goto (Table.States (39), 47, 46);
-            Add_Goto (Table.States (39), 48, 47);
-            Add_Goto (Table.States (39), 49, 61);
-            Add_Goto (Table.States (39), 50, 49);
-            Add_Goto (Table.States (39), 51, 50);
-            Add_Goto (Table.States (39), 52, 51);
-            Add_Goto (Table.States (39), 53, 52);
-            Add_Goto (Table.States (39), 54, 63);
-            Table.States (40).Action_List.Set_Capacity (6);
-            Add_Action (Table.States (40), 18, (53, 0), 38);
-            Add_Action (Table.States (40), 19, (52, 0), 39);
-            Add_Action (Table.States (40), 20, (51, 0), 40);
-            Add_Action (Table.States (40), 21, (47, 0), 41);
-            Add_Action (Table.States (40), 33, (48, 1), 42);
-            Add_Action (Table.States (40), 35, (50, 1), 43);
-            Table.States (40).Goto_List.Set_Capacity (8);
-            Add_Goto (Table.States (40), 47, 46);
-            Add_Goto (Table.States (40), 48, 47);
-            Add_Goto (Table.States (40), 49, 61);
-            Add_Goto (Table.States (40), 50, 49);
-            Add_Goto (Table.States (40), 51, 50);
-            Add_Goto (Table.States (40), 52, 51);
-            Add_Goto (Table.States (40), 53, 52);
-            Add_Goto (Table.States (40), 54, 64);
-            Table.States (41).Action_List.Set_Capacity (1);
-            Add_Action (Table.States (41), 33, (47, 0), 65);
-            Table.States (42).Action_List.Set_Capacity (18);
-            Add_Action (Table.States (42), 11, Reduce, (50, 0),  1, null, null);
-            Add_Action (Table.States (42), 12, Reduce, (50, 0),  1, null, null);
-            Add_Action (Table.States (42), 16, (48, 1), 66);
-            Add_Action (Table.States (42), 18, Reduce, (50, 0),  1, null, null);
-            Add_Action (Table.States (42), 19, Reduce, (50, 0),  1, null, null);
-            Add_Action (Table.States (42), 20, Reduce, (50, 0),  1, null, null);
-            Add_Action (Table.States (42), 21, Reduce, (50, 0),  1, null, null);
-            Add_Action (Table.States (42), 23, Reduce, (50, 0),  1, null, null);
-            Add_Action (Table.States (42), 24, (53, 4), 67);
-            Add_Action (Table.States (42), 25, (52, 2), 68);
-            Add_Action (Table.States (42), 26, Reduce, (50, 0),  1, null, null);
-            Add_Action (Table.States (42), 27, Reduce, (50, 0),  1, null, null);
-            Add_Action (Table.States (42), 28, Reduce, (50, 0),  1, null, null);
-            Add_Action (Table.States (42), 29, Reduce, (50, 0),  1, null, null);
-            Add_Action (Table.States (42), 31, (53, 5), 69);
-            Add_Action (Table.States (42), 33, Reduce, (50, 0),  1, null, null);
-            Add_Action (Table.States (42), 35, Reduce, (50, 0),  1, null, null);
-            Add_Action (Table.States (42), 36, Reduce, (50, 0),  1, null, null);
-            Table.States (43).Action_List.Set_Capacity (15);
-            Add_Action (Table.States (43), 11, Reduce, (50, 1),  1, rhs_item_1'Access, null);
-            Add_Action (Table.States (43), 12, Reduce, (50, 1),  1, rhs_item_1'Access, null);
-            Add_Action (Table.States (43), 18, Reduce, (50, 1),  1, rhs_item_1'Access, null);
-            Add_Action (Table.States (43), 19, Reduce, (50, 1),  1, rhs_item_1'Access, null);
-            Add_Action (Table.States (43), 20, Reduce, (50, 1),  1, rhs_item_1'Access, null);
-            Add_Action (Table.States (43), 21, Reduce, (50, 1),  1, rhs_item_1'Access, null);
-            Add_Action (Table.States (43), 23, Reduce, (50, 1),  1, rhs_item_1'Access, null);
-            Add_Action (Table.States (43), 25, (52, 3), 70);
-            Add_Action (Table.States (43), 26, Reduce, (50, 1),  1, rhs_item_1'Access, null);
-            Add_Action (Table.States (43), 27, Reduce, (50, 1),  1, rhs_item_1'Access, null);
-            Add_Action (Table.States (43), 28, Reduce, (50, 1),  1, rhs_item_1'Access, null);
-            Add_Action (Table.States (43), 29, Reduce, (50, 1),  1, rhs_item_1'Access, null);
-            Add_Action (Table.States (43), 33, Reduce, (50, 1),  1, rhs_item_1'Access, null);
-            Add_Action (Table.States (43), 35, Reduce, (50, 1),  1, rhs_item_1'Access, null);
-            Add_Action (Table.States (43), 36, Reduce, (50, 1),  1, rhs_item_1'Access, null);
-            Table.States (44).Action_List.Set_Capacity (5);
-            Add_Action (Table.States (44), 12, (45, 1), 71);
-            Add_Action (Table.States (44), 23, (45, 2), 72);
-            Add_Conflict (Table.States (44), 23, (44, 1),  0, null, null);
-            Add_Action (Table.States (44), 29, (44, 0), 73);
-            Add_Action (Table.States (44), 33, Reduce, (44, 1),  0, null, null);
-            Add_Action (Table.States (44), 36, Reduce, (44, 1),  0, null, null);
-            Table.States (44).Goto_List.Set_Capacity (1);
-            Add_Goto (Table.States (44), 44, 74);
-            Table.States (45).Action_List.Set_Capacity (5);
-            Add_Action (Table.States (45), (12, 23, 29, 33, 36), (45, 0),  1, null, null);
-            Table.States (46).Action_List.Set_Capacity (14);
-            Add_Action (Table.States (46), (11, 12, 18, 19, 20, 21, 23, 26, 27, 28, 29, 33, 35, 36), (50, 2),  1,
-            rhs_item_2'Access, null);
-            Table.States (47).Action_List.Set_Capacity (14);
-            Add_Action (Table.States (47), (11, 12, 18, 19, 20, 21, 23, 26, 27, 28, 29, 33, 35, 36), (49, 0),  1, null,
-            null);
-            Table.States (48).Action_List.Set_Capacity (11);
-            Add_Action (Table.States (48), 11, (46, 2), 75);
-            Add_Action (Table.States (48), 12, Reduce, (46, 1),  1, null, null);
-            Add_Action (Table.States (48), 18, (53, 0), 38);
-            Add_Action (Table.States (48), 19, (52, 0), 39);
-            Add_Action (Table.States (48), 20, (51, 0), 40);
-            Add_Action (Table.States (48), 21, (47, 0), 41);
-            Add_Action (Table.States (48), 23, Reduce, (46, 1),  1, null, null);
-            Add_Action (Table.States (48), 29, Reduce, (46, 1),  1, null, null);
-            Add_Action (Table.States (48), 33, (48, 1), 42);
-            Add_Conflict (Table.States (48), 33, (46, 1),  1, null, null);
-            Add_Action (Table.States (48), 35, (50, 1), 43);
-            Add_Action (Table.States (48), 36, Reduce, (46, 1),  1, null, null);
-            Table.States (48).Goto_List.Set_Capacity (6);
-            Add_Goto (Table.States (48), 47, 46);
-            Add_Goto (Table.States (48), 48, 76);
-            Add_Goto (Table.States (48), 50, 49);
-            Add_Goto (Table.States (48), 51, 50);
-            Add_Goto (Table.States (48), 52, 51);
-            Add_Goto (Table.States (48), 53, 52);
-            Table.States (49).Action_List.Set_Capacity (14);
-            Add_Action (Table.States (49), (11, 12, 18, 19, 20, 21, 23, 26, 27, 28, 29, 33, 35, 36), (48, 0),  1, null,
-            null);
-            Table.States (50).Action_List.Set_Capacity (14);
-            Add_Action (Table.States (50), (11, 12, 18, 19, 20, 21, 23, 26, 27, 28, 29, 33, 35, 36), (50, 5),  1,
-            rhs_item_5'Access, null);
+            Add_Action (Table.States (23), 39, (45, 1), 62);
+            Table.States (24).Action_List.Set_Capacity (1);
+            Add_Action (Table.States (24), 39, (45, 0), 63);
+            Table.States (25).Action_List.Set_Capacity (2);
+            Add_Action (Table.States (25), (13, 20), (45, 3),  1);
+            Table.States (26).Action_List.Set_Capacity (2);
+            Add_Action (Table.States (26), (13, 20), (46, 0),  1);
+            Table.States (27).Action_List.Set_Capacity (2);
+            Add_Action (Table.States (27), 13, (48, 7), 64);
+            Add_Action (Table.States (27), 20, (46, 1), 65);
+            Table.States (28).Action_List.Set_Capacity (2);
+            Add_Action (Table.States (28), 13, (48, 8), 66);
+            Add_Action (Table.States (28), 20, (46, 1), 65);
+            Table.States (29).Action_List.Set_Capacity (3);
+            Add_Action (Table.States (29), (30, 39, 42), (48, 15),  3);
+            Table.States (30).Action_List.Set_Capacity (2);
+            Add_Action (Table.States (30), 10, (48, 14), 67);
+            Add_Action (Table.States (30), 23, (48, 13), 68);
+            Table.States (31).Action_List.Set_Capacity (2);
+            Add_Action (Table.States (31), 10, (48, 12), 69);
+            Add_Action (Table.States (31), 23, (48, 11), 70);
+            Table.States (32).Action_List.Set_Capacity (3);
+            Add_Action (Table.States (32), 18, (44, 0), 35);
+            Add_Action (Table.States (32), 40, (44, 1), 38);
+            Add_Action (Table.States (32), 41, (44, 2), 39);
+            Table.States (32).Goto_List.Set_Capacity (1);
+            Add_Goto (Table.States (32), 44, 71);
+            Table.States (33).Action_List.Set_Capacity (1);
+            Add_Action (Table.States (33), 39, (48, 2), 72);
+            Table.States (34).Action_List.Set_Capacity (1);
+            Add_Action (Table.States (34), 39, (48, 0), 73);
+            Table.States (35).Action_List.Set_Capacity (7);
+            Add_Action (Table.States (35), (18, 30, 38, 39, 40, 41, 42), (44, 0),  1);
+            Table.States (36).Action_List.Set_Capacity (7);
+            Add_Action (Table.States (36), (18, 30, 38, 39, 40, 41, 42), (51, 1),  1);
+            Table.States (37).Action_List.Set_Capacity (7);
+            Add_Action (Table.States (37), (18, 30, 38, 39, 40, 41, 42), (51, 0),  1);
+            Table.States (38).Action_List.Set_Capacity (7);
+            Add_Action (Table.States (38), (18, 30, 38, 39, 40, 41, 42), (44, 1),  1);
+            Table.States (39).Action_List.Set_Capacity (7);
+            Add_Action (Table.States (39), (18, 30, 38, 39, 40, 41, 42), (44, 2),  1);
+            Table.States (40).Action_List.Set_Capacity (7);
+            Add_Action (Table.States (40), (18, 30, 38, 39, 40, 41, 42), (51, 2),  1);
+            Table.States (41).Action_List.Set_Capacity (7);
+            Add_Action (Table.States (41), (18, 30, 38, 39, 40, 41, 42), (52, 0),  1);
+            Table.States (42).Action_List.Set_Capacity (7);
+            Add_Action (Table.States (42), 18, (44, 0), 35);
+            Add_Action (Table.States (42), 30, Reduce, (48, 9),  3);
+            Add_Action (Table.States (42), 38, (51, 1), 36);
+            Add_Action (Table.States (42), 39, (51, 0), 37);
+            Add_Conflict (Table.States (42), 39, (48, 9),  3);
+            Add_Action (Table.States (42), 40, (44, 1), 38);
+            Add_Action (Table.States (42), 41, (44, 2), 39);
+            Add_Action (Table.States (42), 42, Reduce, (48, 9),  3);
+            Table.States (42).Goto_List.Set_Capacity (2);
+            Add_Goto (Table.States (42), 44, 40);
+            Add_Goto (Table.States (42), 51, 74);
+            Table.States (43).Action_List.Set_Capacity (6);
+            Add_Action (Table.States (43), 25, (63, 0), 43);
+            Add_Action (Table.States (43), 26, (62, 0), 44);
+            Add_Action (Table.States (43), 27, (61, 0), 45);
+            Add_Action (Table.States (43), 28, (57, 0), 46);
+            Add_Action (Table.States (43), 39, (58, 1), 47);
+            Add_Action (Table.States (43), 41, (60, 1), 48);
+            Table.States (43).Goto_List.Set_Capacity (8);
+            Add_Goto (Table.States (43), 57, 51);
+            Add_Goto (Table.States (43), 58, 52);
+            Add_Goto (Table.States (43), 59, 75);
+            Add_Goto (Table.States (43), 60, 54);
+            Add_Goto (Table.States (43), 61, 55);
+            Add_Goto (Table.States (43), 62, 56);
+            Add_Goto (Table.States (43), 63, 57);
+            Add_Goto (Table.States (43), 64, 76);
+            Table.States (44).Action_List.Set_Capacity (6);
+            Add_Action (Table.States (44), 25, (63, 0), 43);
+            Add_Action (Table.States (44), 26, (62, 0), 44);
+            Add_Action (Table.States (44), 27, (61, 0), 45);
+            Add_Action (Table.States (44), 28, (57, 0), 46);
+            Add_Action (Table.States (44), 39, (58, 1), 47);
+            Add_Action (Table.States (44), 41, (60, 1), 48);
+            Table.States (44).Goto_List.Set_Capacity (8);
+            Add_Goto (Table.States (44), 57, 51);
+            Add_Goto (Table.States (44), 58, 52);
+            Add_Goto (Table.States (44), 59, 75);
+            Add_Goto (Table.States (44), 60, 54);
+            Add_Goto (Table.States (44), 61, 55);
+            Add_Goto (Table.States (44), 62, 56);
+            Add_Goto (Table.States (44), 63, 57);
+            Add_Goto (Table.States (44), 64, 77);
+            Table.States (45).Action_List.Set_Capacity (6);
+            Add_Action (Table.States (45), 25, (63, 0), 43);
+            Add_Action (Table.States (45), 26, (62, 0), 44);
+            Add_Action (Table.States (45), 27, (61, 0), 45);
+            Add_Action (Table.States (45), 28, (57, 0), 46);
+            Add_Action (Table.States (45), 39, (58, 1), 47);
+            Add_Action (Table.States (45), 41, (60, 1), 48);
+            Table.States (45).Goto_List.Set_Capacity (8);
+            Add_Goto (Table.States (45), 57, 51);
+            Add_Goto (Table.States (45), 58, 52);
+            Add_Goto (Table.States (45), 59, 75);
+            Add_Goto (Table.States (45), 60, 54);
+            Add_Goto (Table.States (45), 61, 55);
+            Add_Goto (Table.States (45), 62, 56);
+            Add_Goto (Table.States (45), 63, 57);
+            Add_Goto (Table.States (45), 64, 78);
+            Table.States (46).Action_List.Set_Capacity (1);
+            Add_Action (Table.States (46), 39, (57, 0), 79);
+            Table.States (47).Action_List.Set_Capacity (18);
+            Add_Action (Table.States (47), 19, Reduce, (60, 0),  1);
+            Add_Action (Table.States (47), 20, Reduce, (60, 0),  1);
+            Add_Action (Table.States (47), 23, (58, 1), 80);
+            Add_Action (Table.States (47), 25, Reduce, (60, 0),  1);
+            Add_Action (Table.States (47), 26, Reduce, (60, 0),  1);
+            Add_Action (Table.States (47), 27, Reduce, (60, 0),  1);
+            Add_Action (Table.States (47), 28, Reduce, (60, 0),  1);
+            Add_Action (Table.States (47), 30, Reduce, (60, 0),  1);
+            Add_Action (Table.States (47), 31, (63, 4), 81);
+            Add_Action (Table.States (47), 32, (62, 2), 82);
+            Add_Action (Table.States (47), 33, Reduce, (60, 0),  1);
+            Add_Action (Table.States (47), 34, Reduce, (60, 0),  1);
+            Add_Action (Table.States (47), 35, Reduce, (60, 0),  1);
+            Add_Action (Table.States (47), 36, Reduce, (60, 0),  1);
+            Add_Action (Table.States (47), 37, (63, 5), 83);
+            Add_Action (Table.States (47), 39, Reduce, (60, 0),  1);
+            Add_Action (Table.States (47), 41, Reduce, (60, 0),  1);
+            Add_Action (Table.States (47), 42, Reduce, (60, 0),  1);
+            Table.States (48).Action_List.Set_Capacity (15);
+            Add_Action (Table.States (48), 19, Reduce, (60, 1),  1);
+            Add_Action (Table.States (48), 20, Reduce, (60, 1),  1);
+            Add_Action (Table.States (48), 25, Reduce, (60, 1),  1);
+            Add_Action (Table.States (48), 26, Reduce, (60, 1),  1);
+            Add_Action (Table.States (48), 27, Reduce, (60, 1),  1);
+            Add_Action (Table.States (48), 28, Reduce, (60, 1),  1);
+            Add_Action (Table.States (48), 30, Reduce, (60, 1),  1);
+            Add_Action (Table.States (48), 32, (62, 3), 84);
+            Add_Action (Table.States (48), 33, Reduce, (60, 1),  1);
+            Add_Action (Table.States (48), 34, Reduce, (60, 1),  1);
+            Add_Action (Table.States (48), 35, Reduce, (60, 1),  1);
+            Add_Action (Table.States (48), 36, Reduce, (60, 1),  1);
+            Add_Action (Table.States (48), 39, Reduce, (60, 1),  1);
+            Add_Action (Table.States (48), 41, Reduce, (60, 1),  1);
+            Add_Action (Table.States (48), 42, Reduce, (60, 1),  1);
+            Table.States (49).Action_List.Set_Capacity (5);
+            Add_Action (Table.States (49), 20, (55, 1), 85);
+            Add_Action (Table.States (49), 30, (55, 2), 86);
+            Add_Conflict (Table.States (49), 30, (54, 1),  0);
+            Add_Action (Table.States (49), 36, (54, 0), 87);
+            Add_Action (Table.States (49), 39, Reduce, (54, 1),  0);
+            Add_Action (Table.States (49), 42, Reduce, (54, 1),  0);
+            Table.States (49).Goto_List.Set_Capacity (1);
+            Add_Goto (Table.States (49), 54, 88);
+            Table.States (50).Action_List.Set_Capacity (5);
+            Add_Action (Table.States (50), (20, 30, 36, 39, 42), (55, 0),  1);
             Table.States (51).Action_List.Set_Capacity (14);
-            Add_Action (Table.States (51), (11, 12, 18, 19, 20, 21, 23, 26, 27, 28, 29, 33, 35, 36), (50, 3),  1,
-            rhs_item_3'Access, null);
+            Add_Action (Table.States (51), (19, 20, 25, 26, 27, 28, 30, 33, 34, 35, 36, 39, 41, 42), (60, 2),  1);
             Table.States (52).Action_List.Set_Capacity (14);
-            Add_Action (Table.States (52), (11, 12, 18, 19, 20, 21, 23, 26, 27, 28, 29, 33, 35, 36), (50, 4),  1,
-            rhs_item_4'Access, null);
-            Table.States (53).Action_List.Set_Capacity (5);
-            Add_Action (Table.States (53), 12, (45, 1), 71);
-            Add_Action (Table.States (53), 23, (45, 2), 72);
-            Add_Conflict (Table.States (53), 23, (44, 1),  0, null, null);
-            Add_Action (Table.States (53), 29, (44, 0), 73);
-            Add_Action (Table.States (53), 33, Reduce, (44, 1),  0, null, null);
-            Add_Action (Table.States (53), 36, Reduce, (44, 1),  0, null, null);
-            Table.States (53).Goto_List.Set_Capacity (1);
-            Add_Goto (Table.States (53), 44, 77);
-            Table.States (54).Action_List.Set_Capacity (3);
-            Add_Action (Table.States (54), (23, 33, 36), (38, 1),  4, declaration_1'Access, null);
-            Table.States (55).Action_List.Set_Capacity (2);
-            Add_Action (Table.States (55), (9, 33), (40, 1),  2, null, null);
-            Table.States (56).Action_List.Set_Capacity (1);
-            Add_Action (Table.States (56), 33, (38, 4), 78);
-            Table.States (57).Action_List.Set_Capacity (1);
-            Add_Action (Table.States (57), 17, (39, 1), 79);
-            Table.States (58).Action_List.Set_Capacity (1);
-            Add_Action (Table.States (58), 17, (39, 2), 80);
-            Table.States (59).Action_List.Set_Capacity (13);
-            Add_Action (Table.States (59), (8, 10, 15, 16, 20, 23, 28, 30, 32, 33, 34, 35, 36), (41, 1),  2, null,
-            null);
-            Table.States (60).Action_List.Set_Capacity (13);
-            Add_Action (Table.States (60), 8, (42, 10), 24);
-            Add_Action (Table.States (60), 10, (42, 5), 25);
-            Add_Action (Table.States (60), 15, (42, 0), 26);
-            Add_Action (Table.States (60), 16, (42, 2), 27);
-            Add_Action (Table.States (60), 20, (42, 3), 28);
-            Add_Action (Table.States (60), 23, Reduce, (38, 0),  4, declaration_0'Access, null);
-            Add_Action (Table.States (60), 28, (42, 6), 29);
-            Add_Action (Table.States (60), 30, (42, 7), 30);
-            Add_Action (Table.States (60), 32, (42, 4), 31);
-            Add_Action (Table.States (60), 33, (42, 1), 32);
-            Add_Conflict (Table.States (60), 33, (38, 0),  4, declaration_0'Access, null);
-            Add_Action (Table.States (60), 34, (42, 8), 33);
-            Add_Action (Table.States (60), 35, (42, 9), 34);
-            Add_Action (Table.States (60), 36, Reduce, (38, 0),  4, declaration_0'Access, null);
-            Table.States (60).Goto_List.Set_Capacity (1);
-            Add_Goto (Table.States (60), 42, 59);
-            Table.States (61).Action_List.Set_Capacity (10);
-            Add_Action (Table.States (61), 12, Reduce, (54, 0),  1, null, null);
-            Add_Action (Table.States (61), 18, (53, 0), 38);
-            Add_Action (Table.States (61), 19, (52, 0), 39);
-            Add_Action (Table.States (61), 20, (51, 0), 40);
-            Add_Action (Table.States (61), 21, (47, 0), 41);
-            Add_Action (Table.States (61), 26, Reduce, (54, 0),  1, null, null);
-            Add_Action (Table.States (61), 27, Reduce, (54, 0),  1, null, null);
-            Add_Action (Table.States (61), 28, Reduce, (54, 0),  1, null, null);
-            Add_Action (Table.States (61), 33, (48, 1), 42);
-            Add_Action (Table.States (61), 35, (50, 1), 43);
-            Table.States (61).Goto_List.Set_Capacity (6);
-            Add_Goto (Table.States (61), 47, 46);
-            Add_Goto (Table.States (61), 48, 76);
-            Add_Goto (Table.States (61), 50, 49);
-            Add_Goto (Table.States (61), 51, 50);
-            Add_Goto (Table.States (61), 52, 51);
-            Add_Goto (Table.States (61), 53, 52);
+            Add_Action (Table.States (52), (19, 20, 25, 26, 27, 28, 30, 33, 34, 35, 36, 39, 41, 42), (59, 0),  1);
+            Table.States (53).Action_List.Set_Capacity (11);
+            Add_Action (Table.States (53), 19, (56, 2), 89);
+            Add_Action (Table.States (53), 20, Reduce, (56, 1),  1);
+            Add_Action (Table.States (53), 25, (63, 0), 43);
+            Add_Action (Table.States (53), 26, (62, 0), 44);
+            Add_Action (Table.States (53), 27, (61, 0), 45);
+            Add_Action (Table.States (53), 28, (57, 0), 46);
+            Add_Action (Table.States (53), 30, Reduce, (56, 1),  1);
+            Add_Action (Table.States (53), 36, Reduce, (56, 1),  1);
+            Add_Action (Table.States (53), 39, (58, 1), 47);
+            Add_Conflict (Table.States (53), 39, (56, 1),  1);
+            Add_Action (Table.States (53), 41, (60, 1), 48);
+            Add_Action (Table.States (53), 42, Reduce, (56, 1),  1);
+            Table.States (53).Goto_List.Set_Capacity (6);
+            Add_Goto (Table.States (53), 57, 51);
+            Add_Goto (Table.States (53), 58, 90);
+            Add_Goto (Table.States (53), 60, 54);
+            Add_Goto (Table.States (53), 61, 55);
+            Add_Goto (Table.States (53), 62, 56);
+            Add_Goto (Table.States (53), 63, 57);
+            Table.States (54).Action_List.Set_Capacity (14);
+            Add_Action (Table.States (54), (19, 20, 25, 26, 27, 28, 30, 33, 34, 35, 36, 39, 41, 42), (58, 0),  1);
+            Table.States (55).Action_List.Set_Capacity (14);
+            Add_Action (Table.States (55), (19, 20, 25, 26, 27, 28, 30, 33, 34, 35, 36, 39, 41, 42), (60, 5),  1);
+            Table.States (56).Action_List.Set_Capacity (14);
+            Add_Action (Table.States (56), (19, 20, 25, 26, 27, 28, 30, 33, 
34, 35, 36, 39, 41, 42), (60, 3),  1);
+            Table.States (57).Action_List.Set_Capacity (14);
+            Add_Action (Table.States (57), (19, 20, 25, 26, 27, 28, 30, 33, 
34, 35, 36, 39, 41, 42), (60, 4),  1);
+            Table.States (58).Action_List.Set_Capacity (5);
+            Add_Action (Table.States (58), 20, (55, 1), 85);
+            Add_Action (Table.States (58), 30, (55, 2), 86);
+            Add_Conflict (Table.States (58), 30, (54, 1),  0);
+            Add_Action (Table.States (58), 36, (54, 0), 87);
+            Add_Action (Table.States (58), 39, Reduce, (54, 1),  0);
+            Add_Action (Table.States (58), 42, Reduce, (54, 1),  0);
+            Table.States (58).Goto_List.Set_Capacity (1);
+            Add_Goto (Table.States (58), 54, 91);
+         end Subr_1;
+         procedure Subr_2
+         is begin
+            Table.States (59).Action_List.Set_Capacity (3);
+            Add_Action (Table.States (59), (30, 39, 42), (48, 6),  4);
+            Table.States (60).Action_List.Set_Capacity (2);
+            Add_Action (Table.States (60), (17, 39), (49, 1),  2);
+            Table.States (61).Action_List.Set_Capacity (2);
+            Add_Action (Table.States (61), (13, 20), (45, 2),  2);
             Table.States (62).Action_List.Set_Capacity (2);
-            Add_Action (Table.States (62), 12, (54, 1), 81);
-            Add_Action (Table.States (62), 26, (53, 0), 82);
+            Add_Action (Table.States (62), (13, 20), (45, 1),  2);
             Table.States (63).Action_List.Set_Capacity (2);
-            Add_Action (Table.States (63), 12, (54, 1), 81);
-            Add_Action (Table.States (63), 27, (52, 0), 83);
-            Table.States (64).Action_List.Set_Capacity (2);
-            Add_Action (Table.States (64), 12, (54, 1), 81);
-            Add_Action (Table.States (64), 28, (51, 0), 84);
-            Table.States (65).Action_List.Set_Capacity (1);
-            Add_Action (Table.States (65), 16, (47, 0), 85);
-            Table.States (66).Action_List.Set_Capacity (6);
-            Add_Action (Table.States (66), 18, (53, 0), 38);
-            Add_Action (Table.States (66), 19, (52, 0), 39);
-            Add_Action (Table.States (66), 20, (51, 0), 40);
-            Add_Action (Table.States (66), 21, (47, 0), 41);
-            Add_Action (Table.States (66), 33, (50, 0), 86);
-            Add_Action (Table.States (66), 35, (50, 1), 43);
-            Table.States (66).Goto_List.Set_Capacity (5);
-            Add_Goto (Table.States (66), 47, 46);
-            Add_Goto (Table.States (66), 50, 87);
-            Add_Goto (Table.States (66), 51, 50);
-            Add_Goto (Table.States (66), 52, 51);
-            Add_Goto (Table.States (66), 53, 52);
-            Table.States (67).Action_List.Set_Capacity (14);
-            Add_Action (Table.States (67), (11, 12, 18, 19, 20, 21, 23, 26, 
27, 28, 29, 33, 35, 36), (53, 4),  2, null,
-            null);
-            Table.States (68).Action_List.Set_Capacity (14);
-            Add_Action (Table.States (68), (11, 12, 18, 19, 20, 21, 23, 26, 
27, 28, 29, 33, 35, 36), (52, 2),  2, null,
-            null);
-            Table.States (69).Action_List.Set_Capacity (14);
-            Add_Action (Table.States (69), (11, 12, 18, 19, 20, 21, 23, 26, 
27, 28, 29, 33, 35, 36), (53, 5),  2, null,
-            null);
-            Table.States (70).Action_List.Set_Capacity (14);
-            Add_Action (Table.States (70), (11, 12, 18, 19, 20, 21, 23, 26, 
27, 28, 29, 33, 35, 36), (52, 3),  2,
-            rhs_optional_item_3'Access, null);
-            Table.States (71).Action_List.Set_Capacity (10);
-            Add_Action (Table.States (71), 12, Reduce, (46, 0),  0, null, 
null);
-            Add_Action (Table.States (71), 18, (53, 0), 38);
-            Add_Action (Table.States (71), 19, (52, 0), 39);
-            Add_Action (Table.States (71), 20, (51, 0), 40);
-            Add_Action (Table.States (71), 21, (47, 0), 41);
-            Add_Action (Table.States (71), 23, Reduce, (46, 0),  0, null, 
null);
-            Add_Action (Table.States (71), 29, Reduce, (46, 0),  0, null, 
null);
-            Add_Action (Table.States (71), 33, (48, 1), 42);
-            Add_Conflict (Table.States (71), 33, (46, 0),  0, null, null);
-            Add_Action (Table.States (71), 35, (50, 1), 43);
-            Add_Action (Table.States (71), 36, Reduce, (46, 0),  0, null, 
null);
-            Table.States (71).Goto_List.Set_Capacity (8);
-            Add_Goto (Table.States (71), 46, 88);
-            Add_Goto (Table.States (71), 47, 46);
-            Add_Goto (Table.States (71), 48, 47);
-            Add_Goto (Table.States (71), 49, 48);
-            Add_Goto (Table.States (71), 50, 49);
-            Add_Goto (Table.States (71), 51, 50);
-            Add_Goto (Table.States (71), 52, 51);
-            Add_Goto (Table.States (71), 53, 52);
-            Table.States (72).Action_List.Set_Capacity (2);
-            Add_Action (Table.States (72), 4, (45, 3), 89);
-            Add_Action (Table.States (72), 5, (45, 2), 90);
-            Table.States (73).Action_List.Set_Capacity (3);
-            Add_Action (Table.States (73), (23, 33, 36), (44, 0),  1, null, 
null);
-            Table.States (74).Action_List.Set_Capacity (3);
-            Add_Action (Table.States (74), (23, 33, 36), (43, 0),  4, 
nonterminal_0'Access, null);
-            Table.States (75).Action_List.Set_Capacity (6);
-            Add_Action (Table.States (75), 11, (46, 3), 91);
-            Add_Action (Table.States (75), 12, Reduce, (46, 2),  2, null, 
null);
-            Add_Action (Table.States (75), 23, Reduce, (46, 2),  2, null, 
null);
-            Add_Action (Table.States (75), 29, Reduce, (46, 2),  2, null, 
null);
-            Add_Action (Table.States (75), 33, Reduce, (46, 2),  2, null, 
null);
-            Add_Action (Table.States (75), 36, Reduce, (46, 2),  2, null, 
null);
-            Table.States (76).Action_List.Set_Capacity (14);
-            Add_Action (Table.States (76), (11, 12, 18, 19, 20, 21, 23, 26, 
27, 28, 29, 33, 35, 36), (49, 1),  2, null,
-            null);
-            Table.States (77).Action_List.Set_Capacity (3);
-            Add_Action (Table.States (77), (23, 33, 36), (43, 1),  4, 
nonterminal_1'Access, null);
-            Table.States (78).Action_List.Set_Capacity (3);
-            Add_Action (Table.States (78), (23, 33, 36), (38, 4),  5, 
declaration_4'Access, null);
+            Add_Action (Table.States (63), (13, 20), (45, 0),  2);
+            Table.States (64).Action_List.Set_Capacity (1);
+            Add_Action (Table.States (64), 16, (48, 7), 92);
+            Table.States (65).Action_List.Set_Capacity (4);
+            Add_Action (Table.States (65), 3, (45, 2), 22);
+            Add_Action (Table.States (65), 14, (45, 1), 23);
+            Add_Action (Table.States (65), 15, (45, 0), 24);
+            Add_Action (Table.States (65), 39, (45, 3), 25);
+            Table.States (65).Goto_List.Set_Capacity (1);
+            Add_Goto (Table.States (65), 45, 93);
+            Table.States (66).Action_List.Set_Capacity (1);
+            Add_Action (Table.States (66), 16, (48, 8), 94);
+            Table.States (67).Action_List.Set_Capacity (1);
+            Add_Action (Table.States (67), 39, (50, 0), 95);
+            Table.States (67).Goto_List.Set_Capacity (1);
+            Add_Goto (Table.States (67), 50, 96);
+            Table.States (68).Action_List.Set_Capacity (1);
+            Add_Action (Table.States (68), 39, (48, 13), 97);
+            Table.States (69).Action_List.Set_Capacity (1);
+            Add_Action (Table.States (69), 39, (50, 0), 95);
+            Table.States (69).Goto_List.Set_Capacity (1);
+            Add_Goto (Table.States (69), 50, 98);
+            Table.States (70).Action_List.Set_Capacity (1);
+            Add_Action (Table.States (70), 39, (48, 11), 99);
+            Table.States (71).Action_List.Set_Capacity (3);
+            Add_Action (Table.States (71), (30, 39, 42), (48, 5),  4);
+            Table.States (72).Action_List.Set_Capacity (1);
+            Add_Action (Table.States (72), 24, (48, 2), 100);
+            Table.States (73).Action_List.Set_Capacity (1);
+            Add_Action (Table.States (73), 24, (48, 0), 101);
+            Table.States (74).Action_List.Set_Capacity (7);
+            Add_Action (Table.States (74), (18, 30, 38, 39, 40, 41, 42), (52, 
1),  2);
+            Table.States (75).Action_List.Set_Capacity (10);
+            Add_Action (Table.States (75), 20, Reduce, (64, 0),  1);
+            Add_Action (Table.States (75), 25, (63, 0), 43);
+            Add_Action (Table.States (75), 26, (62, 0), 44);
+            Add_Action (Table.States (75), 27, (61, 0), 45);
+            Add_Action (Table.States (75), 28, (57, 0), 46);
+            Add_Action (Table.States (75), 33, Reduce, (64, 0),  1);
+            Add_Action (Table.States (75), 34, Reduce, (64, 0),  1);
+            Add_Action (Table.States (75), 35, Reduce, (64, 0),  1);
+            Add_Action (Table.States (75), 39, (58, 1), 47);
+            Add_Action (Table.States (75), 41, (60, 1), 48);
+            Table.States (75).Goto_List.Set_Capacity (6);
+            Add_Goto (Table.States (75), 57, 51);
+            Add_Goto (Table.States (75), 58, 90);
+            Add_Goto (Table.States (75), 60, 54);
+            Add_Goto (Table.States (75), 61, 55);
+            Add_Goto (Table.States (75), 62, 56);
+            Add_Goto (Table.States (75), 63, 57);
+            Table.States (76).Action_List.Set_Capacity (2);
+            Add_Action (Table.States (76), 20, (64, 1), 102);
+            Add_Action (Table.States (76), 33, (63, 0), 103);
+            Table.States (77).Action_List.Set_Capacity (2);
+            Add_Action (Table.States (77), 20, (64, 1), 102);
+            Add_Action (Table.States (77), 34, (62, 0), 104);
+            Table.States (78).Action_List.Set_Capacity (2);
+            Add_Action (Table.States (78), 20, (64, 1), 102);
+            Add_Action (Table.States (78), 35, (61, 0), 105);
             Table.States (79).Action_List.Set_Capacity (1);
-            Add_Action (Table.States (79), (1 =>  33), (39, 1),  4, null, 
null);
-            Table.States (80).Action_List.Set_Capacity (1);
-            Add_Action (Table.States (80), (1 =>  33), (39, 2),  4, null, 
null);
-            Table.States (81).Action_List.Set_Capacity (6);
-            Add_Action (Table.States (81), 18, (53, 0), 38);
-            Add_Action (Table.States (81), 19, (52, 0), 39);
-            Add_Action (Table.States (81), 20, (51, 0), 40);
-            Add_Action (Table.States (81), 21, (47, 0), 41);
-            Add_Action (Table.States (81), 33, (48, 1), 42);
-            Add_Action (Table.States (81), 35, (50, 1), 43);
-            Table.States (81).Goto_List.Set_Capacity (7);
-            Add_Goto (Table.States (81), 47, 46);
-            Add_Goto (Table.States (81), 48, 47);
-            Add_Goto (Table.States (81), 49, 92);
-            Add_Goto (Table.States (81), 50, 49);
-            Add_Goto (Table.States (81), 51, 50);
-            Add_Goto (Table.States (81), 52, 51);
-            Add_Goto (Table.States (81), 53, 52);
-            Table.States (82).Action_List.Set_Capacity (15);
-            Add_Action (Table.States (82), 11, Reduce, (53, 0),  3, null, 
null);
-            Add_Action (Table.States (82), 12, Reduce, (53, 0),  3, null, 
null);
-            Add_Action (Table.States (82), 18, Reduce, (53, 0),  3, null, 
null);
-            Add_Action (Table.States (82), 19, Reduce, (53, 0),  3, null, 
null);
-            Add_Action (Table.States (82), 20, Reduce, (53, 0),  3, null, 
null);
-            Add_Action (Table.States (82), 21, Reduce, (53, 0),  3, null, 
null);
-            Add_Action (Table.States (82), 22, (53, 1), 93);
-            Add_Action (Table.States (82), 23, Reduce, (53, 0),  3, null, 
null);
-            Add_Action (Table.States (82), 26, Reduce, (53, 0),  3, null, 
null);
-            Add_Action (Table.States (82), 27, Reduce, (53, 0),  3, null, 
null);
-            Add_Action (Table.States (82), 28, Reduce, (53, 0),  3, null, 
null);
-            Add_Action (Table.States (82), 29, Reduce, (53, 0),  3, null, 
null);
-            Add_Action (Table.States (82), 33, Reduce, (53, 0),  3, null, 
null);
-            Add_Action (Table.States (82), 35, Reduce, (53, 0),  3, null, 
null);
-            Add_Action (Table.States (82), 36, Reduce, (53, 0),  3, null, 
null);
+            Add_Action (Table.States (79), 23, (57, 0), 106);
+            Table.States (80).Action_List.Set_Capacity (6);
+            Add_Action (Table.States (80), 25, (63, 0), 43);
+            Add_Action (Table.States (80), 26, (62, 0), 44);
+            Add_Action (Table.States (80), 27, (61, 0), 45);
+            Add_Action (Table.States (80), 28, (57, 0), 46);
+            Add_Action (Table.States (80), 39, (60, 0), 107);
+            Add_Action (Table.States (80), 41, (60, 1), 48);
+            Table.States (80).Goto_List.Set_Capacity (5);
+            Add_Goto (Table.States (80), 57, 51);
+            Add_Goto (Table.States (80), 60, 108);
+            Add_Goto (Table.States (80), 61, 55);
+            Add_Goto (Table.States (80), 62, 56);
+            Add_Goto (Table.States (80), 63, 57);
+            Table.States (81).Action_List.Set_Capacity (14);
+            Add_Action (Table.States (81), (19, 20, 25, 26, 27, 28, 30, 33, 
34, 35, 36, 39, 41, 42), (63, 4),  2);
+            Table.States (82).Action_List.Set_Capacity (14);
+            Add_Action (Table.States (82), (19, 20, 25, 26, 27, 28, 30, 33, 
34, 35, 36, 39, 41, 42), (62, 2),  2);
             Table.States (83).Action_List.Set_Capacity (14);
-            Add_Action (Table.States (83), (11, 12, 18, 19, 20, 21, 23, 26, 
27, 28, 29, 33, 35, 36), (52, 0),  3, null,
-            null);
-            Table.States (84).Action_List.Set_Capacity (17);
-            Add_Action (Table.States (84), 11, Reduce, (51, 0),  3, null, 
null);
-            Add_Action (Table.States (84), 12, Reduce, (51, 0),  3, null, 
null);
-            Add_Action (Table.States (84), 18, Reduce, (51, 0),  3, null, 
null);
-            Add_Action (Table.States (84), 19, Reduce, (51, 0),  3, null, 
null);
-            Add_Action (Table.States (84), 20, Reduce, (51, 0),  3, null, 
null);
-            Add_Action (Table.States (84), 21, Reduce, (51, 0),  3, null, 
null);
-            Add_Action (Table.States (84), 23, Reduce, (51, 0),  3, null, 
null);
-            Add_Action (Table.States (84), 24, (53, 2), 94);
-            Add_Action (Table.States (84), 25, (52, 1), 95);
-            Add_Action (Table.States (84), 26, Reduce, (51, 0),  3, null, 
null);
-            Add_Action (Table.States (84), 27, Reduce, (51, 0),  3, null, 
null);
-            Add_Action (Table.States (84), 28, Reduce, (51, 0),  3, null, 
null);
-            Add_Action (Table.States (84), 29, Reduce, (51, 0),  3, null, 
null);
-            Add_Action (Table.States (84), 31, (53, 3), 96);
-            Add_Action (Table.States (84), 33, Reduce, (51, 0),  3, null, 
null);
-            Add_Action (Table.States (84), 35, Reduce, (51, 0),  3, null, 
null);
-            Add_Action (Table.States (84), 36, Reduce, (51, 0),  3, null, 
null);
-            Table.States (85).Action_List.Set_Capacity (1);
-            Add_Action (Table.States (85), 33, (47, 0), 97);
-            Table.States (86).Action_List.Set_Capacity (17);
-            Add_Action (Table.States (86), 11, Reduce, (50, 0),  1, null, 
null);
-            Add_Action (Table.States (86), 12, Reduce, (50, 0),  1, null, 
null);
-            Add_Action (Table.States (86), 18, Reduce, (50, 0),  1, null, 
null);
-            Add_Action (Table.States (86), 19, Reduce, (50, 0),  1, null, 
null);
-            Add_Action (Table.States (86), 20, Reduce, (50, 0),  1, null, 
null);
-            Add_Action (Table.States (86), 21, Reduce, (50, 0),  1, null, 
null);
-            Add_Action (Table.States (86), 23, Reduce, (50, 0),  1, null, 
null);
-            Add_Action (Table.States (86), 24, (53, 4), 67);
-            Add_Action (Table.States (86), 25, (52, 2), 68);
-            Add_Action (Table.States (86), 26, Reduce, (50, 0),  1, null, 
null);
-            Add_Action (Table.States (86), 27, Reduce, (50, 0),  1, null, 
null);
-            Add_Action (Table.States (86), 28, Reduce, (50, 0),  1, null, 
null);
-            Add_Action (Table.States (86), 29, Reduce, (50, 0),  1, null, 
null);
-            Add_Action (Table.States (86), 31, (53, 5), 69);
-            Add_Action (Table.States (86), 33, Reduce, (50, 0),  1, null, 
null);
-            Add_Action (Table.States (86), 35, Reduce, (50, 0),  1, null, 
null);
-            Add_Action (Table.States (86), 36, Reduce, (50, 0),  1, null, 
null);
-            Table.States (87).Action_List.Set_Capacity (14);
-            Add_Action (Table.States (87), (11, 12, 18, 19, 20, 21, 23, 26, 
27, 28, 29, 33, 35, 36), (48, 1),  3, null,
-            null);
-            Table.States (88).Action_List.Set_Capacity (5);
-            Add_Action (Table.States (88), (12, 23, 29, 33, 36), (45, 1),  3, 
null, null);
-            Table.States (89).Action_List.Set_Capacity (1);
-            Add_Action (Table.States (89), 5, (45, 3), 98);
-            Table.States (90).Action_List.Set_Capacity (1);
-            Add_Action (Table.States (90), 33, (45, 2), 99);
-            Table.States (91).Action_List.Set_Capacity (5);
-            Add_Action (Table.States (91), (12, 23, 29, 33, 36), (46, 3),  3, 
null, null);
-            Table.States (92).Action_List.Set_Capacity (10);
-            Add_Action (Table.States (92), 12, Reduce, (54, 1),  3, null, 
null);
-            Add_Action (Table.States (92), 18, (53, 0), 38);
-            Add_Action (Table.States (92), 19, (52, 0), 39);
-            Add_Action (Table.States (92), 20, (51, 0), 40);
-            Add_Action (Table.States (92), 21, (47, 0), 41);
-            Add_Action (Table.States (92), 26, Reduce, (54, 1),  3, null, 
null);
-            Add_Action (Table.States (92), 27, Reduce, (54, 1),  3, null, 
null);
-            Add_Action (Table.States (92), 28, Reduce, (54, 1),  3, null, 
null);
-            Add_Action (Table.States (92), 33, (48, 1), 42);
-            Add_Action (Table.States (92), 35, (50, 1), 43);
-            Table.States (92).Goto_List.Set_Capacity (6);
-            Add_Goto (Table.States (92), 47, 46);
-            Add_Goto (Table.States (92), 48, 76);
-            Add_Goto (Table.States (92), 50, 49);
-            Add_Goto (Table.States (92), 51, 50);
-            Add_Goto (Table.States (92), 52, 51);
-            Add_Goto (Table.States (92), 53, 52);
-            Table.States (93).Action_List.Set_Capacity (14);
-            Add_Action (Table.States (93), (11, 12, 18, 19, 20, 21, 23, 26, 
27, 28, 29, 33, 35, 36), (53, 1),  4, null,
-            null);
-            Table.States (94).Action_List.Set_Capacity (14);
-            Add_Action (Table.States (94), (11, 12, 18, 19, 20, 21, 23, 26, 
27, 28, 29, 33, 35, 36), (53, 2),  4, null,
-            null);
-            Table.States (95).Action_List.Set_Capacity (14);
-            Add_Action (Table.States (95), (11, 12, 18, 19, 20, 21, 23, 26, 
27, 28, 29, 33, 35, 36), (52, 1),  4, null,
-            null);
-            Table.States (96).Action_List.Set_Capacity (14);
-            Add_Action (Table.States (96), (11, 12, 18, 19, 20, 21, 23, 26, 
27, 28, 29, 33, 35, 36), (53, 3),  4, null,
-            null);
-            Table.States (97).Action_List.Set_Capacity (1);
-            Add_Action (Table.States (97), 17, (47, 0), 100);
-            Table.States (98).Action_List.Set_Capacity (5);
-            Add_Action (Table.States (98), (12, 23, 29, 33, 36), (45, 3),  4, 
null, null);
-            Table.States (99).Action_List.Set_Capacity (1);
-            Add_Action (Table.States (99), 16, (45, 2), 101);
-            Table.States (100).Action_List.Set_Capacity (14);
-            Add_Action (Table.States (100), (11, 12, 18, 19, 20, 21, 23, 26, 
27, 28, 29, 33, 35, 36), (47, 0),  5,
-            null, null);
+            Add_Action (Table.States (83), (19, 20, 25, 26, 27, 28, 30, 33, 
34, 35, 36, 39, 41, 42), (63, 5),  2);
+            Table.States (84).Action_List.Set_Capacity (14);
+            Add_Action (Table.States (84), (19, 20, 25, 26, 27, 28, 30, 33, 
34, 35, 36, 39, 41, 42), (62, 3),  2);
+            Table.States (85).Action_List.Set_Capacity (10);
+            Add_Action (Table.States (85), 20, Reduce, (56, 0),  0);
+            Add_Action (Table.States (85), 25, (63, 0), 43);
+            Add_Action (Table.States (85), 26, (62, 0), 44);
+            Add_Action (Table.States (85), 27, (61, 0), 45);
+            Add_Action (Table.States (85), 28, (57, 0), 46);
+            Add_Action (Table.States (85), 30, Reduce, (56, 0),  0);
+            Add_Action (Table.States (85), 36, Reduce, (56, 0),  0);
+            Add_Action (Table.States (85), 39, (58, 1), 47);
+            Add_Conflict (Table.States (85), 39, (56, 0),  0);
+            Add_Action (Table.States (85), 41, (60, 1), 48);
+            Add_Action (Table.States (85), 42, Reduce, (56, 0),  0);
+            Table.States (85).Goto_List.Set_Capacity (8);
+            Add_Goto (Table.States (85), 56, 109);
+            Add_Goto (Table.States (85), 57, 51);
+            Add_Goto (Table.States (85), 58, 52);
+            Add_Goto (Table.States (85), 59, 53);
+            Add_Goto (Table.States (85), 60, 54);
+            Add_Goto (Table.States (85), 61, 55);
+            Add_Goto (Table.States (85), 62, 56);
+            Add_Goto (Table.States (85), 63, 57);
+            Table.States (86).Action_List.Set_Capacity (3);
+            Add_Action (Table.States (86), 7, (55, 6), 110);
+            Add_Action (Table.States (86), 8, (55, 4), 111);
+            Add_Action (Table.States (86), 9, (55, 2), 112);
+            Table.States (87).Action_List.Set_Capacity (3);
+            Add_Action (Table.States (87), (30, 39, 42), (54, 0),  1);
+            Table.States (88).Action_List.Set_Capacity (3);
+            Add_Action (Table.States (88), (30, 39, 42), (53, 0),  4);
+            Table.States (89).Action_List.Set_Capacity (6);
+            Add_Action (Table.States (89), 19, (56, 3), 113);
+            Add_Action (Table.States (89), 20, Reduce, (56, 2),  2);
+            Add_Action (Table.States (89), 30, Reduce, (56, 2),  2);
+            Add_Action (Table.States (89), 36, Reduce, (56, 2),  2);
+            Add_Action (Table.States (89), 39, Reduce, (56, 2),  2);
+            Add_Action (Table.States (89), 42, Reduce, (56, 2),  2);
+            Table.States (90).Action_List.Set_Capacity (14);
+            Add_Action (Table.States (90), (19, 20, 25, 26, 27, 28, 30, 33, 
34, 35, 36, 39, 41, 42), (59, 1),  2);
+            Table.States (91).Action_List.Set_Capacity (3);
+            Add_Action (Table.States (91), (30, 39, 42), (53, 1),  4);
+            Table.States (92).Action_List.Set_Capacity (2);
+            Add_Action (Table.States (92), 39, (47, 0), 114);
+            Add_Action (Table.States (92), 41, (47, 1), 115);
+            Table.States (92).Goto_List.Set_Capacity (1);
+            Add_Goto (Table.States (92), 47, 116);
+            Table.States (93).Action_List.Set_Capacity (2);
+            Add_Action (Table.States (93), (13, 20), (46, 1),  3);
+            Table.States (94).Action_List.Set_Capacity (2);
+            Add_Action (Table.States (94), 39, (47, 0), 114);
+            Add_Action (Table.States (94), 41, (47, 1), 115);
+            Table.States (94).Goto_List.Set_Capacity (1);
+            Add_Goto (Table.States (94), 47, 117);
+            Table.States (95).Action_List.Set_Capacity (5);
+            Add_Action (Table.States (95), (20, 30, 36, 39, 42), (50, 0),  1);
+            Table.States (96).Action_List.Set_Capacity (4);
+            Add_Action (Table.States (96), 20, (50, 1), 118);
+            Add_Action (Table.States (96), 30, Reduce, (48, 14),  5);
+            Add_Action (Table.States (96), 39, Reduce, (48, 14),  5);
+            Add_Action (Table.States (96), 42, Reduce, (48, 14),  5);
+            Table.States (97).Action_List.Set_Capacity (3);
+            Add_Action (Table.States (97), (30, 39, 42), (48, 13),  5);
+            Table.States (98).Action_List.Set_Capacity (4);
+            Add_Action (Table.States (98), 20, (50, 1), 118);
+            Add_Action (Table.States (98), 30, Reduce, (48, 12),  5);
+            Add_Action (Table.States (98), 39, Reduce, (48, 12),  5);
+            Add_Action (Table.States (98), 42, Reduce, (48, 12),  5);
+            Table.States (99).Action_List.Set_Capacity (3);
+            Add_Action (Table.States (99), (30, 39, 42), (48, 11),  5);
+            Table.States (100).Action_List.Set_Capacity (1);
+            Add_Action (Table.States (100), 39, (48, 2), 119);
             Table.States (101).Action_List.Set_Capacity (1);
-            Add_Action (Table.States (101), 33, (45, 2), 102);
-            Table.States (102).Action_List.Set_Capacity (5);
-            Add_Action (Table.States (102), (12, 23, 29, 33, 36), (45, 2),  6, 
null, null);
-         end Subr_1;
+            Add_Action (Table.States (101), 39, (48, 0), 120);
+            Table.States (102).Action_List.Set_Capacity (6);
+            Add_Action (Table.States (102), 25, (63, 0), 43);
+            Add_Action (Table.States (102), 26, (62, 0), 44);
+            Add_Action (Table.States (102), 27, (61, 0), 45);
+            Add_Action (Table.States (102), 28, (57, 0), 46);
+            Add_Action (Table.States (102), 39, (58, 1), 47);
+            Add_Action (Table.States (102), 41, (60, 1), 48);
+            Table.States (102).Goto_List.Set_Capacity (7);
+            Add_Goto (Table.States (102), 57, 51);
+            Add_Goto (Table.States (102), 58, 52);
+            Add_Goto (Table.States (102), 59, 121);
+            Add_Goto (Table.States (102), 60, 54);
+            Add_Goto (Table.States (102), 61, 55);
+            Add_Goto (Table.States (102), 62, 56);
+            Add_Goto (Table.States (102), 63, 57);
+            Table.States (103).Action_List.Set_Capacity (15);
+            Add_Action (Table.States (103), 19, Reduce, (63, 0),  3);
+            Add_Action (Table.States (103), 20, Reduce, (63, 0),  3);
+            Add_Action (Table.States (103), 25, Reduce, (63, 0),  3);
+            Add_Action (Table.States (103), 26, Reduce, (63, 0),  3);
+            Add_Action (Table.States (103), 27, Reduce, (63, 0),  3);
+            Add_Action (Table.States (103), 28, Reduce, (63, 0),  3);
+            Add_Action (Table.States (103), 29, (63, 1), 122);
+            Add_Action (Table.States (103), 30, Reduce, (63, 0),  3);
+            Add_Action (Table.States (103), 33, Reduce, (63, 0),  3);
+            Add_Action (Table.States (103), 34, Reduce, (63, 0),  3);
+            Add_Action (Table.States (103), 35, Reduce, (63, 0),  3);
+            Add_Action (Table.States (103), 36, Reduce, (63, 0),  3);
+            Add_Action (Table.States (103), 39, Reduce, (63, 0),  3);
+            Add_Action (Table.States (103), 41, Reduce, (63, 0),  3);
+            Add_Action (Table.States (103), 42, Reduce, (63, 0),  3);
+            Table.States (104).Action_List.Set_Capacity (14);
+            Add_Action (Table.States (104), (19, 20, 25, 26, 27, 28, 30, 33, 
34, 35, 36, 39, 41, 42), (62, 0),  3);
+            Table.States (105).Action_List.Set_Capacity (17);
+            Add_Action (Table.States (105), 19, Reduce, (61, 0),  3);
+            Add_Action (Table.States (105), 20, Reduce, (61, 0),  3);
+            Add_Action (Table.States (105), 25, Reduce, (61, 0),  3);
+            Add_Action (Table.States (105), 26, Reduce, (61, 0),  3);
+            Add_Action (Table.States (105), 27, Reduce, (61, 0),  3);
+            Add_Action (Table.States (105), 28, Reduce, (61, 0),  3);
+            Add_Action (Table.States (105), 30, Reduce, (61, 0),  3);
+            Add_Action (Table.States (105), 31, (63, 2), 123);
+            Add_Action (Table.States (105), 32, (62, 1), 124);
+            Add_Action (Table.States (105), 33, Reduce, (61, 0),  3);
+            Add_Action (Table.States (105), 34, Reduce, (61, 0),  3);
+            Add_Action (Table.States (105), 35, Reduce, (61, 0),  3);
+            Add_Action (Table.States (105), 36, Reduce, (61, 0),  3);
+            Add_Action (Table.States (105), 37, (63, 3), 125);
+            Add_Action (Table.States (105), 39, Reduce, (61, 0),  3);
+            Add_Action (Table.States (105), 41, Reduce, (61, 0),  3);
+            Add_Action (Table.States (105), 42, Reduce, (61, 0),  3);
+            Table.States (106).Action_List.Set_Capacity (1);
+            Add_Action (Table.States (106), 39, (57, 0), 126);
+            Table.States (107).Action_List.Set_Capacity (17);
+            Add_Action (Table.States (107), 19, Reduce, (60, 0),  1);
+            Add_Action (Table.States (107), 20, Reduce, (60, 0),  1);
+            Add_Action (Table.States (107), 25, Reduce, (60, 0),  1);
+            Add_Action (Table.States (107), 26, Reduce, (60, 0),  1);
+            Add_Action (Table.States (107), 27, Reduce, (60, 0),  1);
+            Add_Action (Table.States (107), 28, Reduce, (60, 0),  1);
+            Add_Action (Table.States (107), 30, Reduce, (60, 0),  1);
+            Add_Action (Table.States (107), 31, (63, 4), 81);
+            Add_Action (Table.States (107), 32, (62, 2), 82);
+            Add_Action (Table.States (107), 33, Reduce, (60, 0),  1);
+            Add_Action (Table.States (107), 34, Reduce, (60, 0),  1);
+            Add_Action (Table.States (107), 35, Reduce, (60, 0),  1);
+            Add_Action (Table.States (107), 36, Reduce, (60, 0),  1);
+            Add_Action (Table.States (107), 37, (63, 5), 83);
+            Add_Action (Table.States (107), 39, Reduce, (60, 0),  1);
+            Add_Action (Table.States (107), 41, Reduce, (60, 0),  1);
+            Add_Action (Table.States (107), 42, Reduce, (60, 0),  1);
+            Table.States (108).Action_List.Set_Capacity (14);
+            Add_Action (Table.States (108), (19, 20, 25, 26, 27, 28, 30, 33, 
34, 35, 36, 39, 41, 42), (58, 1),  3);
+            Table.States (109).Action_List.Set_Capacity (5);
+            Add_Action (Table.States (109), (20, 30, 36, 39, 42), (55, 1),  3);
+            Table.States (110).Action_List.Set_Capacity (1);
+            Add_Action (Table.States (110), 9, (55, 6), 127);
+            Table.States (111).Action_List.Set_Capacity (1);
+            Add_Action (Table.States (111), 39, (55, 4), 128);
+            Table.States (112).Action_List.Set_Capacity (1);
+            Add_Action (Table.States (112), 39, (55, 2), 129);
+            Table.States (113).Action_List.Set_Capacity (5);
+            Add_Action (Table.States (113), (20, 30, 36, 39, 42), (56, 3),  3);
+            Table.States (114).Action_List.Set_Capacity (4);
+            Add_Action (Table.States (114), (21, 30, 39, 42), (47, 0),  1);
+            Table.States (115).Action_List.Set_Capacity (4);
+            Add_Action (Table.States (115), (21, 30, 39, 42), (47, 1),  1);
+            Table.States (116).Action_List.Set_Capacity (3);
+            Add_Action (Table.States (116), (30, 39, 42), (48, 7),  6);
+            Table.States (117).Action_List.Set_Capacity (1);
+            Add_Action (Table.States (117), 21, (48, 8), 130);
+            Table.States (118).Action_List.Set_Capacity (1);
+            Add_Action (Table.States (118), 39, (50, 1), 131);
+            Table.States (119).Action_List.Set_Capacity (6);
+            Add_Action (Table.States (119), 18, (44, 0), 35);
+            Add_Action (Table.States (119), 30, Reduce, (48, 4),  6);
+            Add_Action (Table.States (119), 39, Reduce, (48, 4),  6);
+            Add_Action (Table.States (119), 40, (44, 1), 38);
+            Add_Action (Table.States (119), 41, (44, 2), 39);
+            Add_Action (Table.States (119), 42, Reduce, (48, 4),  6);
+            Table.States (119).Goto_List.Set_Capacity (1);
+            Add_Goto (Table.States (119), 44, 132);
+            Table.States (120).Action_List.Set_Capacity (3);
+            Add_Action (Table.States (120), 18, (44, 0), 35);
+            Add_Action (Table.States (120), 40, (44, 1), 38);
+            Add_Action (Table.States (120), 41, (44, 2), 39);
+            Table.States (120).Goto_List.Set_Capacity (1);
+            Add_Goto (Table.States (120), 44, 133);
+            Table.States (121).Action_List.Set_Capacity (10);
+            Add_Action (Table.States (121), 20, Reduce, (64, 1),  3);
+            Add_Action (Table.States (121), 25, (63, 0), 43);
+            Add_Action (Table.States (121), 26, (62, 0), 44);
+            Add_Action (Table.States (121), 27, (61, 0), 45);
+            Add_Action (Table.States (121), 28, (57, 0), 46);
+            Add_Action (Table.States (121), 33, Reduce, (64, 1),  3);
+            Add_Action (Table.States (121), 34, Reduce, (64, 1),  3);
+            Add_Action (Table.States (121), 35, Reduce, (64, 1),  3);
+            Add_Action (Table.States (121), 39, (58, 1), 47);
+            Add_Action (Table.States (121), 41, (60, 1), 48);
+            Table.States (121).Goto_List.Set_Capacity (6);
+            Add_Goto (Table.States (121), 57, 51);
+            Add_Goto (Table.States (121), 58, 90);
+            Add_Goto (Table.States (121), 60, 54);
+            Add_Goto (Table.States (121), 61, 55);
+            Add_Goto (Table.States (121), 62, 56);
+            Add_Goto (Table.States (121), 63, 57);
+            Table.States (122).Action_List.Set_Capacity (14);
+            Add_Action (Table.States (122), (19, 20, 25, 26, 27, 28, 30, 33, 
34, 35, 36, 39, 41, 42), (63, 1),  4);
+            Table.States (123).Action_List.Set_Capacity (14);
+            Add_Action (Table.States (123), (19, 20, 25, 26, 27, 28, 30, 33, 
34, 35, 36, 39, 41, 42), (63, 2),  4);
+            Table.States (124).Action_List.Set_Capacity (14);
+            Add_Action (Table.States (124), (19, 20, 25, 26, 27, 28, 30, 33, 
34, 35, 36, 39, 41, 42), (62, 1),  4);
+            Table.States (125).Action_List.Set_Capacity (14);
+            Add_Action (Table.States (125), (19, 20, 25, 26, 27, 28, 30, 33, 
34, 35, 36, 39, 41, 42), (63, 3),  4);
+            Table.States (126).Action_List.Set_Capacity (1);
+            Add_Action (Table.States (126), 24, (57, 0), 134);
+            Table.States (127).Action_List.Set_Capacity (5);
+            Add_Action (Table.States (127), (20, 30, 36, 39, 42), (55, 6),  4);
+            Table.States (128).Action_List.Set_Capacity (2);
+            Add_Action (Table.States (128), 10, (55, 5), 135);
+            Add_Action (Table.States (128), 23, (55, 4), 136);
+            Table.States (129).Action_List.Set_Capacity (2);
+            Add_Action (Table.States (129), 10, (55, 3), 137);
+            Add_Action (Table.States (129), 23, (55, 2), 138);
+            Table.States (130).Action_List.Set_Capacity (1);
+            Add_Action (Table.States (130), 39, (48, 8), 139);
+            Table.States (131).Action_List.Set_Capacity (5);
+            Add_Action (Table.States (131), (20, 30, 36, 39, 42), (50, 1),  3);
+         end Subr_2;
+         procedure Subr_3
+         is begin
+            Table.States (132).Action_List.Set_Capacity (6);
+            Add_Action (Table.States (132), 18, (44, 0), 35);
+            Add_Action (Table.States (132), 30, Reduce, (48, 3),  7);
+            Add_Action (Table.States (132), 39, Reduce, (48, 3),  7);
+            Add_Action (Table.States (132), 40, (44, 1), 38);
+            Add_Action (Table.States (132), 41, (44, 2), 39);
+            Add_Action (Table.States (132), 42, Reduce, (48, 3),  7);
+            Table.States (132).Goto_List.Set_Capacity (1);
+            Add_Goto (Table.States (132), 44, 140);
+            Table.States (133).Action_List.Set_Capacity (6);
+            Add_Action (Table.States (133), 18, (44, 0), 35);
+            Add_Action (Table.States (133), 30, Reduce, (48, 1),  7);
+            Add_Action (Table.States (133), 39, Reduce, (48, 1),  7);
+            Add_Action (Table.States (133), 40, (44, 1), 38);
+            Add_Action (Table.States (133), 41, (44, 2), 39);
+            Add_Action (Table.States (133), 42, Reduce, (48, 1),  7);
+            Table.States (133).Goto_List.Set_Capacity (1);
+            Add_Goto (Table.States (133), 44, 141);
+            Table.States (134).Action_List.Set_Capacity (14);
+            Add_Action (Table.States (134), (19, 20, 25, 26, 27, 28, 30, 33, 
34, 35, 36, 39, 41, 42), (57, 0),  5);
+            Table.States (135).Action_List.Set_Capacity (1);
+            Add_Action (Table.States (135), 39, (50, 0), 95);
+            Table.States (135).Goto_List.Set_Capacity (1);
+            Add_Goto (Table.States (135), 50, 142);
+            Table.States (136).Action_List.Set_Capacity (1);
+            Add_Action (Table.States (136), 39, (55, 4), 143);
+            Table.States (137).Action_List.Set_Capacity (1);
+            Add_Action (Table.States (137), 39, (50, 0), 95);
+            Table.States (137).Goto_List.Set_Capacity (1);
+            Add_Goto (Table.States (137), 50, 144);
+            Table.States (138).Action_List.Set_Capacity (1);
+            Add_Action (Table.States (138), 39, (55, 2), 145);
+            Table.States (139).Action_List.Set_Capacity (3);
+            Add_Action (Table.States (139), (30, 39, 42), (48, 8),  8);
+            Table.States (140).Action_List.Set_Capacity (3);
+            Add_Action (Table.States (140), (30, 39, 42), (48, 2),  8);
+            Table.States (141).Action_List.Set_Capacity (3);
+            Add_Action (Table.States (141), (30, 39, 42), (48, 0),  8);
+            Table.States (142).Action_List.Set_Capacity (5);
+            Add_Action (Table.States (142), 20, (50, 1), 118);
+            Add_Conflict (Table.States (142), 20, (55, 5),  6);
+            Add_Action (Table.States (142), 30, Reduce, (55, 5),  6);
+            Add_Action (Table.States (142), 36, Reduce, (55, 5),  6);
+            Add_Action (Table.States (142), 39, Reduce, (55, 5),  6);
+            Add_Action (Table.States (142), 42, Reduce, (55, 5),  6);
+            Table.States (143).Action_List.Set_Capacity (5);
+            Add_Action (Table.States (143), (20, 30, 36, 39, 42), (55, 4),  6);
+            Table.States (144).Action_List.Set_Capacity (5);
+            Add_Action (Table.States (144), 20, (50, 1), 118);
+            Add_Conflict (Table.States (144), 20, (55, 3),  6);
+            Add_Action (Table.States (144), 30, Reduce, (55, 3),  6);
+            Add_Action (Table.States (144), 36, Reduce, (55, 3),  6);
+            Add_Action (Table.States (144), 39, Reduce, (55, 3),  6);
+            Add_Action (Table.States (144), 42, Reduce, (55, 3),  6);
+            Table.States (145).Action_List.Set_Capacity (5);
+            Add_Action (Table.States (145), (20, 30, 36, 39, 42), (55, 2),  6);
+         end Subr_3;
       begin
          Subr_1;
+         Subr_2;
+         Subr_3;
          Table.Error_Action := new Parse_Action_Node'((Verb => Error, others 
=> <>), null);
       end;
 
-      WisiToken.Parse.LR.Parser_No_Recover.New_Parser
-        (Parser,
-         Trace,
-         Lexer.New_Lexer (Trace.Descriptor),
-         Table,
-         User_Data,
-         Max_Parallel         => 15,
-         Terminate_Same_State => True);
-   end Create_Parser;
+      Table.Max_Parallel := 15;
+      return Table;
+   end Create_Parse_Table;
+
+   function Create_Lexer (Trace : in WisiToken.Trace_Access) return 
WisiToken.Lexer.Handle
+   is begin
+      return Lexer.New_Lexer (Trace, 
Wisitoken_Grammar_Actions.Descriptor'Access);
+   end Create_Lexer;
+
+   function Create_Productions return 
WisiToken.Syntax_Trees.Production_Info_Trees.Vector
+   is begin
+      return Result : WisiToken.Syntax_Trees.Production_Info_Trees.Vector do
+         Result.Set_First_Last (43, 66);
+         Result (48).RHSs.Set_First_Last (0, 15);
+         Result (48).RHSs (0).In_Parse_Action := null;
+         Result (48).RHSs (0).Post_Parse_Action := declaration_0'Access;
+         Result (48).RHSs (1).In_Parse_Action := null;
+         Result (48).RHSs (1).Post_Parse_Action := declaration_1'Access;
+         Result (48).RHSs (2).In_Parse_Action := null;
+         Result (48).RHSs (2).Post_Parse_Action := declaration_2'Access;
+         Result (48).RHSs (3).In_Parse_Action := null;
+         Result (48).RHSs (3).Post_Parse_Action := declaration_3'Access;
+         Result (48).RHSs (4).In_Parse_Action := null;
+         Result (48).RHSs (4).Post_Parse_Action := declaration_4'Access;
+         Result (48).RHSs (5).In_Parse_Action := null;
+         Result (48).RHSs (5).Post_Parse_Action := declaration_5'Access;
+         Result (48).RHSs (6).In_Parse_Action := null;
+         Result (48).RHSs (6).Post_Parse_Action := declaration_6'Access;
+         Result (48).RHSs (7).In_Parse_Action := null;
+         Result (48).RHSs (7).Post_Parse_Action := declaration_7'Access;
+         Result (48).RHSs (8).In_Parse_Action := null;
+         Result (48).RHSs (8).Post_Parse_Action := declaration_8'Access;
+         Result (48).RHSs (9).In_Parse_Action := null;
+         Result (48).RHSs (9).Post_Parse_Action := declaration_9'Access;
+         Result (48).RHSs (10).In_Parse_Action := null;
+         Result (48).RHSs (10).Post_Parse_Action := declaration_10'Access;
+         Result (48).RHSs (11).In_Parse_Action := null;
+         Result (48).RHSs (11).Post_Parse_Action := declaration_11'Access;
+         Result (48).RHSs (12).In_Parse_Action := null;
+         Result (48).RHSs (12).Post_Parse_Action := declaration_12'Access;
+         Result (48).RHSs (13).In_Parse_Action := null;
+         Result (48).RHSs (13).Post_Parse_Action := declaration_13'Access;
+         Result (48).RHSs (14).In_Parse_Action := null;
+         Result (48).RHSs (14).Post_Parse_Action := declaration_14'Access;
+         Result (48).RHSs (15).In_Parse_Action := null;
+         Result (48).RHSs (15).Post_Parse_Action := declaration_15'Access;
+         Result (53).RHSs.Set_First_Last (0, 1);
+         Result (53).RHSs (0).In_Parse_Action := null;
+         Result (53).RHSs (0).Post_Parse_Action := nonterminal_0'Access;
+         Result (53).RHSs (1).In_Parse_Action := null;
+         Result (53).RHSs (1).Post_Parse_Action := nonterminal_1'Access;
+         Result (60).RHSs.Set_First_Last (0, 5);
+         Result (60).RHSs (0).In_Parse_Action := null;
+         Result (60).RHSs (0).Post_Parse_Action := null;
+         Result (60).RHSs (1).In_Parse_Action := null;
+         Result (60).RHSs (1).Post_Parse_Action := rhs_item_1'Access;
+         Result (60).RHSs (2).In_Parse_Action := null;
+         Result (60).RHSs (2).Post_Parse_Action := rhs_item_2'Access;
+         Result (60).RHSs (3).In_Parse_Action := null;
+         Result (60).RHSs (3).Post_Parse_Action := rhs_item_3'Access;
+         Result (60).RHSs (4).In_Parse_Action := null;
+         Result (60).RHSs (4).Post_Parse_Action := rhs_item_4'Access;
+         Result (60).RHSs (5).In_Parse_Action := null;
+         Result (60).RHSs (5).Post_Parse_Action := rhs_item_5'Access;
+         Result (62).RHSs.Set_First_Last (0, 3);
+         Result (62).RHSs (0).In_Parse_Action := null;
+         Result (62).RHSs (0).Post_Parse_Action := null;
+         Result (62).RHSs (1).In_Parse_Action := null;
+         Result (62).RHSs (1).Post_Parse_Action := null;
+         Result (62).RHSs (2).In_Parse_Action := null;
+         Result (62).RHSs (2).Post_Parse_Action := null;
+         Result (62).RHSs (3).In_Parse_Action := null;
+         Result (62).RHSs (3).Post_Parse_Action := rhs_optional_item_3'Access;
+      end return;
+   end Create_Productions;
+
 end Wisitoken_Grammar_Main;
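The generated body above is pure table data: for each parser state, Add_Action records what the parser does on a terminal (or a whole set of terminals): either a shift to a target state, or a reduction by a production (LHS, RHS index) together with its right-hand-side token count; Add_Goto records the state entered after a reduction produces a nonterminal; Add_Conflict attaches a second action to the same terminal so the generalized-LR driver can pursue both readings, with Table.Max_Parallel := 15 bounding how many parallel parsers it may keep alive. The fragment below is a deliberately tiny, self-contained sketch, not WisiToken code (the types, names and two-state table are all invented), showing only how such action entries are consulted at parse time:

   with Ada.Text_IO; use Ada.Text_IO;

   --  Hypothetical toy types, not WisiToken's.  States and terminals are
   --  bare numbers, mirroring the numeric ids in the generated calls above.
   procedure LR_Lookup_Sketch is

      type Action_Kind is (Shift, Reduce, Error);

      type Action_Entry (Kind : Action_Kind := Error) is record
         case Kind is
            when Shift =>
               Target_State : Natural;   --  state pushed after shifting
            when Reduce =>
               LHS        : Natural;     --  nonterminal produced
               RHS_Length : Natural;     --  stack entries popped
            when Error =>
               null;
         end case;
      end record;

      --  Two states, three terminals; the real table is built state by
      --  state by Subr_1 .. Subr_3 above.
      Table : constant array (0 .. 1, 0 .. 2) of Action_Entry :=
        (0 => ((Shift, 1), (Kind => Error), (Kind => Error)),
         1 => ((Kind => Error),
               (Reduce, LHS => 5, RHS_Length => 2),
               (Kind => Error)));

      Current_State : constant Natural := 1;
      Lookahead     : constant Natural := 1;
      A             : constant Action_Entry := Table (Current_State, Lookahead);
   begin
      case A.Kind is
         when Shift =>
            Put_Line ("shift; go to state" & Natural'Image (A.Target_State));
         when Reduce =>
            Put_Line ("reduce" & Natural'Image (A.RHS_Length) &
                        " symbols to nonterminal" & Natural'Image (A.LHS));
         when Error =>
            Put_Line ("syntax error");
      end case;
   end LR_Lookup_Sketch;

On a Reduce, the driver pops RHS_Length entries from its state stack and then follows the goto entry (the Add_Goto data) for the uncovered state and the produced nonterminal; that second lookup is omitted from the sketch.
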
diff --git a/wisitoken_grammar_main.ads b/wisitoken_grammar_main.ads
index 35a5e9e725..90354e3a79 100644
--- a/wisitoken_grammar_main.ads
+++ b/wisitoken_grammar_main.ads
@@ -1,8 +1,8 @@
---  generated parser support file.
+--  generated parser support file. -*- buffer-read-only:t  -*-
 --  command line: wisitoken-bnf-generate.exe  --generate LALR Ada re2c PROCESS 
wisitoken_grammar.wy
 --
 
---  Copyright (C) 2017 - 2019 Free Software Foundation, Inc.
+--  Copyright (C) 2017 - 2022 Free Software Foundation, Inc.
 --
 --  Author: Stephen Leake <stephe-leake@stephe-leake.org>
 --
@@ -22,13 +22,14 @@
 --  along with GNU Emacs.  If not, see <http://www.gnu.org/licenses/>.
 
 with WisiToken.Syntax_Trees;
-with WisiToken.Parse.LR.Parser_No_Recover;
+with WisiToken.Lexer;
+with WisiToken.Parse.LR;
 package Wisitoken_Grammar_Main is
 
-   procedure Create_Parser
-     (Parser                       :    out 
WisiToken.Parse.LR.Parser_No_Recover.Parser;
-      --  no error recovery
-      Trace                        : not null access WisiToken.Trace'Class;
-      User_Data                    : in     
WisiToken.Syntax_Trees.User_Data_Access);
+   function Create_Parse_Table
+     return WisiToken.Parse.LR.Parse_Table_Ptr;
 
+   function Create_Productions return 
WisiToken.Syntax_Trees.Production_Info_Trees.Vector;
+
+   function Create_Lexer (Trace : in WisiToken.Trace_Access) return 
WisiToken.Lexer.Handle;
 end Wisitoken_Grammar_Main;
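The spec change above replaces the single Create_Parser procedure, which built a complete no-recover parser in one call, with three independent constructors; the caller now combines the parse table, lexer and per-production actions itself. Below is a minimal, hypothetical caller: apart from the three Wisitoken_Grammar_Main declarations shown in this hunk, every name in it is invented, and it deliberately stops before assembling a parser object, since that step belongs to the matching WisiToken devel API and is not part of this diff.

   with WisiToken;
   with WisiToken.Lexer;
   with WisiToken.Parse.LR;
   with WisiToken.Syntax_Trees;
   with Wisitoken_Grammar_Main;

   procedure Grammar_Parser_Sketch (Trace : in WisiToken.Trace_Access) is
      --  The three new entry points, each usable on its own.
      Table : constant WisiToken.Parse.LR.Parse_Table_Ptr :=
        Wisitoken_Grammar_Main.Create_Parse_Table;

      Lex : constant WisiToken.Lexer.Handle :=
        Wisitoken_Grammar_Main.Create_Lexer (Trace);

      Productions : constant WisiToken.Syntax_Trees.Production_Info_Trees.Vector :=
        Wisitoken_Grammar_Main.Create_Productions;
   begin
      --  A real caller would now hand Table, Lex and Productions to the
      --  parser constructor of the matching WisiToken version (omitted
      --  here; that constructor is outside this generated package).
      null;
   end Grammar_Parser_Sketch;

The split presumably lets a caller create, wrap or cache each piece independently; nothing in the generated package itself depends on how they are combined.
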
diff --git a/wisitoken_grammar_re2c.c b/wisitoken_grammar_re2c.c
index b58f1d0140..7a76ad1ae7 100644
--- a/wisitoken_grammar_re2c.c
+++ b/wisitoken_grammar_re2c.c
@@ -1,10 +1,10 @@
-/* Generated by re2c 1.3 */
+/* Generated by re2c 2.0.2 */
 #line 1 "../wisitoken_grammar.re2c"
-//  generated parser support file. -*- mode: C -*-
+//  generated parser support file. -*- buffer-read-only:t mode: C -*-
 //  command line: wisitoken-bnf-generate.exe  --generate LALR Ada re2c 
wisitoken_grammar.wy
 //
 
-//  Copyright (C) 2017 - 2019 Free Software Foundation, Inc.
+//  Copyright (C) 2017 - 2022 Free Software Foundation, Inc.
 //
 //  Author: Stephen Leake <stephe-leake@stephe-leake.org>
 //
@@ -61,9 +61,9 @@ wisi_lexer* wisitoken_grammar_new_lexer
    result->byte_token_start  = input;
    result->char_pos          = 1; /* match WisiToken.Buffer_Region */
    result->char_token_start  = 1;
-   result->line              = (*result->cursor == 0x0A) ? 2 : 1;
+   result->line              = 1;
    result->line_token_start  = result->line;
-   result->verbosity         = verbosity;
+   result->verbosity         = 0;
    return result;
 }
 
@@ -79,7 +79,23 @@ wisitoken_grammar_reset_lexer(wisi_lexer* lexer)
 {
    lexer->cursor   = lexer->buffer;
    lexer->char_pos = 1;
-   lexer->line     = (*lexer->cursor == 0x0A) ? 2 : 1;
+   lexer->line     = 1;
+}
+
+void
+wisitoken_grammar_set_verbosity
+   (wisi_lexer* lexer, int verbosity)
+{
+   lexer->verbosity = verbosity;
+}
+
+void
+wisitoken_grammar_set_position
+   (wisi_lexer* lexer, size_t byte_position, size_t char_position, int line)
+{
+   lexer->cursor   = lexer->buffer + byte_position - 1;
+   lexer->char_pos = char_position;
+   lexer->line     = line;
 }
 
 static void debug(wisi_lexer* lexer, int state, unsigned char ch)
@@ -99,20 +115,22 @@ static void debug(wisi_lexer* lexer, int state, unsigned 
char ch)
 
 static void skip(wisi_lexer* lexer)
 {
-   if (lexer->cursor <= lexer->buffer_last)
-      ++lexer->cursor;
    if (lexer->cursor <= lexer->buffer_last)
    {
-      /* UTF-8 encoding: https://en.wikipedia.org/wiki/UTF-8#Description */
-      if (*lexer->cursor == 0x0A && lexer->cursor > lexer->buffer && 
*(lexer->cursor - 1) == 0x0D)
-        {/* second byte of DOS line ending */
-        }
-      else if ((*lexer->cursor & 0x80) == 0x80 && (*lexer->cursor & 0xC0) != 
0xC0)
-        {/* byte 2, 3 or 4 of multi-byte UTF-8 char */
-        }
-      else
-        ++lexer->char_pos;
-      if (*lexer->cursor == 0x0A) ++lexer->line;
+      ++lexer->cursor;
+      if (lexer->cursor <= lexer->buffer_last)
+      {
+         /* UTF-8 encoding: https://en.wikipedia.org/wiki/UTF-8#Description */
+         if (*lexer->cursor == 0x0A && lexer->cursor > lexer->buffer && 
*(lexer->cursor - 1) == 0x0D)
+           {/* second byte of DOS line ending */
+           }
+         else if ((*lexer->cursor & 0x80) == 0x80 && (*lexer->cursor & 0xC0) 
!= 0xC0)
+           {/* byte 2, 3 or 4 of multi-byte UTF-8 char */
+           }
+         else
+           lexer->char_pos++;
+      } else 
+         lexer->char_pos++;
    }
 }
 #define YYSKIP() skip(lexer)
@@ -123,10 +141,17 @@ static void skip(wisi_lexer* lexer)
 
 static void skip_to(wisi_lexer* lexer, char* target)
 {
-  int i;
+  int i, j;
 
+  // Count all new-lines contained in the skip region. Caller has 
+  // skipped the start delimiter; if lexer->cursor is a new-line it
+  // has not yet been counted. Start and end delimiters do not contain 
new-line.
   while (lexer->cursor <= lexer->buffer_last)
     {
+      if (*lexer->cursor == 0x0A)
+      {
+        lexer->line++;
+      }
       if (*lexer->cursor == target[0])
       {
         i = 0;
@@ -138,7 +163,7 @@ static void skip_to(wisi_lexer* lexer, char* target)
 
         if (0 == target[i])
           {
-            for (i = 0; 0 != target[i]; i++)
+            for (j = 0; j < i; j++)
                skip(lexer);
             break;
           }
@@ -154,37 +179,36 @@ int wisitoken_grammar_next_token
    size_t* byte_length,
    size_t* char_position,
    size_t* char_length,
-   int*    line_start)
+   int*    line_start,
+   int*    line_length)
 {
    int status = NO_ERROR;
    *id = -1;
    if (lexer->cursor > lexer->buffer_last)
    {
-      *id            = 36;
-      *byte_position = lexer->buffer_last - lexer->buffer + 1;
+      *id            = 42;
+      *byte_position = lexer->buffer_last - lexer->buffer + 2;
       *byte_length   = 0;
-      *char_position = lexer->char_token_start;
+      *char_position = lexer->char_pos;
       *char_length   = 0;
       *line_start    = lexer->line;
+      *line_length   = 0;
       return status;
    }
 
    lexer->byte_token_start = lexer->cursor;
    lexer->char_token_start = lexer->char_pos;
-   if (*lexer->cursor == 0x0A)
-      lexer->line_token_start = lexer->line-1;
-   else
-      lexer->line_token_start = lexer->line;
+   lexer->line_token_start = lexer->line;
 
    while (*id == -1 && status == 0)
    {
 
-#line 183 "../wisitoken_grammar_re2c.c"
+#line 207 "../wisitoken_grammar_re2c.c"
 {
        YYCTYPE yych;
        unsigned int yyaccept = 0;
        YYDEBUG(0, *YYCURSOR);
-       yych = YYPEEK ();
+       yych = YYPEEK();
        switch (yych) {
        case 0x04:      goto yy4;
        case '\t':
@@ -198,9 +222,7 @@ int wisitoken_grammar_next_token
        case ')':       goto yy17;
        case '*':       goto yy19;
        case '+':       goto yy21;
-       case ',':       goto yy23;
-       case '-':       goto yy25;
-       case '/':       goto yy27;
+       case '-':       goto yy23;
        case '0':
        case '1':
        case '2':
@@ -210,14 +232,15 @@ int wisitoken_grammar_next_token
        case '6':
        case '7':
        case '8':
-       case '9':       goto yy29;
-       case ':':       goto yy32;
-       case ';':       goto yy34;
-       case '<':       goto yy36;
-       case '=':       goto yy38;
-       case '>':       goto yy40;
-       case '?':       goto yy42;
+       case '9':       goto yy25;
+       case ':':       goto yy28;
+       case ';':       goto yy30;
+       case '<':       goto yy32;
+       case '=':       goto yy34;
+       case '>':       goto yy36;
+       case '?':       goto yy38;
        case 'A':
+       case 'a':       goto yy40;
        case 'B':
        case 'C':
        case 'D':
@@ -234,8 +257,6 @@ int wisitoken_grammar_next_token
        case 'O':
        case 'P':
        case 'Q':
-       case 'R':
-       case 'S':
        case 'T':
        case 'U':
        case 'V':
@@ -243,7 +264,6 @@ int wisitoken_grammar_next_token
        case 'X':
        case 'Y':
        case 'Z':
-       case 'a':
        case 'b':
        case 'd':
        case 'f':
@@ -252,24 +272,26 @@ int wisitoken_grammar_next_token
        case 'j':
        case 'l':
        case 'm':
-       case 'o':
        case 'p':
        case 'q':
-       case 'r':
-       case 's':
        case 'u':
        case 'v':
        case 'w':
        case 'x':
        case 'y':
-       case 'z':       goto yy44;
-       case '[':       goto yy47;
-       case ']':       goto yy49;
-       case 'c':       goto yy51;
-       case 'e':       goto yy52;
-       case 'i':       goto yy53;
-       case 'k':       goto yy54;
-       case 'n':       goto yy55;
+       case 'z':       goto yy42;
+       case 'R':
+       case 'r':       goto yy44;
+       case 'S':
+       case 's':       goto yy45;
+       case '[':       goto yy46;
+       case ']':       goto yy48;
+       case 'c':       goto yy50;
+       case 'e':       goto yy51;
+       case 'i':       goto yy52;
+       case 'k':       goto yy53;
+       case 'n':       goto yy54;
+       case 'o':       goto yy55;
        case 't':       goto yy56;
        case '{':       goto yy57;
        case '|':       goto yy59;
@@ -328,54 +350,51 @@ int wisitoken_grammar_next_token
        default:        goto yy2;
        }
 yy2:
-       YYDEBUG(2, YYPEEK ());
-       YYSKIP ();
+       YYDEBUG(2, YYPEEK());
+       YYSKIP();
 yy3:
-       YYDEBUG(3, YYPEEK ());
-#line 262 "../wisitoken_grammar.re2c"
+       YYDEBUG(3, YYPEEK());
+#line 296 "../wisitoken_grammar.re2c"
        {status = ERROR_unrecognized_character; continue;}
-#line 338 "../wisitoken_grammar_re2c.c"
+#line 360 "../wisitoken_grammar_re2c.c"
 yy4:
-       YYDEBUG(4, YYPEEK ());
-       YYSKIP ();
-       YYDEBUG(5, YYPEEK ());
-#line 260 "../wisitoken_grammar.re2c"
-       {*id =  36; continue;}
-#line 345 "../wisitoken_grammar_re2c.c"
+       YYDEBUG(4, YYPEEK());
+       YYSKIP();
+       YYDEBUG(5, YYPEEK());
+#line 294 "../wisitoken_grammar.re2c"
+       {*id = 42; continue;}
+#line 367 "../wisitoken_grammar_re2c.c"
 yy6:
-       YYDEBUG(6, YYPEEK ());
-       YYSKIP ();
-       YYDEBUG(7, YYPEEK ());
-#line 218 "../wisitoken_grammar.re2c"
+       YYDEBUG(6, YYPEEK());
+       YYSKIP();
+       YYDEBUG(7, YYPEEK());
+#line 249 "../wisitoken_grammar.re2c"
        { lexer->byte_token_start = lexer->cursor;
           lexer->char_token_start = lexer->char_pos;
-          if (*lexer->cursor == 0x0A)
-             lexer->line_token_start = lexer->line-1;
-          else
-             lexer->line_token_start = lexer->line;
+          lexer->line_token_start = lexer->line;
           continue; }
-#line 358 "../wisitoken_grammar_re2c.c"
+#line 377 "../wisitoken_grammar_re2c.c"
 yy8:
-       YYDEBUG(8, YYPEEK ());
-       YYSKIP ();
-       YYDEBUG(9, YYPEEK ());
-#line 225 "../wisitoken_grammar.re2c"
-       {*id =  1; continue;}
-#line 365 "../wisitoken_grammar_re2c.c"
+       YYDEBUG(8, YYPEEK());
+       YYSKIP();
+       YYDEBUG(9, YYPEEK());
+#line 253 "../wisitoken_grammar.re2c"
+       {*id = 1; lexer->line++; continue;}
+#line 384 "../wisitoken_grammar_re2c.c"
 yy10:
-       YYDEBUG(10, YYPEEK ());
-       YYSKIP ();
-       yych = YYPEEK ();
+       YYDEBUG(10, YYPEEK());
+       YYSKIP();
+       yych = YYPEEK();
        switch (yych) {
        case '\n':      goto yy8;
        default:        goto yy3;
        }
 yy11:
-       YYDEBUG(11, YYPEEK ());
+       YYDEBUG(11, YYPEEK());
        yyaccept = 0;
-       YYSKIP ();
-       YYBACKUP ();
-       yych = YYPEEK ();
+       YYSKIP();
+       YYBACKUP();
+       yych = YYPEEK();
        switch (yych) {
        case ' ':
        case '!':
@@ -527,9 +546,9 @@ yy11:
        default:        goto yy3;
        }
 yy12:
-       YYDEBUG(12, YYPEEK ());
-       YYSKIP ();
-       yych = YYPEEK ();
+       YYDEBUG(12, YYPEEK());
+       YYSKIP();
+       yych = YYPEEK();
        switch (yych) {
        case '(':       goto yy80;
        case '[':       goto yy82;
@@ -537,16 +556,16 @@ yy12:
        default:        goto yy13;
        }
 yy13:
-       YYDEBUG(13, YYPEEK ());
-#line 247 "../wisitoken_grammar.re2c"
-       {*id =  23; continue;}
-#line 544 "../wisitoken_grammar_re2c.c"
+       YYDEBUG(13, YYPEEK());
+#line 282 "../wisitoken_grammar.re2c"
+       {*id = 30; continue;}
+#line 563 "../wisitoken_grammar_re2c.c"
 yy14:
-       YYDEBUG(14, YYPEEK ());
+       YYDEBUG(14, YYPEEK());
        yyaccept = 0;
-       YYSKIP ();
-       YYBACKUP ();
-       yych = YYPEEK ();
+       YYSKIP();
+       YYBACKUP();
+       yych = YYPEEK();
        switch (yych) {
        case ' ':
        case '!':
@@ -698,44 +717,37 @@ yy14:
        default:        goto yy3;
        }
 yy15:
-       YYDEBUG(15, YYPEEK ());
-       YYSKIP ();
-       YYDEBUG(16, YYPEEK ());
-#line 244 "../wisitoken_grammar.re2c"
-       {*id =  20; continue;}
-#line 707 "../wisitoken_grammar_re2c.c"
+       YYDEBUG(15, YYPEEK());
+       YYSKIP();
+       YYDEBUG(16, YYPEEK());
+#line 279 "../wisitoken_grammar.re2c"
+       {*id = 27; continue;}
+#line 726 "../wisitoken_grammar_re2c.c"
 yy17:
-       YYDEBUG(17, YYPEEK ());
-       YYSKIP ();
-       YYDEBUG(18, YYPEEK ());
-#line 252 "../wisitoken_grammar.re2c"
-       {*id =  28; continue;}
-#line 714 "../wisitoken_grammar_re2c.c"
+       YYDEBUG(17, YYPEEK());
+       YYSKIP();
+       YYDEBUG(18, YYPEEK());
+#line 287 "../wisitoken_grammar.re2c"
+       {*id = 35; continue;}
+#line 733 "../wisitoken_grammar_re2c.c"
 yy19:
-       YYDEBUG(19, YYPEEK ());
-       YYSKIP ();
-       YYDEBUG(20, YYPEEK ());
-#line 255 "../wisitoken_grammar.re2c"
-       {*id =  31; continue;}
-#line 721 "../wisitoken_grammar_re2c.c"
+       YYDEBUG(19, YYPEEK());
+       YYSKIP();
+       YYDEBUG(20, YYPEEK());
+#line 289 "../wisitoken_grammar.re2c"
+       {*id = 37; continue;}
+#line 740 "../wisitoken_grammar_re2c.c"
 yy21:
-       YYDEBUG(21, YYPEEK ());
-       YYSKIP ();
-       YYDEBUG(22, YYPEEK ());
-#line 248 "../wisitoken_grammar.re2c"
-       {*id =  24; continue;}
-#line 728 "../wisitoken_grammar_re2c.c"
+       YYDEBUG(21, YYPEEK());
+       YYSKIP();
+       YYDEBUG(22, YYPEEK());
+#line 283 "../wisitoken_grammar.re2c"
+       {*id = 31; continue;}
+#line 747 "../wisitoken_grammar_re2c.c"
 yy23:
-       YYDEBUG(23, YYPEEK ());
-       YYSKIP ();
-       YYDEBUG(24, YYPEEK ());
-#line 239 "../wisitoken_grammar.re2c"
-       {*id =  15; continue;}
-#line 735 "../wisitoken_grammar_re2c.c"
-yy25:
-       YYDEBUG(25, YYPEEK ());
-       YYSKIP ();
-       yych = YYPEEK ();
+       YYDEBUG(23, YYPEEK());
+       YYSKIP();
+       yych = YYPEEK();
        switch (yych) {
        case '0':
        case '1':
@@ -747,26 +759,19 @@ yy25:
        case '7':
        case '8':
        case '9':
-       case '_':       goto yy29;
-       default:        goto yy26;
+       case '_':       goto yy25;
+       default:        goto yy24;
        }
-yy26:
-       YYDEBUG(26, YYPEEK ());
-#line 246 "../wisitoken_grammar.re2c"
-       {*id =  22; continue;}
-#line 758 "../wisitoken_grammar_re2c.c"
-yy27:
-       YYDEBUG(27, YYPEEK ());
-       YYSKIP ();
-       YYDEBUG(28, YYPEEK ());
-#line 254 "../wisitoken_grammar.re2c"
-       {*id =  30; continue;}
-#line 765 "../wisitoken_grammar_re2c.c"
-yy29:
-       YYDEBUG(29, YYPEEK ());
-       YYSKIP ();
-       yych = YYPEEK ();
-       YYDEBUG(30, YYPEEK ());
+yy24:
+       YYDEBUG(24, YYPEEK());
+#line 281 "../wisitoken_grammar.re2c"
+       {*id = 29; continue;}
+#line 770 "../wisitoken_grammar_re2c.c"
+yy25:
+       YYDEBUG(25, YYPEEK());
+       YYSKIP();
+       yych = YYPEEK();
+       YYDEBUG(26, YYPEEK());
        switch (yych) {
        case '0':
        case '1':
@@ -778,78 +783,96 @@ yy29:
        case '7':
        case '8':
        case '9':
-       case '_':       goto yy29;
-       default:        goto yy31;
+       case '_':       goto yy25;
+       default:        goto yy27;
        }
-yy31:
-       YYDEBUG(31, YYPEEK ());
-#line 256 "../wisitoken_grammar.re2c"
-       {*id =  32; continue;}
-#line 789 "../wisitoken_grammar_re2c.c"
-yy32:
-       YYDEBUG(32, YYPEEK ());
+yy27:
+       YYDEBUG(27, YYPEEK());
+#line 290 "../wisitoken_grammar.re2c"
+       {*id = 38; continue;}
+#line 794 "../wisitoken_grammar_re2c.c"
+yy28:
+       YYDEBUG(28, YYPEEK());
        yyaccept = 1;
-       YYSKIP ();
-       YYBACKUP ();
-       yych = YYPEEK ();
+       YYSKIP();
+       YYBACKUP();
+       yych = YYPEEK();
        switch (yych) {
        case ':':       goto yy96;
-       default:        goto yy33;
+       default:        goto yy29;
        }
-yy33:
-       YYDEBUG(33, YYPEEK ());
-#line 237 "../wisitoken_grammar.re2c"
-       {*id =  13; continue;}
-#line 804 "../wisitoken_grammar_re2c.c"
-yy34:
-       YYDEBUG(34, YYPEEK ());
-       YYSKIP ();
-       yych = YYPEEK ();
+yy29:
+       YYDEBUG(29, YYPEEK());
+#line 273 "../wisitoken_grammar.re2c"
+       {*id = 21; continue;}
+#line 809 "../wisitoken_grammar_re2c.c"
+yy30:
+       YYDEBUG(30, YYPEEK());
+       yyaccept = 2;
+       YYSKIP();
+       YYBACKUP();
+       yych = YYPEEK();
        switch (yych) {
        case ';':       goto yy97;
-       default:        goto yy35;
+       default:        goto yy31;
        }
-yy35:
-       YYDEBUG(35, YYPEEK ());
-#line 253 "../wisitoken_grammar.re2c"
-       {*id =  29; continue;}
-#line 817 "../wisitoken_grammar_re2c.c"
-yy36:
-       YYDEBUG(36, YYPEEK ());
-       YYSKIP ();
-       YYDEBUG(37, YYPEEK ());
-#line 245 "../wisitoken_grammar.re2c"
-       {*id =  21; continue;}
+yy31:
+       YYDEBUG(31, YYPEEK());
+#line 288 "../wisitoken_grammar.re2c"
+       {*id = 36; continue;}
 #line 824 "../wisitoken_grammar_re2c.c"
-yy38:
-       YYDEBUG(38, YYPEEK ());
-       YYSKIP ();
-       YYDEBUG(39, YYPEEK ());
-#line 240 "../wisitoken_grammar.re2c"
-       {*id =  16; continue;}
+yy32:
+       YYDEBUG(32, YYPEEK());
+       YYSKIP();
+       YYDEBUG(33, YYPEEK());
+#line 280 "../wisitoken_grammar.re2c"
+       {*id = 28; continue;}
 #line 831 "../wisitoken_grammar_re2c.c"
-yy40:
-       YYDEBUG(40, YYPEEK ());
-       YYSKIP ();
-       YYDEBUG(41, YYPEEK ());
-#line 241 "../wisitoken_grammar.re2c"
-       {*id =  17; continue;}
+yy34:
+       YYDEBUG(34, YYPEEK());
+       YYSKIP();
+       YYDEBUG(35, YYPEEK());
+#line 275 "../wisitoken_grammar.re2c"
+       {*id = 23; continue;}
 #line 838 "../wisitoken_grammar_re2c.c"
-yy42:
-       YYDEBUG(42, YYPEEK ());
-       YYSKIP ();
-       YYDEBUG(43, YYPEEK ());
-#line 249 "../wisitoken_grammar.re2c"
-       {*id =  25; continue;}
+yy36:
+       YYDEBUG(36, YYPEEK());
+       YYSKIP();
+       YYDEBUG(37, YYPEEK());
+#line 276 "../wisitoken_grammar.re2c"
+       {*id = 24; continue;}
 #line 845 "../wisitoken_grammar_re2c.c"
-yy44:
-       YYDEBUG(44, YYPEEK ());
-       yyaccept = 2;
-       YYSKIP ();
-       YYBACKUP ();
-       yych = YYPEEK ();
-yy45:
-       YYDEBUG(45, YYPEEK ());
+yy38:
+       YYDEBUG(38, YYPEEK());
+       YYSKIP();
+       YYDEBUG(39, YYPEEK());
+#line 284 "../wisitoken_grammar.re2c"
+       {*id = 32; continue;}
+#line 852 "../wisitoken_grammar_re2c.c"
+yy40:
+       YYDEBUG(40, YYPEEK());
+       yyaccept = 3;
+       YYSKIP();
+       YYBACKUP();
+       yych = YYPEEK();
+       switch (yych) {
+       case 'C':
+       case 'c':       goto yy99;
+       default:        goto yy43;
+       }
+yy41:
+       YYDEBUG(41, YYPEEK());
+#line 291 "../wisitoken_grammar.re2c"
+       {*id = 39; continue;}
+#line 868 "../wisitoken_grammar_re2c.c"
+yy42:
+       YYDEBUG(42, YYPEEK());
+       yyaccept = 3;
+       YYSKIP();
+       YYBACKUP();
+       yych = YYPEEK();
+yy43:
+       YYDEBUG(43, YYPEEK());
        switch (yych) {
        case '-':
        case '0':
@@ -914,7 +937,7 @@ yy45:
        case 'w':
        case 'x':
        case 'y':
-       case 'z':       goto yy44;
+       case 'z':       goto yy42;
        case 0xC2:
        case 0xC3:
        case 0xC4:
@@ -966,112 +989,141 @@ yy45:
        case 0xF2:
        case 0xF3:      goto yy104;
        case 0xF4:      goto yy105;
-       default:        goto yy46;
+       default:        goto yy41;
+       }
+yy44:
+       YYDEBUG(44, YYPEEK());
+       yyaccept = 3;
+       YYSKIP();
+       YYBACKUP();
+       yych = YYPEEK();
+       switch (yych) {
+       case 'E':
+       case 'e':       goto yy106;
+       default:        goto yy43;
+       }
+yy45:
+       YYDEBUG(45, YYPEEK());
+       yyaccept = 3;
+       YYSKIP();
+       YYBACKUP();
+       yych = YYPEEK();
+       switch (yych) {
+       case 'H':
+       case 'h':       goto yy107;
+       default:        goto yy43;
        }
 yy46:
-       YYDEBUG(46, YYPEEK ());
-#line 257 "../wisitoken_grammar.re2c"
-       {*id =  33; continue;}
-#line 976 "../wisitoken_grammar_re2c.c"
-yy47:
-       YYDEBUG(47, YYPEEK ());
-       YYSKIP ();
-       YYDEBUG(48, YYPEEK ());
-#line 243 "../wisitoken_grammar.re2c"
-       {*id =  19; continue;}
-#line 983 "../wisitoken_grammar_re2c.c"
-yy49:
-       YYDEBUG(49, YYPEEK ());
-       YYSKIP ();
-       YYDEBUG(50, YYPEEK ());
-#line 251 "../wisitoken_grammar.re2c"
-       {*id =  27; continue;}
-#line 990 "../wisitoken_grammar_re2c.c"
+       YYDEBUG(46, YYPEEK());
+       YYSKIP();
+       YYDEBUG(47, YYPEEK());
+#line 278 "../wisitoken_grammar.re2c"
+       {*id = 26; continue;}
+#line 1023 "../wisitoken_grammar_re2c.c"
+yy48:
+       YYDEBUG(48, YYPEEK());
+       YYSKIP();
+       YYDEBUG(49, YYPEEK());
+#line 286 "../wisitoken_grammar.re2c"
+       {*id = 34; continue;}
+#line 1030 "../wisitoken_grammar_re2c.c"
+yy50:
+       YYDEBUG(50, YYPEEK());
+       yyaccept = 3;
+       YYSKIP();
+       YYBACKUP();
+       yych = YYPEEK();
+       switch (yych) {
+       case 'o':       goto yy108;
+       default:        goto yy43;
+       }
 yy51:
-       YYDEBUG(51, YYPEEK ());
-       yyaccept = 2;
-       YYSKIP ();
-       YYBACKUP ();
-       yych = YYPEEK ();
+       YYDEBUG(51, YYPEEK());
+       yyaccept = 3;
+       YYSKIP();
+       YYBACKUP();
+       yych = YYPEEK();
        switch (yych) {
-       case 'o':       goto yy106;
-       default:        goto yy45;
+       case 'l':       goto yy109;
+       case 'n':       goto yy110;
+       default:        goto yy43;
        }
 yy52:
-       YYDEBUG(52, YYPEEK ());
-       yyaccept = 2;
-       YYSKIP ();
-       YYBACKUP ();
-       yych = YYPEEK ();
+       YYDEBUG(52, YYPEEK());
+       yyaccept = 3;
+       YYSKIP();
+       YYBACKUP();
+       yych = YYPEEK();
        switch (yych) {
-       case 'n':       goto yy107;
-       default:        goto yy45;
+       case 'f':       goto yy111;
+       case 'n':       goto yy113;
+       default:        goto yy43;
        }
 yy53:
-       YYDEBUG(53, YYPEEK ());
-       yyaccept = 2;
-       YYSKIP ();
-       YYBACKUP ();
-       yych = YYPEEK ();
+       YYDEBUG(53, YYPEEK());
+       yyaccept = 3;
+       YYSKIP();
+       YYBACKUP();
+       yych = YYPEEK();
        switch (yych) {
-       case 'f':       goto yy108;
-       default:        goto yy45;
+       case 'e':       goto yy115;
+       default:        goto yy43;
        }
 yy54:
-       YYDEBUG(54, YYPEEK ());
-       yyaccept = 2;
-       YYSKIP ();
-       YYBACKUP ();
-       yych = YYPEEK ();
+       YYDEBUG(54, YYPEEK());
+       yyaccept = 3;
+       YYSKIP();
+       YYBACKUP();
+       yych = YYPEEK();
        switch (yych) {
-       case 'e':       goto yy110;
-       default:        goto yy45;
+       case 'o':       goto yy116;
+       default:        goto yy43;
        }
 yy55:
-       YYDEBUG(55, YYPEEK ());
-       yyaccept = 2;
-       YYSKIP ();
-       YYBACKUP ();
-       yych = YYPEEK ();
+       YYDEBUG(55, YYPEEK());
+       yyaccept = 3;
+       YYSKIP();
+       YYBACKUP();
+       yych = YYPEEK();
        switch (yych) {
-       case 'o':       goto yy111;
-       default:        goto yy45;
+       case 'n':       goto yy117;
+       default:        goto yy43;
        }
 yy56:
-       YYDEBUG(56, YYPEEK ());
-       yyaccept = 2;
-       YYSKIP ();
-       YYBACKUP ();
-       yych = YYPEEK ();
+       YYDEBUG(56, YYPEEK());
+       yyaccept = 3;
+       YYSKIP();
+       YYBACKUP();
+       yych = YYPEEK();
        switch (yych) {
-       case 'o':       goto yy112;
-       default:        goto yy45;
+       case 'o':       goto yy119;
+       default:        goto yy43;
        }
 yy57:
-       YYDEBUG(57, YYPEEK ());
-       YYSKIP ();
-       YYDEBUG(58, YYPEEK ());
-#line 242 "../wisitoken_grammar.re2c"
-       {*id =  18; continue;}
-#line 1057 "../wisitoken_grammar_re2c.c"
+       YYDEBUG(57, YYPEEK());
+       YYSKIP();
+       YYDEBUG(58, YYPEEK());
+#line 277 "../wisitoken_grammar.re2c"
+       {*id = 25; continue;}
+#line 1109 "../wisitoken_grammar_re2c.c"
 yy59:
-       YYDEBUG(59, YYPEEK ());
-       YYSKIP ();
-       YYDEBUG(60, YYPEEK ());
-#line 236 "../wisitoken_grammar.re2c"
-       {*id =  12; continue;}
-#line 1064 "../wisitoken_grammar_re2c.c"
+       YYDEBUG(59, YYPEEK());
+       YYSKIP();
+       YYDEBUG(60, YYPEEK());
+#line 272 "../wisitoken_grammar.re2c"
+       {*id = 20; continue;}
+#line 1116 "../wisitoken_grammar_re2c.c"
 yy61:
-       YYDEBUG(61, YYPEEK ());
-       YYSKIP ();
-       YYDEBUG(62, YYPEEK ());
-#line 250 "../wisitoken_grammar.re2c"
-       {*id =  26; continue;}
-#line 1071 "../wisitoken_grammar_re2c.c"
+       YYDEBUG(61, YYPEEK());
+       YYSKIP();
+       YYDEBUG(62, YYPEEK());
+#line 285 "../wisitoken_grammar.re2c"
+       {*id = 33; continue;}
+#line 1123 "../wisitoken_grammar_re2c.c"
 yy63:
-       YYDEBUG(63, YYPEEK ());
-       YYSKIP ();
-       yych = YYPEEK ();
+       YYDEBUG(63, YYPEEK());
+       YYSKIP();
+       yych = YYPEEK();
        switch (yych) {
        case 0x80:
        case 0x81:
@@ -1136,15 +1188,15 @@ yy63:
        case 0xBC:
        case 0xBD:
        case 0xBE:
-       case 0xBF:      goto yy44;
+       case 0xBF:      goto yy42;
        default:        goto yy3;
        }
 yy64:
-       YYDEBUG(64, YYPEEK ());
+       YYDEBUG(64, YYPEEK());
        yyaccept = 0;
-       YYSKIP ();
-       YYBACKUP ();
-       yych = YYPEEK ();
+       YYSKIP();
+       YYBACKUP();
+       yych = YYPEEK();
        switch (yych) {
        case 0xA0:
        case 0xA1:
@@ -1181,11 +1233,11 @@ yy64:
        default:        goto yy3;
        }
 yy65:
-       YYDEBUG(65, YYPEEK ());
+       YYDEBUG(65, YYPEEK());
        yyaccept = 0;
-       YYSKIP ();
-       YYBACKUP ();
-       yych = YYPEEK ();
+       YYSKIP();
+       YYBACKUP();
+       yych = YYPEEK();
        switch (yych) {
        case 0x80:
        case 0x81:
@@ -1254,11 +1306,11 @@ yy65:
        default:        goto yy3;
        }
 yy66:
-       YYDEBUG(66, YYPEEK ());
+       YYDEBUG(66, YYPEEK());
        yyaccept = 0;
-       YYSKIP ();
-       YYBACKUP ();
-       yych = YYPEEK ();
+       YYSKIP();
+       YYBACKUP();
+       yych = YYPEEK();
        switch (yych) {
        case 0x90:
        case 0x91:
@@ -1311,11 +1363,11 @@ yy66:
        default:        goto yy3;
        }
 yy67:
-       YYDEBUG(67, YYPEEK ());
+       YYDEBUG(67, YYPEEK());
        yyaccept = 0;
-       YYSKIP ();
-       YYBACKUP ();
-       yych = YYPEEK ();
+       YYSKIP();
+       YYBACKUP();
+       yych = YYPEEK();
        switch (yych) {
        case 0x80:
        case 0x81:
@@ -1384,11 +1436,11 @@ yy67:
        default:        goto yy3;
        }
 yy68:
-       YYDEBUG(68, YYPEEK ());
+       YYDEBUG(68, YYPEEK());
        yyaccept = 0;
-       YYSKIP ();
-       YYBACKUP ();
-       yych = YYPEEK ();
+       YYSKIP();
+       YYBACKUP();
+       yych = YYPEEK();
        switch (yych) {
        case 0x80:
        case 0x81:
@@ -1409,11 +1461,11 @@ yy68:
        default:        goto yy3;
        }
 yy69:
-       YYDEBUG(69, YYPEEK ());
-       YYSKIP ();
-       yych = YYPEEK ();
+       YYDEBUG(69, YYPEEK());
+       YYSKIP();
+       yych = YYPEEK();
 yy70:
-       YYDEBUG(70, YYPEEK ());
+       YYDEBUG(70, YYPEEK());
        switch (yych) {
        case ' ':
        case '!':
@@ -1565,41 +1617,69 @@ yy70:
        default:        goto yy71;
        }
 yy71:
-       YYDEBUG(71, YYPEEK ());
-       YYRESTORE ();
+       YYDEBUG(71, YYPEEK());
+       YYRESTORE();
        switch (yyaccept) {
-       case 0:         goto yy3;
-       case 1:         goto yy33;
-       case 2:         goto yy46;
-       case 3:         goto yy73;
-       case 4:         goto yy89;
-       case 5:         goto yy99;
-       case 6:         goto yy109;
-       case 7:         goto yy123;
-       case 8:         goto yy128;
-       case 9:         goto yy135;
-       case 10:        goto yy139;
-       default:        goto yy145;
+       case 0:
+               goto yy3;
+       case 1:
+               goto yy29;
+       case 2:
+               goto yy31;
+       case 3:
+               goto yy41;
+       case 4:
+               goto yy73;
+       case 5:
+               goto yy89;
+       case 6:
+               goto yy112;
+       case 7:
+               goto yy114;
+       case 8:
+               goto yy118;
+       case 9:
+               goto yy137;
+       case 10:
+               goto yy145;
+       case 11:
+               goto yy154;
+       case 12:
+               goto yy157;
+       case 13:
+               goto yy161;
+       case 14:
+               goto yy164;
+       case 15:
+               goto yy171;
+       case 16:
+               goto yy175;
+       case 17:
+               goto yy178;
+       case 18:
+               goto yy185;
+       default:
+               goto yy194;
        }
 yy72:
-       YYDEBUG(72, YYPEEK ());
-       yyaccept = 3;
-       YYSKIP ();
-       YYBACKUP ();
-       yych = YYPEEK ();
+       YYDEBUG(72, YYPEEK());
+       yyaccept = 4;
+       YYSKIP();
+       YYBACKUP();
+       yych = YYPEEK();
        switch (yych) {
        case '"':       goto yy69;
        default:        goto yy73;
        }
 yy73:
-       YYDEBUG(73, YYPEEK ());
-#line 258 "../wisitoken_grammar.re2c"
-       {*id =  34; continue;}
-#line 1599 "../wisitoken_grammar_re2c.c"
+       YYDEBUG(73, YYPEEK());
+#line 292 "../wisitoken_grammar.re2c"
+       {*id = 40; continue;}
+#line 1679 "../wisitoken_grammar_re2c.c"
 yy74:
-       YYDEBUG(74, YYPEEK ());
-       YYSKIP ();
-       yych = YYPEEK ();
+       YYDEBUG(74, YYPEEK());
+       YYSKIP();
+       yych = YYPEEK();
        switch (yych) {
        case 0x80:
        case 0x81:
@@ -1668,9 +1748,9 @@ yy74:
        default:        goto yy71;
        }
 yy75:
-       YYDEBUG(75, YYPEEK ());
-       YYSKIP ();
-       yych = YYPEEK ();
+       YYDEBUG(75, YYPEEK());
+       YYSKIP();
+       yych = YYPEEK();
        switch (yych) {
        case 0xA0:
        case 0xA1:
@@ -1707,9 +1787,9 @@ yy75:
        default:        goto yy71;
        }
 yy76:
-       YYDEBUG(76, YYPEEK ());
-       YYSKIP ();
-       yych = YYPEEK ();
+       YYDEBUG(76, YYPEEK());
+       YYSKIP();
+       yych = YYPEEK();
        switch (yych) {
        case 0x80:
        case 0x81:
@@ -1778,9 +1858,9 @@ yy76:
        default:        goto yy71;
        }
 yy77:
-       YYDEBUG(77, YYPEEK ());
-       YYSKIP ();
-       yych = YYPEEK ();
+       YYDEBUG(77, YYPEEK());
+       YYSKIP();
+       yych = YYPEEK();
        switch (yych) {
        case 0x90:
        case 0x91:
@@ -1833,9 +1913,9 @@ yy77:
        default:        goto yy71;
        }
 yy78:
-       YYDEBUG(78, YYPEEK ());
-       YYSKIP ();
-       yych = YYPEEK ();
+       YYDEBUG(78, YYPEEK());
+       YYSKIP();
+       yych = YYPEEK();
        switch (yych) {
        case 0x80:
        case 0x81:
@@ -1904,9 +1984,9 @@ yy78:
        default:        goto yy71;
        }
 yy79:
-       YYDEBUG(79, YYPEEK ());
-       YYSKIP ();
-       yych = YYPEEK ();
+       YYDEBUG(79, YYPEEK());
+       YYSKIP();
+       yych = YYPEEK();
        switch (yych) {
        case 0x80:
        case 0x81:
@@ -1927,32 +2007,32 @@ yy79:
        default:        goto yy71;
        }
 yy80:
-       YYDEBUG(80, YYPEEK ());
-       YYSKIP ();
-       YYDEBUG(81, YYPEEK ());
-#line 235 "../wisitoken_grammar.re2c"
-       {*id =  11; skip_to(lexer, ")%"); continue;}
-#line 1936 "../wisitoken_grammar_re2c.c"
+       YYDEBUG(80, YYPEEK());
+       YYSKIP();
+       YYDEBUG(81, YYPEEK());
+#line 271 "../wisitoken_grammar.re2c"
+       {*id = 19; skip_to(lexer, ")%"); continue;}
+#line 2016 "../wisitoken_grammar_re2c.c"
 yy82:
-       YYDEBUG(82, YYPEEK ());
-       YYSKIP ();
-       YYDEBUG(83, YYPEEK ());
-#line 234 "../wisitoken_grammar.re2c"
-       {*id =  10; skip_to(lexer, "]%"); continue;}
-#line 1943 "../wisitoken_grammar_re2c.c"
+       YYDEBUG(82, YYPEEK());
+       YYSKIP();
+       YYDEBUG(83, YYPEEK());
+#line 270 "../wisitoken_grammar.re2c"
+       {*id = 18; skip_to(lexer, "]%"); continue;}
+#line 2023 "../wisitoken_grammar_re2c.c"
 yy84:
-       YYDEBUG(84, YYPEEK ());
-       YYSKIP ();
-       YYDEBUG(85, YYPEEK ());
-#line 233 "../wisitoken_grammar.re2c"
-       {*id =  9; skip_to(lexer, "}%"); continue;}
-#line 1950 "../wisitoken_grammar_re2c.c"
+       YYDEBUG(84, YYPEEK());
+       YYSKIP();
+       YYDEBUG(85, YYPEEK());
+#line 269 "../wisitoken_grammar.re2c"
+       {*id = 17; skip_to(lexer, "}%"); continue;}
+#line 2030 "../wisitoken_grammar_re2c.c"
 yy86:
-       YYDEBUG(86, YYPEEK ());
-       YYSKIP ();
-       yych = YYPEEK ();
+       YYDEBUG(86, YYPEEK());
+       YYSKIP();
+       yych = YYPEEK();
 yy87:
-       YYDEBUG(87, YYPEEK ());
+       YYDEBUG(87, YYPEEK());
        switch (yych) {
        case ' ':
        case '!':
@@ -2104,24 +2184,24 @@ yy87:
        default:        goto yy71;
        }
 yy88:
-       YYDEBUG(88, YYPEEK ());
-       yyaccept = 4;
-       YYSKIP ();
-       YYBACKUP ();
-       yych = YYPEEK ();
+       YYDEBUG(88, YYPEEK());
+       yyaccept = 5;
+       YYSKIP();
+       YYBACKUP();
+       yych = YYPEEK();
        switch (yych) {
        case '\'':      goto yy86;
        default:        goto yy89;
        }
 yy89:
-       YYDEBUG(89, YYPEEK ());
-#line 259 "../wisitoken_grammar.re2c"
-       {*id =  35; continue;}
-#line 2121 "../wisitoken_grammar_re2c.c"
+       YYDEBUG(89, YYPEEK());
+#line 293 "../wisitoken_grammar.re2c"
+       {*id = 41; continue;}
+#line 2201 "../wisitoken_grammar_re2c.c"
 yy90:
-       YYDEBUG(90, YYPEEK ());
-       YYSKIP ();
-       yych = YYPEEK ();
+       YYDEBUG(90, YYPEEK());
+       YYSKIP();
+       yych = YYPEEK();
        switch (yych) {
        case 0x80:
        case 0x81:
@@ -2190,9 +2270,9 @@ yy90:
        default:        goto yy71;
        }
 yy91:
-       YYDEBUG(91, YYPEEK ());
-       YYSKIP ();
-       yych = YYPEEK ();
+       YYDEBUG(91, YYPEEK());
+       YYSKIP();
+       yych = YYPEEK();
        switch (yych) {
        case 0xA0:
        case 0xA1:
@@ -2229,9 +2309,9 @@ yy91:
        default:        goto yy71;
        }
 yy92:
-       YYDEBUG(92, YYPEEK ());
-       YYSKIP ();
-       yych = YYPEEK ();
+       YYDEBUG(92, YYPEEK());
+       YYSKIP();
+       yych = YYPEEK();
        switch (yych) {
        case 0x80:
        case 0x81:
@@ -2300,9 +2380,9 @@ yy92:
        default:        goto yy71;
        }
 yy93:
-       YYDEBUG(93, YYPEEK ());
-       YYSKIP ();
-       yych = YYPEEK ();
+       YYDEBUG(93, YYPEEK());
+       YYSKIP();
+       yych = YYPEEK();
        switch (yych) {
        case 0x90:
        case 0x91:
@@ -2355,9 +2435,9 @@ yy93:
        default:        goto yy71;
        }
 yy94:
-       YYDEBUG(94, YYPEEK ());
-       YYSKIP ();
-       yych = YYPEEK ();
+       YYDEBUG(94, YYPEEK());
+       YYSKIP();
+       yych = YYPEEK();
        switch (yych) {
        case 0x80:
        case 0x81:
@@ -2426,9 +2506,9 @@ yy94:
        default:        goto yy71;
        }
 yy95:
-       YYDEBUG(95, YYPEEK ());
-       YYSKIP ();
-       yych = YYPEEK ();
+       YYDEBUG(95, YYPEEK());
+       YYSKIP();
+       yych = YYPEEK();
        switch (yych) {
        case 0x80:
        case 0x81:
@@ -2449,20 +2529,18 @@ yy95:
        default:        goto yy71;
        }
 yy96:
-       YYDEBUG(96, YYPEEK ());
-       YYSKIP ();
-       yych = YYPEEK ();
+       YYDEBUG(96, YYPEEK());
+       YYSKIP();
+       yych = YYPEEK();
        switch (yych) {
-       case '=':       goto yy113;
+       case '=':       goto yy120;
        default:        goto yy71;
        }
 yy97:
-       YYDEBUG(97, YYPEEK ());
-       yyaccept = 5;
-       YYSKIP ();
-       YYBACKUP ();
-       yych = YYPEEK ();
-       YYDEBUG(98, YYPEEK ());
+       YYDEBUG(97, YYPEEK());
+       YYSKIP();
+       yych = YYPEEK();
+       YYDEBUG(98, YYPEEK());
        switch (yych) {
        case 0x00:
        case 0x01:
@@ -2590,6 +2668,8 @@ yy97:
        case '}':
        case '~':
        case 0x7F:      goto yy97;
+       case 0x04:
+       case '\n':      goto yy122;
        case 0xC2:
        case 0xC3:
        case 0xC4:
@@ -2619,8 +2699,8 @@ yy97:
        case 0xDC:
        case 0xDD:
        case 0xDE:
-       case 0xDF:      goto yy115;
-       case 0xE0:      goto yy116;
+       case 0xDF:      goto yy124;
+       case 0xE0:      goto yy125;
        case 0xE1:
        case 0xE2:
        case 0xE3:
@@ -2635,23 +2715,29 @@ yy97:
        case 0xEC:
        case 0xED:
        case 0xEE:
-       case 0xEF:      goto yy117;
-       case 0xF0:      goto yy118;
+       case 0xEF:      goto yy126;
+       case 0xF0:      goto yy127;
        case 0xF1:
        case 0xF2:
-       case 0xF3:      goto yy119;
-       case 0xF4:      goto yy120;
-       default:        goto yy99;
+       case 0xF3:      goto yy128;
+       case 0xF4:      goto yy129;
+       default:        goto yy71;
        }
 yy99:
-       YYDEBUG(99, YYPEEK ());
-#line 226 "../wisitoken_grammar.re2c"
-       {*id =  2; continue;}
-#line 2651 "../wisitoken_grammar_re2c.c"
+       YYDEBUG(99, YYPEEK());
+       yyaccept = 3;
+       YYSKIP();
+       YYBACKUP();
+       yych = YYPEEK();
+       switch (yych) {
+       case 'C':
+       case 'c':       goto yy130;
+       default:        goto yy43;
+       }
 yy100:
-       YYDEBUG(100, YYPEEK ());
-       YYSKIP ();
-       yych = YYPEEK ();
+       YYDEBUG(100, YYPEEK());
+       YYSKIP();
+       yych = YYPEEK();
        switch (yych) {
        case 0x80:
        case 0x81:
@@ -2716,13 +2802,13 @@ yy100:
        case 0xBC:
        case 0xBD:
        case 0xBE:
-       case 0xBF:      goto yy44;
+       case 0xBF:      goto yy42;
        default:        goto yy71;
        }
 yy101:
-       YYDEBUG(101, YYPEEK ());
-       YYSKIP ();
-       yych = YYPEEK ();
+       YYDEBUG(101, YYPEEK());
+       YYSKIP();
+       yych = YYPEEK();
        switch (yych) {
        case 0xA0:
        case 0xA1:
@@ -2759,9 +2845,9 @@ yy101:
        default:        goto yy71;
        }
 yy102:
-       YYDEBUG(102, YYPEEK ());
-       YYSKIP ();
-       yych = YYPEEK ();
+       YYDEBUG(102, YYPEEK());
+       YYSKIP();
+       yych = YYPEEK();
        switch (yych) {
        case 0x80:
        case 0x81:
@@ -2830,9 +2916,9 @@ yy102:
        default:        goto yy71;
        }
 yy103:
-       YYDEBUG(103, YYPEEK ());
-       YYSKIP ();
-       yych = YYPEEK ();
+       YYDEBUG(103, YYPEEK());
+       YYSKIP();
+       yych = YYPEEK();
        switch (yych) {
        case 0x90:
        case 0x91:
@@ -2885,9 +2971,9 @@ yy103:
        default:        goto yy71;
        }
 yy104:
-       YYDEBUG(104, YYPEEK ());
-       YYSKIP ();
-       yych = YYPEEK ();
+       YYDEBUG(104, YYPEEK());
+       YYSKIP();
+       yych = YYPEEK();
        switch (yych) {
        case 0x80:
        case 0x81:
@@ -2956,9 +3042,9 @@ yy104:
        default:        goto yy71;
        }
 yy105:
-       YYDEBUG(105, YYPEEK ());
-       YYSKIP ();
-       yych = YYPEEK ();
+       YYDEBUG(105, YYPEEK());
+       YYSKIP();
+       yych = YYPEEK();
        switch (yych) {
        case 0x80:
        case 0x81:
@@ -2979,31 +3065,64 @@ yy105:
        default:        goto yy71;
        }
 yy106:
-       YYDEBUG(106, YYPEEK ());
-       yyaccept = 2;
-       YYSKIP ();
-       YYBACKUP ();
-       yych = YYPEEK ();
+       YYDEBUG(106, YYPEEK());
+       yyaccept = 3;
+       YYSKIP();
+       YYBACKUP();
+       yych = YYPEEK();
        switch (yych) {
-       case 'd':       goto yy121;
-       default:        goto yy45;
+       case 'D':
+       case 'd':       goto yy131;
+       default:        goto yy43;
        }
 yy107:
-       YYDEBUG(107, YYPEEK ());
-       yyaccept = 2;
-       YYSKIP ();
-       YYBACKUP ();
-       yych = YYPEEK ();
+       YYDEBUG(107, YYPEEK());
+       yyaccept = 3;
+       YYSKIP();
+       YYBACKUP();
+       yych = YYPEEK();
        switch (yych) {
-       case 'd':       goto yy122;
-       default:        goto yy45;
+       case 'I':
+       case 'i':       goto yy132;
+       default:        goto yy43;
        }
 yy108:
-       YYDEBUG(108, YYPEEK ());
+       YYDEBUG(108, YYPEEK());
+       yyaccept = 3;
+       YYSKIP();
+       YYBACKUP();
+       yych = YYPEEK();
+       switch (yych) {
+       case 'd':       goto yy133;
+       case 'n':       goto yy134;
+       default:        goto yy43;
+       }
+yy109:
+       YYDEBUG(109, YYPEEK());
+       yyaccept = 3;
+       YYSKIP();
+       YYBACKUP();
+       yych = YYPEEK();
+       switch (yych) {
+       case 's':       goto yy135;
+       default:        goto yy43;
+       }
+yy110:
+       YYDEBUG(110, YYPEEK());
+       yyaccept = 3;
+       YYSKIP();
+       YYBACKUP();
+       yych = YYPEEK();
+       switch (yych) {
+       case 'd':       goto yy136;
+       default:        goto yy43;
+       }
+yy111:
+       YYDEBUG(111, YYPEEK());
        yyaccept = 6;
-       YYSKIP ();
-       YYBACKUP ();
-       yych = YYPEEK ();
+       YYSKIP();
+       YYBACKUP();
+       yych = YYPEEK();
        switch (yych) {
        case '-':
        case '0':
@@ -3119,58 +3238,323 @@ yy108:
        case 0xF1:
        case 0xF2:
        case 0xF3:
-       case 0xF4:      goto yy45;
-       default:        goto yy109;
+       case 0xF4:      goto yy43;
+       default:        goto yy112;
        }
-yy109:
-       YYDEBUG(109, YYPEEK ());
-#line 229 "../wisitoken_grammar.re2c"
-       {*id =  5; continue;}
-#line 3130 "../wisitoken_grammar_re2c.c"
-yy110:
-       YYDEBUG(110, YYPEEK ());
-       yyaccept = 2;
-       YYSKIP ();
-       YYBACKUP ();
-       yych = YYPEEK ();
+yy112:
+       YYDEBUG(112, YYPEEK());
+#line 261 "../wisitoken_grammar.re2c"
+       {*id = 9; continue;}
+#line 3249 "../wisitoken_grammar_re2c.c"
+yy113:
+       YYDEBUG(113, YYPEEK());
+       yyaccept = 7;
+       YYSKIP();
+       YYBACKUP();
+       yych = YYPEEK();
        switch (yych) {
-       case 'y':       goto yy124;
-       default:        goto yy45;
+       case '-':
+       case '0':
+       case '1':
+       case '2':
+       case '3':
+       case '4':
+       case '5':
+       case '6':
+       case '7':
+       case '8':
+       case '9':
+       case 'A':
+       case 'B':
+       case 'C':
+       case 'D':
+       case 'E':
+       case 'F':
+       case 'G':
+       case 'H':
+       case 'I':
+       case 'J':
+       case 'K':
+       case 'L':
+       case 'M':
+       case 'N':
+       case 'O':
+       case 'P':
+       case 'Q':
+       case 'R':
+       case 'S':
+       case 'T':
+       case 'U':
+       case 'V':
+       case 'W':
+       case 'X':
+       case 'Y':
+       case 'Z':
+       case '_':
+       case 'a':
+       case 'b':
+       case 'c':
+       case 'd':
+       case 'e':
+       case 'f':
+       case 'g':
+       case 'h':
+       case 'i':
+       case 'j':
+       case 'k':
+       case 'l':
+       case 'm':
+       case 'n':
+       case 'o':
+       case 'p':
+       case 'q':
+       case 'r':
+       case 's':
+       case 't':
+       case 'u':
+       case 'v':
+       case 'w':
+       case 'x':
+       case 'y':
+       case 'z':
+       case 0xC2:
+       case 0xC3:
+       case 0xC4:
+       case 0xC5:
+       case 0xC6:
+       case 0xC7:
+       case 0xC8:
+       case 0xC9:
+       case 0xCA:
+       case 0xCB:
+       case 0xCC:
+       case 0xCD:
+       case 0xCE:
+       case 0xCF:
+       case 0xD0:
+       case 0xD1:
+       case 0xD2:
+       case 0xD3:
+       case 0xD4:
+       case 0xD5:
+       case 0xD6:
+       case 0xD7:
+       case 0xD8:
+       case 0xD9:
+       case 0xDA:
+       case 0xDB:
+       case 0xDC:
+       case 0xDD:
+       case 0xDE:
+       case 0xDF:
+       case 0xE0:
+       case 0xE1:
+       case 0xE2:
+       case 0xE3:
+       case 0xE4:
+       case 0xE5:
+       case 0xE6:
+       case 0xE7:
+       case 0xE8:
+       case 0xE9:
+       case 0xEA:
+       case 0xEB:
+       case 0xEC:
+       case 0xED:
+       case 0xEE:
+       case 0xEF:
+       case 0xF0:
+       case 0xF1:
+       case 0xF2:
+       case 0xF3:
+       case 0xF4:      goto yy43;
+       default:        goto yy114;
        }
-yy111:
-       YYDEBUG(111, YYPEEK ());
-       yyaccept = 2;
-       YYSKIP ();
-       YYBACKUP ();
-       yych = YYPEEK ();
+yy114:
+       YYDEBUG(114, YYPEEK());
+#line 262 "../wisitoken_grammar.re2c"
+       {*id = 10; continue;}
+#line 3378 "../wisitoken_grammar_re2c.c"
+yy115:
+       YYDEBUG(115, YYPEEK());
+       yyaccept = 3;
+       YYSKIP();
+       YYBACKUP();
+       yych = YYPEEK();
        switch (yych) {
-       case 'n':       goto yy125;
-       default:        goto yy45;
+       case 'y':       goto yy138;
+       default:        goto yy43;
        }
-yy112:
-       YYDEBUG(112, YYPEEK ());
-       yyaccept = 2;
-       YYSKIP ();
-       YYBACKUP ();
-       yych = YYPEEK ();
+yy116:
+       YYDEBUG(116, YYPEEK());
+       yyaccept = 3;
+       YYSKIP();
+       YYBACKUP();
+       yych = YYPEEK();
        switch (yych) {
-       case 'k':       goto yy126;
-       default:        goto yy45;
+       case 'n':       goto yy139;
+       default:        goto yy43;
        }
-yy113:
-       YYDEBUG(113, YYPEEK ());
-       YYSKIP ();
-       YYDEBUG(114, YYPEEK ());
-#line 238 "../wisitoken_grammar.re2c"
-       {*id =  14; continue;}
-#line 3167 "../wisitoken_grammar_re2c.c"
-yy115:
-       YYDEBUG(115, YYPEEK ());
-       YYSKIP ();
-       yych = YYPEEK ();
+yy117:
+       YYDEBUG(117, YYPEEK());
+       yyaccept = 8;
+       YYSKIP();
+       YYBACKUP();
+       yych = YYPEEK();
        switch (yych) {
-       case 0x80:
-       case 0x81:
+       case '-':
+       case '0':
+       case '1':
+       case '2':
+       case '3':
+       case '4':
+       case '5':
+       case '6':
+       case '7':
+       case '8':
+       case '9':
+       case 'A':
+       case 'B':
+       case 'C':
+       case 'D':
+       case 'E':
+       case 'F':
+       case 'G':
+       case 'H':
+       case 'I':
+       case 'J':
+       case 'K':
+       case 'L':
+       case 'M':
+       case 'N':
+       case 'O':
+       case 'P':
+       case 'Q':
+       case 'R':
+       case 'S':
+       case 'T':
+       case 'U':
+       case 'V':
+       case 'W':
+       case 'X':
+       case 'Y':
+       case 'Z':
+       case '_':
+       case 'a':
+       case 'b':
+       case 'c':
+       case 'd':
+       case 'e':
+       case 'f':
+       case 'g':
+       case 'h':
+       case 'i':
+       case 'j':
+       case 'k':
+       case 'l':
+       case 'm':
+       case 'n':
+       case 'o':
+       case 'p':
+       case 'q':
+       case 'r':
+       case 's':
+       case 't':
+       case 'u':
+       case 'v':
+       case 'w':
+       case 'x':
+       case 'y':
+       case 'z':
+       case 0xC2:
+       case 0xC3:
+       case 0xC4:
+       case 0xC5:
+       case 0xC6:
+       case 0xC7:
+       case 0xC8:
+       case 0xC9:
+       case 0xCA:
+       case 0xCB:
+       case 0xCC:
+       case 0xCD:
+       case 0xCE:
+       case 0xCF:
+       case 0xD0:
+       case 0xD1:
+       case 0xD2:
+       case 0xD3:
+       case 0xD4:
+       case 0xD5:
+       case 0xD6:
+       case 0xD7:
+       case 0xD8:
+       case 0xD9:
+       case 0xDA:
+       case 0xDB:
+       case 0xDC:
+       case 0xDD:
+       case 0xDE:
+       case 0xDF:
+       case 0xE0:
+       case 0xE1:
+       case 0xE2:
+       case 0xE3:
+       case 0xE4:
+       case 0xE5:
+       case 0xE6:
+       case 0xE7:
+       case 0xE8:
+       case 0xE9:
+       case 0xEA:
+       case 0xEB:
+       case 0xEC:
+       case 0xED:
+       case 0xEE:
+       case 0xEF:
+       case 0xF0:
+       case 0xF1:
+       case 0xF2:
+       case 0xF3:
+       case 0xF4:      goto yy43;
+       default:        goto yy118;
+       }
+yy118:
+       YYDEBUG(118, YYPEEK());
+#line 265 "../wisitoken_grammar.re2c"
+       {*id = 13; continue;}
+#line 3527 "../wisitoken_grammar_re2c.c"
+yy119:
+       YYDEBUG(119, YYPEEK());
+       yyaccept = 3;
+       YYSKIP();
+       YYBACKUP();
+       yych = YYPEEK();
+       switch (yych) {
+       case 'k':       goto yy140;
+       default:        goto yy43;
+       }
+yy120:
+       YYDEBUG(120, YYPEEK());
+       YYSKIP();
+       YYDEBUG(121, YYPEEK());
+#line 274 "../wisitoken_grammar.re2c"
+       {*id = 22; continue;}
+#line 3544 "../wisitoken_grammar_re2c.c"
+yy122:
+       YYDEBUG(122, YYPEEK());
+       YYSKIP();
+       YYDEBUG(123, YYPEEK());
+#line 254 "../wisitoken_grammar.re2c"
+       {*id = 2; if (lexer->cursor[-1] == 0x0a || (lexer->cursor[-1] == 0x0d 
&& lexer->cursor[-2] == 0x0a)) lexer->line++; continue;}
+#line 3551 "../wisitoken_grammar_re2c.c"
+yy124:
+       YYDEBUG(124, YYPEEK());
+       YYSKIP();
+       yych = YYPEEK();
+       switch (yych) {
+       case 0x80:
+       case 0x81:
        case 0x82:
        case 0x83:
        case 0x84:
@@ -3235,10 +3619,10 @@ yy115:
        case 0xBF:      goto yy97;
        default:        goto yy71;
        }
-yy116:
-       YYDEBUG(116, YYPEEK ());
-       YYSKIP ();
-       yych = YYPEEK ();
+yy125:
+       YYDEBUG(125, YYPEEK());
+       YYSKIP();
+       yych = YYPEEK();
        switch (yych) {
        case 0xA0:
        case 0xA1:
@@ -3271,13 +3655,13 @@ yy116:
        case 0xBC:
        case 0xBD:
        case 0xBE:
-       case 0xBF:      goto yy115;
+       case 0xBF:      goto yy124;
        default:        goto yy71;
        }
-yy117:
-       YYDEBUG(117, YYPEEK ());
-       YYSKIP ();
-       yych = YYPEEK ();
+yy126:
+       YYDEBUG(126, YYPEEK());
+       YYSKIP();
+       yych = YYPEEK();
        switch (yych) {
        case 0x80:
        case 0x81:
@@ -3342,13 +3726,13 @@ yy117:
        case 0xBC:
        case 0xBD:
        case 0xBE:
-       case 0xBF:      goto yy115;
+       case 0xBF:      goto yy124;
        default:        goto yy71;
        }
-yy118:
-       YYDEBUG(118, YYPEEK ());
-       YYSKIP ();
-       yych = YYPEEK ();
+yy127:
+       YYDEBUG(127, YYPEEK());
+       YYSKIP();
+       yych = YYPEEK();
        switch (yych) {
        case 0x90:
        case 0x91:
@@ -3397,13 +3781,13 @@ yy118:
        case 0xBC:
        case 0xBD:
        case 0xBE:
-       case 0xBF:      goto yy117;
+       case 0xBF:      goto yy126;
        default:        goto yy71;
        }
-yy119:
-       YYDEBUG(119, YYPEEK ());
-       YYSKIP ();
-       yych = YYPEEK ();
+yy128:
+       YYDEBUG(128, YYPEEK());
+       YYSKIP();
+       yych = YYPEEK();
        switch (yych) {
        case 0x80:
        case 0x81:
@@ -3468,13 +3852,13 @@ yy119:
        case 0xBC:
        case 0xBD:
        case 0xBE:
-       case 0xBF:      goto yy117;
+       case 0xBF:      goto yy126;
        default:        goto yy71;
        }
-yy120:
-       YYDEBUG(120, YYPEEK ());
-       YYSKIP ();
-       yych = YYPEEK ();
+yy129:
+       YYDEBUG(129, YYPEEK());
+       YYSKIP();
+       yych = YYPEEK();
        switch (yych) {
        case 0x80:
        case 0x81:
@@ -3491,25 +3875,78 @@ yy120:
        case 0x8C:
        case 0x8D:
        case 0x8E:
-       case 0x8F:      goto yy117;
+       case 0x8F:      goto yy126;
        default:        goto yy71;
        }
-yy121:
-       YYDEBUG(121, YYPEEK ());
-       yyaccept = 2;
-       YYSKIP ();
-       YYBACKUP ();
-       yych = YYPEEK ();
+yy130:
+       YYDEBUG(130, YYPEEK());
+       yyaccept = 3;
+       YYSKIP();
+       YYBACKUP();
+       yych = YYPEEK();
        switch (yych) {
-       case 'e':       goto yy127;
-       default:        goto yy45;
+       case 'E':
+       case 'e':       goto yy141;
+       default:        goto yy43;
        }
-yy122:
-       YYDEBUG(122, YYPEEK ());
-       yyaccept = 7;
-       YYSKIP ();
-       YYBACKUP ();
-       yych = YYPEEK ();
+yy131:
+       YYDEBUG(131, YYPEEK());
+       yyaccept = 3;
+       YYSKIP();
+       YYBACKUP();
+       yych = YYPEEK();
+       switch (yych) {
+       case 'U':
+       case 'u':       goto yy142;
+       default:        goto yy43;
+       }
+yy132:
+       YYDEBUG(132, YYPEEK());
+       yyaccept = 3;
+       YYSKIP();
+       YYBACKUP();
+       yych = YYPEEK();
+       switch (yych) {
+       case 'F':
+       case 'f':       goto yy143;
+       default:        goto yy43;
+       }
+yy133:
+       YYDEBUG(133, YYPEEK());
+       yyaccept = 3;
+       YYSKIP();
+       YYBACKUP();
+       yych = YYPEEK();
+       switch (yych) {
+       case 'e':       goto yy144;
+       default:        goto yy43;
+       }
+yy134:
+       YYDEBUG(134, YYPEEK());
+       yyaccept = 3;
+       YYSKIP();
+       YYBACKUP();
+       yych = YYPEEK();
+       switch (yych) {
+       case 'f':       goto yy146;
+       default:        goto yy43;
+       }
+yy135:
+       YYDEBUG(135, YYPEEK());
+       yyaccept = 3;
+       YYSKIP();
+       YYBACKUP();
+       yych = YYPEEK();
+       switch (yych) {
+       case 'i':       goto yy147;
+       default:        goto yy43;
+       }
+yy136:
+       YYDEBUG(136, YYPEEK());
+       yyaccept = 9;
+       YYSKIP();
+       YYBACKUP();
+       yych = YYPEEK();
        switch (yych) {
        case '-':
        case '0':
@@ -3625,50 +4062,83 @@ yy122:
        case 0xF1:
        case 0xF2:
        case 0xF3:
-       case 0xF4:      goto yy45;
-       default:        goto yy123;
+       case 0xF4:      goto yy43;
+       default:        goto yy137;
        }
-yy123:
-       YYDEBUG(123, YYPEEK ());
-#line 228 "../wisitoken_grammar.re2c"
-       {*id =  4; continue;}
-#line 3636 "../wisitoken_grammar_re2c.c"
-yy124:
-       YYDEBUG(124, YYPEEK ());
-       yyaccept = 2;
-       YYSKIP ();
-       YYBACKUP ();
-       yych = YYPEEK ();
+yy137:
+       YYDEBUG(137, YYPEEK());
+#line 259 "../wisitoken_grammar.re2c"
+       {*id = 7; continue;}
+#line 4073 "../wisitoken_grammar_re2c.c"
+yy138:
+       YYDEBUG(138, YYPEEK());
+       yyaccept = 3;
+       YYSKIP();
+       YYBACKUP();
+       yych = YYPEEK();
        switch (yych) {
-       case 'w':       goto yy129;
-       default:        goto yy45;
+       case 'w':       goto yy148;
+       default:        goto yy43;
        }
-yy125:
-       YYDEBUG(125, YYPEEK ());
-       yyaccept = 2;
-       YYSKIP ();
-       YYBACKUP ();
-       yych = YYPEEK ();
+yy139:
+       YYDEBUG(139, YYPEEK());
+       yyaccept = 3;
+       YYSKIP();
+       YYBACKUP();
+       yych = YYPEEK();
        switch (yych) {
-       case '_':       goto yy130;
-       default:        goto yy45;
+       case '_':       goto yy149;
+       default:        goto yy43;
        }
-yy126:
-       YYDEBUG(126, YYPEEK ());
-       yyaccept = 2;
-       YYSKIP ();
-       YYBACKUP ();
-       yych = YYPEEK ();
+yy140:
+       YYDEBUG(140, YYPEEK());
+       yyaccept = 3;
+       YYSKIP();
+       YYBACKUP();
+       yych = YYPEEK();
        switch (yych) {
-       case 'e':       goto yy131;
-       default:        goto yy45;
+       case 'e':       goto yy150;
+       default:        goto yy43;
        }
-yy127:
-       YYDEBUG(127, YYPEEK ());
-       yyaccept = 8;
-       YYSKIP ();
-       YYBACKUP ();
-       yych = YYPEEK ();
+yy141:
+       YYDEBUG(141, YYPEEK());
+       yyaccept = 3;
+       YYSKIP();
+       YYBACKUP();
+       yych = YYPEEK();
+       switch (yych) {
+       case 'P':
+       case 'p':       goto yy151;
+       default:        goto yy43;
+       }
+yy142:
+       YYDEBUG(142, YYPEEK());
+       yyaccept = 3;
+       YYSKIP();
+       YYBACKUP();
+       yych = YYPEEK();
+       switch (yych) {
+       case 'C':
+       case 'c':       goto yy152;
+       default:        goto yy43;
+       }
+yy143:
+       YYDEBUG(143, YYPEEK());
+       yyaccept = 3;
+       YYSKIP();
+       YYBACKUP();
+       yych = YYPEEK();
+       switch (yych) {
+       case 'T':
+       case 't':       goto yy153;
+       default:        goto yy43;
+       }
+yy144:
+       YYDEBUG(144, YYPEEK());
+       yyaccept = 10;
+       YYSKIP();
+       YYBACKUP();
+       yych = YYPEEK();
        switch (yych) {
        case '-':
        case '0':
@@ -3784,70 +4254,92 @@ yy127:
        case 0xF1:
        case 0xF2:
        case 0xF3:
-       case 0xF4:      goto yy45;
-       default:        goto yy128;
+       case 0xF4:      goto yy43;
+       default:        goto yy145;
        }
-yy128:
-       YYDEBUG(128, YYPEEK ());
-#line 227 "../wisitoken_grammar.re2c"
-       {*id =  3; continue;}
-#line 3795 "../wisitoken_grammar_re2c.c"
-yy129:
-       YYDEBUG(129, YYPEEK ());
-       yyaccept = 2;
-       YYSKIP ();
-       YYBACKUP ();
-       yych = YYPEEK ();
+yy145:
+       YYDEBUG(145, YYPEEK());
+#line 256 "../wisitoken_grammar.re2c"
+       {*id = 4; continue;}
+#line 4265 "../wisitoken_grammar_re2c.c"
+yy146:
+       YYDEBUG(146, YYPEEK());
+       yyaccept = 3;
+       YYSKIP();
+       YYBACKUP();
+       yych = YYPEEK();
        switch (yych) {
-       case 'o':       goto yy132;
-       default:        goto yy45;
+       case 'l':       goto yy155;
+       default:        goto yy43;
        }
-yy130:
-       YYDEBUG(130, YYPEEK ());
-       yyaccept = 2;
-       YYSKIP ();
-       YYBACKUP ();
-       yych = YYPEEK ();
+yy147:
+       YYDEBUG(147, YYPEEK());
+       yyaccept = 3;
+       YYSKIP();
+       YYBACKUP();
+       yych = YYPEEK();
        switch (yych) {
-       case 'g':       goto yy133;
-       default:        goto yy45;
+       case 'f':       goto yy156;
+       default:        goto yy43;
        }
-yy131:
-       YYDEBUG(131, YYPEEK ());
-       yyaccept = 2;
-       YYSKIP ();
-       YYBACKUP ();
-       yych = YYPEEK ();
+yy148:
+       YYDEBUG(148, YYPEEK());
+       yyaccept = 3;
+       YYSKIP();
+       YYBACKUP();
+       yych = YYPEEK();
        switch (yych) {
-       case 'n':       goto yy134;
-       default:        goto yy45;
+       case 'o':       goto yy158;
+       default:        goto yy43;
        }
-yy132:
-       YYDEBUG(132, YYPEEK ());
-       yyaccept = 2;
-       YYSKIP ();
-       YYBACKUP ();
-       yych = YYPEEK ();
+yy149:
+       YYDEBUG(149, YYPEEK());
+       yyaccept = 3;
+       YYSKIP();
+       YYBACKUP();
+       yych = YYPEEK();
        switch (yych) {
-       case 'r':       goto yy136;
-       default:        goto yy45;
+       case 'g':       goto yy159;
+       default:        goto yy43;
        }
-yy133:
-       YYDEBUG(133, YYPEEK ());
-       yyaccept = 2;
-       YYSKIP ();
-       YYBACKUP ();
-       yych = YYPEEK ();
+yy150:
+       YYDEBUG(150, YYPEEK());
+       yyaccept = 3;
+       YYSKIP();
+       YYBACKUP();
+       yych = YYPEEK();
        switch (yych) {
-       case 'r':       goto yy137;
-       default:        goto yy45;
+       case 'n':       goto yy160;
+       default:        goto yy43;
        }
-yy134:
-       YYDEBUG(134, YYPEEK ());
-       yyaccept = 9;
-       YYSKIP ();
-       YYBACKUP ();
-       yych = YYPEEK ();
+yy151:
+       YYDEBUG(151, YYPEEK());
+       yyaccept = 3;
+       YYSKIP();
+       YYBACKUP();
+       yych = YYPEEK();
+       switch (yych) {
+       case 'T':
+       case 't':       goto yy162;
+       default:        goto yy43;
+       }
+yy152:
+       YYDEBUG(152, YYPEEK());
+       yyaccept = 3;
+       YYSKIP();
+       YYBACKUP();
+       yych = YYPEEK();
+       switch (yych) {
+       case 'E':
+       case 'e':       goto yy163;
+       default:        goto yy43;
+       }
+yy153:
+       YYDEBUG(153, YYPEEK());
+       yyaccept = 11;
+       YYSKIP();
+       YYBACKUP();
+       yych = YYPEEK();
        switch (yych) {
        case '-':
        case '0':
@@ -3963,40 +4455,30 @@ yy134:
        case 0xF1:
        case 0xF2:
        case 0xF3:
-       case 0xF4:      goto yy45;
-       default:        goto yy135;
-       }
-yy135:
-       YYDEBUG(135, YYPEEK ());
-#line 232 "../wisitoken_grammar.re2c"
-       {*id =  8; continue;}
-#line 3974 "../wisitoken_grammar_re2c.c"
-yy136:
-       YYDEBUG(136, YYPEEK ());
-       yyaccept = 2;
-       YYSKIP ();
-       YYBACKUP ();
-       yych = YYPEEK ();
-       switch (yych) {
-       case 'd':       goto yy138;
-       default:        goto yy45;
+       case 0xF4:      goto yy43;
+       default:        goto yy154;
        }
-yy137:
-       YYDEBUG(137, YYPEEK ());
-       yyaccept = 2;
-       YYSKIP ();
-       YYBACKUP ();
-       yych = YYPEEK ();
+yy154:
+       YYDEBUG(154, YYPEEK());
+#line 267 "../wisitoken_grammar.re2c"
+       {*id = 15; continue;}
+#line 4466 "../wisitoken_grammar_re2c.c"
+yy155:
+       YYDEBUG(155, YYPEEK());
+       yyaccept = 3;
+       YYSKIP();
+       YYBACKUP();
+       yych = YYPEEK();
        switch (yych) {
-       case 'a':       goto yy140;
-       default:        goto yy45;
+       case 'i':       goto yy165;
+       default:        goto yy43;
        }
-yy138:
-       YYDEBUG(138, YYPEEK ());
-       yyaccept = 10;
-       YYSKIP ();
-       YYBACKUP ();
-       yych = YYPEEK ();
+yy156:
+       YYDEBUG(156, YYPEEK());
+       yyaccept = 12;
+       YYSKIP();
+       YYBACKUP();
+       yych = YYPEEK();
        switch (yych) {
        case '-':
        case '0':
@@ -4112,60 +4594,827 @@ yy138:
        case 0xF1:
        case 0xF2:
        case 0xF3:
-       case 0xF4:      goto yy45;
-       default:        goto yy139;
-       }
-yy139:
-       YYDEBUG(139, YYPEEK ());
-#line 230 "../wisitoken_grammar.re2c"
-       {*id =  6; continue;}
-#line 4123 "../wisitoken_grammar_re2c.c"
-yy140:
-       YYDEBUG(140, YYPEEK ());
-       yyaccept = 2;
-       YYSKIP ();
-       YYBACKUP ();
-       yych = YYPEEK ();
-       switch (yych) {
-       case 'm':       goto yy141;
-       default:        goto yy45;
+       case 0xF4:      goto yy43;
+       default:        goto yy157;
        }
-yy141:
-       YYDEBUG(141, YYPEEK ());
-       yyaccept = 2;
-       YYSKIP ();
-       YYBACKUP ();
-       yych = YYPEEK ();
+yy157:
+       YYDEBUG(157, YYPEEK());
+#line 260 "../wisitoken_grammar.re2c"
+       {*id = 8; continue;}
+#line 4605 "../wisitoken_grammar_re2c.c"
+yy158:
+       YYDEBUG(158, YYPEEK());
+       yyaccept = 3;
+       YYSKIP();
+       YYBACKUP();
+       yych = YYPEEK();
        switch (yych) {
-       case 'm':       goto yy142;
-       default:        goto yy45;
+       case 'r':       goto yy166;
+       default:        goto yy43;
        }
-yy142:
-       YYDEBUG(142, YYPEEK ());
-       yyaccept = 2;
-       YYSKIP ();
-       YYBACKUP ();
-       yych = YYPEEK ();
+yy159:
+       YYDEBUG(159, YYPEEK());
+       yyaccept = 3;
+       YYSKIP();
+       YYBACKUP();
+       yych = YYPEEK();
        switch (yych) {
-       case 'a':       goto yy143;
-       default:        goto yy45;
+       case 'r':       goto yy167;
+       default:        goto yy43;
        }
-yy143:
-       YYDEBUG(143, YYPEEK ());
-       yyaccept = 2;
-       YYSKIP ();
-       YYBACKUP ();
-       yych = YYPEEK ();
+yy160:
+       YYDEBUG(160, YYPEEK());
+       yyaccept = 13;
+       YYSKIP();
+       YYBACKUP();
+       yych = YYPEEK();
        switch (yych) {
-       case 'r':       goto yy144;
-       default:        goto yy45;
-       }
-yy144:
-       YYDEBUG(144, YYPEEK ());
-       yyaccept = 11;
-       YYSKIP ();
-       YYBACKUP ();
-       yych = YYPEEK ();
+       case '-':
+       case '0':
+       case '1':
+       case '2':
+       case '3':
+       case '4':
+       case '5':
+       case '6':
+       case '7':
+       case '8':
+       case '9':
+       case 'A':
+       case 'B':
+       case 'C':
+       case 'D':
+       case 'E':
+       case 'F':
+       case 'G':
+       case 'H':
+       case 'I':
+       case 'J':
+       case 'K':
+       case 'L':
+       case 'M':
+       case 'N':
+       case 'O':
+       case 'P':
+       case 'Q':
+       case 'R':
+       case 'S':
+       case 'T':
+       case 'U':
+       case 'V':
+       case 'W':
+       case 'X':
+       case 'Y':
+       case 'Z':
+       case '_':
+       case 'a':
+       case 'b':
+       case 'c':
+       case 'd':
+       case 'e':
+       case 'f':
+       case 'g':
+       case 'h':
+       case 'i':
+       case 'j':
+       case 'k':
+       case 'l':
+       case 'm':
+       case 'n':
+       case 'o':
+       case 'p':
+       case 'q':
+       case 'r':
+       case 's':
+       case 't':
+       case 'u':
+       case 'v':
+       case 'w':
+       case 'x':
+       case 'y':
+       case 'z':
+       case 0xC2:
+       case 0xC3:
+       case 0xC4:
+       case 0xC5:
+       case 0xC6:
+       case 0xC7:
+       case 0xC8:
+       case 0xC9:
+       case 0xCA:
+       case 0xCB:
+       case 0xCC:
+       case 0xCD:
+       case 0xCE:
+       case 0xCF:
+       case 0xD0:
+       case 0xD1:
+       case 0xD2:
+       case 0xD3:
+       case 0xD4:
+       case 0xD5:
+       case 0xD6:
+       case 0xD7:
+       case 0xD8:
+       case 0xD9:
+       case 0xDA:
+       case 0xDB:
+       case 0xDC:
+       case 0xDD:
+       case 0xDE:
+       case 0xDF:
+       case 0xE0:
+       case 0xE1:
+       case 0xE2:
+       case 0xE3:
+       case 0xE4:
+       case 0xE5:
+       case 0xE6:
+       case 0xE7:
+       case 0xE8:
+       case 0xE9:
+       case 0xEA:
+       case 0xEB:
+       case 0xEC:
+       case 0xED:
+       case 0xEE:
+       case 0xEF:
+       case 0xF0:
+       case 0xF1:
+       case 0xF2:
+       case 0xF3:
+       case 0xF4:      goto yy43;
+       default:        goto yy161;
+       }
+yy161:
+       YYDEBUG(161, YYPEEK());
+#line 268 "../wisitoken_grammar.re2c"
+       {*id = 16; continue;}
+#line 4754 "../wisitoken_grammar_re2c.c"
+yy162:
+       YYDEBUG(162, YYPEEK());
+       yyaccept = 3;
+       YYSKIP();
+       YYBACKUP();
+       yych = YYPEEK();
+       switch (yych) {
+       case '_':       goto yy168;
+       default:        goto yy43;
+       }
+yy163:
+       YYDEBUG(163, YYPEEK());
+       yyaccept = 14;
+       YYSKIP();
+       YYBACKUP();
+       yych = YYPEEK();
+       switch (yych) {
+       case '-':
+       case '0':
+       case '1':
+       case '2':
+       case '3':
+       case '4':
+       case '5':
+       case '6':
+       case '7':
+       case '8':
+       case '9':
+       case 'A':
+       case 'B':
+       case 'C':
+       case 'D':
+       case 'E':
+       case 'F':
+       case 'G':
+       case 'H':
+       case 'I':
+       case 'J':
+       case 'K':
+       case 'L':
+       case 'M':
+       case 'N':
+       case 'O':
+       case 'P':
+       case 'Q':
+       case 'R':
+       case 'S':
+       case 'T':
+       case 'U':
+       case 'V':
+       case 'W':
+       case 'X':
+       case 'Y':
+       case 'Z':
+       case '_':
+       case 'a':
+       case 'b':
+       case 'c':
+       case 'd':
+       case 'e':
+       case 'f':
+       case 'g':
+       case 'h':
+       case 'i':
+       case 'j':
+       case 'k':
+       case 'l':
+       case 'm':
+       case 'n':
+       case 'o':
+       case 'p':
+       case 'q':
+       case 'r':
+       case 's':
+       case 't':
+       case 'u':
+       case 'v':
+       case 'w':
+       case 'x':
+       case 'y':
+       case 'z':
+       case 0xC2:
+       case 0xC3:
+       case 0xC4:
+       case 0xC5:
+       case 0xC6:
+       case 0xC7:
+       case 0xC8:
+       case 0xC9:
+       case 0xCA:
+       case 0xCB:
+       case 0xCC:
+       case 0xCD:
+       case 0xCE:
+       case 0xCF:
+       case 0xD0:
+       case 0xD1:
+       case 0xD2:
+       case 0xD3:
+       case 0xD4:
+       case 0xD5:
+       case 0xD6:
+       case 0xD7:
+       case 0xD8:
+       case 0xD9:
+       case 0xDA:
+       case 0xDB:
+       case 0xDC:
+       case 0xDD:
+       case 0xDE:
+       case 0xDF:
+       case 0xE0:
+       case 0xE1:
+       case 0xE2:
+       case 0xE3:
+       case 0xE4:
+       case 0xE5:
+       case 0xE6:
+       case 0xE7:
+       case 0xE8:
+       case 0xE9:
+       case 0xEA:
+       case 0xEB:
+       case 0xEC:
+       case 0xED:
+       case 0xEE:
+       case 0xEF:
+       case 0xF0:
+       case 0xF1:
+       case 0xF2:
+       case 0xF3:
+       case 0xF4:      goto yy43;
+       default:        goto yy164;
+       }
+yy164:
+       YYDEBUG(164, YYPEEK());
+#line 266 "../wisitoken_grammar.re2c"
+       {*id = 14; continue;}
+#line 4893 "../wisitoken_grammar_re2c.c"
+yy165:
+       YYDEBUG(165, YYPEEK());
+       yyaccept = 3;
+       YYSKIP();
+       YYBACKUP();
+       yych = YYPEEK();
+       switch (yych) {
+       case 'c':       goto yy169;
+       default:        goto yy43;
+       }
+yy166:
+       YYDEBUG(166, YYPEEK());
+       yyaccept = 3;
+       YYSKIP();
+       YYBACKUP();
+       yych = YYPEEK();
+       switch (yych) {
+       case 'd':       goto yy170;
+       default:        goto yy43;
+       }
+yy167:
+       YYDEBUG(167, YYPEEK());
+       yyaccept = 3;
+       YYSKIP();
+       YYBACKUP();
+       yych = YYPEEK();
+       switch (yych) {
+       case 'a':       goto yy172;
+       default:        goto yy43;
+       }
+yy168:
+       YYDEBUG(168, YYPEEK());
+       yyaccept = 3;
+       YYSKIP();
+       YYBACKUP();
+       yych = YYPEEK();
+       switch (yych) {
+       case 'I':
+       case 'i':       goto yy173;
+       default:        goto yy43;
+       }
+yy169:
+       YYDEBUG(169, YYPEEK());
+       yyaccept = 3;
+       YYSKIP();
+       YYBACKUP();
+       yych = YYPEEK();
+       switch (yych) {
+       case 't':       goto yy174;
+       default:        goto yy43;
+       }
+yy170:
+       YYDEBUG(170, YYPEEK());
+       yyaccept = 15;
+       YYSKIP();
+       YYBACKUP();
+       yych = YYPEEK();
+       switch (yych) {
+       case '-':
+       case '0':
+       case '1':
+       case '2':
+       case '3':
+       case '4':
+       case '5':
+       case '6':
+       case '7':
+       case '8':
+       case '9':
+       case 'A':
+       case 'B':
+       case 'C':
+       case 'D':
+       case 'E':
+       case 'F':
+       case 'G':
+       case 'H':
+       case 'I':
+       case 'J':
+       case 'K':
+       case 'L':
+       case 'M':
+       case 'N':
+       case 'O':
+       case 'P':
+       case 'Q':
+       case 'R':
+       case 'S':
+       case 'T':
+       case 'U':
+       case 'V':
+       case 'W':
+       case 'X':
+       case 'Y':
+       case 'Z':
+       case '_':
+       case 'a':
+       case 'b':
+       case 'c':
+       case 'd':
+       case 'e':
+       case 'f':
+       case 'g':
+       case 'h':
+       case 'i':
+       case 'j':
+       case 'k':
+       case 'l':
+       case 'm':
+       case 'n':
+       case 'o':
+       case 'p':
+       case 'q':
+       case 'r':
+       case 's':
+       case 't':
+       case 'u':
+       case 'v':
+       case 'w':
+       case 'x':
+       case 'y':
+       case 'z':
+       case 0xC2:
+       case 0xC3:
+       case 0xC4:
+       case 0xC5:
+       case 0xC6:
+       case 0xC7:
+       case 0xC8:
+       case 0xC9:
+       case 0xCA:
+       case 0xCB:
+       case 0xCC:
+       case 0xCD:
+       case 0xCE:
+       case 0xCF:
+       case 0xD0:
+       case 0xD1:
+       case 0xD2:
+       case 0xD3:
+       case 0xD4:
+       case 0xD5:
+       case 0xD6:
+       case 0xD7:
+       case 0xD8:
+       case 0xD9:
+       case 0xDA:
+       case 0xDB:
+       case 0xDC:
+       case 0xDD:
+       case 0xDE:
+       case 0xDF:
+       case 0xE0:
+       case 0xE1:
+       case 0xE2:
+       case 0xE3:
+       case 0xE4:
+       case 0xE5:
+       case 0xE6:
+       case 0xE7:
+       case 0xE8:
+       case 0xE9:
+       case 0xEA:
+       case 0xEB:
+       case 0xEC:
+       case 0xED:
+       case 0xEE:
+       case 0xEF:
+       case 0xF0:
+       case 0xF1:
+       case 0xF2:
+       case 0xF3:
+       case 0xF4:      goto yy43;
+       default:        goto yy171;
+       }
+yy171:
+       YYDEBUG(171, YYPEEK());
+#line 263 "../wisitoken_grammar.re2c"
+       {*id = 11; continue;}
+#line 5073 "../wisitoken_grammar_re2c.c"
+yy172:
+       YYDEBUG(172, YYPEEK());
+       yyaccept = 3;
+       YYSKIP();
+       YYBACKUP();
+       yych = YYPEEK();
+       switch (yych) {
+       case 'm':       goto yy176;
+       default:        goto yy43;
+       }
+yy173:
+       YYDEBUG(173, YYPEEK());
+       yyaccept = 3;
+       YYSKIP();
+       YYBACKUP();
+       yych = YYPEEK();
+       switch (yych) {
+       case 'T':
+       case 't':       goto yy177;
+       default:        goto yy43;
+       }
+yy174:
+       YYDEBUG(174, YYPEEK());
+       yyaccept = 16;
+       YYSKIP();
+       YYBACKUP();
+       yych = YYPEEK();
+       switch (yych) {
+       case '-':
+       case '0':
+       case '1':
+       case '2':
+       case '3':
+       case '4':
+       case '5':
+       case '6':
+       case '7':
+       case '8':
+       case '9':
+       case 'A':
+       case 'B':
+       case 'C':
+       case 'D':
+       case 'E':
+       case 'F':
+       case 'G':
+       case 'H':
+       case 'I':
+       case 'J':
+       case 'K':
+       case 'L':
+       case 'M':
+       case 'N':
+       case 'O':
+       case 'P':
+       case 'Q':
+       case 'R':
+       case 'S':
+       case 'T':
+       case 'U':
+       case 'V':
+       case 'W':
+       case 'X':
+       case 'Y':
+       case 'Z':
+       case 'a':
+       case 'b':
+       case 'c':
+       case 'd':
+       case 'e':
+       case 'f':
+       case 'g':
+       case 'h':
+       case 'i':
+       case 'j':
+       case 'k':
+       case 'l':
+       case 'm':
+       case 'n':
+       case 'o':
+       case 'p':
+       case 'q':
+       case 'r':
+       case 's':
+       case 't':
+       case 'u':
+       case 'v':
+       case 'w':
+       case 'x':
+       case 'y':
+       case 'z':
+       case 0xC2:
+       case 0xC3:
+       case 0xC4:
+       case 0xC5:
+       case 0xC6:
+       case 0xC7:
+       case 0xC8:
+       case 0xC9:
+       case 0xCA:
+       case 0xCB:
+       case 0xCC:
+       case 0xCD:
+       case 0xCE:
+       case 0xCF:
+       case 0xD0:
+       case 0xD1:
+       case 0xD2:
+       case 0xD3:
+       case 0xD4:
+       case 0xD5:
+       case 0xD6:
+       case 0xD7:
+       case 0xD8:
+       case 0xD9:
+       case 0xDA:
+       case 0xDB:
+       case 0xDC:
+       case 0xDD:
+       case 0xDE:
+       case 0xDF:
+       case 0xE0:
+       case 0xE1:
+       case 0xE2:
+       case 0xE3:
+       case 0xE4:
+       case 0xE5:
+       case 0xE6:
+       case 0xE7:
+       case 0xE8:
+       case 0xE9:
+       case 0xEA:
+       case 0xEB:
+       case 0xEC:
+       case 0xED:
+       case 0xEE:
+       case 0xEF:
+       case 0xF0:
+       case 0xF1:
+       case 0xF2:
+       case 0xF3:
+       case 0xF4:      goto yy43;
+       case '_':       goto yy179;
+       default:        goto yy175;
+       }
+yy175:
+       YYDEBUG(175, YYPEEK());
+#line 257 "../wisitoken_grammar.re2c"
+       {*id = 5; continue;}
+#line 5223 "../wisitoken_grammar_re2c.c"
+yy176:
+       YYDEBUG(176, YYPEEK());
+       yyaccept = 3;
+       YYSKIP();
+       YYBACKUP();
+       yych = YYPEEK();
+       switch (yych) {
+       case 'm':       goto yy180;
+       default:        goto yy43;
+       }
+yy177:
+       YYDEBUG(177, YYPEEK());
+       yyaccept = 17;
+       YYSKIP();
+       YYBACKUP();
+       yych = YYPEEK();
+       switch (yych) {
+       case '-':
+       case '0':
+       case '1':
+       case '2':
+       case '3':
+       case '4':
+       case '5':
+       case '6':
+       case '7':
+       case '8':
+       case '9':
+       case 'A':
+       case 'B':
+       case 'C':
+       case 'D':
+       case 'E':
+       case 'F':
+       case 'G':
+       case 'H':
+       case 'I':
+       case 'J':
+       case 'K':
+       case 'L':
+       case 'M':
+       case 'N':
+       case 'O':
+       case 'P':
+       case 'Q':
+       case 'R':
+       case 'S':
+       case 'T':
+       case 'U':
+       case 'V':
+       case 'W':
+       case 'X':
+       case 'Y':
+       case 'Z':
+       case '_':
+       case 'a':
+       case 'b':
+       case 'c':
+       case 'd':
+       case 'e':
+       case 'f':
+       case 'g':
+       case 'h':
+       case 'i':
+       case 'j':
+       case 'k':
+       case 'l':
+       case 'm':
+       case 'n':
+       case 'o':
+       case 'p':
+       case 'q':
+       case 'r':
+       case 's':
+       case 't':
+       case 'u':
+       case 'v':
+       case 'w':
+       case 'x':
+       case 'y':
+       case 'z':
+       case 0xC2:
+       case 0xC3:
+       case 0xC4:
+       case 0xC5:
+       case 0xC6:
+       case 0xC7:
+       case 0xC8:
+       case 0xC9:
+       case 0xCA:
+       case 0xCB:
+       case 0xCC:
+       case 0xCD:
+       case 0xCE:
+       case 0xCF:
+       case 0xD0:
+       case 0xD1:
+       case 0xD2:
+       case 0xD3:
+       case 0xD4:
+       case 0xD5:
+       case 0xD6:
+       case 0xD7:
+       case 0xD8:
+       case 0xD9:
+       case 0xDA:
+       case 0xDB:
+       case 0xDC:
+       case 0xDD:
+       case 0xDE:
+       case 0xDF:
+       case 0xE0:
+       case 0xE1:
+       case 0xE2:
+       case 0xE3:
+       case 0xE4:
+       case 0xE5:
+       case 0xE6:
+       case 0xE7:
+       case 0xE8:
+       case 0xE9:
+       case 0xEA:
+       case 0xEB:
+       case 0xEC:
+       case 0xED:
+       case 0xEE:
+       case 0xEF:
+       case 0xF0:
+       case 0xF1:
+       case 0xF2:
+       case 0xF3:
+       case 0xF4:      goto yy43;
+       default:        goto yy178;
+       }
+yy178:
+       YYDEBUG(178, YYPEEK());
+#line 255 "../wisitoken_grammar.re2c"
+       {*id = 3; continue;}
+#line 5362 "../wisitoken_grammar_re2c.c"
+yy179:
+       YYDEBUG(179, YYPEEK());
+       yyaccept = 3;
+       YYSKIP();
+       YYBACKUP();
+       yych = YYPEEK();
+       switch (yych) {
+       case 'r':       goto yy181;
+       default:        goto yy43;
+       }
+yy180:
+       YYDEBUG(180, YYPEEK());
+       yyaccept = 3;
+       YYSKIP();
+       YYBACKUP();
+       yych = YYPEEK();
+       switch (yych) {
+       case 'a':       goto yy182;
+       default:        goto yy43;
+       }
+yy181:
+       YYDEBUG(181, YYPEEK());
+       yyaccept = 3;
+       YYSKIP();
+       YYBACKUP();
+       yych = YYPEEK();
+       switch (yych) {
+       case 'e':       goto yy183;
+       default:        goto yy43;
+       }
+yy182:
+       YYDEBUG(182, YYPEEK());
+       yyaccept = 3;
+       YYSKIP();
+       YYBACKUP();
+       yych = YYPEEK();
+       switch (yych) {
+       case 'r':       goto yy184;
+       default:        goto yy43;
+       }
+yy183:
+       YYDEBUG(183, YYPEEK());
+       yyaccept = 3;
+       YYSKIP();
+       YYBACKUP();
+       yych = YYPEEK();
+       switch (yych) {
+       case 's':       goto yy186;
+       default:        goto yy43;
+       }
+yy184:
+       YYDEBUG(184, YYPEEK());
+       yyaccept = 18;
+       YYSKIP();
+       YYBACKUP();
+       yych = YYPEEK();
        switch (yych) {
        case '-':
        case '0':
@@ -4281,16 +5530,215 @@ yy144:
        case 0xF1:
        case 0xF2:
        case 0xF3:
-       case 0xF4:      goto yy45;
-       default:        goto yy145;
+       case 0xF4:      goto yy43;
+       default:        goto yy185;
        }
-yy145:
-       YYDEBUG(145, YYPEEK ());
-#line 231 "../wisitoken_grammar.re2c"
-       {*id =  7; continue;}
-#line 4292 "../wisitoken_grammar_re2c.c"
+yy185:
+       YYDEBUG(185, YYPEEK());
+#line 264 "../wisitoken_grammar.re2c"
+       {*id = 12; continue;}
+#line 5541 "../wisitoken_grammar_re2c.c"
+yy186:
+       YYDEBUG(186, YYPEEK());
+       yyaccept = 3;
+       YYSKIP();
+       YYBACKUP();
+       yych = YYPEEK();
+       switch (yych) {
+       case 'o':       goto yy187;
+       default:        goto yy43;
+       }
+yy187:
+       YYDEBUG(187, YYPEEK());
+       yyaccept = 3;
+       YYSKIP();
+       YYBACKUP();
+       yych = YYPEEK();
+       switch (yych) {
+       case 'l':       goto yy188;
+       default:        goto yy43;
+       }
+yy188:
+       YYDEBUG(188, YYPEEK());
+       yyaccept = 3;
+       YYSKIP();
+       YYBACKUP();
+       yych = YYPEEK();
+       switch (yych) {
+       case 'u':       goto yy189;
+       default:        goto yy43;
+       }
+yy189:
+       YYDEBUG(189, YYPEEK());
+       yyaccept = 3;
+       YYSKIP();
+       YYBACKUP();
+       yych = YYPEEK();
+       switch (yych) {
+       case 't':       goto yy190;
+       default:        goto yy43;
+       }
+yy190:
+       YYDEBUG(190, YYPEEK());
+       yyaccept = 3;
+       YYSKIP();
+       YYBACKUP();
+       yych = YYPEEK();
+       switch (yych) {
+       case 'i':       goto yy191;
+       default:        goto yy43;
+       }
+yy191:
+       YYDEBUG(191, YYPEEK());
+       yyaccept = 3;
+       YYSKIP();
+       YYBACKUP();
+       yych = YYPEEK();
+       switch (yych) {
+       case 'o':       goto yy192;
+       default:        goto yy43;
+       }
+yy192:
+       YYDEBUG(192, YYPEEK());
+       yyaccept = 3;
+       YYSKIP();
+       YYBACKUP();
+       yych = YYPEEK();
+       switch (yych) {
+       case 'n':       goto yy193;
+       default:        goto yy43;
+       }
+yy193:
+       YYDEBUG(193, YYPEEK());
+       yyaccept = 19;
+       YYSKIP();
+       YYBACKUP();
+       yych = YYPEEK();
+       switch (yych) {
+       case '-':
+       case '0':
+       case '1':
+       case '2':
+       case '3':
+       case '4':
+       case '5':
+       case '6':
+       case '7':
+       case '8':
+       case '9':
+       case 'A':
+       case 'B':
+       case 'C':
+       case 'D':
+       case 'E':
+       case 'F':
+       case 'G':
+       case 'H':
+       case 'I':
+       case 'J':
+       case 'K':
+       case 'L':
+       case 'M':
+       case 'N':
+       case 'O':
+       case 'P':
+       case 'Q':
+       case 'R':
+       case 'S':
+       case 'T':
+       case 'U':
+       case 'V':
+       case 'W':
+       case 'X':
+       case 'Y':
+       case 'Z':
+       case '_':
+       case 'a':
+       case 'b':
+       case 'c':
+       case 'd':
+       case 'e':
+       case 'f':
+       case 'g':
+       case 'h':
+       case 'i':
+       case 'j':
+       case 'k':
+       case 'l':
+       case 'm':
+       case 'n':
+       case 'o':
+       case 'p':
+       case 'q':
+       case 'r':
+       case 's':
+       case 't':
+       case 'u':
+       case 'v':
+       case 'w':
+       case 'x':
+       case 'y':
+       case 'z':
+       case 0xC2:
+       case 0xC3:
+       case 0xC4:
+       case 0xC5:
+       case 0xC6:
+       case 0xC7:
+       case 0xC8:
+       case 0xC9:
+       case 0xCA:
+       case 0xCB:
+       case 0xCC:
+       case 0xCD:
+       case 0xCE:
+       case 0xCF:
+       case 0xD0:
+       case 0xD1:
+       case 0xD2:
+       case 0xD3:
+       case 0xD4:
+       case 0xD5:
+       case 0xD6:
+       case 0xD7:
+       case 0xD8:
+       case 0xD9:
+       case 0xDA:
+       case 0xDB:
+       case 0xDC:
+       case 0xDD:
+       case 0xDE:
+       case 0xDF:
+       case 0xE0:
+       case 0xE1:
+       case 0xE2:
+       case 0xE3:
+       case 0xE4:
+       case 0xE5:
+       case 0xE6:
+       case 0xE7:
+       case 0xE8:
+       case 0xE9:
+       case 0xEA:
+       case 0xEB:
+       case 0xEC:
+       case 0xED:
+       case 0xEE:
+       case 0xEF:
+       case 0xF0:
+       case 0xF1:
+       case 0xF2:
+       case 0xF3:
+       case 0xF4:      goto yy43;
+       default:        goto yy194;
+       }
+yy194:
+       YYDEBUG(194, YYPEEK());
+#line 258 "../wisitoken_grammar.re2c"
+       {*id = 6; continue;}
+#line 5740 "../wisitoken_grammar_re2c.c"
 }
-#line 263 "../wisitoken_grammar.re2c"
+#line 297 "../wisitoken_grammar.re2c"
 
    }
    /* lexer->cursor and lexer ->char_pos are one char past end of token */
@@ -4299,5 +5747,6 @@ yy145:
    *char_position = lexer->char_token_start;
    *char_length   = lexer->char_pos - lexer->char_token_start;
    *line_start    = lexer->line_token_start;
+   *line_length   = lexer->line - lexer->line_token_start;
    return status;
    }
diff --git a/wisitoken_grammar_re2c_c.ads b/wisitoken_grammar_re2c_c.ads
index d494b3f2a8..c9d4c55b40 100644
--- a/wisitoken_grammar_re2c_c.ads
+++ b/wisitoken_grammar_re2c_c.ads
@@ -1,8 +1,8 @@
---  generated parser support file.
+--  generated parser support file. -*- buffer-read-only:t  -*-
 --  command line: wisitoken-bnf-generate.exe  --generate LALR Ada re2c wisitoken_grammar.wy
 --
 
---  Copyright (C) 2017 - 2019 Free Software Foundation, Inc.
+--  Copyright (C) 2017 - 2022 Free Software Foundation, Inc.
 --
 --  Author: Stephen Leake <stephe-leake@stephe-leake.org>
 --
@@ -28,13 +28,12 @@ package wisitoken_grammar_re2c_c is
 
    function New_Lexer
      (Buffer    : in System.Address;
-      Length    : in Interfaces.C.size_t;
-      Verbosity : in Interfaces.C.int)
+      Length    : in Interfaces.C.size_t)
      return System.Address
    with Import        => True,
         Convention    => C,
         External_Name => "wisitoken_grammar_new_lexer";
-   --  Create the lexer object, passing it the full text to process.
+   --  Create the lexer object, passing it the text buffer.
 
    procedure Free_Lexer (Lexer : in out System.Address)
    with Import        => True,
@@ -47,6 +46,21 @@ package wisitoken_grammar_re2c_c is
         Convention    => C,
         External_Name => "wisitoken_grammar_reset_lexer";
 
+   procedure Set_Verbosity
+     (Lexer     : in System.Address;
+      Verbosity : in Interfaces.C.int)
+   with Import        => True,
+        Convention    => C,
+        External_Name => "wisitoken_grammar_set_verbosity";
+   procedure Set_Position
+     (Lexer         : in System.Address;
+      Byte_Position : in Interfaces.C.size_t;
+      Char_Position : in Interfaces.C.size_t;
+      Line          : in Interfaces.C.int)
+   with Import        => True,
+        Convention    => C,
+        External_Name => "wisitoken_grammar_set_position";
+
    function Next_Token
      (Lexer         : in     System.Address;
       ID            :    out WisiToken.Token_ID;
@@ -54,7 +68,8 @@ package wisitoken_grammar_re2c_c is
       Byte_Length   :    out Interfaces.C.size_t;
       Char_Position :    out Interfaces.C.size_t;
       Char_Length   :    out Interfaces.C.size_t;
-      Line_Start    :    out Interfaces.C.int)
+      Line_Start    :    out Interfaces.C.int;
+      Line_Length   :    out Interfaces.C.int)
      return Interfaces.C.int
    with Import        => True,
         Convention    => C,
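
The wisitoken_grammar_re2c_c.ads hunks above split lexer setup into separate Set_Verbosity and Set_Position calls (New_Lexer no longer takes a Verbosity parameter) and add a Line_Length out parameter to Next_Token. A minimal calling sketch of the revised binding follows; the input string, the start-position values, and the interpretation of the returned status are illustrative assumptions, not part of this change.

   with Ada.Text_IO;
   with Interfaces.C;
   with System;
   with WisiToken;
   with wisitoken_grammar_re2c_c;
   procedure Lexer_Sketch is
      use Interfaces.C;
      use wisitoken_grammar_re2c_c;

      Source : constant String := "%meta_syntax EBNF";  --  hypothetical input
      Lexer  : System.Address  := New_Lexer (Buffer => Source'Address, Length => Source'Length);

      ID                         : WisiToken.Token_ID;
      Byte_Position, Byte_Length : size_t;
      Char_Position, Char_Length : size_t;
      Line_Start, Line_Length    : int;
      Status                     : int;
   begin
      Set_Verbosity (Lexer, 0);
      --  Assumed convention: positions are 1-origin and lexing starts at line 1.
      Set_Position (Lexer, Byte_Position => 1, Char_Position => 1, Line => 1);

      loop
         Status := Next_Token
           (Lexer, ID, Byte_Position, Byte_Length, Char_Position, Char_Length,
            Line_Start, Line_Length);
         --  Exit condition is an assumption; the status encoding is not shown in this diff.
         exit when Status /= 0 or Byte_Length = 0;
         Ada.Text_IO.Put_Line
           ("token" & WisiToken.Token_ID'Image (ID) &
              " starts on line" & int'Image (Line_Start) &
              ", spans" & int'Image (Line_Length) & " line(s)");
      end loop;

      Free_Lexer (Lexer);
   end Lexer_Sketch;
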
diff --git a/wisitoken_grammar_runtime.adb b/wisitoken_grammar_runtime.adb
index e40c1477e1..36a68f0a8e 100644
--- a/wisitoken_grammar_runtime.adb
+++ b/wisitoken_grammar_runtime.adb
@@ -2,7 +2,7 @@
 --
 --  See spec.
 --
---  Copyright (C) 2018 - 2020 Free Software Foundation, Inc.
+--  Copyright (C) 2018 - 2022 Free Software Foundation, Inc.
 --
 --  This library is free software;  you can redistribute it and/or modify it
 --  under terms of the  GNU General Public License  as published by the Free
@@ -17,13 +17,10 @@
 
 pragma License (Modified_GPL);
 
-with Ada.Characters.Handling;
 with Ada.Exceptions;
 with Ada.Strings.Unbounded;
-with Ada.Text_IO;
-with GNAT.Regexp;
-with SAL.Generic_Decimal_Image;
 with WisiToken.Generate;   use WisiToken.Generate;
+with WisiToken.Syntax_Trees.LR_Utils;
 package body WisiToken_Grammar_Runtime is
 
    use WisiToken;
@@ -32,128 +29,85 @@ package body WisiToken_Grammar_Runtime is
    ----------
    --  Body subprograms, misc order
 
-   function Get_Line
-     (Data : in User_Data_Type;
-      Tree : in Syntax_Trees.Tree;
-      Node : in WisiToken.Valid_Node_Index)
-     return WisiToken.Line_Number_Type
+   procedure Set_EBNF
+     (Tree : in Syntax_Trees.Tree;
+      Node : in Syntax_Trees.Valid_Node_Access)
    is
-      --  Find a source line for Node.
+      use all type WisiToken.Syntax_Trees.Augmented_Class_Access;
 
-      use WisiToken.Syntax_Trees;
-
-      Temp : Node_Index := Node;
+      Tree_Aug : constant WisiToken.Syntax_Trees.Augmented_Class_Access := 
Tree.Augmented (Node);
+      Aug      : constant Augmented_Access :=
+        (if Tree_Aug = null
+         then new Augmented
+         else Augmented_Access (Tree_Aug));
    begin
-      loop
-         if Tree.First_Shared_Terminal (Temp) = Invalid_Token_Index then
-            --  Node is empty or all virtual_identifiers; try parents.
-            Temp := Tree.Parent (Temp);
-            exit when Temp = Invalid_Node_Index;
-         else
-            return Data.Terminals.all (Tree.First_Shared_Terminal (Temp)).Line;
-         end if;
-      end loop;
-      return Invalid_Line_Number;
-   end Get_Line;
+      if Tree_Aug = null then
+         Tree.Set_Augmented (Node, 
WisiToken.Syntax_Trees.Augmented_Class_Access (Aug));
+      end if;
+      Aug.EBNF := True;
+   end Set_EBNF;
 
    function Get_Text
      (Data         : in User_Data_Type;
       Tree         : in Syntax_Trees.Tree;
-      Tree_Index   : in Valid_Node_Index;
+      Tree_Index   : in WisiToken.Syntax_Trees.Valid_Node_Access;
       Strip_Quotes : in Boolean := False)
      return String
-   is
-      use all type Syntax_Trees.Node_Label;
-
-      function Strip_Delimiters (Tree_Index : in Valid_Node_Index) return 
String
-      is
-         Region : Buffer_Region renames Data.Terminals.all (Tree.Terminal 
(Tree_Index)).Byte_Region;
-      begin
-         if -Tree.ID (Tree_Index) in RAW_CODE_ID | REGEXP_ID | ACTION_ID then
-            --  Strip delimiters. We don't strip leading/trailing spaces to 
preserve indent.
-            return Data.Grammar_Lexer.Buffer_Text ((Region.First + 2, 
Region.Last - 2));
-
-         elsif -Tree.ID (Tree_Index) in STRING_LITERAL_1_ID | 
STRING_LITERAL_2_ID and Strip_Quotes then
-            return Data.Grammar_Lexer.Buffer_Text ((Region.First + 1, 
Region.Last - 1));
-         else
-            return Data.Grammar_Lexer.Buffer_Text (Region);
-         end if;
-      end Strip_Delimiters;
-
-   begin
-      case Tree.Label (Tree_Index) is
-      when Shared_Terminal =>
-         return Strip_Delimiters (Tree_Index);
-
-      when Virtual_Terminal =>
-         --  Terminal keyword inserted during tree edit. We could check for
-         --  Identifier, but that will be caught later.
-         return Image (Tree.ID (Tree_Index), 
Wisitoken_Grammar_Actions.Descriptor);
-
-      when Virtual_Identifier =>
-         if Strip_Quotes then
-            declare
-               Quoted : constant String := -Data.Tokens.Virtual_Identifiers 
(Tree.Identifier (Tree_Index));
-            begin
-               return Quoted (Quoted'First + 1 .. Quoted'Last - 1);
-            end;
-         else
-            return -Data.Tokens.Virtual_Identifiers (Tree.Identifier 
(Tree_Index));
-         end if;
-
-      when Nonterm =>
-         declare
-            use all type Ada.Strings.Unbounded.Unbounded_String;
-            Result       : Ada.Strings.Unbounded.Unbounded_String;
-            Tree_Indices : constant Valid_Node_Index_Array := 
Tree.Get_Terminals (Tree_Index);
-            Need_Space   : Boolean                                      := 
False;
-         begin
-            for Tree_Index of Tree_Indices loop
-               Result := Result & (if Need_Space then " " else "") &
-                 Get_Text (Data, Tree, Tree_Index, Strip_Quotes);
-               Need_Space := True;
-            end loop;
-            return -Result;
-         end;
-      end case;
+   is begin
+      return Get_Text
+        (Data.Tokens.Virtual_Identifiers,
+         Tree, Tree_Index, Strip_Quotes);
    end Get_Text;
 
+   function Get_Item_Text
+     (Data         : in User_Data_Type;
+      Tree         : in Syntax_Trees.Tree;
+      Node         : in Syntax_Trees.Valid_Node_Access;
+      Strip_Quotes : in Boolean := False)
+     return String
+   is begin
+      return Get_Text (Data, Tree, Tree.Find_Descendant (Node, +rhs_item_ID), 
Strip_Quotes);
+   end Get_Item_Text;
+
    function Get_Child_Text
      (Data         : in User_Data_Type;
       Tree         : in Syntax_Trees.Tree;
-      Parent       : in Valid_Node_Index;
+      Parent       : in Syntax_Trees.Valid_Node_Access;
       Child        : in SAL.Peek_Type;
       Strip_Quotes : in Boolean := False)
      return String
    is
-      Tree_Indices : constant Valid_Node_Index_Array := Tree.Get_Terminals 
(Parent);
+      Tree_Indices : constant Syntax_Trees.Valid_Node_Access_Array := 
Tree.Get_Terminals (Parent);
    begin
       return Get_Text (Data, Tree, Tree_Indices (Child), Strip_Quotes);
    end Get_Child_Text;
 
    procedure Start_If_1
      (Data    : in out User_Data_Type;
-      Tree    : in     Syntax_Trees.Tree;
-      A_Index : in     Valid_Node_Index;
-      B_Index : in     Valid_Node_Index)
+      Tree    : in out Syntax_Trees.Tree;
+      A_Index : in     Syntax_Trees.Valid_Node_Access;
+      B_Index : in     Syntax_Trees.Valid_Node_Access)
    is
-      use all type WisiToken.BNF.Generate_Algorithm;
-      use all type WisiToken.BNF.Lexer_Type;
+      use WisiToken.BNF;
    begin
       if "lexer" = Get_Text (Data, Tree, A_Index) then
-         Data.If_Lexer_Present := True;
-         Data.Ignore_Lines     := Data.User_Lexer /= WisiToken.BNF.To_Lexer 
(Get_Text (Data, Tree, B_Index));
+         declare
+            Right : constant Lexer_Set := Get_Lexer_Set (Data, Tree, B_Index);
+         begin
+            Data.If_Lexer_Present := True;
+            Data.Ignore_Lines     := not Right (Data.User_Lexer);
+         end;
 
       elsif "parser" = Get_Text (Data, Tree, A_Index) then
          Data.If_Parser_Present := True;
-         Data.Ignore_Lines := Data.User_Parser /= 
WisiToken.BNF.Generate_Algorithm'Value
-           (Get_Text (Data, Tree, B_Index));
+         declare
+            Right : constant Generate_Algorithm_Set := 
Get_Generate_Algorithm_Set (Data, Tree, B_Index);
+         begin
+            Data.Ignore_Lines := not Right (Data.User_Parser);
+         end;
 
       else
-         raise Grammar_Error with
-           Error_Message
-             (Data.Grammar_Lexer.File_Name, Data.Terminals.all 
(Tree.First_Shared_Terminal (A_Index)).Line,
-              "invalid '%if'; must be one of {lexer | parser}");
+         Put_Error (Tree.Error_Message ((A_Index), "invalid '%if'; must be one 
of {lexer | parser}"));
       end if;
    end Start_If_1;
 
@@ -161,24 +115,35 @@ package body WisiToken_Grammar_Runtime is
      (Data   : in out User_Data_Type;
       Tree   : in     Syntax_Trees.Tree;
       Labels : in out WisiToken.BNF.String_Arrays.Vector;
-      Token  : in     Valid_Node_Index)
+      Token  : in     Syntax_Trees.Valid_Node_Access)
      return WisiToken.BNF.RHS_Type
    with Pre => Tree.ID (Token) = +rhs_ID
    is
+      use all type WisiToken.Syntax_Trees.Augmented_Class_Access;
       use all type SAL.Base_Peek_Type;
-      Children : constant Valid_Node_Index_Array := Tree.Children (Token);
+      Children : constant Syntax_Trees.Node_Access_Array := Tree.Children 
(Token);
    begin
       return RHS : WisiToken.BNF.RHS_Type do
-         RHS.Source_Line := Get_Line (Data, Tree, Token);
+         RHS.Source_Line := Tree.Line_Region (Token, Trailing_Non_Grammar => 
True).First;
+
+         if Tree.Augmented (Token) /= null then
+            declare
+               Aug : constant Augmented_Access := Augmented_Access 
(Tree.Augmented (Token));
+            begin
+               RHS.Auto_Token_Labels := Aug.Auto_Token_Labels;
+               RHS.Edited_Token_List := Aug.Edited_Token_List;
+            end;
+         end if;
 
          if Children'Length > 0 then
-            for I of Tree.Get_IDs (Children (1), +rhs_element_ID) loop
+            for  I of Tree.Get_IDs (Children (1), +rhs_element_ID) loop
                case Tree.RHS_Index (I) is
                when 0 =>
                   --  rhs_item
                   RHS.Tokens.Append
-                    ((Label      => +"",
-                      Identifier => +Get_Text (Data, Tree, Tree.Child (I, 
1))));
+                    (WisiToken.BNF.Labeled_Token'
+                       (Label      => +"",
+                        Identifier => +Get_Text (Data, Tree, Tree.Child (I, 
1))));
 
                when 1 =>
                   --  IDENTIFIER = rhs_item
@@ -186,8 +151,9 @@ package body WisiToken_Grammar_Runtime is
                      Label : constant String := Get_Text (Data, Tree, 
Tree.Child (I, 1));
                   begin
                      RHS.Tokens.Append
-                       ((Label      => +Label,
-                         Identifier => +Get_Text (Data, Tree, Tree.Child (I, 
3))));
+                       (WisiToken.BNF.Labeled_Token'
+                          (Label      => +Label,
+                           Identifier => +Get_Text (Data, Tree, Tree.Child (I, 
3))));
 
                      if (for all L of Labels => -L /= Label) then
                         Labels.Append (+Label);
@@ -195,7 +161,7 @@ package body WisiToken_Grammar_Runtime is
                   end;
 
                when others =>
-                  Raise_Programmer_Error ("Get_RHS; unimplimented token", 
Data, Tree, I);
+                  WisiToken.Syntax_Trees.LR_Utils.Raise_Programmer_Error 
("Get_RHS; unimplemented token", Tree, I);
                end case;
             end loop;
 
@@ -220,22 +186,28 @@ package body WisiToken_Grammar_Runtime is
    when SAL.Programmer_Error =>
       raise;
    when E : others =>
-      declare
-         use Ada.Exceptions;
-      begin
-         Raise_Programmer_Error ("Get_RHS: " & Exception_Name (E) & ": " & 
Exception_Message (E), Data, Tree, Token);
-      end;
+      if Debug_Mode then
+         raise;
+      else
+         declare
+            use Ada.Exceptions;
+         begin
+            WisiToken.Syntax_Trees.LR_Utils.Raise_Programmer_Error
+              ("Get_RHS: " & Exception_Name (E) & ": " & Exception_Message 
(E), Tree, Token);
+            raise; -- WORKAROUND; GNAT pro_22.0w-20201222 ignores 'pragma 
no_return' on Raise_Programmer_Error
+         end;
+      end if;
    end Get_RHS;
 
    procedure Get_Right_Hand_Sides
      (Data             : in out User_Data_Type;
-      Tree             : in     WisiToken.Syntax_Trees.Tree;
+      Tree             : in out WisiToken.Syntax_Trees.Tree;
       Right_Hand_Sides : in out WisiToken.BNF.RHS_Lists.List;
       Labels           : in out WisiToken.BNF.String_Arrays.Vector;
-      Token            : in     WisiToken.Valid_Node_Index)
+      Token            : in     WisiToken.Syntax_Trees.Valid_Node_Access)
    with Pre => Tree.ID (Token) = +rhs_list_ID
    is
-      Tokens : constant Valid_Node_Index_Array := Tree.Children (Token);
+      Tokens : constant Syntax_Trees.Node_Access_Array := Tree.Children 
(Token);
    begin
       case Tree.RHS_Index (Token) is
       when 0 =>
@@ -252,18 +224,23 @@ package body WisiToken_Grammar_Runtime is
             Right_Hand_Sides.Append (Get_RHS (Data, Tree, Labels, Tokens (3)));
          end if;
 
-      when 2 =>
-         --  | rhs_list PERCENT IF IDENTIFIER EQUAL IDENTIFIER
+      when 2 | 4 =>
+         --  | rhs_list PERCENT (IF | ELSIF) IDENTIFIER EQUAL IDENTIFIER
+         Get_Right_Hand_Sides (Data, Tree, Right_Hand_Sides, Labels, Tokens 
(1));
+         Start_If_1 (Data, Tree, Tokens (4), Tokens (6));
+
+      when 3 | 5 =>
+         --  | rhs_list PERCENT (IF | ELSIF) IDENTIFIER IN IDENTIFIER_BAR_list
          Get_Right_Hand_Sides (Data, Tree, Right_Hand_Sides, Labels, Tokens 
(1));
          Start_If_1 (Data, Tree, Tokens (4), Tokens (6));
 
-      when 3 =>
+      when 6 =>
          --  | rhs_list PERCENT END IF
          Get_Right_Hand_Sides (Data, Tree, Right_Hand_Sides, Labels, Tokens 
(1));
          Data.Ignore_Lines := False;
 
       when others =>
-         Raise_Programmer_Error ("Get_Right_Hand_Sides", Data, Tree, Token);
+         WisiToken.Syntax_Trees.LR_Utils.Raise_Programmer_Error 
("Get_Right_Hand_Sides", Tree, Token);
       end case;
    end Get_Right_Hand_Sides;
 
@@ -271,21 +248,24 @@ package body WisiToken_Grammar_Runtime is
    --  Public subprograms, declaration order
 
    overriding
-   procedure Set_Lexer_Terminals
-     (User_Data : in out User_Data_Type;
-      Lexer     : in     WisiToken.Lexer.Handle;
-      Terminals : in     Base_Token_Array_Access_Constant)
-   is begin
-      User_Data.Grammar_Lexer := Lexer;
-      User_Data.Terminals     := Terminals;
-   end Set_Lexer_Terminals;
+   function Copy_Augmented
+     (User_Data : in User_Data_Type;
+      Augmented : in WisiToken.Syntax_Trees.Augmented_Class_Access)
+     return WisiToken.Syntax_Trees.Augmented_Class_Access
+   is
+      Old_Aug : WisiToken_Grammar_Runtime.Augmented renames Augmented_Access 
(Augmented).all;
+      New_Aug : constant Augmented_Access := new 
WisiToken_Grammar_Runtime.Augmented'
+        (Old_Aug.EBNF, Old_Aug.Auto_Token_Labels, Old_Aug.Edited_Token_List);
+   begin
+      return WisiToken.Syntax_Trees.Augmented_Class_Access (New_Aug);
+   end Copy_Augmented;
 
    overriding procedure Reset (Data : in out User_Data_Type)
    is begin
       --  Preserve data set in Phase Meta, or by Set_Lexer_Terminals, or by
       --  wisitoken-bnf-generate.
 
-      --  Preserve Grammar_Lexer
+      --  Preserve Lexer
       --  Preserve User_Lexer
       --  Preserve User_Parser
       --  Preserve Generate_Set
@@ -293,14 +273,15 @@ package body WisiToken_Grammar_Runtime is
       --  Preserve Phase
       --  Preserve Terminals
       --  Preserve Non_Grammar
-      --  EBNF_Nodes handled in Initialize_Actions
       Data.Raw_Code          := (others => <>);
       Data.Language_Params   :=
         (Case_Insensitive => Data.Language_Params.Case_Insensitive,
-         others => <>);
+         Error_Recover    => Data.Language_Params.Error_Recover,
+         others           => <>);
       Data.Tokens            :=
         (Virtual_Identifiers => Data.Tokens.Virtual_Identifiers,
          others => <>);
+      Data.Suppress.Clear;
       Data.Conflicts.Clear;
       Data.McKenzie_Recover  := (others => <>);
       Data.Rule_Count        := 0;
@@ -316,50 +297,66 @@ package body WisiToken_Grammar_Runtime is
      (Data : in out User_Data_Type;
       Tree : in     WisiToken.Syntax_Trees.Tree'Class)
    is begin
-      Data.EBNF_Nodes.Clear;
-      Data.EBNF_Nodes.Set_First_Last (Tree.First_Index, Tree.Last_Index);
+      null;
    end Initialize_Actions;
 
-   overriding
-   procedure Lexer_To_Augmented
-     (Data  : in out          User_Data_Type;
-      Tree  : in out          WisiToken.Syntax_Trees.Tree'Class;
-      Token : in              WisiToken.Base_Token;
-      Lexer : not null access WisiToken.Lexer.Instance'Class)
+   function Get_Lexer_Set
+     (User_Data : in out User_Data_Type;
+      Tree      : in out WisiToken.Syntax_Trees.Tree;
+      Node      : in     Syntax_Trees.Valid_Node_Access)
+     return WisiToken.BNF.Lexer_Set
    is
-      pragma Unreferenced (Lexer);
-      use all type Ada.Containers.Count_Type;
+      use WisiToken.BNF;
    begin
-      if Token.ID < Wisitoken_Grammar_Actions.Descriptor.First_Terminal then
-         --  Non-grammar token
-         if Data.Terminals.Length = 0 then
-            Data.Leading_Non_Grammar.Append (Token);
+      return Result : Lexer_Set := (others => False) do
+         if Tree.ID (Node) = +IDENTIFIER_ID then
+            Result (To_Lexer (Get_Text (User_Data, Tree, Node))) := True;
          else
             declare
-               Containing_Aug : Augmented_Token_Access := 
Augmented_Token_Access
-                 (Tree.Augmented (Data.Last_Terminal_Node));
+               use WisiToken.Syntax_Trees.LR_Utils;
+               List : constant Constant_List := Creators.Create_List
+                 (Tree, Node, +IDENTIFIER_BAR_list_ID, +IDENTIFIER_ID);
             begin
-               if Containing_Aug = null then
-                  Containing_Aug := new Augmented_Token'
-                    (Data.Terminals.all (Tree.First_Shared_Terminal 
(Data.Last_Terminal_Node)) with Non_Grammar => <>);
-                  Tree.Set_Augmented (Data.Last_Terminal_Node, 
WisiToken.Base_Token_Class_Access (Containing_Aug));
-               end if;
+               for Item of List loop
+                  Result (To_Lexer (Get_Text (User_Data, Tree, Item))) := True;
+               end loop;
+            end;
+         end if;
+      end return;
+   end Get_Lexer_Set;
 
-               Containing_Aug.Non_Grammar.Append (Token);
+   function Get_Generate_Algorithm_Set
+     (User_Data : in out User_Data_Type;
+      Tree      : in out WisiToken.Syntax_Trees.Tree;
+      Node      : in     Syntax_Trees.Valid_Node_Access)
+     return WisiToken.BNF.Generate_Algorithm_Set
+   is
+      use WisiToken.BNF;
+   begin
+      return Result : Generate_Algorithm_Set := (others => False) do
+         if Tree.ID (Node) = +IDENTIFIER_ID then
+            Result (To_Generate_Algorithm (Get_Text (User_Data, Tree, Node))) 
:= True;
+         else
+            declare
+               use WisiToken.Syntax_Trees.LR_Utils;
+               List : constant Constant_List := Creators.Create_List
+                 (Tree, Node, +IDENTIFIER_BAR_list_ID, +IDENTIFIER_ID);
+            begin
+               for Item of List loop
+                  Result (To_Generate_Algorithm (Get_Text (User_Data, Tree, 
Item))) := True;
+               end loop;
             end;
          end if;
-      else
-         Data.Last_Terminal_Node := Token.Tree_Index;
-      end if;
-   end Lexer_To_Augmented;
+      end return;
+   end Get_Generate_Algorithm_Set;
 
    procedure Start_If
      (User_Data : in out WisiToken.Syntax_Trees.User_Data_Type'Class;
-      Tree      : in     WisiToken.Syntax_Trees.Tree;
-      Tokens    : in     WisiToken.Valid_Node_Index_Array)
+      Tree      : in out WisiToken.Syntax_Trees.Tree;
+      Nonterm   : in     WisiToken.Syntax_Trees.Valid_Node_Access)
    is begin
       --  all phases
-      Start_If_1 (User_Data_Type (User_Data), Tree, Tokens (3), Tokens (5));
+      Start_If_1 (User_Data_Type (User_Data), Tree, Tree.Child (Nonterm, 3), 
Tree.Child (Nonterm, 5));
    end Start_If;
 
    procedure End_If (User_Data : in out 
WisiToken.Syntax_Trees.User_Data_Type'Class)
@@ -372,52 +369,54 @@ package body WisiToken_Grammar_Runtime is
 
    procedure Add_Declaration
      (User_Data : in out WisiToken.Syntax_Trees.User_Data_Type'Class;
-      Tree      : in     WisiToken.Syntax_Trees.Tree;
-      Tokens    : in     WisiToken.Valid_Node_Index_Array)
+      Tree      : in out WisiToken.Syntax_Trees.Tree;
+      Nonterm   : in     WisiToken.Syntax_Trees.Valid_Node_Access)
    is
-      use all type WisiToken.Syntax_Trees.Node_Label;
       use all type Ada.Strings.Unbounded.Unbounded_String;
+      use all type SAL.Base_Peek_Type;
+      use all type WisiToken.Syntax_Trees.Node_Label;
 
       Data : User_Data_Type renames User_Data_Type (User_Data);
 
-      function Token (Index : in SAL.Peek_Type) return Base_Token
+      function Token_Byte_Region (Index : in SAL.Peek_Type) return 
Buffer_Region
       is
-         use all type SAL.Base_Peek_Type;
+         Child : constant Syntax_Trees.Valid_Node_Access := Tree.Child 
(Nonterm, Index);
       begin
-         if Tokens'Last < Index then
-            raise SAL.Programmer_Error;
-         elsif Tree.Label (Tokens (Index)) /= 
WisiToken.Syntax_Trees.Shared_Terminal then
-            raise SAL.Programmer_Error with "token at " & Image 
(Tree.Byte_Region (Tokens (Index))) &
-              " is a " & WisiToken.Syntax_Trees.Node_Label'Image (Tree.Label 
(Tokens (Index))) &
-              ", expecting Shared_Terminal";
+         if Tree.Label (Child) /= WisiToken.Syntax_Trees.Source_Terminal then
+            raise SAL.Programmer_Error with "token at " & Image
+              (Tree.Byte_Region (Child, Trailing_Non_Grammar => False)) &
+              " is a " & WisiToken.Syntax_Trees.Node_Label'Image (Tree.Label 
(Child)) &
+              ", expecting Source_Terminal";
          else
-            return Data.Terminals.all (Tree.Terminal (Tokens (Index)));
+            return Tree.Byte_Region (Child, Trailing_Non_Grammar => False);
          end if;
-      end Token;
+      end Token_Byte_Region;
 
       function Enum_ID (Index : in SAL.Peek_Type) return Token_Enum_ID
-      is (To_Token_Enum (Token (Index).ID));
+      is (To_Token_Enum (Tree.ID (Tree.Child (Nonterm, Index))));
 
    begin
       if Data.Phase = Meta then
-         if Tree.Label (Tokens (2)) = WisiToken.Syntax_Trees.Shared_Terminal 
then
+         if Tree.Label (Tree.Child (Nonterm, 2)) = 
WisiToken.Syntax_Trees.Source_Terminal then
             case Enum_ID (2) is
             when IDENTIFIER_ID =>
                declare
-                  Kind : constant String := Data.Grammar_Lexer.Buffer_Text 
(Token (2).Byte_Region);
+                  Kind : constant String := Tree.Lexer.Buffer_Text 
(Token_Byte_Region (2));
                begin
                   if Kind = "case_insensitive" then
                      Data.Language_Params.Case_Insensitive := True;
 
                   elsif Kind = "generate" then
                      declare
-                        use all type SAL.Base_Peek_Type;
-                        Children : constant Valid_Node_Index_Array := 
Tree.Get_Terminals (Tokens (3));
+                        Children : constant 
Syntax_Trees.Valid_Node_Access_Array := Tree.Get_Terminals
+                          (Tree.Child (Nonterm, 3));
                         Tuple    : WisiToken.BNF.Generate_Tuple;
                      begin
-                        Tuple.Gen_Alg  := WisiToken.BNF.To_Generate_Algorithm 
(Get_Text (Data, Tree, Children (1)));
+                        Tuple.Gen_Alg  := WisiToken.BNF.To_Generate_Algorithm
+                          (Get_Text (Data, Tree, Children (1)));
                         if Children'Last >= 2 then
-                           Tuple.Out_Lang := WisiToken.BNF.To_Output_Language 
(Get_Text (Data, Tree, Children (2)));
+                           Tuple.Out_Lang := WisiToken.BNF.To_Output_Language
+                             (Get_Text (Data, Tree, Children (2)));
                         end if;
                         for I in 3 .. SAL.Base_Peek_Type (Children'Length) loop
                            declare
@@ -429,36 +428,31 @@ package body WisiToken_Grammar_Runtime is
                               elsif (for some I of WisiToken.BNF.Lexer_Image 
=> Text = I.all) then
                                  Tuple.Lexer := WisiToken.BNF.To_Lexer (Text);
 
-                              elsif (for some I in 
WisiToken.BNF.Valid_Interface =>
-                                       WisiToken.BNF.To_Lower (Text) = 
WisiToken.BNF.To_Lower
-                                         (WisiToken.BNF.Valid_Interface'Image 
(I)))
-                              then
+                              elsif WisiToken.BNF.Is_Valid_Interface (Text) 
then
                                  Tuple.Interface_Kind := 
WisiToken.BNF.Valid_Interface'Value (Text);
                               else
-                                 declare
-                                    Token : Base_Token renames 
Data.Terminals.all (Tree.Terminal (Children (I)));
-                                 begin
-                                    raise Grammar_Error with Error_Message
-                                      (Data.Grammar_Lexer.File_Name, 
Token.Line, Token.Column,
-                                       "invalid generate param '" & Text & 
"'");
-                                 end;
+                                 Put_Error (Tree.Error_Message (Children (I), 
"invalid generate param '" & Text & "'"));
                               end if;
                            end;
                         end loop;
                         WisiToken.BNF.Add (Data.Generate_Set, Tuple);
                      end;
 
+                  elsif Kind'Length > 8 and then Kind (Kind'First .. 
Kind'First + 7) = "mckenzie" then
+                     Data.Language_Params.Error_Recover := True;
+
                   elsif Kind = "meta_syntax" then
                      if Data.Meta_Syntax = Unknown then
                         --  Don't overwrite; somebody set it for a reason.
                         declare
-                           Value_Str : constant String := 
WisiToken.BNF.To_Lower (Get_Text (Data, Tree, Tokens (3)));
+                           Value_Str : constant String := 
WisiToken.BNF.To_Lower
+                             (Get_Text (Data, Tree, Tree.Child (Nonterm, 3)));
                         begin
                            if Value_Str = "bnf" then
                               Data.Meta_Syntax := BNF_Syntax;
                            elsif Value_Str = "ebnf" then
                               Data.Meta_Syntax := EBNF_Syntax;
-                              Data.EBNF_Nodes (Tree.Find_Ancestor (Tokens (2), 
+declaration_ID)) := True;
+                              Set_EBNF (Tree, Tree.Find_Ancestor (Tree.Child 
(Nonterm, 2), +declaration_ID));
 
                            else
                               Put_Error ("invalid value for %meta_syntax; must 
be BNF | EBNF.");
@@ -481,2962 +475,638 @@ package body WisiToken_Grammar_Runtime is
          return;
       end if;
 
-      case Tree.Label (Tokens (2)) is
-      when Syntax_Trees.Nonterm =>
-         --  must be token_keyword_non_grammar
+      case Enum_ID (2) is
+      --  Same order as declaration rhs_list in wisitoken_grammar.wy
+      when Wisitoken_Grammar_Actions.TOKEN_ID | NON_GRAMMAR_ID =>
          declare
-            Children_2 : constant Valid_Node_Index_Array := Tree.Children 
(Tokens (2));
-            Child_1_ID : constant Token_Enum_ID := To_Token_Enum (Tree.ID 
(Children_2 (1)));
+            --  % TOKEN < kind > name value [repair_image]
+            --  1 2     3 4    5 6    7      8
+            Kind  : constant String := Get_Text (Data, Tree, Tree.Child 
(Nonterm, 4));
+            Name  : constant String := Get_Text (Data, Tree, Tree.Child 
(Nonterm, 6));
+            Value : constant String :=
+              (if Tree.Child_Count (Nonterm) >= 7 then Get_Text (Data, Tree, 
Tree.Child (Nonterm, 7)) else "");
+
+            Repair_Image : constant String :=
+              (if Tree.Child_Count (Nonterm) = 8 then Get_Text (Data, Tree, 
Tree.Child (Nonterm, 8)) else "");
          begin
-            case Child_1_ID is
-            when Wisitoken_Grammar_Actions.TOKEN_ID =>
-               declare
-                  Children_4 : constant Valid_Node_Index_Array := 
Tree.Children (Tokens (4));
-               begin
-                  WisiToken.BNF.Add_Token
-                    (Data.Tokens.Tokens,
-                     Kind         => Get_Text (Data, Tree, Children_2 (3)),
-                     Name         => Get_Text (Data, Tree, Tokens (3)),
-                     Value        => Get_Text (Data, Tree, Children_4 (1)),
-                     Repair_Image => (if Children_4'Length = 1 then "" else 
Get_Text (Data, Tree, Children_4 (2))));
-               end;
-
-            when KEYWORD_ID =>
-
-               Data.Tokens.Keywords.Append
-                 ((Name  => +Get_Text (Data, Tree, Tokens (3)),
-                   Value => +Get_Text (Data, Tree, Tokens (4))));
-
-            when NON_GRAMMAR_ID =>
+            if Kind = "delimited_text" or
+              Kind = "comment-one-line"
+            then
+               if Value = Repair_Image then
+                  Put_Error (Tree.Error_Message (Nonterm, "start, end 
delimiters must be different"));
+               end if;
+            end if;
 
+            if Enum_ID (2) = Wisitoken_Grammar_Actions.TOKEN_ID then
+               WisiToken.BNF.Add_Token
+                 (Data.Tokens.Tokens,
+                  Kind         => Kind,
+                  Name         => Name,
+                  Value        => Value,
+                  Repair_Image => Repair_Image);
+            else
                WisiToken.BNF.Add_Token
                  (Data.Tokens.Non_Grammar,
-                  Kind  => Get_Text (Data, Tree, Children_2 (3)),
-                  Name  => Get_Text (Data, Tree, Tokens (3)),
-                  Value => Get_Text (Data, Tree, Tokens (4)));
-
-            when others =>
-               raise SAL.Programmer_Error;
-            end case;
+                  Kind         => Kind,
+                  Name         => Name,
+                  Value        => Value,
+                  Repair_Image => Repair_Image);
+            end if;
          end;
 
-      when Syntax_Trees.Shared_Terminal =>
-         case Enum_ID (2) is
-         when CODE_ID =>
-            declare
-               Location : WisiToken.BNF.Raw_Code_Location;
-
-               --  % code identifier_list raw_code
-               --  1 2    3               4
-               --
-               --  identifier_list = "action spec context"
-               --  identifier_list children = identifier_list IDENTIFIER_ID
-               --  children = identifier_list IDENTIFIER_ID
-               --  children = IDENTIFIER_ID
-               function Get_Loc_List return Base_Token_Array
-               with Pre => Tree.ID (Tokens (3)) = +identifier_list_ID
-               is
-                  use all type SAL.Base_Peek_Type;
-                  use WisiToken.Syntax_Trees;
-                  Node   : Valid_Node_Index := Tokens (3);
-                  Result : Base_Token_Array (1 .. 3);
-                  First  : SAL.Peek_Type    := Result'Last + 1;
-               begin
-                  loop
-                     pragma Assert (Tree.ID (Node) = +identifier_list_ID);
-                     exit when not Tree.Has_Children (Node);
-                     declare
-                        Children : constant Valid_Node_Index_Array := 
Tree.Children (Node);
-                     begin
-                        if Children'Length = 1 then
-                           --  identifier_list : IDENTIFIER
-                           First := First - 1;
-                           Result (First) := Data.Terminals.all (Tree.Terminal 
(Children (1)));
-                           exit;
-
-                        elsif Children'Length = 2 then
-                           --  identifier_list : identifier_list IDENTIFIER
-                           First := First - 1;
-                           Result (First) := Data.Terminals.all (Tree.Terminal 
(Children (2)));
-
-                           Node := Children (1);
-                        else
-                           raise SAL.Programmer_Error;
-                        end if;
-                     end;
-                  end loop;
-                  return Result (First .. Result'Last);
-               end Get_Loc_List;
-
-               Loc_List : constant Base_Token_Array := Get_Loc_List;
+      when KEYWORD_ID =>
+         --  % TOKEN name value
+         --  1 2     3    4
+         Data.Tokens.Keywords.Append
+           ((Name  => +Get_Text (Data, Tree, Tree.Child (Nonterm, 3)),
+             Value => +Get_Text (Data, Tree, Tree.Child (Nonterm, 4))));
 
-               function Get_Loc (Index : in SAL.Peek_Type) return String
-               is (Data.Grammar_Lexer.Buffer_Text (Loc_List 
(Index).Byte_Region));
+      when CODE_ID =>
+         declare
+            Location : WisiToken.BNF.Raw_Code_Location;
 
+            --  % CODE identifier_list RAW_CODE
+            --  1 2    3               4
+            --
+            --  identifier_list = "action spec context"
+            --  identifier_list children = identifier_list IDENTIFIER_ID
+            --  children = identifier_list IDENTIFIER_ID
+            --  children = IDENTIFIER_ID
+            function Get_Loc_List return Syntax_Trees.Valid_Node_Access_Array
+            with Pre => Tree.ID (Tree.Child (Nonterm, 3)) = +identifier_list_ID
+            is
+               use WisiToken.Syntax_Trees;
+               Node   : Valid_Node_Access := Tree.Child (Nonterm, 3);
+               Result : Valid_Node_Access_Array (1 .. 3) := (others => 
Dummy_Node);
+               First  : SAL.Peek_Type    := Result'Last + 1;
             begin
-               if Get_Loc (Loc_List'First) = "actions" then
+               loop
+                  pragma Assert (Tree.ID (Node) = +identifier_list_ID);
+                  exit when not Tree.Has_Children (Node);
+                  declare
+                     Children : constant Node_Access_Array := Tree.Children 
(Node);
+                  begin
+                     if Children'Length = 1 then
+                        --  identifier_list : IDENTIFIER
+                        First := First - 1;
+                        Result (First) := Children (1);
+                        exit;
+
+                     elsif Children'Length = 2 then
+                        --  identifier_list : identifier_list IDENTIFIER
+                        First := First - 1;
+                        Result (First) := Children (2);
+
+                        Node := Children (1);
+                     else
+                        raise SAL.Programmer_Error;
+                     end if;
+                  end;
+               end loop;
+               return Result (First .. Result'Last);
+            end Get_Loc_List;
+
+            Loc_List : constant Syntax_Trees.Valid_Node_Access_Array := 
Get_Loc_List;
+
+            function Get_Loc (Index : in SAL.Peek_Type) return String
+            is (Tree.Lexer.Buffer_Text (Tree.Byte_Region (Loc_List (Index), 
Trailing_Non_Grammar => False)));
+
+         begin
+            if Get_Loc (Loc_List'First) = "actions" then
+               if (Get_Loc (2) = "spec" or Get_Loc (2) = "body") and
+                 (Get_Loc (3) = "context" or Get_Loc (3) = "pre" or Get_Loc 
(3) = "post")
+               then
                   Location :=
                     (if Get_Loc (2) = "spec" then
                        (if Get_Loc (3) = "context" then 
WisiToken.BNF.Actions_Spec_Context
                         elsif Get_Loc (3) = "pre" then 
WisiToken.BNF.Actions_Spec_Pre
                         elsif Get_Loc (3) = "post" then 
WisiToken.BNF.Actions_Spec_Post
-                        else raise Grammar_Error with
-                          Error_Message
-                            (Data.Grammar_Lexer.File_Name, Loc_List (2).Line,
-                             "expecting {context | pre | post}"))
+                        else raise SAL.Programmer_Error)
 
                      elsif Get_Loc (2) = "body" then
                        (if Get_Loc (3) = "context" then 
WisiToken.BNF.Actions_Body_Context
                         elsif Get_Loc (3) = "pre" then 
WisiToken.BNF.Actions_Body_Pre
                         elsif Get_Loc (3) = "post" then 
WisiToken.BNF.Actions_Body_Post
-                        else raise Grammar_Error with
-                          Error_Message
-                            (Data.Grammar_Lexer.File_Name, Loc_List (2).Line,
-                             "expecting {context | pre | post}"))
-
-                     else raise Grammar_Error);
-
-               elsif Get_Loc (Loc_List'First) = "copyright_license" then
-                  Location := WisiToken.BNF.Copyright_License;
+                        else raise SAL.Programmer_Error)
 
+                     else raise SAL.Programmer_Error);
                else
-                  raise Grammar_Error with
-                    Error_Message
-                      (Data.Grammar_Lexer.File_Name, Loc_List 
(Loc_List'First).Line,
-                       "expecting {actions | copyright_license}");
+                  Put_Error (Tree.Error_Message (Loc_List (2), "expecting 
{spec | body} {context | pre | post}"));
                end if;
 
-               Data.Raw_Code (Location) := WisiToken.BNF.Split_Lines (Get_Text 
(Data, Tree, Tokens (4)));
-            exception
-            when Grammar_Error =>
-               Put_Error
-                 (Error_Message
-                    (Data.Grammar_Lexer.File_Name, Token (2).Line, Token 
(2).Column,
-                     "invalid raw code location; actions {spec | body} 
{context | pre | post}"));
-            end;
+            elsif Get_Loc (Loc_List'First) = "copyright_license" then
+               Location := WisiToken.BNF.Copyright_License;
 
-         when IDENTIFIER_ID =>
-            declare
-               Kind : constant String := Data.Grammar_Lexer.Buffer_Text (Token 
(2).Byte_Region);
-            begin
-               --  Alphabetical by Kind
+            else
+               Put_Error (Tree.Error_Message (Loc_List (Loc_List'First), 
"expecting {actions | copyright_license}"));
+            end if;
 
-               if Kind = "case_insensitive" then
-                  --  Not in phase Other
-                  null;
+            Data.Raw_Code (Location) := WisiToken.BNF.Split_Lines (Get_Text 
(Data, Tree, Tree.Child (Nonterm, 4)));
+         end;
+
+      when CONFLICT_ID | CONFLICT_RESOLUTION_ID =>
+         declare
+            --  % CONFLICT conflict_item_list ON TOKEN on_symbol [: resolution]
+            --  1 2        3                  4  5     6          7 8
+            --
+            --  conflict_item_list : [action] LHS (| [action] LHS)*
+
+            Conflict_Items : constant Syntax_Trees.Valid_Node_Access_Array := 
Tree.Get_Terminals
+              (Tree.Child (Nonterm, 3));
+
+            Conflict : BNF.Conflict;
+         begin
+            Conflict.Source_Line := Tree.Line_Region (Nonterm, 
Trailing_Non_Grammar => True).First;
+
+            if Conflict_Items'Length < 3 or else
+              Tree.ID (Conflict_Items (3)) /= +BAR_ID
+            then
+               --  Tree_Sitter format
+               for LHS of Conflict_Items loop
+                  Conflict.Items.Append
+                    ((Name  => +"",
+                      Value => +Get_Text (Data, Tree, LHS)));
+               end loop;
+
+            else
+               --  wisi format
+               declare
+                  I : SAL.Peek_Type := 1;
+               begin
+                  loop
+                     Conflict.Items.Append
+                       ((Name  => +Get_Text (Data, Tree, Conflict_Items (I)),
+                         Value => +Get_Text (Data, Tree, Conflict_Items (I + 1))));
+
+                     I := I + 2;
+                     exit when I > Conflict_Items'Last;
+                     I := I + 1;
+                  end loop;
+               end;
+
+               Conflict.On := +Get_Text (Data, Tree, Tree.Child (Nonterm, 6));
+            end if;
+
+            if Tree.Child_Count (Nonterm) = 8 then
+               Conflict.Resolution := +Get_Text (Data, Tree, Tree.Child (Nonterm, 8));
+            end if;
+            Data.Conflicts.Append (Conflict);
+         end;
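The comment at the top of this branch gives the two accepted %conflict forms. For illustration only (the action and nonterminal names here are hypothetical, not taken from this commit), a wisi-format declaration with the optional resolution would read

    %conflict SHIFT subprogram_body | REDUCE name on token IDENTIFIER : subprogram_body

while the tree-sitter format lists only the conflicting nonterminals, with no actions, '|' separators, or 'on token' part:

    %conflict subprogram_body name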
+
+      when IDENTIFIER_ID =>
+         declare
+            Kind : constant String := Tree.Lexer.Buffer_Text (Token_Byte_Region (2));
+         begin
+            --  Alphabetical by Kind
+
+            if Kind = "case_insensitive" then
+               --  Not in phase Other
+               null;
+
+            elsif Kind = "end" then
+               --  matching '%if' specified current lexer.
+               null;
+
+            elsif Kind = "elisp_face" then
+               Data.Tokens.Faces.Append (Get_Text (Data, Tree, Tree.Child (Nonterm, 3), Strip_Quotes => True));
 
-               elsif Kind = "conflict" then
+            elsif Kind = "elisp_indent" then
+               declare
+                  use WisiToken.Syntax_Trees.LR_Utils;
+
+                  Items : constant Constant_List := Creators.Create_List
+                    (Tree, Tree.Child (Nonterm, 3), +declaration_item_list_ID, +declaration_item_ID);
+                  Iter : constant Constant_Iterator := Iterate_Constant (Items);
+                  Item : Cursor := Items.First;
+                  Elisp_Name : constant String := Get_Text (Data, Tree, Items (Item), Strip_Quotes => True);
+               begin
+                  Item := Iter.Next (Item);
                   declare
-                     Tree_Indices : constant Valid_Node_Index_Array := Tree.Get_Terminals
-                       (Tokens (3));
-                     --   %conflict <action_a>/<action_b> in state <LHS_A>, <LHS_B> on token <on>
-                     --              1        2 3         4  5      6     7  8      9  10     11
+                     Ada_Name             : constant String := Get_Text (Data, Tree, Items (Item));
+                     Function_Args_Region : Buffer_Region   := Null_Buffer_Region;
                   begin
-                     Data.Conflicts.Append
-                       ((Source_Line => Data.Terminals.all (Tree.Terminal (Tree_Indices (1))).Line,
-                         Action_A    => +Get_Text (Data, Tree, Tree_Indices (1)),
-                         LHS_A       => +Get_Text (Data, Tree, Tree_Indices (6)),
-                         Action_B    => +Get_Text (Data, Tree, Tree_Indices (3)),
-                         LHS_B       => +Get_Text (Data, Tree, Tree_Indices (8)),
-                         On          => +Get_Text (Data, Tree, Tree_Indices (11))));
+                     Item := Iter.Next (Item);
+                     if Has_Element (Item) then
+                        Function_Args_Region := Tree.Byte_Region (Items (Item), Trailing_Non_Grammar => False);
+                        loop
+                           Item := Iter.Next (Item);
+                           exit when not Has_Element (Item);
+
+                           Function_Args_Region.Last := Tree.Byte_Region
+                             (Items (Item), Trailing_Non_Grammar => False).Last;
+                        end loop;
+                     end if;
+
+                     Data.Tokens.Indents.Insert
+                       (Key      => +Elisp_Name,
+                        New_Item =>
+                          (Name  => +Ada_Name,
+                           Value =>
+                             +(if Function_Args_Region = Null_Buffer_Region
+                               then ""
+                             else Tree.Lexer.Buffer_Text (Function_Args_Region))));
                   end;
+               end;
 
-               elsif Kind = "end" then
-                  --  matching '%if' specified current lexer.
-                  null;
+            elsif Kind = "elisp_action" then
+               Data.Tokens.Actions.Insert
+                 (Key      => +Get_Child_Text (Data, Tree, Tree.Child (Nonterm, 3), 2),
+                  New_Item =>
+                    (Name  => +Get_Child_Text (Data, Tree, Tree.Child (Nonterm, 3), 1),   -- post-parse action
+                     Value => +Get_Child_Text (Data, Tree, Tree.Child (Nonterm, 3), 3))); -- Ada name
 
-               elsif Kind = "elisp_face" then
-                  Data.Tokens.Faces.Append (Get_Text (Data, Tree, Tokens (3), Strip_Quotes => True));
+            elsif Kind = "end_names_optional_option" then
+               Data.Language_Params.End_Names_Optional_Option := +Get_Text (Data, Tree, Tree.Child (Nonterm, 3));
 
-               elsif Kind = "elisp_indent" then
-                  Data.Tokens.Indents.Append
-                    ((Name  => +Get_Child_Text (Data, Tree, Tokens (3), 1, Strip_Quotes => True),
-                      Value => +Get_Child_Text (Data, Tree, Tokens (3), 2)));
+            elsif Kind = "escape_delimiter_doubled" then
+               Data.Tokens.Escape_Delimiter_Doubled.Append
+                 (Get_Text (Data, Tree, Tree.Child (Nonterm, 3), Strip_Quotes => True));
 
-               elsif Kind = "elisp_action" then
-                  Data.Tokens.Actions.Insert
-                    (Key             => +Get_Child_Text (Data, Tree, Tokens (3), 2),
-                     New_Item        =>
-                       (Action_Label => +Get_Child_Text (Data, Tree, Tokens (3), 1),
-                        Ada_Name     => +Get_Child_Text (Data, Tree, Tokens (3), 3)));
+            elsif Kind = "generate" then
+               --  Not in Other phase
+               null;
 
-               elsif Kind = "end_names_optional_option" then
-                  Data.Language_Params.End_Names_Optional_Option := +Get_Text (Data, Tree, Tokens (3));
+            elsif Kind = "language_runtime" then
+               Data.Language_Params.Language_Runtime_Name :=
+                 +Get_Text (Data, Tree, Tree.Child (Nonterm, 3), Strip_Quotes => True);
 
-               elsif Kind = "generate" then
-                  --  Not in Other phase
-                  null;
+            elsif Kind = "lr1_hash_table_size" then
+               Data.Language_Params.LR1_Hash_Table_Size :=
+                 Positive'Value (Get_Text (Data, Tree, Tree.Child (Nonterm, 3), Strip_Quotes => True));
 
-               elsif Kind = "language_runtime" then
-                  Data.Language_Params.Language_Runtime_Name :=
-                    +Get_Text (Data, Tree, Tokens (3), Strip_Quotes => True);
-
-               elsif Kind = "mckenzie_check_limit" then
-                  Data.Language_Params.Error_Recover := True;
-                  Data.McKenzie_Recover.Check_Limit := Token_Index'Value (Get_Text (Data, Tree, Tokens (3)));
-
-               elsif Kind = "mckenzie_check_delta_limit" then
-                  Data.Language_Params.Error_Recover := True;
-                  Data.McKenzie_Recover.Check_Delta_Limit := Integer'Value (Get_Text (Data, Tree, Tokens (3)));
-
-               elsif Kind = "mckenzie_cost_default" then
-                  if Tree.Get_Terminals (Tokens (3))'Length /= 4 then
-                     raise Grammar_Error with
-                       Error_Message
-                         (Data.Grammar_Lexer.File_Name,
-                          Data.Terminals.all (Tree.First_Shared_Terminal (Tokens (3))).Line,
-                          "too " & (if Tree.Get_Terminals (Tokens (3))'Length > 4 then "many" else "few") &
-                            " default costs; should be 'insert, delete, push back, ignore check fail'.");
-                  end if;
+            elsif Kind = "max_parallel" then
+               Data.Max_Parallel := SAL.Base_Peek_Type'Value (Get_Text (Data, Tree, Tree.Child (Nonterm, 3)));
 
-                  Data.Language_Params.Error_Recover := True;
-                  Data.McKenzie_Recover.Source_Line := Data.Terminals.all
-                    (Tree.First_Shared_Terminal (Tokens (1))).Line;
-
-                  Data.McKenzie_Recover.Default_Insert          := Natural'Value
-                    (Get_Child_Text (Data, Tree, Tokens (3), 1));
-                  Data.McKenzie_Recover.Default_Delete_Terminal := Natural'Value
-                    (Get_Child_Text (Data, Tree, Tokens (3), 2));
-                  Data.McKenzie_Recover.Default_Push_Back       := Natural'Value
-                    (Get_Child_Text (Data, Tree, Tokens (3), 3));
-                  Data.McKenzie_Recover.Ignore_Check_Fail       := Natural'Value
-                    (Get_Child_Text (Data, Tree, Tokens (3), 4));
-
-               elsif Kind = "mckenzie_cost_delete" then
-                  Data.Language_Params.Error_Recover := True;
-                  Data.McKenzie_Recover.Delete.Append
-                    ((+Get_Child_Text (Data, Tree, Tokens (3), 1),
-                      +Get_Child_Text (Data, Tree, Tokens (3), 2)));
-
-               elsif Kind = "mckenzie_cost_fast_forward" then
-                  Data.Language_Params.Error_Recover := True;
-                  Data.McKenzie_Recover.Fast_Forward :=
-                    Integer'Value (Get_Text (Data, Tree, Tokens (3)));
-
-               elsif Kind = "mckenzie_cost_insert" then
-                  Data.Language_Params.Error_Recover := True;
-                  Data.McKenzie_Recover.Insert.Append
-                    ((+Get_Child_Text (Data, Tree, Tokens (3), 1),
-                      +Get_Child_Text (Data, Tree, Tokens (3), 2)));
-
-               elsif Kind = "mckenzie_cost_matching_begin" then
-                  Data.Language_Params.Error_Recover := True;
-                  Data.McKenzie_Recover.Matching_Begin :=
-                    Integer'Value (Get_Text (Data, Tree, Tokens (3)));
-
-               elsif Kind = "mckenzie_cost_push_back" then
-                  Data.Language_Params.Error_Recover := True;
-                  Data.McKenzie_Recover.Push_Back.Append
-                    ((+Get_Child_Text (Data, Tree, Tokens (3), 1),
-                      +Get_Child_Text (Data, Tree, Tokens (3), 2)));
-
-               elsif Kind = "mckenzie_cost_undo_reduce" then
-                  Data.Language_Params.Error_Recover := True;
-                  Data.McKenzie_Recover.Undo_Reduce.Append
-                    ((+Get_Child_Text (Data, Tree, Tokens (3), 1),
-                      +Get_Child_Text (Data, Tree, Tokens (3), 2)));
-
-               elsif Kind = "mckenzie_enqueue_limit" then
-                  Data.Language_Params.Error_Recover := True;
-                  Data.McKenzie_Recover.Enqueue_Limit := Natural'Value (Get_Text (Data, Tree, Tokens (3)));
-
-               elsif Kind = "mckenzie_minimal_complete_cost_delta" then
-                  Data.Language_Params.Error_Recover := True;
-                  Data.McKenzie_Recover.Minimal_Complete_Cost_Delta :=
-                    Integer'Value (Get_Text (Data, Tree, Tokens (3)));
-
-               elsif Kind = "meta_syntax" then
-                  --  not in Other phase
-                  null;
+            elsif Kind = "mckenzie_check_limit" then
+               Data.McKenzie_Recover.Check_Limit := Syntax_Trees.Sequential_Index'Value
+                 (Get_Text (Data, Tree, Tree.Child (Nonterm, 3)));
 
-               elsif Kind = "no_enum" then
-                  Data.Language_Params.Declare_Enums := False;
+            elsif Kind = "mckenzie_check_delta_limit" then
+               Data.McKenzie_Recover.Check_Delta_Limit := Integer'Value
+                 (Get_Text (Data, Tree, Tree.Child (Nonterm, 3)));
 
-               elsif Kind = "no_language_runtime" then
-                  Data.Language_Params.Use_Language_Runtime := False;
+            elsif Kind = "mckenzie_cost_default" then
+               if Tree.Get_Terminals (Tree.Child (Nonterm, 3))'Length /= 4 then
+                  Put_Error
+                    (Tree.Error_Message
+                      (Tree.Child (Nonterm, 3),
+                       "too " & (if Tree.Get_Terminals (Tree.Child (Nonterm, 3))'Length > 4 then "many" else "few") &
+                         " default costs; should be 'insert, delete, push back, ignore check fail'."));
+               end if;
 
-               elsif Kind = "partial_recursion" then
-                  Data.Language_Params.Partial_Recursion := True;
+               Data.McKenzie_Recover.Default_Insert          := Natural'Value
+                 (Get_Child_Text (Data, Tree, Tree.Child (Nonterm, 3), 1));
+               Data.McKenzie_Recover.Default_Delete_Terminal := Natural'Value
+                 (Get_Child_Text (Data, Tree, Tree.Child (Nonterm, 3), 2));
+               Data.McKenzie_Recover.Default_Push_Back       := Natural'Value
+                 (Get_Child_Text (Data, Tree, Tree.Child (Nonterm, 3), 3));
+               Data.McKenzie_Recover.Ignore_Check_Fail       := Natural'Value
+                 (Get_Child_Text (Data, Tree, Tree.Child (Nonterm, 3), 4));
+
+            elsif Kind = "mckenzie_cost_delete" then
+               Data.McKenzie_Recover.Delete.Append
+                 ((+Get_Child_Text (Data, Tree, Tree.Child (Nonterm, 3), 1),
+                   +Get_Child_Text (Data, Tree, Tree.Child (Nonterm, 3), 2)));
+
+            elsif Kind = "mckenzie_cost_fast_forward" then
+               Data.McKenzie_Recover.Fast_Forward :=
+                 Integer'Value (Get_Text (Data, Tree, Tree.Child (Nonterm, 3)));
+
+            elsif Kind = "mckenzie_cost_insert" then
+               Data.McKenzie_Recover.Insert.Append
+                 ((+Get_Child_Text (Data, Tree, Tree.Child (Nonterm, 3), 1),
+                   +Get_Child_Text (Data, Tree, Tree.Child (Nonterm, 3), 2)));
+
+            elsif Kind = "mckenzie_cost_matching_begin" then
+               Data.McKenzie_Recover.Matching_Begin :=
+                 Integer'Value (Get_Text (Data, Tree, Tree.Child (Nonterm, 3)));
+
+            elsif Kind = "mckenzie_cost_push_back" then
+               Data.McKenzie_Recover.Push_Back.Append
+                 ((+Get_Child_Text (Data, Tree, Tree.Child (Nonterm, 3), 1),
+                   +Get_Child_Text (Data, Tree, Tree.Child (Nonterm, 3), 2)));
+
+            elsif Kind = "mckenzie_cost_undo_reduce" then
+               Data.McKenzie_Recover.Undo_Reduce.Append
+                 ((+Get_Child_Text (Data, Tree, Tree.Child (Nonterm, 3), 1),
+                   +Get_Child_Text (Data, Tree, Tree.Child (Nonterm, 3), 2)));
+
+            elsif Kind = "mckenzie_enqueue_limit" then
+               Data.McKenzie_Recover.Enqueue_Limit := Natural'Value (Get_Text (Data, Tree, Tree.Child (Nonterm, 3)));
+
+            elsif Kind = "mckenzie_minimal_complete_cost_delta" then
+               Data.McKenzie_Recover.Minimal_Complete_Cost_Delta :=
+                 Integer'Value (Get_Text (Data, Tree, Tree.Child (Nonterm, 3)));
+
+            elsif Kind = "mckenzie_zombie_limit" then
+               Data.McKenzie_Recover.Zombie_Limit := Integer'Value
+                 (Get_Text (Data, Tree, Tree.Child (Nonterm, 3)));
+
+            elsif Kind = "meta_syntax" then
+               --  not in Other phase
+               null;
 
-               elsif Kind = "start" then
-                  Data.Language_Params.Start_Token := +Get_Text (Data, Tree, Tokens (3));
+            elsif Kind = "no_enum" then
+               Data.Language_Params.Declare_Enums := False;
 
-               elsif Kind = "re2c_regexp" then
-                  Data.Tokens.re2c_Regexps.Append
-                    ((+Get_Child_Text (Data, Tree, Tokens (3), 1),
-                      +Get_Child_Text (Data, Tree, Tokens (3), 2)));
+            elsif Kind = "no_language_runtime" then
+               Data.Language_Params.Use_Language_Runtime := False;
 
-               else
-                  raise Grammar_Error with Error_Message
-                    (Data.Grammar_Lexer.File_Name, Token (2).Line, Token (2).Column, "unexpected syntax");
+            elsif Kind = "no_error_recover" then
+               Data.Language_Params.Error_Recover := False;
 
-               end if;
-            end;
+            elsif Kind = "partial_recursion" then
+               Data.Language_Params.Partial_Recursion := True;
 
-         when others =>
-            raise Grammar_Error with Error_Message
-              (Data.Grammar_Lexer.File_Name, Token (2).Line, Token (2).Column, "unexpected syntax");
-         end case;
+            elsif Kind = "start" then
+               Data.Language_Params.Start_Token := +Get_Text (Data, Tree, Tree.Child (Nonterm, 3));
 
-      when Syntax_Trees.Virtual_Terminal | Syntax_Trees.Virtual_Identifier =>
-         raise SAL.Programmer_Error;
+            elsif Kind = "suppress" then
+               Data.Suppress.Append
+                 ((Name  => +Get_Child_Text (Data, Tree, Tree.Child (Nonterm, 3), 1),
+                   Value => +Get_Child_Text (Data, Tree, Tree.Child (Nonterm, 3), 2, Strip_Quotes => True)));
+
+            elsif Kind = "lexer_regexp" then
+               Data.Tokens.Lexer_Regexps.Append
+                 ((+Get_Child_Text (Data, Tree, Tree.Child (Nonterm, 3), 1),
+                   +Get_Child_Text (Data, Tree, Tree.Child (Nonterm, 3), 2)));
+
+            else
+               Put_Error (Tree.Error_Message (Tree.Child (Nonterm, 2), "unexpected syntax"));
+            end if;
+         end;
+
+      when others =>
+         Put_Error (Tree.Error_Message (Tree.Child (Nonterm, 2), "unexpected syntax"));
       end case;
+
    end Add_Declaration;
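Most of the %-declarations dispatched on Kind above read their arguments from the third child of the declaration node. For illustration only (the values and the elisp/Ada names below are made up, not taken from this commit), a grammar file using two of the declarations parsed here might contain

    %mckenzie_cost_default 4 4 4 2
    %elisp_indent "my-indent-token" My_Indent_Token 2

where the four mckenzie_cost_default values are the insert, delete, push-back and ignore-check-fail costs in that order, and the elisp_indent arguments are the quoted elisp name, the Ada name, and optional function arguments gathered into Function_Args_Region.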
 
    procedure Add_Nonterminal
      (User_Data : in out WisiToken.Syntax_Trees.User_Data_Type'Class;
-      Tree      : in     WisiToken.Syntax_Trees.Tree;
-      Tokens    : in     WisiToken.Valid_Node_Index_Array)
+      Tree      : in out WisiToken.Syntax_Trees.Tree;
+      Nonterm   : in     WisiToken.Syntax_Trees.Valid_Node_Access)
    is
       use all type Ada.Containers.Count_Type;
       use WisiToken.Syntax_Trees;
 
       Data : User_Data_Type renames User_Data_Type (User_Data);
 
-      LHS_Node   : constant Valid_Node_Index := Tokens (1);
-      LHS_String : constant String           := Get_Text (Data, Tree, LHS_Node);
+      LHS_Node   : constant Valid_Node_Access := Tree.Child (Nonterm, 1);
+      LHS_String : constant String            := Get_Text (Data, Tree, LHS_Node);
 
       Right_Hand_Sides : WisiToken.BNF.RHS_Lists.List;
       Labels           : WisiToken.BNF.String_Arrays.Vector;
-   begin
-      if Data.Phase = Meta or Data.Ignore_Lines then
-         return;
-      end if;
 
-      Data.Rule_Count := Data.Rule_Count + 1;
+      function Is_Optimized_List return Boolean
+      is begin
+         if Data.User_Parser not in WisiToken.BNF.LR_Generate_Algorithm then
+            return False;
+         end if;
+         --  From optimized_list.wy:
+         --  declarations
+         --  : declaration
+         --  | declarations declaration
+         --  | declarations declarations
+         --  ;
+         --
+         --  From ada_lite_ebnf_bnf.wy
+         --
+         --  optimized list with separator:
+         --  term
+         --    : factor
+         --    | term multiplying_operator factor
+         --    | term multiplying_operator term
+         --    ;
+         --
+         --  AND_relation_list
+         --    : AND relation
+         --    | AND_relation_list AND relation
+         --    | AND_relation_list AND_relation_list
+         --    ;
+         --
+         --  ELSIF_expression_list
+         --    : ELSIF expression THEN sequence_of_statements
+         --    | ELSIF_expression_list ELSIF expression THEN sequence_of_statements
+         --    | ELSIF_expression_list ELSIF_expression_list
+         --    ;
+
+         if Right_Hand_Sides.Length /= 3 then
+            return False;
+         end if;
 
-      Get_Right_Hand_Sides (Data, Tree, Right_Hand_Sides, Labels, Tokens (3));
+         declare
+            use Ada.Containers;
+            use Ada.Strings.Unbounded;
+            use WisiToken.BNF.RHS_Lists;
+
+            RHS                 : Cursor     := Right_Hand_Sides.First;
+            Element             : Unbounded_String;
+            Element_Token_Count : Count_Type := 0;
+            Has_Separator       : Boolean    := False;
+            Separator           : Unbounded_String;
+         begin
+            for Tok of Right_Hand_Sides (RHS).Tokens loop
+               Append (Element, Tok.Identifier);
+               Element_Token_Count := @ + 1;
+            end loop;
 
-      if WisiToken.BNF.Is_Present (Data.Tokens.Rules, LHS_String) then
-         case Tree.Label (LHS_Node) is
-         when Shared_Terminal =>
-            declare
-               LHS_Token : Base_Token renames Data.Terminals.all (Tree.Terminal (LHS_Node));
-            begin
-               raise Grammar_Error with Error_Message
-                 (Data.Grammar_Lexer.File_Name, LHS_Token.Line, LHS_Token.Column, "duplicate nonterm");
-            end;
+            Next (RHS);
+            if -Right_Hand_Sides (RHS).Tokens (1).Identifier /= LHS_String then
+               return False;
+            end if;
 
-         when Virtual_Identifier =>
-            raise Grammar_Error with Error_Message
-              (Data.Grammar_Lexer.File_Name, 1, 1, "duplicate virtual nonterm '" & LHS_String & "'");
+            if Element_Token_Count = 1 then
+               case Right_Hand_Sides (RHS).Tokens.Length is
+               when 2 =>
+                  null;
 
-         when others =>
-            Raise_Programmer_Error ("Add_Nonterminal", Data, Tree, LHS_Node);
-         end case;
-      else
-         Data.Label_Count := Data.Label_Count + Labels.Length;
+               when 3 =>
+                  Has_Separator := True;
+                  Separator     := Right_Hand_Sides (RHS).Tokens (2).Identifier;
 
-         Data.Tokens.Rules.Append
-           ((+LHS_String, Right_Hand_Sides, Labels,
-             Source_Line =>
-               (case Tree.Label (LHS_Node) is
-                when Shared_Terminal    => Data.Terminals.all (Tree.First_Shared_Terminal (LHS_Node)).Line,
-                when Virtual_Identifier => Invalid_Line_Number, -- IMPROVEME: get line from Right_Hand_Sides
-                when others             => raise SAL.Programmer_Error)));
-      end if;
-   end Add_Nonterminal;
+               when others =>
+                  return False;
+               end case;
 
-   function Image_Grammar_Action (Action : in WisiToken.Syntax_Trees.Semantic_Action) return String
-   is
-      pragma Unreferenced (Action);
-   begin
-      return "action";
-   end Image_Grammar_Action;
-
-   procedure Check_EBNF
-     (User_Data : in out WisiToken.Syntax_Trees.User_Data_Type'Class;
-      Tree      : in     WisiToken.Syntax_Trees.Tree;
-      Tokens    : in     WisiToken.Valid_Node_Index_Array;
-      Token     : in     WisiToken.Positive_Index_Type)
-   is
-      Data : User_Data_Type renames User_Data_Type (User_Data);
-   begin
-      case Data.Phase is
-      when Meta =>
-         Data.EBNF_Nodes (Tokens (Token)) := True;
-
-         if Data.Meta_Syntax /= EBNF_Syntax then
-            declare
-               Tok  : Base_Token renames Data.Terminals.all (Tree.First_Shared_Terminal (Tokens (Token)));
-            begin
-               raise Grammar_Error with Error_Message
-                 (Data.Grammar_Lexer.File_Name, Tok.Line, Tok.Column,
-                  "EBNF syntax used, but BNF specified; set '%meta_syntax EBNF'");
-            end;
-         end if;
-      when Other =>
-         Raise_Programmer_Error ("untranslated EBNF node", Data, Tree, Tree.Parent (Tokens (Token)));
-      end case;
-   end Check_EBNF;
-
-   procedure Raise_Programmer_Error
-     (Label : in String;
-      Data  : in User_Data_Type;
-      Tree  : in WisiToken.Syntax_Trees.Tree;
-      Node  : in WisiToken.Node_Index)
-   is begin
-      WisiToken.Syntax_Trees.LR_Utils.Raise_Programmer_Error
-        (Label, Wisitoken_Grammar_Actions.Descriptor, Data.Grammar_Lexer, Tree, Data.Terminals.all, Node);
-   end Raise_Programmer_Error;
-
-   function Find_Declaration
-     (Data : in     User_Data_Type;
-      Tree : in out WisiToken.Syntax_Trees.Tree;
-      Name : in     String)
-     return WisiToken.Node_Index
-   is
-      use WisiToken.Syntax_Trees.LR_Utils;
-      use WisiToken.Syntax_Trees.LR_Utils.Creators;
-
-      function Decl_Name (Decl : in Valid_Node_Index) return String
-      is begin
-         case To_Token_Enum (Tree.ID (Decl)) is
-         when declaration_ID =>
-            case Tree.RHS_Index (Decl) is
-            when 0 =>
-               return Get_Text (Data, Tree, Tree.Child (Decl, 3));
-
-            when 2 | 3 =>
-               return Get_Text (Data, Tree, Tree.Child (Decl, 2));
-
-            when others =>
-               return "";
-            end case;
-
-         when nonterminal_ID =>
-            return Get_Text (Data, Tree, Tree.Child (Decl, 1));
-
-         when others =>
-            return "";
-         end case;
-      end Decl_Name;
-
-      --  Tree.Root is wisitoken_accept
-      List : constant Constant_List := Create_List
-        (Tree, Tree.Child (Tree.Root, 1), +compilation_unit_list_ID, +compilation_unit_ID);
-   begin
-      for N of List loop
-         declare
-            Decl : constant Valid_Node_Index := Tree.Child (N, 1);
-         begin
-            if Name = Decl_Name (Decl) then
-               return Decl;
-            end if;
-         end;
-      end loop;
-      return Invalid_Node_Index;
-   end Find_Declaration;
-
-   procedure Translate_EBNF_To_BNF
-     (Tree : in out WisiToken.Syntax_Trees.Tree;
-      Data : in out User_Data_Type)
-   is
-      use all type SAL.Base_Peek_Type;
-      use WisiToken.Syntax_Trees;
-
-      Copied_EBNF_Nodes : WisiToken.Valid_Node_Index_Arrays.Vector;
-
-      Symbol_Regexp : constant GNAT.Regexp.Regexp := GNAT.Regexp.Compile
-        ((if Data.Language_Params.Case_Insensitive
-          then "[A-Z0-9_]+"
-          else "[a-zA-Z0-9_]+"),
-         Case_Sensitive => not Data.Language_Params.Case_Insensitive);
-
-      procedure Erase_Copied_EBNF_Node (Node : in Valid_Node_Index)
-      is
-         use Ada.Text_IO;
-         Found : Boolean := False;
-      begin
-         if Trace_Generate_EBNF > Outline then
-            Put_Line ("erase copied deleted EBNF node" & Node'Image);
-         end if;
-         --  Vector Delete replaces content with
-         --  Valid_Node_Index_Arrays.Default_Element = Valid_Node_Index'Last =
-         --  Deleted_Child; this is clearer.
-
-         for I in Copied_EBNF_Nodes.First_Index .. Copied_EBNF_Nodes.Last_Index loop
-            if Copied_EBNF_Nodes (I) = Node then
-               Copied_EBNF_Nodes (I) := Deleted_Child;
-               Found := True;
-               exit;
-            end if;
-         end loop;
-         if not Found then
-            Put_Line (Current_Error, Tree.Image
-                        (Node, Wisitoken_Grammar_Actions.Descriptor, Node_Numbers => True) &
-                        " not found in Copied_EBNF_Nodes");
-            raise SAL.Programmer_Error;
-         end if;
-      end Erase_Copied_EBNF_Node;
-
-      procedure Clear_EBNF_Node (Node : in Valid_Node_Index)
-      is begin
-         if Node in Data.EBNF_Nodes.First_Index .. Data.EBNF_Nodes.Last_Index then
-            if Trace_Generate_EBNF > Outline then
-               Ada.Text_IO.Put_Line ("clear translated EBNF node" & Node'Image);
-            end if;
-
-            Data.EBNF_Nodes (Node) := False;
-         else
-            Erase_Copied_EBNF_Node (Node);
-         end if;
-      end Clear_EBNF_Node;
-
-      function New_Identifier (Text : in String) return Identifier_Index
-      is
-         ID : constant Identifier_Index := Base_Identifier_Index (Data.Tokens.Virtual_Identifiers.Length) + 1;
-      begin
-         Data.Tokens.Virtual_Identifiers.Append (+Text);
-         return ID;
-      end New_Identifier;
-
-      Keyword_Ident : constant Identifier_Index := New_Identifier ("keyword");
-      Percent_Ident : constant Identifier_Index := New_Identifier ("percent");
-
-      function Next_Nonterm_Name (Suffix : in String := "") return Identifier_Index
-      is
-         function Image is new SAL.Generic_Decimal_Image (Identifier_Index);
-         ID : constant Identifier_Index := Identifier_Index (Data.Tokens.Virtual_Identifiers.Length) + 1;
-      begin
-
-         if ID > 999 then
-            --  We assume 3 digits below
-            raise SAL.Programmer_Error with "more than 3 digits needed for virtual identifiers in EBNF translate";
-         end if;
-
-         Data.Tokens.Virtual_Identifiers.Append (+("nonterminal_" & Image (ID, Width => 3) & Suffix));
-
-         return ID;
-      end Next_Nonterm_Name;
-
-      function Find_Nonterminal
-        (Target : in String;
-         Equal  : in WisiToken.Syntax_Trees.LR_Utils.Find_Equal)
-        return Node_Index
-      is
-         use WisiToken.Syntax_Trees.LR_Utils;
-      begin
-         return Get_Node
-           (Creators.Create_List
-              (Tree, Tree.Child (Tree.Root, 1), +compilation_unit_list_ID, +compilation_unit_ID).Find
-              (Target, Equal));
-      end Find_Nonterminal;
-
-      function Tree_Add_Nonterminal
-        (Child_1 : in Valid_Node_Index;
-         Child_2 : in Valid_Node_Index;
-         Child_3 : in Valid_Node_Index;
-         Child_4 : in Valid_Node_Index)
-        return Valid_Node_Index
-      is begin
-         --  Work around GNAT error about arbitrary evaluation order in
-         --  aggregates (no error about the arbitrary order in subprogram
-         --  parameter_assocation_lists!).
-         return Tree.Add_Nonterm
-           (Production => (+nonterminal_ID, 0),
-            Children   => (Child_1, Child_2, Child_3, Child_4),
-            Action     => Wisitoken_Grammar_Actions.nonterminal_0'Access);
-      end Tree_Add_Nonterminal;
-
-      function Duplicate
-        (List        : in Syntax_Trees.LR_Utils.List;
-         New_Content : in Node_Index)
-        return Boolean
-      is
-         --  We don't require New_Content.ID = List.Element_ID; since we are
-         --  comparing result of Get_Text.
-         New_Content_Str : constant String :=
-           (if New_Content = Invalid_Node_Index
-            then "" --  Empty RHS
-            else Get_Text (Data, Tree, New_Content));
-      begin
-         for N of List loop
-            if New_Content_Str = Get_Text (Data, Tree, N) then
-               return True;
-            end if;
-         end loop;
-         return False;
-      end Duplicate;
-
-      procedure Insert_Empty_RHS
-        (RHS_List : in out WisiToken.Syntax_Trees.LR_Utils.List;
-         After    : in     Valid_Node_Index)
-      with Pre => RHS_List.List_ID = +rhs_list_ID and RHS_List.Element_ID = +rhs_ID and
-                  Tree.ID (After) = +rhs_ID and RHS_List.Contains (After)
-      is begin
-         RHS_List.Insert
-           (New_Element => Tree.Add_Nonterm
-              ((+rhs_ID, 0),
-               (1 .. 0 => Invalid_Node_Index)),
-            After => RHS_List.To_Cursor (After));
-      end Insert_Empty_RHS;
-
-      procedure Insert_RHS
-        (RHS_List          : in out WisiToken.Syntax_Trees.LR_Utils.List;
-         New_RHS_Item_List : in     Valid_Node_Index;
-         After             : in     Valid_Node_Index)
-      with Pre => RHS_List.List_ID = +rhs_list_ID and RHS_List.Element_ID = +rhs_ID and
-                  Tree.ID (New_RHS_Item_List) = +rhs_item_list_ID and
-                  Tree.ID (After) = +rhs_ID and RHS_List.Contains (After)
-      is begin
-         RHS_List.Insert
-           (New_Element => Tree.Add_Nonterm
-              (Production => (+rhs_ID, Tree.RHS_Index (After)),
-               Children =>
-                 (case Tree.RHS_Index (After) is
-                  when 1 => (1 => New_RHS_Item_List),
-                  when 2 => (New_RHS_Item_List, Tree.Copy_Subtree (Tree.Child (After, 2))),
-                  when 3 => (New_RHS_Item_List,
-                             Tree.Copy_Subtree (Tree.Child (After, 2)),
-                             Tree.Copy_Subtree (Tree.Child (After, 3))),
-                  when others => raise SAL.Programmer_Error)),
-            After => RHS_List.To_Cursor (After));
-      end Insert_RHS;
-
-      procedure Record_Copied_EBNF_Nodes (Node : in Valid_Node_Index)
-      is
-         procedure Record_Copied_Node
-           (Tree : in out WisiToken.Syntax_Trees.Tree;
-            Node : in WisiToken.Valid_Node_Index)
-         is begin
-            if To_Token_Enum (Tree.ID (Node)) in
-              rhs_optional_item_ID |
-              rhs_multiple_item_ID |
-              rhs_group_item_ID |
-              rhs_attribute_ID |
-              STRING_LITERAL_2_ID
-            then
-               if Trace_Generate_EBNF > Outline then
-                  Ada.Text_IO.Put_Line
-                    ("new EBNF node " & Tree.Image
-                       (Node, Wisitoken_Grammar_Actions.Descriptor,
-                        Node_Numbers => True));
-               end if;
-               Copied_EBNF_Nodes.Append (Node);
-            end if;
-         end Record_Copied_Node;
-      begin
-         Tree.Process_Tree (Record_Copied_Node'Access, Node);
-      end Record_Copied_EBNF_Nodes;
-
-      procedure Erase_Deleted_EBNF_Nodes (Node : in Valid_Node_Index)
-      is
-         procedure Erase_Deleted_Node
-           (Tree : in out WisiToken.Syntax_Trees.Tree;
-            Node : in WisiToken.Valid_Node_Index)
-         is begin
-            if To_Token_Enum (Tree.ID (Node)) in
-              rhs_optional_item_ID |
-              rhs_multiple_item_ID |
-              rhs_group_item_ID |
-              rhs_attribute_ID |
-              STRING_LITERAL_2_ID
-            then
-               if Node in Data.EBNF_Nodes.First_Index .. Data.EBNF_Nodes.Last_Index then
-                  --  Node is original, not copied
-                  if Trace_Generate_EBNF > Outline then
-                     Ada.Text_IO.Put_Line ("erase original deleted EBNF node" & Node'Image);
-                  end if;
-                  Data.EBNF_Nodes (Node) := False;
-               else
-                  Erase_Copied_EBNF_Node (Node);
-               end if;
-            end if;
-         end Erase_Deleted_Node;
-      begin
-         Tree.Process_Tree (Erase_Deleted_Node'Access, Node);
-      end Erase_Deleted_EBNF_Nodes;
-
-      function Insert_Optional_RHS (B : in Valid_Node_Index) return Valid_Node_Index
-      with Pre => Tree.ID (B) in +rhs_multiple_item_ID | +rhs_optional_item_ID | +IDENTIFIER_ID
-      is
-         --  B is an optional item in an rhs_item_list:
-         --  | A B? C
-         --
-         --  or B is a rhs_multiple_item that is allowed to be empty:
-         --  | A B* C
-         --
-         --  or B is a virtual identifier naming the new nonterm replacing the
-         --  original
-         --
-         --  A, C can be empty. The containing element may be rhs or
-         --  rhs_alternative_list.
-         --
-         --  Insert either a second rhs, or a second rhs_item_list, after the
-         --  one containing B, without B.
-         --
-         --  Return the List_Root of the edited list.
-
-         use Syntax_Trees.LR_Utils;
-         use Syntax_Trees.LR_Utils.Creators;
-         use all type Ada.Containers.Count_Type;
-
-         function Find_Skips return Skip_Info
-         is
-            Non_Empty_List : Node_Index := Invalid_Node_Index;
-            --  First (nearest) rhs_item_list ancestor of B that will not be 
empty
-            --  when B is skipped.
-
-            Skip_Last        : Positive_Index_Type'Base := Positive_Index_Type'First;
-            Last_Skip_Node   : Valid_Node_Index         := Tree.Find_Ancestor (B, +rhs_element_ID);
-            Reset_Search_For : WisiToken.Token_ID       := +rhs_item_list_ID;
-
-            procedure Search (Result : in out Skip_Info)
-            is
-               Skip_Node  : Valid_Node_Index   := Last_Skip_Node;
-               Search_For : WisiToken.Token_ID := Reset_Search_For;
-            begin
-               loop
-                  case To_Token_Enum (Search_For) is
-                  when rhs_item_list_ID =>
-                     Skip_Node := Tree.Find_Ancestor (Skip_Node, +rhs_item_list_ID);
-
-                     Skip_Node := List_Root (Tree, Skip_Node, +rhs_item_list_ID);
-
-                     Search_For := +rhs_element_ID;
-
-                     if Result.Skips'Length = 0 then
-                        declare
-                           List_Count : constant Ada.Containers.Count_Type := Create_List
-                             (Tree, Skip_Node, +rhs_item_list_ID, +rhs_element_ID).Count;
-                        begin
-                           if List_Count > 1 then
-                              Non_Empty_List := List_Root (Tree, Skip_Node, +rhs_item_list_ID);
-                              exit;
-
-                           elsif Skip_Last = Positive_Index_Type'First and List_Count = 1 then
-                              --  This list will be empty; no need to descend into it
-                              Last_Skip_Node   := Skip_Node;
-                              Reset_Search_For := Search_For;
-                           else
-                              Skip_Last := Skip_Last + 1;
-                           end if;
-                        end;
-                     else
-                        Result.Skips (Skip_Last) :=
-                          (Label             => Nested,
-                           Element           => Skip_Node,
-                           List_Root         => Skip_Node,
-                           List_ID           => +rhs_item_list_ID,
-                           Element_ID        => +rhs_element_ID,
-                           Separator_ID      => Invalid_Token_ID,
-                           Multi_Element_RHS => 1);
-
-                        Skip_Last := Skip_Last - 1;
-                     end if;
-
-                  when rhs_element_ID =>
-                     declare
-                        List_Node : Valid_Node_Index := Tree.Find_Ancestor
-                          (Skip_Node, (+rhs_ID, +rhs_alternative_list_ID));
-                     begin
-
-                        if Result.Skips'Length = 0 and then
-                          Tree.ID (List_Node) = +rhs_ID
-                        then
-                           Non_Empty_List := List_Root (Tree, Skip_Node, +rhs_item_list_ID);
-                           Skip_Last      := Skip_Last - 1;
-                           exit;
-                        end if;
-
-                        List_Node := List_Root (Tree, List_Node, +rhs_alternative_list_ID);
-                        Skip_Node := Tree.Find_Ancestor (Skip_Node, +rhs_element_ID);
-
-                        Search_For := +rhs_item_list_ID;
-
-                        if Result.Skips'Length = 0 then
-                           if Skip_Last = Positive_Index_Type'First then
-                              --  This list will be empty; no need to descend into it
-                              Last_Skip_Node   := Skip_Node;
-                              Reset_Search_For := Search_For;
-                           else
-                              Skip_Last := Skip_Last + 1;
-                           end if;
-                        else
-                           Result.Skips (Skip_Last) :=
-                             (Label             => Nested,
-                              Element           => Skip_Node,
-                              List_Root         => List_Node,
-                              List_ID           => +rhs_alternative_list_ID,
-                              Element_ID        => +rhs_item_list_ID,
-                              Separator_ID      => +BAR_ID,
-                              Multi_Element_RHS => 1);
-
-                           Skip_Last := Skip_Last - 1;
-                        end if;
-                     end;
-                  when others =>
-                     raise SAL.Programmer_Error;
-                  end case;
-
-               end loop;
-            end Search;
-
-            Result_1 : Skip_Info (Skip_Last => Positive_Index_Type'First - 1);
-         begin
-            --  First count the number of Skip_Items we need, and set
-            --  Non_Empty_List.
-            Search (Result_1);
-
-            declare
-               Result : Skip_Info (Skip_Last);
-            begin
-               if Result.Skips'Length = 0 then
-                  return Result;
-               end if;
-
-               Result.Start_List_Root  := Non_Empty_List;
-               Result.Start_List_ID    := +rhs_item_list_ID;
-               Result.Start_Element_ID := +rhs_element_ID;
-
-               Result.Start_Separator_ID      := Invalid_Token_ID;
-               Result.Start_Multi_Element_RHS := 1;
-
-               Result.Skips (Skip_Last) := (Skip, Last_Skip_Node);
-
-               if Result.Skips'Length = 1 then
-                  return Result;
-               end if;
-
-               Search (Result);
-               return Result;
-            end;
-         end Find_Skips;
-
-         Container : Valid_Node_Index := Tree.Find_Ancestor (B, (+rhs_ID, +rhs_alternative_list_ID));
-         Container_ID : WisiToken.Token_ID := Tree.ID (Container);
-
-         Container_List : Syntax_Trees.LR_Utils.List :=
-           (if Container_ID = +rhs_ID
-            then Create_From_Element
-              (Tree,
-               Element      => Container,
-               List_ID      => +rhs_list_ID,
-               Element_ID   => +rhs_ID,
-               Separator_ID => +BAR_ID)
-            else Create_List
-              (Tree,
-               Root         => List_Root (Tree, Container, +rhs_alternative_list_ID),
-               List_ID      => +rhs_alternative_list_ID,
-               Element_ID   => +rhs_item_list_ID,
-               Separator_ID => +BAR_ID));
-
-      begin
-         if Trace_Generate_EBNF > Extra then
-            Ada.Text_IO.New_Line;
-            Ada.Text_IO.Put_Line ("Insert_Optional_RHS start:");
-            Tree.Print_Tree (Wisitoken_Grammar_Actions.Descriptor, Container);
-         end if;
-
-         declare
-            Skip_List : constant Skip_Info := Find_Skips;
-
-            New_RHS_AC   : Node_Index := Invalid_Node_Index;
-            Is_Duplicate : Boolean    := False;
-         begin
-            if WisiToken.Trace_Generate_EBNF > Extra then
-               Ada.Text_IO.New_Line;
-               Ada.Text_IO.Put_Line ("skip: " & Image (Skip_List, Wisitoken_Grammar_Actions.Descriptor));
-            end if;
-
-            if Skip_List.Skips'Length = 0 or else
-              +rhs_ID = Tree.ID (Tree.Parent (Skip_List.Start_List_Root))
-            then
-               --  Insert an edited rhs into the rhs_list.
-               --
-               --  We can't insert an empty rhs_item_list into an
-               --  rhs_alterative_list, so we insert an empty rhs.
-
-               if Container_ID = +rhs_alternative_list_ID then
-
-                  Container := Tree.Find_Ancestor (B, +rhs_ID);
-
-                  Container_ID := +rhs_ID;
-
-                  Container_List := Create_From_Element
-                    (Tree,
-                     Element      => Container,
-                     List_ID      => +rhs_list_ID,
-                     Element_ID   => +rhs_ID,
-                     Separator_ID => +BAR_ID);
-               end if;
-
-               if Skip_List.Skips'Length = 0 then
-                  --  New rhs is empty; no rhs_item_list
-                  null;
-               else
-                  New_RHS_AC := Copy_Skip_Nested (Skip_List, Tree);
-               end if;
-
-               if Duplicate (Container_List, New_RHS_AC) then
-                  Is_Duplicate := True;
-               else
-                  if Skip_List.Skips'Length = 0 then
-                     Insert_Empty_RHS (Container_List, Container);
-                  else
-                     Insert_RHS (Container_List, New_RHS_AC, After => Container);
-                  end if;
-               end if;
-
-            else
-               --  Insert an edited rhs_item_list into an rhs_alternative_list
-
-               New_RHS_AC := Copy_Skip_Nested (Skip_List, Tree);
-
-               if Duplicate (Container_List, New_RHS_AC) then
-                  --  IMPROVEME: check for duplicate before do copy; requires version of
-                  --  Get_Text that understands Skip_Info
-                  Is_Duplicate := True;
-               else
-                  declare
-                     After : Valid_Node_Index := B;
-                  begin
-                     loop
-                        After := List_Root (Tree, Tree.Find_Ancestor (After, +rhs_item_list_ID), +rhs_item_list_ID);
-                        exit when Container_List.Contains (After);
-                     end loop;
-
-                     Container_List.Insert
-                       (New_Element => New_RHS_AC,
-                        After       => Container_List.To_Cursor (After));
-                  end;
-               end if;
-            end if;
-
-            if Trace_Generate_EBNF > Detail then
-               Ada.Text_IO.New_Line;
-               if Is_Duplicate then
-                  Ada.Text_IO.Put_Line ("Insert_Optional_RHS duplicate '" & Get_Text (Data, Tree, New_RHS_AC) & "'");
-               else
-                  if Container_ID = +rhs_ID then
-                     Ada.Text_IO.Put_Line ("Insert_Optional_RHS old rhs, new rhs:");
-                     Tree.Print_Tree (Wisitoken_Grammar_Actions.Descriptor, Container_List.Root);
-                  else
-                     Ada.Text_IO.Put_Line ("Insert_Optional_RHS edited rhs_alternative_list:");
-                     Tree.Print_Tree (Wisitoken_Grammar_Actions.Descriptor, Tree.Parent (Container_List.Root, 1));
-                  end if;
-               end if;
-            end if;
-
-            if not (Skip_List.Skips'Length = 0 or Is_Duplicate) then
-               Record_Copied_EBNF_Nodes (New_RHS_AC);
-            end if;
-         end;
-         return Container_List.Root;
-      end Insert_Optional_RHS;
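For orientation, the Insert_Optional_RHS routine deleted above implemented the transformation its header comment describes: given an alternative containing an optional or possibly-empty item, for example

    x : A B? C ;

it inserted a copy of the containing right hand side with B skipped (here 'A C'), unless an equal alternative already existed per the Duplicate check.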
-
-      procedure Add_Compilation_Unit (Label : in String; Unit : in Valid_Node_Index; Prepend : in Boolean := False)
-      with Pre => Tree.ID (Unit) in +declaration_ID | +nonterminal_ID
-      is
-         use WisiToken.Syntax_Trees.LR_Utils;
-
-         List : Syntax_Trees.LR_Utils.List := Creators.Create_List
-           (Tree, Tree.Child (Tree.Root, 1), +compilation_unit_list_ID, +compilation_unit_ID, Invalid_Token_ID);
-
-         Comp_Unit : constant Valid_Node_Index := Tree.Add_Nonterm
-           ((+compilation_unit_ID, (if Tree.ID (Unit) = +declaration_ID then 0 else 1)),
-            (1 => Unit));
-
-         function Equal
-           (Target    : in String;
-            List      : in LR_Utils.Constant_List'Class;
-            Comp_Unit : in Valid_Node_Index)
-           return Boolean
-         is
-            pragma Unreferenced (List);
-            Decl : constant Valid_Node_Index := Tree.Child (Comp_Unit, 1);
-         begin
-            return Tree.ID (Decl) = +declaration_ID and then Target =
-              (case Tree.RHS_Index (Decl) is
-               when 0      => Get_Text (Data, Tree, Tree.Child (Decl, 3)),
-               when 2 | 3  => Get_Text (Data, Tree, Tree.Child (Decl, 2)),
-               when others => "");
-         end Equal;
-
-      begin
-         if Prepend then
-            --  Prepend is true for keywords, which must be declared before they
-            --  are used. We put them all after the %meta_syntax declaration, to
-            --  closer match the likely original EBNF layout.
-            declare
-               Meta_Syntax : constant Cursor := List.Find ("meta_syntax", Equal'Unrestricted_Access);
-            begin
-               List.Insert (Comp_Unit, After => Meta_Syntax);
-            end;
-         else
-            List.Append (Comp_Unit);
-         end if;
-
-         if Trace_Generate_EBNF > Extra then
-            Ada.Text_IO.New_Line;
-            Ada.Text_IO.Put_Line ("new " & Label & ":" & Comp_Unit'Image & ": '" & Get_Text (Data, Tree, Unit) & "'");
-         end if;
-      end Add_Compilation_Unit;
-
-      function To_RHS_List (RHS_Element : in Valid_Node_Index) return Valid_Node_Index
-      with Pre => Tree.ID (RHS_Element) = +rhs_element_ID
-      is
-         RHS_Item_List : constant Valid_Node_Index := Tree.Add_Nonterm ((+rhs_item_list_ID, 0), (1 => RHS_Element));
-         RHS           : constant Valid_Node_Index := Tree.Add_Nonterm ((+rhs_ID, 1),           (1 => RHS_Item_List));
-      begin
-         return Tree.Add_Nonterm ((+rhs_list_ID, 0), (1 => RHS));
-      end To_RHS_List;
-
-      function Convert_RHS_Alternative (Content : in Valid_Node_Index) return Valid_Node_Index
-      with Pre => Tree.ID (Content) = +rhs_alternative_list_ID
-      is
-         --  Convert rhs_alternative_list rooted at Content to an rhs_list
-         Node : Valid_Node_Index := Content;
-      begin
-         loop
-            exit when Tree.RHS_Index (Node) = 0;
-
-            --  current tree:
-            --  rhs_alternative_list : Node
-            --  | rhs_alternative_list: Node.Child (1)
-            --  | |  ...
-            --  | BAR: Node.child (2)
-            --  | rhs_item_list: Node.Child (3)
-
-            --  new tree:
-            --  rhs_list: Node
-            --  | rhs_alternative_list: keep Node.Child (1)
-            --  | |  ...
-            --  | BAR: keep
-            --  | rhs: new
-            --  | | rhs_item_list: keep Node,Child (3)
-
-            if not Tree.Has_Children (Tree.Child (Node, 3)) then
-               --  Convert empty rhs_item_list to empty rhs
-               Tree.Set_Children
-                 (Tree.Child (Node, 3),
-                  (+rhs_ID, 0),
-                  (1 .. 0 => Invalid_Node_Index));
-
-               Tree.Set_Children
-                 (Node,
-                  (+rhs_list_ID, 1),
-                  (1 => Tree.Child (Node, 1),
-                   2 => Tree.Child (Node, 2),
-                   3 => Tree.Child (Node, 3)));
-            else
-               Tree.Set_Children
-                 (Node,
-                  (+rhs_list_ID, 1),
-                  (1 => Tree.Child (Node, 1),
-                   2 => Tree.Child (Node, 2),
-                   3 => Tree.Add_Nonterm
-                     ((+rhs_ID, 1),
-                      (1 => Tree.Child (Node, 3)))));
-            end if;
-
-            Node := Tree.Child (Node, 1);
-         end loop;
-
-         --  current tree:
-         --  rhs_alternative_list : Node
-         --  | rhs_item_list: Node.Child (1)
-
-         --  new tree:
-         --  rhs_list: Node
-         --  | rhs: new
-         --  | | rhs_item_list: Node.Child (1)
-
-         Tree.Set_Children
-           (Node,
-            (+rhs_list_ID, 0),
-            (1 => Tree.Add_Nonterm ((+rhs_ID, 1), (1 => Tree.Child (Node, 1)))));
-
-         return Content;
-      end Convert_RHS_Alternative;
-
-      procedure New_Nonterminal
-        (Label          : in String;
-         New_Identifier : in Identifier_Index;
-         Content        : in Valid_Node_Index)
-      with Pre => To_Token_Enum (Tree.ID (Content)) in rhs_alternative_list_ID | rhs_element_ID
-      is
-         --  Convert subtree rooted at Content to an rhs_list contained by a new nonterminal
-         --  named New_Identifier.
-      begin
-         declare
-            New_Nonterm : constant Valid_Node_Index := Tree_Add_Nonterminal
-              (Child_1   => Tree.Add_Identifier (+IDENTIFIER_ID, New_Identifier, Tree.Byte_Region (Content)),
-               Child_2   => Tree.Add_Terminal (+COLON_ID),
-               Child_3   =>
-                 (case To_Token_Enum (Tree.ID (Content)) is
-                  when rhs_element_ID          => To_RHS_List (Content),
-                  when rhs_alternative_list_ID => Convert_RHS_Alternative (Content),
-                  when others => raise SAL.Programmer_Error),
-               Child_4   => Tree.Add_Nonterm
-                 ((+semicolon_opt_ID, 0),
-                  (1     => Tree.Add_Terminal (+SEMICOLON_ID))));
-         begin
-            Add_Compilation_Unit (Label & New_Identifier'Image, New_Nonterm);
-         end;
-      end New_Nonterminal;
-
-      procedure New_Nonterminal_List_1
-        (List_Nonterm  : in Identifier_Index;
-         RHS_Element_1 : in Valid_Node_Index;
-         RHS_Element_3 : in Valid_Node_Index;
-         Byte_Region   : in Buffer_Region)
-      with Pre => Tree.ID (RHS_Element_1) = +rhs_element_ID and
-                  Tree.ID (RHS_Element_3) = +rhs_element_ID
-      is
-         --  nonterminal: foo_list
-         --  | IDENTIFIER: "foo_list" List_Nonterm
-         --  | COLON:
-         --  | rhs_list:
-         --  | | rhs_list: RHS_List_2
-         --  | | | rhs: RHS_2
-         --  | | | | rhs_item_list: RHS_Item_List_1
-         --  | | | | | rhs_element: RHS_Element_1
-         --  | | | | | | rhs_item: RHS_Item_1
-         --  | | | | | | | IDENTIFIER: List_Element
-         --  | | BAR:
-         --  | | rhs: RHS_3
-         --  | | | rhs_item_list: RHS_Item_List_2
-         --  | | | | | rhs_item_list: RHS_Item_List_3
-         --  | | | | | |  rhs_element: RHS_Element_2
-         --  | | | | | | | rhs_item: RHS_Item_2
-         --  | | | | | | | | IDENTIFIER: List_Nonterm
-         --  | | | | rhs_element: RHS_Element_3
-         --  | | | | | rhs_item: RHS_Item_3
-         --  | | | | | | IDENTIFIER: List_Element
-         --  | semicolon_opt:
-         --  | | SEMICOLON:
-
-         RHS_Item_2 : constant Valid_Node_Index := Tree.Add_Nonterm
-           ((+rhs_item_ID, 0), (1 => Tree.Add_Identifier (+IDENTIFIER_ID, List_Nonterm, Byte_Region)));
-
-         RHS_Element_2 : constant Valid_Node_Index := Tree.Add_Nonterm ((+rhs_element_ID, 0), (1 => RHS_Item_2));
-
-         RHS_Item_List_1 : constant Valid_Node_Index := Tree.Add_Nonterm ((+rhs_item_list_ID, 0), (1 => RHS_Element_1));
-         RHS_Item_List_3 : constant Valid_Node_Index := Tree.Add_Nonterm ((+rhs_item_list_ID, 0), (1 => RHS_Element_2));
-         RHS_Item_List_2 : constant Valid_Node_Index := Tree.Add_Nonterm
-           ((+rhs_item_list_ID, 1), (1 => RHS_Item_List_3, 2 => RHS_Element_3));
-
-         RHS_2 : constant Valid_Node_Index := Tree.Add_Nonterm ((+rhs_ID, 1), (1 => RHS_Item_List_1));
-         RHS_3 : constant Valid_Node_Index := Tree.Add_Nonterm ((+rhs_ID, 1), (1 => RHS_Item_List_2));
-
-         Bar_1 : constant Valid_Node_Index := Tree.Add_Terminal (+BAR_ID);
-
-         RHS_List_2 : constant Valid_Node_Index := Tree.Add_Nonterm ((+rhs_list_ID, 0), (1 => RHS_2));
-
-         List_Nonterminal : constant Valid_Node_Index := Tree_Add_Nonterminal
-           (Child_1   => Tree.Add_Identifier (+IDENTIFIER_ID, List_Nonterm, Byte_Region),
-            Child_2   => Tree.Add_Terminal (+COLON_ID),
-            Child_3   => Tree.Add_Nonterm
-              ((+rhs_list_ID, 1),
-               (1     => RHS_List_2,
-                2     => Bar_1,
-                3     => RHS_3)),
-            Child_4   => Tree.Add_Nonterm
-              ((+semicolon_opt_ID, 0),
-               (1     => Tree.Add_Terminal (+SEMICOLON_ID))));
-      begin
-         Add_Compilation_Unit ("canonical list" & List_Nonterm'Image, List_Nonterminal);
-      end New_Nonterminal_List_1;
-
-      procedure New_Nonterminal_List
-        (List_Nonterm : in Identifier_Index;
-         List_Element : in Identifier_Index;
-         Byte_Region  : in Buffer_Region)
-      is
-         RHS_Item_1 : constant Valid_Node_Index := Tree.Add_Nonterm
-           ((+rhs_item_ID, 0), (1 => Tree.Add_Identifier (+IDENTIFIER_ID, List_Element, Byte_Region)));
-         RHS_Item_3 : constant Valid_Node_Index := Tree.Add_Nonterm
-           ((+rhs_item_ID, 0), (1 => Tree.Add_Identifier (+IDENTIFIER_ID, List_Element, Byte_Region)));
-         RHS_Element_1 : constant Valid_Node_Index := Tree.Add_Nonterm ((+rhs_element_ID, 0), (1 => RHS_Item_1));
-         RHS_Element_3 : constant Valid_Node_Index := Tree.Add_Nonterm ((+rhs_element_ID, 0), (1 => RHS_Item_3));
-      begin
-         New_Nonterminal_List_1 (List_Nonterm, RHS_Element_1, RHS_Element_3, 
Byte_Region);
-      end New_Nonterminal_List;
-
-      procedure New_Nonterminal_List
-        (List_Nonterm : in Identifier_Index;
-         List_Element : in Token_Index;
-         Terminals    : in Base_Token_Arrays.Vector;
-         Byte_Region  : in Buffer_Region)
-      is
-         RHS_Item_1 : constant Valid_Node_Index := Tree.Add_Nonterm
-           ((+rhs_item_ID, 0), (1 => Tree.Add_Terminal (List_Element, Terminals)));
-         RHS_Item_3 : constant Valid_Node_Index := Tree.Add_Nonterm
-           ((+rhs_item_ID, 0), (1 => Tree.Add_Terminal (List_Element, Terminals)));
-         RHS_Element_1 : constant Valid_Node_Index := Tree.Add_Nonterm ((+rhs_element_ID, 0), (1 => RHS_Item_1));
-         RHS_Element_3 : constant Valid_Node_Index := Tree.Add_Nonterm ((+rhs_element_ID, 0), (1 => RHS_Item_3));
-      begin
-         New_Nonterminal_List_1 (List_Nonterm, RHS_Element_1, RHS_Element_3, Byte_Region);
-      end New_Nonterminal_List;
-
-      procedure Copy_Non_Grammar
-        (From : in Valid_Node_Index;
-         To   : in Valid_Node_Index)
-      is
-         From_Aug : constant Base_Token_Class_Access := Tree.Augmented (From);
-      begin
-         if From_Aug /= null then
-            declare
-               New_Aug : constant Augmented_Token_Access := new Augmented_Token'
-                 (ID          => Tree.ID (From),
-                  Tree_Index  => To,
-                  Non_Grammar => Augmented_Token_Access (From_Aug).Non_Grammar,
-                  others => <>);
-            begin
-               Tree.Set_Augmented (To, Base_Token_Class_Access (New_Aug));
-            end;
-         end if;
-      end Copy_Non_Grammar;
-
-      procedure Translate_RHS_Group_Item (Node : in Valid_Node_Index)
-      is
-         --  Current tree:
-         --
-         --  rhs_element: Parent (Node, 2)
-         --  | rhs_item: Parent (Node, 1)
-         --  | | rhs_group_item: Node
-         --  | | | LEFT_PAREN
-         --  | | | rhs_alternative_list: Child (Node, 2)
-         --  | | | RIGHT_PAREN
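-         --
-         --  For example (illustrative only): in "a (b | c) d" the group
-         --  "(b | c)" is replaced by an IDENTIFIER naming a nonterminal whose
-         --  rhs_list is "b | c"; an existing nonterminal with that content is
-         --  reused when the search below finds one.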
-
-         use Syntax_Trees.LR_Utils;
-
-         Element_Content  : constant String           := Get_Text (Data, Tree, Tree.Child (Node, 2));
-         Right_Paren_Node : constant Valid_Node_Index := Tree.Child (Node, 3);
-         List             : constant Constant_List    := Creators.Create_List
-           (Tree, Tree.Child (Tree.Root, 1), +compilation_unit_list_ID, +compilation_unit_ID);
-         Name_Node        : Node_Index;
-         New_Ident        : Base_Identifier_Index     := Invalid_Identifier_Index;
-      begin
-         --  See if there's an existing nonterminal for this content.
-         for N of List loop
-
-            if Tree.Production_ID (Tree.Child (N, 1)) = (+nonterminal_ID, 0) then
-               --  Target nonterm is:
-               --
-               --  (compilation_unit_1, (111 . 128))
-               --  | (nonterminal_0, (111 . 128))
-               --  | |  7;(IDENTIFIER, (111 . 128))
-               --  | | (COLON)
-               --  | | (rhs_list_1, (111 . 128))
-               --  | | | ...
-               declare
-                  RHS_List_1 : constant Node_Index := Tree.Child (Tree.Child (N, 1), 3);
-               begin
-                  if RHS_List_1 /= Invalid_Node_Index and then
-                    Element_Content = Get_Text (Data, Tree, RHS_List_1)
-                  then
-                     Name_Node := Tree.Child (Tree.Child (N, 1), 1);
-                     case Tree.Label (Name_Node) is
-                     when Shared_Terminal =>
-                        New_Ident := New_Identifier (Get_Text (Data, Tree, Name_Node));
-                     when Virtual_Identifier =>
-                        New_Ident := Tree.Identifier (Name_Node);
-                     when others =>
-                        Raise_Programmer_Error ("process_node rhs_group_item", Data, Tree, Name_Node);
-                     end case;
-
-                     exit;
-                  end if;
-               end;
-            end if;
-         end loop;
-
-         if New_Ident = Invalid_Identifier_Index then
-            New_Ident := Next_Nonterm_Name;
-            New_Nonterminal ("group item", New_Ident, Tree.Child (Node, 2));
-         else
-            Erase_Deleted_EBNF_Nodes (Tree.Child (Node, 2));
-         end if;
-
-         Tree.Set_Node_Identifier (Node, +IDENTIFIER_ID, New_Ident);
-         Copy_Non_Grammar (Right_Paren_Node, Node);
-         Tree.Set_Children (Tree.Parent (Node), (+rhs_item_ID, 0), (1 => Node));
-         Clear_EBNF_Node (Node);
-      end Translate_RHS_Group_Item;
-
-      procedure Translate_RHS_Multiple_Item (Node : in Valid_Node_Index)
-      is
-         --  We have one of:
-         --
-         --  | a { b }  c
-         --  | a { b } - c
-         --  | a ( b ) + c
-         --  | a ( b ) * c
-         --  | a b+ c
-         --  | a b* c
-         --
-         --  where a and/or c can be empty. Replace it with a new canonical
-         --  list nonterminal:
-         --
-         --  nonterminal_nnn_list
-         --  : b
-         --  | nonterminal_nnn_list b
-         --
-         --  and a second RHS if it can be empty:
-         --  | a c
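-         --
-         --  For example (illustrative, not from an actual grammar file):
-         --     block : 'begin' { statement } 'end' ;
-         --  becomes roughly:
-         --     nonterminal_nnn_list : statement | nonterminal_nnn_list statement ;
-         --     block : 'begin' nonterminal_nnn_list 'end'
-         --           | 'begin' 'end' ;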
-
-         --  Current tree:
-         --
-         --  rhs_element : Parent (Node, 2)
-         --  | rhs_item: Parent (Node, 1)
-         --  | | rhs_multiple_item: Node
-         --  | | | LEFT_BRACE | LEFT_PAREN
-         --  | | | rhs_alternative_list
-         --  | | | ...
-         --  | | | RIGHT_BRACE | RIGHT_PAREN
-         --  | | | [MINUS | PLUS | STAR]
-
-         --  or:
-         --
-         --  rhs_element : Parent (Node, 2)
-         --  | rhs_item: Parent (Node, 1)
-         --  | | rhs_multiple_item: Node
-         --  | | | IDENTIFIER
-         --  | | | PLUS | STAR
-
-         Done                       : Boolean                   := False;
-         Parent_RHS_Item            : constant Valid_Node_Index := Tree.Parent (Node);
-         List_Nonterm_Virtual_Name  : Base_Identifier_Index     := Invalid_Identifier_Index;
-         List_Nonterm_Terminal_Name : Base_Token_Index          := Invalid_Token_Index;
-
-         procedure Check_Canonical_List
-         is
-            --  In EBNF, a canonical list with a separator looks like:
-            --
-            --  enumConstants : enumConstant (',' enumConstant)* ;
-            --
-            --  or, with no separator:
-            --
-            --  SwitchLabels : SwitchLabel {SwitchLabel} ;
-            --
-            --  where Node is the rhs_multiple_item containing "(','
-            --  enumConstant)*" or "{SwitchLabel}".
-            --
-            --  The tokens may have labels.
-            --
-            --  Handling these cases specially eliminates a conflict between
-            --  reducing to enumConstants and reducing to the introduced
-            --  nonterm list.
-            --
-            --  Alternately, the no separator case can be:
-            --
-            --  enumConstants : enumConstant+ ;
-            --
-            --  Handling this no separator case specially would not eliminate
-            --  any conflicts.
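-            --
-            --  For illustration, the separator case above is rewritten
-            --  (roughly) to:
-            --
-            --     enumConstants : enumConstant | enumConstants ',' enumConstant ;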
-
-            use Syntax_Trees.LR_Utils;
-            use Syntax_Trees.LR_Utils.Creators;
-            use all type Ada.Containers.Count_Type;
-
-            List_Name_Node : constant Valid_Node_Index := Tree.Find_Ancestor (Node, +nonterminal_ID);
-            RHS_List_Root  : constant Valid_Node_Index := Tree.Child (List_Name_Node, 3);
-
-            RHS_2 : constant Valid_Node_Index := Tree.Find_Ancestor
-              (Node, (+rhs_ID, +rhs_alternative_list_ID));
-            --  If rhs_ID, the RHS containing the canonical list candidate.
-            --  If rhs_alternative_list_ID, not useful (FIXME: actually a
-            --  canonical list candidate)
-
-            RHS_2_Item_List_List : constant Constant_List :=
-              (if Tree.ID (RHS_2) = +rhs_ID
-               then Create_List (Tree, Tree.Child (RHS_2, 1), +rhs_item_list_ID, +rhs_element_ID)
-               else Invalid_List (Tree));
-
-            Alt_List_List : constant Constant_List :=
-              (case Tree.RHS_Index (Node) is
-               when 0 | 3 =>
-                  Create_List (Tree, Tree.Child (Node, 2), +rhs_alternative_list_ID, +rhs_item_list_ID),
-               when others => Invalid_List (Tree));
-            --  Iterator on the rhs_alternative_list of the rhs_multiple_item.
-
-            Alt_List_Item_List : constant Constant_List :=
-              (if Alt_List_List.Is_Invalid
-               then Invalid_List (Tree)
-               else Create_List (Tree, Get_Node (Alt_List_List.First), +rhs_item_list_ID, +rhs_element_ID));
-            --  Iterator on the content of the rhs_multiple_item. Note that we
-            --  don't support a non-empty multiple_item; a canonical list can
-            --  be empty.
-
-            RHS_2_Item_List_Iter : constant Constant_Iterator := RHS_2_Item_List_List.Iterate_Constant;
-
-            Element_2 : constant Cursor :=
-              (if Is_Invalid (RHS_2_Item_List_List)
-               then No_Element
-               else RHS_2_Item_List_List.To_Cursor (Tree.Parent (Node, 2)));
-            --  The rhs_element containing the rhs_multiple_item
-
-            Element_1 : constant Node_Index :=
-              (if Is_Invalid (RHS_2_Item_List_List)
-               then Invalid_Node_Index
-               else Get_Node (RHS_2_Item_List_Iter.Previous (Element_2)));
-            --  The list element
-         begin
-            if Tree.ID (RHS_2) = +rhs_alternative_list_ID or else
-              Create_List (Tree, RHS_List_Root, +rhs_list_ID, +rhs_ID).Count /= 1
-            then
-               --  Something else going on
-               return;
-            end if;
-            pragma Assert (Tree.ID (RHS_2) = +rhs_ID);
-
-            if RHS_2_Item_List_List.Count = 2 and then
-              (Tree.RHS_Index (Node) in 4 .. 5 or else
-                 Alt_List_Item_List.Count in 1 .. 2)
-            then
-               null;
-            else
-               return;
-            end if;
-
-            if Element_1 = Invalid_Node_Index or else
-              Get_Text (Data, Tree, Tree.Find_Descendant (Element_1, +rhs_item_ID)) /=
-              Get_Text (Data, Tree, Tree.Find_Descendant (Get_Node (Alt_List_Item_List.Last), +rhs_item_ID))
-            then
-               return;
-            end if;
-
-            if Trace_Generate_EBNF > Detail then
-               Ada.Text_IO.Put_Line ("canonical list");
-            end if;
-
-            --  We have a canonical list declaration. Rewrite it to:
-            --
-            --  with separator:
-            --  rhs_list: keep
-            --  | rhs_list:
-            --  | | rhs: new, RHS_1
-            --  | | | rhs_item_list: new, RHS_Item_List_1
-            --  | | | | rhs_element: keep, Element_1
-            --  | | | | | rhs_item: keep
-            --  | | | | | | IDENTIFIER: keep; element name
-            --  | BAR: new
-            --  | rhs: keep, RHS_2
-            --  | | rhs_item_list: new, RHS_Item_List_2
-            --  | | | rhs_item_list: keep, rhs_item_list_3
-            --  | | | | rhs_item_list: keep, rhs_item_list_4
-            --  | | | | | rhs_element: new
-            --  | | | | | | rhs_item: new
-            --  | | | | | | | IDENTIFIER: new, list name
-            --  | | | | rhs_element: keep
-            --  | | | | | rhs_item: keep
-            --  | | | | | | IDENTIFIER: keep, separator
-            --  | | | rhs_element: keep, alt_list_elements (last)
-            --  | | | | rhs_item: keep
-            --  | | | | | IDENTIFIER: keep, element name
-            --
-            --  no separator:
-            --  rhs_list: keep
-            --  | rhs_list:
-            --  | | rhs: new, RHS_1
-            --  | | | rhs_item_list: new, RHS_Item_List_1
-            --  | | | | rhs_element: keep, Element_1
-            --  | | | | | rhs_item: keep
-            --  | | | | | | IDENTIFIER: keep; element name
-            --  | BAR: new
-            --  | rhs: keep, RHS_2
-            --  | | rhs_item_list: keep, rhs_item_list_3
-            --  | | | rhs_item_list: new, rhs_item_list_4
-            --  | | | | rhs_element: new
-            --  | | | | | rhs_item: new
-            --  | | | | | | IDENTIFIER: new, list name
-            --  | | | rhs_element: keep, alt_list_elements (last)
-            --  | | | | rhs_item: keep
-            --  | | | | | IDENTIFIER: keep, element name
-
-            declare
-               List_Name_Tok    : constant Token_Index      := Tree.First_Shared_Terminal (List_Name_Node);
-               List_Name_Region : constant Buffer_Region    := Data.Terminals.all (List_Name_Tok).Byte_Region;
-               List_Name        : constant String           := Data.Grammar_Lexer.Buffer_Text (List_Name_Region);
-
-               RHS_2_Index    : constant Integer       := Tree.RHS_Index (RHS_2);
-               RHS_2_Children : Valid_Node_Index_Array := Tree.Children (RHS_2);
-
-               RHS_1_Item_List    : constant Valid_Node_Index := Tree.Add_Nonterm
-                 ((+rhs_item_list_ID, 0), (1 => Element_1));
-
-               RHS_1_Action : constant Node_Index :=
-                 (case RHS_2_Index is
-                  when 2 | 3 => Tree.Add_Terminal
-                    (Tree.First_Shared_Terminal (RHS_2_Children (2)), Data.Terminals.all),
-                  when others => Invalid_Node_Index);
-
-               RHS_1_Check : constant Node_Index :=
-                 (case RHS_2_Index is
-                  when 3 => Tree.Add_Terminal
-                    (Tree.First_Shared_Terminal (RHS_2_Children (3)), Data.Terminals.all),
-                  when others => Invalid_Node_Index);
-
-               RHS_1              : constant Valid_Node_Index :=
-                 (case RHS_2_Index is
-                  when 1 => Tree.Add_Nonterm ((+rhs_ID, 1), (1 => RHS_1_Item_List)),
-                  when 2 => Tree.Add_Nonterm ((+rhs_ID, 2), (1 => RHS_1_Item_List, 2 => RHS_1_Action)),
-                  when 3 => Tree.Add_Nonterm
-                    ((+rhs_ID, 3), (1 => RHS_1_Item_List, 2 => RHS_1_Action, 3 => RHS_1_Check)),
-                  when others => raise SAL.Programmer_Error);
-
-               Bar                   : constant Valid_Node_Index := Tree.Add_Terminal (+BAR_ID);
-               RHS_Item_List_3       : constant Valid_Node_Index := Tree.Child (RHS_2, 1);
-               RHS_Item_List_4       : constant Valid_Node_Index := Tree.Child (RHS_Item_List_3, 1);
-               New_List_Name_Term    : constant Valid_Node_Index := Tree.Add_Terminal
-                 (List_Name_Tok, Data.Terminals.all);
-               New_List_Name_Item    : constant Valid_Node_Index := Tree.Add_Nonterm
-                 ((+rhs_item_ID, 0),
-                  (1      => New_List_Name_Term));
-
-               New_List_Name_Label : constant Node_Index :=
-                 (if Tree.RHS_Index (Element_1) = 1
-                  then --  tokens have labels
-                     Tree.Add_Identifier (+IDENTIFIER_ID, New_Identifier (List_Name), List_Name_Region)
-                  else Invalid_Node_Index);
-
-               New_List_Name_Element : constant Valid_Node_Index :=
-                 (if Tree.RHS_Index (Element_1) = 1
-                  then --  tokens have labels
-                     Tree.Add_Nonterm
-                       ((+rhs_element_ID, 1),
-                        (1 => New_List_Name_Label,
-                         2 => Tree.Add_Terminal (+EQUAL_ID),
-                         3 => New_List_Name_Item))
-                  else
-                     Tree.Add_Nonterm ((+rhs_element_ID, 0), (1 => New_List_Name_Item)));
-
-               Alt_List_Elements : constant Valid_Node_Index_Array := Tree.Get_IDs (Node, +rhs_element_ID);
-               RHS_Item_List_2   : constant Node_Index       :=
-                 (if Alt_List_Elements'Last = 1
-                  then Invalid_Node_Index -- no separator
-                  else Tree.Add_Nonterm
-                    ((+rhs_item_list_ID, 1),
-                     (1 => RHS_Item_List_3,
-                      2 => Alt_List_Elements (Alt_List_Elements'Last))));
-
-            begin
-               Tree.Set_Children (RHS_Item_List_4, (+rhs_item_list_ID, 0), (1 => New_List_Name_Element));
-
-               Tree.Set_Children
-                 (RHS_Item_List_3,
-                  (+rhs_item_list_ID, 1),
-                  (1 => RHS_Item_List_4,
-                   2 => Alt_List_Elements (1)));
-
-               RHS_2_Children (1) :=
-                 (if Alt_List_Elements'Last = 1
-                  then RHS_Item_List_3 -- no separator
-                  else RHS_Item_List_2);
-               Tree.Set_Children (RHS_2, (+rhs_ID, Tree.RHS_Index (RHS_2)), RHS_2_Children);
-
-               Tree.Set_Children
-                 (Tree.Parent (RHS_2),
-                  (+rhs_list_ID, 1),
-                  (1 => Tree.Add_Nonterm ((+rhs_list_ID, 0), (1 => RHS_1)),
-                   2 => Bar,
-                   3 => RHS_2));
-            end;
-
-            Done := True;
-
-            Clear_EBNF_Node (Node);
-
-            if Trace_Generate_EBNF > Extra then
-               Ada.Text_IO.New_Line;
-               Ada.Text_IO.Put_Line ("Check_Canonical_List edited rhs_list:");
-               Tree.Print_Tree (Wisitoken_Grammar_Actions.Descriptor, Tree.Parent (RHS_2));
-            end if;
-         end Check_Canonical_List;
-
-         procedure Find_List_Nonterminal_1 (Element_Content : in String)
-         is
-            --  Search for a nonterm (virtual or not) implementing a list for
-            --  Element_Content, which is a single rhs_element; no List_Element
-            --  Nonterminal. If found, set List_Nonterm_Virtual_Name or
-            --  List_Nonterm_Terminal_Name
-            use Syntax_Trees.LR_Utils;
-
-            List : constant Constant_List := Creators.Create_List
-              (Tree, Tree.Child (Tree.Root, 1), +compilation_unit_list_ID, +compilation_unit_ID);
-         begin
-            for N of List loop
-
-               if Tree.Production_ID (Tree.Child (N, 1)) = (+nonterminal_ID, 0) then
-                  --  Target List_Nonterm is:
-                  --
-                  --  nonterminal_nnn_list
-                  --     : element
-                  --     | nonterminal_nnn_list element
-                  --
-                  --  compilation_unit
-                  --  | nonterminal
-                  --  | | IDENTIFIER : list_nonterm
-                  --  | | COLON
-                  --  | | rhs_list: rhs_list_1
-                  --  | | | rhs_list: rhs_list_2
-                  --  | | | | rhs
-                  --  | | | | | ... List_element
-                  --  | | | BAR
-                  --  | | | rhs: ... list_nonterm list_element
-                  declare
-                     Name_Node  : constant Node_Index := Tree.Child (Tree.Child (N, 1), 1);
-                     RHS_List_1 : constant Node_Index := Tree.Child (Tree.Child (N, 1), 3);
-                     RHS_List_2 : constant Node_Index :=
-                       (if RHS_List_1 = Invalid_Node_Index
-                        then Invalid_Node_Index
-                        else Tree.Child (RHS_List_1, 1));
-                  begin
-                     if RHS_List_2 /= Invalid_Node_Index and
-                       Tree.Child (RHS_List_1, 3) /= Invalid_Node_Index and -- second rhs present
-                       Tree.Child (RHS_List_2, 3) = Invalid_Node_Index -- no third rhs
-                     then
-                        declare
-                           RHS_1 : constant String := Get_Text (Data, Tree, RHS_List_2);
-                           RHS_2 : constant String := Get_Text (Data, Tree, Tree.Child (RHS_List_1, 3));
-                           Expected_RHS_2 : constant String := Get_Text (Data, Tree, Name_Node) & " " &
-                             Element_Content;
-                        begin
-                           if Element_Content = RHS_1 and RHS_2 = Expected_RHS_2 then
-                              case Tree.Label (Name_Node) is
-                              when Shared_Terminal =>
-                                 List_Nonterm_Terminal_Name := Tree.First_Shared_Terminal (Name_Node);
-                              when Virtual_Identifier =>
-                                 List_Nonterm_Virtual_Name := Tree.Identifier (Name_Node);
-                              when others =>
-                                 Raise_Programmer_Error
-                                   ("unimplemented Find_List_Nonterminal_1 case '" & Element_Content & "'",
-                                    Data, Tree, Name_Node);
-                              end case;
-
-                              exit;
-                           end if;
-                        end;
-                     end if;
-                  end;
-               end if;
-            end loop;
-         end Find_List_Nonterminal_1;
-
-         procedure Find_List_Nonterminal_2 (Element_Content : in String)
-         is
-            --  Look for a pair of nonterms implementing a list of Element_Content.
-            --  If found, set List_Nonterm_*_Name
-            use Syntax_Trees.LR_Utils;
-
-            List : constant Constant_List := Creators.Create_List
-              (Tree, Tree.Child (Tree.Root, 1), +compilation_unit_list_ID, +compilation_unit_ID);
-         begin
-            for Comp_Unit of List loop
-               declare
-                  Nonterm : constant Valid_Node_Index := Tree.Child (Comp_Unit, 1);
-               begin
-                  if Tree.Production_ID (Nonterm) = (+nonterminal_ID, 0) and then
-                    Element_Content = Get_Text (Data, Tree, Tree.Child (Nonterm, 3))
-                  then
-                     Find_List_Nonterminal_1 (Get_Text (Data, Tree, Tree.Child (Nonterm, 1)));
-                     exit;
-                  end if;
-               end;
-            end loop;
-         end Find_List_Nonterminal_2;
-
-         Container_List_Root : Node_Index := Invalid_Node_Index;
-      begin
-         --  Check if this is a recognized pattern
-         Check_Canonical_List;
-         if Done then return; end if;
-
-         --  Check to see if there is an already declared nonterminal
-         --  list with the same content; if not, create one.
-         case Tree.RHS_Index (Node) is
-         when 0 .. 3 =>
-            --  0: { rhs_alternative_list }
-            --  1: { rhs_alternative_list } -
-            --  2: ( rhs_alternative_list ) +
-            --  3: ( rhs_alternative_list ) *
-
-            if Tree.RHS_Index (Node) in 0 | 3 then
-               Container_List_Root := Insert_Optional_RHS (Node);
-            end if;
-
-            if 0 = Tree.RHS_Index (Tree.Child (Node, 2)) and then
-              0 = Tree.RHS_Index (Tree.Child (Tree.Child (Node, 2), 1))
-            then
-               --  Only one element in the rhs_alternative_list, and in the rhs_item_list
-               Find_List_Nonterminal_1 (Get_Text (Data, Tree, Tree.Child (Node, 2)));
-
-               if List_Nonterm_Virtual_Name = Invalid_Identifier_Index and
-                 List_Nonterm_Terminal_Name = Invalid_Token_Index
-               then
-                  List_Nonterm_Virtual_Name := Next_Nonterm_Name ("_list");
-                  New_Nonterminal_List
-                    (List_Nonterm_Virtual_Name, Tree.First_Shared_Terminal (Tree.Child (Node, 2)),
-                     Data.Terminals.all, Tree.Byte_Region (Node));
-               else
-                  Erase_Deleted_EBNF_Nodes (Tree.Child (Node, 2));
-               end if;
-            else
-               Find_List_Nonterminal_2 (Get_Text (Data, Tree, Tree.Child (Node, 2)));
-
-               if List_Nonterm_Virtual_Name = Invalid_Identifier_Index then
-                  List_Nonterm_Virtual_Name := Next_Nonterm_Name ("_list");
-                  declare
-                     List_Element_Virtual_Name : constant Identifier_Index := Next_Nonterm_Name;
-                  begin
-                     New_Nonterminal ("canonical list element", List_Element_Virtual_Name, Tree.Child (Node, 2));
-                     New_Nonterminal_List
-                       (List_Nonterm_Virtual_Name, List_Element_Virtual_Name, Tree.Byte_Region (Node));
-                  end;
-               else
-                  Erase_Deleted_EBNF_Nodes (Tree.Child (Node, 2));
-               end if;
-            end if;
-
-         when 4 | 5 =>
-            --  IDENTIFIER + | *
-            Find_List_Nonterminal_1 (Get_Text (Data, Tree, Tree.Child (Node, 1)));
-
-            if List_Nonterm_Virtual_Name = Invalid_Identifier_Index then
-               List_Nonterm_Virtual_Name := Next_Nonterm_Name ("_list");
-
-               New_Nonterminal_List
-                 (List_Nonterm_Virtual_Name,
-                  Tree.First_Shared_Terminal (Tree.Child (Node, 1)), Data.Terminals.all,
-                  Tree.Byte_Region (Node));
-            else
-               --  nothing to erase
-               null;
-            end if;
-
-            if Tree.RHS_Index (Node) = 5 then
-               Container_List_Root := Insert_Optional_RHS (Node);
-            end if;
-
-         when others =>
-            Raise_Programmer_Error ("Translate_RHS_Multiple_Item unimplemented", Data, Tree, Node);
-         end case;
-
-         --  Edit rhs_item to use list name
-         declare
-            Child : constant Valid_Node_Index :=
-              (if List_Nonterm_Virtual_Name /= Invalid_Identifier_Index
-               then Tree.Add_Identifier
-                 (+IDENTIFIER_ID, List_Nonterm_Virtual_Name, Tree.Byte_Region (Parent_RHS_Item))
-               elsif List_Nonterm_Terminal_Name /= Invalid_Token_Index
-               then Tree.Add_Terminal (List_Nonterm_Terminal_Name, Data.Terminals.all)
-               else raise SAL.Programmer_Error);
-         begin
-            Tree.Set_Children (Parent_RHS_Item, (+rhs_item_ID, 0), (1 => Child));
-         end;
-
-         Clear_EBNF_Node (Node);
-
-         if Trace_Generate_EBNF > Extra then
-            Ada.Text_IO.New_Line;
-            Ada.Text_IO.Put_Line ("Translate_RHS_Multiple_Item edited:");
-            Tree.Print_Tree
-              (Wisitoken_Grammar_Actions.Descriptor,
-               (if Container_List_Root = Invalid_Node_Index
-                then Parent_RHS_Item
-                else Container_List_Root));
-         end if;
-      end Translate_RHS_Multiple_Item;
-
-      procedure Translate_RHS_Optional_Item (B : in Valid_Node_Index)
-      is
-         --  Source looks like:
-         --
-         --  | A [B] C
-         --
-         --  where A, B, C are token sequences. All are contained in one
-         --  rhs_item_list, which may be contained in an rhs or an
-         --  rhs_alternative_list. B contains an rhs_alternative_list.
-         --
-         --  First add a second rhs_item_list without B:
-         --  | A C
-         --
-         --  then for each alternative in B, splice together rhs_item_lists A,
-         --  B_i, C, copying A, C on all after the first:
-         --  | A B_i C
-         --
-         --  See nested_ebnf_optional.wy for an example of nested optional
-         --  items.
-         --
-         --  We don't create a separate nonterminal for B, so token labels stay
-         --  in the same RHS for actions.
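-         --
-         --  For example (illustrative): "stmt : name [':' expr] ';' ;"
-         --  becomes "stmt : name ';' | name ':' expr ';' ;".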
-         --
-         --  current tree:
-         --
-         --  rhs_list:
-         --  | rhs | rhs_alternative_list:
-         --  | | rhs_item_list
-         --  | | | rhs_item_list
-         --  | | ...
-         --  | | | | | rhs_element: a.last
-         --  | | | | | | rhs_item:
-         --  | | | | rhs_element:
-         --  | | | | | rhs_item: contains b
-         --  | | | | | | rhs_optional_item: B
-         --  | | | | | | | LEFT_BRACKET: B.Children (1)
-         --  | | | | | | | rhs_alternative_list: B.Children (2) b
-         --  | | | | | | | RIGHT_BRACKET: B.Children (3)
-         --  | | | rhs_element: c.first
-         --  | | | | rhs_item:
-
-         use Syntax_Trees.LR_Utils;
-         use Syntax_Trees.LR_Utils.Creators;
-
-         Container_List_Root : constant Valid_Node_Index := Insert_Optional_RHS (B);
-      begin
-         if Trace_Generate_EBNF > Extra then
-            Ada.Text_IO.New_Line;
-            Ada.Text_IO.Put_Line ("Translate_RHS_Optional_Item start");
-         end if;
-
-         case Tree.RHS_Index (B) is
-         when 0 | 1 =>
-            --  : LEFT_BRACKET rhs_alternative_list RIGHT_BRACKET
-            --  | LEFT_PAREN rhs_alternative_list RIGHT_PAREN QUESTION
-
-            declare
-               Container_List : Syntax_Trees.LR_Utils.List :=
-                 (if Tree.ID (Container_List_Root) = +rhs_list_ID
-                  then Create_List
-                    (Tree,
-                     Root         => Container_List_Root,
-                     List_ID      => +rhs_list_ID,
-                     Element_ID   => +rhs_ID,
-                     Separator_ID => +BAR_ID)
-                  else Create_List
-                    (Tree,
-                     Root         => Container_List_Root,
-                     List_ID      => +rhs_alternative_list_ID,
-                     Element_ID   => +rhs_item_list_ID,
-                     Separator_ID => +BAR_ID));
-
-               Container_Cur : Cursor := Container_List.Find
-                 (if Container_List.Element_ID = +rhs_ID
-                  then Tree.Find_Ancestor (B, +rhs_ID)
-                  else List_Root (Tree, Tree.Find_Ancestor (B, +rhs_item_list_ID), +rhs_item_list_ID));
-
-               ABC_List : List := Create_From_Element
-                 (Tree, Tree.Parent (B, 2),
-                  List_ID      => +rhs_item_list_ID,
-                  Element_ID   => +rhs_element_ID,
-                  Separator_ID => Invalid_Token_ID);
-
-               ABC_Iter : constant Iterator := ABC_List.Iterate;
-
-               ABC_B_Cur   : constant Cursor := ABC_List.To_Cursor (Tree.Parent (B, 2));
-               ABC_A_Last  : constant Cursor := ABC_Iter.Previous (ABC_B_Cur);
-               ABC_C_First : constant Cursor := ABC_Iter.Next (ABC_B_Cur);
-
-               B_Alternative_List : constant Constant_List := Create_List
-                 (Tree, Tree.Child (B, 2), +rhs_alternative_list_ID, +rhs_item_list_ID);
-
-            begin
-               --  An alternate design would be to splice together the existing A,
-               --  B_i, C; but it's too hard to get all the parent updates right.
-               for Alt of reverse B_Alternative_List loop
-
-                  declare
-                     B_Item_List : constant Constant_List := Create_List
-                       (Tree, Alt, +rhs_item_list_ID, +rhs_element_ID);
-
-                     New_ABC : List := Empty_List (ABC_List);
-                  begin
-                     if Has_Element (ABC_A_Last) then
-                        Copy (Source_List => ABC_List,
-                              Source_Last => ABC_A_Last,
-                              Dest_List   => New_ABC);
-                     end if;
-
-                     Copy (B_Item_List, Dest_List => New_ABC);
-
-                     if Has_Element (ABC_C_First) then
-                        Copy (ABC_List, Source_First => ABC_C_First, Dest_List => New_ABC);
-                     end if;
-
-                     if Container_List.Element_ID = +rhs_ID then
-                        Insert_RHS (Container_List, New_ABC.Root, After => Get_Node (Container_Cur));
-                     else
-                        Container_List.Insert (New_ABC.Root, After => Container_Cur);
-                     end if;
-
-                     Record_Copied_EBNF_Nodes (New_ABC.Root);
-                  end;
-               end loop;
-
-               Erase_Deleted_EBNF_Nodes (Get_Node (Container_Cur));
-               --  This includes B, so we don't do 'Clear_EBNF_Node (B)'.
-
-               Container_List.Delete (Container_Cur);
-            end;
-
-         when 2 =>
-            --  | IDENTIFIER QUESTION
-            --
-            --  Current tree:
-            --   rhs_item_3
-            --   | rhs_optional_item_2: B
-            --   | | IDENTIFIER
-            --   | | QUESTION
-            --
-            --  Change to:
-            --   rhs_item_0
-            --   | IDENTIFIER
-
-            Tree.Set_Children (Tree.Parent (B), (+rhs_item_ID, 0), (1 => Tree.Child (B, 1)));
-            Clear_EBNF_Node (B);
-
-         when 3 =>
-            --  | STRING_LITERAL_2 QUESTION
-            Tree.Set_Children (Tree.Parent (B), (+rhs_item_ID, 1), (1 => Tree.Child (B, 1)));
-            Clear_EBNF_Node (B);
-
-         when others =>
-            Raise_Programmer_Error ("translate_ebnf_to_bnf rhs_optional_item unimplemented", Data, Tree, B);
-         end case;
-
-         if WisiToken.Trace_Generate_EBNF > Detail then
-            Ada.Text_IO.New_Line;
-            Ada.Text_IO.Put_Line ("Translate_RHS_Optional_Item edited:");
-            Tree.Print_Tree (Wisitoken_Grammar_Actions.Descriptor, Container_List_Root);
-         end if;
-      end Translate_RHS_Optional_Item;
-
-      procedure Translate_Token_Literal (Node : in Valid_Node_Index)
-      is
-         use Syntax_Trees.LR_Utils;
-
-         Name_Ident : Identifier_Index;
-
-         function Equal
-           (Target : in String;
-            List   : in Constant_List'Class;
-            N      : in Valid_Node_Index)
-           return Boolean
-         is
-            pragma Unreferenced (List);
-         begin
-            if Tree.Production_ID (Tree.Child (N, 1)) = (+declaration_ID, 0) then
-               declare
-                  Decl       : constant Node_Index       := Tree.Child (N, 1);
-                  Value_Node : constant Valid_Node_Index := Tree.Child (Tree.Child (Decl, 4), 1);
-               begin
-                  if Tree.ID (Value_Node) = +declaration_item_ID and then
-                    Tree.ID (Tree.Child (Value_Node, 1)) in
-                    +IDENTIFIER_ID | +STRING_LITERAL_1_ID | +STRING_LITERAL_2_ID and then
-                    Target = Get_Text (Data, Tree, Tree.Child (Value_Node, 1), Strip_Quotes => True)
-                  then
-                     case Tree.Label (Tree.Child (Decl, 3)) is
-                     when Shared_Terminal =>
-                        Name_Ident := New_Identifier (Get_Text (Data, Tree, Tree.Child (Decl, 3)));
-                     when Virtual_Identifier =>
-                        Name_Ident := Tree.Identifier (Tree.Child (Decl, 3));
-                     when others =>
-                        raise SAL.Programmer_Error;
-                     end case;
-                     return True;
-                  else
-                     return False;
-                  end if;
-               end;
-            else
-               return False;
-            end if;
-         end Equal;
-
-         Value : constant String     := Get_Text (Data, Tree, Node, Strip_Quotes => True);
-         Found : constant Node_Index := Find_Nonterminal (Value, Equal'Unrestricted_Access);
-      begin
-         if Found = Invalid_Node_Index then
-            if GNAT.Regexp.Match (Value, Symbol_Regexp) then
-               Name_Ident := New_Identifier (Ada.Characters.Handling.To_Upper (Value));
-            else
-               Put_Error
-                 (Error_Message
-                    (Data.Grammar_Lexer.File_Name, Get_Line (Data, Tree, Node),
-                     "punctuation token '" & Value & "' not declared"));
-               return;
-            end if;
-         end if;
-
-         --  Replace string literal in rhs_item
-         declare
-            Parent : constant Valid_Node_Index := Tree.Parent (Node);
-         begin
-            case To_Token_Enum (Tree.ID (Parent)) is
-            when rhs_item_ID =>
-               Tree.Set_Children
-                 (Tree.Parent (Node),
-                  (+rhs_item_ID, 0),
-                  (1 => Tree.Add_Identifier (+IDENTIFIER_ID, Name_Ident, Tree.Byte_Region (Node))));
-
-            when rhs_optional_item_ID =>
-               Tree.Set_Children
-                 (Tree.Parent (Node),
-                  (+rhs_optional_item_ID, 2),
-                  (Tree.Add_Identifier (+IDENTIFIER_ID, Name_Ident, Tree.Byte_Region (Node)),
-                   Tree.Child (Tree.Parent (Node), 2)));
-
-            when others =>
-               Raise_Programmer_Error ("translate_ebnf_to_bnf string_literal_2 unimplemented", Data, Tree, Node);
-            end case;
-         end;
-
-         Clear_EBNF_Node (Node);
-         if Found /= Invalid_Node_Index then
-            return;
-         end if;
-
-         --  Declare token for keyword string literal
-         declare
-            Keyword        : constant Valid_Node_Index := Tree.Add_Identifier
-              (+KEYWORD_ID, Keyword_Ident, Tree.Byte_Region (Node));
-            Kind           : constant Valid_Node_Index := Tree.Add_Nonterm
-              ((+token_keyword_non_grammar_ID, 0),
-               (1 => Keyword));
-            Value_Literal  : constant Valid_Node_Index := Tree.Add_Identifier
-              (+STRING_LITERAL_1_ID, New_Identifier ('"' & Value & '"'), Tree.Byte_Region (Node));
-            Decl_Item      : constant Valid_Node_Index := Tree.Add_Nonterm
-              ((+declaration_item_ID, 1),
-               (1 => Value_Literal));
-            Decl_Item_List : constant Valid_Node_Index := Tree.Add_Nonterm
-              ((+declaration_item_list_ID, 0),
-               (1 => Decl_Item));
-
-            Percent : constant Valid_Node_Index := Tree.Add_Identifier
-              (+PERCENT_ID, Percent_Ident, Tree.Byte_Region (Node));
-            Name    : constant Valid_Node_Index := Tree.Add_Identifier
-              (+IDENTIFIER_ID, Name_Ident, Tree.Byte_Region (Node));
-            Decl    : constant Valid_Node_Index := Tree.Add_Nonterm
-              ((+declaration_ID, 0), (Percent, Kind, Name, Decl_Item_List), Action => declaration_0'Access);
-         begin
-            Add_Compilation_Unit ("literal token", Decl, Prepend => True);
-         end;
-
-      end Translate_Token_Literal;
-
-      procedure Process_Node (Node : in Valid_Node_Index)
-      is begin
-         case To_Token_Enum (Tree.ID (Node)) is
-         --  Token_Enum_ID alphabetical order
-         when declaration_ID =>
-            --  Must be "%meta_syntax EBNF"; change to BNF
-            declare
-               Decl_Item    : constant Valid_Node_Index       := Tree.Find_Descendant
-                 (Tree.Child (Node, 3), +declaration_item_ID);
-               Old_Children : constant Valid_Node_Index_Array := Tree.Children (Decl_Item);
-               New_Children : constant Valid_Node_Index_Array :=
-                 (1 => Tree.Add_Identifier
-                    (+IDENTIFIER_ID, New_Identifier ("BNF"), Tree.Byte_Region (Decl_Item)));
-            begin
-               Copy_Non_Grammar (Old_Children (1), New_Children (1));
-               Tree.Set_Children (Decl_Item, (+declaration_item_ID, 1), New_Children);
-            end;
-            Clear_EBNF_Node (Node);
-
-         when rhs_alternative_list_ID =>
-            --  All handled by New_Nonterminal*
-            raise SAL.Programmer_Error;
-
-         when rhs_attribute_ID =>
-            --  Just delete it
-            declare
-               use WisiToken.Syntax_Trees.LR_Utils;
-               RHS_Item_List : List := Creators.Create_From_Element
-                 (Tree, Tree.Parent (Node, 2), +rhs_item_list_ID, +rhs_element_ID, Invalid_Token_ID);
-               Element : Cursor := RHS_Item_List.To_Cursor (Tree.Parent (Node, 2));
-            begin
-               RHS_Item_List.Delete (Element);
-            end;
-            Clear_EBNF_Node (Node);
-
-         when rhs_group_item_ID =>
-            Translate_RHS_Group_Item (Node);
-
-         when rhs_multiple_item_ID =>
-            Translate_RHS_Multiple_Item (Node);
-
-         when rhs_optional_item_ID =>
-            Translate_RHS_Optional_Item (Node);
-
-         when STRING_LITERAL_2_ID =>
-            Translate_Token_Literal (Node);
-
-         when others =>
-            Raise_Programmer_Error ("unimplemented EBNF node", Data, Tree, Node);
-         end case;
-      exception
-      when SAL.Programmer_Error =>
-         raise;
-      when E : others =>
-         Raise_Programmer_Error
-           ("unhandled exception " & Ada.Exceptions.Exception_Name (E) & ": " &
-              Ada.Exceptions.Exception_Message (E),
-            Data, Tree, Node);
-      end Process_Node;
-
-      EBNF_Allowed : Boolean := True;
-
-      procedure Validate_Node
-        (Tree              : in     Syntax_Trees.Tree;
-         Node              : in     Valid_Node_Index;
-         Node_Image_Output : in out Boolean)
-      is
-         use Ada.Text_IO;
-
-         procedure Put_Error (Msg : in String)
-         is begin
-            if not Node_Image_Output then
-               Node_Image_Output := True;
-               Put_Line
-                 (Current_Error,
-                  Error_Message
-                    (Tree, Data.Terminals, Node, Data.Grammar_Lexer.File_Name,
-                     Tree.Image
-                       (Node, Wisitoken_Grammar_Actions.Descriptor,
-                        Include_RHS_Index => True,
-                        Include_Children  => Trace_Generate_EBNF > Detail,
-                        Node_Numbers      => True)));
-            end if;
-            Put_Line (Current_Error, "... " & Msg);
-            WisiToken.Generate.Error := True;
-         end Put_Error;
-
-         procedure Check_EBNF_Allowed
-         is begin
-            if not EBNF_Allowed then
-               Put_Error ("no EBNF allowed");
-            end if;
-         end Check_EBNF_Allowed;
-
-      begin
-         if Tree.Label (Node) /= Nonterm then
-            return;
-         end if;
-
-         declare
-            use all type Ada.Containers.Count_Type;
-            Children  : constant Valid_Node_Index_Array := Tree.Children (Node);
-            RHS_Index : constant Natural                := Tree.RHS_Index (Node);
-         begin
-            case To_Token_Enum (Tree.ID (Node)) is
-            when nonterminal_ID =>
-               null;
-
-            when rhs_list_ID =>
-               case RHS_Index is
-               when 0 =>
-                  if Children'Length /= 1 then
-                     Put_Error ("expected child_count 1");
-                  elsif Tree.ID (Children (1)) /= +rhs_ID then
-                     Put_Error ("child 1 not rhs");
-                  end if;
-
-               when 1 =>
-                  if Tree.Child_Count (Node) /= 3 then
-                     Put_Error ("expected child_count 3");
-                  elsif Tree.ID (Children (1)) /= +rhs_list_ID or
-                    Tree.ID (Children (2)) /= +BAR_ID or
-                    Tree.ID (Children (3)) /= +rhs_ID
-                  then
-                     Put_Error ("expecting rhs_list BAR rhs");
-                  end if;
-
-               when others =>
-                  Put_Error ("unexpected RHS_Index");
-               end case;
-
-            when rhs_ID =>
-               case RHS_Index is
-               when 0 =>
-                  if Children'Length /= 0 then
-                     Put_Error ("expected child_count 0");
-                  end if;
-
-               when 1 =>
-                  if Tree.Child_Count (Node) /= 1 then
-                     Put_Error ("expected child_count 1");
-                  elsif Tree.ID (Children (1)) /= +rhs_item_list_ID then
-                     Put_Error ("expecting rhs_item_list");
-                  end if;
-
-               when 2 =>
-                  if Tree.Child_Count (Node) /= 2 then
-                     Put_Error ("expected child_count 2");
-                  elsif Tree.ID (Children (1)) /= +rhs_item_list_ID or
-                    Tree.ID (Children (2)) /= +ACTION_ID
-                  then
-                     Put_Error ("expecting rhs_item_list ACTION");
-                  end if;
-
-               when 3 =>
-                  if Tree.Child_Count (Node) /= 3 then
-                     Put_Error ("expected child_count 3");
-                  elsif Tree.ID (Children (1)) /= +rhs_item_list_ID or
-                    Tree.ID (Children (2)) /= +ACTION_ID or
-                    Tree.ID (Children (3)) /= +ACTION_ID
-                  then
-                     Put_Error ("expecting rhs_item_list ACTION ACTION");
-                  end if;
-
-               when others =>
-                  Put_Error ("unexpected RHS_Index");
-               end case;
-
-            when rhs_attribute_ID =>
-               Check_EBNF_Allowed;
-
-            when rhs_element_ID =>
-               case RHS_Index is
-               when 0 =>
-                  if Tree.Child_Count (Node) /= 1 then
-                     Put_Error ("expected child_count 1");
-                  elsif Tree.ID (Children (1)) /= +rhs_item_ID then
-                     Put_Error ("expecting rhs_item");
-                  end if;
-
-               when 1 =>
-                  if Tree.Child_Count (Node) /= 3 then
-                     Put_Error ("expected child_count 3");
-                  elsif Tree.ID (Children (1)) /= +IDENTIFIER_ID or
-                    Tree.ID (Children (2)) /= +EQUAL_ID or
-                    Tree.ID (Children (3)) /= +rhs_item_ID
-                  then
-                     Put_Error ("expecting IDENTIFIER EQUAL rhs_item");
-                  end if;
-
-               when others =>
-                  Put_Error ("unexpected RHS_Index");
-               end case;
-
-            when rhs_item_list_ID =>
-               case RHS_Index is
-               when 0 =>
-                  if Tree.Child_Count (Node) /= 1 then
-                     Put_Error ("expected child_count 1");
-                  elsif Tree.ID (Children (1)) /= +rhs_element_ID then
-                     Put_Error ("expecting rhs_element");
-                  end if;
-
-               when 1 =>
-                  if Tree.Child_Count (Node) /= 2 then
-                     Put_Error ("expected child_count 2");
-                  elsif Tree.ID (Children (1)) /= +rhs_item_list_ID or
-                    Tree.ID (Children (2)) /= +rhs_element_ID
-                  then
-                     Put_Error ("expecting rhs_item_list ELEMENT");
-                  end if;
-
-               when others =>
-                  Put_Error ("unexpected RHS_Index");
-               end case;
-
-            when rhs_item_ID =>
-               if Tree.Child_Count (Node) /= 1 then
-                  Put_Error ("expected child_count 1");
-               end if;
-
-               case RHS_Index is
-               when 0 =>
-                  if Tree.ID (Children (1)) /= +IDENTIFIER_ID then
-                     Put_Error ("expecting IDENTIFIER");
-                  end if;
-
-               when 1 =>
-                  if Tree.ID (Children (1)) /= +STRING_LITERAL_2_ID then
-                     Put_Error ("expecting STRING_LITERAL_2");
-                  end if;
-
-               when 2 =>
-                  if Tree.ID (Children (1)) /= +rhs_attribute_ID then
-                     Put_Error ("expecting rhs_attribute");
-                  end if;
-
-               when 3 =>
-                  if Tree.ID (Children (1)) /= +rhs_optional_item_ID then
-                     Put_Error ("expecting rhs_optional_item");
-                  end if;
-
-               when 4 =>
-                  if Tree.ID (Children (1)) /= +rhs_multiple_item_ID then
-                     Put_Error ("expecting rhs_multiple_item");
-                  end if;
-
-               when 5 =>
-                  if Tree.ID (Children (1)) /= +rhs_group_item_ID then
-                     Put_Error ("expecting rhs_group_item");
-                  end if;
-
-               when others =>
-                  Put_Error ("unexpected RHS_Index");
-               end case;
-
-            when rhs_group_item_ID =>
-               Check_EBNF_Allowed;
-               if RHS_Index /= 0 or
-                 (Children'Length /= 3 or else
-                    (Tree.ID (Children (1)) /= +LEFT_PAREN_ID or
-                       Tree.ID (Children (2)) /= +rhs_alternative_list_ID or
-                       Tree.ID (Children (3)) /= +RIGHT_PAREN_ID))
+               if Has_Separator and then Right_Hand_Sides (RHS).Tokens (2).Identifier /= Separator then
+                  return False;
+               end if;
+               if Right_Hand_Sides (RHS).Tokens (Right_Hand_Sides (RHS).Tokens.Last_Index).Identifier /= Element
                then
-                  Put_Error ("expecting RHS_Index 0, LEFT_PAREN rhs_alternative_list RIGHT_PAREN");
+                  return False;
+               end if;
+            else
+               if Right_Hand_Sides (RHS).Tokens.Length /= 1 + Element_Token_Count then
+                  return False;
                end if;
 
-            when rhs_optional_item_ID =>
-               Check_EBNF_Allowed;
-               case RHS_Index is
-               when 0 =>
-                  if Children'Length /= 3 or else
-                    (Tree.ID (Children (1)) /= +LEFT_BRACKET_ID or
-                       Tree.ID (Children (2)) /= +rhs_alternative_list_ID or
-                       Tree.ID (Children (3)) /= +RIGHT_BRACKET_ID)
-                  then
-                     Put_Error ("expecting LEFT_BRACKET rhs_alternative_list RIGHT_BRACKET");
-                  end if;
-
-               when 1 =>
-                  if Children'Length /= 4 or else
-                    (Tree.ID (Children (1)) /= +LEFT_PAREN_ID or
-                       Tree.ID (Children (2)) /= +rhs_alternative_list_ID or
-                       Tree.ID (Children (3)) /= +RIGHT_PAREN_ID or
-                       Tree.ID (Children (4)) /= +QUESTION_ID)
-                  then
-                     Put_Error ("expecting LEFT_PAREN rhs_alternative_list RIGHT_PAREN QUESTION");
-                  end if;
-
-               when 2 =>
-                  if Children'Length /= 2 or else
-                    (Tree.ID (Children (1)) /= +IDENTIFIER_ID or
-                       Tree.ID (Children (2)) /= +QUESTION_ID)
-                  then
-                     Put_Error ("expecting IDENTIFIER QUESTION");
-                  end if;
-
-               when 3 =>
-                  if Children'Length /= 2 or else
-                    (Tree.ID (Children (1)) /= +STRING_LITERAL_2_ID or
-                       Tree.ID (Children (2)) /= +QUESTION_ID)
-                  then
-                     Put_Error ("expecting STRING_LITERAL_2 QUESTION");
-                  end if;
-
-               when others =>
-                  Put_Error ("unexpected RHS_Index");
-               end case;
-
-            when rhs_multiple_item_ID =>
-               Check_EBNF_Allowed;
-               case RHS_Index is
-               when 0 =>
-                  if Children'Length /= 3 or else
-                    (Tree.ID (Children (1)) /= +LEFT_BRACE_ID or
-                       Tree.ID (Children (2)) /= +rhs_alternative_list_ID or
-                       Tree.ID (Children (3)) /= +RIGHT_BRACE_ID)
-                  then
-                     Put_Error ("expecting LEFT_BRACE rhs_alternative_list RIGHT_BRACE");
-                  end if;
-
-               when 1 =>
-                  if Children'Length /= 4 or else
-                    (Tree.ID (Children (1)) /= +LEFT_BRACE_ID or
-                       Tree.ID (Children (2)) /= +rhs_alternative_list_ID or
-                       Tree.ID (Children (3)) /= +RIGHT_BRACE_ID or
-                       Tree.ID (Children (4)) /= +MINUS_ID)
-                  then
-                     Put_Error ("expecting LEFT_BRACE rhs_alternative_list RIGHT_BRACE MINUS");
-                  end if;
-
-               when 2 =>
-                  if Children'Length /= 4 or else
-                    (Tree.ID (Children (1)) /= +LEFT_PAREN_ID or
-                       Tree.ID (Children (2)) /= +rhs_alternative_list_ID or
-                       Tree.ID (Children (3)) /= +RIGHT_PAREN_ID or
-                       Tree.ID (Children (4)) /= +PLUS_ID)
-                  then
-                     Put_Error ("expecting LEFT_PAREN rhs_alternative_list RIGHT_PAREN PLUS");
-                  end if;
-
-               when 3 =>
-                  if Children'Length /= 4 or else
-                    (Tree.ID (Children (1)) /= +LEFT_PAREN_ID or
-                       Tree.ID (Children (2)) /= +rhs_alternative_list_ID or
-                       Tree.ID (Children (3)) /= +RIGHT_PAREN_ID or
-                       Tree.ID (Children (4)) /= +STAR_ID)
-                  then
-                     Put_Error ("expecting LEFT_PAREN rhs_alternative_list RIGHT_PAREN STAR");
-                  end if;
-
-               when 4 =>
-                  if Children'Length /= 2 or else
-                    (Tree.ID (Children (1)) /= +IDENTIFIER_ID or
-                       Tree.ID (Children (2)) /= +PLUS_ID)
-                  then
-                     Put_Error ("expecting IDENTIFIER PLUS");
-                  end if;
-
-               when 5 =>
-                  if Children'Length /= 2 or else
-                    (Tree.ID (Children (1)) /= +IDENTIFIER_ID or
-                       Tree.ID (Children (2)) /= +STAR_ID)
-                  then
-                     Put_Error ("expecting IDENTIFIER STAR");
-                  end if;
-
-               when others =>
-                  Put_Error ("unexpected RHS_Index");
-               end case;
-
-            when rhs_alternative_list_ID =>
-               Check_EBNF_Allowed;
-               case RHS_Index is
-               when 0 =>
-                  if Children'Length /= 1 or else
-                    (Tree.ID (Children (1)) /= +rhs_item_list_ID)
-                  then
-                     Put_Error ("expecting rhs_item_list");
-                  end if;
+               declare
+                  Temp             : Unbounded_String;
+                  Temp_Token_Count : Count_Type := 0;
+               begin
+                  for I in 2 .. Positive_Index_Type (Right_Hand_Sides (RHS).Tokens.Length) loop
+                     Append (Temp, Right_Hand_Sides (RHS).Tokens (I).Identifier);
+                     Temp_Token_Count := @ + 1;
+                  end loop;
 
-               when 1 =>
-                  if Children'Length /= 3 or else
-                    (Tree.ID (Children (1)) /= +rhs_alternative_list_ID or
-                       Tree.ID (Children (2)) /= +BAR_ID or
-                       Tree.ID (Children (3)) /= +rhs_item_list_ID)
-                  then
-                     Put_Error ("expecting rhs_alternative_list BAR rhs_item_list");
+                  if Temp /= Element or Temp_Token_Count /= Element_Token_Count then
+                     return False;
                   end if;
-               when others =>
-                  Put_Error ("unexpected RHS_Index");
-               end case;
-
-            when compilation_unit_ID =>
-               null;
-
-            when compilation_unit_list_ID =>
-               null;
-
-            when others =>
-               null;
-            end case;
-         end;
-      end Validate_Node;
-
-      procedure Check_Original_EBNF
-      is
-         use Ada.Text_IO;
-         Sub_Tree_Root : Node_Index;
-      begin
-         for N in Data.EBNF_Nodes.First_Index .. Data.EBNF_Nodes.Last_Index loop
-            if Data.EBNF_Nodes (N) then
-               Sub_Tree_Root := Tree.Sub_Tree_Root (N);
-               if Sub_Tree_Root /= Tree.Root then
-                  Put_Line
-                    (Current_Error,
-                     Error_Message
-                       (Tree, Data.Terminals, N, Data.Grammar_Lexer.File_Name,
-                        Tree.Image
-                          (N, Wisitoken_Grammar_Actions.Descriptor,
-                           Node_Numbers      => True)));
-                  Put_Line (Current_Error, "... not in tree; in root" & Sub_Tree_Root'Image);
-                  WisiToken.Generate.Error := True;
-               end if;
+               end;
             end if;
-         end loop;
-      end Check_Original_EBNF;
 
-      procedure Check_Copied_EBNF
-      is
-         use Ada.Text_IO;
-         Sub_Tree_Root : Node_Index;
-      begin
-         for N of Copied_EBNF_Nodes loop
-            if N /= Deleted_Child then
-               Sub_Tree_Root := Tree.Sub_Tree_Root (N);
-               if Sub_Tree_Root /= Tree.Root then
-                  Put_Line
-                    (Current_Error,
-                     Error_Message
-                       (Tree, Data.Terminals, N, Data.Grammar_Lexer.File_Name,
-                        Tree.Image
-                          (N, Wisitoken_Grammar_Actions.Descriptor,
-                           Node_Numbers      => True)));
-                  Put_Line (Current_Error, "... not in tree; in root" & Sub_Tree_Root'Image);
-                  WisiToken.Generate.Error := True;
+            Next (RHS);
+            if Right_Hand_Sides (RHS).Tokens.Length /= (if Has_Separator then 3 else 2) then
+               return False;
+            end if;
+            if -Right_Hand_Sides (RHS).Tokens (1).Identifier /= LHS_String then
+               return False;
+            end if;
+            if Has_Separator then
+               if Right_Hand_Sides (RHS).Tokens (2).Identifier /= Separator then
+                  return False;
+               end if;
+               if Right_Hand_Sides (RHS).Tokens (3).Identifier /= LHS_String then
+                  return False;
+               end if;
+            else
+               if Right_Hand_Sides (RHS).Tokens (2).Identifier /= LHS_String then
+                  return False;
                end if;
             end if;
-         end loop;
-      end Check_Copied_EBNF;
+            return True;
+         end;
+      end Is_Optimized_List;
 
    begin
-      --  Process nodes in node increasing order, so contained items are
-      --  translated first, so duplicates of the containing item can be found
-      for I in Data.EBNF_Nodes.First_Index .. Data.EBNF_Nodes.Last_Index loop
-         if Data.EBNF_Nodes (I) then
-            if Trace_Generate_EBNF > Outline then
-               Ada.Text_IO.New_Line;
-               Ada.Text_IO.Put_Line
-                 ("translate original node " & Tree.Image
-                    (I, Wisitoken_Grammar_Actions.Descriptor,
-                     Include_RHS_Index => True,
-                     Node_Numbers      => True));
-            end if;
-
-            Process_Node (I);
+      if Data.Phase = Meta or Data.Ignore_Lines then
+         return;
+      end if;
 
-            Tree.Validate_Tree
-              (Data.Terminals, Wisitoken_Grammar_Actions.Descriptor, Data.Grammar_Lexer.File_Name, Tree.Root,
-               Validate_Node'Unrestricted_Access);
-            Check_Original_EBNF;
-            Check_Copied_EBNF;
-         end if;
-      end loop;
+      Data.Rule_Count := Data.Rule_Count + 1;
 
-      declare
-         use Ada.Text_IO;
-      begin
-         for Node in Data.EBNF_Nodes.First_Index .. Data.EBNF_Nodes.Last_Index loop
-            if Data.EBNF_Nodes (Node) then
-               Put_Line
-                 (Current_Error,
-                  Error_Message
-                    (Tree, Data.Terminals, Node, Data.Grammar_Lexer.File_Name,
-                     Tree.Image
-                       (Node, Wisitoken_Grammar_Actions.Descriptor,
-                        Include_RHS_Index => True,
-                        Include_Children  => Trace_Generate_EBNF > Detail,
-                        Node_Numbers      => True)));
-               Put_Line (Current_Error, "... original EBNF node not translated");
-            end if;
-         end loop;
-      end;
+      Get_Right_Hand_Sides (Data, Tree, Right_Hand_Sides, Labels, Tree.Child (Nonterm, 3));
 
-      declare
-         I : SAL.Base_Peek_Type := Copied_EBNF_Nodes.First_Index;
-      begin
-         --  Processing copied nodes may produce more copied nodes, so we can't
-         --  use a 'for' loop.
-         loop
-            exit when I > Copied_EBNF_Nodes.Last_Index;
-            if Copied_EBNF_Nodes (I) = Deleted_Child then
-               --  Deleted
-               if Trace_Generate_EBNF > Outline then
-                  Ada.Text_IO.New_Line;
-                  Ada.Text_IO.Put_Line
-                    ("skipping deleted copied node " & Tree.Image
-                       (Copied_EBNF_Nodes (I), Wisitoken_Grammar_Actions.Descriptor,
-                        Include_RHS_Index => True,
-                        Node_Numbers      => True));
-               end if;
-            else
-               if Trace_Generate_EBNF > Outline then
-                  Ada.Text_IO.New_Line;
-                  Ada.Text_IO.Put_Line
-                    ("translate copied node " & Tree.Image
-                       (Copied_EBNF_Nodes (I), Wisitoken_Grammar_Actions.Descriptor,
-                        Include_RHS_Index => True,
-                        Node_Numbers      => True));
-               end if;
+      if WisiToken.BNF.Is_Present (Data.Tokens.Rules, LHS_String) then
+         case Tree.Label (LHS_Node) is
+         when Source_Terminal =>
+            Put_Error (Tree.Error_Message (LHS_Node, "duplicate nonterm"));
 
-               Process_Node (Copied_EBNF_Nodes (I));
+         when Virtual_Identifier =>
+            Put_Error (Error_Message (Tree.Lexer.File_Name, 1, 1, "duplicate virtual nonterm '" & LHS_String & "'"));
 
-               Tree.Validate_Tree
-                 (Data.Terminals, Wisitoken_Grammar_Actions.Descriptor, Data.Grammar_Lexer.File_Name, Tree.Root,
-                  Validate_Node'Unrestricted_Access);
-               Check_Copied_EBNF;
-            end if;
-            I := I + 1;
-         end loop;
-      end;
+         when others =>
+            WisiToken.Syntax_Trees.LR_Utils.Raise_Programmer_Error ("Add_Nonterminal", Tree, LHS_Node);
+         end case;
+      else
+         Data.Label_Count := Data.Label_Count + Labels.Length;
 
-      declare
-         use Ada.Text_IO;
-      begin
-         for Node of Copied_EBNF_Nodes loop
-            if Node /= Deleted_Child then
-               Put_Line
-                 (Current_Error,
-                  Error_Message
-                    (Tree, Data.Terminals, Node, Data.Grammar_Lexer.File_Name,
-                     Tree.Image
-                       (Node, Wisitoken_Grammar_Actions.Descriptor,
-                        Include_RHS_Index => True,
-                        Include_Children  => Trace_Generate_EBNF > Detail,
-                        Node_Numbers      => True)));
-               Put_Line (Current_Error, "... copied EBNF node not translated");
-            end if;
-         end loop;
-      end;
-
-      EBNF_Allowed := False;
-      Tree.Validate_Tree
-        (Data.Terminals, Wisitoken_Grammar_Actions.Descriptor, Data.Grammar_Lexer.File_Name, Tree.Root,
-         Validate_Node'Unrestricted_Access);
-
-      Data.Meta_Syntax := BNF_Syntax;
-
-      if Trace_Generate_EBNF > Detail then
-         Ada.Text_IO.New_Line;
-         Ada.Text_IO.Put_Line ("Identifiers:");
-         for I in Data.Tokens.Virtual_Identifiers.First_Index .. Data.Tokens.Virtual_Identifiers.Last_Index loop
-            Ada.Text_IO.Put_Line (Base_Identifier_Index'Image (I) & " " & (-Data.Tokens.Virtual_Identifiers (I)));
-         end loop;
+         Data.Tokens.Rules.Append
+           ((+LHS_String, Right_Hand_Sides, Labels,
+             Optimized_List => Is_Optimized_List,
+             Source_Line    =>
+               (case Tree.Label (LHS_Node) is
+                when Source_Terminal    => Tree.Line_Region (LHS_Node, Trailing_Non_Grammar => True).First,
+                when Virtual_Identifier => Line_Number_Type'First, -- IMPROVEME: get line from Right_Hand_Sides
+                when others             => raise SAL.Programmer_Error)));
       end if;
-   end Translate_EBNF_To_BNF;
+   end Add_Nonterminal;
 
-   procedure Print_Source
-     (File_Name : in String;
-      Tree      : in WisiToken.Syntax_Trees.Tree;
-      Data      : in User_Data_Type)
+   procedure Check_EBNF
+     (User_Data : in out WisiToken.Syntax_Trees.User_Data_Type'Class;
+      Tree      : in     WisiToken.Syntax_Trees.Tree;
+      Nonterm   : in     WisiToken.Syntax_Trees.Valid_Node_Access;
+      Token     : in     WisiToken.Positive_Index_Type)
    is
-      use Ada.Text_IO;
-      use WisiToken.Syntax_Trees;
-
-      File : File_Type;
-
-      procedure Put_Comments
-        (Node           : in Valid_Node_Index;
-         Force_New_Line : in Boolean := False;
-         Force_Comment  : in String  := "")
-      is
-         Last_Term : constant Node_Index              := Tree.Last_Terminal (Node);
-         Aug       : constant Base_Token_Class_Access :=
-           (if Last_Term = Invalid_Node_Index
-            then null
-            else Tree.Augmented (Last_Term));
+      Data : User_Data_Type renames User_Data_Type (User_Data);
+   begin
+      if Data.Meta_Syntax = EBNF_Syntax then
+         Set_EBNF (Tree, Tree.Child (Nonterm, Token));
+         return;
+      end if;
 
-         Comments_Include_Newline : Boolean := False;
-      begin
-         if Aug = null then
-            if Force_Comment /= "" then
-               Put_Line (File, Force_Comment);
+      case Data.Phase is
+      when Meta =>
+         raise Grammar_Error with Tree.Error_Message
+           (Tree.Child (Nonterm, Token), "EBNF syntax used, but BNF specified; set '%meta_syntax EBNF'");
+      when Other =>
+         Put_Error
+           (Tree.Error_Message (Tree.Child (Nonterm, Token), "untranslated EBNF node") &
+              (if Debug_Mode
+               then " " & Tree.Image (Tree.Child (Nonterm, Token), Node_Numbers => True, RHS_Index => True)
+               else ""));
+         raise SAL.Programmer_Error;
+      end case;
+   end Check_EBNF;
 
-            elsif Force_New_Line then
-               New_Line (File);
-            end if;
-         else
-            for Token of Augmented_Token_Access (Aug).Non_Grammar loop
-               if Token.ID = +NEW_LINE_ID then
-                  Comments_Include_Newline := True;
-               end if;
-               Put (File, Data.Grammar_Lexer.Buffer_Text (Token.Byte_Region));
-            end loop;
-            if Force_New_Line and not Comments_Include_Newline then
-               New_Line (File);
-            end if;
-         end if;
-      end Put_Comments;
+   function Get_Text
+     (Virtual_Identifiers : in WisiToken.BNF.String_Arrays.Vector;
+      Tree                : in WisiToken.Syntax_Trees.Tree;
+      Tree_Index          : in WisiToken.Syntax_Trees.Node_Access;
+      Strip_Quotes        : in Boolean := False)
+     return String
+   is
+      use all type Syntax_Trees.Node_Label;
 
-      procedure Put_Declaration_Item (Node : in Valid_Node_Index)
+      function Strip_Delimiters (Tree_Index : in Syntax_Trees.Valid_Node_Access) return String
       is
-         Children : constant Valid_Node_Index_Array := Tree.Children (Node);
+         Region : Buffer_Region renames Tree.Byte_Region (Tree_Index, Trailing_Non_Grammar => False);
       begin
-         case To_Token_Enum (Tree.ID (Children (1))) is
-         when IDENTIFIER_ID | NUMERIC_LITERAL_ID | STRING_LITERAL_1_ID | STRING_LITERAL_2_ID =>
-            Put (File, ' ' & Get_Text (Data, Tree, Children (1)));
-         when REGEXP_ID =>
-            Put (File, " %[" & Get_Text (Data, Tree, Children (1)) & "]%");
-         when others =>
-            Put (File, Image (Tree.ID (Children (1)), Wisitoken_Grammar_Actions.Descriptor));
-         end case;
-      end Put_Declaration_Item;
+         if -Tree.ID (Tree_Index) in RAW_CODE_ID | REGEXP_ID | ACTION_ID then
+            --  Strip delimiters. We don't strip leading/trailing spaces to preserve indent.
+            return Tree.Lexer.Buffer_Text ((Region.First + 2, Region.Last - 2));
 
-      procedure Put_Declaration_Item_List (Node : in Valid_Node_Index)
-      is
-         Children : constant Valid_Node_Index_Array := Tree.Children (Node);
-      begin
-         if Children'Length = 1 then
-            Put_Declaration_Item (Children (1));
+         elsif -Tree.ID (Tree_Index) in STRING_LITERAL_1_ID | STRING_LITERAL_2_ID and Strip_Quotes then
+            return Tree.Lexer.Buffer_Text ((Region.First + 1, Region.Last - 1));
          else
-            Put_Declaration_Item_List (Children (1));
-            Put_Declaration_Item (Children (2));
+            return Tree.Lexer.Buffer_Text (Region);
          end if;
-      end Put_Declaration_Item_List;
+      end Strip_Delimiters;
 
-      procedure Put_Identifier_List (Node : in Valid_Node_Index)
-      is
-         Children : constant Valid_Node_Index_Array := Tree.Children (Node);
-      begin
-         if Children'Length = 1 then
-            Put (File, Get_Text (Data, Tree, Children (1)));
-         else
-            Put_Identifier_List (Children (1));
-            Put (File, ' ');
-            Put (File, Get_Text (Data, Tree, Children (2)));
-         end if;
-      end Put_Identifier_List;
+   begin
+      if Tree_Index = Syntax_Trees.Invalid_Node_Access then
+         return "<deleted child>";
+      end if;
 
-      procedure Put_RHS_Element (Node : in Valid_Node_Index)
-      with Pre => Tree.ID (Node) = +rhs_element_ID
-      is begin
-         --  We don't raise an exception for errors here; it's easier to debug from the
-         --  mangled source listing.
+      case Tree.Label (Tree_Index) is
+      when Source_Terminal =>
+         return Strip_Delimiters (Tree_Index);
 
-         case Tree.RHS_Index (Node) is
-         when 0 =>
-            Put (File, Get_Text (Data, Tree, Node));
+      when Virtual_Terminal =>
+         --  Terminal keyword inserted during tree edit. We could check for
+         --  Identifier, but that will be caught later.
+         return Image (Tree.ID (Tree_Index), Wisitoken_Grammar_Actions.Descriptor);
 
-         when 1 =>
-            --  Output no spaces around "="
+      when Virtual_Identifier =>
+         if Strip_Quotes then
             declare
-               Children : constant Valid_Node_Index_Array := Tree.Children (Node);
+               Quoted : constant String := -Virtual_Identifiers (Tree.Identifier (Tree_Index));
             begin
-               Put (File, Get_Text (Data, Tree, Children (1)) & "=" & Get_Text (Data, Tree, Children (3)));
+               return Quoted (Quoted'First + 1 .. Quoted'Last - 1);
             end;
-
-         when others =>
-            New_Line (File);
-            Put (File, " ;; not translated: " & Node_Index'Image (Node) & ":" &
-                   Tree.Image (Node, Wisitoken_Grammar_Actions.Descriptor,
-                               Include_Children  => True,
-                               Include_RHS_Index => True,
-                               Node_Numbers      => True));
-         end case;
-      exception
-      when SAL.Programmer_Error =>
-         raise;
-
-      when E : others =>
-         declare
-            use Ada.Exceptions;
-         begin
-            Raise_Programmer_Error
-              ("Put_RHS_Element: " & Exception_Name (E) & ": " & Exception_Message (E), Data, Tree, Node);
-         end;
-      end Put_RHS_Element;
-
-      procedure Put_RHS_Item_List (Node : in Valid_Node_Index)
-      with Pre => Tree.ID (Node) = +rhs_item_list_ID
-      is
-         Children : constant Valid_Node_Index_Array := Tree.Children (Node);
-      begin
-         if Children'Length = 1 then
-            Put_RHS_Element (Children (1));
          else
-            Put_RHS_Item_List (Children (1));
-            Put (File, ' ');
-            Put_RHS_Element (Children (2));
+            return -Virtual_Identifiers (Tree.Identifier (Tree_Index));
          end if;
-      exception
-      when SAL.Programmer_Error =>
-         raise;
-
-      when E : others =>
-         declare
-            use Ada.Exceptions;
-         begin
-            Raise_Programmer_Error
-              ("Put_RHS_Item_List: " & Exception_Name (E) & ": " & Exception_Message (E), Data, Tree, Node);
-         end;
-      end Put_RHS_Item_List;
-
-      procedure Put_RHS
-        (Node  : in Valid_Node_Index;
-         First : in Boolean)
-      with Pre => Tree.ID (Node) = +rhs_ID
-      is
-         Children : constant Valid_Node_Index_Array := Tree.Children (Node);
-      begin
-         Put (File, (if First then "  : " else "  | "));
-         case Tree.RHS_Index (Node) is
-         when 0 =>
-            Put_Comments (Tree.Parent (Node), Force_Comment => ";; empty");
-
-         when 1 .. 3 =>
-            Put_RHS_Item_List (Children (1));
-            Put_Comments (Children (1), Force_New_Line => True);
-
-            if Tree.RHS_Index (Node) > 1 then
-               Put (File, "    %(" & Get_Text (Data, Tree, Children (2)) & ")%"); -- action
-               Put_Comments (Children (2), Force_New_Line => True);
-
-               if Tree.RHS_Index (Node) > 2 then
-                  Put (File, "    %(" & Get_Text (Data, Tree, Children (3)) & ")%"); -- check
-                  Put_Comments (Children (3), Force_New_Line => True);
-               end if;
-            end if;
-
-         when others =>
-            Raise_Programmer_Error ("Put_RHS", Data, Tree, Node);
-         end case;
-      exception
-      when SAL.Programmer_Error =>
-         raise;
-
-      when E : others =>
-         declare
-            use Ada.Exceptions;
-         begin
-            Raise_Programmer_Error ("Put_RHS: " & Exception_Name (E) & ": " & Exception_Message (E), Data, Tree, Node);
-         end;
-      end Put_RHS;
-
-      procedure Put_RHS_List
-        (Node    : in     Valid_Node_Index;
-         First   : in out Boolean;
-         Virtual : in     Boolean)
-      with Pre => Tree.ID (Node) = +rhs_list_ID
-      is
-         Children : constant Valid_Node_Index_Array := Tree.Children (Node);
-      begin
-         case Tree.RHS_Index (Node) is
-         when 0 =>
-            Put_RHS (Children (1), First);
-            First := False;
-         when 1 =>
-            Put_RHS_List (Children (1), First, Virtual);
-            Put_RHS (Children (3), First => False);
-         when 2 =>
-            Put
-              (File, "%if " & Get_Text (Data, Tree, Children (3)) & " = " & Get_Text (Data, Tree, Children (4)));
-            Put_Comments (Node);
-
-         when 3 =>
-            Put (File, "%end if");
-            Put_Comments (Node);
-
-         when others =>
-            Raise_Programmer_Error ("Put_RHS_List", Data, Tree, Node);
-         end case;
-      exception
-      when SAL.Programmer_Error =>
-         raise;
 
-      when E : others =>
+      when Nonterm =>
          declare
-            use Ada.Exceptions;
+            use all type Ada.Strings.Unbounded.Unbounded_String;
+            Result       : Ada.Strings.Unbounded.Unbounded_String;
+            Tree_Indices : constant Syntax_Trees.Valid_Node_Access_Array := Tree.Get_Terminals (Tree_Index);
+            Need_Space   : Boolean                                       := False;
          begin
-            Raise_Programmer_Error
-              ("Put_RHS_List: " & Exception_Name (E) & ": " & Exception_Message (E), Data, Tree, Node);
+            for Tree_Index of Tree_Indices loop
+               Result := Result & (if Need_Space then " " else "") &
+                 Get_Text (Virtual_Identifiers, Tree, Tree_Index, Strip_Quotes);
+               Need_Space := True;
+            end loop;
+            return -Result;
          end;
-      end Put_RHS_List;
-
-      procedure Process_Node (Node : in Valid_Node_Index)
-      is begin
-         case To_Token_Enum (Tree.ID (Node)) is
-         --  Enum_Token_ID alphabetical order
-         when compilation_unit_ID =>
-            Process_Node (Tree.Child (Node, 1));
-
-         when compilation_unit_list_ID =>
-            declare
-               Children : constant Valid_Node_Index_Array := Tree.Children (Node);
-            begin
-               case To_Token_Enum (Tree.ID (Children (1))) is
-               when compilation_unit_list_ID =>
-                  Process_Node (Children (1));
-                  Process_Node (Children (2));
-               when compilation_unit_ID =>
-                  Process_Node (Children (1));
-               when others =>
-                  raise SAL.Programmer_Error;
-               end case;
-            end;
-
-         when declaration_ID =>
-            declare
-               Children : constant Valid_Node_Index_Array := Tree.Children (Node);
-            begin
-               case Tree.RHS_Index (Node) is
-               when 0 =>
-                  case Tree.RHS_Index (Children (2)) is
-                  when 0 =>
-                     Put (File, "%keyword");
-                  when 1 =>
-                     Put (File, "%non_grammar <" & Get_Text (Data, Tree, Tree.Child (Children (2), 3)) & ">");
-                  when 2 =>
-                     Put (File, "%token <" & Get_Text (Data, Tree, Tree.Child (Children (2), 3)) & ">");
-                  when others =>
-                     raise SAL.Programmer_Error;
-                  end case;
-
-                  Put (File, " " & Get_Text (Data, Tree, Children (3)));
-                  Put_Declaration_Item_List (Children (4));
-                  Put_Comments (Children (4), Force_New_Line => True);
-
-               when 1 =>
-                  Put (File, "%code ");
-                  Put_Identifier_List (Children (3));
-                  Put (File, " %{" & Get_Text (Data, Tree, Children (4)) & "}%"); -- RAW_CODE
-                  Put_Comments (Node);
-
-               when 2 =>
-                  declare
-                     Key : constant String := Get_Text (Data, Tree, Children (2));
-                  begin
-                     if Key = "conflict" then
-                        Put (File, Data.Grammar_Lexer.Buffer_Text (Tree.Byte_Region (Node)));
-                     else
-                        Put (File, "%" & Key);
-                        Put_Declaration_Item_List (Children (3));
-                     end if;
-                  end;
-                  Put_Comments (Children (3));
-
-               when 3 =>
-                  Put (File, "%" & Get_Text (Data, Tree, Children (2)));
-                  Put_Comments (Children (2));
-
-               when 4 =>
-                  Put
-                    (File, "%if" & Get_Text (Data, Tree, Children (2)) & " = " & Get_Text (Data, Tree, Children (4)));
-                  Put_Comments (Node);
-
-               when 5 =>
-                  Put (File, "%end if");
-                  Put_Comments (Node);
-
-               when others =>
-                  raise SAL.Programmer_Error;
-               end case;
-            end;
-
-         when nonterminal_ID =>
-            declare
-               Children : constant Valid_Node_Index_Array := Tree.Children (Node);
-               Virtual  : constant Boolean                := Tree.Label (Children (1)) = Virtual_Identifier;
-               First    : Boolean                         := True;
-            begin
-               Put (File, Get_Text (Data, Tree, Children (1)));
-               Put_Comments (Children (1), Force_New_Line => True);
-
-               Put_RHS_List (Children (3), First, Virtual);
-
-               if Tree.Children (Children (4))'Length > 0 then
-                  if Virtual then
-                     Put_Line (File, "  ;");
-                  else
-                     Put (File, "  ;");
-                     Put_Comments (Children (4));
-                  end if;
-               end if;
-            end;
-
-         when wisitoken_accept_ID =>
-            Process_Node (Tree.Child (Node, 1));
-
-         when others =>
-            raise SAL.Not_Implemented with Image (Tree.ID (Node), Wisitoken_Grammar_Actions.Descriptor);
-         end case;
-      end Process_Node;
-   begin
-      Create (File, Out_File, File_Name);
-      Put_Line (File, ";;; generated from " & Data.Grammar_Lexer.File_Name & " -*- buffer-read-only:t -*-");
-      Put_Line (File, ";;;");
-
-      for Token of Data.Leading_Non_Grammar loop
-         Put (File, Data.Grammar_Lexer.Buffer_Text (Token.Byte_Region));
-      end loop;
-
-      Process_Node (Tree.Root);
-
-      Close (File);
-   exception
-   when E : SAL.Not_Implemented =>
-      Close (File);
-      Ada.Text_IO.Put_Line
-        (Ada.Text_IO.Standard_Error, "Print_Source not implemented: " & Ada.Exceptions.Exception_Message (E));
-   end Print_Source;
+      end case;
+   end Get_Text;
 
 end WisiToken_Grammar_Runtime;
 --  Local Variables:
---  ada-which-func-parse-size: 50000
+--  ada-case-strict: nil
 --  End:
diff --git a/wisitoken_grammar_runtime.ads b/wisitoken_grammar_runtime.ads
index a9de9505de..24fbe23a6b 100644
--- a/wisitoken_grammar_runtime.ads
+++ b/wisitoken_grammar_runtime.ads
@@ -1,8 +1,8 @@
 --  Abstract :
 --
---  Runtime utils for wisi_grammar.wy actions.
+--  Runtime utils for wisitoken_grammar.wy actions.
 --
---  Copyright (C) 2018 - 2020 Free Software Foundation, Inc.
+--  Copyright (C) 2018 - 2022 Free Software Foundation, Inc.
 --
 --  This library is free software;  you can redistribute it and/or modify it
 --  under terms of the  GNU General Public License  as published by the Free
@@ -18,12 +18,13 @@
 pragma License (Modified_GPL);
 
 with Ada.Containers;
+with SAL;
 with WisiToken.BNF;
-with WisiToken.Lexer;
 with WisiToken.Syntax_Trees;
 with Wisitoken_Grammar_Actions;
-with WisiToken.Syntax_Trees.LR_Utils;
 package WisiToken_Grammar_Runtime is
+   use all type WisiToken.Syntax_Trees.Node_Access;
+   use all type Wisitoken_Grammar_Actions.Token_Enum_ID;
 
    type Meta_Syntax is (Unknown, BNF_Syntax, EBNF_Syntax);
    --  Syntax used in grammar file.
@@ -32,14 +33,12 @@ package WisiToken_Grammar_Runtime is
 
    type User_Data_Type is new WisiToken.Syntax_Trees.User_Data_Type with
    record
-      Grammar_Lexer : WisiToken.Lexer.Handle; -- used to read the .wy file now.
-
       User_Lexer : WisiToken.BNF.Lexer_Type := WisiToken.BNF.None;
       --  Used to read the user language file, after user parser is generated;
       --  used now in '%if lexer' statements.
 
       User_Parser : WisiToken.BNF.Generate_Algorithm := WisiToken.BNF.None;
-      --  Used to generate the user parser; used now in '%if parser'
+      --  Used to read the user language file; used now in '%if parser'
       --  statements.
 
       Generate_Set : WisiToken.BNF.Generate_Set_Access;
@@ -47,51 +46,66 @@ package WisiToken_Grammar_Runtime is
 
       Phase : Action_Phase := Meta;
       --  Determines which actions Execute_Actions executes:
-      --  Meta  - meta declarations, like %meta_syntax, %generate
+      --  Meta  - meta declarations, like %meta_syntax, %if, %generate
       --  Other - everything else
 
-      Meta_Syntax      : WisiToken_Grammar_Runtime.Meta_Syntax := Unknown;
-      Terminals        : WisiToken.Base_Token_Array_Access_Constant;
-      Raw_Code         : WisiToken.BNF.Raw_Code;
-      Language_Params  : WisiToken.BNF.Language_Param_Type;
-      Tokens           : aliased WisiToken.BNF.Tokens;
-      Conflicts        : WisiToken.BNF.Conflict_Lists.List;
-      McKenzie_Recover : WisiToken.BNF.McKenzie_Recover_Param_Type;
+      EBNF_Ok : Boolean := False;
+      --  Set True when don't need to translate EBNF to BNF.
+
+      Meta_Syntax          : WisiToken_Grammar_Runtime.Meta_Syntax := Unknown;
+      Raw_Code             : WisiToken.BNF.Raw_Code;
+      Language_Params      : WisiToken.BNF.Language_Param_Type;
+      Tokens               : aliased WisiToken.BNF.Tokens;
 
-      Leading_Non_Grammar : WisiToken.Base_Token_Arrays.Vector;
-      --  leading blank lines and comments
+      Suppress : WisiToken.BNF.String_Pair_Lists.List;
+      --  Declaration name, warning label; suppress warnings.
 
-      Last_Terminal_Node : WisiToken.Node_Index := WisiToken.Invalid_Node_Index;
+      Conflicts            : WisiToken.BNF.Conflict_Lists.List;
+      McKenzie_Recover     : WisiToken.BNF.McKenzie_Recover_Param_Type;
+      Max_Parallel         : SAL.Base_Peek_Type                    := 15;
 
       Rule_Count   : Integer                   := 0;
       Action_Count : Integer                   := 0;
       Check_Count  : Integer                   := 0;
       Label_Count  : Ada.Containers.Count_Type := 0;
 
-      EBNF_Nodes : WisiToken.Syntax_Trees.Node_Sets.Vector;
-
       If_Lexer_Present  : Boolean := False;
       If_Parser_Present : Boolean := False;
       --  Set True by %if statements in Execute_Actions.
 
       Ignore_Lines : Boolean := False;
       --  An '%if' specified a different lexer, during Execute_Actions
+
+      Error_Reported : WisiToken.Syntax_Trees.Node_Sets.Set;
+      --  Used with Syntax_Trees.Validate_Tree.
+
    end record;
+   type User_Data_Access is access all User_Data_Type;
 
-   type Augmented_Token is new WisiToken.Base_Token with
+   type Augmented is new WisiToken.Syntax_Trees.Base_Augmented with
    record
-      Non_Grammar : WisiToken.Base_Token_Arrays.Vector;
+      EBNF : Boolean := False;
+
+      Auto_Token_Labels : Boolean := False;
+      --  Valid in an RHS node; True when token labels are generated by
+      --  Translate_EBNF_To_BNF
+
+      Edited_Token_List : Boolean := False;
+      --  Valid in an RHS node; matches Wisitoken.BNF RHS.Edited_Token_List
    end record;
-   type Augmented_Token_Access is access all Augmented_Token;
+   type Augmented_Access is access all Augmented;
+   type Augmented_Access_Constant is access constant Augmented;
 
-   function Image (Item : in WisiToken.Base_Token_Class_Access) return String
-     is (WisiToken.Image (Augmented_Token_Access (Item).Non_Grammar, 
Wisitoken_Grammar_Actions.Descriptor));
+   function Image (Item : in 
WisiToken.Syntax_Trees.Augmented_Class_Access_Constant) return String
+   is (Augmented_Access_Constant (Item).EBNF'Image & " " &
+         Augmented_Access_Constant (Item).Auto_Token_Labels'Image & " " &
+         Augmented_Access_Constant (Item).Edited_Token_List'Image);
 
    overriding
-   procedure Set_Lexer_Terminals
-     (User_Data : in out User_Data_Type;
-      Lexer     : in     WisiToken.Lexer.Handle;
-      Terminals : in     WisiToken.Base_Token_Array_Access_Constant);
+   function Copy_Augmented
+     (User_Data : in User_Data_Type;
+      Augmented : in WisiToken.Syntax_Trees.Augmented_Class_Access)
+     return WisiToken.Syntax_Trees.Augmented_Class_Access;
 
    overriding procedure Reset (Data : in out User_Data_Type);
 
@@ -100,67 +114,71 @@ package WisiToken_Grammar_Runtime is
      (Data : in out User_Data_Type;
       Tree : in WisiToken.Syntax_Trees.Tree'Class);
 
-   overriding
-   procedure Lexer_To_Augmented
-     (Data  : in out          User_Data_Type;
-      Tree  : in out          WisiToken.Syntax_Trees.Tree'Class;
-      Token : in              WisiToken.Base_Token;
-      Lexer : not null access WisiToken.Lexer.Instance'Class);
+   function Get_Lexer_Set
+     (User_Data     : in out User_Data_Type;
+      Tree          : in out WisiToken.Syntax_Trees.Tree;
+      Node          : in     WisiToken.Syntax_Trees.Valid_Node_Access)
+     return WisiToken.BNF.Lexer_Set
+   with Pre => To_Token_Enum (Tree.ID (Node)) in IDENTIFIER_ID | IDENTIFIER_BAR_list_ID;
+
+   function Get_Generate_Algorithm_Set
+     (User_Data : in out User_Data_Type;
+      Tree      : in out WisiToken.Syntax_Trees.Tree;
+      Node      : in     WisiToken.Syntax_Trees.Valid_Node_Access)
+     return WisiToken.BNF.Generate_Algorithm_Set
+   with Pre => To_Token_Enum (Tree.ID (Node)) in IDENTIFIER_ID | IDENTIFIER_BAR_list_ID;
 
    procedure Start_If
      (User_Data : in out WisiToken.Syntax_Trees.User_Data_Type'Class;
-      Tree      : in     WisiToken.Syntax_Trees.Tree;
-      Tokens    : in     WisiToken.Valid_Node_Index_Array);
+      Tree      : in out WisiToken.Syntax_Trees.Tree;
+      Nonterm   : in     WisiToken.Syntax_Trees.Valid_Node_Access);
 
    procedure End_If (User_Data : in out WisiToken.Syntax_Trees.User_Data_Type'Class);
 
    procedure Add_Declaration
      (User_Data : in out WisiToken.Syntax_Trees.User_Data_Type'Class;
-      Tree      : in     WisiToken.Syntax_Trees.Tree;
-      Tokens    : in     WisiToken.Valid_Node_Index_Array);
+      Tree      : in out WisiToken.Syntax_Trees.Tree;
+      Nonterm   : in     WisiToken.Syntax_Trees.Valid_Node_Access);
 
    procedure Add_Nonterminal
      (User_Data : in out WisiToken.Syntax_Trees.User_Data_Type'Class;
-      Tree      : in     WisiToken.Syntax_Trees.Tree;
-      Tokens    : in     WisiToken.Valid_Node_Index_Array);
-
-   function Image_Grammar_Action (Action : in WisiToken.Syntax_Trees.Semantic_Action) return String;
-   --  For Syntax_Trees.Print_Tree.
+      Tree      : in out WisiToken.Syntax_Trees.Tree;
+      Nonterm   : in     WisiToken.Syntax_Trees.Valid_Node_Access);
 
    procedure Check_EBNF
      (User_Data : in out WisiToken.Syntax_Trees.User_Data_Type'Class;
       Tree      : in     WisiToken.Syntax_Trees.Tree;
-      Tokens    : in     WisiToken.Valid_Node_Index_Array;
+      Nonterm   : in     WisiToken.Syntax_Trees.Valid_Node_Access;
       Token     : in     WisiToken.Positive_Index_Type);
 
-   procedure Raise_Programmer_Error
-     (Label : in String;
-      Data  : in User_Data_Type;
-      Tree  : in WisiToken.Syntax_Trees.Tree;
-      Node  : in WisiToken.Node_Index);
-   pragma No_Return (Raise_Programmer_Error);
-
-   function Find_Declaration
-     (Data : in     User_Data_Type;
-      Tree : in out WisiToken.Syntax_Trees.Tree;
-      Name : in     String)
-     return WisiToken.Node_Index;
-   --  Return the node that declares Name, Invalid_Node_Index if none.
-   --  The node is either a declaration or a nonterminal.
-
-   procedure Translate_EBNF_To_BNF
-     (Tree : in out WisiToken.Syntax_Trees.Tree;
-      Data : in out User_Data_Type);
-   --  Process EBNF nonterms, adding new nonterms as needed, resulting in
-   --  a BNF tree.
+   ----------
+   --  Visible for WisiToken_Grammar_Editing
+
+   function Get_Text
+     (Virtual_Identifiers : in WisiToken.BNF.String_Arrays.Vector;
+      Tree                : in WisiToken.Syntax_Trees.Tree;
+      Tree_Index          : in WisiToken.Syntax_Trees.Node_Access;
+      Strip_Quotes        : in Boolean := False)
+     return String;
+   --  If Tree_Index = Invalid_Node_Access, returns "<deleted child>".
+   function Get_Text
+     (Data         : in User_Data_Type;
+      Tree         : in WisiToken.Syntax_Trees.Tree;
+      Tree_Index   : in WisiToken.Syntax_Trees.Valid_Node_Access;
+      Strip_Quotes : in Boolean := False)
+     return String;
+   --  Return source text for Tree_Index.
    --
-   --  Generator.LR.*_Generate requires a BNF grammar.
-
-   procedure Print_Source
-     (File_Name : in String;
-      Tree      : in WisiToken.Syntax_Trees.Tree;
-      Data      : in User_Data_Type);
-   --  Print the wisitoken grammar source represented by Tree, Terminals
-   --  to a new file File_Name.
+   --  This fetches each token separately, without the non-grammar text.
+
+   function Get_Item_Text
+     (Data         : in User_Data_Type;
+      Tree         : in WisiToken.Syntax_Trees.Tree;
+      Node         : in WisiToken.Syntax_Trees.Valid_Node_Access;
+      Strip_Quotes : in Boolean := False)
+     return String
+   with Pre => Tree.Is_Nonterm (Node);
+   --  Find first descendant of Node that has rhs_item_ID, return source
+   --  text for it.
 
 end WisiToken_Grammar_Runtime;


