/usr/share/ada/adainclude/opentoken/opentoken-token-enumerated-analyzer.ads is in libopentoken3-dev 4.0b-4.
This file is owned by root:root, with mode 0o644.
The actual contents of the file can be viewed below.
-------------------------------------------------------------------------------
--
-- Copyright (C) 2002, 2003, 2009 Stephe Leake
-- Copyright (C) 1999 FlightSafety International and Ted Dennison
--
-- This file is part of the OpenToken package.
--
-- The OpenToken package is free software; you can redistribute it
-- and/or modify it under the terms of the GNU General Public License
-- as published by the Free Software Foundation; either version 3, or
-- (at your option) any later version. The OpenToken package is
-- distributed in the hope that it will be useful, but WITHOUT ANY
-- WARRANTY; without even the implied warranty of MERCHANTABILITY or
-- FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
-- License for more details. You should have received a copy of the
-- GNU General Public License distributed with the OpenToken package;
-- see file GPL.txt. If not, write to the Free Software Foundation,
-- 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
--
-- As a special exception, if other files instantiate generics from
-- this unit, or you link this unit with other files to produce an
-- executable, this unit does not by itself cause the resulting
-- executable to be covered by the GNU General Public License. This
-- exception does not however invalidate any other reasons why the
-- executable file might be covered by the GNU Public License.
--
-- This software was originally developed by the following company,
-- and was released as open-source software as a service to the
-- community:
--
-- FlightSafety International Simulation Systems Division
-- Broken Arrow, OK USA 918-259-4000
--
-------------------------------------------------------------------------------
with Ada.Unchecked_Deallocation;
with OpenToken.Recognizer;
with OpenToken.Text_Feeder;
with OpenToken.Text_Feeder.Text_IO;
-----------------------------------------------------------------------------
-- This package implements a mostly full-strength tokenizer (or
-- lexical analyzer).
--
-- To use it, create a function to feed text strings into the
-- Analyzer. Create an enumerated type of all the tokens you want to
-- recognize. Instantiate this package with the function and the
-- enumerated type.
--
-- Next, define a token subclass for each token in Tokens. Then
-- create a Syntax which matches up the tokens to their appropriate
-- token class and pass it into Set_Syntax.
--
-- Once that is done, you may repeatedly call Get_Next to get tokens.
-----------------------------------------------------------------------------
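-- A minimal instantiation sketch (the names Example_ID, Master_Token
-- and Tokenizer are illustrative, not part of this package):
--
--    with OpenToken.Token.Enumerated.Analyzer;
--    ...
--    type Example_ID is (Whitespace_ID, Plus_ID, Int_ID, EOF_ID);
--
--    package Master_Token is new OpenToken.Token.Enumerated (Example_ID);
--    package Tokenizer is new Master_Token.Analyzer;
--
-- The declarations below (Syntax, Get, Initialize, Find_Next, ...)
-- are then available as Tokenizer.Syntax, Tokenizer.Get, and so on.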
generic
   Last_Terminal : in Token_ID := Token_ID'Last;
package OpenToken.Token.Enumerated.Analyzer is
subtype Terminal_ID is Token_ID range Token_ID'First .. Last_Terminal;
-- Descriptor for what an individual token in this language looks
-- like. Also provides storage for Lexeme and Recognizer from
-- recognized tokens. This is required by lookahead; the lexeme
-- and recognizer are only available when the token is recognized
-- in the input stream, not later when it is read from the
-- lookahead queue. Copies of the recognized token are pushed onto
-- the lookahead queue, after Create is called.
type Recognizable_Token is record
   Recognizer   : Recognizer_Handle;
   Token_Handle : Handle;
end record;
-- The syntax of a language, which is defined by the set of valid tokens.
type Syntax is array (Terminal_ID) of Recognizable_Token;
type Instance is new Source with private;
-- Need to revisit token definitions or raise Max_String_Length
Token_Too_Long : exception;
--------------------------------------------------------------------------
-- The Text Feeder is an object that has a function for returning
-- strings to the analyzer to process. The default (Input_Feeder)
-- reads from Ada.Text_IO.Current_Input.
--------------------------------------------------------------------------
type Text_Feeder_Ptr is access all OpenToken.Text_Feeder.Instance'Class;
Input_Feeder : aliased OpenToken.Text_Feeder.Text_IO.Instance;
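-- For example, to scan text held in a string rather than read from
-- Current_Input, a feeder object can be created and its access value
-- passed to Initialize or Set_Text_Feeder below. This sketch assumes
-- the OpenToken.Text_Feeder.String child package and its Set
-- procedure; check the feeder packages shipped with your version:
--
--    Feeder : aliased OpenToken.Text_Feeder.String.Instance;
--    ...
--    OpenToken.Text_Feeder.String.Set (Feeder, "1 + 2");
--
-- Feeder must be declared at library level (or use 'Unchecked_Access)
-- to satisfy the accessibility check on Feeder'Access.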
--------------------------------------------------------------------------
-- Return a new recognizable token, using the given token
-- values. This is a convenience routine for more easily creating
-- Syntaxes. It will dynamically allocate the memory for the
-- recognizer and token.
--------------------------------------------------------------------------
function Get
  (Recognizer : in OpenToken.Recognizer.Class;
   New_Token  : in OpenToken.Token.Enumerated.Class := Get)
  return Recognizable_Token;
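-- For example, a Syntax for the illustrative Example_ID above can be
-- built as an aggregate of Get calls, using recognizers from the
-- OpenToken.Recognizer child packages (check the recognizers shipped
-- with your version):
--
--    Example_Syntax : constant Tokenizer.Syntax :=
--      (Whitespace_ID => Tokenizer.Get
--         (OpenToken.Recognizer.Character_Set.Get
--            (OpenToken.Recognizer.Character_Set.Standard_Whitespace)),
--       Plus_ID       => Tokenizer.Get (OpenToken.Recognizer.Separator.Get ("+")),
--       Int_ID        => Tokenizer.Get (OpenToken.Recognizer.Integer.Get),
--       EOF_ID        => Tokenizer.Get (OpenToken.Recognizer.End_Of_File.Get));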
----------------------------------------------------------------------------
-- Return an Analyzer with the given syntax and text feeder.
----------------------------------------------------------------------------
function Initialize
  (Language_Syntax : in Syntax;
   Feeder          : in Text_Feeder_Ptr := Input_Feeder'Access)
  return Instance;

function Initialize
  (Language_Syntax : in Syntax;
   Default         : in Terminal_ID;
   Feeder          : in Text_Feeder_Ptr := Input_Feeder'Access)
  return Instance;
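-- Continuing the illustrative sketch, an analyzer reading from
-- Ada.Text_IO.Current_Input via the default feeder:
--
--    Analyzer : Tokenizer.Instance := Tokenizer.Initialize (Example_Syntax);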
----------------------------------------------------------------------
-- Return name of ID token in Analyzer.Syntax
----------------------------------------------------------------------
function Name (Analyzer : in Instance; ID : in Token_ID) return String;
--------------------------------------------------------------------
-- Reset Analyzer, to start finding tokens. This is appropriate
-- when the Feeder text has been changed.
--------------------------------------------------------------------
procedure Reset (Analyzer : in out Instance);
----------------------------------------------------------------------------
-- Set the Analyzer's syntax to the given value.
--
-- Due to the accessibility rules of Ada, you cannot create syntax
-- objects in which the component tokens are declared at a deeper
-- dynamic scope than the instantiation of this package using
-- 'access on the tokens. 'Unchecked_Access is safe to use as long
-- as the Analyzer does not have a longer lifetime than its
-- tokens.
--
-- Note that the Syntax structure contains pointers to
-- recognizers, which have dynamic state. Set_Syntax does a simple
-- copy of the array, not a deep copy of the recognizer objects.
-- Therefore this Analyzer will share those recognizers with any
-- other Analyzer using the same syntax, which can happen in a
-- multi-threaded system.
--
-- We could make Syntax Limited_Controlled and provide a deep copy
-- in Adjust. But that would significantly complicate creating a
-- syntax, and make it expensive to switch syntaxes during a parse
-- (as HTML_Lexer does).
--
----------------------------------------------------------------------
procedure Set_Syntax (Analyzer : in out Instance; Language_Syntax : in Syntax);
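-- For example, switching to an alternate (illustrative) syntax during
-- a parse, as HTML_Lexer does:
--
--    Tokenizer.Set_Syntax (Analyzer, Attribute_Syntax);
--
-- Any other analyzer initialized with the same Syntax object shares
-- the recognizers' dynamic state, per the note above.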
----------------------------------------------------------------------------
-- Set the analyzer's text feeder.
----------------------------------------------------------------------------
procedure Set_Text_Feeder (Analyzer : in out Instance; Feeder : in Text_Feeder_Ptr);
------------------------------------------------------------------------
-- True if Analyzer's internal buffer is empty, and
-- Analyzer.Text_Feeder reports End_Of_Text.
function End_Of_Text (Analyzer : in Instance) return Boolean;
------------------------------------------------------------------------
-- True if Analyzer's internal buffer is empty.
function End_Of_Buffered_Text (Analyzer : in Instance) return Boolean;
--------------------------------------------------------------------------
-- Discard text in Analyzer's internal buffer. Do this when a
-- parse error is encountered, and you want to start over.
--------------------------------------------------------------------------
procedure Discard_Buffered_Text (Analyzer : in out Instance);
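-- A sketch of recovery after a parse error, before resuming with
-- fresh feeder text:
--
--    if not Tokenizer.End_Of_Buffered_Text (Analyzer) then
--       Tokenizer.Discard_Buffered_Text (Analyzer);
--    end if;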
----------------------------------------------------------------------------
-- Set the analyzer's default token to the given ID.
--
-- If Find_Next can't find a matching token, it will set Token to
-- this token ID, instead of raising Syntax_Error. The Lexeme in
-- this situation will contain all the contiguous characters
-- that fail to match a token. In practice this will be much less
-- efficient than an "error" token that explicitly matches
-- unmatchable strings. But often those are quite difficult to
-- construct. The default token will be checked for legitimate
-- matches. If this is not the behavior you want, it would be best
-- to use a token that can't match any legitimate string (e.g.
-- Opentoken.Recognizer.Nothing).
--------------------------------------------------------------------------
procedure Set_Default
  (Analyzer : in out Instance;
   Default  : in     Terminal_ID);
--------------------------------------------------------------------------
-- Reset the analyzer to have *no* default token ID. If Find_Next
-- doesn't find a matching token, Syntax_Error will be raised.
--------------------------------------------------------------------------
procedure Unset_Default (Analyzer : in out Instance);
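-- For example, with an illustrative Error_ID reserved in the token
-- enumeration (ideally matched by Opentoken.Recognizer.Nothing, per
-- the note above):
--
--    Tokenizer.Set_Default (Analyzer, Error_ID);
--    ...  -- unmatched text now comes back as Error_ID tokens
--    Tokenizer.Unset_Default (Analyzer);  -- back to raising Syntax_Error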
--------------------------------------------------------------------------
-- Locate the next token.
--
-- The next token will be the token that matches the *longest*
-- sequence of characters before failing. Ties go to the token
-- with the smallest Terminal_Id.
--
-- Raises Syntax_Error if no token could be found (unless there is
-- a default token defined).
--------------------------------------------------------------------------
overriding procedure Find_Next
  (Analyzer   : in out Instance;
   Look_Ahead : in     Boolean := False);
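-- The basic scanning loop for the illustrative sketch above:
--
--    loop
--       Tokenizer.Find_Next (Analyzer);
--       exit when Tokenizer.ID (Analyzer) = EOF_ID;
--       Ada.Text_IO.Put_Line
--         (Tokenizer.Name (Analyzer, Tokenizer.ID (Analyzer)) &
--          " => " & Tokenizer.Lexeme (Analyzer));
--    end loop;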
type Queue_Mark is new Token.Queue_Mark with private;
overriding function Mark_Push_Back (Analyzer : in Instance) return Token.Queue_Mark'Class;
overriding procedure Push_Back (Analyzer : in out Instance; Mark : in Token.Queue_Mark'Class);
--------------------------------------------------------------------------
-- Returns the current text line at which processing will resume.
-- This is particularly useful for printing error messages when
-- syntax errors are detected.
--------------------------------------------------------------------------
function Line (Analyzer : in Instance) return Natural;
--------------------------------------------------------------------------
-- Returns the current text column at which processing will
-- resume. This is particularly useful for printing error messages
-- when syntax errors are detected.
--------------------------------------------------------------------------
function Column (Analyzer : in Instance) return Natural;
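-- For example, an error report using Line and Column (assuming
-- Syntax_Error from the root OpenToken package):
--
--    exception
--    when OpenToken.Syntax_Error =>
--       Ada.Text_IO.Put_Line
--         ("Syntax error at line" & Natural'Image (Tokenizer.Line (Analyzer)) &
--          ", column" & Natural'Image (Tokenizer.Column (Analyzer)));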
--------------------------------------------------------------------------
-- Returns True if the next token will be at the start of its text
-- line. The main purpose of this routine is to assist in writing
-- recognizers for tokens that must start a line.
--------------------------------------------------------------------------
function First_Column (Analyzer : in Instance) return Boolean;
--------------------------------------------------------------------------
-- Returns the column at which the next token starts on its
-- text line. The main purpose of this routine is to assist in
-- writing recognizers for tokens that must start on a specific
-- column.
--------------------------------------------------------------------------
function Next_Token_Column (Analyzer : in Instance) return Integer;
--------------------------------------------------------------------------
-- Returns the last token that was matched.
--------------------------------------------------------------------------
overriding function Get (Analyzer : in Instance) return OpenToken.Token.Class;
----------------------------------------------------------------------------
-- Returns the last token ID that was matched.
----------------------------------------------------------------------------
function ID (Analyzer : in Instance) return Terminal_ID;
overriding function Lexeme (Analyzer : in Instance) return String;
overriding function Last_Recognizer (Analyzer : in Instance) return Recognizer_Handle;
private
type Token_List_Node;
type Token_List_Node_Pointer is access Token_List_Node;
-- Visible for unit tests
type Token_List_Node is record
   Token_Handle : OpenToken.Token.Enumerated.Handle;
   Prev         : Token_List_Node_Pointer;
   Next         : Token_List_Node_Pointer;
end record;
procedure Free is new Ada.Unchecked_Deallocation (Token_List_Node, Token_List_Node_Pointer);
-- Put all the Analyzer's state information in here, so there can
-- be several Analyzers running at once.
type Instance is new Source with record

   -- User-settable attributes
   Syntax_List   : Syntax;
   Feeder        : Text_Feeder_Ptr := Input_Feeder'Access;
   Has_Default   : Boolean := False;
   Default_Token : Terminal_ID;

   -- User-gettable attributes
   Line                : Natural := 1;
   Column              : Natural := 1;
   Lexeme_Head         : Natural := 1;
   Lexeme_Tail         : Natural := 0;
   Last_Token          : Terminal_ID;
   Read_From_Lookahead : Boolean;

   -- Internal state information
   Buffer      : String (1 .. Max_String_Length);
   Buffer_Head : Natural := 1;
   Buffer_Tail : Natural := 0;
   Buffer_Size : Natural := 0;
   Next_Line   : Natural := 1;
   Next_Column : Natural := 1;

   Lookahead_Queue : Token_List_Node_Pointer; -- Read from here or text source when Look_Ahead is false
   Lookahead_Head  : Token_List_Node_Pointer; -- Read from here or text source when Look_Ahead is true
   Lookahead_Tail  : Token_List_Node_Pointer; -- Most recent token read from text source with Look_Ahead true
   Lookahead_Count : Integer;
   Max_Lookahead   : Integer;
end record;
type Queue_Mark is new Token.Queue_Mark with record
   Head : Token_List_Node_Pointer;
   Tail : Token_List_Node_Pointer;
end record;
end OpenToken.Token.Enumerated.Analyzer;