// rokojori_action_library/Runtime/Text/Parsing/AST/Token.cs
// ( 208 lines, 6.2 KiB, C# )

using System.Collections;
using System.Collections.Generic;
using System.Text.RegularExpressions;
using System.Text;
using System.Globalization;
using Godot;
namespace Rokojori;
/// <summary>
/// AST leaf node wrapping a single <see cref="LexerEvent"/> produced by the lexer.
/// Exposes the token's type and matched text, creates predicates for token-sequence
/// matching, and builds view nodes for AST inspection.
/// </summary>
public class Token:ASTNode
{
    public LexerEvent lexerEvent;

    /// <summary>Raw text matched by the lexer for this token.</summary>
    public string match => lexerEvent.match;

    /// <summary>Matcher type name assigned by the lexer.</summary>
    public string type => lexerEvent.type;

    public override string ToString()
    {
        // Escape line breaks so the token always prints on a single log line.
        var matchInfo = match.Replace( "\n", "\\n" ).Replace( "\r", "\\r" );
        return $"Token{{ '{matchInfo}' ({type}) }}";
    }

    /// <summary>True when the token has the given type and, if given, the exact match text.</summary>
    public bool Is( string type, string match = null )
    {
        return lexerEvent.Is( type, match );
    }

    /// <summary>True when the token has the matcher's type and, if given, the exact match text.</summary>
    public bool Is( LexerMatcher matcher, string match = null )
    {
        return lexerEvent.Is( matcher.type, match );
    }

    /// <summary>Creates the view node that represents this token in the AST inspector,
    /// choosing the most specific view type for the token's category.</summary>
    public override ASTViewNode CreateViewNode()
    {
        // Matchers whose tokens render as word-like entries (keywords, identifiers).
        LexerMatcher[] wordLike =
        [
            LexerMatcherLibrary.CwordMatcher,
            LexerMatcherLibrary.CFunctionMatcher,
            LexerMatcherLibrary.UsingMatcher,
            LexerMatcherLibrary.AccessModifierMatcher,
            LexerMatcherLibrary.CSAccessModifierMatcher,
            LexerMatcherLibrary.ClassMatcher,
            LexerMatcherLibrary.RecordMatcher,
            LexerMatcherLibrary.StructMatcher,
            LexerMatcherLibrary.InterfaceMatcher,
            LexerMatcherLibrary.EnumMatcher,
        ];

        // Matchers whose tokens represent literal/constant values.
        LexerMatcher[] constants =
        [
            LexerMatcherLibrary.BoolMatcher,
            LexerMatcherLibrary.SingleQuotedStringMatcher,
            LexerMatcherLibrary.DoubleQuotedStringMatcher,
            LexerMatcherLibrary.NumberMatcher,
            LexerMatcherLibrary.NullMatcher
        ];

        var viewNode = IsIgnoreToken() ? new ASTViewIgnoreToken() :
                       IsAnyTokenOf( wordLike ) ? new ASTViewWordToken() :
                       IsAnyTokenOf( constants ) ? new ASTViewConstantToken() :
                       IsToken( LexerMatcherLibrary.LogicMatcher ) ? new ASTViewLogicToken() :
                       IsToken( LexerMatcherLibrary.BracketMatcher ) ? new ASTViewBracketToken() :
                       IsToken( LexerMatcherLibrary.OperatorMatcher ) ? new ASTViewOperatorToken() :
                       new ASTViewToken();

        viewNode.astNodeReference = this;
        viewNode.astNodeType = GetType().Name;
        viewNode.tokenType = type;
        viewNode.match = match;
        viewNode.characterOffset = lexerEvent.offset;
        viewNode.Name = "Token " + type + " '" + match + "'";

        return viewNode;
    }

    // Cached file root, resolved lazily on first lineInfo access.
    ASTFileRoot _root;

    /// <summary>
    /// Human-readable "[ line:column ] | source line" description of the token's
    /// position, resolved through the owning <see cref="ASTFileRoot"/>.
    /// </summary>
    public string lineInfo
    {
        get
        {
            if ( _root == null )
            {
                _root = ASTWalker.instance.GetInParents( this, n => n is ASTFileRoot ) as ASTFileRoot;
            }

            // Guard: the token may not be attached to a file root (detached fragments);
            // previously this dereferenced null and crashed.
            if ( _root == null )
            {
                return "[ ? ] | (no ASTFileRoot in parents)";
            }

            var mapper   = _root.GetTextLinesMapper();
            var textLine = mapper.GetLine( lexerEvent.offset );
            var anchor   = mapper.GetAnchor( lexerEvent.offset, true );

            return "[ " + anchor.lineIndex + ":" + anchor.characterIndex + " ] | " + textLine.GetContent( _root.GetSource() );
        }
    }

    /// <summary>Creates a token for the given lexer event, optionally attached to a parent node.</summary>
    public static Token Create( LexerEvent le, ASTNode parent = null )
    {
        var t = new Token();
        t.lexerEvent = le;
        t.parent = parent;
        return t;
    }

    /// <summary>
    /// Creates a predicate that matches tokens of this token's type. For variable-match
    /// types (identifiers, numbers, ...) only the type is compared, not the exact text.
    /// </summary>
    public System.Predicate<Token> ToPredicate( Lexer lexer = null )
    {
        var match = GetPredicateMatch( lexer );
        System.Predicate<Token> predicate = ( tk ) => tk.Is( type, match );
        return predicate;
    }

    /// <summary>Same contract as <see cref="ToPredicate"/>, but returns predicate data instead of a delegate.</summary>
    public TokenPredicateData ToPredicateData( Lexer lexer = null )
    {
        return TokenPredicateData.Create( type, GetPredicateMatch( lexer ) );
    }

    // Shared by ToPredicate/ToPredicateData: returns the match text to compare against,
    // or null when the token's type has variable match content (type-only comparison).
    string GetPredicateMatch( Lexer lexer )
    {
        var isVariableMatch = lexer == null ? Lexer.IsVariableMatchType( type ) : lexer.HasVariableMatch( type );
        return isVariableMatch ? null : this.match;
    }

    /// <summary>Creates a predicate from a matcher directly, without variable-match handling.</summary>
    public static System.Predicate<Token> CreateRawPredicate( LexerMatcher matcher, string match = null )
    {
        System.Predicate<Token> predicate = ( tk ) => tk.Is( matcher, match );
        return predicate;
    }

    /// <summary>Lexes a sequence string into a list of token predicates (ignorable tokens dropped).</summary>
    public static List<System.Predicate<Token>> CreateLexedSequence( Lexer lexer, string sequence )
    {
        var filteredEvents = LexFilteredEvents( lexer, sequence );
        return filteredEvents.Map( le => Create( le ).ToPredicate( lexer ) );
    }

    /// <summary>Lexes a sequence string into a list of predicate data objects (ignorable tokens dropped).</summary>
    public static List<TokenPredicateData> CreateLexedSequenceData( Lexer lexer, string sequence )
    {
        var filteredEvents = LexFilteredEvents( lexer, sequence );
        return filteredEvents.Map( le => Create( le ).ToPredicateData( lexer ) );
    }

    // Shared lexing pipeline for the CreateLexedSequence* helpers: lexes the sequence,
    // fails loudly on lexer errors, grabs match texts and drops ignorable tokens.
    static List<LexerEvent> LexFilteredEvents( Lexer lexer, string sequence )
    {
        lexer.Clear();
        var lexedSequence = lexer.LexToList( sequence );

        if ( lexer.hasError )
        {
            throw new System.Exception( "Lexer has error at char " + lexedSequence.Last().offset + " >> '" + sequence + "'" );
        }

        // NOTE(review): the trailing event is removed before grabbing matches —
        // presumably an end marker emitted by the lexer; confirm against Lexer.LexToList.
        lexedSequence.Pop();
        lexer.GrabMatches( lexedSequence, sequence );

        return lexedSequence.Filter( le => ! le.IsAnyOf( LexerMatcherLibrary.Ignore ) );
    }
}