918 lines
22 KiB
C#
918 lines
22 KiB
C#
using System.Collections;
|
|
using System.Collections.Generic;
|
|
using System.Text.RegularExpressions;
|
|
using System.Text;
|
|
|
|
using System.Globalization;
|
|
using Godot;
|
|
using System;
|
|
|
|
namespace Rokojori;
|
|
|
|
public abstract class ASTNode
|
|
{
|
|
Parser _parser;
|
|
|
|
// Lazily resolved parser, fetched once from the owning ASTFileRoot.
public Parser parser
{
    get
    {
        if ( _parser == null )
        {
            // The file root is either this node itself or an ancestor found via the walker.
            // NOTE(review): if no ASTFileRoot exists on the parent chain, root is null and
            // the GetParser() call below throws a NullReferenceException — confirm intended.
            var root = this is ASTFileRoot ? (ASTFileRoot) this :
            ( ASTWalker.instance.GetParentWithType<ASTFileRoot>( this ) as ASTFileRoot );

            _parser = root.GetParser();
        }

        return _parser;
    }
}
|
|
|
|
public ASTNode parent;
|
|
public List<ASTNode> children = [];
|
|
|
|
|
|
// Maps a possibly negative index to an absolute child index;
// negative values count back from the end of the children list.
int ComputeIndex( int index )
{
    if ( index >= 0 )
    {
        return index;
    }

    return children.Count + index;
}
|
|
|
|
|
|
/// <summary>
/// Renders this subtree as an indented, one-line-per-node debug string.
/// Tokens show their matched text; other nodes show their child count.
/// </summary>
public string CreateDebugTreeInfo()
{
    var walker = ASTWalker.instance;

    // Walk nodes in document order until the walker's end marker for this subtree.
    var end = walker.IterationEndOf( this );

    var it = this;

    // Memoized depth lookup so each parent chain is walked only once.
    var depthMap = new Dictionary<ASTNode,int>();

    Func<ASTNode,int> GetDepth = null;

    GetDepth = n =>
    {
        if ( depthMap.ContainsKey( n ) )
        {
            return depthMap[ n ];
        }

        if ( n.parent == null )
        {
            return 0;
        }

        var parentDepth = GetDepth( n.parent );
        var ownDepth = parentDepth + 1;

        depthMap[ n ] = ownDepth;

        return ownDepth;
    };

    var lines = new List<string>();

    while ( it != end )
    {
        var indent = " ".Repeat( GetDepth( it ) );
        var info = indent + "-- [#" + it.childIndex + "] " + it.GetType();

        if ( it is Token tk )
        {
            // Tokens: append the raw matched text.
            info += "'" + tk.match + "'";
        }
        else
        {
            // Inner nodes: append the number of children.
            info += "(" + it.children.Count + ")";
        }

        lines.Add( info );

        it = walker.NextNode( it );
    }

    return lines.Join( "\n" );
}
|
|
|
|
/// <summary>
/// Collects the Token children between start and end (by their child indices).
/// When filter is true, ignorable tokens (whitespace etc.) are dropped.
/// Returns null when start/end are not direct children of this node.
/// </summary>
public List<Token> GrabTokenRange( Token start, Token end, bool filter )
{
    if ( start.parent != this || end.parent != this )
    {
        RJLog.Error( "Tokens are not children of this", GetType().Name, "start:", start, "end:", end );
        return null;
    }

    // Fix: the original computed these locals and then ignored them, calling the
    // O(n) childIndex lookup (List.IndexOf) a second time for the Range call.
    var startIndex = start.childIndex;
    var endIndex = end.childIndex;

    var unfiltered = children.Range( startIndex, endIndex );
    var filtered = unfiltered.Filter( n => n is Token && ( ! filter || ! n.IsIgnoreToken() ) );

    return filtered.Map( f => f as Token );
}
|
|
|
|
/// <summary>
/// Re-parents the child range [start, start + length) under merger and puts
/// merger in its place, preserving the overall token order of the tree.
/// </summary>
public void MergeChildren( int start, int length, ASTNode merger )
{
    merger.parent = this;
    merger.children = children.Sub( start, length );
    merger.children.ForEach( c => c.parent = merger );

    // Replace the moved range with the single merger node.
    children.ReplaceRange( start, length, [ merger ] );
}
|
|
|
|
/// <summary>Merges the children strictly between indexStart and indexEnd into a new T node.</summary>
public T MergeInner<T>( int indexStart, int indexEnd ) where T:ASTNode,new()
{
    var innerStart = indexStart + 1;
    var innerLength = indexEnd - innerStart;

    return MergeChildrenWith<T>( innerStart, innerLength );
}

/// <summary>Merges the children from indexStart through indexEnd (both inclusive) into a new T node.</summary>
public T MergeOuter<T>( int indexStart, int indexEnd ) where T:ASTNode,new()
{
    var outerLength = ( indexEnd - indexStart ) + 1;

    return MergeChildrenWith<T>( indexStart, outerLength );
}
|
|
|
|
/// <summary>
/// Node-based variant of MergeOuter: merges from nodeStart through nodeEnd
/// (inclusive) into a new T. Returns null when the nodes are not children of
/// this node or nodeEnd cannot be found at/after nodeStart.
/// </summary>
public T MergeOuter<T>( ASTNode nodeStart, ASTNode nodeEnd ) where T:ASTNode,new()
{
    if ( nodeStart.parent != this || nodeEnd.parent != this )
    {
        RJLog.Error( "Nodes are not child of this node:", this,
            "start:", nodeStart, "start is child:", nodeStart.parent == this,
            "end:", nodeEnd, "end is child:", nodeEnd.parent == this
        );
        return null;
    }

    var startIndex = nodeStart.childIndex;
    var endIndex = children.FindIndex( startIndex, c => c == nodeEnd );

    // Fix: guard unresolved indices (present in the MergeInner node overload but
    // missing here); a -1 endIndex would otherwise yield a negative merge length.
    if ( startIndex == -1 || endIndex == -1 )
    {
        return null;
    }

    return MergeOuter<T>( startIndex, endIndex );
}
|
|
|
|
/// <summary>
/// Node-based variant of MergeInner: merges the children strictly between
/// nodeStart and nodeEnd into a new T. Returns null when the nodes are not
/// children of this node or cannot be located.
/// </summary>
public T MergeInner<T>( ASTNode nodeStart, ASTNode nodeEnd ) where T:ASTNode,new()
{
    if ( nodeStart.parent != this || nodeEnd.parent != this )
    {
        RJLog.Error( "Nodes are not child of this node:", this, "start:", nodeStart, "end:", nodeEnd );
        return null;
    }

    var startIndex = nodeStart.childIndex;
    var endIndex = children.FindIndex( startIndex, c => c == nodeEnd );

    // Cleanup: removed leftover unconditional debug logging that fired on every
    // merge; siblings (e.g. the MergeOuter node overload) do not log on success.
    if ( startIndex == -1 || endIndex == -1 )
    {
        return null;
    }

    return MergeInner<T>( startIndex, endIndex );
}
|
|
|
|
// Position of this node in its parent's children, or -1 for a detached root.
// NOTE(review): backed by List.IndexOf, so each access is O(n).
public int childIndex => parent == null ? -1 : parent.children.IndexOf( this );

// Distance from the end of the parent's child list, or -1 for a detached root.
public int reverseIndexOffset => parent == null ? -1 :
( ( parent.children.Count - 1 ) - childIndex );
|
|
|
|
/// <summary>
/// Concatenated matched text: a token's own match, otherwise the matches of
/// all direct token children joined together.
/// </summary>
public string CombinedMatch()
{
    if ( this is Token ownToken )
    {
        return ownToken.match;
    }

    var childTokens = children.FilterType<ASTNode,Token>();
    var matches = childTokens.Map( t => t.match );

    return matches.Join( "" );
}
|
|
|
|
/// <summary>
/// Grows this node leftwards: moves the siblings from previousSibling up to
/// (but excluding) this node out of the shared parent and prepends them to
/// this node's own children.
/// </summary>
public void ExpandToPrevious( ASTNode previousSibling )
{
    if ( parent == null || previousSibling == null || parent != previousSibling.parent )
    {
        RJLog.Error( "Invalid sibling", previousSibling?.GetType().Name ?? "null" );
        return;
    }

    var start = previousSibling.childIndex;
    var length = childIndex - start;

    var beforeCount = children.Count;

    // Detach the sibling range from the parent...
    var range = parent.children.Sub( start, length );
    parent.children.RemoveRange( start, length );

    // ...and adopt it at the front of this node's children.
    range.ForEach( r => r.parent = this );
    children.Prepend( range );

    RJLog.Log( "Expanded from", beforeCount, "to", children.Count, "( " + length + " )" );
}
|
|
|
|
// Shared tree-walking helper used for ancestor and iteration queries.
public ASTWalker walker => Singleton<ASTWalker>.Get();

/// <summary>Closest ancestor of type T, as resolved by the walker.</summary>
public T GetParentWithType<T>()
{
    return walker.GetParentWithType<T>( this );
}
|
|
|
|
/// <summary>Creates a new T and merges the child range [start, start + length) into it.</summary>
public T MergeChildrenWith<T>( int start, int length ) where T:ASTNode, new()
{
    var merger = new T();

    MergeChildren( start, length, merger );

    return merger;
}
|
|
|
|
/// <summary>
/// Linear scan over children from offset to end (both bounds inclusive;
/// negative values count back from the end of the list). Returns the first
/// index whose child satisfies the evaluator, or -1. Direction is controlled
/// by forward.
/// </summary>
public int FindIndex( System.Func<ASTNode,bool> evaluator, int offset = 0, int end = -1, bool forward = true )
{
    offset = ComputeIndex( offset );
    end = ComputeIndex( end );

    var step = forward ? 1 : -1;

    // Turn the inclusive end bound into an exclusive loop bound.
    end += step;

    for ( var i = offset; i != end; i += step )
    {
        var candidate = children[ i ];

        if ( evaluator( candidate ) )
        {
            return i;
        }
    }

    return -1;
}
|
|
|
|
/// <summary>First child from offset satisfying the evaluator, or null.</summary>
public ASTNode FindNode( System.Func<ASTNode,bool> evaluator, int offset = 0 )
{
    var foundIndex = FindIndex( evaluator, offset );

    if ( foundIndex == -1 )
    {
        return null;
    }

    return children[ foundIndex ];
}
|
|
|
|
/// <summary>
/// Walks backwards from just before closeIndex to find the bracket opener that
/// balances the closer at closeIndex, honoring nesting. Returns -1 when the
/// brackets are unbalanced.
/// </summary>
public int ReverseFindBracketOpenerIndex( string open, string close, int closeIndex )
{
    // Depth 1 accounts for the closer at closeIndex itself.
    var depth = 1;

    for ( var i = closeIndex - 1; i >= 0; i-- )
    {
        var child = children[ i ];

        if ( child.IsToken( LexerMatcherLibrary.BracketMatcher, open ) )
        {
            depth--;

            if ( depth == 0 )
            {
                return i;
            }
        }
        else if ( child.IsToken( LexerMatcherLibrary.BracketMatcher, close ) )
        {
            depth++;
        }
    }

    return -1;
}
|
|
|
|
/// <summary>
/// Walks forward from just after openIndex to find the bracket closer that
/// balances the opener at openIndex, honoring nesting. Returns -1 when the
/// brackets are unbalanced.
/// </summary>
public int FindBracketCloserIndex( string open, string close, int openIndex )
{
    // Depth 1 accounts for the opener at openIndex itself.
    var depth = 1;

    for ( var i = openIndex + 1; i < children.Count; i++ )
    {
        var child = children[ i ];

        if ( child.IsToken( LexerMatcherLibrary.BracketMatcher, close ) )
        {
            depth--;

            if ( depth == 0 )
            {
                return i;
            }
        }
        else if ( child.IsToken( LexerMatcherLibrary.BracketMatcher, open ) )
        {
            depth++;
        }
    }

    return -1;
}
|
|
|
|
/// <summary>
/// Collects the child indices of top-level separator tokens strictly between
/// start and end (children from start + 1 up to end - 1 are scanned).
/// Separators nested inside bracket pairs are skipped; by default the pairs
/// &lt;&gt;, [], () and {} are tracked. Custom pairs can be passed via
/// blockPredicates as a flat list: opener, closer, opener, closer, ...
/// </summary>
public List<int> GetSeparatorIndices( string separator, int start, int end, List<TokenPredicateData> blockPredicates = null, Lexer lexer = null )
{
    lexer = lexer ?? new CSLexer();

    var hasBlockPredicated = blockPredicates != null;

    var separatorPredicate = TokenPredicateData.Lexed( lexer, separator );

    if ( ! hasBlockPredicated )
    {
        // Default bracket pairs; even slots open a block, odd slots close one.
        blockPredicates = new List<TokenPredicateData>();

        blockPredicates.Add( TokenPredicateData.Lexed( lexer, "<" ) );
        blockPredicates.Add( TokenPredicateData.Lexed( lexer, ">" ) );

        blockPredicates.Add( TokenPredicateData.Lexed( lexer, "[" ) );
        blockPredicates.Add( TokenPredicateData.Lexed( lexer, "]" ) );

        blockPredicates.Add( TokenPredicateData.Lexed( lexer, "(" ) );
        blockPredicates.Add( TokenPredicateData.Lexed( lexer, ")" ) );

        blockPredicates.Add( TokenPredicateData.Lexed( lexer, "{" ) );
        blockPredicates.Add( TokenPredicateData.Lexed( lexer, "}" ) );
    }

    // One nesting counter per bracket pair.
    var bcCounter = new List<int>();

    for ( int i = 0; i < blockPredicates.Count; i+= 2 )
    {
        bcCounter.Add( 0 );
    }

    var inBlock = false;

    var indices = new List<int>();

    for ( int i = start + 1; i < end; i++ )
    {
        if ( children[ i ].IsIgnoreToken() )
        {
            continue;
        }

        // Only separators outside any bracket pair count.
        if ( ! inBlock && separatorPredicate.Matches( children[ i ] ) )
        {
            indices.Add( i );
            continue;
        }

        var bcIndex = blockPredicates.FindIndex( bc => bc.Matches( children[ i ] ) );

        if ( bcIndex == -1 )
        {
            continue;
        }

        // Even predicate slots open a block (+1), odd slots close one (-1).
        var index = bcIndex / 2;
        var isUp = index * 2 == bcIndex;

        var change = isUp ? 1 : -1;

        bcCounter[ index ] += change;

        // Inside a block while the counters sum to non-zero.
        // NOTE(review): a stray closer drives its counter negative and could
        // cancel another pair's positive counter in this sum — assumes the
        // scanned range is bracket-balanced; confirm with callers.
        var sum = 0; bcCounter.ForEach( b => sum += b );
        inBlock = sum != 0;
    }

    return indices;
}
|
|
|
|
/// <summary>
/// Splits the children at top-level separator tokens and replaces this node's
/// child list with one new T node per segment; each T adopts the segment's
/// children (the separator tokens themselves are dropped). The optional
/// callback is invoked once per created node afterwards.
/// Throws when a segment's boundaries are invalid (e.g. an empty segment).
/// </summary>
public void CreateSeperatedList<T>( string separator, Action<T> callback = null ) where T:ASTNode, new()
{
    var indices = GetSeparatorIndices( separator, -1, children.Count );

    // Add virtual separators before the first and after the last child, so
    // every segment lies strictly between two separator indices.
    indices.Insert( 0, -1 );
    indices.Add( children.Count );

    RJLog.Log( "Parameters Indices:", indices.Count, CombinedMatch() );

    var nodes = new List<ASTNode>();

    for ( int i = 0; i < indices.Count - 1; i++ )
    {
        // Segment between two separators, both bounds inclusive.
        var start = indices[ i ] + 1;
        var end = indices[ i + 1 ] - 1;

        var t = new T();

        RJLog.Log( "Parameters Grabbing Indices:", start, end );

        if ( start > end || start < 0 || end < 0 || start >= children.Count || end >= children.Count )
        {
            RJLog.Error( "Invalid seperation indices", this, CombinedMatch() );
            throw new Exception();
        }

        if ( start == end )
        {
            // Single-child segment.
            t.children = [ children[ start ] ];
        }
        else
        {
            // NOTE(review): Range appears to be inclusive of both bounds here —
            // confirm against the list extension's definition.
            t.children = children.Range( start, end );
        }

        t.children.ForEach( c => c.parent = t );
        nodes.Add( t );
        t.parent = this;
    }

    children = nodes;

    if ( callback != null )
    {
        children.ForEach( n => callback( n as T ) );
    }
}
|
|
|
|
|
|
/// <summary>Token form of FindBracketCloserIndex; null when unbalanced.</summary>
public Token FindBracketCloser( string open, string close, int openIndex )
{
    var closerIndex = FindBracketCloserIndex( open, close, openIndex );

    if ( closerIndex == -1 )
    {
        return null;
    }

    return children[ closerIndex ] as Token;
}

/// <summary>Token form of ReverseFindBracketOpenerIndex; null when unbalanced.</summary>
public Token ReverseFindBracketOpener( string open, string close, int closeIndex )
{
    var openerIndex = ReverseFindBracketOpenerIndex( open, close, closeIndex );

    if ( openerIndex == -1 )
    {
        return null;
    }

    return children[ openerIndex ] as Token;
}
|
|
|
|
/// <summary>True when this node is a Token matching matcher (and, if given, the match text).</summary>
public bool IsToken( LexerMatcher matcher, string match = null )
{
    var token = this as Token;

    return token != null && token.Is( matcher, match );
}

/// <summary>True when this node is a Token of the given type (and, if given, the match text).</summary>
public bool IsToken( string type, string match = null )
{
    var token = this as Token;

    return token != null && token.Is( type, match );
}

/// <summary>True when this node is a Token whose lexer event matches any of the matchers.</summary>
public bool IsAnyTokenOf( LexerMatcher[] matchers )
{
    var token = this as Token;

    return token != null && token.lexerEvent.IsAnyOf( matchers );
}

/// <summary>True when this node is a Token matched by any of the predicates.</summary>
public bool IsAnyTokenOf( List<TokenPredicateData> predicates )
{
    if ( ! ( this is Token ) )
    {
        return false;
    }

    return predicates.Exists( p => p.Matches( this ) );
}

/// <summary>True for tokens the parser treats as ignorable (whitespace etc.).</summary>
public bool IsIgnoreToken()
{
    return IsAnyTokenOf( LexerMatcherLibrary.Ignore );
}
|
|
|
|
/// <summary>
/// Moves index by offset logical steps, where each step skips ignorable
/// tokens (negative offsets step backwards). May yield -1 when running off
/// either end of the child list.
/// </summary>
public int IndexOffset( int index, int offset )
{
    var current = index;

    // Only one of these loops runs, depending on the sign of offset.
    for ( var remaining = offset; remaining < 0; remaining++ )
    {
        current = PreviousIndex( current );
    }

    for ( var remaining = offset; remaining > 0; remaining-- )
    {
        current = NextIndex( current );
    }

    return current;
}
|
|
|
|
/// <summary>Index of the closest non-ignorable child before index, or -1.</summary>
public int PreviousIndex( int index )
{
    for ( var i = index - 1; i >= 0; i-- )
    {
        if ( ! children[ i ].IsIgnoreToken() )
        {
            return i;
        }
    }

    return -1;
}

/// <summary>Closest non-ignorable child before index, or null.</summary>
public ASTNode PreviousNode( int index )
{
    return IndexToChildNode( PreviousIndex( index ) );
}
|
|
|
|
/// <summary>Index of the closest non-ignorable child after index, or -1.</summary>
public int NextIndex( int index )
{
    for ( var i = index + 1; i < children.Count; i++ )
    {
        if ( ! children[ i ].IsIgnoreToken() )
        {
            return i;
        }
    }

    return -1;
}

/// <summary>Closest non-ignorable child after index, or null.</summary>
public ASTNode NextNode( int index )
{
    return IndexToChildNode( NextIndex( index ) );
}

// Maps -1 to null, anything else to the child at that index.
ASTNode IndexToChildNode( int index )
{
    if ( index == -1 )
    {
        return null;
    }

    return children[ index ];
}
|
|
|
|
/// <summary>First Token child from offset matching the matcher.</summary>
public Token FindToken( LexerMatcher matcher, int offset = 0 )
{
    return FindToken( matcher, null, offset );
}

/// <summary>First Token child from offset matching the predicate's type and match text.</summary>
public Token FindToken( TokenPredicateData predicateData, int offset = 0 )
{
    return FindToken( predicateData.type, predicateData.match, offset );
}

/// <summary>First Token child from offset with the given type and match text.</summary>
public Token FindToken( string type, string match, int offset = 0 )
{
    var node = FindNode( n => n.IsToken( type, match ), offset );

    return node as Token;
}

/// <summary>First Token child from offset matching the matcher and match text.</summary>
public Token FindToken( LexerMatcher matcher, string match, int offset = 0 )
{
    var node = FindNode( n => n.IsToken( matcher, match ), offset );

    return node as Token;
}
|
|
|
|
/// <summary>Index of the first token within [offset, end] matching matcher and match text.</summary>
public int FindTokenIndex( LexerMatcher matcher, string match, int offset = 0, int end = -1, bool forward = true )
{
    return FindIndex( node => node.IsToken( matcher, match ), offset, end, forward );
}

/// <summary>Index of the first token within [offset, end] with the given type and match text.</summary>
public int FindTokenIndex( string type, string match, int offset = 0, int end = -1, bool forward = true )
{
    return FindIndex( node => node.IsToken( type, match ), offset, end, forward );
}

/// <summary>Index of the first token within [offset, end] matching matcher (any match text).</summary>
public int FindTokenIndex( LexerMatcher matcher, int offset = 0, int end = -1, bool forward = true )
{
    return FindIndex( node => node.IsToken( matcher, null ), offset, end, forward );
}
|
|
|
|
/// <summary>Backward search from the end (skipping reverseOffset children) for a token matching matcher.</summary>
public int ReverseFindTokenIndex( LexerMatcher matcher, int reverseOffset = 0 )
{
    var startOffset = -( 1 + reverseOffset );

    return FindTokenIndex( matcher, startOffset, 0, false );
}

/// <summary>Backward search from the end for a token matching the predicate's type and match text.</summary>
public int ReverseFindTokenIndex( TokenPredicateData predicateData, int reverseOffset = 0 )
{
    var startOffset = -( 1 + reverseOffset );

    return FindTokenIndex( predicateData.type, predicateData.match, startOffset, 0, false );
}

/// <summary>Token form of the backward search; null when nothing matches.</summary>
public Token ReverseFindToken( TokenPredicateData predicateData, int reverseOffset = 0 )
{
    var foundIndex = ReverseFindTokenIndex( predicateData, reverseOffset );

    if ( foundIndex == -1 )
    {
        return null;
    }

    return children[ foundIndex ] as Token;
}
|
|
|
|
/// <summary>All non-ignorable Token children within [offset, end].</summary>
public List<Token> GetFilteredTokens( int offset = 0, int end = -1 )
{
    var result = new List<Token>();

    IterateTokens( result.Add, offset, end );

    return result;
}
|
|
|
|
|
|
|
|
/// <summary>
/// Invokes action for every child within [offset, end] (inclusive bounds;
/// negative values count back from the end) that is of type T and passes the
/// optional filter. Direction is controlled by forward.
/// </summary>
public void Iterate<T>( Action<T> action, Predicate<T> filter = null, int offset = 0, int end = -1, bool forward = true )
{
    offset = ComputeIndex( offset );
    end = ComputeIndex( end );

    var step = forward ? 1 : -1;

    // Turn the inclusive end bound into an exclusive loop bound.
    end += step;

    for ( var i = offset; i != end; i += step )
    {
        if ( ! ( children[ i ] is T typed ) )
        {
            continue;
        }

        if ( filter != null && ! filter( typed ) )
        {
            continue;
        }

        action( typed );
    }
}
|
|
|
|
/// <summary>
/// Invokes action for every Token child within [start, end], skipping tokens
/// that match ignoreMatchers (defaults to LexerMatcherLibrary.Ignore).
/// Direction is controlled by forward.
/// </summary>
public void IterateTokens( Action<Token> action, int start = 0, int end = -1, LexerMatcher[] ignoreMatchers = null, bool forward = true )
{
    start = ComputeIndex( start );
    end = ComputeIndex( end );

    ignoreMatchers = ignoreMatchers ?? LexerMatcherLibrary.Ignore;

    // Fix: the forward argument was hard-coded to true here, so callers
    // requesting backward iteration were silently iterated forwards.
    Iterate<Token>( action, ( t ) => ! t.IsAnyTokenOf( ignoreMatchers ), start, end, forward );
}
|
|
|
|
/// <summary>Concatenated match text of the non-ignorable tokens within [offset, end].</summary>
public string GetTokenMatches( int offset = 0, int end = -1, LexerMatcher[] ignoreMatchers = null )
{
    var builder = new StringBuilder();

    IterateTokens(
        token => builder.Append( token.match ),
        offset, end,
        ignoreMatchers,
        true
    );

    return builder.ToString();
}
|
|
|
|
/// <summary>
/// Scans children from offset towards end (inclusive bounds; negative values
/// count back from the end). Returns the index of the first child satisfying
/// searchPredicate; stops and returns -1 at the first child that is neither a
/// match nor accepted by ignore. Throws on an inverted range.
/// </summary>
public int SearchIndex( Predicate<ASTNode> searchPredicate, Predicate<ASTNode> ignore = null, int offset = 0, int end = -1, bool forward = true )
{
    offset = ComputeIndex( offset );
    end = ComputeIndex( end );

    var change = forward ? 1 : -1;
    end += change;

    if ( change > 0 && end < offset )
    {
        throw new System.Exception( "Invalid range, offset is higher than end: " + offset + " >> " + end );
    }
    else if ( change < 0 && end > offset )
    {
        throw new System.Exception( "Invalid range, offset is lower than end: " + offset + " >> " + end );
    }

    for ( int i = offset; i != end; i += change )
    {
        if ( searchPredicate( children[ i ] ) )
        {
            return i;
        }

        // Fix: ignore has a null default but was invoked unconditionally,
        // throwing a NullReferenceException whenever omitted. A null ignore
        // now means "nothing is skippable": the scan stops at the first
        // non-matching child.
        if ( ignore != null && ignore( children[ i ] ) )
        {
            continue;
        }

        return -1;
    }

    return -1;
}
|
|
|
|
/// <summary>
/// Index of the next token from offset matching matcher/match, allowing only
/// ignorable tokens in between; -1 when something else comes first.
/// </summary>
public int SearchFilteredTokenIndex( LexerMatcher matcher, string match, int offset = 0 )
{
    return SearchIndex(
        node => node.IsToken( matcher, match ),
        node => node.IsAnyTokenOf( LexerMatcherLibrary.Ignore ),
        offset, -1,
        true
    );
}

/// <summary>
/// Index of the next token from offset with the given type/match, allowing only
/// ignorable tokens in between; -1 when something else comes first.
/// </summary>
public int SearchFilteredTokenIndex( string type, string match, int offset = 0 )
{
    return SearchIndex(
        node => node.IsToken( type, match ),
        node => node.IsAnyTokenOf( LexerMatcherLibrary.Ignore ),
        offset, -1,
        true
    );
}

/// <summary>
/// Index of the next token from offset satisfying the predicate, allowing only
/// ignorable tokens in between; -1 when something else comes first.
/// </summary>
public int SearchFilteredTokenIndex( Predicate<Token> predicate, int offset = 0 )
{
    return SearchIndex(
        node => node is Token token && predicate( token ),
        node => node.IsAnyTokenOf( LexerMatcherLibrary.Ignore ),
        offset, -1,
        true
    );
}

/// <summary>Matcher-only overload; any match text is accepted.</summary>
public int SearchFilteredTokenIndex( LexerMatcher matcher, int offset = 0 )
{
    return SearchFilteredTokenIndex( matcher, null, offset );
}
|
|
|
|
/// <summary>
/// Matches sequence step by step over the filtered token stream starting at
/// offset. Returns the matched child indices, or null when any step fails.
/// </summary>
public List<int> SearchFilteredSequence( List<Predicate<Token>> sequence, int offset = 0 )
{
    var matchedIndices = new List<int>();

    foreach ( var step in sequence )
    {
        var stepIndex = SearchFilteredTokenIndex( step, offset );

        if ( stepIndex == -1 )
        {
            return null;
        }

        matchedIndices.Add( stepIndex );

        // Next step continues right after this match.
        offset = stepIndex + 1;
    }

    return matchedIndices;
}
|
|
|
|
/// <summary>
/// Matches an optional start sequence, then the repeat sequence as often as it
/// occurs (capped at 1000 repetitions), then an optional end sequence.
/// Returns all matched child indices in order, or null when the start or end
/// sections fail to match. Zero repetitions of the middle section is allowed.
/// </summary>
public List<int> SearchFilteredRepeatSequence( List<Predicate<Token>> start, List<Predicate<Token>> sequence, List<Predicate<Token>> end, int offset = 0 )
{
    var output = new List<int>();

    if ( start != null )
    {
        var startList = SearchFilteredSequence( start, offset );

        if ( startList == null )
        {
            // The start section is mandatory when supplied.
            return null;
        }

        output.AddRange( startList );

        // Continue scanning right after the last matched index.
        offset = output[ output.Count - 1 ] + 1;
    }

    if ( sequence.Count > 0 )
    {
        var repeatedNext = SearchFilteredSequence( sequence, offset );

        if ( repeatedNext == null )
        {
            // Zero repetitions is fine; fall through to the end section.
        }

        // Safety cap against runaway repetition.
        var maxSteps = 1000;
        var steps = 0;

        while ( repeatedNext != null && steps < maxSteps )
        {
            output.AddRange( repeatedNext );
            offset = output[ output.Count - 1 ] + 1;
            repeatedNext = SearchFilteredSequence( sequence, offset );

            steps ++;
        }
    }

    if ( end != null )
    {
        var endList = SearchFilteredSequence( end, offset );

        if ( endList == null )
        {
            // The end section is mandatory when supplied.
            return null;
        }

        output.AddRange( endList );
    }

    return output;
}
|
|
|
|
/// <summary>Token form of FindTriggerTokenIndex; null when no trigger is found.</summary>
public Token FindTriggerToken( int offset, List<TokenPredicateData> triggerPredicates, List<TokenPredicateData> blockPredicates )
{
    var triggerIndex = FindTriggerTokenIndex( offset, triggerPredicates, blockPredicates );

    if ( triggerIndex == -1 )
    {
        return null;
    }

    return children[ triggerIndex ] as Token;
}
|
|
|
|
// True when any bracket-type counter is positive, i.e. the scan position is
// nested inside at least one block.
static bool _IsInsideBlock( List<int> counters )
{
    return counters.Exists( c => c > 0 );
}
|
|
|
|
/// <summary>
/// Scans children from offset for the first token matching any trigger
/// predicate while not nested inside a block. blockPredicates is a flat
/// opener/closer list (even slots open, odd slots close); nesting is tracked
/// with one counter per pair. Returns the child index, or -1.
/// </summary>
public int FindTriggerTokenIndex( int offset, List<TokenPredicateData> triggerPredicates, List<TokenPredicateData> blockPredicates )
{
    // One nesting counter per opener/closer pair.
    var blockTypesCounter = new List<int>();
    var insideBlock = false;

    for ( int i = 0; i < blockPredicates.Count / 2; i++ )
    {
        blockTypesCounter.Add( 0 );
    }

    for ( int i = offset; i < children.Count; i++ )
    {
        var child = children[ i ];

        if ( child.IsIgnoreToken() )
        {
            continue;
        }

        // Triggers only fire at block depth zero.
        if ( ! insideBlock )
        {
            var triggerIndex = triggerPredicates.FindIndex( t => t.Matches( child ) );

            if ( triggerIndex != -1 )
            {
                return i;
            }
        }
        else
        {
            // Inside a block: only bracket bookkeeping below applies.
        }

        var blockIndex = blockPredicates.FindIndex( b => b.Matches( child ) );

        if ( blockIndex == -1 )
        {
            continue;
        }

        // Even predicate slot: opener (+1); odd slot: closer (-1).
        var typeIndex = blockIndex / 2;

        var change = typeIndex * 2 == blockIndex ? 1 : -1;

        blockTypesCounter[ typeIndex ] += change;

        insideBlock = _IsInsideBlock( blockTypesCounter );
    }

    return -1;
}
|
|
|
|
} |