Antlr token manager is now responsible for filtering non-default tokens

Tomi De Lucca
2019-02-17 17:36:15 -03:00
parent 3cff4f3d22
commit decb969491
6 changed files with 26 additions and 31 deletions
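
In short: AntlrTokenFilter used to track hidden tokens itself (see the discardingHiddenTokens logic removed below); AntlrTokenManager.getNextToken() now skips every token that is not on the default channel, and BaseTokenFilter.analyzeToken() becomes an optional no-op hook instead of an abstract method, which lets JavaCCTokenFilter drop its empty override. Condensed from the AntlrTokenManager hunk further down, the new flow looks like this:

    @Override
    public Object getNextToken() {
        AntlrToken nextToken = getNextTokenFromAnyChannel();
        while (!nextToken.isDefault()) {       // skip comments/whitespace on non-default channels
            nextToken = getNextTokenFromAnyChannel();
        }
        return nextToken;
    }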

View File

@@ -71,4 +71,8 @@ public class AntlrToken implements GenericToken {
public boolean isHidden() {
return token.getChannel() == Lexer.HIDDEN;
}
public boolean isDefault() {
return token.getChannel() == Lexer.DEFAULT_TOKEN_CHANNEL;
}
}
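
For context (not introduced by this commit): both checks compare the token's channel against ANTLR's standard channel constants. In the ANTLR 4 Java runtime, Lexer.DEFAULT_TOKEN_CHANNEL equals Token.DEFAULT_CHANNEL, and Lexer.HIDDEN equals Token.HIDDEN_CHANNEL, the channel that grammars typically route whitespace and comments to via "-> channel(HIDDEN)". A small standalone sketch of the same comparisons (ChannelCheckExample is a made-up name):

    import org.antlr.v4.runtime.CommonToken;
    import org.antlr.v4.runtime.Lexer;
    import org.antlr.v4.runtime.Token;

    public class ChannelCheckExample {
        public static void main(String[] args) {
            // A token left on the default channel, as most grammar rules produce.
            CommonToken identifier = new CommonToken(Token.INVALID_TYPE, "foo");
            identifier.setChannel(Lexer.DEFAULT_TOKEN_CHANNEL);

            // A token routed to the hidden channel, as comment/whitespace rules usually are.
            CommonToken comment = new CommonToken(Token.INVALID_TYPE, "/* Comment */");
            comment.setChannel(Lexer.HIDDEN);

            // These are the comparisons isDefault() and isHidden() perform.
            System.out.println(identifier.getChannel() == Lexer.DEFAULT_TOKEN_CHANNEL); // true
            System.out.println(comment.getChannel() == Lexer.HIDDEN);                   // true
        }
    }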

View File

@@ -14,8 +14,6 @@ import net.sourceforge.pmd.lang.antlr.AntlrTokenManager;
*/
public class AntlrTokenFilter extends BaseTokenFilter<AntlrToken> {
private boolean discardingHiddenTokens = false;
/**
* Creates a new AntlrTokenFilter
* @param tokenManager The token manager from which to retrieve tokens to be filtered
@@ -28,18 +26,4 @@ public class AntlrTokenFilter extends BaseTokenFilter<AntlrToken> {
protected boolean shouldStopProcessing(final AntlrToken currentToken) {
return currentToken.getType() == EOF;
}
@Override
protected void analyzeToken(final AntlrToken currentToken) {
analyzeHiddenTokens(currentToken);
}
@Override
protected boolean isLanguageSpecificDiscarding() {
return super.isLanguageSpecificDiscarding() || discardingHiddenTokens;
}
private void analyzeHiddenTokens(final AntlrToken token) {
discardingHiddenTokens = token.isHidden();
}
}
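
With channel filtering handled by the token manager, the hidden-token bookkeeping removed above becomes unnecessary. After this commit the filter reduces to roughly the following sketch (reconstructed from the lines that remain in this diff; imports are elided, EOF is the ANTLR end-of-file token type, and the constructor body is assumed from the Javadoc rather than shown in the hunk):

    public class AntlrTokenFilter extends BaseTokenFilter<AntlrToken> {

        /**
         * Creates a new AntlrTokenFilter
         * @param tokenManager The token manager from which to retrieve tokens to be filtered
         */
        public AntlrTokenFilter(final AntlrTokenManager tokenManager) {
            super(tokenManager);
        }

        @Override
        protected boolean shouldStopProcessing(final AntlrToken currentToken) {
            return currentToken.getType() == EOF;
        }
    }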

View File

@@ -61,6 +61,17 @@ public abstract class BaseTokenFilter<T extends GenericToken> implements TokenFilter<T> {
}
}
/**
* Extension point for subclasses to analyze all tokens (before filtering)
* and update internal status to decide on custom discard rules.
*
* @param currentToken The token to be analyzed
* @see #isLanguageSpecificDiscarding()
*/
protected void analyzeToken(final GenericToken currentToken) {
// noop
}
/**
* Extension point for subclasses to indicate tokens are to be filtered.
*
@@ -78,13 +89,4 @@ public abstract class BaseTokenFilter<T extends GenericToken> implements TokenFilter<T> {
*/
protected abstract boolean shouldStopProcessing(T currentToken);
/**
* Extension point for subclasses to analyze all tokens (before filtering)
* and update internal status to decide on custom discard rules.
*
* @param currentToken The token to be analyzed
* @see #isLanguageSpecificDiscarding()
*/
protected abstract void analyzeToken(T currentToken);
}
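
Since analyzeToken() now has a no-op default, subclasses that do not need it (such as JavaCCTokenFilter below) can simply drop their empty overrides, while subclasses that do need it keep overriding it together with isLanguageSpecificDiscarding(). A minimal sketch of such a subclass, modeled on the pattern just removed from AntlrTokenFilter; the class name and the "#region" discard rule are purely illustrative, imports are omitted because the packages are not shown in this diff, and the constructor assumes BaseTokenFilter delegates to a TokenManager as the AntlrTokenFilter Javadoc suggests:

    public class RegionDiscardingTokenFilter extends BaseTokenFilter<GenericToken> {

        private boolean discarding = false;

        public RegionDiscardingTokenFilter(final TokenManager tokenManager) {
            super(tokenManager);
        }

        @Override
        protected void analyzeToken(final GenericToken currentToken) {
            // Called for every token before the discard decision; update internal state here.
            discarding = "#region".equals(currentToken.getImage());
        }

        @Override
        protected boolean isLanguageSpecificDiscarding() {
            return super.isLanguageSpecificDiscarding() || discarding;
        }

        @Override
        protected boolean shouldStopProcessing(final GenericToken currentToken) {
            return currentToken.getImage().isEmpty();
        }
    }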

View File

@@ -25,9 +25,4 @@ public class JavaCCTokenFilter extends BaseTokenFilter<GenericToken> {
protected boolean shouldStopProcessing(final GenericToken currentToken) {
return currentToken.getImage().isEmpty();
}
@Override
protected void analyzeToken(final GenericToken currentToken) {
// noop
}
}

View File

@@ -34,6 +34,14 @@ public class AntlrTokenManager implements TokenManager {
@Override
public Object getNextToken() {
AntlrToken nextToken = getNextTokenFromAnyChannel();
while (!nextToken.isDefault()) {
nextToken = getNextTokenFromAnyChannel();
}
return nextToken;
}
private AntlrToken getNextTokenFromAnyChannel() {
final AntlrToken previousComment = previousToken != null && previousToken.isHidden() ? previousToken : null;
final AntlrToken currentToken = new AntlrToken(lexer.nextToken(), previousComment);
previousToken = currentToken;
return currentToken;

View File

@@ -26,4 +26,6 @@ import (
func main() {
fmt.Println(stringutil.Reverse("!selpmaxe oG ,olleH"))
}
}
/* Comment */