forked from phoedos/pmd
Antlr token manager is now responsible for filtering non-default tokens
@@ -71,4 +71,8 @@ public class AntlrToken implements GenericToken {
    public boolean isHidden() {
        return token.getChannel() == Lexer.HIDDEN;
    }

    public boolean isDefault() {
        return token.getChannel() == Lexer.DEFAULT_TOKEN_CHANNEL;
    }
}
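For context, the two new predicates map directly onto the ANTLR runtime's channel constants. Below is a minimal, self-contained sketch (not PMD code) that reproduces the same channel checks against an org.antlr.v4.runtime.CommonToken; the ChannelCheckSketch class and its sample tokens are made up for illustration.

import org.antlr.v4.runtime.CommonToken;
import org.antlr.v4.runtime.Lexer;

public class ChannelCheckSketch {
    public static void main(String[] args) {
        // A token parked on the hidden channel, e.g. whitespace or a comment.
        CommonToken hidden = new CommonToken(1, "/* comment */");
        hidden.setChannel(Lexer.HIDDEN);

        // A token left on the default channel, e.g. an identifier.
        CommonToken identifier = new CommonToken(2, "foo");
        identifier.setChannel(Lexer.DEFAULT_TOKEN_CHANNEL);

        // These are exactly the comparisons isHidden() and isDefault() perform.
        System.out.println(hidden.getChannel() == Lexer.HIDDEN);                    // true
        System.out.println(identifier.getChannel() == Lexer.DEFAULT_TOKEN_CHANNEL); // true
    }
}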
@@ -14,8 +14,6 @@ import net.sourceforge.pmd.lang.antlr.AntlrTokenManager;
 */
public class AntlrTokenFilter extends BaseTokenFilter<AntlrToken> {

    private boolean discardingHiddenTokens = false;

    /**
     * Creates a new AntlrTokenFilter
     * @param tokenManager The token manager from which to retrieve tokens to be filtered
@@ -28,18 +26,4 @@ public class AntlrTokenFilter extends BaseTokenFilter<AntlrToken> {
    protected boolean shouldStopProcessing(final AntlrToken currentToken) {
        return currentToken.getType() == EOF;
    }

    @Override
    protected void analyzeToken(final AntlrToken currentToken) {
        analyzeHiddenTokens(currentToken);
    }

    @Override
    protected boolean isLanguageSpecificDiscarding() {
        return super.isLanguageSpecificDiscarding() || discardingHiddenTokens;
    }

    private void analyzeHiddenTokens(final AntlrToken token) {
        discardingHiddenTokens = token.isHidden();
    }
}
@@ -61,6 +61,17 @@ public abstract class BaseTokenFilter<T extends GenericToken> implements TokenFilter {
        }
    }

    /**
     * Extension point for subclasses to analyze all tokens (before filtering)
     * and update internal status to decide on custom discard rules.
     *
     * @param currentToken The token to be analyzed
     * @see #isLanguageSpecificDiscarding()
     */
    protected void analyzeToken(final GenericToken currentToken) {
        // noop
    }

    /**
     * Extension point for subclasses to indicate tokens are to be filtered.
     *
@@ -78,13 +89,4 @@ public abstract class BaseTokenFilter<T extends GenericToken> implements TokenFilter {
     */
    protected abstract boolean shouldStopProcessing(T currentToken);

    /**
     * Extension point for subclasses to analyze all tokens (before filtering)
     * and update internal status to decide on custom discard rules.
     *
     * @param currentToken The token to be analyzed
     * @see #isLanguageSpecificDiscarding()
     */
    protected abstract void analyzeToken(T currentToken);

}
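Taken together, the two BaseTokenFilter hunks turn analyzeToken from an abstract obligation into an optional hook with a no-op default, which is why the JavaCC filter below can drop its empty override. The following is a hedged, standalone sketch of the resulting extension-point shape; BaseFilter, Tok and HiddenDiscardingFilter are simplified stand-ins invented for illustration, not the real PMD classes.

import java.util.Iterator;
import java.util.List;

public class AnalyzeTokenHookSketch {

    interface Tok { String image(); boolean hidden(); }

    // Simplified stand-in for BaseTokenFilter: analyzeToken() defaults to a no-op,
    // and whatever it records feeds isLanguageSpecificDiscarding().
    abstract static class BaseFilter<T extends Tok> {
        private final Iterator<T> tokens;
        BaseFilter(List<T> tokens) { this.tokens = tokens.iterator(); }

        T getNextAcceptedToken() {
            while (tokens.hasNext()) {
                T current = tokens.next();
                analyzeToken(current);                  // extension point
                if (!isLanguageSpecificDiscarding()) {
                    return current;
                }
            }
            return null;
        }

        protected void analyzeToken(T currentToken) { /* noop by default */ }
        protected boolean isLanguageSpecificDiscarding() { return false; }
    }

    // Hypothetical subclass that discards hidden tokens, the way the old AntlrTokenFilter did.
    static class HiddenDiscardingFilter extends BaseFilter<Tok> {
        private boolean discardingHidden;
        HiddenDiscardingFilter(List<Tok> tokens) { super(tokens); }

        @Override
        protected void analyzeToken(Tok currentToken) { discardingHidden = currentToken.hidden(); }

        @Override
        protected boolean isLanguageSpecificDiscarding() { return discardingHidden; }
    }

    public static void main(String[] args) {
        List<Tok> stream = List.of(
            tok("foo", false), tok("/* hidden */", true), tok("bar", false));
        HiddenDiscardingFilter filter = new HiddenDiscardingFilter(stream);
        for (Tok t = filter.getNextAcceptedToken(); t != null; t = filter.getNextAcceptedToken()) {
            System.out.println(t.image());              // prints "foo" then "bar"
        }
    }

    private static Tok tok(String image, boolean hidden) {
        return new Tok() {
            @Override public String image() { return image; }
            @Override public boolean hidden() { return hidden; }
        };
    }
}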
@@ -25,9 +25,4 @@ public class JavaCCTokenFilter extends BaseTokenFilter<GenericToken> {
    protected boolean shouldStopProcessing(final GenericToken currentToken) {
        return currentToken.getImage().isEmpty();
    }

    @Override
    protected void analyzeToken(final GenericToken currentToken) {
        // noop
    }
}
@@ -34,6 +34,14 @@ public class AntlrTokenManager implements TokenManager {

    @Override
    public Object getNextToken() {
        AntlrToken nextToken = getNextTokenFromAnyChannel();
        while (!nextToken.isDefault()) {
            nextToken = getNextTokenFromAnyChannel();
        }
        return nextToken;
    }

    private AntlrToken getNextTokenFromAnyChannel() {
        final AntlrToken previousComment = previousToken != null && previousToken.isHidden() ? previousToken : null;
        final AntlrToken currentToken = new AntlrToken(lexer.nextToken(), previousComment);
        previousToken = currentToken;
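This hunk is the behavioural core of the commit: getNextToken keeps pulling tokens and only hands back one that sits on the default channel, so hidden-channel tokens (whitespace, comments) never reach the CPD token filters, while a hidden token is still remembered as the previousComment attached to the next token. Here is a minimal standalone sketch of that skip loop; the Tok record and the sample stream are invented for illustration and are not PMD types.

import java.util.ArrayDeque;
import java.util.Deque;

public class DefaultChannelLoopSketch {

    // Invented stand-in for AntlrToken: only the channel flag matters here.
    record Tok(String image, boolean isDefault) { }

    private final Deque<Tok> stream = new ArrayDeque<>();

    DefaultChannelLoopSketch() {
        stream.add(new Tok("/* comment */", false)); // hidden channel
        stream.add(new Tok(" ", false));             // hidden channel
        stream.add(new Tok("foo", true));            // default channel
    }

    // Mirrors the loop in getNextToken(): skip until a default-channel token turns up.
    // (The real code relies on EOF being on the default channel; the null guard is only
    // needed because this sketch uses a finite hand-built stream.)
    Tok getNextToken() {
        Tok next = stream.poll();
        while (next != null && !next.isDefault()) {
            next = stream.poll();
        }
        return next;
    }

    public static void main(String[] args) {
        System.out.println(new DefaultChannelLoopSketch().getNextToken().image()); // prints "foo"
    }
}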
@@ -26,4 +26,6 @@ import (

func main() {
	fmt.Println(stringutil.Reverse("!selpmaxe oG ,olleH"))
}
}

/* Comment */