forked from phoedos/pmd
Minor fixes
@@ -22,8 +22,8 @@ public abstract class AntlrTokenizer implements Tokenizer {
     @Override
     public void tokenize(final SourceCode sourceCode, final Tokens tokenEntries) {
 
-        AntlrTokenManager tokenManager = getLexerForSource(sourceCode);
-        AntlrTokenFilter tokenFilter = getTokenFilter(tokenManager);
+        final AntlrTokenManager tokenManager = getLexerForSource(sourceCode);
+        final AntlrTokenFilter tokenFilter = getTokenFilter(tokenManager);
 
         try {
             AntlrToken currentToken = tokenFilter.getNextToken();
@@ -42,7 +42,7 @@ public abstract class AntlrTokenizer implements Tokenizer {
         }
     }
 
-    protected AntlrTokenFilter getTokenFilter(AntlrTokenManager tokenManager) {
+    protected AntlrTokenFilter getTokenFilter(final AntlrTokenManager tokenManager) {
         return new AntlrTokenFilter(tokenManager);
     }
 
@@ -51,7 +51,7 @@ public abstract class AntlrTokenizer implements Tokenizer {
         return CharStreams.fromString(buffer.toString());
     }
 
-    private void processToken(Tokens tokenEntries, String fileName, AntlrToken token) {
+    private void processToken(final Tokens tokenEntries, final String fileName, final AntlrToken token) {
         final TokenEntry tokenEntry = new TokenEntry(token.getImage(), fileName, token.getBeginLine());
         tokenEntries.add(tokenEntry);
     }
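The two AntlrTokenizer hunks above only mark locals and parameters as final; the tokenize flow itself is unchanged. For orientation, the pull-based loop that tokenize(...) drives over the filtered token stream looks roughly like the sketch below. The types here are simplified stand-ins, not PMD's AntlrTokenFilter and Tokens classes, and the "return null when exhausted" contract of getNextToken() is an assumption made for the sketch.

// Hedged sketch, not the committed code: simplified stand-in types that
// illustrate the loop AntlrTokenizer.tokenize(...) drives over the filter.
import java.util.ArrayList;
import java.util.List;

final class TokenizeLoopSketch {

    // Stand-in for AntlrToken: only the data processToken(...) reads.
    record Token(String image, int beginLine) { }

    // Stand-in for AntlrTokenFilter; returning null when exhausted is an
    // assumption made for this sketch.
    interface TokenFilter {
        Token getNextToken();
    }

    // Mirrors the shape of tokenize(...): pull tokens until the filter is
    // exhausted and record image plus begin line, as processToken(...) does
    // via TokenEntry.
    static List<String> tokenize(final TokenFilter tokenFilter, final String fileName) {
        final List<String> tokenEntries = new ArrayList<>();
        Token currentToken = tokenFilter.getNextToken();
        while (currentToken != null) {
            tokenEntries.add(fileName + ":" + currentToken.beginLine() + " " + currentToken.image());
            currentToken = tokenFilter.getNextToken();
        }
        return tokenEntries;
    }
}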
@@ -31,7 +31,6 @@ public class AntlrTokenFilter extends BaseTokenFilter<AntlrToken> {
 
     @Override
     protected void analyzeToken(final AntlrToken currentToken) {
-        super.analyzeToken(currentToken);
         analyzeHiddenTokens(currentToken);
     }
 
@@ -61,14 +61,6 @@ public abstract class BaseTokenFilter<T extends GenericToken> implements TokenFi
         }
     }
 
-    /**
-     * Extension point for subclasses to indicate when to stop filtering tokens.
-     *
-     * @param currentToken The token to be analyzed
-     * @return True if the token filter has finished consuming all tokens, false otherwise
-     */
-    protected abstract boolean shouldStopProcessing(T currentToken);
-
     /**
      * Extension point for subclasses to indicate tokens are to be filtered.
      *
@@ -78,6 +70,14 @@ public abstract class BaseTokenFilter<T extends GenericToken> implements TokenFi
         return false;
     }
 
+    /**
+     * Extension point for subclasses to indicate when to stop filtering tokens.
+     *
+     * @param currentToken The token to be analyzed
+     * @return True if the token filter has finished consuming all tokens, false otherwise
+     */
+    protected abstract boolean shouldStopProcessing(T currentToken);
+
     /**
      * Extension point for subclasses to analyze all tokens (before filtering)
      * and update internal status to decide on custom discard rules.
@@ -85,8 +85,6 @@ public abstract class BaseTokenFilter<T extends GenericToken> implements TokenFi
      * @param currentToken The token to be analyzed
      * @see #isLanguageSpecificDiscarding()
      */
-    protected void analyzeToken(final T currentToken) {
-        // noop
-    }
+    protected abstract void analyzeToken(T currentToken);
 
 }
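In BaseTokenFilter, analyzeToken(...) changes from a concrete no-op into an abstract method, and the shouldStopProcessing(...) javadoc and declaration simply move further down the class. Every concrete filter therefore has to supply its own analyzeToken implementation: the AntlrTokenFilter hunk above drops its super call, and the JavaCCTokenFilter hunk below adds an explicit no-op override. A minimal sketch of what a new subclass must provide after this change follows; the class name CustomTokenFilter is hypothetical, imports of the PMD types (BaseTokenFilter, GenericToken, TokenManager) are omitted, and the super(tokenManager) constructor wiring is assumed to match what the existing filters use.

// Hedged sketch, not part of the commit: a hypothetical CustomTokenFilter
// showing the two abstract extension points a BaseTokenFilter subclass must
// implement once analyzeToken(...) is abstract.
class CustomTokenFilter extends BaseTokenFilter<GenericToken> {

    CustomTokenFilter(final TokenManager tokenManager) {
        // Assumed constructor wiring, mirroring the existing filters.
        super(tokenManager);
    }

    @Override
    protected boolean shouldStopProcessing(final GenericToken currentToken) {
        // Stop once the token stream is exhausted (empty image, as JavaCCTokenFilter does below).
        return currentToken.getImage().isEmpty();
    }

    @Override
    protected void analyzeToken(final GenericToken currentToken) {
        // No language-specific state to track in this sketch.
    }
}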
@@ -25,4 +25,9 @@ public class JavaCCTokenFilter extends BaseTokenFilter<GenericToken> {
     protected boolean shouldStopProcessing(final GenericToken currentToken) {
         return currentToken.getImage().isEmpty();
     }
+
+    @Override
+    protected void analyzeToken(final GenericToken currentToken) {
+        // noop
+    }
 }