Merge branch '7.0.x' into java-grammar

Clément Fournier
2020-01-28 14:01:40 +01:00
255 changed files with 3039 additions and 3802 deletions

View File

@ -19,11 +19,23 @@ This is a {{ site.pmd.release_type }} release.
### New and noteworthy
#### Updated PMD Designer
This PMD release ships a new version of the pmd-designer.
For the changes, see [PMD Designer Changelog](https://github.com/pmd/pmd-designer/releases/tag/6.21.0).
### Fixed Issues
* java-errorprone
* [#2250](https://github.com/pmd/pmd/issues/2250): \[java] InvalidLogMessageFormat flags logging calls using a slf4j-Marker
### API Changes
### External Contributions
* [#2251](https://github.com/pmd/pmd/pull/2251): \[java] FP for InvalidLogMessageFormat when using slf4j-Markers - [Kris Scheibe](https://github.com/kris-scheibe)
* [#2253](https://github.com/pmd/pmd/pull/2253): \[modelica] Remove duplicated dependencies - [Piotrek Żygieło](https://github.com/pzygielo)
* [#2256](https://github.com/pmd/pmd/pull/2256): \[doc] Corrected XML attributes in release notes - [Maikel Steneker](https://github.com/maikelsteneker)
{% endtocmaker %}

View File

@ -66,9 +66,9 @@ If a CPD language doesn't provide these exact information, then these additional
Each `<file>` element in the XML format now has 3 new attributes:
* attribute `endLine`
* attribute `beginColumn` (if there is column information available)
* attribute `endColumn` (if there is column information available)
* attribute `endline`
* attribute `column` (if there is column information available)
* attribute `endcolumn` (if there is column information available)
#### Modified Rules

javacc-wrapper.xml (new file, 478 lines): file diff suppressed because it is too large

View File

@ -9,8 +9,8 @@
<property name="base-ast-package" value="net.sourceforge.pmd.lang.ast" />
<property name="base-ast-package.dir" value="${target}/net/sourceforge/pmd/lang/ast" />
<property name="target-package" value="${base-ast-package}" />
<property name="target-package.dir" value="${base-ast-package.dir}" />
<property name="target-package" value="${base-ast-package}.impl.javacc" />
<property name="target-package.dir" value="${base-ast-package.dir}/impl/javacc" />
<target name="alljavacc"
@ -57,7 +57,7 @@
<!-- Generate ASCII w/ Unicode Escapes CharStream implementation -->
<javacc usercharstream="false"
unicodeinput="false"
unicodeinput="true"
javaunicodeescape="true"
static="false"
target="${tmp-package.dir}/dummy.jj"
@ -74,71 +74,36 @@
<!-- Patch JavaCharStream -->
<replace file="${tmp-package.dir}/JavaCharStream.java"
token="JavaCharStream"
value="JavaCharStreamBase"/>
<replace file="${tmp-package.dir}/JavaCharStream.java"
token="class JavaCharStreamBase"
value="abstract class JavaCharStreamBase implements ${target-package}.CharStream" />
<replace file="${tmp-package.dir}/JavaCharStream.java"
token="char c;"
value="char c; beforeReadChar();" />
<replace file="${tmp-package.dir}/JavaCharStream.java"
token="/** Read a character. */"
value="protected void beforeReadChar() { }" />
<replace file="${tmp-package.dir}/JavaCharStream.java"
token="abstract class JavaCharStreamBase"
value="@Deprecated @net.sourceforge.pmd.annotation.InternalApi abstract class JavaCharStreamBase" />
<move overwrite="true"
file="${tmp-package.dir}/JavaCharStream.java"
tofile="${target-package.dir}/JavaCharStreamBase.java" />
<!-- Generate ASCII w/o Unicode Escapes CharStream implementation -->
<javacc usercharstream="false"
unicodeinput="false"
javaunicodeescape="false"
static="false"
target="${tmp-package.dir}/dummy.jj"
outputdirectory="${tmp-package.dir}"
javacchome="${javacc-home.path}" />
<replace file="${tmp-package.dir}/SimpleCharStream.java"
token="public class SimpleCharStream"
value="@Deprecated @net.sourceforge.pmd.annotation.InternalApi public class SimpleCharStream implements CharStream" />
<replace file="${tmp-package.dir}/TokenMgrError.java"
token="${target-package}"
value="${base-ast-package}" />
<replace file="${tmp-package.dir}/TokenMgrError.java"
token="extends Error"
value="extends RuntimeException" />
<replace file="${tmp-package.dir}/TokenMgrError.java"
token="static final int"
value="public static final int" />
<replace file="${tmp-package.dir}/TokenMgrError.java">
<replacetoken><![CDATA["Lexical error at line "]]></replacetoken>
<replacevalue>&quot;Lexical error in file &quot; + net.sourceforge.pmd.lang.ast.AbstractTokenManager.getFileName() + &quot; at line &quot;</replacevalue>
</replace>
<move overwrite="true"
file="${tmp-package.dir}/TokenMgrError.java"
tofile="${base-ast-package.dir}/TokenMgrError.java" />
<move overwrite="true"
todir="${target-package.dir}">
<fileset dir="${tmp-package.dir}">
<include name="SimpleCharStream.java" />
<include name="CharStream.java" />
</fileset>
</move>
<antcall target="patch-char-stream">
<param name="cs.prefix" value="Java" />
</antcall>
<delete dir="${tmp-package.dir}" />
</target>
<target name="patch-char-stream">
<replace file="${tmp-package.dir}/${cs.prefix}CharStream.java"
token="${cs.prefix}CharStream"
value="${cs.prefix}CharStreamBase"/>
<replace file="${tmp-package.dir}/${cs.prefix}CharStream.java"
token="class ${cs.prefix}CharStreamBase"
value="abstract class ${cs.prefix}CharStreamBase implements ${base-ast-package}.CharStream" />
<replace file="${tmp-package.dir}/${cs.prefix}CharStream.java"
token="/** Read a character. */"
value="protected boolean doEscape() { return true; }" />
<replace file="${tmp-package.dir}/${cs.prefix}CharStream.java"
token="if ((buffer[bufpos] = c = ReadByte()) == '\\')"
value="if ((buffer[bufpos] = c = ReadByte()) == '\\' &amp;&amp; doEscape())" />
<move overwrite="true"
file="${tmp-package.dir}/${cs.prefix}CharStream.java"
tofile="${target-package.dir}/${cs.prefix}CharStreamBase.java" />
</target>
</project>

View File

@ -40,9 +40,7 @@ public abstract class AntlrTokenizer implements Tokenizer {
} catch (final AntlrTokenManager.ANTLRSyntaxError err) {
// Wrap exceptions of the ANTLR tokenizer in a TokenMgrError, so they are correctly handled
// when CPD is executed with the '--skipLexicalErrors' command line option
throw new TokenMgrError("Lexical error in file " + tokenManager.getFileName() + " at line "
+ err.getLine() + ", column " + err.getColumn() + ". Encountered: " + err.getMessage(),
TokenMgrError.LEXICAL_ERROR);
throw new TokenMgrError(err.getLine(), err.getColumn(), tokenManager.getFileName(), err.getMessage(), null);
} finally {
tokenEntries.add(TokenEntry.getEOF());
}

View File

@ -55,7 +55,7 @@ public final class IteratorUtil {
return tmp.iterator();
}
public static <T, R> Iterator<R> flatMap(Iterator<? extends T> iter, Function<? super T, ? extends Iterator<? extends R>> f) {
public static <T, R> Iterator<R> flatMap(Iterator<? extends T> iter, Function<? super T, ? extends @Nullable Iterator<? extends R>> f) {
return new AbstractIterator<R>() {
private Iterator<? extends R> current = null;
@ -158,7 +158,7 @@ public final class IteratorUtil {
return filter(iter, seen::add);
}
public static <T> List<T> toList(Iterator<T> it) {
public static <T> List<T> toList(Iterator<? extends T> it) {
List<T> list = new ArrayList<>();
while (it.hasNext()) {
list.add(it.next());
@ -166,6 +166,17 @@ public final class IteratorUtil {
return list;
}
public static <T> List<@NonNull T> toNonNullList(Iterator<? extends @Nullable T> it) {
List<@NonNull T> list = new ArrayList<>();
while (it.hasNext()) {
T next = it.next();
if (next != null) {
list.add(next);
}
}
return list;
}
public static <T> Iterable<T> toIterable(final Iterator<T> it) {
return () -> it;
}
@ -180,7 +191,7 @@ public final class IteratorUtil {
return count;
}
public static <T> @Nullable T last(Iterator<T> iterator) {
public static <T> @Nullable T last(Iterator<? extends T> iterator) {
T next = null;
while (iterator.hasNext()) {
next = iterator.next();
@ -194,7 +205,7 @@ public final class IteratorUtil {
*
* @throws IllegalArgumentException If n is negative
*/
public static <T> @Nullable T getNth(Iterator<T> iterator, int n) {
public static <T> @Nullable T getNth(Iterator<? extends T> iterator, int n) {
advance(iterator, n);
return iterator.hasNext() ? iterator.next() : null;
}
@ -210,7 +221,7 @@ public final class IteratorUtil {
}
/** Limit the number of elements yielded by this iterator to the given number. */
public static <T> Iterator<T> take(Iterator<T> iterator, final int n) {
public static <T> Iterator<T> take(Iterator<? extends T> iterator, final int n) {
AssertionUtil.requireNonNegative("n", n);
if (n == 0) {
return Collections.emptyIterator();
@ -232,10 +243,10 @@ public final class IteratorUtil {
}
/** Produce an iterator whose first element is the nth element of the given source. */
public static <T> Iterator<T> drop(Iterator<T> source, final int n) {
public static <T> Iterator<T> drop(Iterator<? extends T> source, final int n) {
AssertionUtil.requireNonNegative("n", n);
if (n == 0) {
return source;
return (Iterator<T>) source;
}
return new AbstractIterator<T>() {

View File

@ -10,7 +10,15 @@ import net.sourceforge.pmd.lang.ast.Node;
import net.sourceforge.pmd.lang.ast.ParseException;
/**
* Common interface for calling tree-building parsers or source files.
* Produces an AST from a source file. Instances of this interface must
* be stateless (which makes them trivially threadsafe).
*
* TODO
* - Ideally ParserOptions would be an argument to ::parse
* - ::parse would also take some more parameters, eg an error collector
* - The reader + filename would be a TextDocument
* - Remove TokenManager from here. Only JavaCC implementations support that,
* and it's barely used.
*
* @author Pieter_Van_Raemdonck - Application Engineers NV/SA - www.ae.be
*/

View File

@ -0,0 +1,120 @@
/*
* BSD-style license; for more info see http://pmd.sourceforge.net/license.html
*/
package net.sourceforge.pmd.lang.ast;
import java.io.IOException;
import net.sourceforge.pmd.lang.ast.impl.javacc.JavaccTokenDocument;
/**
* PMD flavour of character streams used by JavaCC parsers.
*
* TODO for when all JavaCC languages are aligned:
* * rename methods to match decent naming conventions
* * move to impl.javacc package
*/
public interface CharStream {
/**
* Returns the next character from the input. After a {@link #backup(int)},
* some of the already read chars must be spit out again.
*
* @return The next character
*
* @throws IOException If the underlying char stream throws
*/
char readChar() throws IOException;
/**
* Calls {@link #readChar()} and returns its value, marking its position
* as the beginning of the next token. All characters must remain in
* the buffer between two successive calls to this method to implement
* backup correctly.
*/
char BeginToken() throws IOException; // SUPPRESS CHECKSTYLE we'll rename it later
/**
* Returns a string made up of characters from the token mark up to
* the current buffer position.
*/
String GetImage(); // SUPPRESS CHECKSTYLE we'll rename it later
/**
* Returns an array of characters that make up the suffix of length 'len' for
* the current token. This is used to build up the matched string
* for use in actions in the case of MORE. A simple and inefficient
* implementation of this is as follows :
*
* <pre>{@code
* String t = tokenImage();
* return t.substring(t.length() - len, t.length()).toCharArray();
* }</pre>
*
* @param len Length of the returned array
*
* @return The suffix
*
* @throws IndexOutOfBoundsException If len is greater than the length of the
* current token
*/
char[] GetSuffix(int len); // SUPPRESS CHECKSTYLE we'll rename it later
/**
* Pushes a given number of already read chars into the buffer.
* Subsequent calls to {@link #readChar()} will read those characters
* before proceeding to read the underlying char stream.
*
* <p>A lexer calls this method if it has already read some characters,
* but cannot use them to match a (longer) token. So, they will
* be used again as the prefix of the next token.
*
* @throws AssertionError If the requested amount is greater than the
* number of read chars
*/
void backup(int amount);
@Deprecated
int getBeginColumn();
@Deprecated
int getBeginLine();
/** Returns the column number of the last character for the current token. */
int getEndColumn();
/** Returns the line number of the last character for current token. */
int getEndLine();
// These methods are added by PMD
/**
* Returns the token document for the tokens being built. Having it
* here is the most convenient place for the time being.
*/
default JavaccTokenDocument getTokenDocument() {
return null; // for VelocityCharStream
}
/** Returns the start offset of the current token (in the original source), inclusive. */
default int getStartOffset() {
return -1;
}
/** Returns the end offset of the current token (in the original source), exclusive. */
default int getEndOffset() {
return -1;
}
}
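To illustrate the contract above, here is a minimal, hypothetical sketch of a hand-written scanner driving a CharStream. It is not part of this commit, and scanIdentifier is an assumed helper name.

static String scanIdentifier(CharStream cs) throws IOException {
    char c = cs.BeginToken();                     // mark the start of the next token
    if (!Character.isJavaIdentifierStart(c)) {
        cs.backup(1);                             // give the character back to the stream
        return null;
    }
    try {
        while (Character.isJavaIdentifierPart(c = cs.readChar())) {
            // keep consuming identifier characters
        }
        cs.backup(1);                             // last char read was not part of the identifier
    } catch (IOException eof) {
        // end of input: nothing left to back up
    }
    return cs.GetImage();                         // text from the token mark to the current position
}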

View File

@ -5,12 +5,15 @@
package net.sourceforge.pmd.lang.ast;
/**
* Represents a language-independent token such as constants, values language reserved keywords, or comments.
* Represents a language-independent token such as constants, values language reserved keywords, or comments.
*
* TODO make generic
*/
public interface GenericToken {
/**
* Obtain the next generic token according to the input stream which generated the instance of this token.
*
* @return the next generic token if it exists; null if it does not exist
*/
GenericToken getNext();
@ -18,6 +21,7 @@ public interface GenericToken {
/**
* Obtain a comment-type token which, according to the input stream which generated the instance of this token,
* precedes this instance token and succeeds the previous generic token (if there is any).
*
* @return the comment-type token if it exists; null if it does not exist
*/
GenericToken getPreviousComment();
@ -27,6 +31,7 @@ public interface GenericToken {
*/
String getImage();
// TODO these default implementations are here for compatibility because
// the functionality is only used in pmd-java for now, though it could
// be ported. I prefer doing this as changing all the GenericToken in
@ -47,25 +52,41 @@ public interface GenericToken {
/**
* Gets the line where the token's region begins
*
* @return a non-negative integer containing the begin line
*/
int getBeginLine();
/**
* Gets the line where the token's region ends
*
* @return a non-negative integer containing the end line
*/
int getEndLine();
/**
* Gets the column offset from the start of the begin line where the token's region begins
*
* @return a non-negative integer containing the begin column
*/
int getBeginColumn();
/**
* Gets the column offset from the start of the end line where the token's region ends
*
* @return a non-negative integer containing the end column
*/
int getEndColumn();
/**
* Returns true if this token is implicit, ie was inserted artificially
* and has a zero-length image.
*/
default boolean isImplicit() {
return false;
}
}
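A hypothetical usage sketch, not part of this commit, showing the navigation methods above: walking the regular token chain and printing the comment tokens attached to each token.

static void printComments(GenericToken firstToken) {
    for (GenericToken t = firstToken; t != null; t = t.getNext()) {
        // comment tokens are chained backwards from the token they precede
        for (GenericToken c = t.getPreviousComment(); c != null; c = c.getPreviousComment()) {
            System.out.println("line " + c.getBeginLine() + ": " + c.getImage());
        }
    }
}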

View File

@ -4,7 +4,6 @@
package net.sourceforge.pmd.lang.ast;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import javax.xml.parsers.DocumentBuilder;
@ -18,8 +17,8 @@ import org.jaxen.JaxenException;
import org.w3c.dom.Document;
import net.sourceforge.pmd.annotation.InternalApi;
import net.sourceforge.pmd.lang.ast.NodeStream.DescendantNodeStream;
import net.sourceforge.pmd.lang.ast.internal.StreamImpl;
import net.sourceforge.pmd.lang.ast.internal.TraversalUtils;
import net.sourceforge.pmd.lang.ast.xpath.Attribute;
import net.sourceforge.pmd.lang.ast.xpath.AttributeAxisIterator;
import net.sourceforge.pmd.lang.ast.xpath.DocumentNavigator;
@ -233,13 +232,20 @@ public interface Node {
}
/**
* Returns true if this node is considered a boundary by traversal methods. Traversal methods such as {@link
* #getFirstDescendantOfType(Class)} don't look past such boundaries by default, which is usually the expected thing
* to do. For example, in Java, lambdas and nested classes are considered find boundaries.
* Returns true if this node is considered a boundary by traversal
* methods. Traversal methods such as {@link #descendants()}
* don't look past such boundaries by default, which is usually the
* expected thing to do. For example, in Java, lambdas and nested
* classes are considered find boundaries.
*
* <p>Note: This attribute is deprecated for XPath queries. It is not useful
* for XPath queries and will be removed with PMD 7.0.0.
*
* @return True if this node is a find boundary
*
* @see DescendantNodeStream#crossFindBoundaries(boolean)
*/
@DeprecatedAttribute
default boolean isFindBoundary() {
@ -337,7 +343,7 @@ public interface Node {
*/
@Deprecated
default <T extends Node> void findDescendantsOfType(Class<T> targetType, List<T> results, boolean crossFindBoundaries) {
TraversalUtils.findDescendantsOfType(this, targetType, results, crossFindBoundaries);
descendants(targetType).crossFindBoundaries(crossFindBoundaries).forEach(results::add);
}
/**
@ -352,9 +358,7 @@ public interface Node {
* @return List of all matching descendants
*/
default <T extends Node> List<T> findDescendantsOfType(Class<T> targetType, boolean crossFindBoundaries) {
List<T> results = new ArrayList<>();
TraversalUtils.findDescendantsOfType(this, targetType, results, crossFindBoundaries);
return results;
return descendants(targetType).crossFindBoundaries(crossFindBoundaries).toList();
}
/**
@ -365,7 +369,7 @@ public interface Node {
* @see #getFirstDescendantOfType(Class) if traversal of the entire tree is needed.
*/
default <T extends Node> T getFirstChildOfType(Class<T> childType) {
return children().first(childType);
return children(childType).first();
}
@ -571,26 +575,26 @@ public interface Node {
/**
* Returns a node stream containing all the descendants
* of this node, in depth-first order.
* of this node. See {@link DescendantNodeStream} for details.
*
* @return A node stream of the descendants of this node
*
* @see NodeStream#descendants()
*/
default NodeStream<Node> descendants() {
default DescendantNodeStream<Node> descendants() {
return StreamImpl.descendants(this);
}
/**
* Returns a node stream containing this node, then all its
* descendants in depth-first order.
* descendants. See {@link DescendantNodeStream} for details.
*
* @return A node stream of the whole subtree topped by this node
*
* @see NodeStream#descendantsOrSelf()
*/
default NodeStream<Node> descendantsOrSelf() {
default DescendantNodeStream<Node> descendantsOrSelf() {
return StreamImpl.descendantsOrSelf(this);
}
@ -641,7 +645,8 @@ public interface Node {
/**
* Returns a {@linkplain NodeStream node stream} of the {@linkplain #descendants() descendants}
* of this node that are of the given type.
* of this node that are of the given type. See {@link DescendantNodeStream}
* for details.
*
* @param rClass Type of node the returned stream should contain
* @param <R> Type of node the returned stream should contain
@ -650,7 +655,7 @@ public interface Node {
*
* @see NodeStream#descendants(Class)
*/
default <R extends Node> NodeStream<R> descendants(Class<R> rClass) {
default <R extends Node> DescendantNodeStream<R> descendants(Class<R> rClass) {
return StreamImpl.descendants(this, rClass);
}
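A hypothetical sketch, not part of this commit, of the new DescendantNodeStream return type in use; it assumes java.util.List is imported and that a concrete node class is passed as the type parameter.

static <R extends Node> List<R> allDescendants(Node root, Class<R> type) {
    // explicitly cross find boundaries such as lambdas and nested classes
    return root.descendants(type)
               .crossFindBoundaries(true)
               .toList();
}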

View File

@ -4,21 +4,137 @@
package net.sourceforge.pmd.lang.ast;
import java.util.LinkedHashSet;
import java.util.Set;
import java.util.stream.Collectors;
import org.checkerframework.checker.nullness.qual.NonNull;
import org.checkerframework.checker.nullness.qual.Nullable;
import net.sourceforge.pmd.lang.ast.impl.javacc.JavaccToken;
import net.sourceforge.pmd.lang.ast.impl.javacc.JavaccTokenDocument;
import net.sourceforge.pmd.util.StringUtil;
public class ParseException extends RuntimeException {
/**
* This is the last token that has been consumed successfully. If
* this object has been created due to a parse error, the token
* following this token will (therefore) be the first error token.
*/
public final @Nullable GenericToken currentToken;
public ParseException() {
super();
this.currentToken = null;
}
public ParseException(String message) {
super(message);
this.currentToken = null;
}
public ParseException(Throwable cause) {
super(cause);
this.currentToken = null;
}
public ParseException(String message, Throwable cause) {
super(message, cause);
this.currentToken = null;
}
public ParseException(JavaccToken token, String message) {
super(message);
this.currentToken = token;
}
/**
* This constructor is called by Javacc.
*/
public ParseException(@NonNull JavaccToken currentTokenVal,
int[][] expectedTokenSequencesVal) {
super(makeMessage(currentTokenVal, expectedTokenSequencesVal));
currentToken = currentTokenVal;
}
/**
* It uses "currentToken" and "expectedTokenSequences" to generate a parse
* error message and returns it. If this object has been created
* due to a parse error, and you do not catch it (it gets thrown
* from the parser) the correct error message
* gets displayed.
*/
private static String makeMessage(@NonNull JavaccToken currentToken,
int[][] expectedTokenSequences) {
JavaccTokenDocument document = currentToken.getDocument();
String eol = System.lineSeparator();
Set<String> expectedBranches = new LinkedHashSet<>();
int maxSize = 0;
for (int[] expectedTokenSequence : expectedTokenSequences) {
StringBuilder expected = new StringBuilder();
if (maxSize < expectedTokenSequence.length) {
maxSize = expectedTokenSequence.length;
}
for (int i : expectedTokenSequence) {
expected.append(document.describeKind(i)).append(' ');
}
if (expectedTokenSequence[expectedTokenSequence.length - 1] != 0) {
expected.append("...");
}
expectedBranches.add(expected.toString());
}
String expected = expectedBranches.stream().collect(Collectors.joining(System.lineSeparator() + " "));
StringBuilder retval = new StringBuilder("Encountered ");
if (maxSize > 1) {
retval.append('[');
}
JavaccToken tok = currentToken.next;
for (int i = 0; i < maxSize; i++) {
if (i != 0) {
retval.append(' ');
}
if (tok.kind == 0) {
retval.append(document.describeKind(0));
break;
}
String kindStr = document.describeKind(tok.kind);
String image = StringUtil.escapeJava(tok.getImage());
retval.append(kindStr);
if (!isEnquotedVersion(kindStr, image)) {
// then it's an angle-braced name
retval.deleteCharAt(retval.length() - 1); // remove '>'
retval.append(": \"");
retval.append(image);
retval.append("\">");
}
tok = tok.next;
}
if (maxSize > 1) {
retval.append(']');
}
retval.append(" at line ").append(currentToken.next.getBeginLine()).append(", column ").append(currentToken.next.getBeginColumn());
retval.append('.').append(eol);
if (expectedTokenSequences.length == 1) {
retval.append("Was expecting:").append(eol).append(" ");
} else {
retval.append("Was expecting one of:").append(eol).append(" ");
}
retval.append(expected);
return retval.toString();
}
private static boolean isEnquotedVersion(String kindStr, String image) {
return kindStr.equals('"' + image + '"');
}
}
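The ParseException(JavaccToken, String) constructor added above lets a parser point at an offending token. A hypothetical sketch, not part of this commit; isRestrictedKeyword is an assumed helper.

void checkIdentifier(JavaccToken tok) {
    if (isRestrictedKeyword(tok.getImage())) {
        // the exception records the token, so callers can recover its position
        throw new ParseException(tok, "'" + tok.getImage() + "' may not be used as an identifier");
    }
}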

View File

@ -0,0 +1,120 @@
/*
* BSD-style license; for more info see http://pmd.sourceforge.net/license.html
*/
package net.sourceforge.pmd.lang.ast;
import org.checkerframework.checker.nullness.qual.Nullable;
import net.sourceforge.pmd.annotation.InternalApi;
import net.sourceforge.pmd.util.StringUtil;
/**
* An error thrown during lexical analysis of a file.
*/
public final class TokenMgrError extends RuntimeException {
/**
* @deprecated Will be removed when all modules are ported
*/
@Deprecated
public static final int LEXICAL_ERROR = 0;
/**
* @deprecated Will be removed when all modules are ported,
* see {@link #TokenMgrError(String, int)}
*/
@Deprecated
public static final int INVALID_LEXICAL_STATE = 1;
private final int line;
private final int column;
private final String filename;
public TokenMgrError(int line, int column, @Nullable String filename, String message, @Nullable Throwable cause) {
super(message, cause);
this.line = line;
this.column = column;
this.filename = filename;
}
public TokenMgrError(int line, int column, String message, @Nullable Throwable cause) {
this(line, column, null, message, cause);
}
/**
* @deprecated This is used by javacc but those usages are being replaced with an IllegalArgumentException
*/
@Deprecated
@SuppressWarnings("PMD.UnusedFormalParameter")
public TokenMgrError(String message, int errorCode) {
this(-1, -1, null, message, null);
}
/**
* Constructor called by JavaCC.
*/
@InternalApi
@SuppressWarnings("PMD.UnusedFormalParameter")
public TokenMgrError(boolean eofSeen, String lexStateName, int errorLine, int errorColumn, String errorAfter, char curChar) {
super(makeReason(eofSeen, lexStateName, errorAfter, curChar));
line = errorLine;
column = errorColumn;
filename = AbstractTokenManager.getFileName();
}
/**
* Constructor called by JavaCC.
*
* @deprecated The error code is useless, ported modules use the other constructor
*/
@Deprecated
@SuppressWarnings("PMD.UnusedFormalParameter")
public TokenMgrError(boolean eofSeen, int lexState, int errorLine, int errorColumn, String errorAfter, char curChar, int errorCode) {
super(makeReason(eofSeen, String.valueOf(lexState), errorAfter, curChar));
line = errorLine;
column = errorColumn;
filename = AbstractTokenManager.getFileName();
}
public int getLine() {
return line;
}
public int getColumn() {
return column;
}
public @Nullable String getFilename() {
return filename;
}
@Override
public String getMessage() {
String leader = filename != null ? "Lexical error in file " + filename : "Lexical error";
return leader + " at line " + line + ", column " + column + ". Encountered: " + super.getMessage();
}
/**
* Replace the file name of this error.
*
* @param filename New filename
*/
public TokenMgrError withFileName(String filename) {
return new TokenMgrError(this.line, this.column, filename, this.getMessage(), this.getCause());
}
private static String makeReason(boolean eofseen, String lexStateName, String errorAfter, char curChar) {
String message;
if (eofseen) {
message = "<EOF> ";
} else {
message = "\"" + StringUtil.escapeJava(String.valueOf(curChar)) + "\"" + " (" + (int) curChar + "), ";
}
message += "after : \"" + StringUtil.escapeJava(errorAfter) + "\" (in lexical state " + lexStateName + ")";
return message;
}
}
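A hypothetical sketch, not part of this commit, of how a caller might surface the new fields; TokenManager is PMD's token manager interface, and the wrapper method name is an assumption.

static Object nextTokenOrFail(TokenManager tokenManager, String fileName) {
    try {
        return tokenManager.getNextToken();
    } catch (TokenMgrError e) {
        // attach the file name; line and column are preserved by withFileName
        throw e.withFileName(fileName);
    }
}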

View File

@ -5,15 +5,15 @@
package net.sourceforge.pmd.lang.ast.impl;
import net.sourceforge.pmd.annotation.Experimental;
import net.sourceforge.pmd.lang.ast.GenericToken;
import net.sourceforge.pmd.lang.ast.SourceCodePositioner;
import net.sourceforge.pmd.util.StringUtil;
/**
* Maybe this can be used to eg double link tokens, provide an identity
* for them, idk.
* Token layer of a parsed file.
*/
@Experimental
public class TokenDocument {
public abstract class TokenDocument<T extends GenericToken> {
private final String fullText;
private final SourceCodePositioner positioner;
@ -28,7 +28,6 @@ public class TokenDocument {
return fullText;
}
public int lineNumberFromOffset(int offset) {
return positioner.lineNumberFromOffset(offset);
}
@ -37,4 +36,12 @@ public class TokenDocument {
return StringUtil.columnNumberAt(fullText, offsetInclusive);
}
/**
* Returns the first token of the token chain.
*
* @throws IllegalStateException If the document has not been parsed yet
*/
public abstract T getFirstToken();
}

View File

@ -6,8 +6,10 @@ package net.sourceforge.pmd.lang.ast.impl.javacc;
import net.sourceforge.pmd.annotation.Experimental;
import net.sourceforge.pmd.lang.ast.AbstractNode;
import net.sourceforge.pmd.lang.ast.GenericToken;
import net.sourceforge.pmd.lang.ast.Node;
import net.sourceforge.pmd.lang.ast.NodeStream;
import net.sourceforge.pmd.lang.ast.TextAvailableNode;
/**
* Base class for node produced by JJTree. JJTree specific functionality
@ -18,16 +20,40 @@ import net.sourceforge.pmd.lang.ast.NodeStream;
* unforeseeable ways. Don't use it directly, use the node interfaces.
*/
@Experimental
public abstract class AbstractJjtreeNode<N extends Node> extends AbstractNode {
public abstract class AbstractJjtreeNode<N extends Node> extends AbstractNode implements TextAvailableNode {
public AbstractJjtreeNode(int id) {
super(id);
}
public AbstractJjtreeNode(int id, int theBeginLine, int theEndLine, int theBeginColumn, int theEndColumn) {
super(id, theBeginLine, theEndLine, theBeginColumn, theEndColumn);
@Override
public CharSequence getText() {
String fullText = jjtGetFirstToken().document.getFullText();
return fullText.substring(getStartOffset(), getEndOffset());
}
@Override
public JavaccToken jjtGetFirstToken() {
return (JavaccToken) super.jjtGetFirstToken();
}
@Override
public JavaccToken jjtGetLastToken() {
return (JavaccToken) super.jjtGetLastToken();
}
// the super methods query line & column, which we want to avoid
@Override
public void jjtSetLastToken(GenericToken token) {
this.lastToken = token;
}
@Override
public void jjtSetFirstToken(GenericToken token) {
this.firstToken = token;
}
@Override
@SuppressWarnings("unchecked")
@ -41,10 +67,46 @@ public abstract class AbstractJjtreeNode<N extends Node> extends AbstractNode {
return (N) super.getParent();
}
@Override
@SuppressWarnings("unchecked")
public NodeStream<? extends N> children() {
return (NodeStream<N>) super.children();
}
@Override
public int getBeginLine() {
return firstToken.getBeginLine();
}
@Override
public int getBeginColumn() {
return firstToken.getBeginColumn();
}
@Override
public int getEndLine() {
return lastToken.getEndLine();
}
@Override
public int getEndColumn() {
return lastToken.getEndColumn();
}
/**
* This toString implementation is only meant for debugging purposes.
*/
@Override
public String toString() {
return "[" + getXPathNodeName() + ":" + getBeginLine() + ":" + getBeginColumn() + "]" + getText();
}
private int getStartOffset() {
return this.jjtGetFirstToken().getStartInDocument();
}
private int getEndOffset() {
return this.jjtGetLastToken().getEndInDocument();
}
}

View File

@ -11,11 +11,7 @@ import java.util.function.Function;
import org.apache.commons.io.IOUtils;
import net.sourceforge.pmd.lang.ast.CharStream;
import net.sourceforge.pmd.lang.ast.JavaCharStream;
import net.sourceforge.pmd.lang.ast.SimpleCharStream;
import net.sourceforge.pmd.lang.ast.impl.TokenDocument;
@SuppressWarnings("PMD.UnusedFormalParameter") // for later
public final class CharStreamFactory {
private CharStreamFactory() {
@ -26,29 +22,32 @@ public final class CharStreamFactory {
* A char stream that doesn't perform any escape translation.
*/
public static CharStream simpleCharStream(Reader input) {
return simpleCharStream(input, TokenDocument::new);
return simpleCharStream(input, JavaccTokenDocument::new);
}
/**
* A char stream that doesn't perform any escape translation.
*/
public static CharStream simpleCharStream(Reader input, Function<? super String, ? extends TokenDocument> documentMaker) {
return new SimpleCharStream(input);
public static CharStream simpleCharStream(Reader input, Function<? super String, ? extends JavaccTokenDocument> documentMaker) {
String source = toString(input);
JavaccTokenDocument document = documentMaker.apply(source);
return new SimpleCharStream(document);
}
/**
* A char stream that translates java unicode sequences.
*/
public static CharStream javaCharStream(Reader input) {
return javaCharStream(input, TokenDocument::new);
return javaCharStream(input, JavaccTokenDocument::new);
}
/**
* A char stream that translates java unicode sequences.
*/
public static CharStream javaCharStream(Reader input, Function<? super String, ? extends TokenDocument> documentMaker) {
public static CharStream javaCharStream(Reader input, Function<? super String, ? extends JavaccTokenDocument> documentMaker) {
String source = toString(input);
return new JavaCharStream(source);
JavaccTokenDocument tokens = documentMaker.apply(source);
return new JavaCharStream(tokens);
}
/**

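A hypothetical sketch, not part of this commit, of obtaining a char stream for a generated token manager; MyTokenDocument stands in for a language-specific JavaccTokenDocument subclass with a (String) constructor.

static CharStream openJavaSource(String sourceText) {
    // the resulting stream unescapes \uXXXX sequences while reading
    return CharStreamFactory.javaCharStream(new StringReader(sourceText), MyTokenDocument::new);
}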
View File

@ -2,48 +2,37 @@
* BSD-style license; for more info see http://pmd.sourceforge.net/license.html
*/
package net.sourceforge.pmd.lang.ast;
package net.sourceforge.pmd.lang.ast.impl.javacc;
import java.io.EOFException;
import java.io.IOException;
import java.io.Reader;
import java.io.StringReader;
import org.apache.commons.io.IOUtils;
import net.sourceforge.pmd.lang.ast.impl.TokenDocument;
import net.sourceforge.pmd.lang.ast.impl.javacc.JavaccToken;
/**
* This stream buffers the whole file in memory before parsing,
* and tracks start/end offsets of tokens. This allows building {@link JavaccToken}.
* The buffer is assumed to be composed of only ASCII characters,
* and the stream unescapes Unicode escapes. The {@link #getTokenDocument() token document}
* stores the original file with escapes and all.
*
* TODO this is to be moved into the impl.javacc subpackage
*/
public class JavaCharStream extends JavaCharStreamBase {
// full text with nothing escaped and all
private final String fullText;
private final TokenDocument document;
private final JavaccTokenDocument document;
private int[] startOffsets;
public JavaCharStream(String fulltext) {
super(new StringReader(fulltext));
this.fullText = fulltext;
this.document = new TokenDocument(fullText);
public JavaCharStream(JavaccTokenDocument document) {
super(new StringReader(document.getFullText()));
this.fullText = document.getFullText();
this.document = document;
this.startOffsets = new int[bufsize];
maxNextCharInd = fullText.length();
nextCharBuf = null;
}
public JavaCharStream(Reader toDump) {
this(toString(toDump));
}
@Override
protected void ExpandBuff(boolean wrapAround) {
int[] newStartOffsets = new int[bufsize + 2048];
@ -61,25 +50,27 @@ public class JavaCharStream extends JavaCharStreamBase {
}
@Override
protected void beforeReadChar() {
if (bufpos + 1 < available) {
startOffsets[bufpos + 1] = nextCharInd + 1;
}
protected void UpdateLineColumn(char c) {
startOffsets[bufpos] = nextCharInd;
super.UpdateLineColumn(c);
}
@Override
public int getStartOffset() {
return startOffsets[tokenBegin];
}
@Override
public int getEndOffset() {
if (bufpos >= startOffsets.length) {
if (isAtEof()) {
return fullText.length();
} else {
return startOffsets[bufpos] + 1; // + 1 for exclusive
}
}
public TokenDocument getTokenDocument() {
@Override
public JavaccTokenDocument getTokenDocument() {
return document;
}
@ -87,31 +78,31 @@ public class JavaCharStream extends JavaCharStreamBase {
protected char ReadByte() throws IOException {
++nextCharInd;
if (nextCharInd >= fullText.length()) {
if (isAtEof()) {
if (bufpos != 0) {
--bufpos;
backup(0);
if (bufpos < 0) {
bufpos += bufsize;
}
} else {
bufline[bufpos] = line;
bufcolumn[bufpos] = column;
startOffsets[bufpos] = fullText.length();
}
throw new IOException();
throw new EOFException();
}
return fullText.charAt(nextCharInd);
}
private boolean isAtEof() {
return nextCharInd >= fullText.length();
}
@Override
protected void FillBuff() {
throw new IllegalStateException("Buffer shouldn't be refilled");
}
private static String toString(Reader dstream) {
try (Reader r = dstream) {
return IOUtils.toString(r);
} catch (IOException e) {
throw new RuntimeException(e);
}
}
}

View File

@ -4,12 +4,13 @@
package net.sourceforge.pmd.lang.ast.impl.javacc;
import java.util.Comparator;
import net.sourceforge.pmd.lang.ast.CharStream;
import net.sourceforge.pmd.lang.ast.GenericToken;
import net.sourceforge.pmd.lang.ast.impl.TokenDocument;
/**
* A generic token implementation for JavaCC parsers. Will probably help
* remove those duplicated implementations that all have the same name.
* A generic token implementation for JavaCC parsers.
*
* <p>Largely has the same interface as the default generated token class.
* The main difference is that the position of the token is encoded as
@ -21,19 +22,27 @@ import net.sourceforge.pmd.lang.ast.impl.TokenDocument;
* and column bounds can be derived as well - though this should not be
* done systematically because it's costlier.
* <li>It's a bit lighter. Token instances are one of the most numerous
* class in a typical PMD run and this may reduce GC pressur.
* class in a typical PMD run and this may reduce GC pressure.
* </ul>
*
* <p>TODO replace duplicates over PMD.
*/
public class JavaccToken implements GenericToken, java.io.Serializable {
public class JavaccToken implements GenericToken, Comparable<JavaccToken> {
/**
* The version identifier for this Serializable class.
* Increment only if the <i>serialized</i> form of the
* class changes.
* Kind for EOF tokens.
*/
private static final long serialVersionUID = 4L;
public static final int EOF = 0;
/**
* Kind for implicit tokens. Negative because JavaCC only picks
* positive numbers for token kinds.
*/
public static final int IMPLICIT_TOKEN = -1;
private static final Comparator<JavaccToken> COMPARATOR =
Comparator.comparingInt(JavaccToken::getStartInDocument)
.thenComparing(JavaccToken::getEndInDocument);
/**
* An integer that describes the kind of this token. This numbering
@ -41,6 +50,12 @@ public class JavaccToken implements GenericToken, java.io.Serializable {
* stored in the file ...Constants.java.
*/
public final int kind;
protected final JavaccTokenDocument document;
private final CharSequence image;
private final int startInclusive;
private final int endExclusive;
/**
* A reference to the next regular (non-special) token from the input
* stream. If this is the last token from the input stream, or if the
@ -50,6 +65,7 @@ public class JavaccToken implements GenericToken, java.io.Serializable {
* this field.
*/
public JavaccToken next;
/**
* This field is used to access special tokens that occur prior to this
* token, but after the immediately preceding regular (non-special) token.
@ -64,28 +80,33 @@ public class JavaccToken implements GenericToken, java.io.Serializable {
*/
public JavaccToken specialToken;
private final CharSequence image;
private final int startInclusive;
private final int endExclusive;
protected final TokenDocument document;
/** {@link #undefined()} */
private JavaccToken() {
this(null);
}
/**
* @deprecated This is used by a few deprecated tests about comments,
* will be removed when they're updated.
*/
@Deprecated
public JavaccToken(String image) {
this(-1, image, -1, -1, null);
}
/**
* Constructs a new token for the specified Image and Kind.
* Builds a new token of the specified kind.
*
* @param kind Kind of token
* @param image Image of the token (after translating escapes if any)
* @param startInclusive Start character of the token in the text file (before translating escapes)
* @param endExclusive End of the token in the text file (before translating escapes)
* @param document Document owning the token
*/
public JavaccToken(int kind,
CharSequence image,
int startInclusive,
int endExclusive,
TokenDocument document) {
JavaccTokenDocument document) {
assert startInclusive <= endExclusive
: "Offsets should be correctly ordered: " + startInclusive + " <= " + endExclusive;
this.kind = kind;
this.image = image;
this.startInclusive = startInclusive;
@ -93,14 +114,21 @@ public class JavaccToken implements GenericToken, java.io.Serializable {
this.document = document;
}
/**
* Returns the document owning this token.
*/
public JavaccTokenDocument getDocument() {
return document;
}
@Override
public GenericToken getNext() {
public JavaccToken getNext() {
return next;
}
@Override
public GenericToken getPreviousComment() {
public JavaccToken getPreviousComment() {
return specialToken;
}
@ -126,7 +154,7 @@ public class JavaccToken implements GenericToken, java.io.Serializable {
@Override
public int getEndLine() {
return document == null ? -1 : document.lineNumberFromOffset(endExclusive - 1);
return document == null ? -1 : document.lineNumberFromOffset(endExclusive);
}
@Override
@ -136,19 +164,105 @@ public class JavaccToken implements GenericToken, java.io.Serializable {
@Override
public int getEndColumn() {
return document == null ? -1 : document.columnFromOffset(endExclusive - 1);
return document == null ? -1 : document.columnFromOffset(endExclusive);
}
@Override
public boolean isImplicit() {
return kind == IMPLICIT_TOKEN;
}
@Override
public String toString() {
return document.describeKind(kind) + ": " + getImage();
}
/**
* Returns the image.
* Returns a new token with the same kind as this one, whose image
* is replaced by the one marked on the char stream.
*
* @param charStream Char stream from which to start
*
* @return A new token
*/
@Override
public String toString() {
return image.toString();
public JavaccToken replaceImage(CharStream charStream) {
return new JavaccToken(
this.kind,
charStream.GetImage(),
this.startInclusive,
charStream.getEndOffset(),
this.document
);
}
public static JavaccToken undefined() {
return new JavaccToken();
/**
* Returns a new token with the given kind, and all other parameters
* identical to this one.
*
* @param newKind Char stream from which to start
*
* @return A new token
*/
public JavaccToken withKind(int newKind) {
JavaccToken tok = new JavaccToken(
newKind,
this.image,
this.startInclusive,
this.endExclusive,
this.document
);
tok.specialToken = this.specialToken;
tok.next = this.next;
return tok;
}
@Override
public int compareTo(JavaccToken o) {
return COMPARATOR.compare(this, o);
}
/**
* Creates an implicit token, with zero length, that is linked to
* the given token as its special predecessor.
*
* @param next Token before which to insert the new token
*
* @return A new token
*/
public static JavaccToken implicitBefore(JavaccToken next) {
JavaccToken implicit = newImplicit(next.getStartInDocument(), next.document);
// insert it right before the next token
// as a special token
implicit.next = next;
if (next.specialToken != null) {
next.specialToken.next = implicit;
implicit.specialToken = next.specialToken;
}
next.specialToken = implicit;
return implicit;
}
/**
* Returns a new implicit token, positioned at the given offset.
*
* @param offset Offset of the token
* @param document Document owning the token
*
* @return A new token
*/
public static JavaccToken newImplicit(int offset, JavaccTokenDocument document) {
return new JavaccToken(IMPLICIT_TOKEN,
"",
offset,
offset,
document);
}
}
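To illustrate the offset-based design described in the class comment, a hypothetical sketch, not part of this commit; the kind value is arbitrary.

public static void main(String[] args) {
    // the token stores only offsets; lines and columns are derived from the document
    JavaccTokenDocument doc = new JavaccTokenDocument("int x = 1;\n");
    JavaccToken tok = new JavaccToken(/* kind (arbitrary) */ 1, "int", 0, 3, doc);
    System.out.println(tok.getBeginLine() + ":" + tok.getBeginColumn()
            + " .. " + tok.getEndLine() + ":" + tok.getEndColumn());
}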

View File

@ -0,0 +1,112 @@
/*
* BSD-style license; for more info see http://pmd.sourceforge.net/license.html
*/
package net.sourceforge.pmd.lang.ast.impl.javacc;
import org.checkerframework.checker.nullness.qual.NonNull;
import org.checkerframework.checker.nullness.qual.Nullable;
import net.sourceforge.pmd.lang.ast.CharStream;
import net.sourceforge.pmd.lang.ast.impl.TokenDocument;
/**
* Token document for Javacc implementations. This is a helper object
* for generated token managers.
*/
public class JavaccTokenDocument extends TokenDocument<JavaccToken> {
private JavaccToken first;
public JavaccTokenDocument(String fullText) {
super(fullText);
}
/**
* Open the document. This is only meant to be used by a Javacc-generated
* parser.
*
* @return The token for the document start. This token is implicit and
* will never end up in the final token chain.
*
* @throws IllegalStateException If the document has already been opened
*/
public JavaccToken open() {
synchronized (this) {
if (first != null) {
throw new IllegalStateException("Document is already opened");
}
first = JavaccToken.newImplicit(0, this);
}
return first;
}
@Override
public JavaccToken getFirstToken() {
if (first == null || first.next == null) {
throw new IllegalStateException("Document has not been opened");
}
return first.next;
}
/**
* Returns a string that describes the token kind.
*
* @param kind Kind of token
*
* @return A descriptive string
*/
public final @NonNull String describeKind(int kind) {
if (kind == JavaccToken.IMPLICIT_TOKEN) {
return "<implicit token>";
}
String impl = describeKindImpl(kind);
if (impl != null) {
return impl;
}
return "<token of kind " + kind + ">";
}
/**
* Describe the given kind. If this returns a non-null value, then
* that's what {@link #describeKind(int)} will use. Otherwise a default
* implementation is used.
*
* <p>An implementation typically uses the JavaCC-generated array
* named {@code <parser name>Constants.tokenImage}. Remember to
* check the bounds of the array.
*
* @param kind Kind of token
*
* @return A descriptive string, or null to use default
*/
protected @Nullable String describeKindImpl(int kind) {
return null;
}
/**
* Creates a new token with the given kind. This is called back to
* by JavaCC-generated token managers (jjFillToken). Note that a
* created token is not guaranteed to end up in the final token chain.
*
* @param kind Kind of the token
* @param cs Char stream of the file. This can be used to get text
* coordinates and the image
* @param image Shared instance of the image token. If this is non-null,
* then no call to {@link CharStream#GetImage()} should be
* issued.
*
* @return A new token
*/
public JavaccToken createToken(int kind, CharStream cs, @Nullable String image) {
return new JavaccToken(
kind,
image == null ? cs.GetImage() : image,
cs.getStartOffset(),
cs.getEndOffset(),
this
);
}
}
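A hypothetical sketch, not part of this commit, of a language module's subclass following the describeKindImpl guidance above; MyParserConstants stands in for the JavaCC-generated constants class.

class MyTokenDocument extends JavaccTokenDocument {

    MyTokenDocument(String fullText) {
        super(fullText);
    }

    @Override
    protected String describeKindImpl(int kind) {
        // tokenImage is the array generated by JavaCC; bounds must be checked
        if (kind >= 0 && kind < MyParserConstants.tokenImage.length) {
            return MyParserConstants.tokenImage[kind];
        }
        return null; // fall back to the default description
    }
}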

View File

@ -0,0 +1,217 @@
/*
* BSD-style license; for more info see http://pmd.sourceforge.net/license.html
*/
package net.sourceforge.pmd.lang.ast.impl.javacc;
import java.util.ArrayList;
import java.util.List;
/**
* Shared implementation of the tree builder generated by JJTree.
*
* @param <N> Type of node this takes
*/
public final class JjtreeBuilder<N extends AbstractJjtreeNode<?>> {
private final List<N> nodes = new ArrayList<>();
private final List<Integer> marks = new ArrayList<>();
private int sp = 0; // number of nodes on stack
private int mk = 0; // current mark
private boolean nodeCreated;
/*** If non-zero, then the top "n" nodes of the stack will be injected as the first children of the next
* node to be opened. This is not very flexible, but it's enough. The grammar needs to take
* care of the order in which nodes are opened in a few places, in most cases this just means using
* eg A() B() #N(2) instead of (A() B()) #N, so as not to open N before A.
*/
private int numPendingInjection;
/**
* Determines whether the current node was actually closed and
* pushed. This should only be called in the final user action of a
* node scope.
*/
public boolean nodeCreated() {
return nodeCreated;
}
/**
* Call this to reinitialize the node stack. It is called
* automatically by the parser's ReInit() method.
*/
public void reset() {
nodes.clear();
marks.clear();
sp = 0;
mk = 0;
}
/**
* Returns the root node of the AST. It only makes sense to call
* this after a successful parse.
*/
public N rootNode() {
return nodes.get(0);
}
/***
* Extend the number of children of the current node of one to the left.
* If the node is closed, one additional node from the stack will be popped
* and added to its children. This allows mimicking "left-recursive" nodes,
* while keeping the parsing iterative.
*
* <p>Note that when the total number of children is definitely known, you
* can use "definite nodes", ie write the expected number of children (including
* the ones to the left) in the JJTree annotation (eg {@code #AdditiveExpression(2)}).
* So this is only useful when the number of children of the current node is not certain.
*
* <p>This method does not affect the stack unless the current jjtThis is
* closed in the future.
*/
public void extendLeft() {
mk--;
}
/***
* Peek the nth node from the top of the stack.
* peekNode(0) == peekNode()
*/
public N peekNode(int n) {
return nodes.get(nodes.size() - n - 1);
}
public boolean isInjectionPending() {
return numPendingInjection > 0;
}
public void injectRight(int n) {
numPendingInjection = n;
}
/** Pushes a node on to the stack. */
public void pushNode(N n) {
nodes.add(n);
++sp;
}
/**
* Returns the node on the top of the stack, and remove it from the
* stack.
*/
public N popNode() {
--sp;
if (sp < mk) {
mk = marks.remove(marks.size() - 1);
}
return nodes.remove(nodes.size() - 1);
}
/** Returns the node currently on the top of the stack. */
public N peekNode() {
return nodes.get(nodes.size() - 1);
}
/**
* Returns the number of children on the stack in the current node
* scope.
*/
public int nodeArity() {
return sp - mk;
}
public void clearNodeScope(N n) {
while (sp > mk) {
popNode();
}
mk = marks.remove(marks.size() - 1);
}
public void openNodeScope(N n, JavaccToken firstToken) {
marks.add(mk);
mk = sp;
if (isInjectionPending()) {
mk -= numPendingInjection;
numPendingInjection = 0;
}
n.jjtSetFirstToken(firstToken);
n.jjtOpen();
}
/**
* Close the node scope and adds the given number of children to the
* node. A definite node is constructed from a specified number of
* children. That number of nodes are popped from the stack and
* made the children of the definite node. Then the definite node
* is pushed on to the stack.
*/
public void closeNodeScope(N n, final int num, JavaccToken lastToken) {
int a = nodeArity();
mk = marks.remove(marks.size() - 1);
N child = null;
int i = num;
while (i-- > 0) {
child = popNode();
n.jjtAddChild(child, i);
}
if (child != null && num > a) {
// this node has more children than what was in its node scope
// (ie first token is wrong)
n.jjtSetFirstToken(child.jjtGetFirstToken());
}
closeImpl(n, lastToken);
}
/**
* Close the node scope if the condition is true.
* All the nodes that have been pushed since the node was opened are
* made children of the conditional node, which is then pushed on to
* the stack. If the condition is false the node is not constructed
* and they are left on the stack.
*
* @param n Node to close
* @param condition Whether to close the node or not
* @param lastToken Last token that was consumed while the node scope was open
*/
public void closeNodeScope(N n, boolean condition, JavaccToken lastToken) {
if (condition) {
int a = nodeArity();
mk = marks.remove(marks.size() - 1);
while (a-- > 0) {
n.jjtAddChild(popNode(), a);
}
closeImpl(n, lastToken);
} else {
mk = marks.remove(marks.size() - 1);
nodeCreated = false;
}
}
private void closeImpl(N n, JavaccToken lastToken) {
if (lastToken.getNext() == n.jjtGetFirstToken()) {
// this means, that the node has zero length.
// create an implicit token to represent this case.
JavaccToken implicit = JavaccToken.implicitBefore(lastToken.getNext());
n.jjtSetFirstToken(implicit);
n.jjtSetLastToken(implicit);
} else {
n.jjtSetLastToken(lastToken);
}
// note that the last token has been set before jjtClose
n.jjtClose();
pushNode(n);
nodeCreated = true;
}
}

View File

@ -0,0 +1,73 @@
/**
* BSD-style license; for more info see http://pmd.sourceforge.net/license.html
*/
package net.sourceforge.pmd.lang.ast.impl.javacc;
import java.io.Reader;
import net.sourceforge.pmd.lang.Parser;
import net.sourceforge.pmd.lang.ParserOptions;
import net.sourceforge.pmd.lang.TokenManager;
import net.sourceforge.pmd.lang.ast.CharStream;
import net.sourceforge.pmd.lang.ast.ParseException;
import net.sourceforge.pmd.lang.ast.RootNode;
import net.sourceforge.pmd.lang.ast.TokenMgrError;
/**
* Base implementation of the {@link Parser} interface for JavaCC language
* implementations. This wraps a parser generated by JavaCC, it's not meant
* as a base class for the generated parser.
*
* @param <R> Type of the root node of this language
*/
public abstract class JjtreeParserAdapter<R extends RootNode> implements Parser {
protected final ParserOptions parserOptions;
protected JjtreeParserAdapter(ParserOptions parserOptions) {
this.parserOptions = parserOptions;
}
@Override
public ParserOptions getParserOptions() {
return parserOptions;
}
@Override
public TokenManager getTokenManager(String fileName, Reader source) {
TokenManager tokenManager = createTokenManager(source);
tokenManager.setFileName(fileName);
return tokenManager;
}
protected abstract TokenManager createTokenManager(Reader source);
protected abstract JavaccTokenDocument newDocument(String fullText);
protected CharStream newCharStream(JavaccTokenDocument tokenDocument) {
return new SimpleCharStream(tokenDocument);
}
@Override
public R parse(String fileName, Reader source) throws ParseException {
String text = CharStreamFactory.toString(source);
JavaccTokenDocument doc = newDocument(text);
CharStream charStream = newCharStream(doc);
try {
return parseImpl(charStream, getParserOptions());
} catch (TokenMgrError tme) {
throw tme.withFileName(fileName);
}
}
protected abstract R parseImpl(CharStream cs, ParserOptions options) throws ParseException;
@Override
public String toString() {
return getClass().getSimpleName();
}
}
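A hypothetical sketch, not part of this commit, of how a language module might extend this adapter; MyRootNode, MyTokenManager, MyTokenDocument and MyParserImpl are assumed names for the module's own classes.

class MyParserAdapter extends JjtreeParserAdapter<MyRootNode> {

    MyParserAdapter(ParserOptions parserOptions) {
        super(parserOptions);
    }

    @Override
    protected TokenManager createTokenManager(Reader source) {
        return new MyTokenManager(source); // assumed TokenManager implementation
    }

    @Override
    protected JavaccTokenDocument newDocument(String fullText) {
        return new MyTokenDocument(fullText);
    }

    @Override
    protected MyRootNode parseImpl(CharStream cs, ParserOptions options) throws ParseException {
        return new MyParserImpl(cs).CompilationUnit(); // assumed generated entry production
    }
}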

View File

@ -0,0 +1,20 @@
/*
* BSD-style license; for more info see http://pmd.sourceforge.net/license.html
*/
package net.sourceforge.pmd.lang.ast.impl.javacc;
/**
* A char stream that does not perform unicode escaping.
*/
public class SimpleCharStream extends JavaCharStream {
public SimpleCharStream(JavaccTokenDocument document) {
super(document);
}
@Override
protected boolean doEscape() {
return false;
}
}

View File

@ -0,0 +1,8 @@
/*
* BSD-style license; for more info see http://pmd.sourceforge.net/license.html
*/
/**
* Support classes for language implementations based on JavaCC.
*/
package net.sourceforge.pmd.lang.ast.impl.javacc;

View File

@ -1,44 +0,0 @@
/*
* BSD-style license; for more info see http://pmd.sourceforge.net/license.html
*/
package net.sourceforge.pmd.lang.ast.internal;
import java.util.ArrayDeque;
import java.util.Deque;
import java.util.Iterator;
import org.checkerframework.checker.nullness.qual.NonNull;
import net.sourceforge.pmd.lang.ast.Node;
/** Iterates over a node and its descendants. */
class DescendantOrSelfIterator implements Iterator<@NonNull Node> {
private final Deque<Node> queue = new ArrayDeque<>();
/** Always {@link #hasNext()} after exiting the constructor. */
DescendantOrSelfIterator(Node top) {
queue.addFirst(top);
}
@Override
public boolean hasNext() {
return !queue.isEmpty();
}
@Override
public @NonNull Node next() {
Node node = queue.removeFirst();
enqueueChildren(node);
return node;
}
private void enqueueChildren(Node n) {
for (int i = n.getNumChildren() - 1; i >= 0; i--) {
queue.addFirst(n.getChild(i));
}
}
}

View File

@ -23,24 +23,24 @@ import net.sourceforge.pmd.lang.ast.Node;
interface Filtermap<I, O> extends Function<@NonNull I, @Nullable O>, Predicate<@NonNull I> {
Filtermap<Node, Node> NODE_IDENTITY = emptyFilter();
Filtermap<Node, Node> NODE_IDENTITY = identityFilter();
/**
* Returns a null value if the filter accepts the value. Otherwise
* returns the transformed value.
* returns the transformed value. MUST return null for null parameter.
*/
@Override
@Nullable O apply(@NonNull I i);
@Nullable O apply(@Nullable I i);
@Override
default boolean test(@NonNull I i) {
default boolean test(@Nullable I i) {
return apply(i) != null;
}
/** Filter an iterator. */
default Iterator<O> filterMap(Iterator<I> iter) {
default Iterator<O> filterMap(Iterator<? extends I> iter) {
return IteratorUtil.mapNotNull(iter, this);
}
@ -49,6 +49,9 @@ interface Filtermap<I, O> extends Function<@NonNull I, @Nullable O>, Predicate<@
default <R> Filtermap<I, R> thenApply(Function<@NonNull ? super O, @Nullable ? extends R> then) {
Objects.requireNonNull(then);
return i -> {
if (i == null) {
return null;
}
O o = this.apply(i);
return o == null ? null : then.apply(o);
};
@ -60,34 +63,56 @@ interface Filtermap<I, O> extends Function<@NonNull I, @Nullable O>, Predicate<@
}
static <I> Filtermap<I, I> emptyFilter() {
default Filtermap<I, O> thenFilter(Predicate<? super O> rClass) {
return thenApply(filter(rClass));
}
static <I> Filtermap<I, I> identityFilter() {
return new Filtermap<I, I>() {
@Override
public I apply(@NonNull I i) {
public I apply(@Nullable I i) {
return i;
}
@Override
@SuppressWarnings("unchecked")
public <R> Filtermap<I, R> thenApply(Function<@NonNull ? super I, @Nullable ? extends R> then) {
return then instanceof Filtermap ? (Filtermap<I, R>) then : then::apply;
return then instanceof Filtermap ? (Filtermap<I, R>) then : Filtermap.super.thenApply(then);
}
@Override
public Iterator<I> filterMap(Iterator<I> iter) {
return iter;
@SuppressWarnings("unchecked")
public Iterator<I> filterMap(Iterator<? extends I> iter) {
return (Iterator<I>) iter;
}
@Override
public String toString() {
return "IdentityFilter";
}
};
}
static <I> Filtermap<I, I> filter(Predicate<? super I> pred) {
return i -> pred.test(i) ? i : null;
static <I> Filtermap<I, I> filter(Predicate<? super @NonNull I> pred) {
return i -> i != null && pred.test(i) ? i : null;
}
static <I, O> Filtermap<I, O> isInstance(Class<O> oClass) {
return i -> oClass.isInstance(i) ? oClass.cast(i) : null;
return new Filtermap<I, O>() {
@Override
@SuppressWarnings("unchecked")
public @Nullable O apply(@Nullable I i) {
return oClass.isInstance(i) ? (O) i : null;
}
@Override
public String toString() {
return "IsInstance[" + oClass + "]";
}
};
}
}
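A hypothetical sketch, not part of this commit, of composing filter-maps inside this internal package; ASTName is an assumed node class.

static Iterator<ASTName> nonEmptyNames(Iterator<? extends Node> nodes) {
    return Filtermap.<Node, ASTName>isInstance(ASTName.class)
                    .thenFilter(n -> !n.getImage().isEmpty()) // drop names with an empty image
                    .filterMap(nodes);                        // null results are filtered out lazily
}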

View File

@ -0,0 +1,77 @@
/*
* BSD-style license; for more info see http://pmd.sourceforge.net/license.html
*/
package net.sourceforge.pmd.lang.ast.internal;
import java.util.Iterator;
import java.util.List;
import java.util.Spliterator;
import java.util.Spliterators;
import java.util.function.Function;
import org.checkerframework.checker.nullness.qual.NonNull;
import org.checkerframework.checker.nullness.qual.Nullable;
import net.sourceforge.pmd.internal.util.AssertionUtil;
import net.sourceforge.pmd.internal.util.IteratorUtil;
import net.sourceforge.pmd.lang.ast.Node;
import net.sourceforge.pmd.lang.ast.NodeStream;
/**
* A greedy stream evaluates all axis operations, except for descendants,
* greedily.
*/
abstract class GreedyNStream<T extends Node> extends IteratorBasedNStream<T> {
@Override
protected <R extends Node> NodeStream<R> mapIter(Function<Iterator<T>, Iterator<R>> fun) {
return StreamImpl.fromNonNullList(IteratorUtil.toNonNullList(fun.apply(iterator())));
}
@Override
public T first() {
return toList().get(0);
}
@Override
public @Nullable T get(int n) {
AssertionUtil.requireNonNegative("n", n);
List<T> tList = toList();
return n < tList.size() ? tList.get(n) : null;
}
@Override
public Iterator<T> iterator() {
return toList().iterator();
}
@Override
public abstract List<T> toList();
@Override
public Spliterator<T> spliterator() {
Spliterator<T> spliter = toList().spliterator();
return Spliterators.spliterator(iterator(), spliter.estimateSize(),
spliter.characteristics() | Spliterator.NONNULL);
}
@Override
public NodeStream<T> cached() {
return this;
}
static class GreedyKnownNStream<T extends Node> extends GreedyNStream<T> {
private final List<@NonNull T> coll;
GreedyKnownNStream(List<@NonNull T> coll) {
this.coll = coll;
}
@Override
public List<T> toList() {
return coll;
}
}
}
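
As the class comment says, a greedy stream evaluates the pipeline once: toList() is the backing snapshot, and iterator(), first() and get() all read from it, so repeated queries do not re-walk the tree. A standalone sketch of that behaviour (GreedySketch is an illustrative name, not PMD API):

import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.function.Predicate;

// Illustrative only: a "greedy" stream evaluates the pipeline eagerly into a
// list, then serves all further queries (iterator, first, get) from that list.
final class GreedySketch<T> {

    private final List<T> snapshot;

    GreedySketch(Iterator<? extends T> source, Predicate<? super T> keep) {
        List<T> acc = new ArrayList<>();
        while (source.hasNext()) {
            T t = source.next();
            if (keep.test(t)) {
                acc.add(t); // evaluated eagerly, exactly once
            }
        }
        this.snapshot = acc;
    }

    T first() { return snapshot.isEmpty() ? null : snapshot.get(0); }

    T get(int n) { return n >= 0 && n < snapshot.size() ? snapshot.get(n) : null; }

    Iterator<T> iterator() { return snapshot.iterator(); }
}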

View File

@ -62,10 +62,37 @@ abstract class IteratorBasedNStream<T extends Node> implements NodeStream<T> {
}
@Override
public NodeStream<T> filter(Predicate<? super T> predicate) {
public NodeStream<T> filter(Predicate<? super @NonNull T> predicate) {
return mapIter(it -> IteratorUtil.mapNotNull(it, Filtermap.filter(predicate)));
}
@Override
public <R extends Node> NodeStream<R> filterIs(Class<R> rClass) {
return mapIter(it -> IteratorUtil.mapNotNull(it, Filtermap.isInstance(rClass)));
}
@Override
public DescendantNodeStream<Node> descendants() {
return flatMapDescendants(Node::descendants);
}
@Override
public DescendantNodeStream<Node> descendantsOrSelf() {
return flatMapDescendants(Node::descendantsOrSelf);
}
@Override
public <R extends Node> DescendantNodeStream<R> descendants(Class<R> rClass) {
return flatMapDescendants(node -> node.descendants(rClass));
}
@NonNull
protected <R extends Node> DescendantNodeStream<R> flatMapDescendants(Function<T, DescendantNodeStream<R>> mapper) {
return new DescendantMapping<>(this, mapper);
}
@Override
public void forEach(Consumer<? super T> action) {
iterator().forEachRemaining(action);
@ -190,37 +217,19 @@ abstract class IteratorBasedNStream<T extends Node> implements NodeStream<T> {
@Override
public NodeStream<T> cached() {
return new IteratorBasedNStream<T>() {
private List<T> cache;
@Override
public Iterator<T> iterator() {
return toList().iterator();
}
@Override
public int count() {
return toList().size();
}
@Override
public List<T> toList() {
if (cache == null) {
cache = IteratorBasedNStream.this.toList();
}
return cache;
}
@Override
public String toString() {
return "CachedStream[" + IteratorBasedNStream.this + "]";
}
};
return StreamImpl.fromNonNullList(toList());
}
private <R extends Node> IteratorMapping<R> mapIter(Function<Iterator<T>, Iterator<R>> fun) {
return new IteratorMapping<R>(fun);
protected <R extends Node> NodeStream<R> mapIter(Function<Iterator<T>, Iterator<R>> fun) {
return new IteratorMapping<>(fun);
}
@Override
public String toString() {
return getClass().getSimpleName() + " ["
+ toStream().map(Objects::toString).collect(Collectors.joining(", "))
+ "]";
}
private class IteratorMapping<S extends Node> extends IteratorBasedNStream<S> {
@ -239,8 +248,35 @@ abstract class IteratorBasedNStream<T extends Node> implements NodeStream<T> {
}
}
@Override
public String toString() {
return getClass().getSimpleName() + " [" + toStream().map(Objects::toString).collect(Collectors.joining(", ")) + "]";
private static class DescendantMapping<T extends Node, S extends Node> extends IteratorBasedNStream<S> implements DescendantNodeStream<S> {
private final Function<T, DescendantNodeStream<S>> fun;
private final TreeWalker walker;
private final IteratorBasedNStream<T> upstream;
private DescendantMapping(IteratorBasedNStream<T> upstream, Function<T, DescendantNodeStream<S>> fun, TreeWalker walker) {
this.fun = fun;
this.walker = walker;
this.upstream = upstream;
}
DescendantMapping(IteratorBasedNStream<T> upstream, Function<T, DescendantNodeStream<S>> fun) {
this(upstream, fun, TreeWalker.DEFAULT);
}
@Override
public Iterator<S> iterator() {
return IteratorUtil.flatMap(upstream.iterator(),
fun.andThen(walker::apply).andThen(NodeStream::iterator));
}
@Override
public DescendantNodeStream<S> crossFindBoundaries(boolean cross) {
return walker.isCrossFindBoundaries() == cross
? this
: new DescendantMapping<>(upstream, fun, walker.crossFindBoundaries(cross));
}
}
}
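
DescendantMapping above is a lazy flat-map: each upstream element is expanded into its descendant stream on demand, and the TreeWalker configuration (such as crossFindBoundaries) is threaded through unchanged, so reconfiguring only swaps the walker. A standalone sketch of the lazy flat-map part (FlatMapSketch and expand are illustrative names, not PMD API):

import java.util.Collections;
import java.util.Iterator;
import java.util.NoSuchElementException;
import java.util.function.Function;

// Illustrative only: expands each upstream element into a sub-iterator lazily.
final class FlatMapSketch {

    static <T, S> Iterator<S> flatMap(Iterator<T> upstream,
                                      Function<? super T, ? extends Iterator<S>> expand) {
        return new Iterator<S>() {
            private Iterator<S> current = Collections.emptyIterator();

            @Override
            public boolean hasNext() {
                // advance to the next upstream element that yields something
                while (!current.hasNext() && upstream.hasNext()) {
                    current = expand.apply(upstream.next());
                }
                return current.hasNext();
            }

            @Override
            public S next() {
                if (!hasNext()) {
                    throw new NoSuchElementException();
                }
                return current.next();
            }
        };
    }
}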

View File

@ -179,17 +179,17 @@ final class SingletonNodeStream<T extends Node> extends IteratorBasedNStream<T>
}
@Override
public NodeStream<Node> descendants() {
public DescendantNodeStream<Node> descendants() {
return StreamImpl.descendants(node);
}
@Override
public <R extends Node> NodeStream<R> descendants(Class<R> rClass) {
public <R extends Node> DescendantNodeStream<R> descendants(Class<R> rClass) {
return StreamImpl.descendants(node, rClass);
}
@Override
public NodeStream<Node> descendantsOrSelf() {
public DescendantNodeStream<Node> descendantsOrSelf() {
return StreamImpl.descendantsOrSelf(node);
}

View File

@ -5,6 +5,7 @@
package net.sourceforge.pmd.lang.ast.internal;
import java.util.Collection;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
@ -18,6 +19,7 @@ import org.checkerframework.checker.nullness.qual.Nullable;
import net.sourceforge.pmd.internal.util.IteratorUtil;
import net.sourceforge.pmd.lang.ast.Node;
import net.sourceforge.pmd.lang.ast.NodeStream;
import net.sourceforge.pmd.lang.ast.NodeStream.DescendantNodeStream;
import net.sourceforge.pmd.lang.ast.internal.AxisStream.AncestorOrSelfStream;
import net.sourceforge.pmd.lang.ast.internal.AxisStream.ChildrenStream;
import net.sourceforge.pmd.lang.ast.internal.AxisStream.DescendantOrSelfStream;
@ -25,31 +27,12 @@ import net.sourceforge.pmd.lang.ast.internal.AxisStream.DescendantStream;
import net.sourceforge.pmd.lang.ast.internal.AxisStream.FilteredAncestorOrSelfStream;
import net.sourceforge.pmd.lang.ast.internal.AxisStream.FilteredChildrenStream;
import net.sourceforge.pmd.lang.ast.internal.AxisStream.FilteredDescendantStream;
import net.sourceforge.pmd.lang.ast.internal.GreedyNStream.GreedyKnownNStream;
public final class StreamImpl {
@SuppressWarnings("rawtypes")
private static final NodeStream EMPTY = new IteratorBasedNStream() {
@Override
public Iterator iterator() {
return Collections.emptyIterator();
}
@Override
public List toList() {
return Collections.emptyList();
}
@Override
public List toList(Function mapper) {
return Collections.emptyList();
}
@Override
public String toString() {
return "EmptyStream";
}
};
private static final DescendantNodeStream EMPTY = new EmptyNodeStream();
private StreamImpl() {
// utility class
@ -59,22 +42,17 @@ public final class StreamImpl {
return new SingletonNodeStream<>(node);
}
public static <T extends Node> NodeStream<T> fromIterable(Iterable<T> iterable) {
return new IteratorBasedNStream<T>() {
@Override
public Iterator<T> iterator() {
return IteratorUtil.filterNotNull(iterable.iterator());
public static <T extends Node> NodeStream<T> fromIterable(Iterable<@Nullable T> iterable) {
if (iterable instanceof Collection) {
Collection<@Nullable T> coll = (Collection<T>) iterable;
if (coll.isEmpty()) {
return empty();
} else if (coll.size() == 1) {
return NodeStream.of(coll.iterator().next());
}
}
@Override
public Spliterator<T> spliterator() {
Spliterator<T> spliter = iterable.spliterator();
return Spliterators.spliterator(iterator(), spliter.estimateSize(),
(spliter.characteristics() | Spliterator.NONNULL)
& ~Spliterator.SIZED
& ~Spliterator.SUBSIZED);
}
};
return fromNonNullList(IteratorUtil.toNonNullList(iterable.iterator()));
}
public static <T extends Node> NodeStream<T> union(Iterable<? extends @Nullable NodeStream<? extends T>> streams) {
@ -88,28 +66,29 @@ public final class StreamImpl {
@SuppressWarnings("unchecked")
public static <T extends Node> NodeStream<T> empty() {
public static <T extends Node> DescendantNodeStream<T> empty() {
return EMPTY;
}
public static <R extends Node> NodeStream<R> children(@NonNull Node node, Class<R> target) {
return node.getNumChildren() == 0 ? empty() : new FilteredChildrenStream<>(node, Filtermap.isInstance(target));
return sliceChildren(node, Filtermap.isInstance(target), 0, node.getNumChildren());
}
public static NodeStream<Node> children(@NonNull Node node) {
return node.getNumChildren() == 0 ? empty() : new ChildrenStream(node);
return sliceChildren(node, Filtermap.NODE_IDENTITY, 0, node.getNumChildren());
}
public static NodeStream<Node> descendants(@NonNull Node node) {
return node.getNumChildren() == 0 ? empty() : new DescendantStream(node);
public static DescendantNodeStream<Node> descendants(@NonNull Node node) {
return node.getNumChildren() == 0 ? empty() : new DescendantStream(node, TreeWalker.DEFAULT);
}
public static <R extends Node> NodeStream<R> descendants(@NonNull Node node, Class<R> rClass) {
return node.getNumChildren() == 0 ? empty() : new FilteredDescendantStream<>(node, Filtermap.isInstance(rClass));
public static <R extends Node> DescendantNodeStream<R> descendants(@NonNull Node node, Class<R> rClass) {
return node.getNumChildren() == 0 ? empty()
: new FilteredDescendantStream<>(node, TreeWalker.DEFAULT, Filtermap.isInstance(rClass));
}
public static NodeStream<Node> descendantsOrSelf(@NonNull Node node) {
return node.getNumChildren() == 0 ? empty() : new DescendantOrSelfStream(node);
public static DescendantNodeStream<Node> descendantsOrSelf(@NonNull Node node) {
return node.getNumChildren() == 0 ? empty() : new DescendantOrSelfStream(node, TreeWalker.DEFAULT);
}
public static NodeStream<Node> followingSiblings(@NonNull Node node) {
@ -146,7 +125,12 @@ public final class StreamImpl {
: (NodeStream<T>) new ChildrenStream(parent, from, length);
return res;
} else {
return new FilteredChildrenStream<>(parent, filtermap, from, length);
if (length == 1) {
// eager evaluation, empty or singleton
return NodeStream.of(filtermap.apply(parent.getChild(from)));
} else {
return new FilteredChildrenStream<>(parent, filtermap, from, length);
}
}
}
@ -178,5 +162,63 @@ public final class StreamImpl {
return ancestorsOrSelf(node.getParent(), Filtermap.isInstance(target));
}
static <T extends Node> NodeStream<T> fromNonNullList(List<@NonNull T> coll) {
if (coll.isEmpty()) {
return empty();
} else if (coll.size() == 1) {
return singleton(coll.get(0));
}
return new GreedyKnownNStream<>(coll);
}
private static class EmptyNodeStream<N extends Node> extends IteratorBasedNStream<N> implements DescendantNodeStream<N> {
@Override
protected <R extends Node> NodeStream<R> mapIter(Function<Iterator<N>, Iterator<R>> fun) {
return StreamImpl.empty();
}
@Override
protected @NonNull <R extends Node> DescendantNodeStream<R> flatMapDescendants(Function<N, DescendantNodeStream<R>> mapper) {
return StreamImpl.empty();
}
@Override
public DescendantNodeStream<N> crossFindBoundaries(boolean cross) {
return StreamImpl.empty();
}
@Override
public Iterator<N> iterator() {
return Collections.emptyIterator();
}
@Override
public List<N> toList() {
return Collections.emptyList();
}
@Override
public <R> List<R> toList(Function<? super N, ? extends R> mapper) {
return Collections.emptyList();
}
@Override
public Spliterator<N> spliterator() {
return Spliterators.emptySpliterator();
}
@Override
public NodeStream<N> cached() {
return this;
}
@Override
public String toString() {
return "EmptyStream";
}
}
}
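
fromIterable and fromNonNullList above special-case sizes 0 and 1: an empty input returns the shared EmptyNodeStream and a single element returns a singleton stream, so only the general case pays for a list-backed greedy stream. A standalone sketch of that dispatch (MiniStream and StreamFactorySketch are illustrative names, not PMD API):

import java.util.Collections;
import java.util.List;

// Illustrative only: mirrors the size-based dispatch of fromNonNullList above.
final class StreamFactorySketch {

    /** Minimal stand-in for a node stream; any Iterable works for the sketch. */
    interface MiniStream<T> extends Iterable<T> { }

    static <T> MiniStream<T> fromNonNullList(List<T> list) {
        if (list.isEmpty()) {
            return () -> Collections.<T>emptyIterator();              // shared empty stream
        } else if (list.size() == 1) {
            T only = list.get(0);
            return () -> Collections.singletonList(only).iterator();  // singleton fast path
        }
        return list::iterator;                                         // general case, backed by the list
    }
}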

Some files were not shown because too many files have changed in this diff