forked from phoedos/pmd
add PathId class
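The commit replaces plain String file identifiers with the new PathId interface (added as a new file at the bottom of this diff) across the analysis cache, CPD and language modules. Below is a minimal sketch of how the two factory methods shown in that new file behave; the class name PathIdSketch and the file paths are made up for illustration and are not part of the commit.

    import java.nio.file.Paths;

    import net.sourceforge.pmd.lang.document.PathId;

    public class PathIdSketch {
        public static void main(String[] args) {
            // Backed by a real Path: the URI form is what the analysis cache stores on disk.
            PathId onDisk = PathId.fromPath(Paths.get("src/main/java/Foo.java"));
            System.out.println(onDisk.toUriString());      // e.g. file:///.../src/main/java/Foo.java
            System.out.println(onDisk.getFileName());      // Foo.java

            // Backed by a path-like string (DB sources, tests, CPD compatibility).
            PathId virtual = PathId.fromPathLikeString("src/filename.cls");
            System.out.println(virtual.getNiceFileName()); // src/filename.cls
            System.out.println(virtual.getFileName());     // filename.cls
        }
    }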
@@ -177,7 +177,7 @@ public class PMDTaskImpl {
             @Override
             public FileAnalysisListener startFileAnalysis(TextFile dataSource) {
-                String name = dataSource.getDisplayName();
+                String name = dataSource.getPathId().toUriString();
                 project.log("Processing file " + name, Project.MSG_VERBOSE);
                 return FileAnalysisListener.noop();
             }
@@ -13,6 +13,7 @@ import org.antlr.runtime.Token;
 
 import net.sourceforge.pmd.lang.apex.ApexJorjeLogging;
 import net.sourceforge.pmd.lang.ast.TokenMgrError;
+import net.sourceforge.pmd.lang.document.PathId;
 
 import apex.jorje.parser.impl.ApexLexer;
 
@@ -42,7 +43,7 @@ public class ApexTokenizer implements Tokenizer {
         ApexLexer lexer = new ApexLexer(ass) {
             @Override
             public void emitErrorMessage(String msg) {
-                throw new TokenMgrError(getLine(), getCharPositionInLine(), getSourceName(), msg, null);
+                throw new TokenMgrError(getLine(), getCharPositionInLine(), PathId.fromPathLikeString(getSourceName()), msg, null);
             }
         };
@@ -32,7 +32,7 @@ public final class ApexParser implements Parser {
             final ApexTreeBuilder treeBuilder = new ApexTreeBuilder(task, (ApexLanguageProcessor) task.getLanguageProcessor());
             return treeBuilder.buildTree(astRoot);
         } catch (apex.jorje.services.exception.ParseException e) {
-            throw new ParseException(e).setFileName(task.getFileDisplayName());
+            throw new ParseException(e).setFileName(task.getTextDocument().getPathId());
         }
     }
 }
@@ -63,7 +63,7 @@ class CompilerService {
     public Compilation parseApex(TextDocument document) {
         SourceFile sourceFile = SourceFile.builder()
                 .setBody(document.getText().toString())
-                .setKnownName(document.getDisplayName())
+                .setKnownName(document.getPathId().toUriString())
                 .build();
         ApexCompiler compiler = ApexCompiler.builder().setInput(createCompilationInput(Collections.singletonList(sourceFile))).build();
         compiler.compile(CompilerStage.POST_TYPE_RESOLVE);
@@ -45,14 +45,6 @@ class ApexParserTest extends ApexParserTestBase {
         assertEquals(4, methods.size());
     }
 
-    @Test
-    void fileName() {
-        String code = "class Outer { class Inner {}}";
-
-        ASTUserClass rootNode = (ASTUserClass) parse(code, "src/filename.cls");
-
-        assertEquals("src/filename.cls", rootNode.getTextDocument().getDisplayName());
-    }
 
     private final String testCodeForLineNumbers =
         "public class SimpleClass {\n" // line 1
@@ -18,6 +18,7 @@ import java.util.function.Predicate;
 import net.sourceforge.pmd.annotation.DeprecatedUntil700;
 import net.sourceforge.pmd.annotation.Experimental;
 import net.sourceforge.pmd.annotation.InternalApi;
+import net.sourceforge.pmd.lang.document.PathId;
 import net.sourceforge.pmd.lang.document.TextFile;
 import net.sourceforge.pmd.renderers.AbstractAccumulatingRenderer;
 import net.sourceforge.pmd.reporting.FileAnalysisListener;
@@ -102,7 +103,7 @@ public final class Report {
     public static class ProcessingError {
 
         private final Throwable error;
-        private final String file;
+        private final PathId file;
 
         /**
         * Creates a new processing error
@@ -112,7 +113,7 @@ public final class Report {
         * @param file
         *            the file during which the error occurred
         */
-        public ProcessingError(Throwable error, String file) {
+        public ProcessingError(Throwable error, PathId file) {
            this.error = error;
            this.file = file;
        }
@@ -133,7 +134,7 @@ public final class Report {
        }
 
        public String getFile() {
-           return file;
+           return file.toUriString();
        }
 
        public Throwable getError() {
@@ -626,7 +626,7 @@ public class RuleSet implements ChecksumAware {
      *             <code>false</code> otherwise
      */
     boolean applies(TextFile file) {
-        return applies(file.getDisplayName());
+        return applies(file.getPathId().getFileName());
     }
 
     /**
@@ -154,7 +154,7 @@ public class RuleSets {
         }
 
         for (RuleSet ruleSet : ruleSets) {
-            if (ruleSet.applies(root.getTextDocument().getPathId())) {
+            if (ruleSet.applies(root.getTextDocument().getPathId().getFileName())) {
                 ruleApplicator.apply(ruleSet.getRules(), listener);
             }
         }
@@ -85,7 +85,7 @@ public interface RuleViolation {
      * @return The source file name.
      */
     default String getFilename() {
-        return getLocation().getFileName();
+        return getLocation().getFileName().toUriString();
     }
 
     /**
@@ -18,6 +18,7 @@ import java.util.ArrayList;
 import java.util.Collections;
 import java.util.EnumSet;
 import java.util.List;
+import java.util.Set;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.ConcurrentMap;
 
@@ -34,7 +35,9 @@ import net.sourceforge.pmd.benchmark.TimedOperation;
 import net.sourceforge.pmd.benchmark.TimedOperationCategory;
 import net.sourceforge.pmd.cache.internal.ClasspathFingerprinter;
 import net.sourceforge.pmd.internal.util.IOUtil;
+import net.sourceforge.pmd.lang.document.PathId;
 import net.sourceforge.pmd.lang.document.TextDocument;
+import net.sourceforge.pmd.lang.document.TextFile;
 import net.sourceforge.pmd.reporting.FileAnalysisListener;
 
 /**
@@ -49,8 +52,8 @@ public abstract class AbstractAnalysisCache implements AnalysisCache {
     protected static final Logger LOG = LoggerFactory.getLogger(AbstractAnalysisCache.class);
     protected static final ClasspathFingerprinter FINGERPRINTER = new ClasspathFingerprinter();
     protected final String pmdVersion;
-    protected final ConcurrentMap<String, AnalysisResult> fileResultsCache = new ConcurrentHashMap<>();
-    protected final ConcurrentMap<String, AnalysisResult> updatedResultsCache = new ConcurrentHashMap<>();
+    protected final ConcurrentMap<PathId, AnalysisResult> fileResultsCache = new ConcurrentHashMap<>();
+    protected final ConcurrentMap<PathId, AnalysisResult> updatedResultsCache = new ConcurrentHashMap<>();
     protected final CachedRuleMapper ruleMapper = new CachedRuleMapper();
     protected long rulesetChecksum;
     protected long auxClassPathChecksum;
@@ -76,13 +79,6 @@ public abstract class AbstractAnalysisCache implements AnalysisCache {
         if (upToDate) {
             LOG.trace("Incremental Analysis cache HIT");
-
-            /*
-             * Update cached violation "filename" to match the appropriate text document,
-             * so we can honor relativized paths for the current run
-             */
-            final String displayName = document.getDisplayName();
-            cachedResult.getViolations().forEach(v -> ((CachedRuleViolation) v).setFileDisplayName(displayName));
 
             // copy results over
             updatedResult = cachedResult;
         } else {
@@ -125,7 +121,7 @@ public abstract class AbstractAnalysisCache implements AnalysisCache {
 
 
     @Override
-    public void checkValidity(final RuleSets ruleSets, final ClassLoader auxclassPathClassLoader) {
+    public void checkValidity(final RuleSets ruleSets, final ClassLoader auxclassPathClassLoader, Set<TextFile> files) {
         try (TimedOperation ignored = TimeTracker.startOperation(TimedOperationCategory.ANALYSIS_CACHE, "validity check")) {
             boolean cacheIsValid = cacheExists();
 
@@ -222,7 +218,7 @@ public abstract class AbstractAnalysisCache implements AnalysisCache {
 
     @Override
     public FileAnalysisListener startFileAnalysis(TextDocument file) {
-        final String fileName = file.getPathId();
+        final PathId fileName = file.getPathId();
 
         return new FileAnalysisListener() {
             @Override
@@ -6,6 +6,7 @@ package net.sourceforge.pmd.cache;
 
 import java.io.IOException;
 import java.util.List;
+import java.util.Set;
 
 import net.sourceforge.pmd.RuleSets;
 import net.sourceforge.pmd.RuleViolation;
@@ -64,7 +65,7 @@ public interface AnalysisCache {
      * @param ruleSets The rulesets configured for this analysis.
      * @param auxclassPathClassLoader The class loader for auxclasspath configured for this analysis.
      */
-    void checkValidity(RuleSets ruleSets, ClassLoader auxclassPathClassLoader);
+    void checkValidity(RuleSets ruleSets, ClassLoader auxclassPathClassLoader, Set<TextFile> files);
 
     /**
      * Returns a listener that will be used like in {@link GlobalAnalysisListener#startFileAnalysis(TextFile)}.
@@ -5,6 +5,7 @@
 package net.sourceforge.pmd.cache;
 
 import java.io.IOException;
+import java.util.Collections;
 
 import net.sourceforge.pmd.RuleSets;
 import net.sourceforge.pmd.annotation.InternalApi;
@@ -23,7 +24,7 @@ public class AnalysisCacheListener implements GlobalAnalysisListener {
 
     public AnalysisCacheListener(AnalysisCache cache, RuleSets ruleSets, ClassLoader classLoader) {
         this.cache = cache;
-        cache.checkValidity(ruleSets, classLoader);
+        cache.checkValidity(ruleSets, classLoader, Collections.emptySet());
     }
 
     @Override
@@ -18,6 +18,7 @@ import net.sourceforge.pmd.Rule;
 import net.sourceforge.pmd.RuleViolation;
 import net.sourceforge.pmd.annotation.InternalApi;
 import net.sourceforge.pmd.lang.document.FileLocation;
+import net.sourceforge.pmd.lang.document.PathId;
 import net.sourceforge.pmd.lang.document.TextRange2d;
 import net.sourceforge.pmd.util.StringUtil;
 
@@ -38,10 +39,10 @@ public final class CachedRuleViolation implements RuleViolation {
     private final String ruleTargetLanguage;
     private final Map<String, String> additionalInfo;
 
-    private FileLocation location;
+    private final FileLocation location;
 
     private CachedRuleViolation(final CachedRuleMapper mapper, final String description,
-                                final String filePathId, final String ruleClassName, final String ruleName,
+                                final PathId filePathId, final String ruleClassName, final String ruleName,
                                 final String ruleTargetLanguage, final int beginLine, final int beginColumn,
                                 final int endLine, final int endColumn,
                                 final Map<String, String> additionalInfo) {
@@ -54,11 +55,6 @@ public final class CachedRuleViolation implements RuleViolation {
         this.additionalInfo = additionalInfo;
     }
 
-    void setFileDisplayName(String displayName) {
-        this.location = FileLocation.range(displayName,
-            TextRange2d.range2d(getBeginLine(), getBeginColumn(), getEndLine(), getEndColumn()));
-    }
-
     @Override
     public Rule getRule() {
         // The mapper may be initialized after cache is loaded, so use it lazily
@@ -83,23 +79,26 @@ public final class CachedRuleViolation implements RuleViolation {
     /**
      * Helper method to load a {@link CachedRuleViolation} from an input stream.
      *
-     * @param stream The stream from which to load the violation.
+     * @param stream     The stream from which to load the violation.
      * @param filePathId The name of the file on which this rule was reported.
-     * @param mapper The mapper to be used to obtain rule instances from the active rulesets.
+     * @param mapper     The mapper to be used to obtain rule instances from the active rulesets.
      *
      * @return The loaded rule violation.
      * @throws IOException
      */
-    /* package */ static CachedRuleViolation loadFromStream(final DataInputStream stream,
-            final String filePathId, final CachedRuleMapper mapper) throws IOException {
-        final String description = stream.readUTF();
-        final String ruleClassName = stream.readUTF();
-        final String ruleName = stream.readUTF();
-        final String ruleTargetLanguage = stream.readUTF();
-        final int beginLine = stream.readInt();
-        final int beginColumn = stream.readInt();
-        final int endLine = stream.readInt();
-        final int endColumn = stream.readInt();
-        final Map<String, String> additionalInfo = readAdditionalInfo(stream);
+    /* package */
+    static CachedRuleViolation loadFromStream(
+        DataInputStream stream,
+        PathId filePathId, CachedRuleMapper mapper) throws IOException {
+
+        String description = stream.readUTF();
+        String ruleClassName = stream.readUTF();
+        String ruleName = stream.readUTF();
+        String ruleTargetLanguage = stream.readUTF();
+        int beginLine = stream.readInt();
+        int beginColumn = stream.readInt();
+        int endLine = stream.readInt();
+        int endColumn = stream.readInt();
+        Map<String, String> additionalInfo = readAdditionalInfo(stream);
         return new CachedRuleViolation(mapper, description, filePathId, ruleClassName, ruleName, ruleTargetLanguage,
             beginLine, beginColumn, endLine, endColumn, additionalInfo);
     }
@@ -15,6 +15,8 @@ import java.nio.file.Files;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
+import java.util.Set;
+import java.util.stream.Collectors;
 
 import net.sourceforge.pmd.PMDVersion;
 import net.sourceforge.pmd.RuleSets;
@@ -23,6 +25,8 @@ import net.sourceforge.pmd.annotation.InternalApi;
 import net.sourceforge.pmd.benchmark.TimeTracker;
 import net.sourceforge.pmd.benchmark.TimedOperation;
 import net.sourceforge.pmd.benchmark.TimedOperationCategory;
+import net.sourceforge.pmd.lang.document.PathId;
+import net.sourceforge.pmd.lang.document.TextFile;
 
 /**
  * An analysis cache backed by a regular file.
@@ -45,17 +49,22 @@ public class FileAnalysisCache extends AbstractAnalysisCache {
     }
 
     @Override
-    public void checkValidity(RuleSets ruleSets, ClassLoader auxclassPathClassLoader) {
+    public void checkValidity(RuleSets ruleSets, ClassLoader auxclassPathClassLoader, Set<TextFile> files) {
         // load cached data before checking for validity
-        loadFromFile(cacheFile);
-        super.checkValidity(ruleSets, auxclassPathClassLoader);
+        loadFromFile(cacheFile, files);
+        super.checkValidity(ruleSets, auxclassPathClassLoader, files);
     }
 
     /**
      * Loads cache data from the given file.
      *
      * @param cacheFile The file which backs the file analysis cache.
      */
-    private void loadFromFile(final File cacheFile) {
+    private void loadFromFile(final File cacheFile, Set<TextFile> files) {
+        Map<String, PathId> idMap =
+            files.stream().map(TextFile::getPathId)
+                 .collect(Collectors.toMap(PathId::toUriString, id -> id));
+
         try (TimedOperation ignored = TimeTracker.startOperation(TimedOperationCategory.ANALYSIS_CACHE, "load")) {
             if (cacheExists()) {
                 try (
@@ -75,15 +84,21 @@ public class FileAnalysisCache extends AbstractAnalysisCache {
                     // Cached results
                     while (inputStream.available() > 0) {
                         final String filePathId = inputStream.readUTF();
+                        PathId pathId = idMap.get(filePathId);
+                        if (pathId == null) {
+                            LOG.debug("File {} is in the cache but is not part of the analysis",
+                                      filePathId);
+                            pathId = PathId.fromPathLikeString(filePathId);
+                        }
                         final long checksum = inputStream.readLong();
 
                         final int countViolations = inputStream.readInt();
                         final List<RuleViolation> violations = new ArrayList<>(countViolations);
                         for (int i = 0; i < countViolations; i++) {
-                            violations.add(CachedRuleViolation.loadFromStream(inputStream, filePathId, ruleMapper));
+                            violations.add(CachedRuleViolation.loadFromStream(inputStream, pathId, ruleMapper));
                        }
 
-                        fileResultsCache.put(filePathId, new AnalysisResult(checksum, violations));
+                        fileResultsCache.put(pathId, new AnalysisResult(checksum, violations));
                     }
 
                     LOG.debug("Analysis cache loaded from {}", cacheFile);
@@ -129,10 +144,10 @@ public class FileAnalysisCache extends AbstractAnalysisCache {
             outputStream.writeLong(auxClassPathChecksum);
             outputStream.writeLong(executionClassPathChecksum);
 
-            for (final Map.Entry<String, AnalysisResult> resultEntry : updatedResultsCache.entrySet()) {
+            for (final Map.Entry<PathId, AnalysisResult> resultEntry : updatedResultsCache.entrySet()) {
                 final List<RuleViolation> violations = resultEntry.getValue().getViolations();
 
-                outputStream.writeUTF(resultEntry.getKey()); // the path id
+                outputStream.writeUTF(resultEntry.getKey().toUriString()); // the path id
                 outputStream.writeLong(resultEntry.getValue().getFileChecksum());
 
                 outputStream.writeInt(violations.size());
@@ -6,11 +6,13 @@ package net.sourceforge.pmd.cache;
 
 import java.util.Collections;
 import java.util.List;
+import java.util.Set;
 
 import net.sourceforge.pmd.RuleSets;
 import net.sourceforge.pmd.RuleViolation;
 import net.sourceforge.pmd.annotation.InternalApi;
 import net.sourceforge.pmd.lang.document.TextDocument;
+import net.sourceforge.pmd.lang.document.TextFile;
 import net.sourceforge.pmd.reporting.FileAnalysisListener;
 
 /**
@@ -38,7 +40,7 @@ public class NoopAnalysisCache implements AnalysisCache {
     }
 
     @Override
-    public void checkValidity(final RuleSets ruleSets, final ClassLoader classLoader) {
+    public void checkValidity(final RuleSets ruleSets, final ClassLoader classLoader, Set<TextFile> files) {
         // noop
     }
 
@@ -78,7 +78,7 @@ public class TokenEntry implements Comparable<TokenEntry> {
     }
 
     public TokenEntry(String image, FileLocation location) {
-        this(image, location.getFileName(), location.getStartLine(), location.getStartColumn(), location.getEndColumn());
+        this(image, location.getFileName().getNiceFileName(), location.getStartLine(), location.getStartColumn(), location.getEndColumn());
     }
 
     private boolean isOk(int coord) {
@@ -32,7 +32,7 @@ public abstract class AntlrTokenizer implements Tokenizer {
     public void tokenize(final SourceCode sourceCode, final Tokens tokenEntries) {
         try (TextDocument textDoc = TextDocument.create(CpdCompat.cpdCompat(sourceCode))) {
 
-            CharStream charStream = CharStreams.fromString(textDoc.getText().toString(), textDoc.getDisplayName());
+            CharStream charStream = CharStreams.fromString(textDoc.getText().toString(), textDoc.getPathId().getNiceFileName());
 
             final AntlrTokenManager tokenManager = new AntlrTokenManager(getLexerForSource(charStream), textDoc);
             final AntlrTokenFilter tokenFilter = getTokenFilter(tokenManager);
@@ -19,6 +19,7 @@ import net.sourceforge.pmd.lang.ast.impl.javacc.JavaccToken;
 import net.sourceforge.pmd.lang.ast.impl.javacc.JavaccTokenDocument.TokenDocumentBehavior;
 import net.sourceforge.pmd.lang.document.CpdCompat;
 import net.sourceforge.pmd.lang.document.TextDocument;
+import net.sourceforge.pmd.lang.document.TextFile;
 
 public abstract class JavaCCTokenizer implements Tokenizer {
 
@@ -47,7 +48,8 @@ public abstract class JavaCCTokenizer implements Tokenizer {
 
     @Override
     public void tokenize(SourceCode sourceCode, Tokens tokenEntries) throws IOException {
-        try (TextDocument textDoc = TextDocument.create(CpdCompat.cpdCompat(sourceCode))) {
+        TextFile textFile = CpdCompat.cpdCompat(sourceCode);
+        try (TextDocument textDoc = TextDocument.create(textFile)) {
             TokenManager<JavaccToken> tokenManager = getLexerForSource(textDoc);
             final TokenFilter<JavaccToken> tokenFilter = getTokenFilter(tokenManager);
             JavaccToken currentToken = tokenFilter.getNextToken();
@@ -56,7 +58,7 @@ public abstract class JavaCCTokenizer implements Tokenizer {
                 currentToken = tokenFilter.getNextToken();
             }
         } catch (FileAnalysisException e) {
-            throw e.setFileName(sourceCode.getFileName());
+            throw e.setFileName(textFile.getPathId());
         } finally {
             tokenEntries.add(TokenEntry.getEOF());
         }
@@ -17,6 +17,7 @@ import org.slf4j.LoggerFactory;
 
 import net.sourceforge.pmd.PMDConfiguration;
 import net.sourceforge.pmd.lang.document.FileCollector;
+import net.sourceforge.pmd.lang.document.PathId;
 import net.sourceforge.pmd.util.database.DBMSMetadata;
 import net.sourceforge.pmd.util.database.DBURI;
 import net.sourceforge.pmd.util.database.SourceObject;
@@ -126,7 +127,7 @@ public final class FileCollectionUtil {
 
         try (Reader sourceCode = dbmsMetadata.getSourceCode(sourceObject)) {
             String source = IOUtil.readToString(sourceCode);
-            collector.addSourceFile(source, falseFilePath);
+            collector.addSourceFile(PathId.fromPathLikeString(falseFilePath), source);
         } catch (SQLException ex) {
             collector.getReporter().warnEx("Cannot get SourceCode for {} - skipping ...",
                     new Object[] { falseFilePath },
@@ -9,7 +9,7 @@ import java.util.Objects;
 import org.apache.commons.lang3.StringUtils;
 import org.checkerframework.checker.nullness.qual.NonNull;
 
-import net.sourceforge.pmd.lang.document.TextFile;
+import net.sourceforge.pmd.lang.document.PathId;
 
 /**
  * An exception that occurs while processing a file. Subtypes include
@@ -22,7 +22,7 @@ import net.sourceforge.pmd.lang.document.TextFile;
  */
 public class FileAnalysisException extends RuntimeException {
 
-    private String filename = TextFile.UNKNOWN_FILENAME;
+    private PathId filename = PathId.UNKNOWN;
 
     public FileAnalysisException() {
         super();
@@ -40,19 +40,19 @@ public class FileAnalysisException extends RuntimeException {
         super(message, cause);
     }
 
-    public FileAnalysisException setFileName(String filename) {
+    public FileAnalysisException setFileName(PathId filename) {
         this.filename = Objects.requireNonNull(filename);
         return this;
     }
 
     protected boolean hasFileName() {
-        return !TextFile.UNKNOWN_FILENAME.equals(filename);
+        return !PathId.UNKNOWN.equals(filename);
     }
 
     /**
      * The name of the file in which the error occurred.
      */
-    public @NonNull String getFileName() {
+    public @NonNull PathId getFileName() {
         return filename;
     }
 
@@ -83,7 +83,7 @@ public class FileAnalysisException extends RuntimeException {
      *
      * @return An exception
      */
-    public static FileAnalysisException wrap(@NonNull String filename, @NonNull String message, @NonNull Throwable cause) {
+    public static FileAnalysisException wrap(@NonNull PathId filename, @NonNull String message, @NonNull Throwable cause) {
         if (cause instanceof FileAnalysisException) {
             return ((FileAnalysisException) cause).setFileName(filename);
         }
@@ -59,7 +59,7 @@ public interface Parser {
          * not be interpreted, it may not be a file-system path.
          */
         public String getFileDisplayName() {
-            return textDoc.getDisplayName();
+            return textDoc.getPathId().getNiceFileName();
         }
 
         /**
@@ -7,6 +7,7 @@ package net.sourceforge.pmd.lang.ast;
 import org.checkerframework.checker.nullness.qual.Nullable;
 
 import net.sourceforge.pmd.annotation.InternalApi;
+import net.sourceforge.pmd.lang.document.PathId;
 import net.sourceforge.pmd.util.StringUtil;
 
 /**
@@ -26,7 +27,7 @@ public final class TokenMgrError extends FileAnalysisException {
     * @param message Message of the error
     * @param cause   Cause of the error, if any
     */
-    public TokenMgrError(int line, int column, @Nullable String filename, String message, @Nullable Throwable cause) {
+    public TokenMgrError(int line, int column, @Nullable PathId filename, String message, @Nullable Throwable cause) {
        super(message, cause);
        this.line = line;
        this.column = column;
@@ -71,7 +72,7 @@ public final class TokenMgrError extends FileAnalysisException {
     * @return A new exception
     */
    @Override
-   public TokenMgrError setFileName(String filename) {
+   public TokenMgrError setFileName(PathId filename) {
        super.setFileName(filename);
        return this;
    }
@@ -63,7 +63,7 @@ public class AntlrTokenManager implements TokenManager<AntlrToken> {
                               final int charPositionInLine,
                               final String msg,
                               final RecognitionException ex) {
-            throw new TokenMgrError(line, charPositionInLine, textDoc.getDisplayName(), msg, ex);
+            throw new TokenMgrError(line, charPositionInLine, textDoc.getPathId(), msg, ex);
         }
     }
 
@@ -35,7 +35,7 @@ public abstract class JjtreeParserAdapter<R extends RootNode> implements Parser
             // Finally, do the parsing
             return parseImpl(charStream, task);
         } catch (FileAnalysisException tme) {
-            throw tme.setFileName(task.getFileDisplayName());
+            throw tme.setFileName(task.getTextDocument().getPathId());
         }
     }
 
@@ -26,15 +26,10 @@ abstract class BaseMappedDocument implements TextDocument {
     }
 
     @Override
-    public String getPathId() {
+    public PathId getPathId() {
         return base.getPathId();
     }
 
-    @Override
-    public String getDisplayName() {
-        return base.getDisplayName();
-    }
-
     @Override
     public Chars sliceOriginalText(TextRegion region) {
         return base.sliceOriginalText(inputRegion(region));
@@ -33,7 +33,7 @@ public final class CpdCompat {
     public static TextFile cpdCompat(SourceCode sourceCode) {
         return TextFile.forCharSeq(
             sourceCode.getCodeBuffer(),
-            sourceCode.getFileName(),
+            PathId.fromPathLikeString("fname1.dummy"),
             dummyVersion()
         );
     }
@@ -155,7 +155,6 @@ public final class FileCollector implements AutoCloseable {
         LanguageVersion languageVersion = discoverLanguage(file.toString());
         return languageVersion != null
             && addFileImpl(TextFile.builderForPath(file, charset, languageVersion)
-                                   .withDisplayName(getDisplayName(file))
                                    .build());
     }
 
@@ -178,7 +177,6 @@ public final class FileCollector implements AutoCloseable {
         LanguageVersion lv = discoverer.getDefaultLanguageVersion(language);
         Objects.requireNonNull(lv);
         return addFileImpl(TextFile.builderForPath(file, charset, lv)
-                                   .withDisplayName(getDisplayName(file))
                                    .build());
     }
 
@@ -200,14 +198,13 @@ public final class FileCollector implements AutoCloseable {
      *
      * @return True if the file has been added
      */
-    public boolean addSourceFile(String pathId, String sourceContents) {
+    public boolean addSourceFile(PathId pathId, String sourceContents) {
         AssertionUtil.requireParamNotNull("sourceContents", sourceContents);
         AssertionUtil.requireParamNotNull("pathId", pathId);
 
-        LanguageVersion version = discoverLanguage(pathId);
+        LanguageVersion version = discoverLanguage(pathId.getFileName());
         return version != null
             && addFileImpl(TextFile.builderForCharSeq(sourceContents, pathId, version)
-                                   .withDisplayName(pathId)
                                    .build());
     }
 
@@ -41,14 +41,14 @@ public final class FileLocation {
     private final int endLine;
     private final int beginColumn;
     private final int endColumn;
-    private final String fileName;
+    private final PathId fileName;
     private final @Nullable TextRegion region;
 
-    FileLocation(String fileName, int beginLine, int beginColumn, int endLine, int endColumn) {
+    FileLocation(PathId fileName, int beginLine, int beginColumn, int endLine, int endColumn) {
         this(fileName, beginLine, beginColumn, endLine, endColumn, null);
     }
 
-    FileLocation(String fileName, int beginLine, int beginColumn, int endLine, int endColumn, @Nullable TextRegion region) {
+    FileLocation(PathId fileName, int beginLine, int beginColumn, int endLine, int endColumn, @Nullable TextRegion region) {
         this.fileName = Objects.requireNonNull(fileName);
         this.beginLine = AssertionUtil.requireOver1("Begin line", beginLine);
         this.endLine = AssertionUtil.requireOver1("End line", endLine);
@@ -71,7 +71,7 @@ public final class FileLocation {
      * File name of this position. This is a display name, it shouldn't
     * be parsed as a Path.
     */
-    public String getFileName() {
+    public PathId getFileName() {
        return fileName;
    }
 
@@ -145,7 +145,7 @@ public final class FileLocation {
     * @throws IllegalArgumentException If the line and column are not correctly ordered
     * @throws IllegalArgumentException If the start offset or length are negative
     */
-    public static FileLocation range(String fileName, TextRange2d range2d) {
+    public static FileLocation range(PathId fileName, TextRange2d range2d) {
        TextPos2d start = range2d.getStartPos();
        TextPos2d end = range2d.getEndPos();
        return new FileLocation(fileName,
@@ -164,9 +164,9 @@ public final class FileLocation {
     *
     * @return A new location
     *
-    * @throws IllegalArgumentException See {@link #range(String, int, int, int, int)}
+    * @throws IllegalArgumentException See {@link #range(PathId, TextRange2d)}
     */
-    public static FileLocation caret(String fileName, int line, int column) {
+    public static FileLocation caret(PathId fileName, int line, int column) {
        return new FileLocation(fileName, line, column, line, column);
    }
 
@@ -25,28 +25,24 @@ class NioTextFile extends BaseCloseable implements TextFile {
     private final Path path;
     private final Charset charset;
     private final LanguageVersion languageVersion;
-    private final String displayName;
-    private final String pathId;
+    private final PathId pathId;
     private boolean readOnly;
 
     NioTextFile(Path path,
                 Charset charset,
                 LanguageVersion languageVersion,
-                String displayName,
                 boolean readOnly) {
         AssertionUtil.requireParamNotNull("path", path);
         AssertionUtil.requireParamNotNull("charset", charset);
         AssertionUtil.requireParamNotNull("language version", languageVersion);
-        AssertionUtil.requireParamNotNull("display name", displayName);
 
-        this.displayName = displayName;
         this.readOnly = readOnly;
         this.path = path;
         this.charset = charset;
         this.languageVersion = languageVersion;
         // using the URI here, that handles files inside zip archives automatically (schema "jar:file:...!/path/inside/zip")
         // normalization ensures cannonical paths
-        this.pathId = path.normalize().toUri().toString();
+        this.pathId = PathId.fromPath(path);
     }
 
     @Override
@@ -55,12 +51,7 @@ class NioTextFile extends BaseCloseable implements TextFile {
     }
 
-    @Override
-    public @NonNull String getDisplayName() {
-        return displayName;
-    }
-
     @Override
-    public String getPathId() {
+    public PathId getPathId() {
         return pathId;
     }
 
@@ -0,0 +1,108 @@
+/*
+ * BSD-style license; for more info see http://pmd.sourceforge.net/license.html
+ */
+
+package net.sourceforge.pmd.lang.document;
+
+import java.nio.file.Path;
+
+/**
+ * A virtual path for a {@link TextFile}.
+ *
+ * @author Clément Fournier
+ */
+public interface PathId extends Comparable<PathId> {
+
+    /**
+     * The name used for a file that has no name. This is mostly only
+     * relevant for unit tests.
+     */
+    PathId UNKNOWN = fromPathLikeString("(unknown file)");
+
+    String toUriString();
+
+    String getFileName();
+
+    String getNiceFileName();
+
+    @Override
+    boolean equals(Object o);
+
+    @Override
+    default int compareTo(PathId o) {
+        return this.toUriString().compareTo(o.toUriString());
+    }
+
+    PathId STDIN = new PathId() {
+        @Override
+        public String toUriString() {
+            return "stdin";
+        }
+
+        @Override
+        public String getFileName() {
+            return "stdin";
+        }
+
+        @Override
+        public String getNiceFileName() {
+            return "stdin";
+        }
+    };
+
+    static PathId fromPathLikeString(String str) {
+        String[] segments = str.split("[/\\\\]");
+        if (segments.length == 0) {
+            throw new IllegalArgumentException("Invalid path id: '" + str + "'");
+        }
+        String fname = segments[segments.length - 1];
+        return new PathId() {
+            @Override
+            public String toUriString() {
+                return str;
+            }
+
+            @Override
+            public String getFileName() {
+                return fname;
+            }
+
+            @Override
+            public String getNiceFileName() {
+                return str;
+            }
+
+            @Override
+            public boolean equals(Object obj) {
+                return obj instanceof PathId
+                    && ((PathId) obj).toUriString().equals(this.toUriString());
+            }
+        };
+    }
+
+    static PathId fromPath(Path path) {
+        return new PathId() {
+            @Override
+            public String toUriString() {
+                return path.normalize().toUri().toString();
+            }
+
+            @Override
+            public String getFileName() {
+                return path.getFileName().toString();
+            }
+
+            @Override
+            public String getNiceFileName() {
+                return path.toString();
+            }
+
+            @Override
+            public boolean equals(Object obj) {
+                return obj instanceof PathId
+                    && ((PathId) obj).toUriString().equals(this.toUriString());
+            }
+        };
+    }
+}
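A note on the identity semantics visible in the new file above: both anonymous implementations compare only toUriString() in equals, and the default compareTo uses the same string, so equality and ordering are determined purely by the URI form. A small sketch with made-up paths and an invented class name, assuming only the methods shown above:

    import net.sourceforge.pmd.lang.document.PathId;

    class PathIdEqualitySketch {
        static void demo() {
            PathId a = PathId.fromPathLikeString("dir/File.java");
            PathId b = PathId.fromPathLikeString("dir/File.java");
            PathId c = PathId.fromPathLikeString("dir\\File.java");

            assert a.equals(b);          // identical toUriString()
            assert !a.equals(c);         // separators are not normalized by fromPathLikeString
            assert a.compareTo(b) == 0;  // ordering also follows toUriString()
        }
    }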