forked from phoedos/pmd
Merge branch 'pmd7-textfile-display-name' into clem.pmd7-refactor-cpd
commit 344b2cc17b
@@ -246,6 +246,11 @@
            <fileset file="${parser-file}" />
        </replaceregexp>

        <replaceregexp flags="g">
            <regexp pattern="throw new ParseException\(\);" />
            <substitution expression='throw net.sourceforge.pmd.util.AssertionUtil.shouldNotReachHere("consumetoken(-1) should have thrown");' />
            <fileset file="${parser-file}" />
        </replaceregexp>

        <replaceregexp>
            <regexp pattern="public interface"/>

@@ -32,7 +32,9 @@ import net.sourceforge.pmd.lang.document.TextFile;
import net.sourceforge.pmd.renderers.Renderer;
import net.sourceforge.pmd.renderers.RendererFactory;
import net.sourceforge.pmd.reporting.FileAnalysisListener;
import net.sourceforge.pmd.reporting.FileNameRenderer;
import net.sourceforge.pmd.reporting.GlobalAnalysisListener;
import net.sourceforge.pmd.reporting.ListenerInitializer;

@InternalApi
public class Formatter {
@@ -253,6 +255,16 @@ public class Formatter {
        return new GlobalAnalysisListener() {
            final GlobalAnalysisListener listener = renderer.newListener();

            @Override
            public ListenerInitializer initializer() {
                return new ListenerInitializer() {
                    @Override
                    public void setFileNameRenderer(FileNameRenderer fileNameRenderer) {
                        renderer.setFileNameRenderer(fileNameRenderer);
                    }
                };
            }

            @Override
            public FileAnalysisListener startFileAnalysis(TextFile file) {
                return listener.startFileAnalysis(file);

@@ -121,6 +121,7 @@ public class PMDTaskImpl {
            }
        }

        @SuppressWarnings("PMD.CloseResource")
        ReportStatsListener reportStatsListener = new ReportStatsListener();
        pmd.addListener(getListener(reportStatsListener));
@@ -178,7 +179,7 @@ public class PMDTaskImpl {

            @Override
            public FileAnalysisListener startFileAnalysis(TextFile dataSource) {
                String name = dataSource.getDisplayName();
                String name = dataSource.getFileId().toUriString();
                project.log("Processing file " + name, Project.MSG_VERBOSE);
                return FileAnalysisListener.noop();
            }
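The PMDTaskImpl change above shows the pattern that recurs throughout this branch: display-name strings are replaced by FileId, and a string form is derived only where one is still needed. A minimal illustrative sketch (not part of the commit), assuming only the FileId factory and accessors that appear in this diff (fromPathLikeString, toUriString, toAbsolutePath, getFileName):

import net.sourceforge.pmd.lang.document.FileId;

public class FileIdSketch {
    public static void main(String[] args) {
        // hypothetical example path; FileId is the file identifier type used throughout this diff
        FileId id = FileId.fromPathLikeString("src/Foo.cls");
        System.out.println(id.toUriString());    // URI-like string form, as logged by PMDTaskImpl above
        System.out.println(id.toAbsolutePath()); // absolute path string, as used by RuleSet.applies below
        System.out.println(id.getFileName());    // simple file name, as used by the CPD GUI label code below
    }
}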

@@ -17,6 +17,7 @@ import java.nio.file.Files;
import java.nio.file.Paths;

import org.apache.tools.ant.BuildException;
import org.checkerframework.checker.nullness.qual.NonNull;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;

@@ -87,15 +88,17 @@ class PMDTaskTest extends AbstractAntTest {

        try (InputStream in = Files.newInputStream(Paths.get("target/pmd-ant-xml.xml"));
             InputStream expectedStream = PMDTaskTest.class.getResourceAsStream("xml/expected-pmd-ant-xml.xml")) {
            String actual = IOUtil.readToString(in, StandardCharsets.UTF_8);
            actual = actual.replaceFirst("timestamp=\"[^\"]+\"", "timestamp=\"\"");
            actual = actual.replaceFirst("\\.xsd\" version=\"[^\"]+\"", ".xsd\" version=\"\"");

            String expected = IOUtil.readToString(expectedStream, StandardCharsets.UTF_8);
            expected = expected.replaceFirst("timestamp=\"[^\"]+\"", "timestamp=\"\"");
            expected = expected.replaceFirst("\\.xsd\" version=\"[^\"]+\"", ".xsd\" version=\"\"");
            String actual = readAndNormalize(in);
            String expected = readAndNormalize(expectedStream);

            assertEquals(expected, actual);
        }
    }

    private static @NonNull String readAndNormalize(InputStream expectedStream) throws IOException {
        String expected = IOUtil.readToString(expectedStream, StandardCharsets.UTF_8);
        expected = expected.replaceFirst("timestamp=\"[^\"]+\"", "timestamp=\"\"");
        expected = expected.replaceFirst("\\.xsd\" version=\"[^\"]+\"", ".xsd\" version=\"\"");
        return expected;
    }
}
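The readAndNormalize helper extracted above blanks out the timestamp and version attributes so the report XML can be compared against a fixture. A small self-contained illustration of those two replacements (plain JDK; the sample input string is made up):

public class NormalizeSketch {
    static String normalize(String xml) {
        // same two replacements as readAndNormalize in the test above
        xml = xml.replaceFirst("timestamp=\"[^\"]+\"", "timestamp=\"\"");
        xml = xml.replaceFirst("\\.xsd\" version=\"[^\"]+\"", ".xsd\" version=\"\"");
        return xml;
    }

    public static void main(String[] args) {
        String sample = "<pmd xsi:schemaLocation=\"report_2_0_0.xsd\" version=\"7.0.0\" timestamp=\"2024-01-01T00:00:00\">";
        System.out.println(normalize(sample));
        // prints: <pmd xsi:schemaLocation="report_2_0_0.xsd" version="" timestamp="">
    }
}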

@@ -4,8 +4,6 @@

package net.sourceforge.pmd.lang.apex.ast;

import java.net.URI;
import java.nio.file.Paths;
import java.util.List;
import java.util.Map;

@@ -16,6 +14,7 @@ import net.sourceforge.pmd.lang.apex.multifile.ApexMultifileAnalysis;
import net.sourceforge.pmd.lang.ast.AstInfo;
import net.sourceforge.pmd.lang.ast.Parser.ParserTask;
import net.sourceforge.pmd.lang.ast.RootNode;
import net.sourceforge.pmd.lang.document.FileId;
import net.sourceforge.pmd.lang.document.TextRegion;

import apex.jorje.semantic.ast.AstNode;
@@ -63,11 +62,7 @@ public final class ASTApexFile extends AbstractApexNode<AstNode> implements Root
    }

    public List<Issue> getGlobalIssues() {
        String filename = getAstInfo().getTextDocument().getPathId();
        if (filename.length() > 7 && "file://".equalsIgnoreCase(filename.substring(0, 7))) {
            URI uri = URI.create(filename);
            filename = Paths.get(uri).toString();
        }
        return multifileAnalysis.getFileIssues(filename);
        FileId fileId = getAstInfo().getTextDocument().getFileId();
        return multifileAnalysis.getFileIssues(fileId.toAbsolutePath());
    }
}

@@ -9,6 +9,7 @@ import net.sourceforge.pmd.lang.apex.ApexJorjeLogging;
import net.sourceforge.pmd.lang.apex.ApexLanguageProcessor;
import net.sourceforge.pmd.lang.ast.ParseException;
import net.sourceforge.pmd.lang.ast.Parser;
import net.sourceforge.pmd.lang.document.FileLocation;

import apex.jorje.data.Locations;
import apex.jorje.semantic.ast.compilation.Compilation;
@@ -32,7 +33,8 @@ public final class ApexParser implements Parser {
            final ApexTreeBuilder treeBuilder = new ApexTreeBuilder(task, (ApexLanguageProcessor) task.getLanguageProcessor());
            return treeBuilder.buildTree(astRoot);
        } catch (apex.jorje.services.exception.ParseException e) {
            throw new ParseException(e).setFileName(task.getFileDisplayName());
            FileLocation loc = FileLocation.caret(task.getTextDocument().getFileId(), e.getLoc().getLine(), e.getLoc().getColumn());
            throw new ParseException(e).withLocation(loc);
        }
    }
}
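The ApexParser change above attaches a precise source position to the ParseException instead of only a file name. A sketch of that pattern, using just the two calls that appear in the diff (FileLocation.caret and withLocation); the helper name and parameters here are illustrative:

import net.sourceforge.pmd.lang.ast.ParseException;
import net.sourceforge.pmd.lang.document.FileId;
import net.sourceforge.pmd.lang.document.FileLocation;

public final class ParseErrorSketch {
    private ParseErrorSketch() { }

    public static void fail(Throwable cause, FileId fileId, int line, int column) {
        // caret() marks a single position; withLocation() attaches it to the exception
        FileLocation loc = FileLocation.caret(fileId, line, column);
        throw new ParseException(cause).withLocation(loc);
    }
}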

@@ -63,7 +63,7 @@ class CompilerService {
    public Compilation parseApex(TextDocument document) {
        SourceFile sourceFile = SourceFile.builder()
                .setBody(document.getText().toString())
                .setKnownName(document.getDisplayName())
                .setKnownName(document.getFileId().toUriString())
                .build();
        ApexCompiler compiler = ApexCompiler.builder().setInput(createCompilationInput(Collections.singletonList(sourceFile))).build();
        compiler.compile(CompilerStage.POST_TYPE_RESOLVE);

@@ -15,7 +15,7 @@ class ASTSwitchStatementTest extends ApexParserTestBase {

    @Test
    void testExamples() {
        ApexNode<?> node = parseResource("SwitchStatements.cls");
        ApexNode<?> node = apex.parseResource("SwitchStatements.cls").getMainNode();
        List<ASTSwitchStatement> switchStatements = node.findDescendantsOfType(ASTSwitchStatement.class);
        assertEquals(4, switchStatements.size());

@@ -45,14 +45,6 @@ class ApexParserTest extends ApexParserTestBase {
        assertEquals(4, methods.size());
    }

    @Test
    void fileName() {
        String code = "class Outer { class Inner {}}";

        ASTUserClass rootNode = (ASTUserClass) parse(code, "src/filename.cls");

        assertEquals("src/filename.cls", rootNode.getTextDocument().getDisplayName());
    }

    private final String testCodeForLineNumbers =
        "public class SimpleClass {\n" // line 1

@@ -13,11 +13,4 @@ public class ApexParserTestBase {
        return apex.parse(code).getMainNode();
    }

    protected ASTUserClassOrInterface<?> parse(String code, String fileName) {
        return apex.parse(code, null, fileName).getMainNode();
    }

    protected ASTUserClassOrInterface<?> parseResource(String code) {
        return apex.parseResource(code).getMainNode();
    }
}

@@ -10,6 +10,7 @@ import org.junit.jupiter.api.Test;

import net.sourceforge.pmd.Report;
import net.sourceforge.pmd.lang.apex.ast.ApexParserTestBase;
import net.sourceforge.pmd.lang.document.FileId;
import net.sourceforge.pmd.lang.rule.XPathRule;

/**
@@ -26,7 +27,7 @@ class ApexXPathRuleTest extends ApexParserTestBase {
    void testFileNameInXpath() {
        Report report = apex.executeRule(makeXPath("/UserClass[pmd:fileName() = 'Foo.cls']"),
                                         "class Foo {}",
                                         "src/Foo.cls");
                                         FileId.fromPathLikeString("src/Foo.cls"));

        assertSize(report, 1);
    }

@@ -43,6 +43,7 @@ import net.sourceforge.pmd.lang.LanguageVersionDiscoverer;
import net.sourceforge.pmd.lang.document.FileCollector;
import net.sourceforge.pmd.lang.document.TextFile;
import net.sourceforge.pmd.renderers.Renderer;
import net.sourceforge.pmd.reporting.ConfigurableFileNameRenderer;
import net.sourceforge.pmd.reporting.GlobalAnalysisListener;
import net.sourceforge.pmd.reporting.ListenerInitializer;
import net.sourceforge.pmd.reporting.ReportStats;
@@ -94,6 +95,7 @@ public final class PmdAnalysis implements AutoCloseable {

    private final Map<Language, LanguagePropertyBundle> langProperties = new HashMap<>();
    private boolean closed;
    private final ConfigurableFileNameRenderer fileNameRenderer = new ConfigurableFileNameRenderer();

    /**
     * Constructs a new instance. The files paths (input files, filelist,
@@ -109,9 +111,6 @@ public final class PmdAnalysis implements AutoCloseable {
            reporter
        );

        for (Path path : config.getRelativizeRoots()) {
            this.collector.relativizeWith(path);
        }
    }

    /**
@@ -165,6 +164,10 @@ public final class PmdAnalysis implements AutoCloseable {
            }
        }

        for (Path path : config.getRelativizeRoots()) {
            pmd.fileNameRenderer.relativizeWith(path);
        }

        return pmd;
    }

@@ -283,6 +286,11 @@ public final class PmdAnalysis implements AutoCloseable {
        return langProperties.computeIfAbsent(language, Language::newPropertyBundle);
    }

    public ConfigurableFileNameRenderer fileNameRenderer() {
        return fileNameRenderer;
    }

    /**
     * Run PMD with the current state of this instance. This will start
     * and finish the registered renderers, and close all
@@ -324,7 +332,10 @@ public final class PmdAnalysis implements AutoCloseable {
        GlobalAnalysisListener listener;
        try {
            @SuppressWarnings("PMD.CloseResource")
            AnalysisCacheListener cacheListener = new AnalysisCacheListener(configuration.getAnalysisCache(), rulesets, configuration.getClassLoader());
            AnalysisCacheListener cacheListener = new AnalysisCacheListener(configuration.getAnalysisCache(),
                                                                            rulesets,
                                                                            configuration.getClassLoader(),
                                                                            textFiles);
            listener = GlobalAnalysisListener.tee(listOf(createComposedRendererListener(renderers),
                                                         GlobalAnalysisListener.tee(listeners),
                                                         GlobalAnalysisListener.tee(extraListeners),
@@ -333,6 +344,7 @@ public final class PmdAnalysis implements AutoCloseable {
            // Initialize listeners
            try (ListenerInitializer initializer = listener.initializer()) {
                initializer.setNumberOfFilesToAnalyze(textFiles.size());
                initializer.setFileNameRenderer(fileNameRenderer());
            }
        } catch (Exception e) {
            reporter.errorEx("Exception while initializing analysis listeners", e);
@@ -395,7 +407,7 @@ public final class PmdAnalysis implements AutoCloseable {
    }

    private static GlobalAnalysisListener createComposedRendererListener(List<Renderer> renderers) throws Exception {
    private GlobalAnalysisListener createComposedRendererListener(List<Renderer> renderers) throws Exception {
        if (renderers.isEmpty()) {
            return GlobalAnalysisListener.noop();
        }
@@ -551,4 +563,5 @@ public final class PmdAnalysis implements AutoCloseable {
            + "https://pmd.github.io/{0}/pmd_userdocs_incremental_analysis.html", version);
    }
}

}
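With the PmdAnalysis changes above, relativize roots are applied to the new ConfigurableFileNameRenderer rather than to the FileCollector, and that renderer is handed to listeners through ListenerInitializer. A hedged sketch of the caller side; it assumes the usual PmdAnalysis.create entry point, while fileNameRenderer() and relativizeWith are the only calls taken from this diff:

import java.nio.file.Paths;

import net.sourceforge.pmd.PMDConfiguration;
import net.sourceforge.pmd.PmdAnalysis;

public class RelativizeSketch {
    public static void main(String[] args) {
        PMDConfiguration config = new PMDConfiguration();
        try (PmdAnalysis pmd = PmdAnalysis.create(config)) {
            // violation file names are then rendered relative to this root instead of as absolute paths
            pmd.fileNameRenderer().relativizeWith(Paths.get("src/main/java"));
            // ... register rulesets and files here, then run the analysis
        }
    }
}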

@@ -18,6 +18,7 @@ import java.util.function.Predicate;
import net.sourceforge.pmd.annotation.DeprecatedUntil700;
import net.sourceforge.pmd.annotation.Experimental;
import net.sourceforge.pmd.annotation.InternalApi;
import net.sourceforge.pmd.lang.document.FileId;
import net.sourceforge.pmd.lang.document.TextFile;
import net.sourceforge.pmd.renderers.AbstractAccumulatingRenderer;
import net.sourceforge.pmd.reporting.FileAnalysisListener;
@@ -102,7 +103,7 @@ public final class Report {
    public static class ProcessingError {

        private final Throwable error;
        private final String file;
        private final FileId file;

        /**
         * Creates a new processing error
@@ -112,7 +113,7 @@ public final class Report {
         * @param file
         *            the file during which the error occurred
         */
        public ProcessingError(Throwable error, String file) {
        public ProcessingError(Throwable error, FileId file) {
            this.error = error;
            this.file = file;
        }
@@ -132,7 +133,7 @@ public final class Report {
            }
        }

        public String getFile() {
        public FileId getFileId() {
            return file;
        }

@@ -141,7 +141,7 @@ public final class RuleContext {

        FileLocation location = node.getReportLocation();
        if (beginLine != -1 && endLine != -1) {
            location = FileLocation.range(location.getFileName(), TextRange2d.range2d(beginLine, 1, endLine, 1));
            location = FileLocation.range(location.getFileId(), TextRange2d.range2d(beginLine, 1, endLine, 1));
        }

        final Map<String, String> extraVariables = ViolationDecorator.apply(handler.getViolationDecorator(), node);

@@ -24,6 +24,7 @@ import net.sourceforge.pmd.annotation.InternalApi;
import net.sourceforge.pmd.cache.ChecksumAware;
import net.sourceforge.pmd.internal.util.PredicateUtil;
import net.sourceforge.pmd.lang.LanguageVersion;
import net.sourceforge.pmd.lang.document.FileId;
import net.sourceforge.pmd.lang.document.TextFile;
import net.sourceforge.pmd.lang.rule.RuleReference;
import net.sourceforge.pmd.lang.rule.XPathRule;
@@ -609,8 +610,8 @@ public class RuleSet implements ChecksumAware {
     */
    // TODO get rid of this overload
    @InternalApi
    public boolean applies(String qualFileName) {
        return filter.test(qualFileName);
    public boolean applies(FileId qualFileName) {
        return filter.test(qualFileName.toAbsolutePath());
    }

    /**
@@ -626,7 +627,7 @@ public class RuleSet implements ChecksumAware {
     *         <code>false</code> otherwise
     */
    boolean applies(TextFile file) {
        return applies(file.getDisplayName());
        return applies(file.getFileId());
    }

    /**

@@ -136,10 +136,10 @@ public class RuleSets {
     * Apply all applicable rules to the compilation units. Applicable means the
     * language of the rules must match the language of the source (@see
     * applies).
     * @param root
     *            the List of compilation units; the type these must have,
     *            depends on the source language
     * @param listener
     *
     * @param root the List of compilation units; the type these must have,
     *             depends on the source language
     * @param listener Listener that will handle events while analysing.
     */
    public void apply(RootNode root, FileAnalysisListener listener) {
        if (ruleApplicator == null) {
@@ -154,7 +154,7 @@ public class RuleSets {
        }

        for (RuleSet ruleSet : ruleSets) {
            if (ruleSet.applies(root.getTextDocument().getPathId())) {
            if (ruleSet.applies(root.getTextDocument().getFileId())) {
                ruleApplicator.apply(ruleSet.getRules(), listener);
            }
        }

@@ -8,6 +8,7 @@ import java.util.Comparator;
import java.util.Map;

import net.sourceforge.pmd.annotation.DeprecatedUntil700;
import net.sourceforge.pmd.lang.document.FileId;
import net.sourceforge.pmd.lang.document.FileLocation;

/**
@@ -29,7 +30,7 @@ public interface RuleViolation {
     * in an unspecified order.
     */
    Comparator<RuleViolation> DEFAULT_COMPARATOR =
        Comparator.comparing(RuleViolation::getFilename)
        Comparator.comparing(RuleViolation::getFileId)
                  .thenComparingInt(RuleViolation::getBeginLine)
                  .thenComparingInt(RuleViolation::getBeginColumn)
                  .thenComparing(RuleViolation::getDescription, Comparator.nullsLast(Comparator.naturalOrder()))
@@ -80,12 +81,10 @@ public interface RuleViolation {
    FileLocation getLocation();

    /**
     * Get the source file name in which this violation was identified.
     *
     * @return The source file name.
     * Return the ID of the file where the violation was found.
     */
    default String getFilename() {
        return getLocation().getFileName();
    default FileId getFileId() {
        return getLocation().getFileId();
    }

    /**
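RuleViolation now exposes getFileId() instead of getFilename(), so report consumers resolve a display string themselves. A minimal sketch of such a consumer (not part of the commit), using only accessors visible in this diff (getFileId, getBeginLine, getDescription, FileId.getFileName):

import net.sourceforge.pmd.RuleViolation;

public final class ViolationFormatter {
    private ViolationFormatter() { }

    /** Formats a violation as "Foo.cls:12 message", resolving the simple name from the FileId. */
    public static String format(RuleViolation violation) {
        return violation.getFileId().getFileName()
                + ":" + violation.getBeginLine()
                + " " + violation.getDescription();
    }
}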

@@ -15,6 +15,7 @@ import java.nio.file.Path;
import java.nio.file.SimpleFileVisitor;
import java.nio.file.attribute.BasicFileAttributes;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.EnumSet;
import java.util.List;
@@ -34,7 +35,9 @@ import net.sourceforge.pmd.benchmark.TimedOperation;
import net.sourceforge.pmd.benchmark.TimedOperationCategory;
import net.sourceforge.pmd.cache.internal.ClasspathFingerprinter;
import net.sourceforge.pmd.internal.util.IOUtil;
import net.sourceforge.pmd.lang.document.FileId;
import net.sourceforge.pmd.lang.document.TextDocument;
import net.sourceforge.pmd.lang.document.TextFile;
import net.sourceforge.pmd.reporting.FileAnalysisListener;

/**
@@ -49,8 +52,8 @@ public abstract class AbstractAnalysisCache implements AnalysisCache {
    protected static final Logger LOG = LoggerFactory.getLogger(AbstractAnalysisCache.class);
    protected static final ClasspathFingerprinter FINGERPRINTER = new ClasspathFingerprinter();
    protected final String pmdVersion;
    protected final ConcurrentMap<String, AnalysisResult> fileResultsCache = new ConcurrentHashMap<>();
    protected final ConcurrentMap<String, AnalysisResult> updatedResultsCache = new ConcurrentHashMap<>();
    protected final ConcurrentMap<FileId, AnalysisResult> fileResultsCache = new ConcurrentHashMap<>();
    protected final ConcurrentMap<FileId, AnalysisResult> updatedResultsCache = new ConcurrentHashMap<>();
    protected final CachedRuleMapper ruleMapper = new CachedRuleMapper();
    protected long rulesetChecksum;
    protected long auxClassPathChecksum;
@@ -66,7 +69,7 @@ public abstract class AbstractAnalysisCache implements AnalysisCache {
    @Override
    public boolean isUpToDate(final TextDocument document) {
        try (TimedOperation ignored = TimeTracker.startOperation(TimedOperationCategory.ANALYSIS_CACHE, "up-to-date check")) {
            final AnalysisResult cachedResult = fileResultsCache.get(document.getPathId());
            final AnalysisResult cachedResult = fileResultsCache.get(document.getFileId());
            final AnalysisResult updatedResult;

            // is this a known file? has it changed?
@@ -76,13 +79,6 @@ public abstract class AbstractAnalysisCache implements AnalysisCache {
            if (upToDate) {
                LOG.trace("Incremental Analysis cache HIT");

                /*
                 * Update cached violation "filename" to match the appropriate text document,
                 * so we can honor relativized paths for the current run
                 */
                final String displayName = document.getDisplayName();
                cachedResult.getViolations().forEach(v -> ((CachedRuleViolation) v).setFileDisplayName(displayName));

                // copy results over
                updatedResult = cachedResult;
            } else {
@@ -93,7 +89,7 @@ public abstract class AbstractAnalysisCache implements AnalysisCache {
                updatedResult = new AnalysisResult(document.getCheckSum(), new ArrayList<>());
            }

            updatedResultsCache.put(document.getPathId(), updatedResult);
            updatedResultsCache.put(document.getFileId(), updatedResult);

            return upToDate;
        }
@@ -101,7 +97,7 @@ public abstract class AbstractAnalysisCache implements AnalysisCache {

    @Override
    public List<RuleViolation> getCachedViolations(final TextDocument sourceFile) {
        final AnalysisResult analysisResult = fileResultsCache.get(sourceFile.getPathId());
        final AnalysisResult analysisResult = fileResultsCache.get(sourceFile.getFileId());

        if (analysisResult == null) {
            // new file, avoid nulls
@@ -113,7 +109,7 @@ public abstract class AbstractAnalysisCache implements AnalysisCache {

    @Override
    public void analysisFailed(final TextDocument sourceFile) {
        updatedResultsCache.remove(sourceFile.getPathId());
        updatedResultsCache.remove(sourceFile.getFileId());
    }

@@ -125,7 +121,7 @@ public abstract class AbstractAnalysisCache implements AnalysisCache {

    @Override
    public void checkValidity(final RuleSets ruleSets, final ClassLoader auxclassPathClassLoader) {
    public void checkValidity(RuleSets ruleSets, ClassLoader auxclassPathClassLoader, Collection<? extends TextFile> files) {
        try (TimedOperation ignored = TimeTracker.startOperation(TimedOperationCategory.ANALYSIS_CACHE, "validity check")) {
            boolean cacheIsValid = cacheExists();

@@ -222,7 +218,7 @@ public abstract class AbstractAnalysisCache implements AnalysisCache {

    @Override
    public FileAnalysisListener startFileAnalysis(TextDocument file) {
        final String fileName = file.getPathId();
        final FileId fileName = file.getFileId();

        return new FileAnalysisListener() {
            @Override

@@ -5,6 +5,7 @@
package net.sourceforge.pmd.cache;

import java.io.IOException;
import java.util.Collection;
import java.util.List;

import net.sourceforge.pmd.RuleSets;
@@ -63,8 +64,11 @@ public interface AnalysisCache {
     *
     * @param ruleSets The rulesets configured for this analysis.
     * @param auxclassPathClassLoader The class loader for auxclasspath configured for this analysis.
     * @param files Set of files in the current analysis. File
     *              records in the cache are matched to the file
     *              IDs of these files.
     */
    void checkValidity(RuleSets ruleSets, ClassLoader auxclassPathClassLoader);
    void checkValidity(RuleSets ruleSets, ClassLoader auxclassPathClassLoader, Collection<? extends TextFile> files);

    /**
     * Returns a listener that will be used like in {@link GlobalAnalysisListener#startFileAnalysis(TextFile)}.

@@ -5,6 +5,7 @@
package net.sourceforge.pmd.cache;

import java.io.IOException;
import java.util.Collection;

import net.sourceforge.pmd.RuleSets;
import net.sourceforge.pmd.annotation.InternalApi;
@@ -21,9 +22,10 @@ public class AnalysisCacheListener implements GlobalAnalysisListener {

    private final AnalysisCache cache;

    public AnalysisCacheListener(AnalysisCache cache, RuleSets ruleSets, ClassLoader classLoader) {
    public AnalysisCacheListener(AnalysisCache cache, RuleSets ruleSets, ClassLoader classLoader,
                                 Collection<? extends TextFile> textFiles) {
        this.cache = cache;
        cache.checkValidity(ruleSets, classLoader);
        cache.checkValidity(ruleSets, classLoader, textFiles);
    }

    @Override

@@ -17,6 +17,7 @@ import org.checkerframework.checker.nullness.qual.NonNull;
import net.sourceforge.pmd.Rule;
import net.sourceforge.pmd.RuleViolation;
import net.sourceforge.pmd.annotation.InternalApi;
import net.sourceforge.pmd.lang.document.FileId;
import net.sourceforge.pmd.lang.document.FileLocation;
import net.sourceforge.pmd.lang.document.TextRange2d;
import net.sourceforge.pmd.util.StringUtil;
@@ -38,27 +39,22 @@ public final class CachedRuleViolation implements RuleViolation {
    private final String ruleTargetLanguage;
    private final Map<String, String> additionalInfo;

    private FileLocation location;
    private final FileLocation location;

    private CachedRuleViolation(final CachedRuleMapper mapper, final String description,
                                final String filePathId, final String ruleClassName, final String ruleName,
                                final FileId fileFileId, final String ruleClassName, final String ruleName,
                                final String ruleTargetLanguage, final int beginLine, final int beginColumn,
                                final int endLine, final int endColumn,
                                final Map<String, String> additionalInfo) {
        this.mapper = mapper;
        this.description = description;
        this.location = FileLocation.range(filePathId, TextRange2d.range2d(beginLine, beginColumn, endLine, endColumn));
        this.location = FileLocation.range(fileFileId, TextRange2d.range2d(beginLine, beginColumn, endLine, endColumn));
        this.ruleClassName = ruleClassName;
        this.ruleName = ruleName;
        this.ruleTargetLanguage = ruleTargetLanguage;
        this.additionalInfo = additionalInfo;
    }

    void setFileDisplayName(String displayName) {
        this.location = FileLocation.range(displayName,
            TextRange2d.range2d(getBeginLine(), getBeginColumn(), getEndLine(), getEndColumn()));
    }

    @Override
    public Rule getRule() {
        // The mapper may be initialized after cache is loaded, so use it lazily
@@ -83,24 +79,27 @@ public final class CachedRuleViolation implements RuleViolation {
    /**
     * Helper method to load a {@link CachedRuleViolation} from an input stream.
     *
     * @param stream The stream from which to load the violation.
     * @param filePathId The name of the file on which this rule was reported.
     * @param mapper The mapper to be used to obtain rule instances from the active rulesets.
     * @param stream     The stream from which to load the violation.
     * @param fileFileId The name of the file on which this rule was reported.
     * @param mapper     The mapper to be used to obtain rule instances from the active rulesets.
     *
     * @return The loaded rule violation.
     * @throws IOException
     */
    /* package */ static CachedRuleViolation loadFromStream(final DataInputStream stream,
            final String filePathId, final CachedRuleMapper mapper) throws IOException {
        final String description = stream.readUTF();
        final String ruleClassName = stream.readUTF();
        final String ruleName = stream.readUTF();
        final String ruleTargetLanguage = stream.readUTF();
        final int beginLine = stream.readInt();
        final int beginColumn = stream.readInt();
        final int endLine = stream.readInt();
        final int endColumn = stream.readInt();
        final Map<String, String> additionalInfo = readAdditionalInfo(stream);
        return new CachedRuleViolation(mapper, description, filePathId, ruleClassName, ruleName, ruleTargetLanguage,
    /* package */
    static CachedRuleViolation loadFromStream(
            DataInputStream stream,
            FileId fileFileId, CachedRuleMapper mapper) throws IOException {

        String description = stream.readUTF();
        String ruleClassName = stream.readUTF();
        String ruleName = stream.readUTF();
        String ruleTargetLanguage = stream.readUTF();
        int beginLine = stream.readInt();
        int beginColumn = stream.readInt();
        int endLine = stream.readInt();
        int endColumn = stream.readInt();
        Map<String, String> additionalInfo = readAdditionalInfo(stream);
        return new CachedRuleViolation(mapper, description, fileFileId, ruleClassName, ruleName, ruleTargetLanguage,
                beginLine, beginColumn, endLine, endColumn, additionalInfo);
    }

@@ -13,8 +13,10 @@ import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

import net.sourceforge.pmd.PMDVersion;
import net.sourceforge.pmd.RuleSets;
@@ -23,6 +25,8 @@ import net.sourceforge.pmd.annotation.InternalApi;
import net.sourceforge.pmd.benchmark.TimeTracker;
import net.sourceforge.pmd.benchmark.TimedOperation;
import net.sourceforge.pmd.benchmark.TimedOperationCategory;
import net.sourceforge.pmd.lang.document.FileId;
import net.sourceforge.pmd.lang.document.TextFile;

/**
 * An analysis cache backed by a regular file.
@@ -45,17 +49,22 @@ public class FileAnalysisCache extends AbstractAnalysisCache {
    }

    @Override
    public void checkValidity(RuleSets ruleSets, ClassLoader auxclassPathClassLoader) {
    public void checkValidity(RuleSets ruleSets, ClassLoader auxclassPathClassLoader, Collection<? extends TextFile> files) {
        // load cached data before checking for validity
        loadFromFile(cacheFile);
        super.checkValidity(ruleSets, auxclassPathClassLoader);
        loadFromFile(cacheFile, files);
        super.checkValidity(ruleSets, auxclassPathClassLoader, files);
    }

    /**
     * Loads cache data from the given file.
     *
     * @param cacheFile The file which backs the file analysis cache.
     */
    private void loadFromFile(final File cacheFile) {
    private void loadFromFile(final File cacheFile, Collection<? extends TextFile> files) {
        Map<String, FileId> idMap =
            files.stream().map(TextFile::getFileId)
                 .collect(Collectors.toMap(FileId::toUriString, id -> id));

        try (TimedOperation ignored = TimeTracker.startOperation(TimedOperationCategory.ANALYSIS_CACHE, "load")) {
            if (cacheExists()) {
                try (
@@ -75,15 +84,23 @@ public class FileAnalysisCache extends AbstractAnalysisCache {
                    // Cached results
                    while (inputStream.available() > 0) {
                        final String filePathId = inputStream.readUTF();
                        FileId fileId = idMap.get(filePathId);
                        if (fileId == null) {
                            LOG.debug("File {} is in the cache but is not part of the analysis",
                                      filePathId);
                            // todo we wrote a URI, if this happens several times we will be
                            // prepending unknown:// several times.
                            fileId = FileId.fromPathLikeString(filePathId);
                        }
                        final long checksum = inputStream.readLong();

                        final int countViolations = inputStream.readInt();
                        final List<RuleViolation> violations = new ArrayList<>(countViolations);
                        for (int i = 0; i < countViolations; i++) {
                            violations.add(CachedRuleViolation.loadFromStream(inputStream, filePathId, ruleMapper));
                            violations.add(CachedRuleViolation.loadFromStream(inputStream, fileId, ruleMapper));
                        }

                        fileResultsCache.put(filePathId, new AnalysisResult(checksum, violations));
                        fileResultsCache.put(fileId, new AnalysisResult(checksum, violations));
                    }

                    LOG.debug("Analysis cache loaded from {}", cacheFile);
@@ -129,10 +146,10 @@ public class FileAnalysisCache extends AbstractAnalysisCache {
            outputStream.writeLong(auxClassPathChecksum);
            outputStream.writeLong(executionClassPathChecksum);

            for (final Map.Entry<String, AnalysisResult> resultEntry : updatedResultsCache.entrySet()) {
            for (final Map.Entry<FileId, AnalysisResult> resultEntry : updatedResultsCache.entrySet()) {
                final List<RuleViolation> violations = resultEntry.getValue().getViolations();

                outputStream.writeUTF(resultEntry.getKey()); // the path id
                outputStream.writeUTF(resultEntry.getKey().toUriString()); // the path id
                outputStream.writeLong(resultEntry.getValue().getFileChecksum());

                outputStream.writeInt(violations.size());

@@ -4,6 +4,7 @@

package net.sourceforge.pmd.cache;

import java.util.Collection;
import java.util.Collections;
import java.util.List;

@@ -11,6 +12,7 @@ import net.sourceforge.pmd.RuleSets;
import net.sourceforge.pmd.RuleViolation;
import net.sourceforge.pmd.annotation.InternalApi;
import net.sourceforge.pmd.lang.document.TextDocument;
import net.sourceforge.pmd.lang.document.TextFile;
import net.sourceforge.pmd.reporting.FileAnalysisListener;

/**
@@ -38,7 +40,7 @@ public class NoopAnalysisCache implements AnalysisCache {
    }

    @Override
    public void checkValidity(final RuleSets ruleSets, final ClassLoader classLoader) {
    public void checkValidity(RuleSets ruleSets, ClassLoader auxclassPathClassLoader, Collection<? extends TextFile> files) {
        // noop
    }

@@ -13,6 +13,7 @@ import java.util.stream.Collectors;

import net.sourceforge.pmd.annotation.Experimental;
import net.sourceforge.pmd.lang.document.Chars;
import net.sourceforge.pmd.lang.document.FileId;

/**
 * @since 6.48.0
@@ -21,11 +22,11 @@ public class CPDReport {

    private final SourceManager sourceManager;
    private final List<Match> matches;
    private final Map<String, Integer> numberOfTokensPerFile;
    private final Map<FileId, Integer> numberOfTokensPerFile;

    CPDReport(SourceManager sourceManager,
              List<Match> matches,
              Map<String, Integer> numberOfTokensPerFile) {
              Map<FileId, Integer> numberOfTokensPerFile) {
        this.sourceManager = sourceManager;
        this.matches = Collections.unmodifiableList(matches);
        this.numberOfTokensPerFile = Collections.unmodifiableMap(new TreeMap<>(numberOfTokensPerFile));
@@ -35,7 +36,7 @@ public class CPDReport {
        return matches;
    }

    public Map<String, Integer> getNumberOfTokensPerFile() {
    public Map<FileId, Integer> getNumberOfTokensPerFile() {
        return numberOfTokensPerFile;
    }

@@ -63,4 +64,9 @@ public class CPDReport {

        return new CPDReport(sourceManager, filtered, this.getNumberOfTokensPerFile());
    }

    public String getDisplayName(FileId fileId) {
        return sourceManager.getFileDisplayName(fileId);
    }
}

@@ -62,7 +62,7 @@ public class CSVRenderer implements CPDReportRenderer {
            if (lineCountPerFile) {
                writer.append(String.valueOf(loc.getLineCount())).append(separator);
            }
            writer.append(StringEscapeUtils.escapeCsv(loc.getFileName()));
            writer.append(StringEscapeUtils.escapeCsv(report.getDisplayName(loc.getFileId())));
            if (marks.hasNext()) {
                writer.append(separator);
            }

@@ -24,6 +24,7 @@ import net.sourceforge.pmd.lang.Language;
import net.sourceforge.pmd.lang.LanguagePropertyBundle;
import net.sourceforge.pmd.lang.ast.TokenMgrError;
import net.sourceforge.pmd.lang.document.FileCollector;
import net.sourceforge.pmd.lang.document.FileId;
import net.sourceforge.pmd.lang.document.TextDocument;
import net.sourceforge.pmd.lang.document.TextFile;
import net.sourceforge.pmd.properties.PropertyDescriptor;
@@ -100,7 +101,7 @@ public final class CpdAnalysis implements AutoCloseable {
    }

    private int doTokenize(TextDocument document, Tokenizer tokenizer, Tokens tokens) throws IOException, TokenMgrError {
        LOGGER.trace("Tokenizing {}", document.getPathId());
        LOGGER.trace("Tokenizing {}", document.getFileId().toAbsolutePath());
        int lastTokenSize = tokens.size();
        Tokenizer.tokenize(tokenizer, document, tokens);
        return tokens.size() - lastTokenSize - 1; /* EOF */
@@ -121,7 +122,7 @@ public final class CpdAnalysis implements AutoCloseable {
                .filter(it -> it instanceof CpdCapableLanguage)
                .collect(Collectors.toMap(lang -> lang, lang -> ((CpdCapableLanguage) lang).createCpdTokenizer(configuration.getLanguageProperties(lang))));

        Map<String, Integer> numberOfTokensPerFile = new HashMap<>();
        Map<FileId, Integer> numberOfTokensPerFile = new HashMap<>();

        boolean hasErrors = false;
        Tokens tokens = new Tokens();
@@ -130,11 +131,11 @@ public final class CpdAnalysis implements AutoCloseable {
            Tokens.State savedState = tokens.savePoint();
            try {
                int newTokens = doTokenize(textDocument, tokenizers.get(textFile.getLanguageVersion().getLanguage()), tokens);
                numberOfTokensPerFile.put(textDocument.getDisplayName(), newTokens);
                numberOfTokensPerFile.put(textDocument.getFileId(), newTokens);
                listener.addedFile(1);
            } catch (TokenMgrError | IOException e) {
                if (e instanceof TokenMgrError) { // NOPMD
                    ((TokenMgrError) e).setFileName(textFile.getDisplayName());
                    ((TokenMgrError) e).setFileId(textFile.getFileId());
                }
                String message = configuration.isSkipLexicalErrors() ? "Skipping file" : "Error while tokenizing";
                reporter.errorEx(message, e);

@@ -66,6 +66,7 @@ import net.sourceforge.pmd.lang.Language;
import net.sourceforge.pmd.lang.LanguageModuleBase.LanguageMetadata;
import net.sourceforge.pmd.lang.LanguagePropertyBundle;
import net.sourceforge.pmd.lang.LanguageRegistry;
import net.sourceforge.pmd.lang.document.FileId;
import net.sourceforge.pmd.lang.impl.CpdOnlyLanguageModuleBase;
import net.sourceforge.pmd.util.CollectionUtil;

@@ -319,7 +320,7 @@ public class GUI implements CPDListener {
    private boolean trimLeadingWhitespace;

    private List<Match> matches = new ArrayList<>();
    private Map<String, Integer> numberOfTokensPerFile;
    private Map<FileId, Integer> numberOfTokensPerFile;

    private void addSaveOptionsTo(JMenu menu) {

@@ -580,15 +581,14 @@ public class GUI implements CPDListener {

    private static String getLabel(Match match) {

        Set<String> sourceIDs = new HashSet<>(match.getMarkCount());
        Set<FileId> sourceIDs = new HashSet<>(match.getMarkCount());
        for (Mark mark : match) {
            sourceIDs.add(mark.getLocation().getFileName());
            sourceIDs.add(mark.getLocation().getFileId());
        }

        if (sourceIDs.size() == 1) {
            String sourceId = sourceIDs.iterator().next();
            int separatorPos = sourceId.lastIndexOf(File.separatorChar);
            return "..." + sourceId.substring(separatorPos);
            FileId sourceId = sourceIDs.iterator().next();
            return "..." + sourceId.getFileName();
        } else {
            return String.format("(%d separate files)", sourceIDs.size());
        }

@@ -9,6 +9,7 @@ import java.util.Objects;
import org.checkerframework.checker.nullness.qual.NonNull;
import org.checkerframework.checker.nullness.qual.Nullable;

import net.sourceforge.pmd.lang.document.FileId;
import net.sourceforge.pmd.lang.document.FileLocation;
import net.sourceforge.pmd.lang.document.TextRange2d;

@@ -21,7 +22,6 @@ public final class Mark implements Comparable<Mark> {

    private final @NonNull TokenEntry token;
    private @Nullable TokenEntry endToken;
    private String fileDisplayName;

    Mark(@NonNull TokenEntry token) {
        this.token = token;
@@ -41,16 +41,13 @@ public final class Mark implements Comparable<Mark> {
    public FileLocation getLocation() {
        TokenEntry endToken = getEndToken();
        return FileLocation.range(
            getFileName(),
            getFileId(),
            TextRange2d.range2d(token.getBeginLine(), token.getBeginColumn(),
                                endToken.getEndLine(), endToken.getEndColumn()));
    }

    String getFileName() {
        if (fileDisplayName == null) {
            return token.getFilePathId();
        }
        return fileDisplayName;
    FileId getFileId() {
        return token.getFileId();
    }

    public int getBeginTokenIndex() {
@@ -62,7 +59,7 @@ public final class Mark implements Comparable<Mark> {
    }

    void setEndToken(@NonNull TokenEntry endToken) {
        assert endToken.getFilePathId().equals(token.getFilePathId())
        assert endToken.getFileId().equals(token.getFileId())
            : "Tokens are not from the same file";
        this.endToken = endToken;
    }
@@ -97,7 +94,4 @@ public final class Mark implements Comparable<Mark> {
        return getToken().compareTo(other.getToken());
    }

    public void setFileDisplayName(String fileDisplayName) {
        this.fileDisplayName = fileDisplayName;
    }
}

@@ -4,6 +4,7 @@

package net.sourceforge.pmd.cpd;

import java.util.Collections;
import java.util.Comparator;
import java.util.Iterator;
import java.util.NoSuchElementException;
@@ -49,6 +50,10 @@ public class Match implements Comparable<Match>, Iterable<Mark> {
    }

    public Set<Mark> getMarkSet() {
        return Collections.unmodifiableSet(markSet);
    }

    @Override
    public Iterator<Mark> iterator() {
        return markSet.iterator();

@@ -69,7 +69,6 @@ class MatchAlgorithm {
            TokenEntry endToken = tokens.getEndToken(token, match);

            mark.setEndToken(endToken);
            mark.setFileDisplayName(sourceManager.getFileDisplayName(token.getFilePathId()));
        }
    }
    cpdListener.phaseUpdate(CPDListener.DONE);
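On the CPD side, per-file token counts are now keyed by FileId and display names are resolved through CPDReport.getDisplayName, as the CPDReport and CSVRenderer changes above show. A hedged sketch of a consumer of that API (not part of the commit; the package of CPDReport is assumed to be net.sourceforge.pmd.cpd):

import java.util.Map;

import net.sourceforge.pmd.cpd.CPDReport;
import net.sourceforge.pmd.lang.document.FileId;

public final class TokenCountPrinter {
    private TokenCountPrinter() { }

    /** Prints "display-name: N tokens" for every file in the CPD report. */
    public static void print(CPDReport report) {
        for (Map.Entry<FileId, Integer> entry : report.getNumberOfTokensPerFile().entrySet()) {
            String displayName = report.getDisplayName(entry.getKey());
            System.out.println(displayName + ": " + entry.getValue() + " tokens");
        }
    }
}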