Clément Fournier 2023-03-20 15:18:12 +01:00
parent 6eabac7d44
commit f2dc3805af
No known key found for this signature in database
GPG Key ID: 4D8D42402E4F47E2
12 changed files with 28 additions and 24 deletions

View File

@@ -33,7 +33,7 @@ public final class ApexParser implements Parser {
final ApexTreeBuilder treeBuilder = new ApexTreeBuilder(task, (ApexLanguageProcessor) task.getLanguageProcessor());
return treeBuilder.buildTree(astRoot);
} catch (apex.jorje.services.exception.ParseException e) {
-FileLocation loc = FileLocation.caret(task.getTextDocument().getFileId(), e.getLoc().getLine(), e.getLoc().getColumn());
+FileLocation loc = FileLocation.caret(task.getFileId(), e.getLoc().getLine(), e.getLoc().getColumn());
throw new ParseException(e).withLocation(loc);
}
}
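
A minimal sketch of the error-reporting pattern the hunk above switches to, generalized to any embedded parser: only FileLocation.caret, ParserTask.getFileId and ParseException.withLocation are taken from this commit; ThirdPartyError and its accessors are placeholders.

// Hypothetical adapter body: turn a third-party parse error into a PMD
// ParseException carrying a caret location in the right file.
try {
    return buildTree(task); // placeholder for the language-specific tree build
} catch (ThirdPartyError e) { // placeholder exception type
    FileLocation loc = FileLocation.caret(task.getFileId(), e.line(), e.column());
    throw new ParseException(e).withLocation(loc);
}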

View File

@@ -63,7 +63,7 @@ class CompilerService {
public Compilation parseApex(TextDocument document) {
SourceFile sourceFile = SourceFile.builder()
.setBody(document.getText().toString())
-.setKnownName(document.getFileId().toUriString())
+.setKnownName(document.getFileId().toAbsolutePath())
.build();
ApexCompiler compiler = ApexCompiler.builder().setInput(createCompilationInput(Collections.singletonList(sourceFile))).build();
compiler.compile(CompilerStage.POST_TYPE_RESOLVE);

View File

@@ -16,7 +16,6 @@ import net.sourceforge.pmd.cpd.TokenFactory;
import net.sourceforge.pmd.cpd.Tokenizer;
import net.sourceforge.pmd.lang.apex.ApexJorjeLogging;
import net.sourceforge.pmd.lang.apex.ApexLanguageProperties;
-import net.sourceforge.pmd.lang.ast.TokenMgrError;
import net.sourceforge.pmd.lang.document.TextDocument;
import apex.jorje.parser.impl.ApexLexer;
@@ -37,7 +36,7 @@ public class ApexTokenizer implements Tokenizer {
ApexLexer lexer = new ApexLexer(ass) {
@Override
public void emitErrorMessage(String msg) {
-throw new TokenMgrError(getLine(), getCharPositionInLine(), document.getFileId(), msg, null);
+throw tokenEntries.makeLexException(getLine(), getCharPositionInLine(), msg, null);
}
};

View File

@@ -7,6 +7,7 @@ package net.sourceforge.pmd.cpd;
import org.checkerframework.checker.nullness.qual.NonNull;
import org.checkerframework.checker.nullness.qual.Nullable;
+import net.sourceforge.pmd.lang.ast.TokenMgrError;
import net.sourceforge.pmd.lang.document.FileLocation;
import net.sourceforge.pmd.lang.document.TextDocument;
@@ -42,6 +43,8 @@ public interface TokenFactory extends AutoCloseable {
recordToken(image, location.getStartLine(), location.getStartColumn(), location.getEndLine(), location.getEndColumn());
}
+TokenMgrError makeLexException(int line, int column, String message, @Nullable Throwable cause);
/**
* Sets the image of an existing token entry.
*/
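
A minimal sketch of how a CPD tokenizer's catch block might use the new makeLexException hook instead of constructing a TokenMgrError with an explicit file id. Only the TokenFactory methods come from this interface; MyLexer, MyToken, MyLexException and their accessors are placeholders.

// Hypothetical tokenizer fragment driving an arbitrary lexer.
private void tokenizeInto(MyLexer lexer, TokenFactory tokens) {
    try {
        for (MyToken t = lexer.next(); t != null; t = lexer.next()) {
            tokens.recordToken(t.image(), t.beginLine(), t.beginColumn(),
                               t.endLine(), t.endColumn());
        }
    } catch (MyLexException e) {
        // The factory already knows the current file id, so the caller only
        // passes position, message and cause instead of a FileId.
        throw tokens.makeLexException(e.line(), e.column(), e.getMessage(), e);
    }
}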

View File

@@ -14,6 +14,7 @@ import org.checkerframework.checker.nullness.qual.NonNull;
import org.checkerframework.checker.nullness.qual.Nullable;
import net.sourceforge.pmd.annotation.InternalApi;
+import net.sourceforge.pmd.lang.ast.TokenMgrError;
import net.sourceforge.pmd.lang.document.FileId;
import net.sourceforge.pmd.lang.document.TextDocument;
@@ -106,6 +107,11 @@ public class Tokens {
tokens.setImage(entry, newImage);
}
+@Override
+public TokenMgrError makeLexException(int line, int column, String message, @Nullable Throwable cause) {
+return new TokenMgrError(line, column, fileId, message, cause);
+}
@Override
public @Nullable TokenEntry peekLastToken() {
if (tokens.size() <= firstToken) {

View File

@@ -9,6 +9,7 @@ import java.util.Objects;
import net.sourceforge.pmd.lang.LanguageProcessor;
import net.sourceforge.pmd.lang.LanguageProcessorRegistry;
import net.sourceforge.pmd.lang.LanguageVersion;
+import net.sourceforge.pmd.lang.document.FileId;
import net.sourceforge.pmd.lang.document.TextDocument;
import net.sourceforge.pmd.util.AssertionUtil;
@@ -54,12 +55,8 @@ public interface Parser {
return textDoc.getLanguageVersion();
}
-/**
-* The display name for where the file comes from. This should
-* not be interpreted, it may not be a file-system path.
-*/
-public String getFileDisplayName() {
-return textDoc.getFileId().getOriginalPath();
+public FileId getFileId() {
+return textDoc.getFileId();
}
/**

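The getFileId accessor above replaces getFileDisplayName, so adapters that need a string for an embedded parser now pick a representation explicitly. A sketch under assumed names (ThirdPartyParser, TreeBuilder and RootNode are placeholders; the FileId accessors toAbsolutePath, toUriString and getOriginalPath all appear elsewhere in this commit):

// Hypothetical parser adapter using the new accessor.
public RootNode parse(ParserTask task) {
    // Choose the form the embedded parser expects: an absolute path,
    // a URI string, or the original (possibly non-filesystem) path.
    String sourceName = task.getFileId().toAbsolutePath();
    ThirdPartyParser parser = new ThirdPartyParser(sourceName, task.getSourceText());
    return new TreeBuilder().build(parser.parse(), task);
}
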
View File

@@ -40,7 +40,11 @@ import net.sourceforge.pmd.util.IteratorUtil.AbstractIterator;
*/
public final class Chars implements CharSequence {
-public static final Chars EMPTY = wrap("");
+/**
+* An empty Chars instance.
+*/
+public static final Chars EMPTY = new Chars("", 0, 0);
/**
* Special sentinel used by {@link #lines()}.
*/
@@ -83,6 +87,8 @@ public final class Chars implements CharSequence {
public static Chars wrap(CharSequence chars) {
if (chars instanceof Chars) {
return (Chars) chars;
+} else if (chars.length() == 0) {
+return EMPTY;
}
return new Chars(chars.toString(), 0, chars.length());
}
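
With the new fast path in wrap, any empty CharSequence should come back as the shared EMPTY instance rather than a fresh allocation; a quick check (plain Java, no test framework assumed):

Chars a = Chars.wrap("");
Chars b = Chars.wrap(new StringBuilder());
System.out.println(a == Chars.EMPTY && b == Chars.EMPTY); // expected: true

Note that EMPTY itself is now built directly with the constructor rather than via wrap(""): since wrap now returns EMPTY for empty input, initializing the constant through wrap would read the field before it is assigned.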

View File

@@ -9,10 +9,7 @@ import org.codehaus.groovy.antlr.parser.GroovyLexer;
import net.sourceforge.pmd.cpd.TokenFactory;
import net.sourceforge.pmd.cpd.Tokenizer;
-import net.sourceforge.pmd.lang.ast.TokenMgrError;
-import net.sourceforge.pmd.lang.document.TextDocument;
import net.sourceforge.pmd.lang.document.CpdCompat;
-import net.sourceforge.pmd.lang.document.FileId;
import groovyjarjarantlr.Token;
import groovyjarjarantlr.TokenStream;
@@ -50,11 +47,7 @@ public class GroovyTokenizer implements Tokenizer {
token = tokenStream.nextToken();
}
} catch (TokenStreamException err) {
-// Wrap exceptions of the Groovy tokenizer in a TokenMgrError, so
-// they are correctly handled
-// when CPD is executed with the '--skipLexicalErrors' command line
-// option
-throw new TokenMgrError(lexer.getLine(), lexer.getColumn(), document.getFileId(), err.getMessage(), err);
+throw tokens.makeLexException(lexer.getLine(), lexer.getColumn(), err.getMessage(), err);
}
}
}

View File

@@ -14,7 +14,7 @@ public final class HtmlParser implements net.sourceforge.pmd.lang.ast.Parser {
@Override
public ASTHtmlDocument parse(ParserTask task) {
-Document doc = Parser.xmlParser().parseInput(task.getTextDocument().getText().newReader(), "");
+Document doc = Parser.xmlParser().parseInput(task.getTextDocument().newReader(), task.getFileId().toUriString());
HtmlTreeBuilder builder = new HtmlTreeBuilder();
return builder.build(doc, task, new HashMap<>());
}
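
Passing the file id as the second argument gives jsoup a non-empty base URI for the parsed document; one observable effect is that relative links can then be resolved. A small stand-alone illustration (paths invented; only the parseInput call mirrors the line above):

// Uses org.jsoup.parser.Parser, org.jsoup.nodes.Document, java.io.StringReader.
Document doc = Parser.xmlParser().parseInput(
        new StringReader("<a href='other.html'>x</a>"),
        "file:///project/src/page.html"); // base URI, here a file URI
String abs = doc.select("a").first().absUrl("href");
// abs is expected to be "file:///project/src/other.html"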

View File

@@ -68,8 +68,8 @@ public class ScalaTokenizer implements Tokenizer {
// cannot catch it as it's a checked exception and Scala sneaky throws
TokenizeException tokE = (TokenizeException) e;
Position pos = tokE.pos();
-throw new TokenMgrError(
-pos.startLine() + 1, pos.startColumn() + 1, document.getFileId(), "Scalameta threw", tokE);
+throw tokenEntries.makeLexException(
+pos.startLine() + 1, pos.startColumn() + 1, "Scalameta threw", tokE);
} else {
throw e;
}
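
On the "sneaky throws" comment above: Scala can rethrow a checked exception that Java never sees declared, so the Java caller cannot catch TokenizeException directly and instead catches a broader type and downcasts. A minimal Java illustration of the mechanism (illustration only, not PMD code):

final class Sneaky {
    // E is inferred as RuntimeException at the call site, so javac requires
    // no throws clause, yet the original checked exception is what propagates.
    @SuppressWarnings("unchecked")
    static <E extends Throwable> RuntimeException sneakyThrow(Throwable t) throws E {
        throw (E) t;
    }

    static void mightFail() { // compiles without "throws IOException"
        throw sneakyThrow(new java.io.IOException("boom"));
    }
}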

View File

@@ -22,7 +22,7 @@ public final class ScalaParser implements Parser {
@Override
public ASTSource parse(ParserTask task) throws ParseException {
-Input.VirtualFile virtualFile = new Input.VirtualFile(task.getFileDisplayName(), task.getSourceText());
+Input.VirtualFile virtualFile = new Input.VirtualFile(task.getFileId().toAbsolutePath(), task.getSourceText());
Dialect dialect = ScalaLanguageModule.dialectOf(task.getLanguageVersion());
Source src = new ScalametaParser(virtualFile, dialect).parseSource();
ASTSource root = (ASTSource) new ScalaTreeBuilder().build(src);

View File

@@ -47,7 +47,7 @@ class VfExpressionTypeVisitor extends VfVisitorBase<Void, Void> {
private final List<String> objectsDirectories;
VfExpressionTypeVisitor(ParserTask task, VfLanguageProperties vfProperties) {
-this.fileId = task.getTextDocument().getFileId();
+this.fileId = task.getFileId();
this.apexDirectories = vfProperties.getProperty(VfLanguageProperties.APEX_DIRECTORIES_DESCRIPTOR);
this.objectsDirectories = vfProperties.getProperty(VfLanguageProperties.OBJECTS_DIRECTORIES_DESCRIPTOR);
this.apexClassNames = new ArrayList<>();