Merge branch 'master' into 7.0.x

Clément Fournier
2020-06-19 01:32:32 +02:00
233 changed files with 21584 additions and 1726 deletions

Changed file: ScalaTokenizer.java

@@ -11,6 +11,7 @@ import org.apache.commons.lang3.StringUtils;
import net.sourceforge.pmd.lang.LanguageRegistry;
import net.sourceforge.pmd.lang.LanguageVersion;
import net.sourceforge.pmd.lang.ast.TokenMgrError;
import net.sourceforge.pmd.lang.scala.ScalaLanguageHandler;
import net.sourceforge.pmd.lang.scala.ScalaLanguageModule;
@@ -19,6 +20,7 @@ import scala.meta.Dialect;
import scala.meta.inputs.Input;
import scala.meta.inputs.Position;
import scala.meta.internal.tokenizers.ScalametaTokenizer;
import scala.meta.tokenizers.TokenizeException;
import scala.meta.tokens.Token;
/**
@@ -74,12 +76,26 @@ public class ScalaTokenizer implements Tokenizer {
Token token;
while ((token = filter.getNextToken()) != null) {
String tokenText = token.text() != null ? token.text() : token.name();
Position tokenPosition = token.pos();
TokenEntry cpdToken = new TokenEntry(tokenText, filename, tokenPosition.startLine(),
tokenPosition.startColumn(), tokenPosition.endColumn());
if (StringUtils.isEmpty(token.text())) {
continue;
}
Position pos = token.pos();
TokenEntry cpdToken = new TokenEntry(token.text(),
filename,
pos.startLine() + 1,
pos.startColumn() + 1,
pos.endColumn() + 1);
tokenEntries.add(cpdToken);
}
} catch (Exception e) {
if (e instanceof TokenizeException) { // NOPMD
// cannot catch it as it's a checked exception and Scala sneaky throws
TokenizeException tokE = (TokenizeException) e;
Position pos = tokE.pos();
throw new TokenMgrError(pos.startLine() + 1, pos.startColumn() + 1, filename, "Scalameta threw", tokE);
} else {
throw e;
}
} finally {
tokenEntries.add(TokenEntry.getEOF());
}
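
The tokenizer loop above changes in two ways: the old TokenEntry construction built from raw Scalameta offsets is replaced by one that skips empty-text tokens and shifts Scalameta's zero-based lines and columns up by one (CPD reports 1-based positions), and Scalameta's TokenizeException is now translated into PMD's TokenMgrError with the same 1-based coordinates. The catch (Exception e) plus instanceof check is needed because, as the NOPMD comment says, TokenizeException is a checked exception that Scala throws without declaring it, so javac rejects a dedicated catch clause for it. Below is a minimal, standalone sketch of that pattern; LexError, sneakyThrow and tokenize are made-up stand-ins, not Scalameta or PMD API.

    // A minimal sketch of the "sneaky throws" situation handled above.
    public final class SneakyThrowDemo {

        /** A checked exception, playing the role of scala.meta's TokenizeException. */
        static class LexError extends Exception {
            final int line;
            LexError(int line) {
                super("unterminated string literal");
                this.line = line;
            }
        }

        /** The classic sneaky-throw trick: erasure lets a checked exception escape undeclared. */
        @SuppressWarnings("unchecked")
        static <E extends Throwable> void sneakyThrow(Throwable t) throws E {
            throw (E) t;
        }

        /** Declares no checked exception, yet can raise LexError at runtime. */
        static void tokenize(String source) {
            if (!source.endsWith("\"")) {
                sneakyThrow(new LexError(1));
            }
        }

        public static void main(String[] args) throws Exception {
            try {
                tokenize("val s = \"unterminated");
            } catch (Exception e) {             // javac rejects catch (LexError e) here:
                if (e instanceof LexError) {    // tokenize() does not declare it, so a broad
                    LexError le = (LexError) e; // catch plus an instanceof check stands in,
                    System.out.println("lexing failed at line " + le.line);
                } else {                        // just like the TokenizeException handling above
                    throw e;
                }
            }
        }
    }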

Changed file: ScalaTokenizerTest.java

@@ -4,71 +4,41 @@
package net.sourceforge.pmd.cpd;
import java.io.File;
import java.io.IOException;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.util.Properties;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.IOUtils;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import net.sourceforge.pmd.testframework.AbstractTokenizerTest;
import net.sourceforge.pmd.cpd.test.CpdTextComparisonTest;
import net.sourceforge.pmd.lang.ast.TokenMgrError;
public class ScalaTokenizerTest extends AbstractTokenizerTest {
public class ScalaTokenizerTest extends CpdTextComparisonTest {
private static final Charset ENCODING = StandardCharsets.UTF_8;
@org.junit.Rule
public ExpectedException ex = ExpectedException.none();
private static final String FILENAME = "/tokenizerFiles/sample-LiftActor.scala";
private File tempFile;
@Before
@Override
public void buildTokenizer() throws IOException {
createTempFileOnDisk();
this.tokenizer = new ScalaTokenizer();
}
private void createTempFileOnDisk() throws IOException {
this.tempFile = File.createTempFile("scala-tokenizer-test-", ".scala");
FileUtils.writeStringToFile(tempFile, getSampleCode(), ENCODING);
public ScalaTokenizerTest() {
super(".scala");
}
@Override
public String getSampleCode() throws IOException {
return IOUtils.toString(getClass().getResourceAsStream(FILENAME), ENCODING);
protected String getResourcePrefix() {
return "../lang/scala/cpd/testdata";
}
@Override
public Tokenizer newTokenizer(Properties properties) {
return new ScalaTokenizer();
}
@Test
public void tokenizeTest() throws IOException {
this.sourceCode = new SourceCode(new SourceCode.FileCodeLoader(tempFile, "UTF-8"));
this.expectedTokenCount = 2472;
super.tokenizeTest();
public void testSample() {
doTest("sample-LiftActor");
}
@Test
public void tokenizeFailTest() throws IOException {
this.sourceCode = new SourceCode(new SourceCode.StringCodeLoader(
" object Main { "
+ " def main(args: Array[String]): Unit = { "
+ " println(\"Hello, World!) " //unclosed string literal
+ " }"
+ "}"));
try {
super.tokenizeTest();
Assert.fail();
} catch (Exception e) {
// intentional
}
}
@After
public void cleanUp() {
FileUtils.deleteQuietly(this.tempFile);
this.tempFile = null;
public void tokenizeFailTest() {
ex.expect(TokenMgrError.class);
doTest("unlexable_sample");
}
}
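
The test no longer writes the sample to a temp file and asserts a hard-coded token count (expectedTokenCount = 2472); it extends CpdTextComparisonTest, points getResourcePrefix() at checked-in test data, and lets doTest() compare the tokenizer's output against an expected baseline, with the failure case expressed through the ExpectedException rule and TokenMgrError. Other CPD language modules can follow the same shape using only the hooks visible above; here is a hedged sketch, where the "Foo" language, its .foo extension and FooTokenizer are made-up placeholders and the base-class contract is assumed to be exactly what the diff shows (constructor taking the extension, getResourcePrefix(), newTokenizer(Properties), doTest(name)).

    import java.util.Properties;

    import org.junit.Test;

    import net.sourceforge.pmd.cpd.SourceCode;
    import net.sourceforge.pmd.cpd.TokenEntry;
    import net.sourceforge.pmd.cpd.Tokenizer;
    import net.sourceforge.pmd.cpd.Tokens;
    import net.sourceforge.pmd.cpd.test.CpdTextComparisonTest;

    public class FooTokenizerTest extends CpdTextComparisonTest {

        public FooTokenizerTest() {
            super(".foo");                     // extension of the checked-in test sources
        }

        @Override
        protected String getResourcePrefix() {
            return "testdata";                 // classpath folder holding sample.foo and
        }                                      // the expected token dump next to it

        @Override
        public Tokenizer newTokenizer(Properties properties) {
            return new FooTokenizer();         // a real module returns its own tokenizer here
        }

        @Test
        public void testSample() {
            doTest("sample");                  // tokenizes testdata/sample.foo and compares
        }

        /** Hypothetical stand-in so the sketch compiles; not a real PMD tokenizer. */
        static class FooTokenizer implements Tokenizer {
            @Override
            public void tokenize(SourceCode sourceCode, Tokens tokenEntries) {
                tokenEntries.add(TokenEntry.getEOF()); // a real tokenizer adds one entry per token
            }
        }
    }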

New file: Scala test data (the unlexable sample used by tokenizeFailTest)

@@ -0,0 +1,5 @@
object Main {
def main(args: Array[String]): Unit = {
println("Hello, World!) // unclosed literal
}
}
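
This five-line sample is the unlexable input for the new tokenizeFailTest: the unterminated string literal makes Scalameta's tokenizer throw TokenizeException, which the tokenizer change above rethrows as a TokenMgrError with 1-based coordinates. A standalone way to reproduce that end to end is sketched below, assuming the PMD 6-style CPD classes visible in this diff (SourceCode.StringCodeLoader, Tokens, Tokenizer.tokenize) and that ScalaTokenizer still lives in net.sourceforge.pmd.cpd.

    import net.sourceforge.pmd.cpd.ScalaTokenizer;   // package assumed, as in PMD 6
    import net.sourceforge.pmd.cpd.SourceCode;
    import net.sourceforge.pmd.cpd.Tokens;
    import net.sourceforge.pmd.lang.ast.TokenMgrError;

    public class UnlexableSampleDemo {
        public static void main(String[] args) throws Exception {
            // same source as the new test data file, with the unclosed string literal
            SourceCode code = new SourceCode(new SourceCode.StringCodeLoader(
                    "object Main {\n"
                  + "  def main(args: Array[String]): Unit = {\n"
                  + "    println(\"Hello, World!) // unclosed literal\n"
                  + "  }\n"
                  + "}\n"));
            try {
                new ScalaTokenizer().tokenize(code, new Tokens());
            } catch (TokenMgrError e) {
                // with the change above, Scalameta's TokenizeException surfaces here,
                // carrying a 1-based line/column and the source name
                System.out.println("failed to lex: " + e.getMessage());
            }
        }
    }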