Merge branch 'master' into 7.0.x
@@ -6,22 +6,19 @@ package net.sourceforge.pmd.cpd;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.Reader;
import java.io.StringReader;
import java.util.Properties;

import net.sourceforge.pmd.PMD;
import net.sourceforge.pmd.cpd.token.JavaCCTokenFilter;
import net.sourceforge.pmd.cpd.token.TokenFilter;
import net.sourceforge.pmd.lang.ast.GenericToken;
import net.sourceforge.pmd.lang.ast.TokenMgrError;
import net.sourceforge.pmd.cpd.internal.JavaCCTokenizer;
import net.sourceforge.pmd.lang.TokenManager;
import net.sourceforge.pmd.lang.cpp.CppTokenManager;
import net.sourceforge.pmd.util.IOUtil;

/**
 * The C++ tokenizer.
 */
public class CPPTokenizer implements Tokenizer {
public class CPPTokenizer extends JavaCCTokenizer {

    private boolean skipBlocks = true;
    private String skipBlocksStart;
@@ -49,26 +46,6 @@ public class CPPTokenizer implements Tokenizer {
        }
    }

    @Override
    public void tokenize(SourceCode sourceCode, Tokens tokenEntries) {
        StringBuilder buffer = sourceCode.getCodeBuffer();
        try (Reader reader = IOUtil.skipBOM(new StringReader(maybeSkipBlocks(buffer.toString())))) {
            final TokenFilter tokenFilter = new JavaCCTokenFilter(new CppTokenManager(reader));

            GenericToken currentToken = tokenFilter.getNextToken();
            while (currentToken != null) {
                tokenEntries.add(new TokenEntry(currentToken.getImage(), sourceCode.getFileName(), currentToken.getBeginLine()));
                currentToken = tokenFilter.getNextToken();
            }
            tokenEntries.add(TokenEntry.getEOF());
            System.err.println("Added " + sourceCode.getFileName());
        } catch (TokenMgrError | IOException err) {
            err.printStackTrace();
            System.err.println("Skipping " + sourceCode.getFileName() + " due to parse error");
            tokenEntries.add(TokenEntry.getEOF());
        }
    }

    private String maybeSkipBlocks(String test) throws IOException {
        if (!skipBlocks) {
            return test;
@@ -92,4 +69,14 @@ public class CPPTokenizer implements Tokenizer {
        }
        return filtered.toString();
    }

    @Override
    protected TokenManager getLexerForSource(SourceCode sourceCode) {
        try {
            StringBuilder buffer = sourceCode.getCodeBuffer();
            return new CppTokenManager(IOUtil.skipBOM(new StringReader(maybeSkipBlocks(buffer.toString()))));
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
    }
}
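
For orientation: after this change the tokenization loop lives in the JavaCCTokenizer base class, and CPPTokenizer only supplies the lexer through getLexerForSource(). Below is a minimal usage sketch against the API visible in this diff; the driver class name and the inline snippet are illustrative, not part of the commit.

import java.util.Properties;

import net.sourceforge.pmd.cpd.CPPTokenizer;
import net.sourceforge.pmd.cpd.SourceCode;
import net.sourceforge.pmd.cpd.Tokenizer;
import net.sourceforge.pmd.cpd.Tokens;

// Illustrative driver, not part of the commit.
public class CppTokenizeSketch {
    public static void main(String[] args) throws Exception {
        // Same configuration mechanism the tests in this commit use.
        Properties properties = new Properties();
        properties.setProperty(Tokenizer.OPTION_SKIP_BLOCKS, Boolean.toString(true));

        CPPTokenizer tokenizer = new CPPTokenizer();
        tokenizer.setProperties(properties);

        // StringCodeLoader is the loader used by the updated tests below.
        SourceCode code = new SourceCode(new SourceCode.StringCodeLoader("int start() { return 1; }"));
        Tokens tokens = new Tokens();

        // With this change, lexical errors propagate (TokenMgrError) instead of
        // being printed and swallowed inside tokenize().
        tokenizer.tokenize(code, tokens);
        System.out.println(tokens.size() + " tokens");
    }
}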
@@ -7,6 +7,7 @@ package net.sourceforge.pmd.cpd;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;

import java.io.IOException;
import java.io.StringReader;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
@@ -117,7 +118,7 @@ public class CPPTokenizerContinuationTest {
                .getResourceAsStream("cpp/" + name), StandardCharsets.UTF_8);
    }

    private Tokens parse(String code) {
    private Tokens parse(String code) throws IOException {
        CPPTokenizer tokenizer = new CPPTokenizer();
        tokenizer.setProperties(new Properties());
        Tokens tokens = new Tokens();
@@ -8,16 +8,23 @@ import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotSame;
import static org.junit.Assert.assertTrue;

import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.Properties;

import org.apache.commons.io.IOUtils;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;

import net.sourceforge.pmd.PMD;
import net.sourceforge.pmd.lang.ast.TokenMgrError;

public class CPPTokenizerTest {

    @Rule
    public ExpectedException expectedException = ExpectedException.none();

    @Test
    public void testUTFwithBOM() {
        Tokens tokens = parse("\ufeffint start()\n{ int ret = 1;\nreturn ret;\n}\n");
@@ -69,21 +76,31 @@ public class CPPTokenizerTest {
    @Test
    public void testTokenizerWithSkipBlocks() throws Exception {
        String test = IOUtils.toString(CPPTokenizerTest.class.getResourceAsStream("cpp/cpp_with_asm.cpp"), StandardCharsets.UTF_8);
        Tokens tokens = parse(test, true);
        Tokens tokens = parse(test, true, new Tokens());
        assertEquals(19, tokens.size());
    }

    @Test
    public void testTokenizerWithSkipBlocksPattern() throws Exception {
        String test = IOUtils.toString(CPPTokenizerTest.class.getResourceAsStream("cpp/cpp_with_asm.cpp"), StandardCharsets.UTF_8);
        Tokens tokens = parse(test, true, "#if debug|#endif");
        Tokens tokens = new Tokens();
        try {
            parse(test, true, "#if debug|#endif", tokens);
        } catch (TokenMgrError ignored) {
            // ignored
        }
        assertEquals(31, tokens.size());
    }

    @Test
    public void testTokenizerWithoutSkipBlocks() throws Exception {
        String test = IOUtils.toString(CPPTokenizerTest.class.getResourceAsStream("cpp/cpp_with_asm.cpp"), StandardCharsets.UTF_8);
        Tokens tokens = parse(test, false);
        Tokens tokens = new Tokens();
        try {
            parse(test, false, tokens);
        } catch (TokenMgrError ignored) {
            // ignored
        }
        assertEquals(37, tokens.size());
    }

@@ -128,15 +145,33 @@ public class CPPTokenizerTest {
        assertEquals(9, tokens.size());
    }

    @Test
    public void testLexicalErrorFilename() throws Exception {
        Properties properties = new Properties();
        properties.setProperty(Tokenizer.OPTION_SKIP_BLOCKS, Boolean.toString(false));
        String test = IOUtils.toString(CPPTokenizerTest.class.getResourceAsStream("cpp/issue-1559.cpp"), StandardCharsets.UTF_8);
        SourceCode code = new SourceCode(new SourceCode.StringCodeLoader(test, "issue-1559.cpp"));
        CPPTokenizer tokenizer = new CPPTokenizer();
        tokenizer.setProperties(properties);

        expectedException.expect(TokenMgrError.class);
        expectedException.expectMessage("Lexical error in file issue-1559.cpp at");
        tokenizer.tokenize(code, new Tokens());
    }

    private Tokens parse(String snippet) {
        return parse(snippet, false);
        try {
            return parse(snippet, false, new Tokens());
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
    }

    private Tokens parse(String snippet, boolean skipBlocks) {
        return parse(snippet, skipBlocks, null);
    private Tokens parse(String snippet, boolean skipBlocks, Tokens tokens) throws IOException {
        return parse(snippet, skipBlocks, null, tokens);
    }

    private Tokens parse(String snippet, boolean skipBlocks, String skipPattern) {
    private Tokens parse(String snippet, boolean skipBlocks, String skipPattern, Tokens tokens) throws IOException {
        Properties properties = new Properties();
        properties.setProperty(Tokenizer.OPTION_SKIP_BLOCKS, Boolean.toString(skipBlocks));
        if (skipPattern != null) {
@@ -147,7 +182,6 @@ public class CPPTokenizerTest {
        tokenizer.setProperties(properties);

        SourceCode code = new SourceCode(new SourceCode.StringCodeLoader(snippet));
        Tokens tokens = new Tokens();
        tokenizer.tokenize(code, tokens);
        return tokens;
    }
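
Taken together, the updated tests show the new contract: tokenize() no longer swallows lexical errors, so a TokenMgrError escapes to the caller, and because the caller now owns the Tokens instance it can still inspect whatever was collected before the failure. A small sketch of that pattern outside JUnit follows; the helper class and method names are illustrative, not part of the commit.

import java.io.IOException;

import net.sourceforge.pmd.cpd.CPPTokenizer;
import net.sourceforge.pmd.cpd.SourceCode;
import net.sourceforge.pmd.cpd.Tokens;
import net.sourceforge.pmd.lang.ast.TokenMgrError;

// Illustrative helper, not part of the commit.
final class TokenizeHelper {
    private TokenizeHelper() { }

    // Tokenizes one source and returns true on success. TokenMgrError now
    // escapes from tokenize(), so the caller decides how to report the failing
    // file; tokens keeps whatever was lexed before the error.
    static boolean tryTokenize(CPPTokenizer tokenizer, SourceCode code, Tokens tokens) throws IOException {
        try {
            tokenizer.tokenize(code, tokens);
            return true;
        } catch (TokenMgrError e) {
            System.err.println("Skipping " + code.getFileName() + ": " + e.getMessage());
            return false;
        }
    }
}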
@@ -0,0 +1,11 @@
namespace ABC
{
namespace DEF
{

#ifdef USE_QT
const char* perPixelQml = R"QML(
)QML";
}
}
#endif // USE_QT