forked from phoedos/pmd
Fix asserts
@@ -75,30 +75,32 @@ public class CPPTokenizerTest {
     @Test
     public void testTokenizerWithSkipBlocks() throws Exception {
         String test = IOUtils.toString(CPPTokenizerTest.class.getResourceAsStream("cpp/cpp_with_asm.cpp"), StandardCharsets.UTF_8);
-        Tokens tokens = parse(test, true);
+        Tokens tokens = parse(test, true, new Tokens());
         assertEquals(19, tokens.size());
     }
 
     @Test
     public void testTokenizerWithSkipBlocksPattern() throws Exception {
         String test = IOUtils.toString(CPPTokenizerTest.class.getResourceAsStream("cpp/cpp_with_asm.cpp"), StandardCharsets.UTF_8);
+        Tokens tokens = new Tokens();
         try {
-            Tokens tokens = parse(test, true, "#if debug|#endif");
-            assertEquals(31, tokens.size());
+            parse(test, true, "#if debug|#endif", tokens);
         } catch (TokenMgrError ignored) {
             // ignored
         }
+        assertEquals(31, tokens.size());
     }
 
     @Test
     public void testTokenizerWithoutSkipBlocks() throws Exception {
         String test = IOUtils.toString(CPPTokenizerTest.class.getResourceAsStream("cpp/cpp_with_asm.cpp"), StandardCharsets.UTF_8);
+        Tokens tokens = new Tokens();
         try {
-            Tokens tokens = parse(test, false);
-            assertEquals(37, tokens.size());
+            parse(test, false, tokens);
         } catch (TokenMgrError ignored) {
             // ignored
         }
+        assertEquals(37, tokens.size());
     }
 
     @Test
@@ -157,14 +159,14 @@ public class CPPTokenizerTest {
     }
 
     private Tokens parse(String snippet) {
-        return parse(snippet, false);
+        return parse(snippet, false, new Tokens());
     }
 
-    private Tokens parse(String snippet, boolean skipBlocks) {
-        return parse(snippet, skipBlocks, null);
+    private Tokens parse(String snippet, boolean skipBlocks, Tokens tokens) {
+        return parse(snippet, skipBlocks, null, tokens);
     }
 
-    private Tokens parse(String snippet, boolean skipBlocks, String skipPattern) {
+    private Tokens parse(String snippet, boolean skipBlocks, String skipPattern, Tokens tokens) {
         Properties properties = new Properties();
         properties.setProperty(Tokenizer.OPTION_SKIP_BLOCKS, Boolean.toString(skipBlocks));
         if (skipPattern != null) {
@@ -175,7 +177,6 @@ public class CPPTokenizerTest {
         tokenizer.setProperties(properties);
 
         SourceCode code = new SourceCode(new SourceCode.StringCodeLoader(snippet));
-        Tokens tokens = new Tokens();
         tokenizer.tokenize(code, tokens);
         return tokens;
     }
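For readability, this is roughly how one of the fixed tests reads after the hunks above are applied; it is a sketch assembled from the diff, assuming the existing imports and parse(...) helpers of CPPTokenizerTest. The Tokens instance is now created before the try block and the assertion runs after the catch, so a TokenMgrError can no longer cause the assert to be skipped silently.

    @Test
    public void testTokenizerWithoutSkipBlocks() throws Exception {
        String test = IOUtils.toString(CPPTokenizerTest.class.getResourceAsStream("cpp/cpp_with_asm.cpp"), StandardCharsets.UTF_8);

        // Created outside the try block so it stays in scope for the assertion below.
        Tokens tokens = new Tokens();
        try {
            parse(test, false, tokens);
        } catch (TokenMgrError ignored) {
            // ignored: tokenizing may stop early, but whatever was collected is still checked
        }
        // Runs even when a TokenMgrError was thrown above.
        assertEquals(37, tokens.size());
    }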