Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
11 changes: 8 additions & 3 deletions DOC.md
Original file line number Diff line number Diff line change
Expand Up @@ -68,15 +68,20 @@ e.g

But [other coverage tools](https://vladfilippov.com/blog/rust-code-coverage-tools/) might work as well

## Highlighting unit tests
By default, the plugin will highlight Rust unit tests for functions having the attributes `#[test]` or `#[tokio::test]`.
You may configure different attributes with the parameter `community.rust.unittests.attributes`


## Adding test measures

Optionally SonarQube can also display tests measures.

This Community Rust plugin doesn't run your tests or generate tests reports for you. That has to be done before analysis and provided in the form of reports.
This Community Rust plugin doesn't run your tests or generate tests reports for you. That has to be done before analysis
and provided in the form of reports.

Currently, only `junit report` formats are supported :

Insert a parameter `community.rust.test.reportPath` into your `sonar-project.properties` file. As an example, one such tool
Insert a parameter `community.rust.test.reportPath` into your `sonar-project.properties` file.
As an example, one such tool for Rust that converts a `cargo test` report to a `junit report` is [cargo2junit](https://crates.io/crates/cargo2junit).

for Rust that converts a `cargo test` report to a `junit report` is [cargo2junit](https://crates.io/crates/cargo2junit).
Original file line number Diff line number Diff line change
Expand Up @@ -44,6 +44,9 @@ public class CommunityRustPlugin implements Plugin {
public static final String DEFAULT_LCOV_REPORT_PATHS = "lcov.info";
public static final String COBERTURA_REPORT_PATHS = "community.rust.cobertura.reportPaths";
public static final String DEFAULT_COBERTURA_REPORT_PATHS = "cobertura.xml";
public static final String UNIT_TEST_ATTRIBUTES = "community.rust.unittests.attributes";
public static final String TEST_AND_COVERAGE = "Test and Coverage";
public static final String DEFAULT_UNIT_TEST_ATTRIBUTES="test,tokio::test";

@Override
public void define(Context context) {
Expand Down Expand Up @@ -73,7 +76,7 @@ public void define(Context context) {
.name("LCOV Files")
.description("Paths (absolute or relative) to the files with LCOV data.")
.onQualifiers(Qualifiers.PROJECT)
.subCategory("Test and Coverage")
.subCategory(TEST_AND_COVERAGE)
.category("Rust")
.multiValues(true)
.build(),
Expand All @@ -84,12 +87,20 @@ public void define(Context context) {
.name("LCOV Files")
.description("Paths (absolute or relative) to the files with LCOV data.")
.onQualifiers(Qualifiers.PROJECT)
.subCategory("Test and Coverage")
.subCategory(TEST_AND_COVERAGE)
.category("Rust")
.multiValues(true)
.build()

.build(),

PropertyDefinition.builder(UNIT_TEST_ATTRIBUTES)
.defaultValue(DEFAULT_UNIT_TEST_ATTRIBUTES)
.name("Unit tests")
.description("Comma-separated list of Rust attributes for unit tests")
.onQualifiers(Qualifiers.PROJECT)
.subCategory(TEST_AND_COVERAGE)
.category("Rust")
.multiValues(true)
.build()
);


Expand All @@ -98,7 +109,7 @@ public void define(Context context) {
.name("Path to xunit report(s)")
.description("Path to the report of test execution, relative to project's root. Ant patterns are accepted. The reports have to conform to the junitreport XML format.")
.category("Rust")
.subCategory("Test and Coverage")
.subCategory(TEST_AND_COVERAGE)
.onQualifiers(Qualifiers.PROJECT)
.defaultValue(XUnitSensor.DEFAULT_REPORT_PATH)
.build(),
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -3,17 +3,17 @@
* Copyright (C) 2021 Eric Le Goff
* mailto:community-rust AT pm DOT me
* http://github.com/elegoff/sonar-rust
*
* <p>
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 3 of the License, or (at your option) any later version.
*
* <p>
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* <p>
* You should have received a copy of the GNU Lesser General Public License
* along with this program; if not, write to the Free Software Foundation,
* Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
Expand All @@ -24,92 +24,150 @@
import com.sonar.sslr.api.GenericTokenType;
import com.sonar.sslr.api.Token;
import com.sonar.sslr.api.Trivia;
import org.apache.commons.lang.StringUtils;
import org.sonar.api.batch.fs.InputFile;
import org.sonar.api.batch.sensor.SensorContext;
import org.sonar.api.batch.sensor.highlighting.NewHighlighting;
import org.sonar.api.batch.sensor.highlighting.TypeOfText;
import org.sonar.api.config.Configuration;
import org.sonar.rust.RustVisitorContext;
import org.sonar.rust.api.RustKeyword;
import org.sonar.rust.api.RustTokenType;
import org.sonar.sslr.parser.LexerlessGrammar;
import org.sonar.sslr.parser.ParserAdapter;
import org.sonarsource.analyzer.commons.TokenLocation;

import java.util.Arrays;
import java.util.HashSet;
import java.util.Locale;
import java.util.Set;
import java.util.*;
import java.util.stream.Collectors;
import java.util.stream.IntStream;

public class RustTokensVisitor{
public class RustTokensVisitor {


private final Set<String> keywords = new HashSet<>(Arrays.asList(RustKeyword.keywordValues()));
private final SensorContext context;
private final ParserAdapter<LexerlessGrammar> lexer;
private final Set<String> keywords = new HashSet<>(Arrays.asList(RustKeyword.keywordValues()));
private final SensorContext context;
private final ParserAdapter<LexerlessGrammar> lexer;

public RustTokensVisitor(SensorContext context, ParserAdapter<LexerlessGrammar> lexer) {
this.context = context;
this.lexer = lexer;
}
public RustTokensVisitor(SensorContext context, ParserAdapter<LexerlessGrammar> lexer) {
this.context = context;
this.lexer = lexer;
}

private static String getTokenImage(Token token) {
if (token.getType().equals(RustTokenType.CHARACTER_LITERAL)) {
return RustTokenType.CHARACTER_LITERAL.getValue();
}
return token.getValue().toLowerCase(Locale.ENGLISH);
private static String getTokenImage(Token token) {
if (token.getType().equals(RustTokenType.CHARACTER_LITERAL)) {
return RustTokenType.CHARACTER_LITERAL.getValue();
}
return token.getValue().toLowerCase(Locale.ENGLISH);
}

private static void highlight(NewHighlighting highlighting, TokenLocation tokenLocation, TypeOfText typeOfText) {
highlighting.highlight(tokenLocation.startLine(), tokenLocation.startLineOffset(), tokenLocation.endLine(), tokenLocation.endLineOffset(), typeOfText);
}
private static void highlight(NewHighlighting highlighting, TokenLocation tokenLocation, TypeOfText typeOfText) {
highlighting.highlight(tokenLocation.startLine(), tokenLocation.startLineOffset(), tokenLocation.endLine(), tokenLocation.endLineOffset(), typeOfText);
}

private static TokenLocation tokenLocation(Token token) {
return new TokenLocation(token.getLine(), token.getColumn(), token.getOriginalValue());
}
private static TokenLocation tokenLocation(Token token) {
return new TokenLocation(token.getLine(), token.getColumn(), token.getOriginalValue());
}

public void scanFile(InputFile inputFile, RustVisitorContext visitorContext) {
var highlighting = context.newHighlighting();
highlighting.onFile(inputFile);
public void scanFile(InputFile inputFile, RustVisitorContext visitorContext) {
var highlighting = context.newHighlighting();
highlighting.onFile(inputFile);

var cpdTokens = context.newCpdTokens();
cpdTokens.onFile(inputFile);
var cpdTokens = context.newCpdTokens();
cpdTokens.onFile(inputFile);

for (Token token : lexer.parse(visitorContext.file().content()).getTokens()) {
final String tokenImage = getTokenImage(token);
final var tokenLocation = tokenLocation(token);
List<Token> parsedTokens = lexer.parse(visitorContext.file().content()).getTokens();
Set<Token> unitTestTokens = identifyUnitTestTokens(parsedTokens);

if (token.getType().equals(RustTokenType.CHARACTER_LITERAL)
||token.getType().equals(RustTokenType.STRING_LITERAL)
||token.getType().equals(RustTokenType.RAW_STRING_LITERAL)
||token.getType().equals(RustTokenType.RAW_BYTE_STRING_LITERAL)
for (Token token : parsedTokens) {
final String tokenImage = getTokenImage(token);
final var tokenLocation = tokenLocation(token);

) {
highlight(highlighting, tokenLocation, TypeOfText.STRING);
if (token.getType().equals(RustTokenType.CHARACTER_LITERAL)
|| token.getType().equals(RustTokenType.STRING_LITERAL)
|| token.getType().equals(RustTokenType.RAW_STRING_LITERAL)
|| token.getType().equals(RustTokenType.RAW_BYTE_STRING_LITERAL)

} else if (keywords.contains(tokenImage)) {
highlight(highlighting, tokenLocation, TypeOfText.KEYWORD);
}
) {
highlight(highlighting, tokenLocation, TypeOfText.STRING);

if (token.getType().equals(RustTokenType.FLOAT_LITERAL)
||token.getType().equals(RustTokenType.BOOLEAN_LITERAL)
||token.getType().equals(RustTokenType.INTEGER_LITERAL)
} else if (keywords.contains(tokenImage)) {
highlight(highlighting, tokenLocation, TypeOfText.KEYWORD);
}

) {
highlight(highlighting, tokenLocation, TypeOfText.CONSTANT);
if (token.getType().equals(RustTokenType.FLOAT_LITERAL)
|| token.getType().equals(RustTokenType.BOOLEAN_LITERAL)
|| token.getType().equals(RustTokenType.INTEGER_LITERAL)) {
highlight(highlighting, tokenLocation, TypeOfText.CONSTANT);
}

}
for (Trivia trivia : token.getTrivia()) {
highlight(highlighting, tokenLocation(trivia.getToken()), TypeOfText.COMMENT);
}

if (unitTestTokens.contains(token)) {
highlight(highlighting, tokenLocation, TypeOfText.ANNOTATION
);
}

for (Trivia trivia : token.getTrivia()) {
highlight(highlighting, tokenLocation(trivia.getToken()), TypeOfText.COMMENT);
if (!GenericTokenType.EOF.equals(token.getType())) {
cpdTokens.addToken(tokenLocation.startLine(), tokenLocation.startLineOffset(), tokenLocation.endLine(), tokenLocation.endLineOffset(), tokenImage);
}
}

highlighting.save();
cpdTokens.save();
}

private Set<Token> identifyUnitTestTokens(List<Token> parsedTokens) {
Set<Token> testTokens = new HashSet<>();
Set<String> unitTestsAttributes = getUnitTestAttributes();
int i = 0;
while (i < parsedTokens.size()) {
if ("#".equals(getTokenImage(parsedTokens.get(i))) && ("[".equals(getTokenImage(parsedTokens.get(i + 1)))) && (unitTestsAttributes.contains(getTokenImage(parsedTokens.get(i + 2)))) && ("]".equals(getTokenImage(parsedTokens.get(i + 3)))) && ("fn".equals(getTokenImage(parsedTokens.get(i + 4))))) {
int j = i + 5;
//lookup for opening bracket
while (!"{".equals(getTokenImage(parsedTokens.get(j)))) {
j++;
}

if (!GenericTokenType.EOF.equals(token.getType())) {
cpdTokens.addToken(tokenLocation.startLine(), tokenLocation.startLineOffset(), tokenLocation.endLine(), tokenLocation.endLineOffset(), tokenImage);
int cptOpeningBracket = 1;
//lookup for outer closing bracket (end of test function position)
while (cptOpeningBracket > 0) {
j++;
String tokenImage = getTokenImage(parsedTokens.get(j));
if ("{".equals(tokenImage)) {
cptOpeningBracket++;
} else if ("}".equals(tokenImage)) {
cptOpeningBracket--;
}

}
}

highlighting.save();
cpdTokens.save();
//all tokens constituting a test function are added to the set
IntStream.rangeClosed(i, j).mapToObj(parsedTokens::get).forEach(testTokens::add);
}
i++;
}
return testTokens;
}

private Set<String> getUnitTestAttributes() {
Configuration config = context.config();
String[] attrs = filterEmptyStrings(config.getStringArray(CommunityRustPlugin.UNIT_TEST_ATTRIBUTES));
if (attrs.length == 0) {
attrs = StringUtils.split(CommunityRustPlugin.DEFAULT_UNIT_TEST_ATTRIBUTES, ",");
}
return Arrays.stream(attrs).collect(Collectors.toSet());
}

private String[] filterEmptyStrings(String[] stringArray) {
List<String> nonEmptyStrings = new ArrayList<>();
for (String string : stringArray) {
if (StringUtils.isNotBlank(string.trim())) {
nonEmptyStrings.add(string.trim());
}
}
return nonEmptyStrings.toArray(new String[nonEmptyStrings.size()]);
}

}
Original file line number Diff line number Diff line change
Expand Up @@ -39,9 +39,9 @@ public class CommunityRustPluginTest extends TestCase {
public void testGetExtensions() {
Version v79 = Version.create(7, 9);
SonarRuntime runtime = SonarRuntimeImpl.forSonarQube(v79, SonarQubeSide.SERVER, SonarEdition.DEVELOPER);
assertThat(extensions(runtime)).hasSize(15);
assertThat(extensions(runtime)).hasSize(16);
assertThat(extensions(runtime)).contains(ClippyRulesDefinition.class);
assertThat(extensions(SonarRuntimeImpl.forSonarLint(v79))).hasSize(15);
assertThat(extensions(SonarRuntimeImpl.forSonarLint(v79))).hasSize(16);
}

private static List extensions(SonarRuntime runtime) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -114,6 +114,15 @@ public void canParse() throws IOException {
Assertions.assertThat(tester.allAnalysisErrors()).isEmpty();
}

@Test
public void checkDuplication() throws IOException {
DefaultInputFile inputFile = executeSensorOnSingleFile("sensor/cpd.rs");
assertEquals(212, tester.cpdTokens(inputFile.key()).size());
verify(fileLinesContext).save();
assertEquals(Collections.singletonList(TypeOfText.ANNOTATION), tester.highlightingTypeAt(inputFile.key(), 5, 5));
Assertions.assertThat(tester.allAnalysisErrors()).isEmpty();
}



private DefaultInputFile executeSensorOnSingleFile(String fileName) throws IOException {
Expand Down
Loading