Commit
Move protobuf spec of table FILE_SOURCES.BINARY_DATA into sonar-db
Simon Brandhof committed Jul 23, 2015
1 parent 5e36720 commit 17af536
Showing 37 changed files with 571 additions and 4,044 deletions.
23 changes: 17 additions & 6 deletions compile_protobuf.sh
@@ -1,14 +1,25 @@
 #!/bin/bash
 
-# Usage: compile_protobuf <inputDir> <outputDir>
+# Compiles all the Protocol Buffers files (*.proto) to Java source code.
+# IMPORTANT - protobuf 2.6.1 must be installed. Other versions are not supported.
+
+# Usage: compile_protobuf <module> <type: main or test>
 function compile_protobuf {
-  echo "Compiling [$1] to [$2]..."
-  mkdir -p $2
-  protoc --proto_path=$1 --java_out=$2 $1/*.proto
+  INPUT="$1/src/$2/protobuf"
+  OUTPUT="$1/src/$2/gen-java"
+
+  if [ -d $INPUT ]
+  then
+    echo "Compiling [$INPUT] to [$OUTPUT]..."
+    rm -rf $OUTPUT
+    mkdir -p $OUTPUT
+    protoc --proto_path=$INPUT --java_out=$OUTPUT $INPUT/*.proto
+  fi
 }
 
-compile_protobuf "sonar-core/src/test/protobuf" "sonar-core/src/test/gen-java"
-compile_protobuf "sonar-batch-protocol/src/main/protobuf" "sonar-batch-protocol/src/main/gen-java"
+compile_protobuf "sonar-batch-protocol" "main"
+compile_protobuf "sonar-core" "test"
+compile_protobuf "sonar-db" "main"






@@ -23,7 +23,6 @@
 import java.util.Arrays;
 import java.util.Timer;
 import java.util.concurrent.atomic.AtomicLong;
-
 import org.apache.commons.io.IOUtils;
 import org.junit.Rule;
 import org.junit.Test;
@@ -32,9 +31,9 @@
 import org.sonar.api.utils.System2;
 import org.sonar.api.utils.internal.Uuids;
 import org.sonar.db.DbTester;
+import org.sonar.db.FileSources;
 import org.sonar.db.source.FileSourceDao;
 import org.sonar.db.source.FileSourceDto;
-import org.sonar.server.source.db.FileSourceDb;
 import org.sonar.server.source.index.FileSourcesUpdaterHelper;
 import org.sonar.server.source.index.SourceLineResultSetIterator;
 
@@ -111,8 +110,8 @@ private FileSourceDto generateDto() throws IOException {
   }
 
   private byte[] generateData() {
-    FileSourceDb.Data.Builder dataBuilder = FileSourceDb.Data.newBuilder();
-    FileSourceDb.Line.Builder lineBuilder = FileSourceDb.Line.newBuilder();
+    FileSources.Data.Builder dataBuilder = FileSources.Data.newBuilder();
+    FileSources.Line.Builder lineBuilder = FileSources.Line.newBuilder();
     for (int i = 1; i <= NUMBER_OF_LINES; i++) {
       lineBuilder.clear();
       dataBuilder.addLines(lineBuilder

@@ -31,8 +31,8 @@
 import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
+import org.sonar.db.FileSources;
 import org.sonar.server.es.EsClient;
-import org.sonar.server.source.db.FileSourceDb;
 import org.sonar.server.source.index.FileSourcesUpdaterHelper;
 import org.sonar.server.source.index.SourceLineDoc;
 import org.sonar.server.source.index.SourceLineIndex;
@@ -115,8 +115,8 @@ private static class SourceIterator implements Iterator<FileSourcesUpdaterHelper
     private final int nbLinesPerFile;
     private int currentProject = 0;
     private AtomicLong count = new AtomicLong(0L);
-    private final FileSourceDb.Data.Builder dataBuilder = FileSourceDb.Data.newBuilder();
-    private final FileSourceDb.Line.Builder lineBuilder = FileSourceDb.Line.newBuilder();
+    private final FileSources.Data.Builder dataBuilder = FileSources.Data.newBuilder();
+    private final FileSources.Line.Builder lineBuilder = FileSources.Line.newBuilder();
 
     SourceIterator(long nbFiles, int nbLinesPerFile) {
       this.nbFiles = nbFiles;

@@ -20,14 +20,13 @@
 
 package org.sonar.server.computation.source;
 
-import org.apache.commons.codec.binary.Hex;
-import org.apache.commons.codec.digest.DigestUtils;
-import org.apache.commons.lang.StringUtils;
-import org.sonar.server.source.db.FileSourceDb;
-
 import java.security.MessageDigest;
 import java.util.Iterator;
 import java.util.List;
+import org.apache.commons.codec.binary.Hex;
+import org.apache.commons.codec.digest.DigestUtils;
+import org.apache.commons.lang.StringUtils;
+import org.sonar.db.FileSources;
 
 import static java.nio.charset.StandardCharsets.UTF_8;
 
@@ -69,7 +68,7 @@ private void read(Data data, String source, boolean hasNextLine) {
       data.srcMd5Digest.update(source.getBytes(UTF_8));
     }
 
-    FileSourceDb.Line.Builder lineBuilder = data.fileSourceBuilder.addLinesBuilder()
+    FileSources.Line.Builder lineBuilder = data.fileSourceBuilder.addLinesBuilder()
       .setSource(source)
       .setLine(currentLine);
     for (LineReader lineReader : lineReaders) {
@@ -85,17 +84,17 @@ private static String computeLineChecksum(String line) {
     return DigestUtils.md5Hex(reducedLine);
   }
 
-  private boolean hasNextLine(){
+  private boolean hasNextLine() {
     return linesIterator.hasNext() || currentLine < numberOfLines;
   }
 
   public static class Data {
     private final StringBuilder lineHashes;
     private final MessageDigest srcMd5Digest;
-    private final FileSourceDb.Data.Builder fileSourceBuilder;
+    private final FileSources.Data.Builder fileSourceBuilder;
 
     public Data() {
-      this.fileSourceBuilder = FileSourceDb.Data.newBuilder();
+      this.fileSourceBuilder = FileSources.Data.newBuilder();
       this.lineHashes = new StringBuilder();
       this.srcMd5Digest = DigestUtils.getMd5Digest();
     }
@@ -108,7 +107,7 @@ public String getLineHashes() {
       return lineHashes.toString();
     }
 
-    public FileSourceDb.Data getFileSourceData() {
+    public FileSources.Data getFileSourceData() {
       return fileSourceBuilder.build();
     }
   }

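Note on the change above: the generated class moves from org.sonar.server.source.db.FileSourceDb to org.sonar.db.FileSources, but the protobuf builder API used by ComputeFileSourceData is unchanged. A minimal sketch of that builder pattern, with invented line numbers and source strings (getLinesCount() is the accessor protobuf 2.x generates for the repeated "lines" field, not something shown in this diff):

import org.sonar.db.FileSources;

public class FileSourcesDataSketch {
  public static void main(String[] args) {
    // One repeated Line entry per source line, mirroring ComputeFileSourceData.Data.
    FileSources.Data.Builder dataBuilder = FileSources.Data.newBuilder();
    dataBuilder.addLinesBuilder()
      .setLine(1)
      .setSource("package org.example;");
    dataBuilder.addLinesBuilder()
      .setLine(2)
      .setSource("public class Example {}");

    FileSources.Data data = dataBuilder.build();
    System.out.println(data.getLinesCount()); // expected to print 2
  }
}
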
@@ -21,7 +21,7 @@
 package org.sonar.server.computation.source;
 
 import org.sonar.batch.protocol.output.BatchReport;
-import org.sonar.server.source.db.FileSourceDb;
+import org.sonar.db.FileSources;
 
 import javax.annotation.CheckForNull;
 
@@ -37,7 +37,7 @@ public CoverageLineReader(Iterator<BatchReport.Coverage> coverageIterator) {
   }
 
   @Override
-  public void read(FileSourceDb.Line.Builder lineBuilder) {
+  public void read(FileSources.Line.Builder lineBuilder) {
     BatchReport.Coverage reportCoverage = getNextLineCoverageIfMatchLine(lineBuilder.getLine());
     if (reportCoverage != null) {
       processUnitTest(lineBuilder, reportCoverage);
@@ -47,7 +47,7 @@ public void read(FileSourceDb.Line.Builder lineBuilder) {
     }
   }
 
-  private static void processUnitTest(FileSourceDb.Line.Builder lineBuilder, BatchReport.Coverage reportCoverage){
+  private static void processUnitTest(FileSources.Line.Builder lineBuilder, BatchReport.Coverage reportCoverage){
     if (reportCoverage.hasUtHits()) {
       lineBuilder.setUtLineHits(reportCoverage.getUtHits() ? 1 : 0);
     }
@@ -57,7 +57,7 @@ private static void processUnitTest(FileSourceDb.Line.Builder lineBuilder, Batch
     }
   }
 
-  private static void processIntegrationTest(FileSourceDb.Line.Builder lineBuilder, BatchReport.Coverage reportCoverage){
+  private static void processIntegrationTest(FileSources.Line.Builder lineBuilder, BatchReport.Coverage reportCoverage){
     if (reportCoverage.hasItHits()) {
       lineBuilder.setItLineHits(reportCoverage.getItHits() ? 1 : 0);
     }
@@ -67,7 +67,7 @@ private static void processIntegrationTest(FileSourceDb.Line.Builder
     }
   }
 
-  private static void processOverallTest(FileSourceDb.Line.Builder lineBuilder, BatchReport.Coverage reportCoverage){
+  private static void processOverallTest(FileSources.Line.Builder lineBuilder, BatchReport.Coverage reportCoverage){
     if (reportCoverage.hasUtHits() || reportCoverage.hasItHits()) {
       lineBuilder.setOverallLineHits((reportCoverage.getUtHits() || reportCoverage.getItHits()) ? 1 : 0);
     }

@@ -27,7 +27,7 @@
 import java.util.List;
 import java.util.Map;
 import org.sonar.batch.protocol.output.BatchReport;
-import org.sonar.server.source.db.FileSourceDb;
+import org.sonar.db.FileSources;
 
 import static com.google.common.collect.Lists.newArrayList;
 import static com.google.common.collect.Maps.newHashMap;
@@ -47,7 +47,7 @@ public DuplicationLineReader(Iterator<BatchReport.Duplication> duplications) {
   }
 
   @Override
-  public void read(FileSourceDb.Line.Builder lineBuilder) {
+  public void read(FileSources.Line.Builder lineBuilder) {
     int line = lineBuilder.getLine();
     List<BatchReport.Range> blocks = findDuplicationBlockMatchingLine(line);
     for (BatchReport.Range block : blocks) {

@@ -21,15 +21,13 @@
 package org.sonar.server.computation.source;
 
 import com.google.common.collect.ImmutableMap;
-import org.sonar.batch.protocol.Constants;
-import org.sonar.batch.protocol.output.BatchReport;
-import org.sonar.server.source.db.FileSourceDb;
-
-import javax.annotation.CheckForNull;
-
 import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
+import javax.annotation.CheckForNull;
+import org.sonar.batch.protocol.Constants;
+import org.sonar.batch.protocol.output.BatchReport;
+import org.sonar.db.FileSources;
 
 import static com.google.common.collect.Lists.newArrayList;
 
@@ -58,7 +56,7 @@ public HighlightingLineReader(Iterator<BatchReport.SyntaxHighlighting> lineHighl
   }
 
   @Override
-  public void read(FileSourceDb.Line.Builder lineBuilder) {
+  public void read(FileSources.Line.Builder lineBuilder) {
     int line = lineBuilder.getLine();
     StringBuilder highlighting = new StringBuilder();
 

@@ -20,10 +20,10 @@
 
 package org.sonar.server.computation.source;
 
-import org.sonar.server.source.db.FileSourceDb;
+import org.sonar.db.FileSources;
 
 public interface LineReader {
 
-  void read(FileSourceDb.Line.Builder lineBuilder);
+  void read(FileSources.Line.Builder lineBuilder);
 
 }
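The LineReader contract itself only changes its parameter type: each reader receives the Line.Builder that ComputeFileSourceData has already filled with the line number and source, and sets only the fields it owns. A purely illustrative implementation using just the methods visible in this diff (getLine, setUtLineHits); the class name and behaviour are invented:

import org.sonar.db.FileSources;
import org.sonar.server.computation.source.LineReader;

public class EveryLineCoveredReader implements LineReader {

  // Illustration only: pretends every line was hit once by unit tests.
  @Override
  public void read(FileSources.Line.Builder lineBuilder) {
    if (lineBuilder.getLine() > 0) {
      lineBuilder.setUtLineHits(1);
    }
  }
}
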
@@ -21,7 +21,7 @@
 package org.sonar.server.computation.source;
 
 import org.sonar.batch.protocol.output.BatchReport;
-import org.sonar.server.source.db.FileSourceDb;
+import org.sonar.db.FileSources;
 
 public class ScmLineReader implements LineReader {
 
@@ -30,8 +30,9 @@ public class ScmLineReader implements LineReader {
   public ScmLineReader(BatchReport.Changesets scmReport) {
     this.scmReport = scmReport;
   }
+
   @Override
-  public void read(FileSourceDb.Line.Builder lineBuilder) {
+  public void read(FileSources.Line.Builder lineBuilder) {
     int changeSetIndex = scmReport.getChangesetIndexByLine(lineBuilder.getLine() - 1);
     BatchReport.Changesets.Changeset changeset = scmReport.getChangeset(changeSetIndex);
     boolean hasAuthor = changeset.hasAuthor();

@@ -32,7 +32,7 @@
 import java.util.Map;
 import java.util.Set;
 import org.sonar.batch.protocol.output.BatchReport;
-import org.sonar.server.source.db.FileSourceDb;
+import org.sonar.db.FileSources;
 
 public class SymbolsLineReader implements LineReader {
 
@@ -49,7 +49,7 @@ public SymbolsLineReader(Iterator<BatchReport.Symbol> symbols) {
   }
 
   @Override
-  public void read(FileSourceDb.Line.Builder lineBuilder) {
+  public void read(FileSources.Line.Builder lineBuilder) {
     int line = lineBuilder.getLine();
     List<BatchReport.Symbol> lineSymbols = findSymbolsMatchingLine(line);
     for (BatchReport.Symbol lineSymbol : lineSymbols) {

@@ -47,7 +47,7 @@
 import org.sonar.server.computation.source.LineReader;
 import org.sonar.server.computation.source.ScmLineReader;
 import org.sonar.server.computation.source.SymbolsLineReader;
-import org.sonar.server.source.db.FileSourceDb;
+import org.sonar.db.FileSources;
 
 import static org.sonar.server.computation.component.ComponentVisitor.Order.PRE_ORDER;
 
@@ -120,7 +120,7 @@ public void visitFile(Component file) {
   }
 
   private void persistSource(ComputeFileSourceData.Data fileSourceData, String componentUuid) {
-    FileSourceDb.Data fileData = fileSourceData.getFileSourceData();
+    FileSources.Data fileData = fileSourceData.getFileSourceData();
 
     byte[] data = FileSourceDto.encodeSourceData(fileData);
     String dataHash = DigestUtils.md5Hex(data);
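As the hunk above shows, persistSource serializes the FileSources.Data message with FileSourceDto.encodeSourceData and hashes the resulting bytes, presumably what gets stored in the FILE_SOURCES.BINARY_DATA column named in the commit title. A rough sketch of those two steps in isolation (the sample line content is invented):

import org.apache.commons.codec.digest.DigestUtils;
import org.sonar.db.FileSources;
import org.sonar.db.source.FileSourceDto;

public class EncodeSourceDataSketch {
  public static void main(String[] args) {
    FileSources.Data.Builder dataBuilder = FileSources.Data.newBuilder();
    dataBuilder.addLinesBuilder()
      .setLine(1)
      .setSource("int i = 0;");

    // Same two steps as persistSource: serialize the protobuf message,
    // then compute the MD5 hash of the serialized bytes.
    byte[] data = FileSourceDto.encodeSourceData(dataBuilder.build());
    String dataHash = DigestUtils.md5Hex(data);

    System.out.println(data.length + " bytes, md5 = " + dataHash);
  }
}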
