Skip to content
Permalink
Browse files
Add checkstyle plugin to check code style (#193)
Change-Id: Ieb294cbe7d9fafa4ed468b965abe61135cafd50d
  • Loading branch information
Linary committed Dec 16, 2020
1 parent 7d2cfb7 commit 1b1b78087519476b7513f15855bd1656d06b802d
Show file tree
Hide file tree
Showing 27 changed files with 350 additions and 224 deletions.
@@ -0,0 +1,87 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE module PUBLIC
"-//Checkstyle//DTD Checkstyle Configuration 1.3//EN"
"https://checkstyle.org/dtds/configuration_1_3.dtd">
<!--Reference: https://checkstyle.sourceforge.io/checks.html-->
<module name="Checker">
<!--Severity level assigned to violations; they must be fixed for the build to pass-->
<property name="severity" value="error"/>
<!--Only check java files-->
<property name="fileExtensions" value="java"/>
<!--Check files encoded in UTF-8-->
<property name="charset" value="UTF-8"/>
<!--Files must not contain tab characters-->
<module name="FileTabCharacter">
<property name="eachLine" value="true"/>
</module>

<!--Checks Java source files and defines the properties applicable to checking such files-->
<module name="TreeWalker">
<!--Check line length-->
<module name="LineLength">
<property name="max" value="80"/>
<!--Lines that may be ignored-->
<property name="ignorePattern"
value="^package.*|^import.*|a href|href|http://|https://|ftp://"/>
</module>
<!--Check that no import statement uses the * wildcard-->
<module name="AvoidStarImport"/>
<!--Check for redundant imports: duplicates, classes from java.lang, or classes from the same package-->
<module name="RedundantImport"/>
<!--Check for unused import statements-->
<module name="UnusedImports"/>
<!--Check that package names follow the naming convention-->
<module name="PackageName">
<property name="format" value="^[a-z]+(\.[a-z][a-z0-9]*)*$"/>
</module>
<!--Check that local variable names follow the naming convention-->
<module name="LocalVariableName">
<property name="format" value="^[a-z][a-zA-Z0-9_]*$"/>
</module>
<!--Check that member (non-static field) names follow the naming convention-->
<module name="MemberName">
<property name="format" value="^[a-z][a-zA-Z0-9]*$"/>
</module>
<!--Check that method names follow the naming convention-->
<module name="MethodName">
<property name="format" value="^[a-z][a-zA-Z0-9]*$"/>
</module>
<!--Check that parameter names follow the naming convention-->
<module name="ParameterName">
<property name="format" value="^[a-z][a-zA-Z0-9]*$"/>
</module>
<!--Check that constant (static final field) names follow the naming convention-->
<module name="ConstantName">
<property name="format" value="^[A-Z][A-Z0-9]*(_[A-Z0-9]+)*$"/>
</module>
<!--Check that arrays use Java style: brackets after the type rather than after the variable, e.g. int[] nums (legal), int nums[] (illegal)-->
<module name="ArrayTypeStyle">
<property name="javaStyle" value="true"/>
</module>
<!--long literals that end with "L" must use uppercase "L", not lowercase "l"-->
<module name="UpperEll"/>
<!--When wrapping code across lines, the operator must stay at the end of the current line, e.g. +, &&, ?, : etc.-->
<module name="OperatorWrap">
<property name="option" value="eol"/>
</module>
<!--Check that the specified tokens are surrounded by whitespace, e.g. if, for, while, synchronized etc.-->
<module name="WhitespaceAround"/>
<!--Check that modifiers follow the order recommended by Java: public, protected, private, abstract, default, static, final, transient, volatile, synchronized, native, strictfp-->
<module name="ModifierOrder"/>
<!--Check placement of the left curly brace of code blocks; it must be at the end of the current line-->
<module name="LeftCurly">
<property name="option" value="eol"/>
<property name="ignoreEnums" value="false"/>
</module>
<!--Empty statements (a standalone ; token) are not allowed-->
<module name="EmptyStatement"/>
<!--Classes that override equals() must also override hashCode()-->
<module name="EqualsHashCode"/>
<!--switch statements must contain a default clause-->
<module name="MissingSwitchDefault"/>
<!--The default clause of a switch statement must come after all case branches-->
<module name="DefaultComesLast"/>
<!--clone() overrides must call super.clone()-->
<module name="SuperClone"/>
</module>
</module>
22 pom.xml
@@ -535,6 +535,28 @@
</includes>
</configuration>
</plugin>
<plugin>
<!--Runs Checkstyle against main sources; violations fail the build (severity is "error" in checkstyle.xml)-->
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-checkstyle-plugin</artifactId>
<version>3.1.0</version>
<configuration>
<!--Rule set at the project root-->
<configLocation>checkstyle.xml</configLocation>
<encoding>UTF-8</encoding>
<!--Print violations to the console instead of only the report file-->
<consoleOutput>true</consoleOutput>
<failsOnError>true</failsOnError>
<!--No cross-reference links in reports-->
<linkXRef>false</linkXRef>
<!--Test sources are not checked-->
<includeTestSourceDirectory>false</includeTestSourceDirectory>
</configuration>
<executions>
<execution>
<id>validate</id>
<!--Run early, during the validate phase, before compilation-->
<phase>validate</phase>
<goals>
<goal>check</goal>
</goals>
</execution>
</executions>
</plugin>
</plugins>

<testResources>
@@ -208,7 +208,8 @@ private void load(List<InputStruct> structs) {
private void load(InputStruct struct, InputReader reader) {
LOG.info("Start parsing and loading '{}'", struct);
LoadMetrics metrics = this.context.summary().metrics(struct);
ParseTaskBuilder taskBuilder = new ParseTaskBuilder(this.context, struct);
ParseTaskBuilder taskBuilder = new ParseTaskBuilder(this.context,
struct);

final int batchSize = this.context.options().batchSize;
List<Line> lines = new ArrayList<>(batchSize);
@@ -82,8 +82,10 @@ public List<Edge> build(Map<String, Object> keyValues) {
int size = Math.max(sources.size(), targets.size());
List<Edge> edges = new ArrayList<>(size);
for (int i = 0; i < size; i++) {
Vertex source = i < sources.size() ? sources.get(i) : sources.get(0);
Vertex target = i < targets.size() ? targets.get(i) : targets.get(0);
Vertex source = i < sources.size() ?
sources.get(i) : sources.get(0);
Vertex target = i < targets.size() ?
targets.get(i) : targets.get(0);
Edge edge = new Edge(this.mapping.label());
edge.source(source);
edge.target(target);
@@ -87,7 +87,8 @@ protected Collection<String> nonNullableKeys(SchemaLabel schemaLabel) {
schemaLabel.nullableKeys());
}

protected VertexKVPairs newKVPairs(VertexLabel vertexLabel, boolean unfold) {
protected VertexKVPairs newKVPairs(VertexLabel vertexLabel,
boolean unfold) {
IdStrategy idStrategy = vertexLabel.idStrategy();
if (idStrategy.isCustomize()) {
if (unfold) {
@@ -407,7 +408,8 @@ public void extractFromVertex(Map<String, Object> keyValues) {
}
if (isIdField(fieldName)) {
this.idField = fieldName;
List<Object> rawIdValues = splitField(fieldName, fieldValue);
List<Object> rawIdValues = splitField(fieldName,
fieldValue);
this.idValues = rawIdValues.stream().map(rawIdValue -> {
return mappingValue(fieldName, rawIdValue);
}).collect(Collectors.toList());
@@ -585,7 +587,8 @@ public void extractFromVertex(Map<String, Object> keyValues) {
String key = mapping().mappingField(fieldName);
if (!handledPk && primaryKeys.contains(key)) {
// Don't put primary key/values into general properties
List<Object> rawPkValues = splitField(fieldName, fieldValue);
List<Object> rawPkValues = splitField(fieldName,
fieldValue);
this.pkValues = rawPkValues.stream().map(rawPkValue -> {
return mappingValue(fieldName, rawPkValue);
}).collect(Collectors.toList());
@@ -73,17 +73,17 @@ public final class LoadOptions {

@Parameter(names = {"--protocol"}, arity = 1,
validateWith = {ProtocolValidator.class},
description = "The protocol of HugeGraphServer, allowed values " +
"are: http or https")
description = "The protocol of HugeGraphServer, " +
"allowed values are: http or https")
public String protocol = "http";

@Parameter(names = {"--trust-store-file"}, arity = 1,
description = "The path of client truststore file used when https " +
"protocol is enabled")
description = "The path of client truststore file used " +
"when https protocol is enabled")
public String trustStoreFile = null;

@Parameter(names = {"--trust-store-password"}, arity = 1,
description = "The password of the client truststore file used " +
description = "The password of client truststore file used " +
"when https protocol is enabled")
public String trustStorePassword = null;

@@ -258,10 +258,10 @@ public static class UrlValidator implements IParameterValidator {

@Override
public void validate(String name, String value) {
String regex = "^((http)(s?)://)?"
+ "(([0-9]{1,3}\\.){3}[0-9]{1,3}" // IP URL, like: 10.0.0.1
+ "|" // Or domain name
+ "([0-9a-z_!~*'()-]+\\.)*[0-9a-z_!~*'()-]+)$";
String regex = "^((http)(s?)://)?" +
"(([0-9]{1,3}\\.){3}[0-9]{1,3}" + // IP URL
"|" + // Or domain name
"([0-9a-z_!~*'()-]+\\.)*[0-9a-z_!~*'()-]+)$";
if (!value.matches(regex)) {
throw new ParameterException(String.format(
"Invalid url value of args '%s': '%s'", name, value));
@@ -170,7 +170,7 @@ private void removeDupLines() {
}
}
} catch (IOException e) {
throw new LoadException("Failed to scan and remove duplicate lines");
throw new LoadException("Failed to remove duplicate lines");
}
if (!dedupFile.renameTo(this.file)) {
throw new LoadException("Failed to rename dedup file to origin");
@@ -101,7 +101,7 @@ public void checkFieldsValid(InputSource source) {
}
if (!this.ignoredFields.isEmpty()) {
E.checkArgument(header.containsAll(this.ignoredFields),
"The all keys %s of ignored must be existed "+
"The all keys %s of ignored must be existed " +
"in header %s", this.ignoredFields, header);
}
if (!this.mappingFields.isEmpty()) {
@@ -59,8 +59,8 @@ public void check() throws IllegalArgumentException {
E.checkArgument(this.input != null, "The mapping.input can't be null");
this.input.check();
E.checkArgument(!this.vertices.isEmpty() || !this.edges.isEmpty(),
"The mapping.vertices and mapping.edges can't be empty" +
"at same time, need specify at least one");
"The mapping.vertices and mapping.edges can't be " +
"empty at same time, need specify at least one");
this.vertices.forEach(VertexMapping::check);
this.edges.forEach(EdgeMapping::check);

@@ -158,7 +158,8 @@ private Map<String, FailureFile> groupFailureFiles(File pathDir) {
} else {
E.checkArgument(Constants.HEADER_SUFFIX.equals(suffix),
"The failure data file must end with %s or %s",
Constants.FAILURE_SUFFIX, Constants.HEADER_SUFFIX);
Constants.FAILURE_SUFFIX,
Constants.HEADER_SUFFIX);
failureFile.headerFile = subFile;
}
failureFiles.put(inputId, failureFile);
@@ -21,8 +21,8 @@

import java.util.Set;

import com.baidu.hugegraph.loader.source.SourceType;
import com.baidu.hugegraph.loader.mapping.InputStruct;
import com.baidu.hugegraph.loader.source.SourceType;
import com.baidu.hugegraph.util.InsertionOrderUtil;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
@@ -36,7 +36,7 @@ public final class InputProgress {
@JsonProperty("loading_item")
private InputItemProgress loadingItem;

private transient final Set<InputItemProgress> loadingItems;
private final transient Set<InputItemProgress> loadingItems;

@JsonCreator
public InputProgress(@JsonProperty("type") SourceType type,
@@ -261,8 +261,8 @@ private static Reader createCompressReader(InputStream stream,
Configuration config = new Configuration();
CompressionCodec codec = ReflectionUtils.newInstance(
SnappyCodec.class, config);
CompressionInputStream sis = codec.createInputStream(
stream, codec.createDecompressor());
CompressionInputStream sis = codec.createInputStream(stream,
codec.createDecompressor());
return new InputStreamReader(sis, charset);
case GZIP:
case BZ2:
@@ -88,7 +88,8 @@ public String[] readHeader(List<Readable> readables) {
this.openReader(readable);
StructObjectInspector inspector;
try {
inspector = (StructObjectInspector) this.reader.getObjectInspector();
inspector = (StructObjectInspector)
this.reader.getObjectInspector();
return this.parseHeader(inspector);
} finally {
try {
@@ -162,7 +162,8 @@ private boolean fetchNextPage() {
throw new LoadException("Failed to read next page for '%s'", e);
}
GroupRecordConverter converter = new GroupRecordConverter(this.schema);
this.recordReader = this.columnIO.getRecordReader(this.pages, converter);
this.recordReader = this.columnIO.getRecordReader(this.pages,
converter);
this.pagesRowCount = this.pages.getRowCount();
this.currRowOffset = 0L;
return this.currRowOffset < this.pagesRowCount;
@@ -23,7 +23,6 @@
import java.util.List;

import com.baidu.hugegraph.loader.reader.Readable;
import com.baidu.hugegraph.loader.reader.line.Line;
import com.baidu.hugegraph.loader.source.InputSource;
import com.baidu.hugegraph.util.E;

@@ -64,7 +64,8 @@ private static InputSource readInputSource(JsonNode node) {
case JDBC:
JsonNode vendorNode = getNode(node, FIELD_VENDOR,
JsonNodeType.STRING);
vendorNode = TextNode.valueOf(vendorNode.asText().toUpperCase());
vendorNode = TextNode.valueOf(vendorNode.asText()
.toUpperCase());
objectNode.replace(FIELD_VENDOR, vendorNode);
return JsonUtil.convert(node, JDBCSource.class);
default:
@@ -69,10 +69,13 @@ public FileSource(@JsonProperty("path") String path,
this.path = path;
this.filter = filter != null ? filter : new FileFilter();
this.format = format != null ? format : FileFormat.CSV;
this.delimiter = delimiter != null ? delimiter : this.format.delimiter();
this.dateFormat = dateFormat != null ? dateFormat : Constants.DATE_FORMAT;
this.delimiter = delimiter != null ?
delimiter : this.format.delimiter();
this.dateFormat = dateFormat != null ?
dateFormat : Constants.DATE_FORMAT;
this.timeZone = timeZone != null ? timeZone : Constants.TIME_ZONE;
this.skippedLine = skippedLine != null ? skippedLine : new SkippedLine();
this.skippedLine = skippedLine != null ?
skippedLine : new SkippedLine();
this.compression = compression != null ? compression : Compression.NONE;
this.batchSize = batchSize != null ? batchSize : 500;
}
@@ -108,6 +108,7 @@ private <ES extends ElementStructV1> void checkNoSameStruct(
.map(ElementStructV1::uniqueKey)
.collect(Collectors.toSet());
E.checkArgument(structs.size() == uniqueKeys.size(),
"Please ensure there is no same mapping in %s", structs);
"Please ensure there is no same mapping in %s",
structs);
}
}
@@ -101,7 +101,8 @@ protected void insertBatch(List<Record> batch, boolean checkVertex) {
if (this.type().isVertex()) {
client.graph().addVertices((List<Vertex>) (Object) elements);
} else {
client.graph().addEdges((List<Edge>) (Object) elements, checkVertex);
client.graph().addEdges((List<Edge>) (Object) elements,
checkVertex);
}
}

@@ -89,7 +89,8 @@ private ParseTask buildTask(ElementBuilder builder, List<Line> lines) {
for (Line line : lines) {
try {
// NOTE: don't remove entry in keyValues
List<GraphElement> elements = builder.build(line.keyValues());
List<GraphElement> elements = builder.build(
line.keyValues());
E.checkState(elements.size() <= batchSize,
"The number of columns in a line cannot " +
"exceed the size of a batch, but got %s > %s",
@@ -164,6 +164,8 @@ private void submitInSingle(InputStruct struct, ElementMapping mapping,
InsertTask task = new SingleInsertTask(this.context, struct,
mapping, batch);
CompletableFuture.runAsync(task, this.singleService)
.whenComplete((r, e) -> this.singleSemaphore.release());
.whenComplete((r, e) -> {
this.singleSemaphore.release();
});
}
}

0 comments on commit 1b1b780

Please sign in to comment.