Add travis test
Change-Id: I4d604d889913d22c22c63249e5e59c78c1c8550c
Linary authored and zhoney committed Nov 23, 2018
1 parent a85817b commit 0fbea6908fb6c06441e3377ac4ba1ee958d81932
Showing 13 changed files with 139 additions and 44 deletions.
@@ -0,0 +1,28 @@
language: java

jdk:
- oraclejdk8

sudo: required

branches:
only:
- master
- /^release-.*$/
- /^test-.*$/

install: mvn compile -Dmaven.javadoc.skip=true

before_script:
- $TRAVIS_DIR/install-hugegraph.sh $TRAVIS_BRANCH

script:
- mvn test -Dtest=LoaderTest
- mvn cobertura:cobertura

after_success:
- bash <(curl -s https://codecov.io/bash)

env:
global:
- TRAVIS_DIR=assembly/travis
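
For reference, the CI steps configured above can be reproduced locally. This is a minimal sketch, assuming a Linux shell with JDK 8 and Maven installed and substituting `master` for `$TRAVIS_BRANCH`:

```bash
# Reproduce the Travis steps locally (sketch; assumes JDK 8 + Maven, run from the repo root)
TRAVIS_DIR=assembly/travis

# install: compile without generating javadoc
mvn compile -Dmaven.javadoc.skip=true

# before_script: build and start a HugeGraph server from the matching branch
$TRAVIS_DIR/install-hugegraph.sh master

# script: run the loader test suite and generate the coverage report
mvn test -Dtest=LoaderTest
mvn cobertura:cobertura
```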
@@ -1,5 +1,9 @@
# hugegraph-loader

[![License](https://img.shields.io/badge/license-Apache%202-0E78BA.svg)](https://www.apache.org/licenses/LICENSE-2.0.html)
[![Build Status](https://travis-ci.org/hugegraph/hugegraph-loader.svg?branch=master)](https://travis-ci.org/hugegraph/hugegraph-loader)
[![codecov](https://codecov.io/gh/hugegraph/hugegraph-loader/branch/master/graph/badge.svg)](https://codecov.io/gh/hugegraph/hugegraph-loader)

hugegraph-loader is a customizable command-line utility for loading small- to medium-sized graph datasets into the HugeGraph database from files in various input formats.

## Features
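
As an illustration of the command-line workflow described above, a typical invocation looks like the sketch below. The `bin/hugegraph-loader` launcher name is an assumption; the `-f`, `-s`, `-g` and `--num-threads` options are the ones exercised by the tests in this commit:

```bash
# Hypothetical invocation (launcher name assumed; options taken from LoaderTest in this commit)
#   -f  the struct/mapping description file
#   -s  the schema definition (Groovy)
#   -g  the target graph name
bin/hugegraph-loader -f example/struct.json \
                     -s example/schema.groovy \
                     -g hugegraph \
                     --num-threads 2
```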
@@ -1,4 +1,4 @@
{"aname": "marko", "bname": "lop", "date": "20171210", "weight": 0.4}
{"aname": "josh", "bname": "lop", "date": "20091111", "weight": 0.4}
{"aname": "josh", "bname": "ripple", "date": "20171210", "weight": 1.0}
{"aname": "peter", "bname": "lop", "date": "20170324", "weight": 0.2}
{"source_name": "marko", "target_name": "lop", "date": "20171210", "weight": 0.4}
{"source_name": "josh", "target_name": "lop", "date": "20091111", "weight": 0.4}
{"source_name": "josh", "target_name": "ripple", "date": "20171210", "weight": 1.0}
{"source_name": "peter", "target_name": "lop", "date": "20170324", "weight": 0.2}
@@ -1,2 +1,2 @@
{"source_name": "marko", "target_name": "vadas", "date": "20160110", "weight": 0.5}
{"source_name": "marko", "target_name": "josh", "date": "20130220", "weight": 1.0}
{"source_name": "marko", "target_name": "josh", "date": "20130220", "weight": 1.0}
@@ -21,4 +21,4 @@ schema.edgeLabel("created").sourceLabel("person").targetLabel("software").proper

schema.indexLabel("createdByDate").onE("created").by("date").secondary().ifNotExist().create();
schema.indexLabel("createdByWeight").onE("created").by("weight").range().ifNotExist().create();
schema.indexLabel("knowsByWeight").onE("knows").by("weight").range().ifNotExist().create();
schema.indexLabel("knowsByWeight").onE("knows").by("weight").range().ifNotExist().create();
@@ -44,17 +44,17 @@
},
{
"label": "created",
"source": ["aname"],
"target": ["bname"],
"source": ["source_name"],
"target": ["target_name"],
"input": {
"type": "file",
"path": "example/edge_created.json",
"format": "JSON"
},
"mapping": {
"aname": "name",
"bname": "name"
"source_name": "name",
"target_name": "name"
}
}
]
}
}
@@ -2,4 +2,4 @@ marko,29,Beijing
vadas,27,Hongkong
josh,32,Beijing
peter,35,Shanghai
"li,nary",26,"Wu,han"
"li,nary",26,"Wu,han"
@@ -0,0 +1,35 @@
#!/bin/bash

set -ev

if [ $# -ne 1 ]; then
echo "Must pass base branch name of pull request"
exit 1
fi

LOADER_BRANCH=$1
HUGEGRAPH_BRANCH=$LOADER_BRANCH

HUGEGRAPH_GIT_URL="https://github.com/hugegraph/hugegraph.git"

git clone $HUGEGRAPH_GIT_URL || exit 1

cd hugegraph

git checkout $HUGEGRAPH_BRANCH || exit 1

mvn package -DskipTests || exit 1

mv hugegraph-*.tar.gz ../

cd ../

rm -rf hugegraph

tar -zxvf hugegraph-*.tar.gz

cd hugegraph-*

bin/init-store.sh || exit 1

bin/start-hugegraph.sh || exit 1
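
After this script finishes, the tests expect a HugeGraph server listening at http://127.0.0.1:8080 (the address used by LoaderTest below). A quick sanity check might look like this sketch; the `/apis/graphs` path is an assumption about the server's REST API:

```bash
# Sanity check (sketch): confirm the server started by install-hugegraph.sh is reachable.
# The address comes from LoaderTest (http://127.0.0.1:8080); the /apis/graphs path is assumed.
curl -s http://127.0.0.1:8080/apis/graphs
```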
@@ -28,7 +28,7 @@
<dependency>
<groupId>com.baidu.hugegraph</groupId>
<artifactId>hugegraph-client</artifactId>
<version>1.5.8</version>
<version>1.6.4</version>
</dependency>
<dependency>
<groupId>commons-io</groupId>
@@ -21,15 +21,16 @@

import java.io.File;
import java.io.IOException;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;

import org.apache.commons.io.FileUtils;
import org.apache.commons.lang3.StringUtils;

import com.google.common.collect.ImmutableList;

public class FileUtil {

private static final String DEFAULT_CHARSET = "UTF-8";
private static final Charset DEFAULT_CHARSET = StandardCharsets.UTF_8;

public static String newCSVLine(Object... parts) {
return StringUtils.join(parts, ",");
@@ -46,19 +47,21 @@ public static void clear(String fileName) {
}
}

public static void append(String fileName, String line) {
append(fileName, line, DEFAULT_CHARSET);
public static void append(String fileName, String... lines) {
append(fileName, DEFAULT_CHARSET, lines);
}

public static void append(String fileName, String line, String charset) {
public static void append(String fileName, Charset charset,
String... lines) {
File file = org.apache.commons.io.FileUtils.getFile(fileName);
checkFileValid(file, true);
try {
FileUtils.writeLines(file, charset, ImmutableList.of(line), true);
FileUtils.writeLines(file, charset.name(),
Arrays.asList(lines), true);
} catch (IOException e) {
throw new RuntimeException(String.format(
"Failed to append line '%s' to file '%s'",
line, fileName), e);
"Failed to append lines '%s' to file '%s'",
lines, fileName), e);
}
}

@@ -19,6 +19,7 @@

package com.baidu.hugegraph.loader.test.functional;

import java.nio.charset.Charset;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Arrays;
@@ -43,10 +44,11 @@
import com.baidu.hugegraph.structure.schema.PropertyKey;
import com.baidu.hugegraph.testutil.Assert;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;

public class LoaderTest {

private static final Charset GBK = Charset.forName("GBK");

private static final String PATH_PREFIX = "src/test/resources";
private static final String url = "http://127.0.0.1:8080";
private static final String graph = "hugegraph";
@@ -61,9 +63,11 @@ public static void setUp() {
@Before
public void init() {
FileUtil.append(path("vertex_person.csv"), "name,age,city");
FileUtil.append(path("vertex_software.csv"), "name,lang,price", "GBK");
FileUtil.append(path("edge_knows.csv"), "aname,bname,date,weight");
FileUtil.append(path("edge_created.csv"), "aname,bname,date,weight");
FileUtil.append(path("vertex_software.csv"), GBK, "name,lang,price");
FileUtil.append(path("edge_knows.csv"),
"source_name,target_name,date,weight");
FileUtil.append(path("edge_created.csv"),
"source_name,target_name,date,weight");
}

@After
@@ -99,17 +103,17 @@ private static void clearServerData() {
// Clear schema
List<Long> taskIds = new ArrayList<>();
schema.getIndexLabels().forEach(il -> {
taskIds.add(schema.removeIndexLabel(il.name()));
taskIds.add(schema.removeIndexLabelAsync(il.name()));
});
taskIds.forEach(id -> task.waitUntilTaskCompleted(id, 5L));
taskIds.clear();
schema.getEdgeLabels().forEach(el -> {
taskIds.add(schema.removeEdgeLabel(el.name()));
taskIds.add(schema.removeEdgeLabelAsync(el.name()));
});
taskIds.forEach(id -> task.waitUntilTaskCompleted(id, 5L));
taskIds.clear();
schema.getVertexLabels().forEach(vl -> {
taskIds.add(schema.removeVertexLabel(vl.name()));
taskIds.add(schema.removeVertexLabelAsync(vl.name()));
});
taskIds.forEach(id -> task.waitUntilTaskCompleted(id, 5L));
taskIds.clear();
@@ -170,8 +174,29 @@ public void testLoadWithAutoCreateSchema() {

@Test
public void testLoadWithCustomizedSchema() {
String[] args = new String[]{"-f", "example/struct.json",
"-s", "example/schema.groovy",
FileUtil.append(path("vertex_person.csv"),
"marko,29,Beijing",
"vadas,27,Hongkong",
"josh,32,Beijing",
"peter,35,Shanghai",
"\"li,nary\",26,\"Wu,han\"");

FileUtil.append(path("vertex_software.csv"),
"lop,java,328",
"ripple,java,199");

FileUtil.append(path("edge_knows.csv"),
"marko,vadas,20160110,0.5",
"marko,josh,20130220,1.0");

FileUtil.append(path("edge_created.csv"),
"marko,lop,20171210,0.4",
"josh,lop,20091111,0.4",
"josh,ripple,20171210,1.0",
"peter,lop,20170324,0.2");

String[] args = new String[]{"-f", path("struct.json"),
"-s", path("schema.groovy"),
"-g", "hugegraph",
"--num-threads", "2",
"--test-mode", "true"};
@@ -239,7 +264,7 @@ public void testLoadWithIdExceedLimitLengthInBytes() {
"啡前壳+极光银后壳+浅灰电池扣+极光银电池组件+深灰天线";
assert pk.length() < 128;
String line = FileUtil.newCSVLine(pk, "中文", 328);
FileUtil.append(path("vertex_software.csv"), line, "GBK");
FileUtil.append(path("vertex_software.csv"), GBK, line);

String[] args = new String[]{"-f", path("struct.json"),
"-s", path("schema.groovy"),
@@ -330,7 +355,7 @@ public void testVertexPkContainsSpecicalSymbol() {
@Test
public void testLoadWithUnmatchedEncodingCharset() {
String line = FileUtil.newCSVLine("lop", "中文", 328);
FileUtil.append(path("vertex_software.csv"), line, "GBK");
FileUtil.append(path("vertex_software.csv"), GBK, line);

String[] args = new String[]{"-f", path("struct.json"),
"-g", "hugegraph",
@@ -354,7 +379,7 @@ public void testLoadWithUnmatchedEncodingCharset() {
@Test
public void testLoadWithMatchedEncodingCharset() {
String line = FileUtil.newCSVLine("lop", "中文", 328);
FileUtil.append(path("vertex_software.csv"), line, "GBK");
FileUtil.append(path("vertex_software.csv"), GBK, line);

String[] args = new String[]{"-f", path("struct_gbk.json"),
"-g", "hugegraph",
@@ -382,7 +407,7 @@ public void testLoadWithValueListPorpertyInJsonFile() {
FileUtil.append(path("vertex_person.csv"), line);

line = FileUtil.newCSVLine("lop", "中文", 328);
FileUtil.append(path("vertex_software.csv"), line, "GBK");
FileUtil.append(path("vertex_software.csv"), GBK, line);

line = "{\"person_name\": \"marko\", \"software_name\": \"lop\", " +
"\"feel\": [\"so so\", \"good\", \"good\"]}";
@@ -419,7 +444,7 @@ public void testLoadWithValueSetPorpertyInJsonFile() {
FileUtil.append(path("vertex_person.csv"), line);

line = FileUtil.newCSVLine("lop", "中文", 328);
FileUtil.append(path("vertex_software.csv"), line, "GBK");
FileUtil.append(path("vertex_software.csv"), GBK, line);

line = "{\"person_name\": \"marko\", \"software_name\": \"lop\", " +
"\"time\": [\"20171210\", \"20180101\"]}";
@@ -40,16 +40,16 @@
},
{
"label": "created",
"source": ["aname"],
"target": ["bname"],
"source": ["source_name"],
"target": ["target_name"],
"input": {
"type": "file",
"path": "src/test/resources/edge_created.csv",
"format": "CSV"
},
"mapping": {
"aname": "name",
"bname": "name"
"source_name": "name",
"target_name": "name"
}
}
]
@@ -42,17 +42,17 @@
},
{
"label": "created",
"source": ["aname"],
"target": ["bname"],
"source": ["source_name"],
"target": ["target_name"],
"input": {
"type": "file",
"path": "src/test/resources/edge_created.csv",
"format": "CSV",
"charset": "GBK"
},
"mapping": {
"aname": "name",
"bname": "name"
"source_name": "name",
"target_name": "name"
}
}
]
