/
import_tables.sh
executable file
·43 lines (34 loc) · 1.18 KB
/
import_tables.sh
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
#!/usr/bin/env bash
# Imports test_data.json into a MapR-DB JSON table and creates the matching
# Hive table over it, using a parameterized copy of a DDL template.
# Requires conf/drillTestConfig.properties to define DRILL_TESTDATA and
# DRILL_TEST_DATA_DIR (and optionally USERNAME).
source conf/drillTestConfig.properties

# Pick the user that runs the MapR import: prefer $USERNAME from the config,
# otherwise fall back to the owner of the running Drillbit process.
# NOTE: "$USERNAME" must be quoted — unquoted, [ -n $USERNAME ] collapses to
# [ -n ] when the variable is empty, which is always true and leaves $user blank.
if [ -n "$USERNAME" ]
then
user=$USERNAME
else
user=$(ps -aef | grep Drillbit | grep org.apache.drill | cut -d' ' -f1 | head -1)
fi
hadoop_folder=${DRILL_TESTDATA}/hive_storage/maprdb/json
test_data=test_data.json
maprdb_table=json_MapR_DB_table
# Creating the MapR-DB table: drop any leftover table from a previous run first.
if hadoop fs -test -f "$hadoop_folder/$maprdb_table"
then
hadoop fs -rm -r "$hadoop_folder/$maprdb_table"
fi
# root can run the import directly; any other user goes through sudo.
if [ "$user" == "root" ]
then
mapr importJSON -idField "id" -src "$hadoop_folder/$test_data" -dst "$hadoop_folder/$maprdb_table"
else
sudo -u "$user" mapr importJSON -idField "id" -src "$hadoop_folder/$test_data" -dst "$hadoop_folder/$maprdb_table"
fi
# Creating the Hive table: work on a copy of the DDL template so the
# original stays pristine.
hive_ddl=${DRILL_TEST_DATA_DIR}/Datasources/hive_storage/hive_maprdb_json/hive_table.ddl
hive_ddl_parametrized=${hive_ddl%.*}_param.ddl
cp "$hive_ddl" "$hive_ddl_parametrized"
# Replacing parameters with values (| delimiter because $hadoop_folder contains slashes):
sed -i "s|table_name|$maprdb_table|g" "$hive_ddl_parametrized"
sed -i "s|hadoop_folder|$hadoop_folder|g" "$hive_ddl_parametrized"
# Executing the ddl:
"${DRILL_TEST_DATA_DIR}/Datasources/hive/execHive.sh" "$hive_ddl_parametrized"
# Clean: remove the generated parameterized DDL.
rm "$hive_ddl_parametrized"