Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion dev/deps/spark-deps-hadoop-2-hive-2.3
Original file line number Diff line number Diff line change
Expand Up @@ -97,7 +97,7 @@ hive-jdbc/2.3.9//hive-jdbc-2.3.9.jar
hive-llap-common/2.3.9//hive-llap-common-2.3.9.jar
hive-metastore/2.3.9//hive-metastore-2.3.9.jar
hive-serde/2.3.9//hive-serde-2.3.9.jar
hive-service-rpc/3.1.3//hive-service-rpc-3.1.3.jar
hive-service-rpc/4.0.0-alpha-1//hive-service-rpc-4.0.0-alpha-1.jar
hive-shims-0.23/2.3.9//hive-shims-0.23-2.3.9.jar
hive-shims-common/2.3.9//hive-shims-common-2.3.9.jar
hive-shims-scheduler/2.3.9//hive-shims-scheduler-2.3.9.jar
Expand Down
2 changes: 1 addition & 1 deletion dev/deps/spark-deps-hadoop-3-hive-2.3
Original file line number Diff line number Diff line change
Expand Up @@ -87,7 +87,7 @@ hive-jdbc/2.3.9//hive-jdbc-2.3.9.jar
hive-llap-common/2.3.9//hive-llap-common-2.3.9.jar
hive-metastore/2.3.9//hive-metastore-2.3.9.jar
hive-serde/2.3.9//hive-serde-2.3.9.jar
hive-service-rpc/3.1.3//hive-service-rpc-3.1.3.jar
hive-service-rpc/4.0.0-alpha-1//hive-service-rpc-4.0.0-alpha-1.jar
hive-shims-0.23/2.3.9//hive-shims-0.23-2.3.9.jar
hive-shims-common/2.3.9//hive-shims-common-2.3.9.jar
hive-shims-scheduler/2.3.9//hive-shims-scheduler-2.3.9.jar
Expand Down
2 changes: 1 addition & 1 deletion pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -2372,7 +2372,7 @@
<dependency>
<groupId>${hive.group}</groupId>
<artifactId>hive-service-rpc</artifactId>
<version>3.1.3</version>
<version>4.0.0-alpha-1</version>
<exclusions>
<exclusion>
<groupId>*</groupId>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,7 @@
package org.apache.hive.service.cli;

import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CancellationException;
Expand Down Expand Up @@ -428,6 +429,33 @@ public OperationHandle getCrossReference(SessionHandle sessionHandle,
return opHandle;
}

/**
 * Starts an upload-data operation by delegating to the session identified by the handle.
 *
 * @see org.apache.hive.service.cli.ICLIService#uploadData(org.apache.hive.service.cli.SessionHandle)
 */
@Override
public OperationHandle uploadData(
    SessionHandle sessionHandle,
    ByteBuffer values,
    String tableName,
    String path) throws HiveSQLException {
  LOG.info(sessionHandle + ": uploadData()");
  // Look up the owning session, then hand the request off to it.
  OperationHandle opHandle =
      sessionManager.getSession(sessionHandle).uploadData(values, tableName, path);
  return opHandle;
}

/**
 * Starts a download-data operation by delegating to the session identified by the handle.
 *
 * @see org.apache.hive.service.cli.ICLIService#downloadData(org.apache.hive.service.cli.SessionHandle)
 */
@Override
public OperationHandle downloadData(
    SessionHandle sessionHandle,
    String tableName,
    String query,
    String format,
    Map<String, String> options) throws HiveSQLException {
  LOG.info(sessionHandle + ": downloadData()");
  // Look up the owning session, then hand the request off to it.
  OperationHandle opHandle =
      sessionManager.getSession(sessionHandle).downloadData(tableName, query, format, options);
  return opHandle;
}

/* (non-Javadoc)
* @see org.apache.hive.service.cli.ICLIService#getOperationStatus(org.apache.hive.service.cli.OperationHandle)
*/
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,7 @@
*/
package org.apache.hive.service.cli;

import java.nio.ByteBuffer;
import java.util.List;
import java.util.Map;

Expand Down Expand Up @@ -112,4 +113,17 @@ OperationHandle getPrimaryKeys(SessionHandle sessionHandle, String catalog,
OperationHandle getCrossReference(SessionHandle sessionHandle,
String primaryCatalog, String primarySchema, String primaryTable,
String foreignCatalog, String foreignSchema, String foreignTable) throws HiveSQLException;

/**
 * Starts an operation that uploads client-supplied bytes into a table or path.
 *
 * @param sessionHandle handle of the session that will own the new operation
 * @param values raw bytes to upload
 * @param tableName target table name; relationship between tableName and path is
 *                  implementation-defined — TODO confirm whether both may be set
 * @param path target location for the uploaded data
 * @return handle of the started upload operation
 * @throws HiveSQLException if the operation cannot be started
 */
OperationHandle uploadData(
SessionHandle sessionHandle,
ByteBuffer values,
String tableName,
String path) throws HiveSQLException;

/**
 * Starts an operation that prepares data for download, sourced either from a
 * table or from a query result.
 *
 * @param sessionHandle handle of the session that will own the new operation
 * @param tableName table to download from; presumably mutually exclusive with
 *                  {@code query} — TODO confirm against implementations
 * @param query query whose result should be downloaded
 * @param format serialization format for the downloaded data
 * @param options implementation-specific download options
 * @return handle of the started download operation
 * @throws HiveSQLException if the operation cannot be started
 */
OperationHandle downloadData(
SessionHandle sessionHandle,
String tableName,
String query,
String format,
Map<String, String> options) throws HiveSQLException;
}
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,7 @@

package org.apache.hive.service.cli.operation;

import java.nio.ByteBuffer;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.HashMap;
Expand Down Expand Up @@ -170,6 +171,23 @@ public GetCrossReferenceOperation newGetCrossReferenceOperation(
return operation;
}

/**
 * Creates an operation that uploads the given bytes into a table or path.
 *
 * <p>This base implementation does not support data upload; subclasses that do
 * are expected to override it.
 *
 * @param parentSession session that will own the operation
 * @param values raw bytes to upload
 * @param tableName target table name
 * @param path target path for the uploaded data
 * @return never returns normally in this base implementation
 * @throws HiveSQLException always, since upload is unsupported here
 */
public Operation newUploadDataOperation(
    HiveSession parentSession,
    ByteBuffer values,
    String tableName,
    String path) throws HiveSQLException {
  // Fixed message "unimplemented exception" was uninformative; name the operation.
  throw new HiveSQLException("uploadData is not supported by this OperationManager");
}

/**
 * Creates an operation that prepares data for download from a table or query.
 *
 * <p>This base implementation does not support data download; subclasses that do
 * are expected to override it.
 *
 * @param parentSession session that will own the operation
 * @param tableName table to download from
 * @param query query whose result should be downloaded
 * @param format serialization format for the downloaded data
 * @param options implementation-specific download options
 * @return never returns normally in this base implementation
 * @throws HiveSQLException always, since download is unsupported here
 */
public Operation newDownloadDataOperation(
    HiveSession parentSession,
    String tableName,
    String query,
    String format,
    Map<String, String> options) throws HiveSQLException {
  // Fixed message "unimplemented exception" was uninformative; name the operation.
  throw new HiveSQLException("downloadData is not supported by this OperationManager");
}

public Operation getOperation(OperationHandle operationHandle) throws HiveSQLException {
Operation operation = getOperationInternal(operationHandle);
if (operation == null) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,7 @@

package org.apache.hive.service.cli.session;

import java.nio.ByteBuffer;
import java.util.List;
import java.util.Map;

Expand Down Expand Up @@ -172,6 +173,32 @@ OperationHandle getCrossReference(String primaryCatalog,
String primarySchema, String primaryTable, String foreignCatalog,
String foreignSchema, String foreignTable) throws HiveSQLException;

/**
 * uploadData operation handler: starts an operation that uploads the given
 * bytes into a table or path owned by this session.
 *
 * @param values raw bytes to upload
 * @param tableName target table name
 * @param path target path for the uploaded data
 * @return handle of the started upload operation
 * @throws HiveSQLException if the operation cannot be started
 */
OperationHandle uploadData(
ByteBuffer values, String tableName, String path) throws HiveSQLException;

/**
 * downloadData operation handler: starts an operation that prepares data for
 * download, sourced from a table or from a query result.
 *
 * @param tableName table to download from; presumably mutually exclusive with
 *                  {@code query} — TODO confirm against implementations
 * @param query query whose result should be downloaded
 * @param format serialization format for the downloaded data
 * @param options implementation-specific download options
 * @return handle of the started download operation
 * @throws HiveSQLException if the operation cannot be started
 */
OperationHandle downloadData(
String tableName,
String query,
String format,
Map<String, String> options) throws HiveSQLException;

/**
* close the session
* @throws HiveSQLException
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,7 @@
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.nio.ByteBuffer;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
Expand Down Expand Up @@ -898,4 +899,49 @@ public OperationHandle getCrossReference(String primaryCatalog,
release(true);
}
}

/**
 * Starts an upload-data operation in this session.
 *
 * @param values raw bytes to upload
 * @param tableName target table name
 * @param path target path for the uploaded data
 * @return handle of the started upload operation
 * @throws HiveSQLException if the operation cannot be created or started
 */
@Override
public OperationHandle uploadData(
    ByteBuffer values, String tableName, String path) throws HiveSQLException {
  acquire(true);
  // BUG FIX: operation creation used to run between acquire() and the
  // try/finally, so a failure in newUploadDataOperation skipped release(true)
  // and leaked the session acquisition. Everything now sits inside the try.
  try {
    OperationManager operationManager = getOperationManager();
    Operation operation = operationManager.newUploadDataOperation(
        getSession(), values, tableName, path);
    OperationHandle opHandle = operation.getHandle();
    try {
      operation.run();
      // Track the handle only after the operation started cleanly.
      opHandleSet.add(opHandle);
      return opHandle;
    } catch (HiveSQLException e) {
      // Startup failed: close the orphaned operation so it does not leak.
      operationManager.closeOperation(opHandle);
      throw e;
    }
  } finally {
    release(true);
  }
}

/**
 * Starts a download-data operation in this session.
 *
 * @param tableName table to download from
 * @param query query whose result should be downloaded
 * @param format serialization format for the downloaded data
 * @param options implementation-specific download options
 * @return handle of the started download operation
 * @throws HiveSQLException if the operation cannot be created or started
 */
@Override
public OperationHandle downloadData(
    String tableName,
    String query,
    String format,
    Map<String, String> options) throws HiveSQLException {
  acquire(true);
  // BUG FIX: operation creation used to run between acquire() and the
  // try/finally, so a failure in newDownloadDataOperation skipped release(true)
  // and leaked the session acquisition. Everything now sits inside the try.
  try {
    OperationManager operationManager = getOperationManager();
    Operation operation = operationManager.newDownloadDataOperation(
        getSession(), tableName, query, format, options);
    OperationHandle opHandle = operation.getHandle();
    try {
      operation.run();
      // Track the handle only after the operation started cleanly.
      opHandleSet.add(opHandle);
      return opHandle;
    } catch (HiveSQLException e) {
      // Startup failed: close the orphaned operation so it does not leak.
      operationManager.closeOperation(opHandle);
      throw e;
    }
  } finally {
    release(true);
  }
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -32,8 +32,7 @@
import org.apache.hive.service.auth.HiveAuthFactory;
import org.apache.hive.service.cli.CLIService;
import org.apache.hive.service.cli.HiveSQLException;
import org.apache.hive.service.rpc.thrift.TGetQueryIdReq;
import org.apache.hive.service.rpc.thrift.TGetQueryIdResp;
import org.apache.hive.service.rpc.thrift.*;
import org.apache.hive.service.server.ThreadFactoryWithGarbageCleanup;
import org.apache.thrift.TException;
import org.apache.thrift.TProcessorFactory;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -688,6 +688,45 @@ public TGetCrossReferenceResp GetCrossReference(TGetCrossReferenceReq req)
return resp;
}

/**
 * Thrift handler for UploadData: unwraps the request, delegates to the CLI
 * service, and maps any failure into an error TStatus on the response.
 */
@Override
public TUploadDataResp UploadData(TUploadDataReq req) throws TException {
  TUploadDataResp response = new TUploadDataResp();
  try {
    OperationHandle opHandle = cliService.uploadData(
        new SessionHandle(req.getSessionHandle()),
        req.bufferForValues(),
        req.getTableName(),
        req.getPath());
    response.setOperationHandle(opHandle.toTOperationHandle());
    response.setStatus(OK_STATUS);
  } catch (Exception e) {
    // Errors are reported through the response status, never thrown to Thrift.
    LOG.warn("Error UploadData: ", e);
    response.setStatus(HiveSQLException.toTStatus(e));
  }
  return response;
}

/**
 * Thrift handler for DownloadData: unwraps the request, delegates to the CLI
 * service, and maps any failure into an error TStatus on the response.
 */
@Override
public TDownloadDataResp DownloadData(TDownloadDataReq req) throws TException {
  TDownloadDataResp response = new TDownloadDataResp();
  try {
    OperationHandle opHandle = cliService.downloadData(
        new SessionHandle(req.getSessionHandle()),
        req.getTableName(),
        req.getQuery(),
        req.getFormat(),
        req.getDownloadOptions());
    response.setOperationHandle(opHandle.toTOperationHandle());
    response.setStatus(OK_STATUS);
  } catch (Exception e) {
    // Errors are reported through the response status, never thrown to Thrift.
    LOG.warn("Error download data: ", e);
    response.setStatus(HiveSQLException.toTStatus(e));
  }
  return response;
}

protected abstract void initializeServer();

@Override
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,7 @@

package org.apache.hive.service.cli.thrift;

import java.nio.ByteBuffer;
import java.util.List;
import java.util.Map;

Expand Down Expand Up @@ -490,6 +491,49 @@ public OperationHandle getCrossReference(SessionHandle sessionHandle,
}
}

/**
 * Client-side uploadData: builds a TUploadDataReq, sends it over Thrift, and
 * converts the returned operation handle back into the CLI-layer type.
 *
 * @throws HiveSQLException if the server reports an error status or the call fails
 */
@Override
public OperationHandle uploadData(
    SessionHandle sessionHandle,
    ByteBuffer values,
    String tableName,
    String path) throws HiveSQLException {
  try {
    TUploadDataReq request = new TUploadDataReq(sessionHandle.toTSessionHandle(), values);
    request.setTableName(tableName);
    request.setPath(path);
    TUploadDataResp response = cliService.UploadData(request);
    // Turns an error TStatus into a HiveSQLException.
    checkStatus(response.getStatus());
    return new OperationHandle(
        response.getOperationHandle(), sessionHandle.getProtocolVersion());
  } catch (HiveSQLException e) {
    throw e; // already the declared type; do not double-wrap
  } catch (Exception e) {
    throw new HiveSQLException(e);
  }
}

/**
 * Client-side downloadData: builds a TDownloadDataReq, sends it over Thrift, and
 * converts the returned operation handle back into the CLI-layer type.
 *
 * @throws HiveSQLException if the server reports an error status or the call fails
 */
@Override
public OperationHandle downloadData(
    SessionHandle sessionHandle,
    String tableName,
    String query,
    String format,
    Map<String, String> options) throws HiveSQLException {
  try {
    TDownloadDataReq req = new TDownloadDataReq(sessionHandle.toTSessionHandle());
    req.setTableName(tableName);
    req.setQuery(query);
    req.setFormat(format);
    req.setDownloadOptions(options);
    TDownloadDataResp resp = cliService.DownloadData(req);
    checkStatus(resp.getStatus());
    TProtocolVersion protocol = sessionHandle.getProtocolVersion();
    return new OperationHandle(resp.getOperationHandle(), protocol);
  } catch (HiveSQLException e) {
    throw e; // already the declared type; do not double-wrap
  } catch (Exception e) {
    // CONSISTENCY FIX: the sibling uploadData wraps every failure (not just
    // TException) in HiveSQLException; previously a RuntimeException escaped raw.
    throw new HiveSQLException(e);
  }
}

@Override
public String getQueryId(TOperationHandle operationHandle) throws HiveSQLException {
try {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -30,9 +30,10 @@
import org.apache.hive.service.ServiceException;
import org.apache.hive.service.auth.HiveAuthFactory;
import org.apache.hive.service.cli.CLIService;
import org.apache.hive.service.rpc.thrift.TCLIService;
import org.apache.hive.service.rpc.thrift.*;
import org.apache.hive.service.rpc.thrift.TCLIService.Iface;
import org.apache.hive.service.server.ThreadFactoryWithGarbageCleanup;
import org.apache.thrift.TException;
import org.apache.thrift.TProcessor;
import org.apache.thrift.protocol.TBinaryProtocol;
import org.apache.thrift.protocol.TProtocolFactory;
Expand Down Expand Up @@ -210,4 +211,14 @@ private String getHttpPath(String httpPath) {
}
return httpPath;
}

/**
 * Data upload is not implemented for the HTTP Thrift transport.
 *
 * <p>Previously this returned {@code null}, which surfaces as an opaque
 * NullPointerException when Thrift serializes the response; fail fast with a
 * descriptive TException instead.
 *
 * @throws TException always
 */
@Override
public TUploadDataResp UploadData(TUploadDataReq req) throws TException {
  throw new TException("UploadData is not supported by the HTTP Thrift transport");
}

/**
 * Data download is not implemented for the HTTP Thrift transport.
 *
 * <p>Previously this returned {@code null}, which surfaces as an opaque
 * NullPointerException when Thrift serializes the response; fail fast with a
 * descriptive TException instead.
 *
 * @throws TException always
 */
@Override
public TDownloadDataResp DownloadData(TDownloadDataReq req) throws TException {
  throw new TException("DownloadData is not supported by the HTTP Thrift transport");
}
}
Loading