/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hive.service.cli.thrift;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.util.HashMap;
import java.util.Map;
import javax.security.auth.login.LoginException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
import org.apache.hive.service.AbstractService;
import org.apache.hive.service.auth.HiveAuthFactory;
import org.apache.hive.service.cli.CLIService;
import org.apache.hive.service.cli.FetchOrientation;
import org.apache.hive.service.cli.GetInfoType;
import org.apache.hive.service.cli.GetInfoValue;
import org.apache.hive.service.cli.HiveSQLException;
import org.apache.hive.service.cli.OperationHandle;
import org.apache.hive.service.cli.OperationState;
import org.apache.hive.service.cli.RowSet;
import org.apache.hive.service.cli.SessionHandle;
import org.apache.hive.service.cli.TableSchema;
import org.apache.thrift.TException;
import org.apache.thrift.TProcessorFactory;
import org.apache.thrift.protocol.TBinaryProtocol;
import org.apache.thrift.server.TServer;
import org.apache.thrift.server.TThreadPoolServer;
import org.apache.thrift.transport.TServerSocket;
import org.apache.thrift.transport.TTransportFactory;
/**
* CLIService.
*
*/
/**
 * ThriftCLIService - Thrift (TCLIService.Iface) frontend for {@link CLIService}.
 *
 * Each RPC handler follows the same pattern: translate the Thrift request into
 * CLIService arguments, invoke the service, and fold any exception into the
 * response's TStatus (handlers never propagate exceptions to the Thrift layer).
 * When run as a standalone server, {@link #run()} binds a TThreadPoolServer on
 * the configured host/port.
 */
public class ThriftCLIService extends AbstractService implements TCLIService.Iface, Runnable {

  public static final Log LOG = LogFactory.getLog(ThriftCLIService.class.getName());

  protected CLIService cliService;
  private static final TStatus OK_STATUS = new TStatus(TStatusCode.SUCCESS_STATUS);
  // Retained for subclass/future use; errors currently carry status via HiveSQLException.toTStatus.
  private static final TStatus ERROR_STATUS = new TStatus(TStatusCode.ERROR_STATUS);

  // Set once in run(); consulted by getUserName() to prefer the authenticated remote user.
  private static HiveAuthFactory hiveAuthFactory;

  private int portNum;
  private InetSocketAddress serverAddress;
  private TServer server;

  private boolean isStarted = false;
  // Embedded mode (in-process client) never starts the Thrift server thread.
  protected boolean isEmbedded = false;

  private HiveConf hiveConf;

  private int minWorkerThreads;
  private int maxWorkerThreads;

  public ThriftCLIService(CLIService cliService) {
    super("ThriftCLIService");
    this.cliService = cliService;
  }

  @Override
  public synchronized void init(HiveConf hiveConf) {
    this.hiveConf = hiveConf;
    super.init(hiveConf);
  }

  /**
   * Starts the Thrift server on a dedicated thread unless running embedded.
   */
  @Override
  public synchronized void start() {
    super.start();
    if (!isStarted && !isEmbedded) {
      new Thread(this).start();
      isStarted = true;
    }
  }

  @Override
  public synchronized void stop() {
    if (isStarted && !isEmbedded) {
      server.stop();
      isStarted = false;
    }
    super.stop();
  }

  @Override
  public TOpenSessionResp OpenSession(TOpenSessionReq req) throws TException {
    TOpenSessionResp resp = new TOpenSessionResp();
    try {
      SessionHandle sessionHandle = getSessionHandle(req);
      resp.setSessionHandle(sessionHandle.toTSessionHandle());
      // TODO: set real configuration map
      resp.setConfiguration(new HashMap<String, String>());
      resp.setStatus(OK_STATUS);
    } catch (Exception e) {
      LOG.warn("Error opening session: ", e);
      resp.setStatus(HiveSQLException.toTStatus(e));
    }
    return resp;
  }

  /**
   * Returns the effective user for the session: the authenticated remote user
   * when an auth factory has established one, otherwise the name supplied in
   * the open-session request.
   */
  private String getUserName(TOpenSessionReq req) {
    if (hiveAuthFactory != null
        && hiveAuthFactory.getRemoteUser() != null) {
      return hiveAuthFactory.getRemoteUser();
    } else {
      return req.getUsername();
    }
  }

  /**
   * Opens a session for the request's user. Under Kerberos with doAs enabled,
   * the session is opened with impersonation, carrying a metastore delegation
   * token when the deployment supports one.
   */
  SessionHandle getSessionHandle(TOpenSessionReq req)
      throws HiveSQLException, LoginException, IOException {
    String userName = getUserName(req);
    SessionHandle sessionHandle = null;
    if (cliService.getHiveConf().getVar(ConfVars.HIVE_SERVER2_AUTHENTICATION)
        .equals(HiveAuthFactory.AuthTypes.KERBEROS.toString())
        && cliService.getHiveConf().getBoolVar(ConfVars.HIVE_SERVER2_ENABLE_DOAS)) {
      String delegationTokenStr = null;
      try {
        delegationTokenStr = cliService.getDelegationTokenFromMetaStore(userName);
      } catch (UnsupportedOperationException e) {
        // The delegation token is not applicable in the given deployment mode
      }
      sessionHandle = cliService.openSessionWithImpersonation(userName, req.getPassword(),
          req.getConfiguration(), delegationTokenStr);
    } else {
      sessionHandle = cliService.openSession(userName, req.getPassword(),
          req.getConfiguration());
    }
    return sessionHandle;
  }

  @Override
  public TCloseSessionResp CloseSession(TCloseSessionReq req) throws TException {
    TCloseSessionResp resp = new TCloseSessionResp();
    try {
      SessionHandle sessionHandle = new SessionHandle(req.getSessionHandle());
      cliService.closeSession(sessionHandle);
      resp.setStatus(OK_STATUS);
    } catch (Exception e) {
      LOG.warn("Error closing session: ", e);
      resp.setStatus(HiveSQLException.toTStatus(e));
    }
    return resp;
  }

  @Override
  public TGetInfoResp GetInfo(TGetInfoReq req) throws TException {
    TGetInfoResp resp = new TGetInfoResp();
    try {
      GetInfoValue getInfoValue =
          cliService.getInfo(new SessionHandle(req.getSessionHandle()),
              GetInfoType.getGetInfoType(req.getInfoType()));
      resp.setInfoValue(getInfoValue.toTGetInfoValue());
      resp.setStatus(OK_STATUS);
    } catch (Exception e) {
      LOG.warn("Error getting info: ", e);
      resp.setStatus(HiveSQLException.toTStatus(e));
    }
    return resp;
  }

  @Override
  public TExecuteStatementResp ExecuteStatement(TExecuteStatementReq req) throws TException {
    TExecuteStatementResp resp = new TExecuteStatementResp();
    try {
      SessionHandle sessionHandle = new SessionHandle(req.getSessionHandle());
      String statement = req.getStatement();
      Map<String, String> confOverlay = req.getConfOverlay();
      OperationHandle operationHandle =
          cliService.executeStatement(sessionHandle, statement, confOverlay);
      resp.setOperationHandle(operationHandle.toTOperationHandle());
      resp.setStatus(OK_STATUS);
    } catch (Exception e) {
      LOG.warn("Error executing statement: ", e);
      resp.setStatus(HiveSQLException.toTStatus(e));
    }
    return resp;
  }

  @Override
  public TGetTypeInfoResp GetTypeInfo(TGetTypeInfoReq req) throws TException {
    TGetTypeInfoResp resp = new TGetTypeInfoResp();
    try {
      OperationHandle operationHandle =
          cliService.getTypeInfo(new SessionHandle(req.getSessionHandle()));
      resp.setOperationHandle(operationHandle.toTOperationHandle());
      resp.setStatus(OK_STATUS);
    } catch (Exception e) {
      LOG.warn("Error getting type info: ", e);
      resp.setStatus(HiveSQLException.toTStatus(e));
    }
    return resp;
  }

  @Override
  public TGetCatalogsResp GetCatalogs(TGetCatalogsReq req) throws TException {
    TGetCatalogsResp resp = new TGetCatalogsResp();
    try {
      OperationHandle opHandle = cliService.getCatalogs(new SessionHandle(req.getSessionHandle()));
      resp.setOperationHandle(opHandle.toTOperationHandle());
      resp.setStatus(OK_STATUS);
    } catch (Exception e) {
      LOG.warn("Error getting catalogs: ", e);
      resp.setStatus(HiveSQLException.toTStatus(e));
    }
    return resp;
  }

  @Override
  public TGetSchemasResp GetSchemas(TGetSchemasReq req) throws TException {
    TGetSchemasResp resp = new TGetSchemasResp();
    try {
      OperationHandle opHandle = cliService.getSchemas(
          new SessionHandle(req.getSessionHandle()), req.getCatalogName(), req.getSchemaName());
      resp.setOperationHandle(opHandle.toTOperationHandle());
      resp.setStatus(OK_STATUS);
    } catch (Exception e) {
      LOG.warn("Error getting schemas: ", e);
      resp.setStatus(HiveSQLException.toTStatus(e));
    }
    return resp;
  }

  @Override
  public TGetTablesResp GetTables(TGetTablesReq req) throws TException {
    TGetTablesResp resp = new TGetTablesResp();
    try {
      OperationHandle opHandle = cliService
          .getTables(new SessionHandle(req.getSessionHandle()), req.getCatalogName(),
              req.getSchemaName(), req.getTableName(), req.getTableTypes());
      resp.setOperationHandle(opHandle.toTOperationHandle());
      resp.setStatus(OK_STATUS);
    } catch (Exception e) {
      LOG.warn("Error getting tables: ", e);
      resp.setStatus(HiveSQLException.toTStatus(e));
    }
    return resp;
  }

  @Override
  public TGetTableTypesResp GetTableTypes(TGetTableTypesReq req) throws TException {
    TGetTableTypesResp resp = new TGetTableTypesResp();
    try {
      OperationHandle opHandle =
          cliService.getTableTypes(new SessionHandle(req.getSessionHandle()));
      resp.setOperationHandle(opHandle.toTOperationHandle());
      resp.setStatus(OK_STATUS);
    } catch (Exception e) {
      LOG.warn("Error getting table types: ", e);
      resp.setStatus(HiveSQLException.toTStatus(e));
    }
    return resp;
  }

  @Override
  public TGetColumnsResp GetColumns(TGetColumnsReq req) throws TException {
    TGetColumnsResp resp = new TGetColumnsResp();
    try {
      OperationHandle opHandle = cliService.getColumns(
          new SessionHandle(req.getSessionHandle()),
          req.getCatalogName(),
          req.getSchemaName(),
          req.getTableName(),
          req.getColumnName());
      resp.setOperationHandle(opHandle.toTOperationHandle());
      resp.setStatus(OK_STATUS);
    } catch (Exception e) {
      LOG.warn("Error getting columns: ", e);
      resp.setStatus(HiveSQLException.toTStatus(e));
    }
    return resp;
  }

  @Override
  public TGetFunctionsResp GetFunctions(TGetFunctionsReq req) throws TException {
    TGetFunctionsResp resp = new TGetFunctionsResp();
    try {
      OperationHandle opHandle = cliService.getFunctions(
          new SessionHandle(req.getSessionHandle()), req.getCatalogName(),
          req.getSchemaName(), req.getFunctionName());
      resp.setOperationHandle(opHandle.toTOperationHandle());
      resp.setStatus(OK_STATUS);
    } catch (Exception e) {
      LOG.warn("Error getting functions: ", e);
      resp.setStatus(HiveSQLException.toTStatus(e));
    }
    return resp;
  }

  @Override
  public TGetOperationStatusResp GetOperationStatus(TGetOperationStatusReq req) throws TException {
    TGetOperationStatusResp resp = new TGetOperationStatusResp();
    try {
      OperationState operationState =
          cliService.getOperationStatus(new OperationHandle(req.getOperationHandle()));
      resp.setOperationState(operationState.toTOperationState());
      resp.setStatus(OK_STATUS);
    } catch (Exception e) {
      LOG.warn("Error getting operation status: ", e);
      resp.setStatus(HiveSQLException.toTStatus(e));
    }
    return resp;
  }

  @Override
  public TCancelOperationResp CancelOperation(TCancelOperationReq req) throws TException {
    TCancelOperationResp resp = new TCancelOperationResp();
    try {
      cliService.cancelOperation(new OperationHandle(req.getOperationHandle()));
      resp.setStatus(OK_STATUS);
    } catch (Exception e) {
      LOG.warn("Error cancelling operation: ", e);
      resp.setStatus(HiveSQLException.toTStatus(e));
    }
    return resp;
  }

  @Override
  public TCloseOperationResp CloseOperation(TCloseOperationReq req) throws TException {
    TCloseOperationResp resp = new TCloseOperationResp();
    try {
      cliService.closeOperation(new OperationHandle(req.getOperationHandle()));
      resp.setStatus(OK_STATUS);
    } catch (Exception e) {
      LOG.warn("Error closing operation: ", e);
      resp.setStatus(HiveSQLException.toTStatus(e));
    }
    return resp;
  }

  @Override
  public TGetResultSetMetadataResp GetResultSetMetadata(TGetResultSetMetadataReq req)
      throws TException {
    TGetResultSetMetadataResp resp = new TGetResultSetMetadataResp();
    try {
      TableSchema schema =
          cliService.getResultSetMetadata(new OperationHandle(req.getOperationHandle()));
      resp.setSchema(schema.toTTableSchema());
      resp.setStatus(OK_STATUS);
    } catch (Exception e) {
      LOG.warn("Error getting result set metadata: ", e);
      resp.setStatus(HiveSQLException.toTStatus(e));
    }
    return resp;
  }

  @Override
  public TFetchResultsResp FetchResults(TFetchResultsReq req) throws TException {
    TFetchResultsResp resp = new TFetchResultsResp();
    try {
      RowSet rowSet = cliService.fetchResults(
          new OperationHandle(req.getOperationHandle()),
          FetchOrientation.getFetchOrientation(req.getOrientation()),
          req.getMaxRows());
      resp.setResults(rowSet.toTRowSet());
      resp.setHasMoreRows(false);
      resp.setStatus(OK_STATUS);
    } catch (Exception e) {
      LOG.warn("Error fetching results: ", e);
      resp.setStatus(HiveSQLException.toTStatus(e));
    }
    return resp;
  }

  /**
   * Server main loop: resolves host/port (environment variables override the
   * HiveConf values), builds the auth transport/processor factories, and
   * serves a TThreadPoolServer until stop() is called. Blocks the calling
   * thread in server.serve().
   */
  @Override
  public void run() {
    try {
      hiveAuthFactory = new HiveAuthFactory();
      TTransportFactory transportFactory = hiveAuthFactory.getAuthTransFactory();
      TProcessorFactory processorFactory = hiveAuthFactory.getAuthProcFactory(this);

      // Environment variables take precedence over the configured values.
      String portString = System.getenv("HIVE_SERVER2_THRIFT_PORT");
      if (portString != null) {
        portNum = Integer.parseInt(portString);
      } else {
        portNum = hiveConf.getIntVar(ConfVars.HIVE_SERVER2_THRIFT_PORT);
      }

      String hiveHost = System.getenv("HIVE_SERVER2_THRIFT_BIND_HOST");
      if (hiveHost == null) {
        hiveHost = hiveConf.getVar(ConfVars.HIVE_SERVER2_THRIFT_BIND_HOST);
      }
      if (hiveHost != null && !hiveHost.isEmpty()) {
        serverAddress = new InetSocketAddress(hiveHost, portNum);
      } else {
        // No bind host configured: bind the wildcard address.
        serverAddress = new InetSocketAddress(portNum);
      }

      minWorkerThreads = hiveConf.getIntVar(ConfVars.HIVE_SERVER2_THRIFT_MIN_WORKER_THREADS);
      maxWorkerThreads = hiveConf.getIntVar(ConfVars.HIVE_SERVER2_THRIFT_MAX_WORKER_THREADS);

      TThreadPoolServer.Args sargs = new TThreadPoolServer.Args(new TServerSocket(serverAddress))
          .processorFactory(processorFactory)
          .transportFactory(transportFactory)
          .protocolFactory(new TBinaryProtocol.Factory())
          .minWorkerThreads(minWorkerThreads)
          .maxWorkerThreads(maxWorkerThreads);
      server = new TThreadPoolServer(sargs);
      LOG.info("ThriftCLIService listening on " + serverAddress);
      server.serve();
    } catch (Throwable t) {
      // Startup/serve failures must be visible in the server log, not just stderr.
      LOG.error("Error starting " + getName() + ": ", t);
    }
  }
}