From 91dfe0913f86748130d26edb653dee62d582e5ca Mon Sep 17 00:00:00 2001 From: zhuangchong <37063904+zhuangchong@users.noreply.github.com> Date: Sun, 17 Jan 2021 23:02:02 +0800 Subject: [PATCH] [Improvement-4435][datasource] the datasource tests the connection and returns details when the connection is wrong (#4436) * add datasource test connection return result message. * add code checkstyle. * spark/hive datasource test connection add loadKerberosConf. * solve sonarcloud coverage. --- .../api/controller/DataSourceController.java | 63 ++-- .../dolphinscheduler/api/enums/Status.java | 2 +- .../api/service/DataSourceService.java | 194 ++++------- .../api/service/BaseServiceTest.java | 2 + .../api/service/DataSourceServiceTest.java | 108 ++++-- .../common/utils/StringUtils.java | 14 +- .../dao/datasource/BaseDataSource.java | 325 +++++++++--------- .../dao/datasource/HiveDataSource.java | 128 ++++--- .../dao/datasource/SQLServerDataSource.java | 52 +-- .../dao/datasource/SparkDataSource.java | 45 ++- 10 files changed, 450 insertions(+), 483 deletions(-) diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/DataSourceController.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/DataSourceController.java index 4bdaa365e..a67ade2ba 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/DataSourceController.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/DataSourceController.java @@ -14,12 +14,20 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ + package org.apache.dolphinscheduler.api.controller; -import io.swagger.annotations.Api; -import io.swagger.annotations.ApiImplicitParam; -import io.swagger.annotations.ApiImplicitParams; -import io.swagger.annotations.ApiOperation; +import static org.apache.dolphinscheduler.api.enums.Status.AUTHORIZED_DATA_SOURCE; +import static org.apache.dolphinscheduler.api.enums.Status.CONNECTION_TEST_FAILURE; +import static org.apache.dolphinscheduler.api.enums.Status.CONNECT_DATASOURCE_FAILURE; +import static org.apache.dolphinscheduler.api.enums.Status.CREATE_DATASOURCE_ERROR; +import static org.apache.dolphinscheduler.api.enums.Status.DELETE_DATA_SOURCE_FAILURE; +import static org.apache.dolphinscheduler.api.enums.Status.KERBEROS_STARTUP_STATE; +import static org.apache.dolphinscheduler.api.enums.Status.QUERY_DATASOURCE_ERROR; +import static org.apache.dolphinscheduler.api.enums.Status.UNAUTHORIZED_DATASOURCE; +import static org.apache.dolphinscheduler.api.enums.Status.UPDATE_DATASOURCE_ERROR; +import static org.apache.dolphinscheduler.api.enums.Status.VERIFY_DATASOURCE_NAME_FAILURE; + import org.apache.dolphinscheduler.api.enums.Status; import org.apache.dolphinscheduler.api.exceptions.ApiException; import org.apache.dolphinscheduler.api.service.DataSourceService; @@ -30,17 +38,27 @@ import org.apache.dolphinscheduler.common.enums.DbType; import org.apache.dolphinscheduler.common.utils.CommonUtils; import org.apache.dolphinscheduler.common.utils.ParameterUtils; import org.apache.dolphinscheduler.dao.entity.User; + +import java.util.Map; + import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.http.HttpStatus; -import org.springframework.web.bind.annotation.*; +import org.springframework.web.bind.annotation.GetMapping; +import org.springframework.web.bind.annotation.PostMapping; +import org.springframework.web.bind.annotation.RequestAttribute; +import 
org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RequestParam; +import org.springframework.web.bind.annotation.ResponseStatus; +import org.springframework.web.bind.annotation.RestController; + +import io.swagger.annotations.Api; +import io.swagger.annotations.ApiImplicitParam; +import io.swagger.annotations.ApiImplicitParams; +import io.swagger.annotations.ApiOperation; import springfox.documentation.annotations.ApiIgnore; -import java.util.Map; - -import static org.apache.dolphinscheduler.api.enums.Status.*; - /** * data source controller */ @@ -101,8 +119,7 @@ public class DataSourceController extends BaseController { logger.info("login user {} create datasource name: {}, note: {}, type: {}, host: {}, port: {}, database : {}, principal: {}, userName : {}, connectType: {}, other: {}", loginUser.getUserName(), name, note, type, host, port, database, principal, userName, connectType, other); String parameter = dataSourceService.buildParameter(type, host, port, database, principal, userName, password, connectType, other); - Map result = dataSourceService.createDataSource(loginUser, name, note, type, parameter); - return returnDataList(result); + return dataSourceService.createDataSource(loginUser, name, note, type, parameter); } @@ -156,8 +173,7 @@ public class DataSourceController extends BaseController { logger.info("login user {} updateProcessInstance datasource name: {}, note: {}, type: {}, connectType: {}, other: {}", loginUser.getUserName(), name, note, type, connectType, other); String parameter = dataSourceService.buildParameter(type, host, port, database, principal, userName, password, connectType, other); - Map dataSource = dataSourceService.updateDataSource(id, loginUser, name, note, type, parameter); - return returnDataList(dataSource); + return dataSourceService.updateDataSource(id, loginUser, name, note, type, parameter); } /** @@ -281,15 +297,7 @@ public class DataSourceController extends 
BaseController { logger.info("login user {}, connect datasource: {}, note: {}, type: {}, connectType: {}, other: {}", loginUser.getUserName(), name, note, type, connectType, other); String parameter = dataSourceService.buildParameter(type, host, port, database, principal, userName, password, connectType, other); - Boolean isConnection = dataSourceService.checkConnection(type, parameter); - Result result = new Result(); - - if (isConnection) { - putMsg(result, SUCCESS); - } else { - putMsg(result, CONNECT_DATASOURCE_FAILURE); - } - return result; + return dataSourceService.checkConnection(type, parameter); } /** @@ -309,16 +317,7 @@ public class DataSourceController extends BaseController { public Result connectionTest(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @RequestParam("id") int id) { logger.info("connection test, login user:{}, id:{}", loginUser.getUserName(), id); - - Boolean isConnection = dataSourceService.connectionTest(id); - Result result = new Result(); - - if (isConnection) { - putMsg(result, SUCCESS); - } else { - putMsg(result, CONNECTION_TEST_FAILURE); - } - return result; + return dataSourceService.connectionTest(id); } /** diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/enums/Status.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/enums/Status.java index 52d246dcb..c56f7d0b9 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/enums/Status.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/enums/Status.java @@ -198,7 +198,7 @@ public enum Status { CHECK_OS_TENANT_CODE_ERROR(10164, "Please enter the English os tenant code", "请输入英文操作系统租户"), FORCE_TASK_SUCCESS_ERROR(10165, "force task success error", "强制成功任务实例错误"), TASK_INSTANCE_STATE_OPERATION_ERROR(10166, "the status of task instance {0} is {1},Cannot perform force success operation", "任务实例[{0}]的状态是[{1}],无法执行强制成功操作"), - + DATASOURCE_TYPE_NOT_EXIST(10167, 
"data source type not exist", "数据源类型不存在"), UDF_FUNCTION_NOT_EXIST(20001, "UDF function not found", "UDF函数不存在"), UDF_FUNCTION_EXISTS(20002, "UDF function already exists", "UDF函数已存在"), diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/DataSourceService.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/DataSourceService.java index 1fa0494d1..58bb657c6 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/DataSourceService.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/DataSourceService.java @@ -14,12 +14,9 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package org.apache.dolphinscheduler.api.service; -import com.baomidou.mybatisplus.core.metadata.IPage; -import com.baomidou.mybatisplus.extension.plugins.pagination.Page; -import com.fasterxml.jackson.databind.node.ObjectNode; -import org.apache.commons.lang.StringUtils; import org.apache.dolphinscheduler.api.enums.Status; import org.apache.dolphinscheduler.api.utils.PageInfo; import org.apache.dolphinscheduler.api.utils.Result; @@ -27,27 +24,36 @@ import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.DbConnectType; import org.apache.dolphinscheduler.common.enums.DbType; import org.apache.dolphinscheduler.common.utils.CommonUtils; -import org.apache.dolphinscheduler.common.utils.*; -import org.apache.dolphinscheduler.dao.datasource.*; +import org.apache.dolphinscheduler.common.utils.JSONUtils; +import org.apache.dolphinscheduler.common.utils.StringUtils; +import org.apache.dolphinscheduler.dao.datasource.BaseDataSource; +import org.apache.dolphinscheduler.dao.datasource.DataSourceFactory; +import org.apache.dolphinscheduler.dao.datasource.OracleDataSource; import org.apache.dolphinscheduler.dao.entity.DataSource; import org.apache.dolphinscheduler.dao.entity.Resource; 
import org.apache.dolphinscheduler.dao.entity.User; import org.apache.dolphinscheduler.dao.mapper.DataSourceMapper; import org.apache.dolphinscheduler.dao.mapper.DataSourceUserMapper; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.security.UserGroupInformation; + +import java.sql.Connection; +import java.util.ArrayList; +import java.util.Date; +import java.util.HashMap; +import java.util.HashSet; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import java.util.Set; + import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; import org.springframework.transaction.annotation.Transactional; -import java.sql.Connection; -import java.sql.DriverManager; -import java.sql.SQLException; -import java.util.*; - -import static org.apache.dolphinscheduler.common.utils.PropertyUtils.getString; +import com.baomidou.mybatisplus.core.metadata.IPage; +import com.baomidou.mybatisplus.extension.plugins.pagination.Page; +import com.fasterxml.jackson.databind.node.ObjectNode; /** * datasource service @@ -67,11 +73,9 @@ public class DataSourceService extends BaseService { public static final String USER_NAME = "userName"; public static final String OTHER = "other"; - @Autowired private DataSourceMapper dataSourceMapper; - @Autowired private DataSourceUserMapper datasourceUserMapper; @@ -85,24 +89,16 @@ public class DataSourceService extends BaseService { * @param parameter datasource parameters * @return create result code */ - public Map createDataSource(User loginUser, String name, String desc, DbType type, String parameter) { + public Result createDataSource(User loginUser, String name, String desc, DbType type, String parameter) { - Map result = new HashMap<>(); + Result result = new Result<>(); // check name can use or not if (checkName(name)) { putMsg(result, Status.DATASOURCE_EXIST); return result; } - Boolean 
isConnection = checkConnection(type, parameter); - if (!isConnection) { - logger.info("connect failed, type:{}, parameter:{}", type, parameter); - putMsg(result, Status.DATASOURCE_CONNECT_FAILED); - return result; - } - - BaseDataSource datasource = DataSourceFactory.getDatasource(type, parameter); - if (datasource == null) { - putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, parameter); + Result isConnection = checkConnection(type, parameter); + if (Status.SUCCESS.getCode() != isConnection.getCode()) { return result; } @@ -125,7 +121,6 @@ public class DataSourceService extends BaseService { return result; } - /** * updateProcessInstance datasource * @@ -137,9 +132,9 @@ public class DataSourceService extends BaseService { * @param id data source id * @return update result code */ - public Map updateDataSource(int id, User loginUser, String name, String desc, DbType type, String parameter) { + public Result updateDataSource(int id, User loginUser, String name, String desc, DbType type, String parameter) { - Map result = new HashMap<>(); + Result result = new Result<>(); // determine whether the data source exists DataSource dataSource = dataSourceMapper.selectById(id); if (dataSource == null) { @@ -168,12 +163,11 @@ public class DataSourceService extends BaseService { // connectionParams json String connectionParams = paramObject.toString(); - Boolean isConnection = checkConnection(type, connectionParams); - if (!isConnection) { - logger.info("connect failed, type:{}, parameter:{}", type, parameter); - putMsg(result, Status.DATASOURCE_CONNECT_FAILED); + Result isConnection = checkConnection(type, parameter); + if (Status.SUCCESS.getCode() != isConnection.getCode()) { return result; } + Date now = new Date(); dataSource.setName(name.trim()); @@ -192,7 +186,6 @@ public class DataSourceService extends BaseService { return queryDataSource != null && queryDataSource.size() > 0; } - /** * updateProcessInstance datasource * @@ -360,8 +353,8 @@ public class 
DataSourceService extends BaseService { * @param name datasource name * @return true if data datasource not exists, otherwise return false */ - public Result verifyDataSourceName(String name) { - Result result = new Result(); + public Result verifyDataSourceName(String name) { + Result result = new Result<>(); List dataSourceList = dataSourceMapper.queryDataSourceByName(name); if (dataSourceList != null && dataSourceList.size() > 0) { logger.error("datasource name:{} has exist, can't create again.", name); @@ -373,77 +366,6 @@ public class DataSourceService extends BaseService { return result; } - /** - * get connection - * - * @param dbType datasource type - * @param parameter parameter - * @return connection for datasource - */ - private Connection getConnection(DbType dbType, String parameter) { - Connection connection = null; - BaseDataSource datasource = null; - try { - switch (dbType) { - case POSTGRESQL: - datasource = JSONUtils.parseObject(parameter, PostgreDataSource.class); - Class.forName(Constants.ORG_POSTGRESQL_DRIVER); - break; - case MYSQL: - datasource = JSONUtils.parseObject(parameter, MySQLDataSource.class); - Class.forName(Constants.COM_MYSQL_JDBC_DRIVER); - break; - case HIVE: - case SPARK: - if (CommonUtils.getKerberosStartupState()) { - System.setProperty(org.apache.dolphinscheduler.common.Constants.JAVA_SECURITY_KRB5_CONF, - getString(org.apache.dolphinscheduler.common.Constants.JAVA_SECURITY_KRB5_CONF_PATH)); - Configuration configuration = new Configuration(); - configuration.set(org.apache.dolphinscheduler.common.Constants.HADOOP_SECURITY_AUTHENTICATION, "kerberos"); - UserGroupInformation.setConfiguration(configuration); - UserGroupInformation.loginUserFromKeytab(getString(org.apache.dolphinscheduler.common.Constants.LOGIN_USER_KEY_TAB_USERNAME), - getString(org.apache.dolphinscheduler.common.Constants.LOGIN_USER_KEY_TAB_PATH)); - } - if (dbType == DbType.HIVE) { - datasource = JSONUtils.parseObject(parameter, HiveDataSource.class); - } 
else if (dbType == DbType.SPARK) { - datasource = JSONUtils.parseObject(parameter, SparkDataSource.class); - } - Class.forName(Constants.ORG_APACHE_HIVE_JDBC_HIVE_DRIVER); - break; - case CLICKHOUSE: - datasource = JSONUtils.parseObject(parameter, ClickHouseDataSource.class); - Class.forName(Constants.COM_CLICKHOUSE_JDBC_DRIVER); - break; - case ORACLE: - datasource = JSONUtils.parseObject(parameter, OracleDataSource.class); - Class.forName(Constants.COM_ORACLE_JDBC_DRIVER); - break; - case SQLSERVER: - datasource = JSONUtils.parseObject(parameter, SQLServerDataSource.class); - Class.forName(Constants.COM_SQLSERVER_JDBC_DRIVER); - break; - case DB2: - datasource = JSONUtils.parseObject(parameter, DB2ServerDataSource.class); - Class.forName(Constants.COM_DB2_JDBC_DRIVER); - break; - case PRESTO: - datasource = JSONUtils.parseObject(parameter, PrestoDataSource.class); - Class.forName(Constants.COM_PRESTO_JDBC_DRIVER); - break; - default: - break; - } - - if (datasource != null) { - connection = DriverManager.getConnection(datasource.getJdbcUrl(), datasource.getUser(), datasource.getPassword()); - } - } catch (Exception e) { - logger.error(e.getMessage(), e); - } - return connection; - } - /** * check connection * @@ -451,18 +373,24 @@ public class DataSourceService extends BaseService { * @param parameter data source parameters * @return true if connect successfully, otherwise false */ - public boolean checkConnection(DbType type, String parameter) { - Boolean isConnection = false; - Connection con = getConnection(type, parameter); - if (con != null) { - isConnection = true; - try { - con.close(); - } catch (SQLException e) { - logger.error("close connection fail at DataSourceService::checkConnection()", e); - } + public Result checkConnection(DbType type, String parameter) { + Result result = new Result<>(); + BaseDataSource datasource = DataSourceFactory.getDatasource(type, parameter); + if (datasource == null) { + putMsg(result, Status.DATASOURCE_TYPE_NOT_EXIST, 
type); + return result; + } + try (Connection connection = datasource.getConnection()) { + if (connection == null) { + putMsg(result, Status.CONNECTION_TEST_FAILURE); + return result; + } + putMsg(result, Status.SUCCESS); + return result; + } catch (Exception e) { + logger.error("datasource test connection error, dbType:{}, jdbcUrl:{}, message:{}.", type, datasource.getJdbcUrl(), e.getMessage()); + return new Result<>(Status.CONNECTION_TEST_FAILURE.getCode(),e.getMessage()); } - return isConnection; } /** @@ -471,13 +399,14 @@ public class DataSourceService extends BaseService { * @param id datasource id * @return connect result code */ - public boolean connectionTest(int id) { + public Result connectionTest(int id) { DataSource dataSource = dataSourceMapper.selectById(id); - if (dataSource != null) { - return checkConnection(dataSource.getType(), dataSource.getConnectionParams()); - } else { - return false; + if (dataSource == null) { + Result result = new Result<>(); + putMsg(result, Status.RESOURCE_NOT_EXIST); + return result; } + return checkConnection(dataSource.getType(), dataSource.getConnectionParams()); } /** @@ -510,8 +439,8 @@ public class DataSourceService extends BaseService { parameterMap.put(Constants.ORACLE_DB_CONNECT_TYPE, connectType); } - if (CommonUtils.getKerberosStartupState() && - (type == DbType.HIVE || type == DbType.SPARK)) { + if (CommonUtils.getKerberosStartupState() + && (type == DbType.HIVE || type == DbType.SPARK)) { jdbcUrl += ";principal=" + principal; } @@ -535,8 +464,8 @@ public class DataSourceService extends BaseService { parameterMap.put(Constants.JDBC_URL, jdbcUrl); parameterMap.put(Constants.USER, userName); parameterMap.put(Constants.PASSWORD, CommonUtils.encodePassword(password)); - if (CommonUtils.getKerberosStartupState() && - (type == DbType.HIVE || type == DbType.SPARK)) { + if (CommonUtils.getKerberosStartupState() + && (type == DbType.HIVE || type == DbType.SPARK)) { parameterMap.put(Constants.PRINCIPAL, principal); } 
@@ -557,7 +486,6 @@ public class DataSourceService extends BaseService { } return JSONUtils.toJsonString(parameterMap); - } private String buildAddress(DbType type, String host, String port, DbConnectType connectType) { @@ -609,8 +537,8 @@ public class DataSourceService extends BaseService { * @return delete result code */ @Transactional(rollbackFor = RuntimeException.class) - public Result delete(User loginUser, int datasourceId) { - Result result = new Result(); + public Result delete(User loginUser, int datasourceId) { + Result result = new Result<>(); try { //query datasource by id DataSource dataSource = dataSourceMapper.selectById(datasourceId); @@ -673,7 +601,6 @@ public class DataSourceService extends BaseService { return result; } - /** * authorized datasource * @@ -695,7 +622,6 @@ public class DataSourceService extends BaseService { return result; } - /** * get host and port by address * diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/BaseServiceTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/BaseServiceTest.java index 02086a825..95083dd51 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/BaseServiceTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/BaseServiceTest.java @@ -72,6 +72,8 @@ public class BaseServiceTest { } + + @Test public void testPutMsg(){ diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/DataSourceServiceTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/DataSourceServiceTest.java index 789e5f6cc..84ccd2e0a 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/DataSourceServiceTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/DataSourceServiceTest.java @@ -14,6 +14,7 @@ * See the License for the specific language governing permissions and 
* limitations under the License. */ + package org.apache.dolphinscheduler.api.service; import org.apache.dolphinscheduler.api.enums.Status; @@ -24,12 +25,19 @@ import org.apache.dolphinscheduler.common.enums.DbType; import org.apache.dolphinscheduler.common.enums.UserType; import org.apache.dolphinscheduler.common.utils.JSONUtils; import org.apache.dolphinscheduler.common.utils.PropertyUtils; +import org.apache.dolphinscheduler.dao.datasource.BaseDataSource; import org.apache.dolphinscheduler.dao.datasource.DataSourceFactory; import org.apache.dolphinscheduler.dao.datasource.MySQLDataSource; import org.apache.dolphinscheduler.dao.entity.DataSource; import org.apache.dolphinscheduler.dao.entity.User; import org.apache.dolphinscheduler.dao.mapper.DataSourceMapper; import org.apache.dolphinscheduler.dao.mapper.DataSourceUserMapper; + +import java.sql.Connection; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; + import org.junit.Assert; import org.junit.Test; import org.junit.runner.RunWith; @@ -38,16 +46,15 @@ import org.mockito.Mock; import org.mockito.Mockito; import org.powermock.api.mockito.PowerMockito; import org.powermock.core.classloader.annotations.PowerMockIgnore; +import org.powermock.core.classloader.annotations.PrepareForTest; import org.powermock.modules.junit4.PowerMockRunner; -import java.util.ArrayList; -import java.util.List; -import java.util.Map; - @RunWith(PowerMockRunner.class) @PowerMockIgnore({"sun.security.*", "javax.net.*"}) +@PrepareForTest({DataSourceFactory.class}) public class DataSourceServiceTest { + @InjectMocks private DataSourceService dataSourceService; @Mock @@ -69,28 +76,31 @@ public class DataSourceServiceTest { dataSource.setName(dataSourceName); dataSourceList.add(dataSource); PowerMockito.when(dataSourceMapper.queryDataSourceByName(dataSourceName.trim())).thenReturn(dataSourceList); - Map dataSourceExitsResult = dataSourceService.createDataSource(loginUser, dataSourceName, dataSourceDesc, 
dataSourceType, parameter); - Assert.assertEquals(Status.DATASOURCE_EXIST, dataSourceExitsResult.get(Constants.STATUS)); + Result dataSourceExitsResult = dataSourceService.createDataSource(loginUser, dataSourceName, dataSourceDesc, dataSourceType, parameter); + Assert.assertEquals(Status.DATASOURCE_EXIST.getCode(), dataSourceExitsResult.getCode().intValue()); // data source exits PowerMockito.when(dataSourceMapper.queryDataSourceByName(dataSourceName.trim())).thenReturn(null); - PowerMockito.when(dataSourceService.checkConnection(dataSourceType, parameter)).thenReturn(false); - Map connectFailedResult = dataSourceService.createDataSource(loginUser, dataSourceName, dataSourceDesc, dataSourceType, parameter); - Assert.assertEquals(Status.DATASOURCE_CONNECT_FAILED, connectFailedResult.get(Constants.STATUS)); + Result connectionResult = new Result(Status.DATASOURCE_CONNECT_FAILED.getCode(),Status.DATASOURCE_CONNECT_FAILED.getMsg()); + //PowerMockito.when(dataSourceService.checkConnection(dataSourceType, parameter)).thenReturn(connectionResult); + PowerMockito.doReturn(connectionResult).when(dataSourceService).checkConnection(dataSourceType, parameter); + Result connectFailedResult = dataSourceService.createDataSource(loginUser, dataSourceName, dataSourceDesc, dataSourceType, parameter); + Assert.assertEquals(Status.DATASOURCE_CONNECT_FAILED.getCode(), connectFailedResult.getCode().intValue()); // data source exits PowerMockito.when(dataSourceMapper.queryDataSourceByName(dataSourceName.trim())).thenReturn(null); - PowerMockito.when(dataSourceService.checkConnection(dataSourceType, parameter)).thenReturn(true); + connectionResult = new Result(Status.SUCCESS.getCode(),Status.SUCCESS.getMsg()); + PowerMockito.when(dataSourceService.checkConnection(dataSourceType, parameter)).thenReturn(connectionResult); PowerMockito.when(DataSourceFactory.getDatasource(dataSourceType, parameter)).thenReturn(null); - Map notValidError = dataSourceService.createDataSource(loginUser, 
dataSourceName, dataSourceDesc, dataSourceType, parameter); - Assert.assertEquals(Status.REQUEST_PARAMS_NOT_VALID_ERROR, notValidError.get(Constants.STATUS)); + Result notValidError = dataSourceService.createDataSource(loginUser, dataSourceName, dataSourceDesc, dataSourceType, parameter); + Assert.assertEquals(Status.REQUEST_PARAMS_NOT_VALID_ERROR.getCode(), notValidError.getCode().intValue()); // success PowerMockito.when(dataSourceMapper.queryDataSourceByName(dataSourceName.trim())).thenReturn(null); - PowerMockito.when(dataSourceService.checkConnection(dataSourceType, parameter)).thenReturn(true); + PowerMockito.when(dataSourceService.checkConnection(dataSourceType, parameter)).thenReturn(connectionResult); PowerMockito.when(DataSourceFactory.getDatasource(dataSourceType, parameter)).thenReturn(JSONUtils.parseObject(parameter, MySQLDataSource.class)); - Map success = dataSourceService.createDataSource(loginUser, dataSourceName, dataSourceDesc, dataSourceType, parameter); - Assert.assertEquals(Status.SUCCESS, success.get(Constants.STATUS)); + Result success = dataSourceService.createDataSource(loginUser, dataSourceName, dataSourceDesc, dataSourceType, parameter); + Assert.assertEquals(Status.SUCCESS.getCode(), success.getCode().intValue()); } public void updateDataSourceTest() { @@ -104,14 +114,14 @@ public class DataSourceServiceTest { // data source not exits PowerMockito.when(dataSourceMapper.selectById(dataSourceId)).thenReturn(null); - Map resourceNotExits = dataSourceService.updateDataSource(dataSourceId, loginUser, dataSourceName, dataSourceDesc, dataSourceType, parameter); - Assert.assertEquals(Status.RESOURCE_NOT_EXIST, resourceNotExits.get(Constants.STATUS)); + Result resourceNotExits = dataSourceService.updateDataSource(dataSourceId, loginUser, dataSourceName, dataSourceDesc, dataSourceType, parameter); + Assert.assertEquals(Status.RESOURCE_NOT_EXIST.getCode(), resourceNotExits.getCode().intValue()); // user no operation perm DataSource dataSource = 
new DataSource(); dataSource.setUserId(0); PowerMockito.when(dataSourceMapper.selectById(dataSourceId)).thenReturn(dataSource); - Map userNoOperationPerm = dataSourceService.updateDataSource(dataSourceId, loginUser, dataSourceName, dataSourceDesc, dataSourceType, parameter); - Assert.assertEquals(Status.USER_NO_OPERATION_PERM, userNoOperationPerm.get(Constants.STATUS)); + Result userNoOperationPerm = dataSourceService.updateDataSource(dataSourceId, loginUser, dataSourceName, dataSourceDesc, dataSourceType, parameter); + Assert.assertEquals(Status.USER_NO_OPERATION_PERM.getCode(), userNoOperationPerm.getCode().intValue()); // data source name exits dataSource.setUserId(-1); @@ -119,22 +129,24 @@ public class DataSourceServiceTest { dataSourceList.add(dataSource); PowerMockito.when(dataSourceMapper.selectById(dataSourceId)).thenReturn(dataSource); PowerMockito.when(dataSourceMapper.queryDataSourceByName(dataSourceName)).thenReturn(dataSourceList); - Map dataSourceNameExist = dataSourceService.updateDataSource(dataSourceId, loginUser, dataSourceName, dataSourceDesc, dataSourceType, parameter); - Assert.assertEquals(Status.DATASOURCE_EXIST, dataSourceNameExist.get(Constants.STATUS)); + Result dataSourceNameExist = dataSourceService.updateDataSource(dataSourceId, loginUser, dataSourceName, dataSourceDesc, dataSourceType, parameter); + Assert.assertEquals(Status.DATASOURCE_EXIST.getCode(), dataSourceNameExist.getCode().intValue()); // data source connect failed PowerMockito.when(dataSourceMapper.selectById(dataSourceId)).thenReturn(dataSource); PowerMockito.when(dataSourceMapper.queryDataSourceByName(dataSourceName)).thenReturn(null); - PowerMockito.when(dataSourceService.checkConnection(dataSourceType, parameter)).thenReturn(true); - Map connectFailed = dataSourceService.updateDataSource(dataSourceId, loginUser, dataSourceName, dataSourceDesc, dataSourceType, parameter); - Assert.assertEquals(Status.DATASOURCE_CONNECT_FAILED, connectFailed.get(Constants.STATUS)); + 
Result connectionResult = new Result(Status.SUCCESS.getCode(),Status.SUCCESS.getMsg()); + PowerMockito.when(dataSourceService.checkConnection(dataSourceType, parameter)).thenReturn(connectionResult); + Result connectFailed = dataSourceService.updateDataSource(dataSourceId, loginUser, dataSourceName, dataSourceDesc, dataSourceType, parameter); + Assert.assertEquals(Status.DATASOURCE_CONNECT_FAILED.getCode(), connectFailed.getCode().intValue()); //success PowerMockito.when(dataSourceMapper.selectById(dataSourceId)).thenReturn(dataSource); PowerMockito.when(dataSourceMapper.queryDataSourceByName(dataSourceName)).thenReturn(null); - PowerMockito.when(dataSourceService.checkConnection(dataSourceType, parameter)).thenReturn(false); - Map success = dataSourceService.updateDataSource(dataSourceId, loginUser, dataSourceName, dataSourceDesc, dataSourceType, parameter); - Assert.assertEquals(Status.SUCCESS, connectFailed.get(Constants.STATUS)); + connectionResult = new Result(Status.DATASOURCE_CONNECT_FAILED.getCode(),Status.DATASOURCE_CONNECT_FAILED.getMsg()); + PowerMockito.when(dataSourceService.checkConnection(dataSourceType, parameter)).thenReturn(connectionResult); + Result success = dataSourceService.updateDataSource(dataSourceId, loginUser, dataSourceName, dataSourceDesc, dataSourceType, parameter); + Assert.assertEquals(Status.SUCCESS.getCode(), success.getCode().intValue()); } @@ -152,7 +164,8 @@ public class DataSourceServiceTest { public void connectionTest() { int dataSourceId = -1; PowerMockito.when(dataSourceMapper.selectById(dataSourceId)).thenReturn(null); - Assert.assertFalse(dataSourceService.connectionTest(dataSourceId)); + Result result = dataSourceService.connectionTest(dataSourceId); + Assert.assertEquals(Status.RESOURCE_NOT_EXIST.getCode(),result.getCode().intValue()); } @Test @@ -252,7 +265,8 @@ public class DataSourceServiceTest { dataSource.setName("test"); dataSource.setNote("Note"); dataSource.setType(DbType.ORACLE); - 
dataSource.setConnectionParams("{\"connectType\":\"ORACLE_SID\",\"address\":\"jdbc:oracle:thin:@192.168.xx.xx:49161\",\"database\":\"XE\",\"jdbcUrl\":\"jdbc:oracle:thin:@192.168.xx.xx:49161/XE\",\"user\":\"system\",\"password\":\"oracle\"}"); + dataSource.setConnectionParams("{\"connectType\":\"ORACLE_SID\",\"address\":\"jdbc:oracle:thin:@192.168.xx.xx:49161\",\"database\":\"XE\"," + + "\"jdbcUrl\":\"jdbc:oracle:thin:@192.168.xx.xx:49161/XE\",\"user\":\"system\",\"password\":\"oracle\"}"); return dataSource; } @@ -261,7 +275,8 @@ public class DataSourceServiceTest { public void buildParameter() { String param = dataSourceService.buildParameter(DbType.ORACLE, "192.168.9.1", "1521", "im" , "", "test", "test", DbConnectType.ORACLE_SERVICE_NAME, ""); - String expected = "{\"connectType\":\"ORACLE_SERVICE_NAME\",\"type\":\"ORACLE_SERVICE_NAME\",\"address\":\"jdbc:oracle:thin:@//192.168.9.1:1521\",\"database\":\"im\",\"jdbcUrl\":\"jdbc:oracle:thin:@//192.168.9.1:1521/im\",\"user\":\"test\",\"password\":\"test\"}"; + String expected = "{\"connectType\":\"ORACLE_SERVICE_NAME\",\"type\":\"ORACLE_SERVICE_NAME\",\"address\":\"jdbc:oracle:thin:@//192.168.9.1:1521\",\"database\":\"im\"," + + "\"jdbcUrl\":\"jdbc:oracle:thin:@//192.168.9.1:1521/im\",\"user\":\"test\",\"password\":\"test\"}"; Assert.assertEquals(expected, param); } @@ -270,10 +285,10 @@ public class DataSourceServiceTest { PropertyUtils.setValue(Constants.DATASOURCE_ENCRYPTION_ENABLE, "true"); String param = dataSourceService.buildParameter(DbType.MYSQL, "192.168.9.1", "1521", "im" , "", "test", "123456", null, ""); - String expected = "{\"type\":null,\"address\":\"jdbc:mysql://192.168.9.1:1521\",\"database\":\"im\",\"jdbcUrl\":\"jdbc:mysql://192.168.9.1:1521/im\",\"user\":\"test\",\"password\":\"IUAjJCVeJipNVEl6TkRVMg==\"}"; + String expected = "{\"type\":null,\"address\":\"jdbc:mysql://192.168.9.1:1521\",\"database\":\"im\",\"jdbcUrl\":\"jdbc:mysql://192.168.9.1:1521/im\"," + + 
"\"user\":\"test\",\"password\":\"IUAjJCVeJipNVEl6TkRVMg==\"}"; Assert.assertEquals(expected, param); - PropertyUtils.setValue(Constants.DATASOURCE_ENCRYPTION_ENABLE, "false"); param = dataSourceService.buildParameter(DbType.MYSQL, "192.168.9.1", "1521", "im" , "", "test", "123456", null, ""); @@ -294,4 +309,31 @@ public class DataSourceServiceTest { return loginUser; } -} \ No newline at end of file + /** + * test check connection + * @throws Exception + */ + @Test + public void testCheckConnection() throws Exception { + DbType dataSourceType = DbType.POSTGRESQL; + String parameter = dataSourceService.buildParameter(dataSourceType, "172.16.133.200", "5432", "dolphinscheduler", null, "postgres", "", null, null); + + PowerMockito.mockStatic(DataSourceFactory.class); + PowerMockito.when(DataSourceFactory.getDatasource(Mockito.any(), Mockito.anyString())).thenReturn(null); + Result result = dataSourceService.checkConnection(dataSourceType, parameter); + Assert.assertEquals(Status.DATASOURCE_TYPE_NOT_EXIST.getCode(), result.getCode().intValue()); + + BaseDataSource dataSource = PowerMockito.mock(BaseDataSource.class); + PowerMockito.when(DataSourceFactory.getDatasource(Mockito.any(), Mockito.anyString())).thenReturn(dataSource); + PowerMockito.when(dataSource.getConnection()).thenReturn(null); + result = dataSourceService.checkConnection(dataSourceType, parameter); + Assert.assertEquals(Status.CONNECTION_TEST_FAILURE.getCode(), result.getCode().intValue()); + + Connection connection = PowerMockito.mock(Connection.class); + PowerMockito.when(dataSource.getConnection()).thenReturn(connection); + result = dataSourceService.checkConnection(dataSourceType, parameter); + Assert.assertEquals(Status.SUCCESS.getCode(), result.getCode().intValue()); + + } + +} diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/StringUtils.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/StringUtils.java index 
256f19905..6e32d12df 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/StringUtils.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/StringUtils.java @@ -33,11 +33,17 @@ public class StringUtils { return !isEmpty(cs); } - public static boolean isBlank(String s) { - if (isEmpty(s)) { - return true; + public static boolean isBlank(String str) { + int strLen; + if (str != null && (strLen = str.length()) != 0) { + for (int i = 0; i < strLen; ++i) { + if (!Character.isWhitespace(str.charAt(i))) { + return false; + } + } } - return s.trim().length() == 0; + return true; + } public static boolean isNotBlank(String s) { diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/BaseDataSource.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/BaseDataSource.java index 729a17f27..bb3825fbf 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/BaseDataSource.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/BaseDataSource.java @@ -14,14 +14,16 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ + package org.apache.dolphinscheduler.dao.datasource; -import java.sql.Connection; -import java.sql.DriverManager; -import java.sql.SQLException; import org.apache.dolphinscheduler.common.enums.DbType; import org.apache.dolphinscheduler.common.utils.CommonUtils; import org.apache.dolphinscheduler.common.utils.StringUtils; + +import java.sql.Connection; +import java.sql.DriverManager; + import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -30,194 +32,183 @@ import org.slf4j.LoggerFactory; */ public abstract class BaseDataSource { - private static final Logger logger = LoggerFactory.getLogger(BaseDataSource.class); + private static final Logger logger = LoggerFactory.getLogger(BaseDataSource.class); - /** - * user name - */ - protected String user; + /** + * user name + */ + protected String user; - /** - * user password - */ - protected String password; + /** + * user password + */ + protected String password; - /** - * data source address - */ - private String address; + /** + * data source address + */ + private String address; - /** - * database name - */ - private String database; + /** + * database name + */ + private String database; - /** - * other connection parameters for the data source - */ - private String other; + /** + * other connection parameters for the data source + */ + private String other; - /** - * principal - */ - private String principal; + /** + * principal + */ + private String principal; - public String getPrincipal() { - return principal; - } - - public void setPrincipal(String principal) { - this.principal = principal; - } - - /** - * @return driver class - */ - public abstract String driverClassSelector(); - - /** - * @return db type - */ - public abstract DbType dbTypeSelector(); - - /** - * gets the JDBC url for the data source connection - * @return getJdbcUrl - */ - public String getJdbcUrl() { - StringBuilder jdbcUrl = new StringBuilder(getAddress()); - - appendDatabase(jdbcUrl); - appendPrincipal(jdbcUrl); - 
appendOther(jdbcUrl); - - return jdbcUrl.toString(); - } - - /** - * append database - * @param jdbcUrl jdbc url - */ - protected void appendDatabase(StringBuilder jdbcUrl) { - if (dbTypeSelector() == DbType.SQLSERVER) { - jdbcUrl.append(";databaseName=").append(getDatabase()); - } else { - if (getAddress().lastIndexOf('/') != (jdbcUrl.length() - 1)) { - jdbcUrl.append("/"); - } - jdbcUrl.append(getDatabase()); + public String getPrincipal() { + return principal; } - } - /** - * append principal - * @param jdbcUrl jdbc url - */ - private void appendPrincipal(StringBuilder jdbcUrl) { - boolean tag = dbTypeSelector() == DbType.HIVE || dbTypeSelector() == DbType.SPARK; - if (tag && StringUtils.isNotEmpty(getPrincipal())) { - jdbcUrl.append(";principal=").append(getPrincipal()); + public void setPrincipal(String principal) { + this.principal = principal; } - } - /** - * append other - * @param jdbcUrl jdbc url - */ - private void appendOther(StringBuilder jdbcUrl) { - String otherParams = filterOther(getOther()); - if (StringUtils.isNotEmpty(otherParams)) { - String separator = ""; - switch (dbTypeSelector()) { - case CLICKHOUSE: - case MYSQL: - case ORACLE: - case POSTGRESQL: - case PRESTO: - separator = "?"; - break; - case DB2: - separator = ":"; - break; - case HIVE: - case SPARK: - case SQLSERVER: - separator = ";"; - break; - default: - logger.error("Db type mismatch!"); - } - jdbcUrl.append(separator).append(otherParams); + /** + * @return driver class + */ + public abstract String driverClassSelector(); + + /** + * @return db type + */ + public abstract DbType dbTypeSelector(); + + /** + * gets the JDBC url for the data source connection + * @return getJdbcUrl + */ + public String getJdbcUrl() { + StringBuilder jdbcUrl = new StringBuilder(getAddress()); + + appendDatabase(jdbcUrl); + appendPrincipal(jdbcUrl); + appendOther(jdbcUrl); + + return jdbcUrl.toString(); } - } - protected String filterOther(String otherParams){ - return otherParams; - } - - /** - * 
test whether the data source can be connected successfully - */ - public void isConnectable() { - Connection con = null; - try { - Class.forName(driverClassSelector()); - con = DriverManager.getConnection(getJdbcUrl(), getUser(), getPassword()); - } catch (ClassNotFoundException | SQLException e) { - logger.error("Get connection error: {}", e.getMessage()); - } finally { - if (con != null) { - try { - con.close(); - } catch (SQLException e) { - logger.error(e.getMessage(), e); + /** + * append database + * @param jdbcUrl jdbc url + */ + protected void appendDatabase(StringBuilder jdbcUrl) { + if (dbTypeSelector() == DbType.SQLSERVER) { + jdbcUrl.append(";databaseName=").append(getDatabase()); + } else { + if (getAddress().lastIndexOf('/') != (jdbcUrl.length() - 1)) { + jdbcUrl.append("/"); + } + jdbcUrl.append(getDatabase()); } - } } - } - public String getUser() { - return user; - } + /** + * append principal + * @param jdbcUrl jdbc url + */ + private void appendPrincipal(StringBuilder jdbcUrl) { + boolean tag = dbTypeSelector() == DbType.HIVE || dbTypeSelector() == DbType.SPARK; + if (tag && StringUtils.isNotEmpty(getPrincipal())) { + jdbcUrl.append(";principal=").append(getPrincipal()); + } + } - public void setUser(String user) { - this.user = user; - } + /** + * append other + * @param jdbcUrl jdbc url + */ + private void appendOther(StringBuilder jdbcUrl) { + String otherParams = filterOther(getOther()); + if (StringUtils.isNotEmpty(otherParams)) { + String separator = ""; + switch (dbTypeSelector()) { + case CLICKHOUSE: + case MYSQL: + case ORACLE: + case POSTGRESQL: + case PRESTO: + separator = "?"; + break; + case DB2: + separator = ":"; + break; + case HIVE: + case SPARK: + case SQLSERVER: + separator = ";"; + break; + default: + logger.error("Db type mismatch!"); + } + jdbcUrl.append(separator).append(otherParams); + } + } - /** - * password need decode - * @return - */ - public String getPassword() { - return CommonUtils.decodePassword(password); - } + 
/** + * the data source test connection + * @return Connection Connection + * @throws Exception Exception + */ + public Connection getConnection() throws Exception { + Class.forName(driverClassSelector()); + return DriverManager.getConnection(getJdbcUrl(), getUser(), getPassword()); + } - public void setPassword(String password) { - this.password = password; - } + protected String filterOther(String otherParams) { + return otherParams; + } - public void setAddress(String address) { - this.address = address; - } + public String getUser() { + return user; + } - public String getAddress() { - return address; - } + public void setUser(String user) { + this.user = user; + } - public String getDatabase() { - return database; - } + /** + * password need decode + * @return + */ + public String getPassword() { + return CommonUtils.decodePassword(password); + } - public void setDatabase(String database) { - this.database = database; - } + public void setPassword(String password) { + this.password = password; + } - public String getOther() { - return other; - } + public void setAddress(String address) { + this.address = address; + } - public void setOther(String other) { - this.other = other; - } + public String getAddress() { + return address; + } + + public String getDatabase() { + return database; + } + + public void setDatabase(String database) { + this.database = database; + } + + public String getOther() { + return other; + } + + public void setOther(String other) { + this.other = other; + } } diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/HiveDataSource.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/HiveDataSource.java index e64b0395e..0f9bfc164 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/HiveDataSource.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/HiveDataSource.java @@ -19,78 +19,92 @@ package 
org.apache.dolphinscheduler.dao.datasource; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.DbType; +import org.apache.dolphinscheduler.common.utils.CommonUtils; import org.apache.dolphinscheduler.common.utils.StringUtils; + +import org.apache.hadoop.hive.conf.HiveConf.ConfVars; + +import java.sql.Connection; import java.util.Set; import java.util.stream.Collectors; import java.util.stream.Stream; -import org.apache.hadoop.hive.conf.HiveConf.ConfVars; /** * data source of hive */ public class HiveDataSource extends BaseDataSource { - /** - * gets the JDBC url for the data source connection - * @return jdbc url - */ - @Override - public String driverClassSelector() { - return Constants.ORG_APACHE_HIVE_JDBC_HIVE_DRIVER; - } - - /** - * @return db type - */ - @Override - public DbType dbTypeSelector() { - return DbType.HIVE; - } - - /** - * build hive jdbc params,append : ?hive_conf_list - * - * hive jdbc url template: - * - * jdbc:hive2://:,:/dbName;initFile=;sess_var_list?hive_conf_list#hive_var_list - * - * @param otherParams otherParams - * @return filter otherParams - */ - @Override - protected String filterOther(String otherParams) { - if (StringUtils.isBlank(otherParams)) { - return ""; + /** + * gets the JDBC url for the data source connection + * @return jdbc url + */ + @Override + public String driverClassSelector() { + return Constants.ORG_APACHE_HIVE_JDBC_HIVE_DRIVER; } - StringBuilder hiveConfListSb = new StringBuilder(); - hiveConfListSb.append("?"); - StringBuilder sessionVarListSb = new StringBuilder(); - - String[] otherArray = otherParams.split(";", -1); - - // get the default hive conf var name - Set hiveConfSet = Stream.of(ConfVars.values()).map(confVars -> confVars.varname) - .collect(Collectors.toSet()); - - for (String conf : otherArray) { - if (hiveConfSet.contains(conf.split("=")[0])) { - hiveConfListSb.append(conf).append(";"); - } else { - sessionVarListSb.append(conf).append(";"); - } + 
/** + * @return db type + */ + @Override + public DbType dbTypeSelector() { + return DbType.HIVE; } - // remove the last ";" - if (sessionVarListSb.length() > 0) { - sessionVarListSb.deleteCharAt(sessionVarListSb.length() - 1); + /** + * build hive jdbc params,append : ?hive_conf_list + * + * hive jdbc url template: + * + * jdbc:hive2://:,:/dbName;initFile=;sess_var_list?hive_conf_list#hive_var_list + * + * @param otherParams otherParams + * @return filter otherParams + */ + @Override + protected String filterOther(String otherParams) { + if (StringUtils.isBlank(otherParams)) { + return ""; + } + + StringBuilder hiveConfListSb = new StringBuilder(); + hiveConfListSb.append("?"); + StringBuilder sessionVarListSb = new StringBuilder(); + + String[] otherArray = otherParams.split(";", -1); + + // get the default hive conf var name + Set hiveConfSet = Stream.of(ConfVars.values()).map(confVars -> confVars.varname) + .collect(Collectors.toSet()); + + for (String conf : otherArray) { + if (hiveConfSet.contains(conf.split("=")[0])) { + hiveConfListSb.append(conf).append(";"); + } else { + sessionVarListSb.append(conf).append(";"); + } + } + + // remove the last ";" + if (sessionVarListSb.length() > 0) { + sessionVarListSb.deleteCharAt(sessionVarListSb.length() - 1); + } + + if (hiveConfListSb.length() > 0) { + hiveConfListSb.deleteCharAt(hiveConfListSb.length() - 1); + } + + return sessionVarListSb.toString() + hiveConfListSb.toString(); } - if (hiveConfListSb.length() > 0) { - hiveConfListSb.deleteCharAt(hiveConfListSb.length() - 1); + /** + * the data source test connection + * @return Connection Connection + * @throws Exception Exception + */ + @Override + public Connection getConnection() throws Exception { + CommonUtils.loadKerberosConf(); + return super.getConnection(); } - - return sessionVarListSb.toString() + hiveConfListSb.toString(); - } - } diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/SQLServerDataSource.java 
b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/SQLServerDataSource.java index e4b8f4bf1..78062357e 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/SQLServerDataSource.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/SQLServerDataSource.java @@ -14,25 +14,18 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package org.apache.dolphinscheduler.dao.datasource; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.DbType; import org.apache.dolphinscheduler.common.utils.StringUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.sql.Connection; -import java.sql.DriverManager; -import java.sql.SQLException; /** * data source of SQL Server */ public class SQLServerDataSource extends BaseDataSource { - private static final Logger logger = LoggerFactory.getLogger(SQLServerDataSource.class); - /** * gets the JDBC url for the data source connection * @return jdbc url @@ -50,39 +43,18 @@ public class SQLServerDataSource extends BaseDataSource { } /** - * test whether the data source can be connected successfully - */ + * @return driver class + */ @Override - public void isConnectable() { - Connection con = null; - try { - Class.forName(Constants.COM_SQLSERVER_JDBC_DRIVER); - con = DriverManager.getConnection(getJdbcUrl(), getUser(), getPassword()); - } catch (Exception e) { - logger.error("error", e); - } finally { - if (con != null) { - try { - con.close(); - } catch (SQLException e) { - logger.error("SQL Server datasource try conn close conn error", e); - } - } - } + public String driverClassSelector() { + return Constants.COM_SQLSERVER_JDBC_DRIVER; } - /** - * @return driver class - */ - @Override - public String driverClassSelector() { - return Constants.COM_SQLSERVER_JDBC_DRIVER; - } - /** - * @return db type - */ - @Override 
- public DbType dbTypeSelector() { - return DbType.SQLSERVER; - } + /** + * @return db type + */ + @Override + public DbType dbTypeSelector() { + return DbType.SQLSERVER; + } } diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/SparkDataSource.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/SparkDataSource.java index 0329ef840..207ed4394 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/SparkDataSource.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/SparkDataSource.java @@ -14,30 +14,45 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package org.apache.dolphinscheduler.dao.datasource; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.DbType; +import org.apache.dolphinscheduler.common.utils.CommonUtils; + +import java.sql.Connection; /** * data source of spark */ public class SparkDataSource extends BaseDataSource { - /** - * gets the JDBC url for the data source connection - * @return jdbc url - */ - @Override - public String driverClassSelector() { - return Constants.ORG_APACHE_HIVE_JDBC_HIVE_DRIVER; - } + /** + * gets the JDBC url for the data source connection + * @return jdbc url + */ + @Override + public String driverClassSelector() { + return Constants.ORG_APACHE_HIVE_JDBC_HIVE_DRIVER; + } - /** - * @return db type - */ - @Override - public DbType dbTypeSelector() { - return DbType.SPARK; - } + /** + * @return db type + */ + @Override + public DbType dbTypeSelector() { + return DbType.SPARK; + } + + /** + * the data source test connection + * @return Connection Connection + * @throws Exception Exception + */ + @Override + public Connection getConnection() throws Exception { + CommonUtils.loadKerberosConf(); + return super.getConnection(); + } }