Merge branch 'dev' into dev-FTS
commit
7e4bf1902c
|
|
@ -58,7 +58,9 @@ jobs:
|
|||
wget https://dl.google.com/linux/direct/google-chrome-stable_current_amd64.deb
|
||||
sudo dpkg -i google-chrome*.deb
|
||||
sudo apt-get install -f -y
|
||||
wget -N https://chromedriver.storage.googleapis.com/83.0.4103.39/chromedriver_linux64.zip
|
||||
google-chrome -version
|
||||
googleVersion=`google-chrome -version | awk '{print $3}'`
|
||||
wget -N https://chromedriver.storage.googleapis.com/${googleVersion}/chromedriver_linux64.zip
|
||||
unzip chromedriver_linux64.zip
|
||||
sudo mv -f chromedriver /usr/local/share/chromedriver
|
||||
sudo ln -s /usr/local/share/chromedriver /usr/local/bin/chromedriver
|
||||
|
|
|
|||
|
|
@ -14,13 +14,14 @@
|
|||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.dolphinscheduler.alert.utils;
|
||||
|
||||
import org.apache.dolphinscheduler.common.enums.ShowType;
|
||||
import org.apache.dolphinscheduler.common.utils.JSONUtils;
|
||||
import org.apache.dolphinscheduler.common.utils.StringUtils;
|
||||
import org.apache.dolphinscheduler.common.utils.*;
|
||||
|
||||
import org.apache.dolphinscheduler.plugin.model.AlertData;
|
||||
|
||||
import org.apache.http.HttpEntity;
|
||||
import org.apache.http.client.methods.CloseableHttpResponse;
|
||||
import org.apache.http.client.methods.HttpGet;
|
||||
|
|
@ -29,11 +30,17 @@ import org.apache.http.entity.StringEntity;
|
|||
import org.apache.http.impl.client.CloseableHttpClient;
|
||||
import org.apache.http.impl.client.HttpClients;
|
||||
import org.apache.http.util.EntityUtils;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.*;
|
||||
import java.util.Collection;
|
||||
import java.util.Iterator;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
/**
|
||||
* Enterprise WeChat utils
|
||||
|
|
@ -41,25 +48,21 @@ import java.util.*;
|
|||
public class EnterpriseWeChatUtils {
|
||||
|
||||
public static final Logger logger = LoggerFactory.getLogger(EnterpriseWeChatUtils.class);
|
||||
|
||||
public static final String ENTERPRISE_WE_CHAT_AGENT_ID = PropertyUtils.getString(Constants.ENTERPRISE_WECHAT_AGENT_ID);
|
||||
public static final String ENTERPRISE_WE_CHAT_USERS = PropertyUtils.getString(Constants.ENTERPRISE_WECHAT_USERS);
|
||||
private static final String ENTERPRISE_WE_CHAT_CORP_ID = PropertyUtils.getString(Constants.ENTERPRISE_WECHAT_CORP_ID);
|
||||
|
||||
private static final String ENTERPRISE_WE_CHAT_SECRET = PropertyUtils.getString(Constants.ENTERPRISE_WECHAT_SECRET);
|
||||
|
||||
private static final String ENTERPRISE_WE_CHAT_TOKEN_URL = PropertyUtils.getString(Constants.ENTERPRISE_WECHAT_TOKEN_URL);
|
||||
private static final String ENTERPRISE_WE_CHAT_TOKEN_URL_REPLACE = ENTERPRISE_WE_CHAT_TOKEN_URL == null ? null : ENTERPRISE_WE_CHAT_TOKEN_URL
|
||||
.replaceAll("\\{corpId\\}", ENTERPRISE_WE_CHAT_CORP_ID)
|
||||
.replaceAll("\\{secret\\}", ENTERPRISE_WE_CHAT_SECRET);
|
||||
|
||||
.replaceAll("\\{corpId}", ENTERPRISE_WE_CHAT_CORP_ID)
|
||||
.replaceAll("\\{secret}", ENTERPRISE_WE_CHAT_SECRET);
|
||||
private static final String ENTERPRISE_WE_CHAT_PUSH_URL = PropertyUtils.getString(Constants.ENTERPRISE_WECHAT_PUSH_URL);
|
||||
|
||||
private static final String ENTERPRISE_WE_CHAT_TEAM_SEND_MSG = PropertyUtils.getString(Constants.ENTERPRISE_WECHAT_TEAM_SEND_MSG);
|
||||
|
||||
private static final String ENTERPRISE_WE_CHAT_USER_SEND_MSG = PropertyUtils.getString(Constants.ENTERPRISE_WECHAT_USER_SEND_MSG);
|
||||
|
||||
public static final String ENTERPRISE_WE_CHAT_AGENT_ID = PropertyUtils.getString(Constants.ENTERPRISE_WECHAT_AGENT_ID);
|
||||
|
||||
public static final String ENTERPRISE_WE_CHAT_USERS = PropertyUtils.getString(Constants.ENTERPRISE_WECHAT_USERS);
|
||||
private static final String agentIdRegExp = "\\{agentId}";
|
||||
private static final String msgRegExp = "\\{msg}";
|
||||
private static final String userRegExp = "\\{toUser}";
|
||||
|
||||
/**
|
||||
* get Enterprise WeChat is enable
|
||||
|
|
@ -116,13 +119,13 @@ public class EnterpriseWeChatUtils {
|
|||
*
|
||||
* @param toParty the toParty
|
||||
* @param agentId the agentId
|
||||
* @param msg the msg
|
||||
* @param msg the msg
|
||||
* @return Enterprise WeChat send message
|
||||
*/
|
||||
public static String makeTeamSendMsg(String toParty, String agentId, String msg) {
|
||||
return ENTERPRISE_WE_CHAT_TEAM_SEND_MSG.replaceAll("\\{toParty\\}", toParty)
|
||||
.replaceAll("\\{agentId\\}", agentId)
|
||||
.replaceAll("\\{msg\\}", msg);
|
||||
return ENTERPRISE_WE_CHAT_TEAM_SEND_MSG.replaceAll("\\{toParty}", toParty)
|
||||
.replaceAll(agentIdRegExp, agentId)
|
||||
.replaceAll(msgRegExp, msg);
|
||||
}
|
||||
|
||||
/**
|
||||
|
|
@ -130,56 +133,56 @@ public class EnterpriseWeChatUtils {
|
|||
*
|
||||
* @param toParty the toParty
|
||||
* @param agentId the agentId
|
||||
* @param msg the msg
|
||||
* @param msg the msg
|
||||
* @return Enterprise WeChat send message
|
||||
*/
|
||||
public static String makeTeamSendMsg(Collection<String> toParty, String agentId, String msg) {
|
||||
String listParty = FuncUtils.mkString(toParty, "|");
|
||||
return ENTERPRISE_WE_CHAT_TEAM_SEND_MSG.replaceAll("\\{toParty\\}", listParty)
|
||||
.replaceAll("\\{agentId\\}", agentId)
|
||||
.replaceAll("\\{msg\\}", msg);
|
||||
return ENTERPRISE_WE_CHAT_TEAM_SEND_MSG.replaceAll("\\{toParty}", listParty)
|
||||
.replaceAll(agentIdRegExp, agentId)
|
||||
.replaceAll(msgRegExp, msg);
|
||||
}
|
||||
|
||||
/**
|
||||
* make team single user message
|
||||
*
|
||||
* @param toUser the toUser
|
||||
* @param toUser the toUser
|
||||
* @param agentId the agentId
|
||||
* @param msg the msg
|
||||
* @param msg the msg
|
||||
* @return Enterprise WeChat send message
|
||||
*/
|
||||
public static String makeUserSendMsg(String toUser, String agentId, String msg) {
|
||||
return ENTERPRISE_WE_CHAT_USER_SEND_MSG.replaceAll("\\{toUser\\}", toUser)
|
||||
.replaceAll("\\{agentId\\}", agentId)
|
||||
.replaceAll("\\{msg\\}", msg);
|
||||
return ENTERPRISE_WE_CHAT_USER_SEND_MSG.replaceAll("\\{toUser}", toUser)
|
||||
.replaceAll(agentIdRegExp, agentId)
|
||||
.replaceAll(msgRegExp, msg);
|
||||
}
|
||||
|
||||
/**
|
||||
* make team multi user message
|
||||
*
|
||||
* @param toUser the toUser
|
||||
* @param toUser the toUser
|
||||
* @param agentId the agentId
|
||||
* @param msg the msg
|
||||
* @param msg the msg
|
||||
* @return Enterprise WeChat send message
|
||||
*/
|
||||
public static String makeUserSendMsg(Collection<String> toUser, String agentId, String msg) {
|
||||
String listUser = FuncUtils.mkString(toUser, "|");
|
||||
return ENTERPRISE_WE_CHAT_USER_SEND_MSG.replaceAll("\\{toUser\\}", listUser)
|
||||
.replaceAll("\\{agentId\\}", agentId)
|
||||
.replaceAll("\\{msg\\}", msg);
|
||||
return ENTERPRISE_WE_CHAT_USER_SEND_MSG.replaceAll(userRegExp, listUser)
|
||||
.replaceAll(agentIdRegExp, agentId)
|
||||
.replaceAll(msgRegExp, msg);
|
||||
}
|
||||
|
||||
/**
|
||||
* send Enterprise WeChat
|
||||
*
|
||||
* @param charset the charset
|
||||
* @param data the data
|
||||
* @param token the token
|
||||
* @param data the data
|
||||
* @param token the token
|
||||
* @return Enterprise WeChat resp, demo: {"errcode":0,"errmsg":"ok","invaliduser":""}
|
||||
* @throws IOException the IOException
|
||||
*/
|
||||
public static String sendEnterpriseWeChat(String charset, String data, String token) throws IOException {
|
||||
String enterpriseWeChatPushUrlReplace = ENTERPRISE_WE_CHAT_PUSH_URL.replaceAll("\\{token\\}", token);
|
||||
String enterpriseWeChatPushUrlReplace = ENTERPRISE_WE_CHAT_PUSH_URL.replaceAll("\\{token}", token);
|
||||
|
||||
CloseableHttpClient httpClient = HttpClients.createDefault();
|
||||
try {
|
||||
|
|
@ -205,7 +208,7 @@ public class EnterpriseWeChatUtils {
|
|||
/**
|
||||
* convert table to markdown style
|
||||
*
|
||||
* @param title the title
|
||||
* @param title the title
|
||||
* @param content the content
|
||||
* @return markdown table content
|
||||
*/
|
||||
|
|
@ -215,13 +218,13 @@ public class EnterpriseWeChatUtils {
|
|||
|
||||
if (null != mapItemsList) {
|
||||
for (LinkedHashMap mapItems : mapItemsList) {
|
||||
Set<Map.Entry<String, String>> entries = mapItems.entrySet();
|
||||
Iterator<Map.Entry<String, String>> iterator = entries.iterator();
|
||||
Set<Map.Entry<String, Object>> entries = mapItems.entrySet();
|
||||
Iterator<Map.Entry<String, Object>> iterator = entries.iterator();
|
||||
StringBuilder t = new StringBuilder(String.format("`%s`%s", title, Constants.MARKDOWN_ENTER));
|
||||
|
||||
while (iterator.hasNext()) {
|
||||
|
||||
Map.Entry<String, String> entry = iterator.next();
|
||||
Map.Entry<String, Object> entry = iterator.next();
|
||||
t.append(Constants.MARKDOWN_QUOTE);
|
||||
t.append(entry.getKey()).append(":").append(entry.getValue());
|
||||
t.append(Constants.MARKDOWN_ENTER);
|
||||
|
|
@ -235,29 +238,30 @@ public class EnterpriseWeChatUtils {
|
|||
/**
|
||||
* convert text to markdown style
|
||||
*
|
||||
* @param title the title
|
||||
* @param title the title
|
||||
* @param content the content
|
||||
* @return markdown text
|
||||
*/
|
||||
public static String markdownText(String title, String content) {
|
||||
if (StringUtils.isNotEmpty(content)) {
|
||||
List<String> list;
|
||||
try {
|
||||
list = JSONUtils.toList(content, String.class);
|
||||
} catch (Exception e) {
|
||||
logger.error("json format exception", e);
|
||||
return null;
|
||||
}
|
||||
List<LinkedHashMap> mapItemsList = JSONUtils.toList(content, LinkedHashMap.class);
|
||||
if (null != mapItemsList) {
|
||||
StringBuilder contents = new StringBuilder(100);
|
||||
contents.append(String.format("`%s`%n", title));
|
||||
for (LinkedHashMap mapItems : mapItemsList) {
|
||||
|
||||
StringBuilder contents = new StringBuilder(100);
|
||||
contents.append(String.format("`%s`%n", title));
|
||||
for (String str : list) {
|
||||
contents.append(Constants.MARKDOWN_QUOTE);
|
||||
contents.append(str);
|
||||
contents.append(Constants.MARKDOWN_ENTER);
|
||||
}
|
||||
Set<Map.Entry<String, Object>> entries = mapItems.entrySet();
|
||||
Iterator<Map.Entry<String, Object>> iterator = entries.iterator();
|
||||
while (iterator.hasNext()) {
|
||||
Map.Entry<String, Object> entry = iterator.next();
|
||||
contents.append(Constants.MARKDOWN_QUOTE);
|
||||
contents.append(entry.getKey()).append(":").append(entry.getValue());
|
||||
contents.append(Constants.MARKDOWN_ENTER);
|
||||
}
|
||||
|
||||
return contents.toString();
|
||||
}
|
||||
return contents.toString();
|
||||
}
|
||||
|
||||
}
|
||||
return null;
|
||||
|
|
@ -278,4 +282,5 @@ public class EnterpriseWeChatUtils {
|
|||
return result;
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
|||
|
|
@ -14,36 +14,38 @@
|
|||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.dolphinscheduler.alert.utils;
|
||||
|
||||
import org.apache.dolphinscheduler.common.enums.AlertType;
|
||||
import org.apache.dolphinscheduler.common.enums.ShowType;
|
||||
import org.apache.dolphinscheduler.common.utils.JSONUtils;
|
||||
import org.apache.dolphinscheduler.dao.entity.Alert;
|
||||
import org.apache.dolphinscheduler.plugin.model.AlertData;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.util.List;
|
||||
|
||||
import org.junit.Assert;
|
||||
import org.junit.Before;
|
||||
import org.junit.Test;
|
||||
import org.junit.runner.RunWith;
|
||||
import org.mockito.Mockito;
|
||||
import org.mockito.junit.MockitoJUnitRunner;
|
||||
import org.powermock.api.mockito.PowerMockito;
|
||||
import org.powermock.core.classloader.annotations.PrepareForTest;
|
||||
import org.powermock.modules.junit4.PowerMockRunner;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.*;
|
||||
import org.apache.dolphinscheduler.common.utils.*;
|
||||
|
||||
/**
|
||||
* Please manually modify the configuration file before testing.
|
||||
* file: alert.properties
|
||||
* enterprise.wechat.corp.id
|
||||
* enterprise.wechat.secret
|
||||
* enterprise.wechat.token.url
|
||||
* enterprise.wechat.push.url
|
||||
* enterprise.wechat.send.msg
|
||||
* enterprise.wechat.agent.id
|
||||
* enterprise.wechat.users
|
||||
* enterprise.wechat.corp.id
|
||||
* enterprise.wechat.secret
|
||||
* enterprise.wechat.token.url
|
||||
* enterprise.wechat.push.url
|
||||
* enterprise.wechat.send.msg
|
||||
* enterprise.wechat.agent.id
|
||||
* enterprise.wechat.users
|
||||
*/
|
||||
@PrepareForTest(PropertyUtils.class)
|
||||
@RunWith(PowerMockRunner.class)
|
||||
|
|
@ -52,14 +54,18 @@ public class EnterpriseWeChatUtilsTest {
|
|||
private static final String toParty = "wwc99134b6fc1edb6";
|
||||
private static final String enterpriseWechatSecret = "Uuv2KFrkdf7SeKOsTDCpsTkpawXBMNRhFy6VKX5FV";
|
||||
private static final String enterpriseWechatAgentId = "1000004";
|
||||
private static final String enterpriseWechatUsers="LiGang,journey";
|
||||
private static final String enterpriseWechatUsers = "LiGang,journey";
|
||||
private static final String msg = "hello world";
|
||||
|
||||
private static final String enterpriseWechatTeamSendMsg = "{\\\"toparty\\\":\\\"{toParty}\\\",\\\"agentid\\\":\\\"{agentId}\\\",\\\"msgtype\\\":\\\"text\\\",\\\"text\\\":{\\\"content\\\":\\\"{msg}\\\"},\\\"safe\\\":\\\"0\\\"}";
|
||||
private static final String enterpriseWechatUserSendMsg = "{\\\"touser\\\":\\\"{toUser}\\\",\\\"agentid\\\":\\\"{agentId}\\\",\\\"msgtype\\\":\\\"markdown\\\",\\\"markdown\\\":{\\\"content\\\":\\\"{msg}\\\"}}";
|
||||
private static final String enterpriseWechatTeamSendMsg = "{\\\"toparty\\\":\\\"{toParty}\\\",\\\"agentid\\\":\\\"{agentId}\\\""
|
||||
+
|
||||
",\\\"msgtype\\\":\\\"text\\\",\\\"text\\\":{\\\"content\\\":\\\"{msg}\\\"},\\\"safe\\\":\\\"0\\\"}";
|
||||
private static final String enterpriseWechatUserSendMsg = "{\\\"touser\\\":\\\"{toUser}\\\",\\\"agentid\\\":\\\"{agentId}\\\""
|
||||
+
|
||||
",\\\"msgtype\\\":\\\"markdown\\\",\\\"markdown\\\":{\\\"content\\\":\\\"{msg}\\\"}}";
|
||||
|
||||
@Before
|
||||
public void init(){
|
||||
public void init() {
|
||||
PowerMockito.mockStatic(PropertyUtils.class);
|
||||
Mockito.when(PropertyUtils.getBoolean(Constants.ENTERPRISE_WECHAT_ENABLE)).thenReturn(true);
|
||||
Mockito.when(PropertyUtils.getString(Constants.ENTERPRISE_WECHAT_USER_SEND_MSG)).thenReturn(enterpriseWechatUserSendMsg);
|
||||
|
|
@ -67,14 +73,13 @@ public class EnterpriseWeChatUtilsTest {
|
|||
}
|
||||
|
||||
@Test
|
||||
public void testIsEnable(){
|
||||
public void testIsEnable() {
|
||||
Boolean weChartEnable = EnterpriseWeChatUtils.isEnable();
|
||||
Assert.assertTrue(weChartEnable);
|
||||
}
|
||||
|
||||
|
||||
@Test
|
||||
public void testMakeTeamSendMsg1(){
|
||||
public void testMakeTeamSendMsg1() {
|
||||
String sendMsg = EnterpriseWeChatUtils.makeTeamSendMsg(toParty, enterpriseWechatSecret, msg);
|
||||
Assert.assertTrue(sendMsg.contains(toParty));
|
||||
Assert.assertTrue(sendMsg.contains(enterpriseWechatSecret));
|
||||
|
|
@ -82,9 +87,8 @@ public class EnterpriseWeChatUtilsTest {
|
|||
|
||||
}
|
||||
|
||||
|
||||
@Test
|
||||
public void testMakeTeamSendMsg2(){
|
||||
public void testMakeTeamSendMsg2() {
|
||||
List<String> parties = new ArrayList<>();
|
||||
parties.add(toParty);
|
||||
parties.add("test1");
|
||||
|
|
@ -96,7 +100,7 @@ public class EnterpriseWeChatUtilsTest {
|
|||
}
|
||||
|
||||
@Test
|
||||
public void tesMakeUserSendMsg1(){
|
||||
public void tesMakeUserSendMsg1() {
|
||||
|
||||
String sendMsg = EnterpriseWeChatUtils.makeUserSendMsg(enterpriseWechatUsers, enterpriseWechatAgentId, msg);
|
||||
Assert.assertTrue(sendMsg.contains(enterpriseWechatUsers));
|
||||
|
|
@ -105,7 +109,7 @@ public class EnterpriseWeChatUtilsTest {
|
|||
}
|
||||
|
||||
@Test
|
||||
public void tesMakeUserSendMsg2(){
|
||||
public void tesMakeUserSendMsg2() {
|
||||
List<String> users = new ArrayList<>();
|
||||
users.add("user1");
|
||||
users.add("user2");
|
||||
|
|
@ -118,7 +122,7 @@ public class EnterpriseWeChatUtilsTest {
|
|||
}
|
||||
|
||||
@Test
|
||||
public void testMarkdownByAlertForText(){
|
||||
public void testMarkdownByAlertForText() {
|
||||
Alert alertForText = createAlertForText();
|
||||
AlertData alertData = new AlertData();
|
||||
alertData.setTitle(alertForText.getTitle())
|
||||
|
|
@ -129,7 +133,7 @@ public class EnterpriseWeChatUtilsTest {
|
|||
}
|
||||
|
||||
@Test
|
||||
public void testMarkdownByAlertForTable(){
|
||||
public void testMarkdownByAlertForTable() {
|
||||
Alert alertForText = createAlertForTable();
|
||||
AlertData alertData = new AlertData();
|
||||
alertData.setTitle(alertForText.getTitle())
|
||||
|
|
@ -139,17 +143,26 @@ public class EnterpriseWeChatUtilsTest {
|
|||
Assert.assertNotNull(result);
|
||||
}
|
||||
|
||||
private Alert createAlertForText(){
|
||||
String content ="[\"id:69\"," +
|
||||
"\"name:UserBehavior-0--1193959466\"," +
|
||||
"\"Job name: Start workflow\"," +
|
||||
"\"State: SUCCESS\"," +
|
||||
"\"Recovery:NO\"," +
|
||||
"\"Run time: 1\"," +
|
||||
"\"Start time: 2018-08-06 10:31:34.0\"," +
|
||||
"\"End time: 2018-08-06 10:31:49.0\"," +
|
||||
"\"Host: 192.168.xx.xx\"," +
|
||||
"\"Notify group :4\"]";
|
||||
private Alert createAlertForText() {
|
||||
String content = "[{\"id\":\"69\","
|
||||
+
|
||||
"\"name\":\"UserBehavior-0--1193959466\","
|
||||
+
|
||||
"\"Job name\":\"Start workflow\","
|
||||
+
|
||||
"\"State\":\"SUCCESS\","
|
||||
+
|
||||
"\"Recovery\":\"NO\","
|
||||
+
|
||||
"\"Run time\":\"1\","
|
||||
+
|
||||
"\"Start time\": \"2018-08-06 10:31:34.0\","
|
||||
+
|
||||
"\"End time\": \"2018-08-06 10:31:49.0\","
|
||||
+
|
||||
"\"Host\": \"192.168.xx.xx\","
|
||||
+
|
||||
"\"Notify group\" :\"4\"}]";
|
||||
|
||||
Alert alert = new Alert();
|
||||
alert.setTitle("Mysql Exception");
|
||||
|
|
@ -161,18 +174,18 @@ public class EnterpriseWeChatUtilsTest {
|
|||
return alert;
|
||||
}
|
||||
|
||||
private String list2String(){
|
||||
private String list2String() {
|
||||
|
||||
LinkedHashMap<String, Object> map1 = new LinkedHashMap<>();
|
||||
map1.put("mysql service name","mysql200");
|
||||
map1.put("mysql address","192.168.xx.xx");
|
||||
map1.put("port","3306");
|
||||
map1.put("no index of number","80");
|
||||
map1.put("database client connections","190");
|
||||
map1.put("mysql service name", "mysql200");
|
||||
map1.put("mysql address", "192.168.xx.xx");
|
||||
map1.put("port", "3306");
|
||||
map1.put("no index of number", "80");
|
||||
map1.put("database client connections", "190");
|
||||
|
||||
LinkedHashMap<String, Object> map2 = new LinkedHashMap<>();
|
||||
map2.put("mysql service name","mysql210");
|
||||
map2.put("mysql address","192.168.xx.xx");
|
||||
map2.put("mysql service name", "mysql210");
|
||||
map2.put("mysql address", "192.168.xx.xx");
|
||||
map2.put("port", "3306");
|
||||
map2.put("no index of number", "10");
|
||||
map2.put("database client connections", "90");
|
||||
|
|
@ -184,11 +197,11 @@ public class EnterpriseWeChatUtilsTest {
|
|||
return mapjson;
|
||||
}
|
||||
|
||||
private Alert createAlertForTable(){
|
||||
private Alert createAlertForTable() {
|
||||
Alert alert = new Alert();
|
||||
alert.setTitle("Mysql Exception");
|
||||
alert.setShowType(ShowType.TABLE);
|
||||
String content= list2String();
|
||||
String content = list2String();
|
||||
alert.setContent(content);
|
||||
alert.setAlertType(AlertType.EMAIL);
|
||||
alert.setAlertGroupId(1);
|
||||
|
|
@ -196,77 +209,75 @@ public class EnterpriseWeChatUtilsTest {
|
|||
}
|
||||
|
||||
|
||||
|
||||
|
||||
// @Test
|
||||
// public void testSendSingleTeamWeChat() {
|
||||
// try {
|
||||
// String token = EnterpriseWeChatUtils.getToken();
|
||||
// String msg = EnterpriseWeChatUtils.makeTeamSendMsg(partyId, agentId, "hello world");
|
||||
// String resp = EnterpriseWeChatUtils.sendEnterpriseWeChat("utf-8", msg, token);
|
||||
//
|
||||
// String errmsg = JSONUtils.parseObject(resp).getString("errmsg");
|
||||
// Assert.assertEquals("ok",errmsg);
|
||||
// } catch (IOException e) {
|
||||
// e.printStackTrace();
|
||||
// }
|
||||
// }
|
||||
//
|
||||
// @Test
|
||||
// public void testSendMultiTeamWeChat() {
|
||||
//
|
||||
// try {
|
||||
// String token = EnterpriseWeChatUtils.getToken();
|
||||
// String msg = EnterpriseWeChatUtils.makeTeamSendMsg(listPartyId, agentId, "hello world");
|
||||
// String resp = EnterpriseWeChatUtils.sendEnterpriseWeChat("utf-8", msg, token);
|
||||
//
|
||||
// String errmsg = JSONUtils.parseObject(resp).getString("errmsg");
|
||||
// Assert.assertEquals("ok",errmsg);
|
||||
// } catch (IOException e) {
|
||||
// e.printStackTrace();
|
||||
// }
|
||||
// }
|
||||
//
|
||||
// @Test
|
||||
// public void testSendSingleUserWeChat() {
|
||||
// try {
|
||||
// String token = EnterpriseWeChatUtils.getToken();
|
||||
// String msg = EnterpriseWeChatUtils.makeUserSendMsg(listUserId.stream().findFirst().get(), agentId, "your meeting room has been booked and will be synced to the 'mailbox' later \n" +
|
||||
// ">**matter details** \n" +
|
||||
// ">matter:<font color='info'>meeting</font> <br>" +
|
||||
// ">organizer:@miglioguan \n" +
|
||||
// ">participant:@miglioguan、@kunliu、@jamdeezhou、@kanexiong、@kisonwang \n" +
|
||||
// "> \n" +
|
||||
// ">meeting room:<font color='info'>Guangzhou TIT 1st Floor 301</font> \n" +
|
||||
// ">date:<font color='warning'>May 18, 2018</font> \n" +
|
||||
// ">time:<font color='comment'>9:00-11:00 am</font> \n" +
|
||||
// "> \n" +
|
||||
// ">please attend the meeting on time\n" +
|
||||
// "> \n" +
|
||||
// ">to modify the meeting information, please click: [Modify Meeting Information](https://work.weixin.qq.com)\"");
|
||||
//
|
||||
// String resp = EnterpriseWeChatUtils.sendEnterpriseWeChat("utf-8", msg, token);
|
||||
//
|
||||
// String errmsg = JSONUtils.parseObject(resp).getString("errmsg");
|
||||
// Assert.assertEquals("ok",errmsg);
|
||||
// } catch (IOException e) {
|
||||
// e.printStackTrace();
|
||||
// }
|
||||
// }
|
||||
//
|
||||
// @Test
|
||||
// public void testSendMultiUserWeChat() {
|
||||
// try {
|
||||
// String token = EnterpriseWeChatUtils.getToken();
|
||||
//
|
||||
// String msg = EnterpriseWeChatUtils.makeUserSendMsg(listUserId, agentId, "hello world");
|
||||
// String resp = EnterpriseWeChatUtils.sendEnterpriseWeChat("utf-8", msg, token);
|
||||
//
|
||||
// String errmsg = JSONUtils.parseObject(resp).getString("errmsg");
|
||||
// Assert.assertEquals("ok",errmsg);
|
||||
// } catch (IOException e) {
|
||||
// e.printStackTrace();
|
||||
// }
|
||||
// }
|
||||
// @Test
|
||||
// public void testSendSingleTeamWeChat() {
|
||||
// try {
|
||||
// String token = EnterpriseWeChatUtils.getToken();
|
||||
// String msg = EnterpriseWeChatUtils.makeTeamSendMsg(partyId, agentId, "hello world");
|
||||
// String resp = EnterpriseWeChatUtils.sendEnterpriseWeChat("utf-8", msg, token);
|
||||
//
|
||||
// String errmsg = JSONUtils.parseObject(resp).getString("errmsg");
|
||||
// Assert.assertEquals("ok",errmsg);
|
||||
// } catch (IOException e) {
|
||||
// e.printStackTrace();
|
||||
// }
|
||||
// }
|
||||
//
|
||||
// @Test
|
||||
// public void testSendMultiTeamWeChat() {
|
||||
//
|
||||
// try {
|
||||
// String token = EnterpriseWeChatUtils.getToken();
|
||||
// String msg = EnterpriseWeChatUtils.makeTeamSendMsg(listPartyId, agentId, "hello world");
|
||||
// String resp = EnterpriseWeChatUtils.sendEnterpriseWeChat("utf-8", msg, token);
|
||||
//
|
||||
// String errmsg = JSONUtils.parseObject(resp).getString("errmsg");
|
||||
// Assert.assertEquals("ok",errmsg);
|
||||
// } catch (IOException e) {
|
||||
// e.printStackTrace();
|
||||
// }
|
||||
// }
|
||||
//
|
||||
// @Test
|
||||
// public void testSendSingleUserWeChat() {
|
||||
// try {
|
||||
// String token = EnterpriseWeChatUtils.getToken();
|
||||
// String msg = EnterpriseWeChatUtils.makeUserSendMsg(listUserId.stream().findFirst().get(), agentId, "your meeting room has been booked and will be synced to the 'mailbox' later \n" +
|
||||
// ">**matter details** \n" +
|
||||
// ">matter:<font color='info'>meeting</font> <br>" +
|
||||
// ">organizer:@miglioguan \n" +
|
||||
// ">participant:@miglioguan、@kunliu、@jamdeezhou、@kanexiong、@kisonwang \n" +
|
||||
// "> \n" +
|
||||
// ">meeting room:<font color='info'>Guangzhou TIT 1st Floor 301</font> \n" +
|
||||
// ">date:<font color='warning'>May 18, 2018</font> \n" +
|
||||
// ">time:<font color='comment'>9:00-11:00 am</font> \n" +
|
||||
// "> \n" +
|
||||
// ">please attend the meeting on time\n" +
|
||||
// "> \n" +
|
||||
// ">to modify the meeting information, please click: [Modify Meeting Information](https://work.weixin.qq.com)\"");
|
||||
//
|
||||
// String resp = EnterpriseWeChatUtils.sendEnterpriseWeChat("utf-8", msg, token);
|
||||
//
|
||||
// String errmsg = JSONUtils.parseObject(resp).getString("errmsg");
|
||||
// Assert.assertEquals("ok",errmsg);
|
||||
// } catch (IOException e) {
|
||||
// e.printStackTrace();
|
||||
// }
|
||||
// }
|
||||
//
|
||||
// @Test
|
||||
// public void testSendMultiUserWeChat() {
|
||||
// try {
|
||||
// String token = EnterpriseWeChatUtils.getToken();
|
||||
//
|
||||
// String msg = EnterpriseWeChatUtils.makeUserSendMsg(listUserId, agentId, "hello world");
|
||||
// String resp = EnterpriseWeChatUtils.sendEnterpriseWeChat("utf-8", msg, token);
|
||||
//
|
||||
// String errmsg = JSONUtils.parseObject(resp).getString("errmsg");
|
||||
// Assert.assertEquals("ok",errmsg);
|
||||
// } catch (IOException e) {
|
||||
// e.printStackTrace();
|
||||
// }
|
||||
// }
|
||||
|
||||
}
|
||||
|
|
|
|||
|
|
@ -100,7 +100,7 @@ public class DataSourceController extends BaseController {
|
|||
@RequestParam(value = "other") String other) {
|
||||
logger.info("login user {} create datasource name: {}, note: {}, type: {}, host: {}, port: {}, database : {}, principal: {}, userName : {}, connectType: {}, other: {}",
|
||||
loginUser.getUserName(), name, note, type, host, port, database, principal, userName, connectType, other);
|
||||
String parameter = dataSourceService.buildParameter(name, note, type, host, port, database, principal, userName, password, connectType, other);
|
||||
String parameter = dataSourceService.buildParameter(type, host, port, database, principal, userName, password, connectType, other);
|
||||
Map<String, Object> result = dataSourceService.createDataSource(loginUser, name, note, type, parameter);
|
||||
return returnDataList(result);
|
||||
}
|
||||
|
|
@ -155,7 +155,7 @@ public class DataSourceController extends BaseController {
|
|||
@RequestParam(value = "other") String other) {
|
||||
logger.info("login user {} updateProcessInstance datasource name: {}, note: {}, type: {}, connectType: {}, other: {}",
|
||||
loginUser.getUserName(), name, note, type, connectType, other);
|
||||
String parameter = dataSourceService.buildParameter(name, note, type, host, port, database, principal, userName, password, connectType, other);
|
||||
String parameter = dataSourceService.buildParameter(type, host, port, database, principal, userName, password, connectType, other);
|
||||
Map<String, Object> dataSource = dataSourceService.updateDataSource(id, loginUser, name, note, type, parameter);
|
||||
return returnDataList(dataSource);
|
||||
}
|
||||
|
|
@ -280,7 +280,7 @@ public class DataSourceController extends BaseController {
|
|||
@RequestParam(value = "other") String other) {
|
||||
logger.info("login user {}, connect datasource: {}, note: {}, type: {}, connectType: {}, other: {}",
|
||||
loginUser.getUserName(), name, note, type, connectType, other);
|
||||
String parameter = dataSourceService.buildParameter(name, note, type, host, port, database, principal, userName, password, connectType, other);
|
||||
String parameter = dataSourceService.buildParameter(type, host, port, database, principal, userName, password, connectType, other);
|
||||
Boolean isConnection = dataSourceService.checkConnection(type, parameter);
|
||||
Result result = new Result();
|
||||
|
||||
|
|
@ -310,7 +310,7 @@ public class DataSourceController extends BaseController {
|
|||
@RequestParam("id") int id) {
|
||||
logger.info("connection test, login user:{}, id:{}", loginUser.getUserName(), id);
|
||||
|
||||
Boolean isConnection = dataSourceService.connectionTest(loginUser, id);
|
||||
Boolean isConnection = dataSourceService.connectionTest(id);
|
||||
Result result = new Result();
|
||||
|
||||
if (isConnection) {
|
||||
|
|
@ -361,7 +361,7 @@ public class DataSourceController extends BaseController {
|
|||
logger.info("login user {}, verfiy datasource name: {}",
|
||||
loginUser.getUserName(), name);
|
||||
|
||||
return dataSourceService.verifyDataSourceName(loginUser, name);
|
||||
return dataSourceService.verifyDataSourceName(name);
|
||||
}
|
||||
|
||||
|
||||
|
|
|
|||
|
|
@ -14,32 +14,65 @@
|
|||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.dolphinscheduler.api.controller;
|
||||
|
||||
import com.fasterxml.jackson.core.JsonProcessingException;
|
||||
import static org.apache.dolphinscheduler.api.enums.Status.BATCH_COPY_PROCESS_DEFINITION_ERROR;
|
||||
import static org.apache.dolphinscheduler.api.enums.Status.BATCH_DELETE_PROCESS_DEFINE_BY_IDS_ERROR;
|
||||
import static org.apache.dolphinscheduler.api.enums.Status.BATCH_MOVE_PROCESS_DEFINITION_ERROR;
|
||||
import static org.apache.dolphinscheduler.api.enums.Status.CREATE_PROCESS_DEFINITION;
|
||||
import static org.apache.dolphinscheduler.api.enums.Status.DELETE_PROCESS_DEFINE_BY_ID_ERROR;
|
||||
import static org.apache.dolphinscheduler.api.enums.Status.DELETE_PROCESS_DEFINITION_VERSION_ERROR;
|
||||
import static org.apache.dolphinscheduler.api.enums.Status.ENCAPSULATION_TREEVIEW_STRUCTURE_ERROR;
|
||||
import static org.apache.dolphinscheduler.api.enums.Status.GET_TASKS_LIST_BY_PROCESS_DEFINITION_ID_ERROR;
|
||||
import static org.apache.dolphinscheduler.api.enums.Status.QUERY_DATAIL_OF_PROCESS_DEFINITION_ERROR;
|
||||
import static org.apache.dolphinscheduler.api.enums.Status.QUERY_PROCESS_DEFINITION_LIST;
|
||||
import static org.apache.dolphinscheduler.api.enums.Status.QUERY_PROCESS_DEFINITION_LIST_PAGING_ERROR;
|
||||
import static org.apache.dolphinscheduler.api.enums.Status.QUERY_PROCESS_DEFINITION_VERSIONS_ERROR;
|
||||
import static org.apache.dolphinscheduler.api.enums.Status.RELEASE_PROCESS_DEFINITION_ERROR;
|
||||
import static org.apache.dolphinscheduler.api.enums.Status.SWITCH_PROCESS_DEFINITION_VERSION_ERROR;
|
||||
import static org.apache.dolphinscheduler.api.enums.Status.UPDATE_PROCESS_DEFINITION_ERROR;
|
||||
import static org.apache.dolphinscheduler.api.enums.Status.VERIFY_PROCESS_DEFINITION_NAME_UNIQUE_ERROR;
|
||||
|
||||
import org.apache.dolphinscheduler.api.enums.Status;
|
||||
import org.apache.dolphinscheduler.api.exceptions.ApiException;
|
||||
import org.apache.dolphinscheduler.api.service.ProcessDefinitionService;
|
||||
import org.apache.dolphinscheduler.api.service.ProcessDefinitionVersionService;
|
||||
import org.apache.dolphinscheduler.api.utils.Result;
|
||||
import org.apache.dolphinscheduler.common.Constants;
|
||||
import org.apache.dolphinscheduler.common.utils.ParameterUtils;
|
||||
import org.apache.dolphinscheduler.common.utils.StringUtils;
|
||||
import org.apache.dolphinscheduler.dao.entity.User;
|
||||
import io.swagger.annotations.*;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.http.HttpStatus;
|
||||
import org.springframework.web.bind.annotation.*;
|
||||
import springfox.documentation.annotations.ApiIgnore;
|
||||
|
||||
import javax.servlet.http.HttpServletResponse;
|
||||
import java.util.ArrayList;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
import static org.apache.dolphinscheduler.api.enums.Status.*;
|
||||
import javax.servlet.http.HttpServletResponse;
|
||||
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.http.HttpStatus;
|
||||
import org.springframework.web.bind.annotation.GetMapping;
|
||||
import org.springframework.web.bind.annotation.PathVariable;
|
||||
import org.springframework.web.bind.annotation.PostMapping;
|
||||
import org.springframework.web.bind.annotation.RequestAttribute;
|
||||
import org.springframework.web.bind.annotation.RequestMapping;
|
||||
import org.springframework.web.bind.annotation.RequestParam;
|
||||
import org.springframework.web.bind.annotation.ResponseBody;
|
||||
import org.springframework.web.bind.annotation.ResponseStatus;
|
||||
import org.springframework.web.bind.annotation.RestController;
|
||||
|
||||
import com.fasterxml.jackson.core.JsonProcessingException;
|
||||
|
||||
import io.swagger.annotations.Api;
|
||||
import io.swagger.annotations.ApiImplicitParam;
|
||||
import io.swagger.annotations.ApiImplicitParams;
|
||||
import io.swagger.annotations.ApiOperation;
|
||||
import io.swagger.annotations.ApiParam;
|
||||
import springfox.documentation.annotations.ApiIgnore;
|
||||
|
||||
|
||||
/**
|
||||
|
|
@ -55,16 +88,19 @@ public class ProcessDefinitionController extends BaseController {
|
|||
@Autowired
|
||||
private ProcessDefinitionService processDefinitionService;
|
||||
|
||||
@Autowired
|
||||
private ProcessDefinitionVersionService processDefinitionVersionService;
|
||||
|
||||
/**
|
||||
* create process definition
|
||||
*
|
||||
* @param loginUser login user
|
||||
* @param loginUser login user
|
||||
* @param projectName project name
|
||||
* @param name process definition name
|
||||
* @param json process definition json
|
||||
* @param name process definition name
|
||||
* @param json process definition json
|
||||
* @param description description
|
||||
* @param locations locations for nodes
|
||||
* @param connects connects for nodes
|
||||
* @param locations locations for nodes
|
||||
* @param connects connects for nodes
|
||||
* @return create result code
|
||||
*/
|
||||
@ApiOperation(value = "save", notes = "CREATE_PROCESS_DEFINITION_NOTES")
|
||||
|
|
@ -86,8 +122,8 @@ public class ProcessDefinitionController extends BaseController {
|
|||
@RequestParam(value = "connects", required = true) String connects,
|
||||
@RequestParam(value = "description", required = false) String description) throws JsonProcessingException {
|
||||
|
||||
logger.info("login user {}, create process definition, project name: {}, process definition name: {}, " +
|
||||
"process_definition_json: {}, desc: {} locations:{}, connects:{}",
|
||||
logger.info("login user {}, create process definition, project name: {}, process definition name: {}, "
|
||||
+ "process_definition_json: {}, desc: {} locations:{}, connects:{}",
|
||||
loginUser.getUserName(), projectName, name, json, description, locations, connects);
|
||||
Map<String, Object> result = processDefinitionService.createProcessDefinition(loginUser, projectName, name, json,
|
||||
description, locations, connects);
|
||||
|
|
@ -97,13 +133,13 @@ public class ProcessDefinitionController extends BaseController {
|
|||
/**
|
||||
* copy process definition
|
||||
*
|
||||
* @param loginUser login user
|
||||
* @param loginUser login user
|
||||
* @param projectName project name
|
||||
* @param processDefinitionIds process definition ids
|
||||
* @param processDefinitionIds process definition ids
|
||||
* @param targetProjectId target project id
|
||||
* @return copy result code
|
||||
*/
|
||||
@ApiOperation(value = "copyProcessDefinition", notes= "COPY_PROCESS_DEFINITION_NOTES")
|
||||
@ApiOperation(value = "copyProcessDefinition", notes = "COPY_PROCESS_DEFINITION_NOTES")
|
||||
@ApiImplicitParams({
|
||||
@ApiImplicitParam(name = "processDefinitionIds", value = "PROCESS_DEFINITION_IDS", required = true, dataType = "String", example = "3,4"),
|
||||
@ApiImplicitParam(name = "targetProjectId", value = "TARGET_PROJECT_ID", required = true, type = "Integer")
|
||||
|
|
@ -114,7 +150,7 @@ public class ProcessDefinitionController extends BaseController {
|
|||
public Result copyProcessDefinition(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
|
||||
@ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName,
|
||||
@RequestParam(value = "processDefinitionIds", required = true) String processDefinitionIds,
|
||||
@RequestParam(value = "targetProjectId",required = true) int targetProjectId) {
|
||||
@RequestParam(value = "targetProjectId", required = true) int targetProjectId) {
|
||||
logger.info("batch copy process definition, login user:{}, project name:{}, process definition ids:{},target project id:{}",
|
||||
StringUtils.replaceNRTtoUnderline(loginUser.getUserName()),
|
||||
StringUtils.replaceNRTtoUnderline(projectName),
|
||||
|
|
@ -122,19 +158,19 @@ public class ProcessDefinitionController extends BaseController {
|
|||
StringUtils.replaceNRTtoUnderline(String.valueOf(targetProjectId)));
|
||||
|
||||
return returnDataList(
|
||||
processDefinitionService.batchCopyProcessDefinition(loginUser,projectName,processDefinitionIds,targetProjectId));
|
||||
processDefinitionService.batchCopyProcessDefinition(loginUser, projectName, processDefinitionIds, targetProjectId));
|
||||
}
|
||||
|
||||
/**
|
||||
* move process definition
|
||||
*
|
||||
* @param loginUser login user
|
||||
* @param loginUser login user
|
||||
* @param projectName project name
|
||||
* @param processDefinitionIds process definition ids
|
||||
* @param processDefinitionIds process definition ids
|
||||
* @param targetProjectId target project id
|
||||
* @return move result code
|
||||
*/
|
||||
@ApiOperation(value = "moveProcessDefinition", notes= "MOVE_PROCESS_DEFINITION_NOTES")
|
||||
@ApiOperation(value = "moveProcessDefinition", notes = "MOVE_PROCESS_DEFINITION_NOTES")
|
||||
@ApiImplicitParams({
|
||||
@ApiImplicitParam(name = "processDefinitionIds", value = "PROCESS_DEFINITION_IDS", required = true, dataType = "String", example = "3,4"),
|
||||
@ApiImplicitParam(name = "targetProjectId", value = "TARGET_PROJECT_ID", required = true, type = "Integer")
|
||||
|
|
@ -145,7 +181,7 @@ public class ProcessDefinitionController extends BaseController {
|
|||
public Result moveProcessDefinition(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
|
||||
@ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName,
|
||||
@RequestParam(value = "processDefinitionIds", required = true) String processDefinitionIds,
|
||||
@RequestParam(value = "targetProjectId",required = true) int targetProjectId) {
|
||||
@RequestParam(value = "targetProjectId", required = true) int targetProjectId) {
|
||||
logger.info("batch move process definition, login user:{}, project name:{}, process definition ids:{},target project id:{}",
|
||||
StringUtils.replaceNRTtoUnderline(loginUser.getUserName()),
|
||||
StringUtils.replaceNRTtoUnderline(projectName),
|
||||
|
|
@ -153,15 +189,15 @@ public class ProcessDefinitionController extends BaseController {
|
|||
StringUtils.replaceNRTtoUnderline(String.valueOf(targetProjectId)));
|
||||
|
||||
return returnDataList(
|
||||
processDefinitionService.batchMoveProcessDefinition(loginUser,projectName,processDefinitionIds,targetProjectId));
|
||||
processDefinitionService.batchMoveProcessDefinition(loginUser, projectName, processDefinitionIds, targetProjectId));
|
||||
}
|
||||
|
||||
/**
|
||||
* verify process definition name unique
|
||||
*
|
||||
* @param loginUser login user
|
||||
* @param loginUser login user
|
||||
* @param projectName project name
|
||||
* @param name name
|
||||
* @param name name
|
||||
* @return true if process definition name not exists, otherwise false
|
||||
*/
|
||||
@ApiOperation(value = "verify-name", notes = "VERIFY_PROCESS_DEFINITION_NAME_NOTES")
|
||||
|
|
@ -172,8 +208,8 @@ public class ProcessDefinitionController extends BaseController {
|
|||
@ResponseStatus(HttpStatus.OK)
|
||||
@ApiException(VERIFY_PROCESS_DEFINITION_NAME_UNIQUE_ERROR)
|
||||
public Result verifyProcessDefinitionName(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
|
||||
@ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName,
|
||||
@RequestParam(value = "name", required = true) String name) {
|
||||
@ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName,
|
||||
@RequestParam(value = "name", required = true) String name) {
|
||||
logger.info("verify process definition name unique, user:{}, project name:{}, process definition name:{}",
|
||||
loginUser.getUserName(), projectName, name);
|
||||
Map<String, Object> result = processDefinitionService.verifyProcessDefinitionName(loginUser, projectName, name);
|
||||
|
|
@ -183,18 +219,18 @@ public class ProcessDefinitionController extends BaseController {
|
|||
/**
|
||||
* update process definition
|
||||
*
|
||||
* @param loginUser login user
|
||||
* @param projectName project name
|
||||
* @param name process definition name
|
||||
* @param id process definition id
|
||||
* @param loginUser login user
|
||||
* @param projectName project name
|
||||
* @param name process definition name
|
||||
* @param id process definition id
|
||||
* @param processDefinitionJson process definition json
|
||||
* @param description description
|
||||
* @param locations locations for nodes
|
||||
* @param connects connects for nodes
|
||||
* @param description description
|
||||
* @param locations locations for nodes
|
||||
* @param connects connects for nodes
|
||||
* @return update result code
|
||||
*/
|
||||
|
||||
@ApiOperation(value = "updateProcessDefinition", notes= "UPDATE_PROCESS_DEFINITION_NOTES")
|
||||
@ApiOperation(value = "updateProcessDefinition", notes = "UPDATE_PROCESS_DEFINITION_NOTES")
|
||||
@ApiImplicitParams({
|
||||
@ApiImplicitParam(name = "name", value = "PROCESS_DEFINITION_NAME", required = true, type = "String"),
|
||||
@ApiImplicitParam(name = "id", value = "PROCESS_DEFINITION_ID", required = true, dataType = "Int", example = "100"),
|
||||
|
|
@ -207,33 +243,115 @@ public class ProcessDefinitionController extends BaseController {
|
|||
@ResponseStatus(HttpStatus.OK)
|
||||
@ApiException(UPDATE_PROCESS_DEFINITION_ERROR)
|
||||
public Result updateProcessDefinition(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
|
||||
@ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName,
|
||||
@RequestParam(value = "name", required = true) String name,
|
||||
@RequestParam(value = "id", required = true) int id,
|
||||
@RequestParam(value = "processDefinitionJson", required = true) String processDefinitionJson,
|
||||
@RequestParam(value = "locations", required = false) String locations,
|
||||
@RequestParam(value = "connects", required = false) String connects,
|
||||
@RequestParam(value = "description", required = false) String description) {
|
||||
@ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName,
|
||||
@RequestParam(value = "name", required = true) String name,
|
||||
@RequestParam(value = "id", required = true) int id,
|
||||
@RequestParam(value = "processDefinitionJson", required = true) String processDefinitionJson,
|
||||
@RequestParam(value = "locations", required = false) String locations,
|
||||
@RequestParam(value = "connects", required = false) String connects,
|
||||
@RequestParam(value = "description", required = false) String description) {
|
||||
|
||||
logger.info("login user {}, update process define, project name: {}, process define name: {}, " +
|
||||
"process_definition_json: {}, desc: {}, locations:{}, connects:{}",
|
||||
logger.info("login user {}, update process define, project name: {}, process define name: {}, "
|
||||
+ "process_definition_json: {}, desc: {}, locations:{}, connects:{}",
|
||||
loginUser.getUserName(), projectName, name, processDefinitionJson, description, locations, connects);
|
||||
Map<String, Object> result = processDefinitionService.updateProcessDefinition(loginUser, projectName, id, name,
|
||||
processDefinitionJson, description, locations, connects);
|
||||
return returnDataList(result);
|
||||
}
|
||||
|
||||
/**
|
||||
* query process definition version paging list info
|
||||
*
|
||||
* @param loginUser login user info
|
||||
* @param projectName the process definition project name
|
||||
* @param pageNo the process definition version list current page number
|
||||
* @param pageSize the process definition version list page size
|
||||
* @param processDefinitionId the process definition id
|
||||
* @return the process definition version list
|
||||
*/
|
||||
@ApiOperation(value = "queryProcessDefinitionVersions", notes = "QUERY_PROCESS_DEFINITION_VERSIONS_NOTES")
|
||||
@ApiImplicitParams({
|
||||
@ApiImplicitParam(name = "pageNo", value = "PAGE_NO", required = true, dataType = "Int", example = "100"),
|
||||
@ApiImplicitParam(name = "pageSize", value = "PAGE_SIZE", required = true, dataType = "Int", example = "100"),
|
||||
@ApiImplicitParam(name = "processDefinitionId", value = "PROCESS_DEFINITION_ID", required = true, dataType = "Int", example = "100")
|
||||
})
|
||||
@GetMapping(value = "/versions")
|
||||
@ResponseStatus(HttpStatus.OK)
|
||||
@ApiException(QUERY_PROCESS_DEFINITION_VERSIONS_ERROR)
|
||||
public Result queryProcessDefinitionVersions(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
|
||||
@ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName,
|
||||
@RequestParam(value = "pageNo") int pageNo,
|
||||
@RequestParam(value = "pageSize") int pageSize,
|
||||
@RequestParam(value = "processDefinitionId") int processDefinitionId) {
|
||||
|
||||
Map<String, Object> result = processDefinitionVersionService.queryProcessDefinitionVersions(loginUser
|
||||
, projectName, pageNo, pageSize, processDefinitionId);
|
||||
return returnDataList(result);
|
||||
}
|
||||
|
||||
/**
|
||||
* switch certain process definition version
|
||||
*
|
||||
* @param loginUser login user info
|
||||
* @param projectName the process definition project name
|
||||
* @param processDefinitionId the process definition id
|
||||
* @param version the version user want to switch
|
||||
* @return switch version result code
|
||||
*/
|
||||
@ApiOperation(value = "switchProcessDefinitionVersion", notes = "SWITCH_PROCESS_DEFINITION_VERSION_NOTES")
|
||||
@ApiImplicitParams({
|
||||
@ApiImplicitParam(name = "processDefinitionId", value = "PROCESS_DEFINITION_ID", required = true, dataType = "Int", example = "100"),
|
||||
@ApiImplicitParam(name = "version", value = "VERSION", required = true, dataType = "Long", example = "100")
|
||||
})
|
||||
@GetMapping(value = "/version/switch")
|
||||
@ResponseStatus(HttpStatus.OK)
|
||||
@ApiException(SWITCH_PROCESS_DEFINITION_VERSION_ERROR)
|
||||
public Result switchProcessDefinitionVersion(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
|
||||
@ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName,
|
||||
@RequestParam(value = "processDefinitionId") int processDefinitionId,
|
||||
@RequestParam(value = "version") long version) {
|
||||
|
||||
Map<String, Object> result = processDefinitionService.switchProcessDefinitionVersion(loginUser, projectName
|
||||
, processDefinitionId, version);
|
||||
return returnDataList(result);
|
||||
}
|
||||
|
||||
/**
|
||||
* delete the certain process definition version by version and process definition id
|
||||
*
|
||||
* @param loginUser login user info
|
||||
* @param projectName the process definition project name
|
||||
* @param processDefinitionId process definition id
|
||||
* @param version the process definition version user want to delete
|
||||
* @return delete version result code
|
||||
*/
|
||||
@ApiOperation(value = "deleteProcessDefinitionVersion", notes = "DELETE_PROCESS_DEFINITION_VERSION_NOTES")
|
||||
@ApiImplicitParams({
|
||||
@ApiImplicitParam(name = "processDefinitionId", value = "PROCESS_DEFINITION_ID", required = true, dataType = "Int", example = "100"),
|
||||
@ApiImplicitParam(name = "version", value = "VERSION", required = true, dataType = "Long", example = "100")
|
||||
})
|
||||
@GetMapping(value = "/version/delete")
|
||||
@ResponseStatus(HttpStatus.OK)
|
||||
@ApiException(DELETE_PROCESS_DEFINITION_VERSION_ERROR)
|
||||
public Result deleteProcessDefinitionVersion(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
|
||||
@ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName,
|
||||
@RequestParam(value = "processDefinitionId") int processDefinitionId,
|
||||
@RequestParam(value = "version") long version) {
|
||||
|
||||
Map<String, Object> result = processDefinitionVersionService.deleteByProcessDefinitionIdAndVersion(loginUser, projectName, processDefinitionId, version);
|
||||
return returnDataList(result);
|
||||
}
|
||||
|
||||
/**
|
||||
* release process definition
|
||||
*
|
||||
* @param loginUser login user
|
||||
* @param projectName project name
|
||||
* @param processId process definition id
|
||||
* @param loginUser login user
|
||||
* @param projectName project name
|
||||
* @param processId process definition id
|
||||
* @param releaseState release state
|
||||
* @return release result code
|
||||
*/
|
||||
|
||||
@ApiOperation(value = "releaseProcessDefinition", notes= "RELEASE_PROCESS_DEFINITION_NOTES")
|
||||
@ApiOperation(value = "releaseProcessDefinition", notes = "RELEASE_PROCESS_DEFINITION_NOTES")
|
||||
@ApiImplicitParams({
|
||||
@ApiImplicitParam(name = "name", value = "PROCESS_DEFINITION_NAME", required = true, type = "String"),
|
||||
@ApiImplicitParam(name = "processId", value = "PROCESS_DEFINITION_ID", required = true, dataType = "Int", example = "100"),
|
||||
|
|
@ -243,9 +361,9 @@ public class ProcessDefinitionController extends BaseController {
|
|||
@ResponseStatus(HttpStatus.OK)
|
||||
@ApiException(RELEASE_PROCESS_DEFINITION_ERROR)
|
||||
public Result releaseProcessDefinition(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
|
||||
@ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName,
|
||||
@RequestParam(value = "processId", required = true) int processId,
|
||||
@RequestParam(value = "releaseState", required = true) int releaseState) {
|
||||
@ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName,
|
||||
@RequestParam(value = "processId", required = true) int processId,
|
||||
@RequestParam(value = "releaseState", required = true) int releaseState) {
|
||||
|
||||
logger.info("login user {}, release process definition, project name: {}, release state: {}",
|
||||
loginUser.getUserName(), projectName, releaseState);
|
||||
|
|
@ -256,12 +374,12 @@ public class ProcessDefinitionController extends BaseController {
|
|||
/**
|
||||
* query datail of process definition
|
||||
*
|
||||
* @param loginUser login user
|
||||
* @param loginUser login user
|
||||
* @param projectName project name
|
||||
* @param processId process definition id
|
||||
* @param processId process definition id
|
||||
* @return process definition detail
|
||||
*/
|
||||
@ApiOperation(value = "queryProcessDefinitionById", notes= "QUERY_PROCESS_DEFINITION_BY_ID_NOTES")
|
||||
@ApiOperation(value = "queryProcessDefinitionById", notes = "QUERY_PROCESS_DEFINITION_BY_ID_NOTES")
|
||||
@ApiImplicitParams({
|
||||
@ApiImplicitParam(name = "processId", value = "PROCESS_DEFINITION_ID", required = true, dataType = "Int", example = "100")
|
||||
})
|
||||
|
|
@ -269,8 +387,8 @@ public class ProcessDefinitionController extends BaseController {
|
|||
@ResponseStatus(HttpStatus.OK)
|
||||
@ApiException(QUERY_DATAIL_OF_PROCESS_DEFINITION_ERROR)
|
||||
public Result queryProcessDefinitionById(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
|
||||
@ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName,
|
||||
@RequestParam("processId") Integer processId
|
||||
@ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName,
|
||||
@RequestParam("processId") Integer processId
|
||||
) {
|
||||
logger.info("query detail of process definition, login user:{}, project name:{}, process definition id:{}",
|
||||
loginUser.getUserName(), projectName, processId);
|
||||
|
|
@ -281,7 +399,7 @@ public class ProcessDefinitionController extends BaseController {
|
|||
/**
|
||||
* query Process definition list
|
||||
*
|
||||
* @param loginUser login user
|
||||
* @param loginUser login user
|
||||
* @param projectName project name
|
||||
* @return process definition list
|
||||
*/
|
||||
|
|
@ -290,7 +408,7 @@ public class ProcessDefinitionController extends BaseController {
|
|||
@ResponseStatus(HttpStatus.OK)
|
||||
@ApiException(QUERY_PROCESS_DEFINITION_LIST)
|
||||
public Result queryProcessDefinitionList(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
|
||||
@ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName
|
||||
@ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName
|
||||
) {
|
||||
logger.info("query process definition list, login user:{}, project name:{}",
|
||||
loginUser.getUserName(), projectName);
|
||||
|
|
@ -301,15 +419,15 @@ public class ProcessDefinitionController extends BaseController {
|
|||
/**
|
||||
* query process definition list paging
|
||||
*
|
||||
* @param loginUser login user
|
||||
* @param loginUser login user
|
||||
* @param projectName project name
|
||||
* @param searchVal search value
|
||||
* @param pageNo page number
|
||||
* @param pageSize page size
|
||||
* @param userId user id
|
||||
* @param searchVal search value
|
||||
* @param pageNo page number
|
||||
* @param pageSize page size
|
||||
* @param userId user id
|
||||
* @return process definition page
|
||||
*/
|
||||
@ApiOperation(value = "queryProcessDefinitionListPaging", notes= "QUERY_PROCESS_DEFINITION_LIST_PAGING_NOTES")
|
||||
@ApiOperation(value = "queryProcessDefinitionListPaging", notes = "QUERY_PROCESS_DEFINITION_LIST_PAGING_NOTES")
|
||||
@ApiImplicitParams({
|
||||
@ApiImplicitParam(name = "pageNo", value = "PAGE_NO", required = true, dataType = "Int", example = "100"),
|
||||
@ApiImplicitParam(name = "searchVal", value = "SEARCH_VAL", required = false, type = "String"),
|
||||
|
|
@ -338,10 +456,10 @@ public class ProcessDefinitionController extends BaseController {
|
|||
/**
|
||||
* encapsulation treeview structure
|
||||
*
|
||||
* @param loginUser login user
|
||||
* @param loginUser login user
|
||||
* @param projectName project name
|
||||
* @param id process definition id
|
||||
* @param limit limit
|
||||
* @param id process definition id
|
||||
* @param limit limit
|
||||
* @return tree view json data
|
||||
*/
|
||||
@ApiOperation(value = "viewTree", notes = "VIEW_TREE_NOTES")
|
||||
|
|
@ -363,8 +481,8 @@ public class ProcessDefinitionController extends BaseController {
|
|||
/**
|
||||
* get tasks list by process definition id
|
||||
*
|
||||
* @param loginUser login user
|
||||
* @param projectName project name
|
||||
* @param loginUser login user
|
||||
* @param projectName project name
|
||||
* @param processDefinitionId process definition id
|
||||
* @return task list
|
||||
*/
|
||||
|
|
@ -388,8 +506,8 @@ public class ProcessDefinitionController extends BaseController {
|
|||
/**
|
||||
* get tasks list by process definition id
|
||||
*
|
||||
* @param loginUser login user
|
||||
* @param projectName project name
|
||||
* @param loginUser login user
|
||||
* @param projectName project name
|
||||
* @param processDefinitionIdList process definition id list
|
||||
* @return node list data
|
||||
*/
|
||||
|
|
@ -414,8 +532,8 @@ public class ProcessDefinitionController extends BaseController {
|
|||
/**
|
||||
* delete process definition by id
|
||||
*
|
||||
* @param loginUser login user
|
||||
* @param projectName project name
|
||||
* @param loginUser login user
|
||||
* @param projectName project name
|
||||
* @param processDefinitionId process definition id
|
||||
* @return delete result code
|
||||
*/
|
||||
|
|
@ -439,8 +557,8 @@ public class ProcessDefinitionController extends BaseController {
|
|||
/**
|
||||
* batch delete process definition by ids
|
||||
*
|
||||
* @param loginUser login user
|
||||
* @param projectName project name
|
||||
* @param loginUser login user
|
||||
* @param projectName project name
|
||||
* @param processDefinitionIds process definition id list
|
||||
* @return delete result code
|
||||
*/
|
||||
|
|
@ -489,13 +607,13 @@ public class ProcessDefinitionController extends BaseController {
|
|||
/**
|
||||
* batch export process definition by ids
|
||||
*
|
||||
* @param loginUser login user
|
||||
* @param projectName project name
|
||||
* @param loginUser login user
|
||||
* @param projectName project name
|
||||
* @param processDefinitionIds process definition ids
|
||||
* @param response response
|
||||
* @param response response
|
||||
*/
|
||||
|
||||
@ApiOperation(value = "batchExportProcessDefinitionByIds", notes= "BATCH_EXPORT_PROCESS_DEFINITION_BY_IDS_NOTES")
|
||||
@ApiOperation(value = "batchExportProcessDefinitionByIds", notes = "BATCH_EXPORT_PROCESS_DEFINITION_BY_IDS_NOTES")
|
||||
@ApiImplicitParams({
|
||||
@ApiImplicitParam(name = "processDefinitionIds", value = "PROCESS_DEFINITION_ID", required = true, dataType = "String")
|
||||
})
|
||||
|
|
@ -526,7 +644,7 @@ public class ProcessDefinitionController extends BaseController {
|
|||
@ResponseStatus(HttpStatus.OK)
|
||||
@ApiException(QUERY_PROCESS_DEFINITION_LIST)
|
||||
public Result queryProcessDefinitionAllByProjectId(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
|
||||
@RequestParam("projectId") Integer projectId) {
|
||||
@RequestParam("projectId") Integer projectId) {
|
||||
logger.info("query process definition list, login user:{}, project id:{}",
|
||||
loginUser.getUserName(), projectId);
|
||||
Map<String, Object> result = processDefinitionService.queryProcessDefinitionAllByProjectId(projectId);
|
||||
|
|
|
|||
|
|
@ -14,8 +14,15 @@
|
|||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.dolphinscheduler.api.controller;
|
||||
|
||||
import static org.apache.dolphinscheduler.api.enums.Status.CREATE_TENANT_ERROR;
|
||||
import static org.apache.dolphinscheduler.api.enums.Status.DELETE_TENANT_BY_ID_ERROR;
|
||||
import static org.apache.dolphinscheduler.api.enums.Status.QUERY_TENANT_LIST_ERROR;
|
||||
import static org.apache.dolphinscheduler.api.enums.Status.QUERY_TENANT_LIST_PAGING_ERROR;
|
||||
import static org.apache.dolphinscheduler.api.enums.Status.UPDATE_TENANT_ERROR;
|
||||
import static org.apache.dolphinscheduler.api.enums.Status.VERIFY_TENANT_CODE_ERROR;
|
||||
|
||||
import org.apache.dolphinscheduler.api.enums.Status;
|
||||
import org.apache.dolphinscheduler.api.exceptions.ApiException;
|
||||
|
|
@ -24,21 +31,27 @@ import org.apache.dolphinscheduler.api.utils.Result;
|
|||
import org.apache.dolphinscheduler.common.Constants;
|
||||
import org.apache.dolphinscheduler.common.utils.ParameterUtils;
|
||||
import org.apache.dolphinscheduler.dao.entity.User;
|
||||
import io.swagger.annotations.Api;
|
||||
import io.swagger.annotations.ApiImplicitParam;
|
||||
import io.swagger.annotations.ApiImplicitParams;
|
||||
import io.swagger.annotations.ApiOperation;
|
||||
|
||||
import java.util.Map;
|
||||
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.http.HttpStatus;
|
||||
import org.springframework.web.bind.annotation.*;
|
||||
import org.springframework.web.bind.annotation.GetMapping;
|
||||
import org.springframework.web.bind.annotation.PostMapping;
|
||||
import org.springframework.web.bind.annotation.RequestAttribute;
|
||||
import org.springframework.web.bind.annotation.RequestMapping;
|
||||
import org.springframework.web.bind.annotation.RequestParam;
|
||||
import org.springframework.web.bind.annotation.ResponseStatus;
|
||||
import org.springframework.web.bind.annotation.RestController;
|
||||
|
||||
import io.swagger.annotations.Api;
|
||||
import io.swagger.annotations.ApiImplicitParam;
|
||||
import io.swagger.annotations.ApiImplicitParams;
|
||||
import io.swagger.annotations.ApiOperation;
|
||||
import springfox.documentation.annotations.ApiIgnore;
|
||||
|
||||
import java.util.Map;
|
||||
|
||||
import static org.apache.dolphinscheduler.api.enums.Status.*;
|
||||
|
||||
|
||||
/**
|
||||
* tenant controller
|
||||
|
|
@ -57,10 +70,10 @@ public class TenantController extends BaseController {
|
|||
/**
|
||||
* create tenant
|
||||
*
|
||||
* @param loginUser login user
|
||||
* @param tenantCode tenant code
|
||||
* @param tenantName tenant name
|
||||
* @param queueId queue id
|
||||
* @param loginUser login user
|
||||
* @param tenantCode tenant code
|
||||
* @param tenantName tenant name
|
||||
* @param queueId queue id
|
||||
* @param description description
|
||||
* @return create result code
|
||||
*/
|
||||
|
|
@ -92,8 +105,8 @@ public class TenantController extends BaseController {
|
|||
*
|
||||
* @param loginUser login user
|
||||
* @param searchVal search value
|
||||
* @param pageNo page number
|
||||
* @param pageSize page size
|
||||
* @param pageNo page number
|
||||
* @param pageSize page size
|
||||
* @return tenant list page
|
||||
*/
|
||||
@ApiOperation(value = "queryTenantlistPaging", notes = "QUERY_TENANT_LIST_PAGING_NOTES")
|
||||
|
|
@ -141,11 +154,11 @@ public class TenantController extends BaseController {
|
|||
/**
|
||||
* udpate tenant
|
||||
*
|
||||
* @param loginUser login user
|
||||
* @param id tennat id
|
||||
* @param tenantCode tennat code
|
||||
* @param tenantName tennat name
|
||||
* @param queueId queue id
|
||||
* @param loginUser login user
|
||||
* @param id tennat id
|
||||
* @param tenantCode tennat code
|
||||
* @param tenantName tennat name
|
||||
* @param queueId queue id
|
||||
* @param description description
|
||||
* @return update result code
|
||||
*/
|
||||
|
|
@ -177,7 +190,7 @@ public class TenantController extends BaseController {
|
|||
* delete tenant by id
|
||||
*
|
||||
* @param loginUser login user
|
||||
* @param id tenant id
|
||||
* @param id tenant id
|
||||
* @return delete result code
|
||||
*/
|
||||
@ApiOperation(value = "deleteTenantById", notes = "DELETE_TENANT_NOTES")
|
||||
|
|
@ -195,11 +208,10 @@ public class TenantController extends BaseController {
|
|||
return returnDataList(result);
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* verify tenant code
|
||||
*
|
||||
* @param loginUser login user
|
||||
* @param loginUser login user
|
||||
* @param tenantCode tenant code
|
||||
* @return true if tenant code can user, otherwise return false
|
||||
*/
|
||||
|
|
@ -211,12 +223,10 @@ public class TenantController extends BaseController {
|
|||
@ResponseStatus(HttpStatus.OK)
|
||||
@ApiException(VERIFY_TENANT_CODE_ERROR)
|
||||
public Result verifyTenantCode(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
|
||||
@RequestParam(value = "tenantCode") String tenantCode
|
||||
) {
|
||||
@RequestParam(value = "tenantCode") String tenantCode) {
|
||||
logger.info("login user {}, verfiy tenant code: {}",
|
||||
loginUser.getUserName(), tenantCode);
|
||||
return tenantService.verifyTenantCode(tenantCode);
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
|
|
|
|||
|
|
@ -35,10 +35,12 @@ import org.springframework.http.HttpStatus;
|
|||
import org.springframework.web.bind.annotation.*;
|
||||
import springfox.documentation.annotations.ApiIgnore;
|
||||
|
||||
import java.util.Map;
|
||||
|
||||
import static org.apache.dolphinscheduler.api.enums.Status.*;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
|
||||
/**
|
||||
* user controller
|
||||
|
|
@ -462,4 +464,24 @@ public class UsersController extends BaseController {
|
|||
Map<String, Object> result = usersService.activateUser(loginUser, userName);
|
||||
return returnDataList(result);
|
||||
}
|
||||
|
||||
/**
|
||||
* user batch activate
|
||||
*
|
||||
* @param userNames user names
|
||||
*/
|
||||
@ApiOperation(value = "batchActivateUser",notes = "BATCH_ACTIVATE_USER_NOTES")
|
||||
@ApiImplicitParams({
|
||||
@ApiImplicitParam(name = "userNames", value = "USER_NAMES", type = "String"),
|
||||
})
|
||||
@PostMapping("/batch/activate")
|
||||
@ResponseStatus(HttpStatus.OK)
|
||||
@ApiException(UPDATE_USER_ERROR)
|
||||
public Result<Object> batchActivateUser(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
|
||||
@RequestBody List<String> userNames) {
|
||||
List<String> formatUserNames = userNames.stream().map(ParameterUtils::handleEscapes).collect(Collectors.toList());
|
||||
logger.info(" activate userNames: {}", formatUserNames);
|
||||
Map<String, Object> result = usersService.batchActivateUser(loginUser, formatUserNames);
|
||||
return returnDataList(result);
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -14,14 +14,15 @@
|
|||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.apache.dolphinscheduler.api.enums;
|
||||
|
||||
import org.springframework.context.i18n.LocaleContextHolder;
|
||||
package org.apache.dolphinscheduler.api.enums;
|
||||
|
||||
import java.util.Locale;
|
||||
|
||||
import org.springframework.context.i18n.LocaleContextHolder;
|
||||
|
||||
/**
|
||||
* status enum
|
||||
* status enum
|
||||
*/
|
||||
public enum Status {
|
||||
|
||||
|
|
@ -32,15 +33,15 @@ public enum Status {
|
|||
REQUEST_PARAMS_NOT_VALID_ERROR(10001, "request parameter {0} is not valid", "请求参数[{0}]无效"),
|
||||
TASK_TIMEOUT_PARAMS_ERROR(10002, "task timeout parameter is not valid", "任务超时参数无效"),
|
||||
USER_NAME_EXIST(10003, "user name already exists", "用户名已存在"),
|
||||
USER_NAME_NULL(10004,"user name is null", "用户名不能为空"),
|
||||
USER_NAME_NULL(10004, "user name is null", "用户名不能为空"),
|
||||
HDFS_OPERATION_ERROR(10006, "hdfs operation error", "hdfs操作错误"),
|
||||
TASK_INSTANCE_NOT_FOUND(10008, "task instance not found", "任务实例不存在"),
|
||||
TENANT_NAME_EXIST(10009, "tenant code {0} already exists", "租户编码[{0}]已存在"),
|
||||
USER_NOT_EXIST(10010, "user {0} not exists", "用户[{0}]不存在"),
|
||||
ALERT_GROUP_NOT_EXIST(10011, "alarm group not found", "告警组不存在"),
|
||||
ALERT_GROUP_EXIST(10012, "alarm group already exists", "告警组名称已存在"),
|
||||
USER_NAME_PASSWD_ERROR(10013,"user name or password error", "用户名或密码错误"),
|
||||
LOGIN_SESSION_FAILED(10014,"create session failed!", "创建session失败"),
|
||||
USER_NAME_PASSWD_ERROR(10013, "user name or password error", "用户名或密码错误"),
|
||||
LOGIN_SESSION_FAILED(10014, "create session failed!", "创建session失败"),
|
||||
DATASOURCE_EXIST(10015, "data source name already exists", "数据源名称已存在"),
|
||||
DATASOURCE_CONNECT_FAILED(10016, "data source connection failed", "建立数据源连接失败"),
|
||||
TENANT_NOT_EXIST(10017, "tenant not exists", "租户不存在"),
|
||||
|
|
@ -53,105 +54,105 @@ public enum Status {
|
|||
SCHEDULE_CRON_CHECK_FAILED(10024, "scheduler crontab expression validation failure: {0}", "调度配置定时表达式验证失败: {0}"),
|
||||
MASTER_NOT_EXISTS(10025, "master does not exist", "无可用master节点"),
|
||||
SCHEDULE_STATUS_UNKNOWN(10026, "unknown status: {0}", "未知状态: {0}"),
|
||||
CREATE_ALERT_GROUP_ERROR(10027,"create alert group error", "创建告警组错误"),
|
||||
QUERY_ALL_ALERTGROUP_ERROR(10028,"query all alertgroup error", "查询告警组错误"),
|
||||
LIST_PAGING_ALERT_GROUP_ERROR(10029,"list paging alert group error", "分页查询告警组错误"),
|
||||
UPDATE_ALERT_GROUP_ERROR(10030,"update alert group error", "更新告警组错误"),
|
||||
DELETE_ALERT_GROUP_ERROR(10031,"delete alert group error", "删除告警组错误"),
|
||||
ALERT_GROUP_GRANT_USER_ERROR(10032,"alert group grant user error", "告警组授权用户错误"),
|
||||
CREATE_DATASOURCE_ERROR(10033,"create datasource error", "创建数据源错误"),
|
||||
UPDATE_DATASOURCE_ERROR(10034,"update datasource error", "更新数据源错误"),
|
||||
QUERY_DATASOURCE_ERROR(10035,"query datasource error", "查询数据源错误"),
|
||||
CONNECT_DATASOURCE_FAILURE(10036,"connect datasource failure", "建立数据源连接失败"),
|
||||
CONNECTION_TEST_FAILURE(10037,"connection test failure", "测试数据源连接失败"),
|
||||
DELETE_DATA_SOURCE_FAILURE(10038,"delete data source failure", "删除数据源失败"),
|
||||
VERIFY_DATASOURCE_NAME_FAILURE(10039,"verify datasource name failure", "验证数据源名称失败"),
|
||||
UNAUTHORIZED_DATASOURCE(10040,"unauthorized datasource", "未经授权的数据源"),
|
||||
AUTHORIZED_DATA_SOURCE(10041,"authorized data source", "授权数据源失败"),
|
||||
LOGIN_SUCCESS(10042,"login success", "登录成功"),
|
||||
USER_LOGIN_FAILURE(10043,"user login failure", "用户登录失败"),
|
||||
LIST_WORKERS_ERROR(10044,"list workers error", "查询worker列表错误"),
|
||||
LIST_MASTERS_ERROR(10045,"list masters error", "查询master列表错误"),
|
||||
UPDATE_PROJECT_ERROR(10046,"update project error", "更新项目信息错误"),
|
||||
QUERY_PROJECT_DETAILS_BY_ID_ERROR(10047,"query project details by id error", "查询项目详细信息错误"),
|
||||
CREATE_PROJECT_ERROR(10048,"create project error", "创建项目错误"),
|
||||
LOGIN_USER_QUERY_PROJECT_LIST_PAGING_ERROR(10049,"login user query project list paging error", "分页查询项目列表错误"),
|
||||
DELETE_PROJECT_ERROR(10050,"delete project error", "删除项目错误"),
|
||||
QUERY_UNAUTHORIZED_PROJECT_ERROR(10051,"query unauthorized project error", "查询未授权项目错误"),
|
||||
QUERY_AUTHORIZED_PROJECT(10052,"query authorized project", "查询授权项目错误"),
|
||||
QUERY_QUEUE_LIST_ERROR(10053,"query queue list error", "查询队列列表错误"),
|
||||
CREATE_RESOURCE_ERROR(10054,"create resource error", "创建资源错误"),
|
||||
UPDATE_RESOURCE_ERROR(10055,"update resource error", "更新资源错误"),
|
||||
QUERY_RESOURCES_LIST_ERROR(10056,"query resources list error", "查询资源列表错误"),
|
||||
QUERY_RESOURCES_LIST_PAGING(10057,"query resources list paging", "分页查询资源列表错误"),
|
||||
DELETE_RESOURCE_ERROR(10058,"delete resource error", "删除资源错误"),
|
||||
VERIFY_RESOURCE_BY_NAME_AND_TYPE_ERROR(10059,"verify resource by name and type error", "资源名称或类型验证错误"),
|
||||
VIEW_RESOURCE_FILE_ON_LINE_ERROR(10060,"view resource file online error", "查看资源文件错误"),
|
||||
CREATE_RESOURCE_FILE_ON_LINE_ERROR(10061,"create resource file online error", "创建资源文件错误"),
|
||||
RESOURCE_FILE_IS_EMPTY(10062,"resource file is empty", "资源文件内容不能为空"),
|
||||
EDIT_RESOURCE_FILE_ON_LINE_ERROR(10063,"edit resource file online error", "更新资源文件错误"),
|
||||
DOWNLOAD_RESOURCE_FILE_ERROR(10064,"download resource file error", "下载资源文件错误"),
|
||||
CREATE_UDF_FUNCTION_ERROR(10065 ,"create udf function error", "创建UDF函数错误"),
|
||||
VIEW_UDF_FUNCTION_ERROR( 10066,"view udf function error", "查询UDF函数错误"),
|
||||
UPDATE_UDF_FUNCTION_ERROR(10067,"update udf function error", "更新UDF函数错误"),
|
||||
QUERY_UDF_FUNCTION_LIST_PAGING_ERROR( 10068,"query udf function list paging error", "分页查询UDF函数列表错误"),
|
||||
QUERY_DATASOURCE_BY_TYPE_ERROR( 10069,"query datasource by type error", "查询数据源信息错误"),
|
||||
VERIFY_UDF_FUNCTION_NAME_ERROR( 10070,"verify udf function name error", "UDF函数名称验证错误"),
|
||||
DELETE_UDF_FUNCTION_ERROR( 10071,"delete udf function error", "删除UDF函数错误"),
|
||||
AUTHORIZED_FILE_RESOURCE_ERROR( 10072,"authorized file resource error", "授权资源文件错误"),
|
||||
AUTHORIZE_RESOURCE_TREE( 10073,"authorize resource tree display error","授权资源目录树错误"),
|
||||
UNAUTHORIZED_UDF_FUNCTION_ERROR( 10074,"unauthorized udf function error", "查询未授权UDF函数错误"),
|
||||
AUTHORIZED_UDF_FUNCTION_ERROR(10075,"authorized udf function error", "授权UDF函数错误"),
|
||||
CREATE_SCHEDULE_ERROR(10076,"create schedule error", "创建调度配置错误"),
|
||||
UPDATE_SCHEDULE_ERROR(10077,"update schedule error", "更新调度配置错误"),
|
||||
PUBLISH_SCHEDULE_ONLINE_ERROR(10078,"publish schedule online error", "上线调度配置错误"),
|
||||
OFFLINE_SCHEDULE_ERROR(10079,"offline schedule error", "下线调度配置错误"),
|
||||
QUERY_SCHEDULE_LIST_PAGING_ERROR(10080,"query schedule list paging error", "分页查询调度配置列表错误"),
|
||||
QUERY_SCHEDULE_LIST_ERROR(10081,"query schedule list error", "查询调度配置列表错误"),
|
||||
QUERY_TASK_LIST_PAGING_ERROR(10082,"query task list paging error", "分页查询任务列表错误"),
|
||||
QUERY_TASK_RECORD_LIST_PAGING_ERROR(10083,"query task record list paging error", "分页查询任务记录错误"),
|
||||
CREATE_TENANT_ERROR(10084,"create tenant error", "创建租户错误"),
|
||||
QUERY_TENANT_LIST_PAGING_ERROR(10085,"query tenant list paging error", "分页查询租户列表错误"),
|
||||
QUERY_TENANT_LIST_ERROR(10086,"query tenant list error", "查询租户列表错误"),
|
||||
UPDATE_TENANT_ERROR(10087,"update tenant error", "更新租户错误"),
|
||||
DELETE_TENANT_BY_ID_ERROR(10088,"delete tenant by id error", "删除租户错误"),
|
||||
VERIFY_TENANT_CODE_ERROR(10089,"verify tenant code error", "租户编码验证错误"),
|
||||
CREATE_USER_ERROR(10090,"create user error", "创建用户错误"),
|
||||
QUERY_USER_LIST_PAGING_ERROR(10091,"query user list paging error", "分页查询用户列表错误"),
|
||||
UPDATE_USER_ERROR(10092,"update user error", "更新用户错误"),
|
||||
DELETE_USER_BY_ID_ERROR(10093,"delete user by id error", "删除用户错误"),
|
||||
GRANT_PROJECT_ERROR(10094,"grant project error", "授权项目错误"),
|
||||
GRANT_RESOURCE_ERROR(10095,"grant resource error", "授权资源错误"),
|
||||
GRANT_UDF_FUNCTION_ERROR(10096,"grant udf function error", "授权UDF函数错误"),
|
||||
GRANT_DATASOURCE_ERROR(10097,"grant datasource error", "授权数据源错误"),
|
||||
GET_USER_INFO_ERROR(10098,"get user info error", "获取用户信息错误"),
|
||||
USER_LIST_ERROR(10099,"user list error", "查询用户列表错误"),
|
||||
VERIFY_USERNAME_ERROR(10100,"verify username error", "用户名验证错误"),
|
||||
UNAUTHORIZED_USER_ERROR(10101,"unauthorized user error", "查询未授权用户错误"),
|
||||
AUTHORIZED_USER_ERROR(10102,"authorized user error", "查询授权用户错误"),
|
||||
QUERY_TASK_INSTANCE_LOG_ERROR(10103,"view task instance log error", "查询任务实例日志错误"),
|
||||
DOWNLOAD_TASK_INSTANCE_LOG_FILE_ERROR(10104,"download task instance log file error", "下载任务日志文件错误"),
|
||||
CREATE_PROCESS_DEFINITION(10105,"create process definition", "创建工作流错误"),
|
||||
VERIFY_PROCESS_DEFINITION_NAME_UNIQUE_ERROR(10106,"verify process definition name unique error", "工作流名称已存在"),
|
||||
UPDATE_PROCESS_DEFINITION_ERROR(10107,"update process definition error", "更新工作流定义错误"),
|
||||
RELEASE_PROCESS_DEFINITION_ERROR(10108,"release process definition error", "上线工作流错误"),
|
||||
QUERY_DATAIL_OF_PROCESS_DEFINITION_ERROR(10109,"query datail of process definition error", "查询工作流详细信息错误"),
|
||||
QUERY_PROCESS_DEFINITION_LIST(10110,"query process definition list", "查询工作流列表错误"),
|
||||
ENCAPSULATION_TREEVIEW_STRUCTURE_ERROR(10111,"encapsulation treeview structure error", "查询工作流树形图数据错误"),
|
||||
GET_TASKS_LIST_BY_PROCESS_DEFINITION_ID_ERROR(10112,"get tasks list by process definition id error", "查询工作流定义节点信息错误"),
|
||||
QUERY_PROCESS_INSTANCE_LIST_PAGING_ERROR(10113,"query process instance list paging error", "分页查询工作流实例列表错误"),
|
||||
QUERY_TASK_LIST_BY_PROCESS_INSTANCE_ID_ERROR(10114,"query task list by process instance id error", "查询任务实例列表错误"),
|
||||
UPDATE_PROCESS_INSTANCE_ERROR(10115,"update process instance error", "更新工作流实例错误"),
|
||||
QUERY_PROCESS_INSTANCE_BY_ID_ERROR(10116,"query process instance by id error", "查询工作流实例错误"),
|
||||
DELETE_PROCESS_INSTANCE_BY_ID_ERROR(10117,"delete process instance by id error", "删除工作流实例错误"),
|
||||
QUERY_SUB_PROCESS_INSTANCE_DETAIL_INFO_BY_TASK_ID_ERROR(10118,"query sub process instance detail info by task id error", "查询子流程任务实例错误"),
|
||||
QUERY_PARENT_PROCESS_INSTANCE_DETAIL_INFO_BY_SUB_PROCESS_INSTANCE_ID_ERROR(10119,"query parent process instance detail info by sub process instance id error", "查询子流程该工作流实例错误"),
|
||||
QUERY_PROCESS_INSTANCE_ALL_VARIABLES_ERROR(10120,"query process instance all variables error", "查询工作流自定义变量信息错误"),
|
||||
ENCAPSULATION_PROCESS_INSTANCE_GANTT_STRUCTURE_ERROR(10121,"encapsulation process instance gantt structure error", "查询工作流实例甘特图数据错误"),
|
||||
QUERY_PROCESS_DEFINITION_LIST_PAGING_ERROR(10122,"query process definition list paging error", "分页查询工作流定义列表错误"),
|
||||
SIGN_OUT_ERROR(10123,"sign out error", "退出错误"),
|
||||
TENANT_CODE_HAS_ALREADY_EXISTS(10124,"tenant code has already exists", "租户编码已存在"),
|
||||
IP_IS_EMPTY(10125,"ip is empty", "IP地址不能为空"),
|
||||
CREATE_ALERT_GROUP_ERROR(10027, "create alert group error", "创建告警组错误"),
|
||||
QUERY_ALL_ALERTGROUP_ERROR(10028, "query all alertgroup error", "查询告警组错误"),
|
||||
LIST_PAGING_ALERT_GROUP_ERROR(10029, "list paging alert group error", "分页查询告警组错误"),
|
||||
UPDATE_ALERT_GROUP_ERROR(10030, "update alert group error", "更新告警组错误"),
|
||||
DELETE_ALERT_GROUP_ERROR(10031, "delete alert group error", "删除告警组错误"),
|
||||
ALERT_GROUP_GRANT_USER_ERROR(10032, "alert group grant user error", "告警组授权用户错误"),
|
||||
CREATE_DATASOURCE_ERROR(10033, "create datasource error", "创建数据源错误"),
|
||||
UPDATE_DATASOURCE_ERROR(10034, "update datasource error", "更新数据源错误"),
|
||||
QUERY_DATASOURCE_ERROR(10035, "query datasource error", "查询数据源错误"),
|
||||
CONNECT_DATASOURCE_FAILURE(10036, "connect datasource failure", "建立数据源连接失败"),
|
||||
CONNECTION_TEST_FAILURE(10037, "connection test failure", "测试数据源连接失败"),
|
||||
DELETE_DATA_SOURCE_FAILURE(10038, "delete data source failure", "删除数据源失败"),
|
||||
VERIFY_DATASOURCE_NAME_FAILURE(10039, "verify datasource name failure", "验证数据源名称失败"),
|
||||
UNAUTHORIZED_DATASOURCE(10040, "unauthorized datasource", "未经授权的数据源"),
|
||||
AUTHORIZED_DATA_SOURCE(10041, "authorized data source", "授权数据源失败"),
|
||||
LOGIN_SUCCESS(10042, "login success", "登录成功"),
|
||||
USER_LOGIN_FAILURE(10043, "user login failure", "用户登录失败"),
|
||||
LIST_WORKERS_ERROR(10044, "list workers error", "查询worker列表错误"),
|
||||
LIST_MASTERS_ERROR(10045, "list masters error", "查询master列表错误"),
|
||||
UPDATE_PROJECT_ERROR(10046, "update project error", "更新项目信息错误"),
|
||||
QUERY_PROJECT_DETAILS_BY_ID_ERROR(10047, "query project details by id error", "查询项目详细信息错误"),
|
||||
CREATE_PROJECT_ERROR(10048, "create project error", "创建项目错误"),
|
||||
LOGIN_USER_QUERY_PROJECT_LIST_PAGING_ERROR(10049, "login user query project list paging error", "分页查询项目列表错误"),
|
||||
DELETE_PROJECT_ERROR(10050, "delete project error", "删除项目错误"),
|
||||
QUERY_UNAUTHORIZED_PROJECT_ERROR(10051, "query unauthorized project error", "查询未授权项目错误"),
|
||||
QUERY_AUTHORIZED_PROJECT(10052, "query authorized project", "查询授权项目错误"),
|
||||
QUERY_QUEUE_LIST_ERROR(10053, "query queue list error", "查询队列列表错误"),
|
||||
CREATE_RESOURCE_ERROR(10054, "create resource error", "创建资源错误"),
|
||||
UPDATE_RESOURCE_ERROR(10055, "update resource error", "更新资源错误"),
|
||||
QUERY_RESOURCES_LIST_ERROR(10056, "query resources list error", "查询资源列表错误"),
|
||||
QUERY_RESOURCES_LIST_PAGING(10057, "query resources list paging", "分页查询资源列表错误"),
|
||||
DELETE_RESOURCE_ERROR(10058, "delete resource error", "删除资源错误"),
|
||||
VERIFY_RESOURCE_BY_NAME_AND_TYPE_ERROR(10059, "verify resource by name and type error", "资源名称或类型验证错误"),
|
||||
VIEW_RESOURCE_FILE_ON_LINE_ERROR(10060, "view resource file online error", "查看资源文件错误"),
|
||||
CREATE_RESOURCE_FILE_ON_LINE_ERROR(10061, "create resource file online error", "创建资源文件错误"),
|
||||
RESOURCE_FILE_IS_EMPTY(10062, "resource file is empty", "资源文件内容不能为空"),
|
||||
EDIT_RESOURCE_FILE_ON_LINE_ERROR(10063, "edit resource file online error", "更新资源文件错误"),
|
||||
DOWNLOAD_RESOURCE_FILE_ERROR(10064, "download resource file error", "下载资源文件错误"),
|
||||
CREATE_UDF_FUNCTION_ERROR(10065, "create udf function error", "创建UDF函数错误"),
|
||||
VIEW_UDF_FUNCTION_ERROR(10066, "view udf function error", "查询UDF函数错误"),
|
||||
UPDATE_UDF_FUNCTION_ERROR(10067, "update udf function error", "更新UDF函数错误"),
|
||||
QUERY_UDF_FUNCTION_LIST_PAGING_ERROR(10068, "query udf function list paging error", "分页查询UDF函数列表错误"),
|
||||
QUERY_DATASOURCE_BY_TYPE_ERROR(10069, "query datasource by type error", "查询数据源信息错误"),
|
||||
VERIFY_UDF_FUNCTION_NAME_ERROR(10070, "verify udf function name error", "UDF函数名称验证错误"),
|
||||
DELETE_UDF_FUNCTION_ERROR(10071, "delete udf function error", "删除UDF函数错误"),
|
||||
AUTHORIZED_FILE_RESOURCE_ERROR(10072, "authorized file resource error", "授权资源文件错误"),
|
||||
AUTHORIZE_RESOURCE_TREE(10073, "authorize resource tree display error", "授权资源目录树错误"),
|
||||
UNAUTHORIZED_UDF_FUNCTION_ERROR(10074, "unauthorized udf function error", "查询未授权UDF函数错误"),
|
||||
AUTHORIZED_UDF_FUNCTION_ERROR(10075, "authorized udf function error", "授权UDF函数错误"),
|
||||
CREATE_SCHEDULE_ERROR(10076, "create schedule error", "创建调度配置错误"),
|
||||
UPDATE_SCHEDULE_ERROR(10077, "update schedule error", "更新调度配置错误"),
|
||||
PUBLISH_SCHEDULE_ONLINE_ERROR(10078, "publish schedule online error", "上线调度配置错误"),
|
||||
OFFLINE_SCHEDULE_ERROR(10079, "offline schedule error", "下线调度配置错误"),
|
||||
QUERY_SCHEDULE_LIST_PAGING_ERROR(10080, "query schedule list paging error", "分页查询调度配置列表错误"),
|
||||
QUERY_SCHEDULE_LIST_ERROR(10081, "query schedule list error", "查询调度配置列表错误"),
|
||||
QUERY_TASK_LIST_PAGING_ERROR(10082, "query task list paging error", "分页查询任务列表错误"),
|
||||
QUERY_TASK_RECORD_LIST_PAGING_ERROR(10083, "query task record list paging error", "分页查询任务记录错误"),
|
||||
CREATE_TENANT_ERROR(10084, "create tenant error", "创建租户错误"),
|
||||
QUERY_TENANT_LIST_PAGING_ERROR(10085, "query tenant list paging error", "分页查询租户列表错误"),
|
||||
QUERY_TENANT_LIST_ERROR(10086, "query tenant list error", "查询租户列表错误"),
|
||||
UPDATE_TENANT_ERROR(10087, "update tenant error", "更新租户错误"),
|
||||
DELETE_TENANT_BY_ID_ERROR(10088, "delete tenant by id error", "删除租户错误"),
|
||||
VERIFY_TENANT_CODE_ERROR(10089, "verify tenant code error", "租户编码验证错误"),
|
||||
CREATE_USER_ERROR(10090, "create user error", "创建用户错误"),
|
||||
QUERY_USER_LIST_PAGING_ERROR(10091, "query user list paging error", "分页查询用户列表错误"),
|
||||
UPDATE_USER_ERROR(10092, "update user error", "更新用户错误"),
|
||||
DELETE_USER_BY_ID_ERROR(10093, "delete user by id error", "删除用户错误"),
|
||||
GRANT_PROJECT_ERROR(10094, "grant project error", "授权项目错误"),
|
||||
GRANT_RESOURCE_ERROR(10095, "grant resource error", "授权资源错误"),
|
||||
GRANT_UDF_FUNCTION_ERROR(10096, "grant udf function error", "授权UDF函数错误"),
|
||||
GRANT_DATASOURCE_ERROR(10097, "grant datasource error", "授权数据源错误"),
|
||||
GET_USER_INFO_ERROR(10098, "get user info error", "获取用户信息错误"),
|
||||
USER_LIST_ERROR(10099, "user list error", "查询用户列表错误"),
|
||||
VERIFY_USERNAME_ERROR(10100, "verify username error", "用户名验证错误"),
|
||||
UNAUTHORIZED_USER_ERROR(10101, "unauthorized user error", "查询未授权用户错误"),
|
||||
AUTHORIZED_USER_ERROR(10102, "authorized user error", "查询授权用户错误"),
|
||||
QUERY_TASK_INSTANCE_LOG_ERROR(10103, "view task instance log error", "查询任务实例日志错误"),
|
||||
DOWNLOAD_TASK_INSTANCE_LOG_FILE_ERROR(10104, "download task instance log file error", "下载任务日志文件错误"),
|
||||
CREATE_PROCESS_DEFINITION(10105, "create process definition", "创建工作流错误"),
|
||||
VERIFY_PROCESS_DEFINITION_NAME_UNIQUE_ERROR(10106, "verify process definition name unique error", "工作流名称已存在"),
|
||||
UPDATE_PROCESS_DEFINITION_ERROR(10107, "update process definition error", "更新工作流定义错误"),
|
||||
RELEASE_PROCESS_DEFINITION_ERROR(10108, "release process definition error", "上线工作流错误"),
|
||||
QUERY_DATAIL_OF_PROCESS_DEFINITION_ERROR(10109, "query datail of process definition error", "查询工作流详细信息错误"),
|
||||
QUERY_PROCESS_DEFINITION_LIST(10110, "query process definition list", "查询工作流列表错误"),
|
||||
ENCAPSULATION_TREEVIEW_STRUCTURE_ERROR(10111, "encapsulation treeview structure error", "查询工作流树形图数据错误"),
|
||||
GET_TASKS_LIST_BY_PROCESS_DEFINITION_ID_ERROR(10112, "get tasks list by process definition id error", "查询工作流定义节点信息错误"),
|
||||
QUERY_PROCESS_INSTANCE_LIST_PAGING_ERROR(10113, "query process instance list paging error", "分页查询工作流实例列表错误"),
|
||||
QUERY_TASK_LIST_BY_PROCESS_INSTANCE_ID_ERROR(10114, "query task list by process instance id error", "查询任务实例列表错误"),
|
||||
UPDATE_PROCESS_INSTANCE_ERROR(10115, "update process instance error", "更新工作流实例错误"),
|
||||
QUERY_PROCESS_INSTANCE_BY_ID_ERROR(10116, "query process instance by id error", "查询工作流实例错误"),
|
||||
DELETE_PROCESS_INSTANCE_BY_ID_ERROR(10117, "delete process instance by id error", "删除工作流实例错误"),
|
||||
QUERY_SUB_PROCESS_INSTANCE_DETAIL_INFO_BY_TASK_ID_ERROR(10118, "query sub process instance detail info by task id error", "查询子流程任务实例错误"),
|
||||
QUERY_PARENT_PROCESS_INSTANCE_DETAIL_INFO_BY_SUB_PROCESS_INSTANCE_ID_ERROR(10119, "query parent process instance detail info by sub process instance id error", "查询子流程该工作流实例错误"),
|
||||
QUERY_PROCESS_INSTANCE_ALL_VARIABLES_ERROR(10120, "query process instance all variables error", "查询工作流自定义变量信息错误"),
|
||||
ENCAPSULATION_PROCESS_INSTANCE_GANTT_STRUCTURE_ERROR(10121, "encapsulation process instance gantt structure error", "查询工作流实例甘特图数据错误"),
|
||||
QUERY_PROCESS_DEFINITION_LIST_PAGING_ERROR(10122, "query process definition list paging error", "分页查询工作流定义列表错误"),
|
||||
SIGN_OUT_ERROR(10123, "sign out error", "退出错误"),
|
||||
TENANT_CODE_HAS_ALREADY_EXISTS(10124, "tenant code has already exists", "租户编码已存在"),
|
||||
IP_IS_EMPTY(10125, "ip is empty", "IP地址不能为空"),
|
||||
SCHEDULE_CRON_REALEASE_NEED_NOT_CHANGE(10126, "schedule release is already {0}", "调度配置上线错误[{0}]"),
|
||||
CREATE_QUEUE_ERROR(10127, "create queue error", "创建队列错误"),
|
||||
QUEUE_NOT_EXIST(10128, "queue {0} not exists", "队列ID[{0}]不存在"),
|
||||
|
|
@ -159,31 +160,43 @@ public enum Status {
|
|||
QUEUE_NAME_EXIST(10130, "queue name {0} already exists", "队列名称[{0}]已存在"),
|
||||
UPDATE_QUEUE_ERROR(10131, "update queue error", "更新队列信息错误"),
|
||||
NEED_NOT_UPDATE_QUEUE(10132, "no content changes, no updates are required", "数据未变更,不需要更新队列信息"),
|
||||
VERIFY_QUEUE_ERROR(10133,"verify queue error", "验证队列信息错误"),
|
||||
NAME_NULL(10134,"name must be not null", "名称不能为空"),
|
||||
VERIFY_QUEUE_ERROR(10133, "verify queue error", "验证队列信息错误"),
|
||||
NAME_NULL(10134, "name must be not null", "名称不能为空"),
|
||||
NAME_EXIST(10135, "name {0} already exists", "名称[{0}]已存在"),
|
||||
SAVE_ERROR(10136, "save error", "保存错误"),
|
||||
DELETE_PROJECT_ERROR_DEFINES_NOT_NULL(10137, "please delete the process definitions in project first!", "请先删除全部工作流定义"),
|
||||
BATCH_DELETE_PROCESS_INSTANCE_BY_IDS_ERROR(10117,"batch delete process instance by ids {0} error", "批量删除工作流实例错误"),
|
||||
PREVIEW_SCHEDULE_ERROR(10139,"preview schedule error", "预览调度配置错误"),
|
||||
PARSE_TO_CRON_EXPRESSION_ERROR(10140,"parse cron to cron expression error", "解析调度表达式错误"),
|
||||
SCHEDULE_START_TIME_END_TIME_SAME(10141,"The start time must not be the same as the end", "开始时间不能和结束时间一样"),
|
||||
DELETE_TENANT_BY_ID_FAIL(10142,"delete tenant by id fail, for there are {0} process instances in executing using it", "删除租户失败,有[{0}]个运行中的工作流实例正在使用"),
|
||||
DELETE_TENANT_BY_ID_FAIL_DEFINES(10143,"delete tenant by id fail, for there are {0} process definitions using it", "删除租户失败,有[{0}]个工作流定义正在使用"),
|
||||
DELETE_TENANT_BY_ID_FAIL_USERS(10144,"delete tenant by id fail, for there are {0} users using it", "删除租户失败,有[{0}]个用户正在使用"),
|
||||
DELETE_WORKER_GROUP_BY_ID_FAIL(10145,"delete worker group by id fail, for there are {0} process instances in executing using it", "删除Worker分组失败,有[{0}]个运行中的工作流实例正在使用"),
|
||||
QUERY_WORKER_GROUP_FAIL(10146,"query worker group fail ", "查询worker分组失败"),
|
||||
DELETE_WORKER_GROUP_FAIL(10147,"delete worker group fail ", "删除worker分组失败"),
|
||||
USER_DISABLED(10148,"The current user is disabled", "当前用户已停用"),
|
||||
COPY_PROCESS_DEFINITION_ERROR(10149,"copy process definition from {0} to {1} error : {2}", "从{0}复制工作流到{1}错误 : {2}"),
|
||||
MOVE_PROCESS_DEFINITION_ERROR(10150,"move process definition from {0} to {1} error : {2}", "从{0}移动工作流到{1}错误 : {2}"),
|
||||
QUERY_USER_CREATED_PROJECT_ERROR(10151,"query user created project error error", "查询用户创建的项目错误"),
|
||||
PROCESS_DEFINITION_IDS_IS_EMPTY(10152,"process definition ids is empty", "工作流IDS不能为空"),
|
||||
BATCH_COPY_PROCESS_DEFINITION_ERROR(10153,"batch copy process definition error", "复制工作流错误"),
|
||||
BATCH_MOVE_PROCESS_DEFINITION_ERROR(10154,"batch move process definition error", "移动工作流错误"),
|
||||
QUERY_WORKFLOW_LINEAGE_ERROR(10155,"query workflow lineage error", "查询血缘失败"),
|
||||
FORCE_TASK_SUCCESS_ERROR(10156, "force task success error", "强制成功任务实例错误"),
|
||||
TASK_INSTANCE_STATE_OPETATION_ERROR(10157, "the status of task instance {0} is {1},Cannot perform force success operation", "任务实例[{0}]的状态是[{1}],无法执行强制成功操作"),
|
||||
BATCH_DELETE_PROCESS_INSTANCE_BY_IDS_ERROR(10117, "batch delete process instance by ids {0} error", "批量删除工作流实例错误"),
|
||||
PREVIEW_SCHEDULE_ERROR(10139, "preview schedule error", "预览调度配置错误"),
|
||||
PARSE_TO_CRON_EXPRESSION_ERROR(10140, "parse cron to cron expression error", "解析调度表达式错误"),
|
||||
SCHEDULE_START_TIME_END_TIME_SAME(10141, "The start time must not be the same as the end", "开始时间不能和结束时间一样"),
|
||||
DELETE_TENANT_BY_ID_FAIL(10142, "delete tenant by id fail, for there are {0} process instances in executing using it", "删除租户失败,有[{0}]个运行中的工作流实例正在使用"),
|
||||
DELETE_TENANT_BY_ID_FAIL_DEFINES(10143, "delete tenant by id fail, for there are {0} process definitions using it", "删除租户失败,有[{0}]个工作流定义正在使用"),
|
||||
DELETE_TENANT_BY_ID_FAIL_USERS(10144, "delete tenant by id fail, for there are {0} users using it", "删除租户失败,有[{0}]个用户正在使用"),
|
||||
DELETE_WORKER_GROUP_BY_ID_FAIL(10145, "delete worker group by id fail, for there are {0} process instances in executing using it", "删除Worker分组失败,有[{0}]个运行中的工作流实例正在使用"),
|
||||
QUERY_WORKER_GROUP_FAIL(10146, "query worker group fail ", "查询worker分组失败"),
|
||||
DELETE_WORKER_GROUP_FAIL(10147, "delete worker group fail ", "删除worker分组失败"),
|
||||
USER_DISABLED(10148, "The current user is disabled", "当前用户已停用"),
|
||||
COPY_PROCESS_DEFINITION_ERROR(10149, "copy process definition from {0} to {1} error : {2}", "从{0}复制工作流到{1}错误 : {2}"),
|
||||
MOVE_PROCESS_DEFINITION_ERROR(10150, "move process definition from {0} to {1} error : {2}", "从{0}移动工作流到{1}错误 : {2}"),
|
||||
SWITCH_PROCESS_DEFINITION_VERSION_ERROR(10151, "Switch process definition version error", "切换工作流版本出错"),
|
||||
SWITCH_PROCESS_DEFINITION_VERSION_NOT_EXIST_PROCESS_DEFINITION_ERROR(10152
|
||||
, "Switch process definition version error: not exists process definition, [process definition id {0}]", "切换工作流版本出错:工作流不存在,[工作流id {0}]"),
|
||||
SWITCH_PROCESS_DEFINITION_VERSION_NOT_EXIST_PROCESS_DEFINITION_VERSION_ERROR(10153
|
||||
, "Switch process definition version error: not exists process definition version, [process definition id {0}] [version number {1}]", "切换工作流版本出错:工作流版本信息不存在,[工作流id {0}] [版本号 {1}]"),
|
||||
QUERY_PROCESS_DEFINITION_VERSIONS_ERROR(10154, "query process definition versions error", "查询工作流历史版本信息出错"),
|
||||
QUERY_PROCESS_DEFINITION_VERSIONS_PAGE_NO_OR_PAGE_SIZE_LESS_THAN_1_ERROR(10155
|
||||
, "query process definition versions error: [page number:{0}] < 1 or [page size:{1}] < 1", "查询工作流历史版本出错:[pageNo:{0}] < 1 或 [pageSize:{1}] < 1"),
|
||||
DELETE_PROCESS_DEFINITION_VERSION_ERROR(10156, "delete process definition version error", "删除工作流历史版本出错"),
|
||||
|
||||
QUERY_USER_CREATED_PROJECT_ERROR(10157, "query user created project error error", "查询用户创建的项目错误"),
|
||||
PROCESS_DEFINITION_IDS_IS_EMPTY(10158, "process definition ids is empty", "工作流IDS不能为空"),
|
||||
BATCH_COPY_PROCESS_DEFINITION_ERROR(10159, "batch copy process definition error", "复制工作流错误"),
|
||||
BATCH_MOVE_PROCESS_DEFINITION_ERROR(10160, "batch move process definition error", "移动工作流错误"),
|
||||
QUERY_WORKFLOW_LINEAGE_ERROR(10161, "query workflow lineage error", "查询血缘失败"),
|
||||
FORCE_TASK_SUCCESS_ERROR(10162, "force task success error", "强制成功任务实例错误"),
|
||||
TASK_INSTANCE_STATE_OPETATION_ERROR(10163, "the status of task instance {0} is {1},Cannot perform force success operation", "任务实例[{0}]的状态是[{1}],无法执行强制成功操作"),
|
||||
|
||||
|
||||
|
||||
UDF_FUNCTION_NOT_EXIST(20001, "UDF function not found", "UDF函数不存在"),
|
||||
UDF_FUNCTION_EXISTS(20002, "UDF function already exists", "UDF函数已存在"),
|
||||
|
|
@ -196,10 +209,10 @@ public enum Status {
|
|||
HDFS_COPY_FAIL(20010, "hdfs copy {0} -> {1} fail", "hdfs复制失败:[{0}] -> [{1}]"),
|
||||
RESOURCE_FILE_EXIST(20011, "resource file {0} already exists in hdfs,please delete it or change name!", "资源文件[{0}]在hdfs中已存在,请删除或修改资源名"),
|
||||
RESOURCE_FILE_NOT_EXIST(20012, "resource file {0} not exists in hdfs!", "资源文件[{0}]在hdfs中不存在"),
|
||||
UDF_RESOURCE_IS_BOUND(20013, "udf resource file is bound by UDF functions:{0}","udf函数绑定了资源文件[{0}]"),
|
||||
RESOURCE_IS_USED(20014, "resource file is used by process definition","资源文件被上线的流程定义使用了"),
|
||||
PARENT_RESOURCE_NOT_EXIST(20015, "parent resource not exist","父资源文件不存在"),
|
||||
RESOURCE_NOT_EXIST_OR_NO_PERMISSION(20016, "resource not exist or no permission,please view the task node and remove error resource","请检查任务节点并移除无权限或者已删除的资源"),
|
||||
UDF_RESOURCE_IS_BOUND(20013, "udf resource file is bound by UDF functions:{0}", "udf函数绑定了资源文件[{0}]"),
|
||||
RESOURCE_IS_USED(20014, "resource file is used by process definition", "资源文件被上线的流程定义使用了"),
|
||||
PARENT_RESOURCE_NOT_EXIST(20015, "parent resource not exist", "父资源文件不存在"),
|
||||
RESOURCE_NOT_EXIST_OR_NO_PERMISSION(20016, "resource not exist or no permission,please view the task node and remove error resource", "请检查任务节点并移除无权限或者已删除的资源"),
|
||||
RESOURCE_IS_AUTHORIZED(20017, "resource is authorized to user {0},suffix not allowed to be modified", "资源文件已授权其他用户[{0}],后缀不允许修改"),
|
||||
|
||||
USER_NO_OPERATION_PERM(30001, "user has no operation privilege", "当前用户没有操作权限"),
|
||||
|
|
@ -216,17 +229,17 @@ public enum Status {
|
|||
PROCESS_DEFINE_NOT_ALLOWED_EDIT(50008, "process definition {0} does not allow edit", "工作流定义[{0}]不允许修改"),
|
||||
PROCESS_INSTANCE_EXECUTING_COMMAND(50009, "process instance {0} is executing the command, please wait ...", "工作流实例[{0}]正在执行命令,请稍等..."),
|
||||
PROCESS_INSTANCE_NOT_SUB_PROCESS_INSTANCE(50010, "process instance {0} is not sub process instance", "工作流实例[{0}]不是子工作流实例"),
|
||||
TASK_INSTANCE_STATE_COUNT_ERROR(50011,"task instance state count error", "查询各状态任务实例数错误"),
|
||||
COUNT_PROCESS_INSTANCE_STATE_ERROR(50012,"count process instance state error", "查询各状态流程实例数错误"),
|
||||
COUNT_PROCESS_DEFINITION_USER_ERROR(50013,"count process definition user error", "查询各用户流程定义数错误"),
|
||||
START_PROCESS_INSTANCE_ERROR(50014,"start process instance error", "运行工作流实例错误"),
|
||||
EXECUTE_PROCESS_INSTANCE_ERROR(50015,"execute process instance error", "操作工作流实例错误"),
|
||||
CHECK_PROCESS_DEFINITION_ERROR(50016,"check process definition error", "检查工作流实例错误"),
|
||||
QUERY_RECIPIENTS_AND_COPYERS_BY_PROCESS_DEFINITION_ERROR(50017,"query recipients and copyers by process definition error", "查询收件人和抄送人错误"),
|
||||
DATA_IS_NOT_VALID(50017,"data {0} not valid", "数据[{0}]无效"),
|
||||
DATA_IS_NULL(50018,"data {0} is null", "数据[{0}]不能为空"),
|
||||
PROCESS_NODE_HAS_CYCLE(50019,"process node has cycle", "流程节点间存在循环依赖"),
|
||||
PROCESS_NODE_S_PARAMETER_INVALID(50020,"process node {0} parameter invalid", "流程节点[{0}]参数无效"),
|
||||
TASK_INSTANCE_STATE_COUNT_ERROR(50011, "task instance state count error", "查询各状态任务实例数错误"),
|
||||
COUNT_PROCESS_INSTANCE_STATE_ERROR(50012, "count process instance state error", "查询各状态流程实例数错误"),
|
||||
COUNT_PROCESS_DEFINITION_USER_ERROR(50013, "count process definition user error", "查询各用户流程定义数错误"),
|
||||
START_PROCESS_INSTANCE_ERROR(50014, "start process instance error", "运行工作流实例错误"),
|
||||
EXECUTE_PROCESS_INSTANCE_ERROR(50015, "execute process instance error", "操作工作流实例错误"),
|
||||
CHECK_PROCESS_DEFINITION_ERROR(50016, "check process definition error", "检查工作流实例错误"),
|
||||
QUERY_RECIPIENTS_AND_COPYERS_BY_PROCESS_DEFINITION_ERROR(50017, "query recipients and copyers by process definition error", "查询收件人和抄送人错误"),
|
||||
DATA_IS_NOT_VALID(50017, "data {0} not valid", "数据[{0}]无效"),
|
||||
DATA_IS_NULL(50018, "data {0} is null", "数据[{0}]不能为空"),
|
||||
PROCESS_NODE_HAS_CYCLE(50019, "process node has cycle", "流程节点间存在循环依赖"),
|
||||
PROCESS_NODE_S_PARAMETER_INVALID(50020, "process node {0} parameter invalid", "流程节点[{0}]参数无效"),
|
||||
PROCESS_DEFINE_STATE_ONLINE(50021, "process definition {0} is already on line", "工作流定义[{0}]已上线"),
|
||||
DELETE_PROCESS_DEFINE_BY_ID_ERROR(50022,"delete process definition by id error", "删除工作流定义错误"),
|
||||
SCHEDULE_CRON_STATE_ONLINE(50023,"the status of schedule {0} is already on line", "调度配置[{0}]已上线"),
|
||||
|
|
@ -239,30 +252,29 @@ public enum Status {
|
|||
IMPORT_PROCESS_DEFINE_ERROR(50029,"import process definition error", "导入工作流定义错误"),
|
||||
NO_VALID_FORCED_SUCCESS_TASK(50030, "there is no valid forced success node in process instance {0}", "工作流实例[{0}]中不包含有效的强制成功的任务实例"),
|
||||
|
||||
HDFS_NOT_STARTUP(60001,"hdfs not startup", "hdfs未启用"),
|
||||
HDFS_NOT_STARTUP(60001, "hdfs not startup", "hdfs未启用"),
|
||||
|
||||
/**
|
||||
* for monitor
|
||||
*/
|
||||
QUERY_DATABASE_STATE_ERROR(70001,"query database state error", "查询数据库状态错误"),
|
||||
QUERY_ZOOKEEPER_STATE_ERROR(70002,"query zookeeper state error", "查询zookeeper状态错误"),
|
||||
QUERY_DATABASE_STATE_ERROR(70001, "query database state error", "查询数据库状态错误"),
|
||||
QUERY_ZOOKEEPER_STATE_ERROR(70002, "query zookeeper state error", "查询zookeeper状态错误"),
|
||||
|
||||
|
||||
|
||||
CREATE_ACCESS_TOKEN_ERROR(70010,"create access token error", "创建访问token错误"),
|
||||
GENERATE_TOKEN_ERROR(70011,"generate token error", "生成token错误"),
|
||||
QUERY_ACCESSTOKEN_LIST_PAGING_ERROR(70012,"query access token list paging error", "分页查询访问token列表错误"),
|
||||
UPDATE_ACCESS_TOKEN_ERROR(70013,"update access token error", "更新访问token错误"),
|
||||
DELETE_ACCESS_TOKEN_ERROR(70014,"delete access token error", "删除访问token错误"),
|
||||
CREATE_ACCESS_TOKEN_ERROR(70010, "create access token error", "创建访问token错误"),
|
||||
GENERATE_TOKEN_ERROR(70011, "generate token error", "生成token错误"),
|
||||
QUERY_ACCESSTOKEN_LIST_PAGING_ERROR(70012, "query access token list paging error", "分页查询访问token列表错误"),
|
||||
UPDATE_ACCESS_TOKEN_ERROR(70013, "update access token error", "更新访问token错误"),
|
||||
DELETE_ACCESS_TOKEN_ERROR(70014, "delete access token error", "删除访问token错误"),
|
||||
ACCESS_TOKEN_NOT_EXIST(70015, "access token not exist", "访问token不存在"),
|
||||
|
||||
|
||||
COMMAND_STATE_COUNT_ERROR(80001,"task instance state count error", "查询各状态任务实例数错误"),
|
||||
NEGTIVE_SIZE_NUMBER_ERROR(80002,"query size number error","查询size错误"),
|
||||
START_TIME_BIGGER_THAN_END_TIME_ERROR(80003,"start time bigger than end time error","开始时间在结束时间之后错误"),
|
||||
QUEUE_COUNT_ERROR(90001,"queue count error", "查询队列数据错误"),
|
||||
COMMAND_STATE_COUNT_ERROR(80001, "task instance state count error", "查询各状态任务实例数错误"),
|
||||
NEGTIVE_SIZE_NUMBER_ERROR(80002, "query size number error", "查询size错误"),
|
||||
START_TIME_BIGGER_THAN_END_TIME_ERROR(80003, "start time bigger than end time error", "开始时间在结束时间之后错误"),
|
||||
QUEUE_COUNT_ERROR(90001, "queue count error", "查询队列数据错误"),
|
||||
|
||||
KERBEROS_STARTUP_STATE(100001,"get kerberos startup state error", "获取kerberos启动状态错误"),
|
||||
KERBEROS_STARTUP_STATE(100001, "get kerberos startup state error", "获取kerberos启动状态错误"),
|
||||
;
|
||||
|
||||
private final int code;
|
||||
|
|
|
|||
|
|
@ -53,7 +53,7 @@ import static org.apache.dolphinscheduler.common.utils.PropertyUtils.getString;
|
|||
* datasource service
|
||||
*/
|
||||
@Service
|
||||
public class DataSourceService extends BaseService{
|
||||
public class DataSourceService extends BaseService {
|
||||
|
||||
private static final Logger logger = LoggerFactory.getLogger(DataSourceService.class);
|
||||
|
||||
|
|
@ -65,7 +65,6 @@ public class DataSourceService extends BaseService{
|
|||
public static final String PRINCIPAL = "principal";
|
||||
public static final String DATABASE = "database";
|
||||
public static final String USER_NAME = "userName";
|
||||
public static final String PASSWORD = Constants.PASSWORD;
|
||||
public static final String OTHER = "other";
|
||||
|
||||
|
||||
|
|
@ -80,9 +79,9 @@ public class DataSourceService extends BaseService{
|
|||
* create data source
|
||||
*
|
||||
* @param loginUser login user
|
||||
* @param name data source name
|
||||
* @param desc data source description
|
||||
* @param type data source type
|
||||
* @param name data source name
|
||||
* @param desc data source description
|
||||
* @param type data source type
|
||||
* @param parameter datasource parameters
|
||||
* @return create result code
|
||||
*/
|
||||
|
|
@ -131,11 +130,11 @@ public class DataSourceService extends BaseService{
|
|||
* updateProcessInstance datasource
|
||||
*
|
||||
* @param loginUser login user
|
||||
* @param name data source name
|
||||
* @param desc data source description
|
||||
* @param type data source type
|
||||
* @param name data source name
|
||||
* @param desc data source description
|
||||
* @param type data source type
|
||||
* @param parameter datasource parameters
|
||||
* @param id data source id
|
||||
* @param id data source id
|
||||
* @return update result code
|
||||
*/
|
||||
public Map<String, Object> updateDataSource(int id, User loginUser, String name, String desc, DbType type, String parameter) {
|
||||
|
|
@ -148,13 +147,13 @@ public class DataSourceService extends BaseService{
|
|||
return result;
|
||||
}
|
||||
|
||||
if(!hasPerm(loginUser, dataSource.getUserId())){
|
||||
if (!hasPerm(loginUser, dataSource.getUserId())) {
|
||||
putMsg(result, Status.USER_NO_OPERATION_PERM);
|
||||
return result;
|
||||
}
|
||||
|
||||
//check name can use or not
|
||||
if(!name.trim().equals(dataSource.getName()) && checkName(name)){
|
||||
if (!name.trim().equals(dataSource.getName()) && checkName(name)) {
|
||||
putMsg(result, Status.DATASOURCE_EXIST);
|
||||
return result;
|
||||
}
|
||||
|
|
@ -199,6 +198,7 @@ public class DataSourceService extends BaseService{
|
|||
|
||||
/**
|
||||
* updateProcessInstance datasource
|
||||
*
|
||||
* @param id datasource id
|
||||
* @return data source detail
|
||||
*/
|
||||
|
|
@ -220,11 +220,11 @@ public class DataSourceService extends BaseService{
|
|||
String parameter = dataSource.getConnectionParams();
|
||||
|
||||
BaseDataSource datasourceForm = DataSourceFactory.getDatasource(dataSource.getType(), parameter);
|
||||
DbConnectType connectType = null;
|
||||
DbConnectType connectType = null;
|
||||
String hostSeperator = Constants.DOUBLE_SLASH;
|
||||
if(DbType.ORACLE.equals(dataSource.getType())){
|
||||
if (DbType.ORACLE.equals(dataSource.getType())) {
|
||||
connectType = ((OracleDataSource) datasourceForm).getConnectType();
|
||||
if(DbConnectType.ORACLE_SID.equals(connectType)){
|
||||
if (DbConnectType.ORACLE_SID.equals(connectType)) {
|
||||
hostSeperator = Constants.AT_SIGN;
|
||||
}
|
||||
}
|
||||
|
|
@ -233,7 +233,7 @@ public class DataSourceService extends BaseService{
|
|||
String other = datasourceForm.getOther();
|
||||
String address = datasourceForm.getAddress();
|
||||
|
||||
String[] hostsPorts = getHostsAndPort(address,hostSeperator);
|
||||
String[] hostsPorts = getHostsAndPort(address, hostSeperator);
|
||||
// ip host
|
||||
String host = hostsPorts[0];
|
||||
// prot
|
||||
|
|
@ -285,14 +285,13 @@ public class DataSourceService extends BaseService{
|
|||
return result;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* query datasource list by keyword
|
||||
*
|
||||
* @param loginUser login user
|
||||
* @param searchVal search value
|
||||
* @param pageNo page number
|
||||
* @param pageSize page size
|
||||
* @param pageNo page number
|
||||
* @param pageSize page size
|
||||
* @return data source list page
|
||||
*/
|
||||
public Map<String, Object> queryDataSourceListPaging(User loginUser, String searchVal, Integer pageNo, Integer pageSize) {
|
||||
|
|
@ -302,14 +301,14 @@ public class DataSourceService extends BaseService{
|
|||
|
||||
if (isAdmin(loginUser)) {
|
||||
dataSourceList = dataSourceMapper.selectPaging(dataSourcePage, 0, searchVal);
|
||||
}else{
|
||||
} else {
|
||||
dataSourceList = dataSourceMapper.selectPaging(dataSourcePage, loginUser.getId(), searchVal);
|
||||
}
|
||||
|
||||
List<DataSource> dataSources = dataSourceList.getRecords();
|
||||
List<DataSource> dataSources = dataSourceList != null ? dataSourceList.getRecords() : new ArrayList<>();
|
||||
handlePasswd(dataSources);
|
||||
PageInfo pageInfo = new PageInfo<Resource>(pageNo, pageSize);
|
||||
pageInfo.setTotalCount((int)(dataSourceList.getTotal()));
|
||||
pageInfo.setTotalCount((int) (dataSourceList != null ? dataSourceList.getTotal() : 0L));
|
||||
pageInfo.setLists(dataSources);
|
||||
result.put(Constants.DATA_LIST, pageInfo);
|
||||
putMsg(result, Status.SUCCESS);
|
||||
|
|
@ -319,14 +318,15 @@ public class DataSourceService extends BaseService{
|
|||
|
||||
/**
|
||||
* handle datasource connection password for safety
|
||||
*
|
||||
* @param dataSourceList
|
||||
*/
|
||||
private void handlePasswd(List<DataSource> dataSourceList) {
|
||||
|
||||
for (DataSource dataSource : dataSourceList) {
|
||||
|
||||
String connectionParams = dataSource.getConnectionParams();
|
||||
ObjectNode object = JSONUtils.parseObject(connectionParams);
|
||||
String connectionParams = dataSource.getConnectionParams();
|
||||
ObjectNode object = JSONUtils.parseObject(connectionParams);
|
||||
object.put(Constants.PASSWORD, Constants.XXXXXX);
|
||||
dataSource.setConnectionParams(object.toString());
|
||||
|
||||
|
|
@ -337,7 +337,7 @@ public class DataSourceService extends BaseService{
|
|||
* query data resource list
|
||||
*
|
||||
* @param loginUser login user
|
||||
* @param type data source type
|
||||
* @param type data source type
|
||||
* @return data source list page
|
||||
*/
|
||||
public Map<String, Object> queryDataSourceList(User loginUser, Integer type) {
|
||||
|
|
@ -347,7 +347,7 @@ public class DataSourceService extends BaseService{
|
|||
|
||||
if (isAdmin(loginUser)) {
|
||||
datasourceList = dataSourceMapper.listAllDataSourceByType(type);
|
||||
}else{
|
||||
} else {
|
||||
datasourceList = dataSourceMapper.queryDataSourceByType(loginUser.getId(), type);
|
||||
}
|
||||
|
||||
|
|
@ -360,11 +360,10 @@ public class DataSourceService extends BaseService{
|
|||
/**
|
||||
* verify datasource exists
|
||||
*
|
||||
* @param loginUser login user
|
||||
* @param name datasource name
|
||||
* @param name datasource name
|
||||
* @return true if data datasource not exists, otherwise return false
|
||||
*/
|
||||
public Result verifyDataSourceName(User loginUser, String name) {
|
||||
public Result verifyDataSourceName(String name) {
|
||||
Result result = new Result();
|
||||
List<DataSource> dataSourceList = dataSourceMapper.queryDataSourceByName(name);
|
||||
if (dataSourceList != null && dataSourceList.size() > 0) {
|
||||
|
|
@ -380,7 +379,7 @@ public class DataSourceService extends BaseService{
|
|||
/**
|
||||
* get connection
|
||||
*
|
||||
* @param dbType datasource type
|
||||
* @param dbType datasource type
|
||||
* @param parameter parameter
|
||||
* @return connection for datasource
|
||||
*/
|
||||
|
|
@ -399,18 +398,18 @@ public class DataSourceService extends BaseService{
|
|||
break;
|
||||
case HIVE:
|
||||
case SPARK:
|
||||
if (CommonUtils.getKerberosStartupState()) {
|
||||
System.setProperty(org.apache.dolphinscheduler.common.Constants.JAVA_SECURITY_KRB5_CONF,
|
||||
getString(org.apache.dolphinscheduler.common.Constants.JAVA_SECURITY_KRB5_CONF_PATH));
|
||||
Configuration configuration = new Configuration();
|
||||
configuration.set(org.apache.dolphinscheduler.common.Constants.HADOOP_SECURITY_AUTHENTICATION, "kerberos");
|
||||
UserGroupInformation.setConfiguration(configuration);
|
||||
UserGroupInformation.loginUserFromKeytab(getString(org.apache.dolphinscheduler.common.Constants.LOGIN_USER_KEY_TAB_USERNAME),
|
||||
getString(org.apache.dolphinscheduler.common.Constants.LOGIN_USER_KEY_TAB_PATH));
|
||||
if (CommonUtils.getKerberosStartupState()) {
|
||||
System.setProperty(org.apache.dolphinscheduler.common.Constants.JAVA_SECURITY_KRB5_CONF,
|
||||
getString(org.apache.dolphinscheduler.common.Constants.JAVA_SECURITY_KRB5_CONF_PATH));
|
||||
Configuration configuration = new Configuration();
|
||||
configuration.set(org.apache.dolphinscheduler.common.Constants.HADOOP_SECURITY_AUTHENTICATION, "kerberos");
|
||||
UserGroupInformation.setConfiguration(configuration);
|
||||
UserGroupInformation.loginUserFromKeytab(getString(org.apache.dolphinscheduler.common.Constants.LOGIN_USER_KEY_TAB_USERNAME),
|
||||
getString(org.apache.dolphinscheduler.common.Constants.LOGIN_USER_KEY_TAB_PATH));
|
||||
}
|
||||
if (dbType == DbType.HIVE){
|
||||
if (dbType == DbType.HIVE) {
|
||||
datasource = JSONUtils.parseObject(parameter, HiveDataSource.class);
|
||||
}else if (dbType == DbType.SPARK){
|
||||
} else if (dbType == DbType.SPARK) {
|
||||
datasource = JSONUtils.parseObject(parameter, SparkDataSource.class);
|
||||
}
|
||||
Class.forName(Constants.ORG_APACHE_HIVE_JDBC_HIVE_DRIVER);
|
||||
|
|
@ -439,20 +438,19 @@ public class DataSourceService extends BaseService{
|
|||
break;
|
||||
}
|
||||
|
||||
if(datasource != null){
|
||||
if (datasource != null) {
|
||||
connection = DriverManager.getConnection(datasource.getJdbcUrl(), datasource.getUser(), datasource.getPassword());
|
||||
}
|
||||
} catch (Exception e) {
|
||||
logger.error(e.getMessage(),e);
|
||||
logger.error(e.getMessage(), e);
|
||||
}
|
||||
return connection;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* check connection
|
||||
*
|
||||
* @param type data source type
|
||||
* @param type data source type
|
||||
* @param parameter data source parameters
|
||||
* @return true if connect successfully, otherwise false
|
||||
*/
|
||||
|
|
@ -470,35 +468,35 @@ public class DataSourceService extends BaseService{
|
|||
return isConnection;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* test connection
|
||||
*
|
||||
* @param loginUser login user
|
||||
* @param id datasource id
|
||||
* @return connect result code
|
||||
*/
|
||||
public boolean connectionTest(User loginUser, int id) {
|
||||
public boolean connectionTest(int id) {
|
||||
DataSource dataSource = dataSourceMapper.selectById(id);
|
||||
return checkConnection(dataSource.getType(), dataSource.getConnectionParams());
|
||||
if (dataSource != null) {
|
||||
return checkConnection(dataSource.getType(), dataSource.getConnectionParams());
|
||||
} else {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* build paramters
|
||||
*
|
||||
* @param name data source name
|
||||
* @param desc data source description
|
||||
* @param type data source type
|
||||
* @param host data source host
|
||||
* @param port data source port
|
||||
* @param database data source database name
|
||||
* @param userName user name
|
||||
* @param password password
|
||||
* @param other other parameters
|
||||
* @param type data source type
|
||||
* @param host data source host
|
||||
* @param port data source port
|
||||
* @param database data source database name
|
||||
* @param userName user name
|
||||
* @param password password
|
||||
* @param other other parameters
|
||||
* @param principal principal
|
||||
* @return datasource parameter
|
||||
*/
|
||||
public String buildParameter(String name, String desc, DbType type, String host,
|
||||
public String buildParameter(DbType type, String host,
|
||||
String port, String database, String principal, String userName,
|
||||
String password, DbConnectType connectType, String other) {
|
||||
|
||||
|
|
@ -510,7 +508,7 @@ public class DataSourceService extends BaseService{
|
|||
}
|
||||
|
||||
if (CommonUtils.getKerberosStartupState() &&
|
||||
(type == DbType.HIVE || type == DbType.SPARK)){
|
||||
(type == DbType.HIVE || type == DbType.SPARK)) {
|
||||
jdbcUrl += ";principal=" + principal;
|
||||
}
|
||||
|
||||
|
|
@ -535,14 +533,14 @@ public class DataSourceService extends BaseService{
|
|||
parameterMap.put(Constants.USER, userName);
|
||||
parameterMap.put(Constants.PASSWORD, CommonUtils.encodePassword(password));
|
||||
if (CommonUtils.getKerberosStartupState() &&
|
||||
(type == DbType.HIVE || type == DbType.SPARK)){
|
||||
parameterMap.put(Constants.PRINCIPAL,principal);
|
||||
(type == DbType.HIVE || type == DbType.SPARK)) {
|
||||
parameterMap.put(Constants.PRINCIPAL, principal);
|
||||
}
|
||||
if (other != null && !"".equals(other)) {
|
||||
Map<String, String> map = JSONUtils.toMap(other);
|
||||
if (map.size() > 0) {
|
||||
StringBuilder otherSb = new StringBuilder();
|
||||
for (Map.Entry<String, String> entry: map.entrySet()) {
|
||||
for (Map.Entry<String, String> entry : map.entrySet()) {
|
||||
otherSb.append(String.format("%s=%s%s", entry.getKey(), entry.getValue(), separator));
|
||||
}
|
||||
if (!Constants.DB2.equals(type.name())) {
|
||||
|
|
@ -553,7 +551,7 @@ public class DataSourceService extends BaseService{
|
|||
|
||||
}
|
||||
|
||||
if(logger.isDebugEnabled()){
|
||||
if (logger.isDebugEnabled()) {
|
||||
logger.info("parameters map:{}", JSONUtils.toJsonString(parameterMap));
|
||||
}
|
||||
return JSONUtils.toJsonString(parameterMap);
|
||||
|
|
@ -605,7 +603,7 @@ public class DataSourceService extends BaseService{
|
|||
/**
|
||||
* delete datasource
|
||||
*
|
||||
* @param loginUser login user
|
||||
* @param loginUser login user
|
||||
* @param datasourceId data source id
|
||||
* @return delete result code
|
||||
*/
|
||||
|
|
@ -615,12 +613,12 @@ public class DataSourceService extends BaseService{
|
|||
try {
|
||||
//query datasource by id
|
||||
DataSource dataSource = dataSourceMapper.selectById(datasourceId);
|
||||
if(dataSource == null){
|
||||
if (dataSource == null) {
|
||||
logger.error("resource id {} not exist", datasourceId);
|
||||
putMsg(result, Status.RESOURCE_NOT_EXIST);
|
||||
return result;
|
||||
}
|
||||
if(!hasPerm(loginUser, dataSource.getUserId())){
|
||||
if (!hasPerm(loginUser, dataSource.getUserId())) {
|
||||
putMsg(result, Status.USER_NO_OPERATION_PERM);
|
||||
return result;
|
||||
}
|
||||
|
|
@ -628,7 +626,7 @@ public class DataSourceService extends BaseService{
|
|||
datasourceUserMapper.deleteByDatasourceId(datasourceId);
|
||||
putMsg(result, Status.SUCCESS);
|
||||
} catch (Exception e) {
|
||||
logger.error("delete datasource error",e);
|
||||
logger.error("delete datasource error", e);
|
||||
throw new RuntimeException("delete datasource error");
|
||||
}
|
||||
return result;
|
||||
|
|
@ -638,7 +636,7 @@ public class DataSourceService extends BaseService{
|
|||
* unauthorized datasource
|
||||
*
|
||||
* @param loginUser login user
|
||||
* @param userId user id
|
||||
* @param userId user id
|
||||
* @return unauthed data source result code
|
||||
*/
|
||||
public Map<String, Object> unauthDatasource(User loginUser, Integer userId) {
|
||||
|
|
@ -679,7 +677,7 @@ public class DataSourceService extends BaseService{
|
|||
* authorized datasource
|
||||
*
|
||||
* @param loginUser login user
|
||||
* @param userId user id
|
||||
* @param userId user id
|
||||
* @return authorized result code
|
||||
*/
|
||||
public Map<String, Object> authedDatasource(User loginUser, Integer userId) {
|
||||
|
|
@ -700,11 +698,11 @@ public class DataSourceService extends BaseService{
|
|||
/**
|
||||
* get host and port by address
|
||||
*
|
||||
* @param address address
|
||||
* @param address address
|
||||
* @return sting array: [host,port]
|
||||
*/
|
||||
private String[] getHostsAndPort(String address) {
|
||||
return getHostsAndPort(address,Constants.DOUBLE_SLASH);
|
||||
return getHostsAndPort(address, Constants.DOUBLE_SLASH);
|
||||
}
|
||||
|
||||
/**
|
||||
|
|
@ -714,7 +712,7 @@ public class DataSourceService extends BaseService{
|
|||
* @param separator separator
|
||||
* @return sting array: [host,port]
|
||||
*/
|
||||
private String[] getHostsAndPort(String address,String separator) {
|
||||
private String[] getHostsAndPort(String address, String separator) {
|
||||
String[] result = new String[2];
|
||||
String[] tmpArray = address.split(separator);
|
||||
String hostsAndPorts = tmpArray[tmpArray.length - 1];
|
||||
|
|
|
|||
|
|
@ -14,13 +14,18 @@
|
|||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.dolphinscheduler.api.service;
|
||||
|
||||
import java.util.Map;
|
||||
import javax.servlet.http.HttpServletResponse;
|
||||
import org.apache.dolphinscheduler.dao.entity.ProcessData;
|
||||
import org.apache.dolphinscheduler.dao.entity.User;
|
||||
|
||||
import java.util.Map;
|
||||
|
||||
import javax.servlet.http.HttpServletResponse;
|
||||
|
||||
import org.springframework.web.multipart.MultipartFile;
|
||||
|
||||
import com.fasterxml.jackson.core.JsonProcessingException;
|
||||
|
||||
/**
|
||||
|
|
@ -31,13 +36,13 @@ public interface ProcessDefinitionService {
|
|||
/**
|
||||
* create process definition
|
||||
*
|
||||
* @param loginUser login user
|
||||
* @param projectName project name
|
||||
* @param name process definition name
|
||||
* @param loginUser login user
|
||||
* @param projectName project name
|
||||
* @param name process definition name
|
||||
* @param processDefinitionJson process definition json
|
||||
* @param desc description
|
||||
* @param locations locations for nodes
|
||||
* @param connects connects for nodes
|
||||
* @param desc description
|
||||
* @param locations locations for nodes
|
||||
* @param connects connects for nodes
|
||||
* @return create result code
|
||||
* @throws JsonProcessingException JsonProcessingException
|
||||
*/
|
||||
|
|
@ -52,7 +57,7 @@ public interface ProcessDefinitionService {
|
|||
/**
|
||||
* query process definition list
|
||||
*
|
||||
* @param loginUser login user
|
||||
* @param loginUser login user
|
||||
* @param projectName project name
|
||||
* @return definition list
|
||||
*/
|
||||
|
|
@ -62,12 +67,12 @@ public interface ProcessDefinitionService {
|
|||
/**
|
||||
* query process definition list paging
|
||||
*
|
||||
* @param loginUser login user
|
||||
* @param loginUser login user
|
||||
* @param projectName project name
|
||||
* @param searchVal search value
|
||||
* @param pageNo page number
|
||||
* @param pageSize page size
|
||||
* @param userId user id
|
||||
* @param searchVal search value
|
||||
* @param pageNo page number
|
||||
* @param pageSize page size
|
||||
* @param userId user id
|
||||
* @return process definition page
|
||||
*/
|
||||
Map<String, Object> queryProcessDefinitionListPaging(User loginUser,
|
||||
|
|
@ -80,9 +85,9 @@ public interface ProcessDefinitionService {
|
|||
/**
|
||||
* query datail of process definition
|
||||
*
|
||||
* @param loginUser login user
|
||||
* @param loginUser login user
|
||||
* @param projectName project name
|
||||
* @param processId process definition id
|
||||
* @param processId process definition id
|
||||
* @return process definition detail
|
||||
*/
|
||||
|
||||
|
|
@ -92,41 +97,41 @@ public interface ProcessDefinitionService {
|
|||
|
||||
/**
|
||||
* batch copy process definition
|
||||
*
|
||||
* @param loginUser loginUser
|
||||
* @param projectName projectName
|
||||
* @param processDefinitionIds processDefinitionIds
|
||||
* @param targetProjectId targetProjectId
|
||||
* @return
|
||||
*/
|
||||
Map<String, Object> batchCopyProcessDefinition(User loginUser,
|
||||
String projectName,
|
||||
String processDefinitionIds,
|
||||
int targetProjectId);
|
||||
String projectName,
|
||||
String processDefinitionIds,
|
||||
int targetProjectId);
|
||||
|
||||
/**
|
||||
* batch move process definition
|
||||
*
|
||||
* @param loginUser loginUser
|
||||
* @param projectName projectName
|
||||
* @param processDefinitionIds processDefinitionIds
|
||||
* @param targetProjectId targetProjectId
|
||||
* @return
|
||||
*/
|
||||
Map<String, Object> batchMoveProcessDefinition(User loginUser,
|
||||
String projectName,
|
||||
String processDefinitionIds,
|
||||
int targetProjectId);
|
||||
String projectName,
|
||||
String processDefinitionIds,
|
||||
int targetProjectId);
|
||||
|
||||
/**
|
||||
* update process definition
|
||||
*
|
||||
* @param loginUser login user
|
||||
* @param projectName project name
|
||||
* @param name process definition name
|
||||
* @param id process definition id
|
||||
* @param loginUser login user
|
||||
* @param projectName project name
|
||||
* @param name process definition name
|
||||
* @param id process definition id
|
||||
* @param processDefinitionJson process definition json
|
||||
* @param desc description
|
||||
* @param locations locations for nodes
|
||||
* @param connects connects for nodes
|
||||
* @param desc description
|
||||
* @param locations locations for nodes
|
||||
* @param connects connects for nodes
|
||||
* @return update result code
|
||||
*/
|
||||
Map<String, Object> updateProcessDefinition(User loginUser,
|
||||
|
|
@ -139,9 +144,9 @@ public interface ProcessDefinitionService {
|
|||
/**
|
||||
* verify process definition name unique
|
||||
*
|
||||
* @param loginUser login user
|
||||
* @param loginUser login user
|
||||
* @param projectName project name
|
||||
* @param name name
|
||||
* @param name name
|
||||
* @return true if process definition name not exists, otherwise false
|
||||
*/
|
||||
Map<String, Object> verifyProcessDefinitionName(User loginUser,
|
||||
|
|
@ -151,8 +156,8 @@ public interface ProcessDefinitionService {
|
|||
/**
|
||||
* delete process definition by id
|
||||
*
|
||||
* @param loginUser login user
|
||||
* @param projectName project name
|
||||
* @param loginUser login user
|
||||
* @param projectName project name
|
||||
* @param processDefinitionId process definition id
|
||||
* @return delete result code
|
||||
*/
|
||||
|
|
@ -163,9 +168,9 @@ public interface ProcessDefinitionService {
|
|||
/**
|
||||
* release process definition: online / offline
|
||||
*
|
||||
* @param loginUser login user
|
||||
* @param projectName project name
|
||||
* @param id process definition id
|
||||
* @param loginUser login user
|
||||
* @param projectName project name
|
||||
* @param id process definition id
|
||||
* @param releaseState release state
|
||||
* @return release result code
|
||||
*/
|
||||
|
|
@ -177,21 +182,21 @@ public interface ProcessDefinitionService {
|
|||
/**
|
||||
* batch export process definition by ids
|
||||
*
|
||||
* @param loginUser login user
|
||||
* @param projectName project name
|
||||
* @param loginUser login user
|
||||
* @param projectName project name
|
||||
* @param processDefinitionIds process definition ids
|
||||
* @param response http servlet response
|
||||
* @param response http servlet response
|
||||
*/
|
||||
void batchExportProcessDefinitionByIds(User loginUser,
|
||||
String projectName,
|
||||
String processDefinitionIds,
|
||||
HttpServletResponse response);
|
||||
String projectName,
|
||||
String processDefinitionIds,
|
||||
HttpServletResponse response);
|
||||
|
||||
/**
|
||||
* import process definition
|
||||
*
|
||||
* @param loginUser login user
|
||||
* @param file process metadata json file
|
||||
* @param loginUser login user
|
||||
* @param file process metadata json file
|
||||
* @param currentProjectName current project name
|
||||
* @return import process
|
||||
*/
|
||||
|
|
@ -202,7 +207,7 @@ public interface ProcessDefinitionService {
|
|||
/**
|
||||
* check the process definition node meets the specifications
|
||||
*
|
||||
* @param processData process data
|
||||
* @param processData process data
|
||||
* @param processDefinitionJson process definition json
|
||||
* @return check result code
|
||||
*/
|
||||
|
|
@ -237,11 +242,23 @@ public interface ProcessDefinitionService {
|
|||
* Encapsulates the TreeView structure
|
||||
*
|
||||
* @param processId process definition id
|
||||
* @param limit limit
|
||||
* @param limit limit
|
||||
* @return tree view json data
|
||||
* @throws Exception exception
|
||||
*/
|
||||
Map<String, Object> viewTree(Integer processId,
|
||||
Integer limit) throws Exception;
|
||||
|
||||
/**
|
||||
* switch the defined process definition verison
|
||||
*
|
||||
* @param loginUser login user
|
||||
* @param projectName project name
|
||||
* @param processDefinitionId process definition id
|
||||
* @param version the version user want to switch
|
||||
* @return switch process definition version result code
|
||||
*/
|
||||
Map<String, Object> switchProcessDefinitionVersion(User loginUser, String projectName
|
||||
, int processDefinitionId, long version);
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -0,0 +1,70 @@
|
|||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
* contributor license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright ownership.
|
||||
* The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
* (the "License"); you may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.dolphinscheduler.api.service;
|
||||
|
||||
import org.apache.dolphinscheduler.dao.entity.ProcessDefinition;
|
||||
import org.apache.dolphinscheduler.dao.entity.ProcessDefinitionVersion;
|
||||
import org.apache.dolphinscheduler.dao.entity.User;
|
||||
|
||||
import java.util.Map;
|
||||
|
||||
public interface ProcessDefinitionVersionService {
|
||||
|
||||
/**
|
||||
* add the newest version of one process definition
|
||||
*
|
||||
* @param processDefinition the process definition that need to record version
|
||||
* @return the newest version number of this process definition
|
||||
*/
|
||||
long addProcessDefinitionVersion(ProcessDefinition processDefinition);
|
||||
|
||||
/**
|
||||
* query the pagination versions info by one certain process definition id
|
||||
*
|
||||
* @param loginUser login user info to check auth
|
||||
* @param projectName process definition project name
|
||||
* @param pageNo page number
|
||||
* @param pageSize page size
|
||||
* @param processDefinitionId process definition id
|
||||
* @return the pagination process definition versions info of the certain process definition
|
||||
*/
|
||||
Map<String, Object> queryProcessDefinitionVersions(User loginUser, String projectName,
|
||||
int pageNo, int pageSize, int processDefinitionId);
|
||||
|
||||
/**
|
||||
* query one certain process definition version by version number and process definition id
|
||||
*
|
||||
* @param processDefinitionId process definition id
|
||||
* @param version version number
|
||||
* @return the process definition version info
|
||||
*/
|
||||
ProcessDefinitionVersion queryByProcessDefinitionIdAndVersion(int processDefinitionId,
|
||||
long version);
|
||||
|
||||
/**
|
||||
* delete one certain process definition by version number and process definition id
|
||||
*
|
||||
* @param loginUser login user info to check auth
|
||||
* @param projectName process definition project name
|
||||
* @param processDefinitionId process definition id
|
||||
* @param version version number
|
||||
* @return delele result code
|
||||
*/
|
||||
Map<String, Object> deleteByProcessDefinitionIdAndVersion(User loginUser, String projectName,
|
||||
int processDefinitionId, long version);
|
||||
}
|
||||
|
|
@ -14,8 +14,8 @@
|
|||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.apache.dolphinscheduler.api.service;
|
||||
|
||||
package org.apache.dolphinscheduler.api.service;
|
||||
|
||||
import static org.apache.dolphinscheduler.common.Constants.DATA_LIST;
|
||||
import static org.apache.dolphinscheduler.common.Constants.DEPENDENT_SPLIT;
|
||||
|
|
@ -24,19 +24,6 @@ import static org.apache.dolphinscheduler.common.Constants.LOCAL_PARAMS;
|
|||
import static org.apache.dolphinscheduler.common.Constants.PROCESS_INSTANCE_STATE;
|
||||
import static org.apache.dolphinscheduler.common.Constants.TASK_LIST;
|
||||
|
||||
import java.io.BufferedReader;
|
||||
import java.io.ByteArrayInputStream;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStreamReader;
|
||||
import java.nio.charset.StandardCharsets;
|
||||
import java.text.ParseException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Date;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Objects;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import org.apache.dolphinscheduler.api.dto.gantt.GanttDto;
|
||||
import org.apache.dolphinscheduler.api.dto.gantt.Task;
|
||||
|
|
@ -72,6 +59,21 @@ import org.apache.dolphinscheduler.dao.mapper.ProjectMapper;
|
|||
import org.apache.dolphinscheduler.dao.mapper.TaskInstanceMapper;
|
||||
import org.apache.dolphinscheduler.dao.utils.DagHelper;
|
||||
import org.apache.dolphinscheduler.service.process.ProcessService;
|
||||
|
||||
import java.io.BufferedReader;
|
||||
import java.io.ByteArrayInputStream;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStreamReader;
|
||||
import java.nio.charset.StandardCharsets;
|
||||
import java.text.ParseException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Date;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Objects;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
|
|
@ -108,6 +110,9 @@ public class ProcessInstanceService extends BaseService {
|
|||
@Autowired
|
||||
ProcessDefinitionService processDefinitionService;
|
||||
|
||||
@Autowired
|
||||
ProcessDefinitionVersionService processDefinitionVersionService;
|
||||
|
||||
@Autowired
|
||||
ExecutorService execService;
|
||||
|
||||
|
|
@ -118,18 +123,11 @@ public class ProcessInstanceService extends BaseService {
|
|||
LoggerService loggerService;
|
||||
|
||||
|
||||
|
||||
@Autowired
|
||||
UsersService usersService;
|
||||
|
||||
/**
|
||||
* return top n SUCCESS process instance order by running time which started between startTime and endTime
|
||||
* @param loginUser
|
||||
* @param projectName
|
||||
* @param size
|
||||
* @param startTime
|
||||
* @param endTime
|
||||
* @return
|
||||
*/
|
||||
public Map<String, Object> queryTopNLongestRunningProcessInstance(User loginUser, String projectName, int size, String startTime, String endTime) {
|
||||
Map<String, Object> result = new HashMap<>();
|
||||
|
|
@ -155,7 +153,7 @@ public class ProcessInstanceService extends BaseService {
|
|||
return result;
|
||||
}
|
||||
Date end = DateUtils.stringToDate(endTime);
|
||||
if(start == null || end == null) {
|
||||
if (start == null || end == null) {
|
||||
putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, "startDate,endDate");
|
||||
return result;
|
||||
}
|
||||
|
|
@ -169,6 +167,7 @@ public class ProcessInstanceService extends BaseService {
|
|||
putMsg(result, Status.SUCCESS);
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* query process instance by id
|
||||
*
|
||||
|
|
@ -214,7 +213,7 @@ public class ProcessInstanceService extends BaseService {
|
|||
*/
|
||||
public Map<String, Object> queryProcessInstanceList(User loginUser, String projectName, Integer processDefineId,
|
||||
String startDate, String endDate,
|
||||
String searchVal, String executorName,ExecutionStatus stateType, String host,
|
||||
String searchVal, String executorName, ExecutionStatus stateType, String host,
|
||||
Integer pageNo, Integer pageSize) {
|
||||
|
||||
Map<String, Object> result = new HashMap<>();
|
||||
|
|
@ -246,18 +245,18 @@ public class ProcessInstanceService extends BaseService {
|
|||
return result;
|
||||
}
|
||||
|
||||
Page<ProcessInstance> page = new Page(pageNo, pageSize);
|
||||
Page<ProcessInstance> page = new Page<>(pageNo, pageSize);
|
||||
PageInfo pageInfo = new PageInfo<ProcessInstance>(pageNo, pageSize);
|
||||
int executorId = usersService.getUserIdByName(executorName);
|
||||
|
||||
IPage<ProcessInstance> processInstanceList =
|
||||
processInstanceMapper.queryProcessInstanceListPaging(page,
|
||||
project.getId(), processDefineId, searchVal, executorId,statusArray, host, start, end);
|
||||
project.getId(), processDefineId, searchVal, executorId, statusArray, host, start, end);
|
||||
|
||||
List<ProcessInstance> processInstances = processInstanceList.getRecords();
|
||||
|
||||
for(ProcessInstance processInstance: processInstances){
|
||||
processInstance.setDuration(DateUtils.differSec(processInstance.getStartTime(),processInstance.getEndTime()));
|
||||
for (ProcessInstance processInstance : processInstances) {
|
||||
processInstance.setDuration(DateUtils.differSec(processInstance.getStartTime(), processInstance.getEndTime()));
|
||||
User executor = usersService.queryUser(processInstance.getExecutorId());
|
||||
if (null != executor) {
|
||||
processInstance.setExecutorName(executor.getUserName());
|
||||
|
|
@ -271,8 +270,6 @@ public class ProcessInstanceService extends BaseService {
|
|||
return result;
|
||||
}
|
||||
|
||||
|
||||
|
||||
/**
|
||||
* query task list by process instance id
|
||||
*
|
||||
|
|
@ -305,14 +302,13 @@ public class ProcessInstanceService extends BaseService {
|
|||
|
||||
/**
|
||||
* add dependent result for dependent task
|
||||
* @param taskInstanceList
|
||||
*/
|
||||
private void addDependResultForTaskList(List<TaskInstance> taskInstanceList) throws IOException {
|
||||
for(TaskInstance taskInstance: taskInstanceList){
|
||||
if(taskInstance.getTaskType().equalsIgnoreCase(TaskType.DEPENDENT.toString())){
|
||||
for (TaskInstance taskInstance : taskInstanceList) {
|
||||
if (taskInstance.getTaskType().equalsIgnoreCase(TaskType.DEPENDENT.toString())) {
|
||||
Result logResult = loggerService.queryLog(
|
||||
taskInstance.getId(), 0, 4098);
|
||||
if(logResult.getCode() == Status.SUCCESS.ordinal()){
|
||||
if (logResult.getCode() == Status.SUCCESS.ordinal()) {
|
||||
String log = (String) logResult.getData();
|
||||
Map<String, DependResult> resultMap = parseLogForDependentResult(log);
|
||||
taskInstance.setDependentResult(JSONUtils.toJsonString(resultMap));
|
||||
|
|
@ -321,24 +317,24 @@ public class ProcessInstanceService extends BaseService {
|
|||
}
|
||||
}
|
||||
|
||||
public Map<String,DependResult> parseLogForDependentResult(String log) throws IOException {
|
||||
public Map<String, DependResult> parseLogForDependentResult(String log) throws IOException {
|
||||
Map<String, DependResult> resultMap = new HashMap<>();
|
||||
if(StringUtils.isEmpty(log)){
|
||||
if (StringUtils.isEmpty(log)) {
|
||||
return resultMap;
|
||||
}
|
||||
|
||||
BufferedReader br = new BufferedReader(new InputStreamReader(new ByteArrayInputStream(log.getBytes(
|
||||
StandardCharsets.UTF_8)), StandardCharsets.UTF_8));
|
||||
StandardCharsets.UTF_8)), StandardCharsets.UTF_8));
|
||||
String line;
|
||||
while ((line = br.readLine()) != null) {
|
||||
if(line.contains(DEPENDENT_SPLIT)){
|
||||
if (line.contains(DEPENDENT_SPLIT)) {
|
||||
String[] tmpStringArray = line.split(":\\|\\|");
|
||||
if(tmpStringArray.length != 2){
|
||||
if (tmpStringArray.length != 2) {
|
||||
continue;
|
||||
}
|
||||
String dependResultString = tmpStringArray[1];
|
||||
String[] dependStringArray = dependResultString.split(",");
|
||||
if(dependStringArray.length != 2){
|
||||
if (dependStringArray.length != 2) {
|
||||
continue;
|
||||
}
|
||||
String key = dependStringArray[0].trim();
|
||||
|
|
@ -349,7 +345,6 @@ public class ProcessInstanceService extends BaseService {
|
|||
return resultMap;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* query sub process instance detail info by task id
|
||||
*
|
||||
|
|
@ -462,7 +457,7 @@ public class ProcessInstanceService extends BaseService {
|
|||
processInstance.setTimeout(timeout);
|
||||
Tenant tenant = processService.getTenantForProcess(processData.getTenantId(),
|
||||
processDefinition.getUserId());
|
||||
if(tenant != null){
|
||||
if (tenant != null) {
|
||||
processInstance.setTenantCode(tenant.getTenantCode());
|
||||
}
|
||||
processInstance.setProcessInstanceJson(processInstanceJson);
|
||||
|
|
@ -477,6 +472,11 @@ public class ProcessInstanceService extends BaseService {
|
|||
processDefinition.setLocations(locations);
|
||||
processDefinition.setConnects(connects);
|
||||
processDefinition.setTimeout(timeout);
|
||||
processDefinition.setUpdateTime(new Date());
|
||||
|
||||
// add process definition version
|
||||
long version = processDefinitionVersionService.addProcessDefinitionVersion(processDefinition);
|
||||
processDefinition.setVersion(version);
|
||||
updateDefine = processDefineMapper.updateById(processDefinition);
|
||||
}
|
||||
if (update > 0 && updateDefine > 0) {
|
||||
|
|
@ -485,7 +485,6 @@ public class ProcessInstanceService extends BaseService {
|
|||
putMsg(result, Status.UPDATE_PROCESS_INSTANCE_ERROR);
|
||||
}
|
||||
|
||||
|
||||
return result;
|
||||
|
||||
}
|
||||
|
|
@ -532,6 +531,7 @@ public class ProcessInstanceService extends BaseService {
|
|||
|
||||
/**
|
||||
* delete process instance by id, at the same time,delete task instance and their mapping relation data
|
||||
*
|
||||
* @param loginUser login user
|
||||
* @param projectName project name
|
||||
* @param processInstanceId process instance id
|
||||
|
|
@ -554,13 +554,10 @@ public class ProcessInstanceService extends BaseService {
|
|||
return result;
|
||||
}
|
||||
|
||||
|
||||
|
||||
processService.removeTaskLogFile(processInstanceId);
|
||||
// delete database cascade
|
||||
int delete = processService.deleteWorkProcessInstanceById(processInstanceId);
|
||||
|
||||
|
||||
processService.deleteAllSubWorkProcessByParentId(processInstanceId);
|
||||
processService.deleteWorkProcessMapByParentId(processInstanceId);
|
||||
|
||||
|
|
@ -592,7 +589,6 @@ public class ProcessInstanceService extends BaseService {
|
|||
.getBusinessTime(processInstance.getCmdTypeIfComplement(),
|
||||
processInstance.getScheduleTime());
|
||||
|
||||
|
||||
String workflowInstanceJson = processInstance.getProcessInstanceJson();
|
||||
|
||||
ProcessData workflowData = JSONUtils.parseObject(workflowInstanceJson, ProcessData.class);
|
||||
|
|
@ -603,10 +599,9 @@ public class ProcessInstanceService extends BaseService {
|
|||
List<Property> globalParams = new ArrayList<>();
|
||||
|
||||
if (userDefinedParams != null && userDefinedParams.length() > 0) {
|
||||
globalParams = JSONUtils.toList(userDefinedParams, Property.class);
|
||||
globalParams = JSONUtils.toList(userDefinedParams, Property.class);
|
||||
}
|
||||
|
||||
|
||||
List<TaskNode> taskNodeList = workflowData.getTasks();
|
||||
|
||||
// global param string
|
||||
|
|
@ -618,7 +613,7 @@ public class ProcessInstanceService extends BaseService {
|
|||
}
|
||||
|
||||
// local params
|
||||
Map<String, Map<String,Object>> localUserDefParams = new HashMap<>();
|
||||
Map<String, Map<String, Object>> localUserDefParams = new HashMap<>();
|
||||
for (TaskNode taskNode : taskNodeList) {
|
||||
String parameter = taskNode.getParams();
|
||||
Map<String, String> map = JSONUtils.toMap(parameter);
|
||||
|
|
@ -627,9 +622,9 @@ public class ProcessInstanceService extends BaseService {
|
|||
localParams = ParameterUtils.convertParameterPlaceholders(localParams, timeParams);
|
||||
List<Property> localParamsList = JSONUtils.toList(localParams, Property.class);
|
||||
|
||||
Map<String,Object> localParamsMap = new HashMap<>();
|
||||
localParamsMap.put("taskType",taskNode.getType());
|
||||
localParamsMap.put("localParamsList",localParamsList);
|
||||
Map<String, Object> localParamsMap = new HashMap<>();
|
||||
localParamsMap.put("taskType", taskNode.getType());
|
||||
localParamsMap.put("localParamsList", localParamsList);
|
||||
if (CollectionUtils.isNotEmpty(localParamsList)) {
|
||||
localUserDefParams.put(taskNode.getName(), localParamsMap);
|
||||
}
|
||||
|
|
|
|||
|
|
@ -17,8 +17,6 @@
|
|||
package org.apache.dolphinscheduler.api.service;
|
||||
|
||||
|
||||
import com.baomidou.mybatisplus.core.metadata.IPage;
|
||||
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
|
||||
import org.apache.dolphinscheduler.api.enums.Status;
|
||||
import org.apache.dolphinscheduler.api.utils.PageInfo;
|
||||
import org.apache.dolphinscheduler.common.Constants;
|
||||
|
|
@ -32,11 +30,20 @@ import org.apache.dolphinscheduler.dao.entity.User;
|
|||
import org.apache.dolphinscheduler.dao.mapper.ProjectMapper;
|
||||
import org.apache.dolphinscheduler.dao.mapper.TaskInstanceMapper;
|
||||
import org.apache.dolphinscheduler.service.process.ProcessService;
|
||||
|
||||
import java.text.MessageFormat;
|
||||
import java.util.Date;
|
||||
import java.util.HashMap;
|
||||
import java.util.HashSet;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.stereotype.Service;
|
||||
|
||||
import java.text.MessageFormat;
|
||||
import java.util.*;
|
||||
import com.baomidou.mybatisplus.core.metadata.IPage;
|
||||
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
|
||||
|
||||
/**
|
||||
* task instance service
|
||||
|
|
@ -79,10 +86,10 @@ public class TaskInstanceService extends BaseService {
|
|||
* @param pageSize page size
|
||||
* @return task list page
|
||||
*/
|
||||
public Map<String,Object> queryTaskListPaging(User loginUser, String projectName,
|
||||
Integer processInstanceId, String taskName, String executorName, String startDate,
|
||||
String endDate, String searchVal, ExecutionStatus stateType,String host,
|
||||
Integer pageNo, Integer pageSize) {
|
||||
public Map<String, Object> queryTaskListPaging(User loginUser, String projectName,
|
||||
Integer processInstanceId, String taskName, String executorName, String startDate,
|
||||
String endDate, String searchVal, ExecutionStatus stateType, String host,
|
||||
Integer pageNo, Integer pageSize) {
|
||||
Map<String, Object> result = new HashMap<>();
|
||||
Project project = projectMapper.queryByName(projectName);
|
||||
|
||||
|
|
@ -93,23 +100,23 @@ public class TaskInstanceService extends BaseService {
|
|||
}
|
||||
|
||||
int[] statusArray = null;
|
||||
if(stateType != null){
|
||||
if (stateType != null) {
|
||||
statusArray = new int[]{stateType.ordinal()};
|
||||
}
|
||||
|
||||
Date start = null;
|
||||
Date end = null;
|
||||
try {
|
||||
if(StringUtils.isNotEmpty(startDate)){
|
||||
start = DateUtils.getScheduleDate(startDate);
|
||||
if (StringUtils.isNotEmpty(startDate)) {
|
||||
start = DateUtils.getScheduleDate(startDate);
|
||||
if (start == null) {
|
||||
return generateInvalidParamRes(result, "startDate");
|
||||
}
|
||||
if(StringUtils.isNotEmpty( endDate)){
|
||||
end = DateUtils.getScheduleDate(endDate);
|
||||
}
|
||||
if (StringUtils.isNotEmpty(endDate)) {
|
||||
end = DateUtils.getScheduleDate(endDate);
|
||||
if (end == null) {
|
||||
return generateInvalidParamRes(result, "endDate");
|
||||
}
|
||||
} catch (Exception e) {
|
||||
result.put(Constants.STATUS, Status.REQUEST_PARAMS_NOT_VALID_ERROR);
|
||||
result.put(Constants.MSG, MessageFormat.format(Status.REQUEST_PARAMS_NOT_VALID_ERROR.getMsg(), "startDate,endDate"));
|
||||
return result;
|
||||
}
|
||||
|
||||
Page<TaskInstance> page = new Page(pageNo, pageSize);
|
||||
|
|
@ -124,15 +131,15 @@ public class TaskInstanceService extends BaseService {
|
|||
exclusionSet.add("taskJson");
|
||||
List<TaskInstance> taskInstanceList = taskInstanceIPage.getRecords();
|
||||
|
||||
for(TaskInstance taskInstance : taskInstanceList){
|
||||
for (TaskInstance taskInstance : taskInstanceList) {
|
||||
taskInstance.setDuration(DateUtils.differSec(taskInstance.getStartTime(), taskInstance.getEndTime()));
|
||||
User executor = usersService.queryUser(taskInstance.getExecutorId());
|
||||
if (null != executor) {
|
||||
taskInstance.setExecutorName(executor.getUserName());
|
||||
}
|
||||
}
|
||||
pageInfo.setTotalCount((int)taskInstanceIPage.getTotal());
|
||||
pageInfo.setLists(CollectionUtils.getListByExclusion(taskInstanceIPage.getRecords(),exclusionSet));
|
||||
pageInfo.setTotalCount((int) taskInstanceIPage.getTotal());
|
||||
pageInfo.setLists(CollectionUtils.getListByExclusion(taskInstanceIPage.getRecords(), exclusionSet));
|
||||
result.put(Constants.DATA_LIST, pageInfo);
|
||||
putMsg(result, Status.SUCCESS);
|
||||
|
||||
|
|
@ -181,6 +188,15 @@ public class TaskInstanceService extends BaseService {
|
|||
putMsg(result, Status.FORCE_TASK_SUCCESS_ERROR);
|
||||
}
|
||||
|
||||
/***
|
||||
* generate {@link org.apache.dolphinscheduler.api.enums.Status#REQUEST_PARAMS_NOT_VALID_ERROR} res with param name
|
||||
* @param result exist result map
|
||||
* @param params invalid params name
|
||||
* @return update result map
|
||||
*/
|
||||
private Map<String, Object> generateInvalidParamRes(Map<String, Object> result, String params) {
|
||||
result.put(Constants.STATUS, Status.REQUEST_PARAMS_NOT_VALID_ERROR);
|
||||
result.put(Constants.MSG, MessageFormat.format(Status.REQUEST_PARAMS_NOT_VALID_ERROR.getMsg(), params));
|
||||
return result;
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -14,338 +14,85 @@
|
|||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.dolphinscheduler.api.service;
|
||||
|
||||
import com.baomidou.mybatisplus.core.metadata.IPage;
|
||||
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
|
||||
import org.apache.dolphinscheduler.api.enums.Status;
|
||||
import org.apache.dolphinscheduler.api.utils.PageInfo;
|
||||
import org.apache.dolphinscheduler.api.utils.Result;
|
||||
import org.apache.dolphinscheduler.common.Constants;
|
||||
import org.apache.dolphinscheduler.common.utils.CollectionUtils;
|
||||
import org.apache.dolphinscheduler.common.utils.HadoopUtils;
|
||||
import org.apache.dolphinscheduler.common.utils.PropertyUtils;
|
||||
import org.apache.dolphinscheduler.common.utils.StringUtils;
|
||||
import org.apache.dolphinscheduler.dao.entity.ProcessDefinition;
|
||||
import org.apache.dolphinscheduler.dao.entity.ProcessInstance;
|
||||
import org.apache.dolphinscheduler.dao.entity.Tenant;
|
||||
import org.apache.dolphinscheduler.dao.entity.User;
|
||||
import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionMapper;
|
||||
import org.apache.dolphinscheduler.dao.mapper.ProcessInstanceMapper;
|
||||
import org.apache.dolphinscheduler.dao.mapper.TenantMapper;
|
||||
import org.apache.dolphinscheduler.dao.mapper.UserMapper;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.stereotype.Service;
|
||||
import org.springframework.transaction.annotation.Transactional;
|
||||
|
||||
import java.util.Date;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
/**
|
||||
* tenant service
|
||||
*/
|
||||
@Service
|
||||
public class TenantService extends BaseService{
|
||||
public interface TenantService {
|
||||
|
||||
private static final Logger logger = LoggerFactory.getLogger(TenantService.class);
|
||||
|
||||
@Autowired
|
||||
private TenantMapper tenantMapper;
|
||||
|
||||
@Autowired
|
||||
private ProcessInstanceMapper processInstanceMapper;
|
||||
|
||||
@Autowired
|
||||
private ProcessDefinitionMapper processDefinitionMapper;
|
||||
|
||||
@Autowired
|
||||
private UserMapper userMapper;
|
||||
|
||||
|
||||
|
||||
/**
|
||||
* create tenant
|
||||
*
|
||||
*
|
||||
* @param loginUser login user
|
||||
* @param tenantCode tenant code
|
||||
* @param tenantName tenant name
|
||||
* @param queueId queue id
|
||||
* @param desc description
|
||||
* @return create result code
|
||||
* @throws Exception exception
|
||||
*/
|
||||
@Transactional(rollbackFor = Exception.class)
|
||||
public Map<String,Object> createTenant(User loginUser,
|
||||
String tenantCode,
|
||||
String tenantName,
|
||||
int queueId,
|
||||
String desc) throws Exception {
|
||||
|
||||
Map<String, Object> result = new HashMap<>();
|
||||
result.put(Constants.STATUS, false);
|
||||
if (checkAdmin(loginUser, result)) {
|
||||
return result;
|
||||
}
|
||||
|
||||
if (checkTenantExists(tenantCode)){
|
||||
putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, tenantCode);
|
||||
return result;
|
||||
}
|
||||
|
||||
|
||||
Tenant tenant = new Tenant();
|
||||
Date now = new Date();
|
||||
|
||||
if (!tenantCode.matches("^[0-9a-zA-Z_.-]{1,}$") || tenantCode.startsWith("-") || tenantCode.startsWith(".")){
|
||||
putMsg(result, Status.VERIFY_TENANT_CODE_ERROR);
|
||||
return result;
|
||||
}
|
||||
tenant.setTenantCode(tenantCode);
|
||||
tenant.setTenantName(tenantName);
|
||||
tenant.setQueueId(queueId);
|
||||
tenant.setDescription(desc);
|
||||
tenant.setCreateTime(now);
|
||||
tenant.setUpdateTime(now);
|
||||
|
||||
// save
|
||||
tenantMapper.insert(tenant);
|
||||
|
||||
// if hdfs startup
|
||||
if (PropertyUtils.getResUploadStartupState()){
|
||||
createTenantDirIfNotExists(tenantCode);
|
||||
}
|
||||
|
||||
putMsg(result, Status.SUCCESS);
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
|
||||
|
||||
/**
|
||||
* query tenant list paging
|
||||
*
|
||||
* @param loginUser login user
|
||||
* @param searchVal search value
|
||||
* @param pageNo page number
|
||||
* @param pageSize page size
|
||||
* @return tenant list page
|
||||
*/
|
||||
public Map<String,Object> queryTenantList(User loginUser, String searchVal, Integer pageNo, Integer pageSize) {
|
||||
|
||||
Map<String, Object> result = new HashMap<>();
|
||||
if (checkAdmin(loginUser, result)) {
|
||||
return result;
|
||||
}
|
||||
|
||||
Page<Tenant> page = new Page(pageNo, pageSize);
|
||||
IPage<Tenant> tenantIPage = tenantMapper.queryTenantPaging(page, searchVal);
|
||||
PageInfo<Tenant> pageInfo = new PageInfo<>(pageNo, pageSize);
|
||||
pageInfo.setTotalCount((int)tenantIPage.getTotal());
|
||||
pageInfo.setLists(tenantIPage.getRecords());
|
||||
result.put(Constants.DATA_LIST, pageInfo);
|
||||
|
||||
putMsg(result, Status.SUCCESS);
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* updateProcessInstance tenant
|
||||
*
|
||||
* @param loginUser login user
|
||||
* @param id tennat id
|
||||
* @param tenantCode tennat code
|
||||
* @param tenantName tennat name
|
||||
* @param queueId queue id
|
||||
* @param desc description
|
||||
* @return update result code
|
||||
* @throws Exception exception
|
||||
*/
|
||||
public Map<String, Object> updateTenant(User loginUser,int id,String tenantCode, String tenantName, int queueId, String desc) throws Exception {
|
||||
|
||||
Map<String, Object> result = new HashMap<>();
|
||||
result.put(Constants.STATUS, false);
|
||||
|
||||
if (checkAdmin(loginUser, result)) {
|
||||
return result;
|
||||
}
|
||||
|
||||
Tenant tenant = tenantMapper.queryById(id);
|
||||
|
||||
if (tenant == null){
|
||||
putMsg(result, Status.TENANT_NOT_EXIST);
|
||||
return result;
|
||||
}
|
||||
|
||||
// updateProcessInstance tenant
|
||||
/**
|
||||
* if the tenant code is modified, the original resource needs to be copied to the new tenant.
|
||||
* create tenant
|
||||
*
|
||||
* @param loginUser login user
|
||||
* @param tenantCode tenant code
|
||||
* @param tenantName tenant name
|
||||
* @param queueId queue id
|
||||
* @param desc description
|
||||
* @return create result code
|
||||
* @throws Exception exception
|
||||
*/
|
||||
if (!tenant.getTenantCode().equals(tenantCode)){
|
||||
if (checkTenantExists(tenantCode)){
|
||||
// if hdfs startup
|
||||
if (PropertyUtils.getResUploadStartupState()){
|
||||
String resourcePath = HadoopUtils.getHdfsDataBasePath() + "/" + tenantCode + "/resources";
|
||||
String udfsPath = HadoopUtils.getHdfsUdfDir(tenantCode);
|
||||
//init hdfs resource
|
||||
HadoopUtils.getInstance().mkdir(resourcePath);
|
||||
HadoopUtils.getInstance().mkdir(udfsPath);
|
||||
}
|
||||
}else {
|
||||
putMsg(result, Status.TENANT_CODE_HAS_ALREADY_EXISTS);
|
||||
return result;
|
||||
}
|
||||
}
|
||||
Map<String, Object> createTenant(User loginUser,
|
||||
String tenantCode,
|
||||
String tenantName,
|
||||
int queueId,
|
||||
String desc) throws Exception;
|
||||
|
||||
Date now = new Date();
|
||||
/**
|
||||
* query tenant list paging
|
||||
*
|
||||
* @param loginUser login user
|
||||
* @param searchVal search value
|
||||
* @param pageNo page number
|
||||
* @param pageSize page size
|
||||
* @return tenant list page
|
||||
*/
|
||||
Map<String, Object> queryTenantList(User loginUser, String searchVal, Integer pageNo, Integer pageSize);
|
||||
|
||||
if (StringUtils.isNotEmpty(tenantCode)){
|
||||
tenant.setTenantCode(tenantCode);
|
||||
}
|
||||
/**
|
||||
* updateProcessInstance tenant
|
||||
*
|
||||
* @param loginUser login user
|
||||
* @param id tennat id
|
||||
* @param tenantCode tennat code
|
||||
* @param tenantName tennat name
|
||||
* @param queueId queue id
|
||||
* @param desc description
|
||||
* @return update result code
|
||||
* @throws Exception exception
|
||||
*/
|
||||
Map<String, Object> updateTenant(User loginUser, int id, String tenantCode, String tenantName, int queueId,
|
||||
String desc) throws Exception;
|
||||
|
||||
if (StringUtils.isNotEmpty(tenantName)){
|
||||
tenant.setTenantName(tenantName);
|
||||
}
|
||||
/**
|
||||
* delete tenant
|
||||
*
|
||||
* @param loginUser login user
|
||||
* @param id tenant id
|
||||
* @return delete result code
|
||||
* @throws Exception exception
|
||||
*/
|
||||
Map<String, Object> deleteTenantById(User loginUser, int id) throws Exception;
|
||||
|
||||
if (queueId != 0){
|
||||
tenant.setQueueId(queueId);
|
||||
}
|
||||
tenant.setDescription(desc);
|
||||
tenant.setUpdateTime(now);
|
||||
tenantMapper.updateById(tenant);
|
||||
/**
|
||||
* query tenant list
|
||||
*
|
||||
* @param loginUser login user
|
||||
* @return tenant list
|
||||
*/
|
||||
Map<String, Object> queryTenantList(User loginUser);
|
||||
|
||||
result.put(Constants.STATUS, Status.SUCCESS);
|
||||
result.put(Constants.MSG, Status.SUCCESS.getMsg());
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* delete tenant
|
||||
*
|
||||
* @param loginUser login user
|
||||
* @param id tenant id
|
||||
* @return delete result code
|
||||
* @throws Exception exception
|
||||
*/
|
||||
@Transactional(rollbackFor = Exception.class)
|
||||
public Map<String, Object> deleteTenantById(User loginUser, int id) throws Exception {
|
||||
Map<String, Object> result = new HashMap<>();
|
||||
|
||||
if (checkAdmin(loginUser, result)) {
|
||||
return result;
|
||||
}
|
||||
|
||||
Tenant tenant = tenantMapper.queryById(id);
|
||||
if (tenant == null){
|
||||
putMsg(result, Status.TENANT_NOT_EXIST);
|
||||
return result;
|
||||
}
|
||||
|
||||
List<ProcessInstance> processInstances = getProcessInstancesByTenant(tenant);
|
||||
if(CollectionUtils.isNotEmpty(processInstances)){
|
||||
putMsg(result, Status.DELETE_TENANT_BY_ID_FAIL, processInstances.size());
|
||||
return result;
|
||||
}
|
||||
|
||||
List<ProcessDefinition> processDefinitions = processDefinitionMapper.queryDefinitionListByTenant(tenant.getId());
|
||||
if(CollectionUtils.isNotEmpty(processDefinitions)){
|
||||
putMsg(result, Status.DELETE_TENANT_BY_ID_FAIL_DEFINES, processDefinitions.size());
|
||||
return result;
|
||||
}
|
||||
|
||||
List<User> userList = userMapper.queryUserListByTenant(tenant.getId());
|
||||
if(CollectionUtils.isNotEmpty(userList)){
|
||||
putMsg(result, Status.DELETE_TENANT_BY_ID_FAIL_USERS, userList.size());
|
||||
return result;
|
||||
}
|
||||
|
||||
// if resource upload startup
|
||||
if (PropertyUtils.getResUploadStartupState()){
|
||||
String tenantPath = HadoopUtils.getHdfsDataBasePath() + "/" + tenant.getTenantCode();
|
||||
|
||||
if (HadoopUtils.getInstance().exists(tenantPath)){
|
||||
HadoopUtils.getInstance().delete(tenantPath, true);
|
||||
}
|
||||
}
|
||||
|
||||
tenantMapper.deleteById(id);
|
||||
processInstanceMapper.updateProcessInstanceByTenantId(id, -1);
|
||||
putMsg(result, Status.SUCCESS);
|
||||
return result;
|
||||
}
|
||||
|
||||
private List<ProcessInstance> getProcessInstancesByTenant(Tenant tenant) {
|
||||
return processInstanceMapper.queryByTenantIdAndStatus(tenant.getId(), org.apache.dolphinscheduler.common.Constants.NOT_TERMINATED_STATES);
|
||||
}
|
||||
|
||||
/**
|
||||
* query tenant list
|
||||
*
|
||||
* @param loginUser login user
|
||||
* @return tenant list
|
||||
*/
|
||||
public Map<String, Object> queryTenantList(User loginUser) {
|
||||
|
||||
Map<String, Object> result = new HashMap<>();
|
||||
|
||||
List<Tenant> resourceList = tenantMapper.selectList(null);
|
||||
result.put(Constants.DATA_LIST, resourceList);
|
||||
putMsg(result, Status.SUCCESS);
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* query tenant list via tenant code
|
||||
* @param tenantCode tenant code
|
||||
* @return tenant list
|
||||
*/
|
||||
public Map<String, Object> queryTenantList(String tenantCode) {
|
||||
Map<String, Object> result = new HashMap<>();
|
||||
|
||||
List<Tenant> resourceList = tenantMapper.queryByTenantCode(tenantCode);
|
||||
if (CollectionUtils.isNotEmpty(resourceList)) {
|
||||
result.put(Constants.DATA_LIST, resourceList);
|
||||
putMsg(result, Status.SUCCESS);
|
||||
} else {
|
||||
putMsg(result, Status.TENANT_NOT_EXIST);
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* verify tenant code
|
||||
*
|
||||
* @param tenantCode tenant code
|
||||
* @return true if tenant code can user, otherwise return false
|
||||
*/
|
||||
public Result verifyTenantCode(String tenantCode) {
|
||||
Result result = new Result();
|
||||
if (checkTenantExists(tenantCode)) {
|
||||
logger.error("tenant {} has exist, can't create again.", tenantCode);
|
||||
putMsg(result, Status.TENANT_NAME_EXIST, tenantCode);
|
||||
} else {
|
||||
putMsg(result, Status.SUCCESS);
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* check tenant exists
|
||||
*
|
||||
* @param tenantCode tenant code
|
||||
* @return ture if the tenant code exists, otherwise return false
|
||||
*/
|
||||
private boolean checkTenantExists(String tenantCode) {
|
||||
List<Tenant> tenants = tenantMapper.queryByTenantCode(tenantCode);
|
||||
return CollectionUtils.isNotEmpty(tenants);
|
||||
}
|
||||
/**
|
||||
* verify tenant code
|
||||
*
|
||||
* @param tenantCode tenant code
|
||||
* @return true if tenant code can user, otherwise return false
|
||||
*/
|
||||
Result verifyTenantCode(String tenantCode);
|
||||
}
|
||||
|
|
|
|||
|
|
@ -40,6 +40,7 @@ import org.springframework.stereotype.Service;
|
|||
import org.springframework.transaction.annotation.Transactional;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.text.MessageFormat;
|
||||
import java.util.*;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
|
|
@ -946,6 +947,7 @@ public class UsersService extends BaseService {
|
|||
* activate user, only system admin have permission, change user state code 0 to 1
|
||||
*
|
||||
* @param loginUser login user
|
||||
* @param userName user name
|
||||
* @return create result code
|
||||
*/
|
||||
public Map<String, Object> activateUser(User loginUser, String userName) {
|
||||
|
|
@ -983,4 +985,52 @@ public class UsersService extends BaseService {
|
|||
result.put(Constants.DATA_LIST, responseUser);
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* activate user, only system admin have permission, change users state code 0 to 1
|
||||
*
|
||||
* @param loginUser login user
|
||||
* @param userNames user name
|
||||
* @return create result code
|
||||
*/
|
||||
public Map<String, Object> batchActivateUser(User loginUser, List<String> userNames) {
|
||||
Map<String, Object> result = new HashMap<>();
|
||||
|
||||
if (!isAdmin(loginUser)) {
|
||||
putMsg(result, Status.USER_NO_OPERATION_PERM);
|
||||
return result;
|
||||
}
|
||||
|
||||
int totalSuccess = 0;
|
||||
List<String> successUserNames = new ArrayList<>();
|
||||
Map<String, Object> successRes = new HashMap<>();
|
||||
int totalFailed = 0;
|
||||
List<Map<String, String>> failedInfo = new ArrayList<>();
|
||||
Map<String, Object> failedRes = new HashMap<>();
|
||||
for (String userName : userNames) {
|
||||
Map<String, Object> tmpResult = activateUser(loginUser, userName);
|
||||
if (tmpResult.get(Constants.STATUS) != Status.SUCCESS) {
|
||||
totalFailed++;
|
||||
Map<String, String> failedBody = new HashMap<>();
|
||||
failedBody.put("userName", userName);
|
||||
Status status = (Status) tmpResult.get(Constants.STATUS);
|
||||
String errorMessage = MessageFormat.format(status.getMsg(), userName);
|
||||
failedBody.put("msg", errorMessage);
|
||||
failedInfo.add(failedBody);
|
||||
} else {
|
||||
totalSuccess++;
|
||||
successUserNames.add(userName);
|
||||
}
|
||||
}
|
||||
successRes.put("sum", totalSuccess);
|
||||
successRes.put("userName", successUserNames);
|
||||
failedRes.put("sum", totalFailed);
|
||||
failedRes.put("info", failedInfo);
|
||||
Map<String, Object> res = new HashMap<>();
|
||||
res.put("success", successRes);
|
||||
res.put("failed", failedRes);
|
||||
putMsg(result, Status.SUCCESS);
|
||||
result.put(Constants.DATA_LIST, res);
|
||||
return result;
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -25,6 +25,7 @@ import org.apache.dolphinscheduler.api.dto.treeview.TreeViewDto;
|
|||
import org.apache.dolphinscheduler.api.enums.Status;
|
||||
import org.apache.dolphinscheduler.api.service.BaseService;
|
||||
import org.apache.dolphinscheduler.api.service.ProcessDefinitionService;
|
||||
import org.apache.dolphinscheduler.api.service.ProcessDefinitionVersionService;
|
||||
import org.apache.dolphinscheduler.api.service.ProjectService;
|
||||
import org.apache.dolphinscheduler.api.service.SchedulerService;
|
||||
import org.apache.dolphinscheduler.api.utils.CheckUtils;
|
||||
|
|
@ -56,6 +57,7 @@ import org.apache.dolphinscheduler.common.utils.StringUtils;
|
|||
import org.apache.dolphinscheduler.common.utils.TaskParametersUtils;
|
||||
import org.apache.dolphinscheduler.dao.entity.ProcessData;
|
||||
import org.apache.dolphinscheduler.dao.entity.ProcessDefinition;
|
||||
import org.apache.dolphinscheduler.dao.entity.ProcessDefinitionVersion;
|
||||
import org.apache.dolphinscheduler.dao.entity.ProcessInstance;
|
||||
import org.apache.dolphinscheduler.dao.entity.Project;
|
||||
import org.apache.dolphinscheduler.dao.entity.Schedule;
|
||||
|
|
@ -125,6 +127,9 @@ public class ProcessDefinitionServiceImpl extends BaseService implements
|
|||
@Autowired
|
||||
private ProjectService projectService;
|
||||
|
||||
@Autowired
|
||||
private ProcessDefinitionVersionService processDefinitionVersionService;
|
||||
|
||||
@Autowired
|
||||
private ProcessDefinitionMapper processDefineMapper;
|
||||
|
||||
|
|
@ -202,8 +207,17 @@ public class ProcessDefinitionServiceImpl extends BaseService implements
|
|||
processDefine.setCreateTime(now);
|
||||
processDefine.setUpdateTime(now);
|
||||
processDefine.setFlag(Flag.YES);
|
||||
|
||||
// save the new process definition
|
||||
processDefineMapper.insert(processDefine);
|
||||
|
||||
// add process definition version
|
||||
long version = processDefinitionVersionService.addProcessDefinitionVersion(processDefine);
|
||||
|
||||
processDefine.setVersion(version);
|
||||
|
||||
processDefineMapper.updateVersionByProcessDefinitionId(processDefine.getId(), version);
|
||||
|
||||
// return processDefinition object with ID
|
||||
result.put(Constants.DATA_LIST, processDefineMapper.selectById(processDefine.getId()));
|
||||
putMsg(result, Status.SUCCESS);
|
||||
|
|
@ -239,7 +253,6 @@ public class ProcessDefinitionServiceImpl extends BaseService implements
|
|||
return sb.toString();
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* query process definition list
|
||||
*
|
||||
|
|
@ -265,7 +278,6 @@ public class ProcessDefinitionServiceImpl extends BaseService implements
|
|||
return result;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* query process definition list paging
|
||||
*
|
||||
|
|
@ -311,7 +323,6 @@ public class ProcessDefinitionServiceImpl extends BaseService implements
|
|||
*/
|
||||
public Map<String, Object> queryProcessDefinitionById(User loginUser, String projectName, Integer processId) {
|
||||
|
||||
|
||||
Map<String, Object> result = new HashMap<>();
|
||||
Project project = projectMapper.queryByName(projectName);
|
||||
|
||||
|
|
@ -398,9 +409,14 @@ public class ProcessDefinitionServiceImpl extends BaseService implements
|
|||
processDefine.setGlobalParamList(globalParamsList);
|
||||
processDefine.setUpdateTime(now);
|
||||
processDefine.setFlag(Flag.YES);
|
||||
|
||||
// add process definition version
|
||||
long version = processDefinitionVersionService.addProcessDefinitionVersion(processDefine);
|
||||
processDefine.setVersion(version);
|
||||
|
||||
if (processDefineMapper.updateById(processDefine) > 0) {
|
||||
putMsg(result, Status.SUCCESS);
|
||||
|
||||
result.put(Constants.DATA_LIST, processDefineMapper.queryByDefineId(id));
|
||||
} else {
|
||||
putMsg(result, Status.UPDATE_PROCESS_DEFINITION_ERROR);
|
||||
}
|
||||
|
|
@ -1058,7 +1074,6 @@ public class ProcessDefinitionServiceImpl extends BaseService implements
|
|||
}
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* check the process definition node meets the specifications
|
||||
*
|
||||
|
|
@ -1127,7 +1142,6 @@ public class ProcessDefinitionServiceImpl extends BaseService implements
|
|||
return result;
|
||||
}
|
||||
|
||||
|
||||
String processDefinitionJson = processDefinition.getProcessDefinitionJson();
|
||||
|
||||
ProcessData processData = JSONUtils.parseObject(processDefinitionJson, ProcessData.class);
|
||||
|
|
@ -1185,7 +1199,6 @@ public class ProcessDefinitionServiceImpl extends BaseService implements
|
|||
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* query process definition all by project id
|
||||
*
|
||||
|
|
@ -1277,7 +1290,6 @@ public class ProcessDefinitionServiceImpl extends BaseService implements
|
|||
TaskNode taskNode = dag.getNode(nodeName);
|
||||
treeViewDto.setType(taskNode.getType());
|
||||
|
||||
|
||||
//set treeViewDto instances
|
||||
for (int i = limit - 1; i >= 0; i--) {
|
||||
ProcessInstance processInstance = processInstanceList.get(i);
|
||||
|
|
@ -1334,7 +1346,6 @@ public class ProcessDefinitionServiceImpl extends BaseService implements
|
|||
return result;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Generate the DAG Graph based on the process definition id
|
||||
*
|
||||
|
|
@ -1360,7 +1371,6 @@ public class ProcessDefinitionServiceImpl extends BaseService implements
|
|||
return new DAG<>();
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* whether the graph has a ring
|
||||
*
|
||||
|
|
@ -1525,6 +1535,66 @@ public class ProcessDefinitionServiceImpl extends BaseService implements
|
|||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* switch the defined process definition verison
|
||||
*
|
||||
* @param loginUser login user
|
||||
* @param projectName project name
|
||||
* @param processDefinitionId process definition id
|
||||
* @param version the version user want to switch
|
||||
* @return switch process definition version result code
|
||||
*/
|
||||
@Override
|
||||
public Map<String, Object> switchProcessDefinitionVersion(User loginUser, String projectName
|
||||
, int processDefinitionId, long version) {
|
||||
|
||||
Map<String, Object> result = new HashMap<>();
|
||||
Project project = projectMapper.queryByName(projectName);
|
||||
// check project auth
|
||||
Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName);
|
||||
Status resultStatus = (Status) checkResult.get(Constants.STATUS);
|
||||
if (resultStatus != Status.SUCCESS) {
|
||||
return checkResult;
|
||||
}
|
||||
|
||||
ProcessDefinition processDefinition = processDefineMapper.queryByDefineId(processDefinitionId);
|
||||
if (Objects.isNull(processDefinition)) {
|
||||
putMsg(result
|
||||
, Status.SWITCH_PROCESS_DEFINITION_VERSION_NOT_EXIST_PROCESS_DEFINITION_ERROR
|
||||
, processDefinitionId);
|
||||
return result;
|
||||
}
|
||||
|
||||
ProcessDefinitionVersion processDefinitionVersion = processDefinitionVersionService
|
||||
.queryByProcessDefinitionIdAndVersion(processDefinitionId, version);
|
||||
if (Objects.isNull(processDefinitionVersion)) {
|
||||
putMsg(result
|
||||
, Status.SWITCH_PROCESS_DEFINITION_VERSION_NOT_EXIST_PROCESS_DEFINITION_VERSION_ERROR
|
||||
, processDefinitionId
|
||||
, version);
|
||||
return result;
|
||||
}
|
||||
|
||||
processDefinition.setVersion(processDefinitionVersion.getVersion());
|
||||
processDefinition.setProcessDefinitionJson(processDefinitionVersion.getProcessDefinitionJson());
|
||||
processDefinition.setDescription(processDefinitionVersion.getDescription());
|
||||
processDefinition.setLocations(processDefinitionVersion.getLocations());
|
||||
processDefinition.setConnects(processDefinitionVersion.getConnects());
|
||||
processDefinition.setTimeout(processDefinitionVersion.getTimeout());
|
||||
processDefinition.setGlobalParams(processDefinitionVersion.getGlobalParams());
|
||||
processDefinition.setUpdateTime(new Date());
|
||||
processDefinition.setReceivers(processDefinitionVersion.getReceivers());
|
||||
processDefinition.setReceiversCc(processDefinitionVersion.getReceiversCc());
|
||||
processDefinition.setResourceIds(processDefinitionVersion.getResourceIds());
|
||||
|
||||
if (processDefineMapper.updateById(processDefinition) > 0) {
|
||||
putMsg(result, Status.SUCCESS);
|
||||
} else {
|
||||
putMsg(result, Status.SWITCH_PROCESS_DEFINITION_VERSION_ERROR);
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* do batch move process definition
|
||||
*
|
||||
|
|
|
|||
|
|
@ -0,0 +1,181 @@
|
|||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
* contributor license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright ownership.
|
||||
* The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
* (the "License"); you may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.dolphinscheduler.api.service.impl;
|
||||
|
||||
import org.apache.dolphinscheduler.api.enums.Status;
|
||||
import org.apache.dolphinscheduler.api.service.BaseService;
|
||||
import org.apache.dolphinscheduler.api.service.ProcessDefinitionVersionService;
|
||||
import org.apache.dolphinscheduler.api.service.ProjectService;
|
||||
import org.apache.dolphinscheduler.api.utils.PageInfo;
|
||||
import org.apache.dolphinscheduler.common.Constants;
|
||||
import org.apache.dolphinscheduler.dao.entity.ProcessDefinition;
|
||||
import org.apache.dolphinscheduler.dao.entity.ProcessDefinitionVersion;
|
||||
import org.apache.dolphinscheduler.dao.entity.Project;
|
||||
import org.apache.dolphinscheduler.dao.entity.User;
|
||||
import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionVersionMapper;
|
||||
import org.apache.dolphinscheduler.dao.mapper.ProjectMapper;
|
||||
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Objects;
|
||||
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.stereotype.Service;
|
||||
|
||||
import com.baomidou.mybatisplus.core.metadata.IPage;
|
||||
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
|
||||
import com.google.common.collect.ImmutableMap;
|
||||
|
||||
@Service
|
||||
public class ProcessDefinitionVersionServiceImpl extends BaseService implements
|
||||
ProcessDefinitionVersionService {
|
||||
|
||||
@Autowired
|
||||
private ProcessDefinitionVersionMapper processDefinitionVersionMapper;
|
||||
|
||||
@Autowired
|
||||
private ProjectService projectService;
|
||||
|
||||
@Autowired
|
||||
private ProjectMapper projectMapper;
|
||||
|
||||
/**
|
||||
* add the newest version of one process definition
|
||||
*
|
||||
* @param processDefinition the process definition that need to record version
|
||||
* @return the newest version number of this process definition
|
||||
*/
|
||||
public long addProcessDefinitionVersion(ProcessDefinition processDefinition) {
|
||||
|
||||
long version = this.queryMaxVersionByProcessDefinitionId(processDefinition.getId()) + 1;
|
||||
|
||||
ProcessDefinitionVersion processDefinitionVersion = ProcessDefinitionVersion
|
||||
.newBuilder()
|
||||
.processDefinitionId(processDefinition.getId())
|
||||
.version(version)
|
||||
.processDefinitionJson(processDefinition.getProcessDefinitionJson())
|
||||
.description(processDefinition.getDescription())
|
||||
.locations(processDefinition.getLocations())
|
||||
.connects(processDefinition.getConnects())
|
||||
.timeout(processDefinition.getTimeout())
|
||||
.globalParams(processDefinition.getGlobalParams())
|
||||
.createTime(processDefinition.getUpdateTime())
|
||||
.receivers(processDefinition.getReceivers())
|
||||
.receiversCc(processDefinition.getReceiversCc())
|
||||
.resourceIds(processDefinition.getResourceIds())
|
||||
.build();
|
||||
|
||||
processDefinitionVersionMapper.insert(processDefinitionVersion);
|
||||
|
||||
return version;
|
||||
}
|
||||
|
||||
/**
|
||||
* query the max version number by the process definition id
|
||||
*
|
||||
* @param processDefinitionId process definition id
|
||||
* @return the max version number of this id
|
||||
*/
|
||||
private long queryMaxVersionByProcessDefinitionId(int processDefinitionId) {
|
||||
Long maxVersion = processDefinitionVersionMapper.queryMaxVersionByProcessDefinitionId(processDefinitionId);
|
||||
if (Objects.isNull(maxVersion)) {
|
||||
return 0L;
|
||||
} else {
|
||||
return maxVersion;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* query the pagination versions info by one certain process definition id
|
||||
*
|
||||
* @param loginUser login user info to check auth
|
||||
* @param projectName process definition project name
|
||||
* @param pageNo page number
|
||||
* @param pageSize page size
|
||||
* @param processDefinitionId process definition id
|
||||
* @return the pagination process definition versions info of the certain process definition
|
||||
*/
|
||||
public Map<String, Object> queryProcessDefinitionVersions(User loginUser, String projectName, int pageNo, int pageSize, int processDefinitionId) {
|
||||
|
||||
Map<String, Object> result = new HashMap<>();
|
||||
|
||||
// check the if pageNo or pageSize less than 1
|
||||
if (pageNo <= 0 || pageSize <= 0) {
|
||||
putMsg(result
|
||||
, Status.QUERY_PROCESS_DEFINITION_VERSIONS_PAGE_NO_OR_PAGE_SIZE_LESS_THAN_1_ERROR
|
||||
, pageNo
|
||||
, pageSize);
|
||||
return result;
|
||||
}
|
||||
|
||||
Project project = projectMapper.queryByName(projectName);
|
||||
|
||||
// check project auth
|
||||
Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName);
|
||||
Status resultStatus = (Status) checkResult.get(Constants.STATUS);
|
||||
if (resultStatus != Status.SUCCESS) {
|
||||
return checkResult;
|
||||
}
|
||||
|
||||
PageInfo<ProcessDefinitionVersion> pageInfo = new PageInfo<>(pageNo, pageSize);
|
||||
Page<ProcessDefinitionVersion> page = new Page<>(pageNo, pageSize);
|
||||
IPage<ProcessDefinitionVersion> processDefinitionVersionsPaging = processDefinitionVersionMapper.queryProcessDefinitionVersionsPaging(page, processDefinitionId);
|
||||
List<ProcessDefinitionVersion> processDefinitionVersions = processDefinitionVersionsPaging.getRecords();
|
||||
pageInfo.setLists(processDefinitionVersions);
|
||||
pageInfo.setTotalCount((int) processDefinitionVersionsPaging.getTotal());
|
||||
return ImmutableMap.of(
|
||||
Constants.MSG, Status.SUCCESS.getMsg()
|
||||
, Constants.STATUS, Status.SUCCESS
|
||||
, Constants.DATA_LIST, pageInfo);
|
||||
}
|
||||
|
||||
/**
|
||||
* query one certain process definition version by version number and process definition id
|
||||
*
|
||||
* @param processDefinitionId process definition id
|
||||
* @param version version number
|
||||
* @return the process definition version info
|
||||
*/
|
||||
public ProcessDefinitionVersion queryByProcessDefinitionIdAndVersion(int processDefinitionId, long version) {
|
||||
return processDefinitionVersionMapper.queryByProcessDefinitionIdAndVersion(processDefinitionId, version);
|
||||
}
|
||||
|
||||
/**
|
||||
* delete one certain process definition by version number and process definition id
|
||||
*
|
||||
* @param loginUser login user info to check auth
|
||||
* @param projectName process definition project name
|
||||
* @param processDefinitionId process definition id
|
||||
* @param version version number
|
||||
* @return delele result code
|
||||
*/
|
||||
public Map<String, Object> deleteByProcessDefinitionIdAndVersion(User loginUser, String projectName, int processDefinitionId, long version) {
|
||||
Map<String, Object> result = new HashMap<>();
|
||||
Project project = projectMapper.queryByName(projectName);
|
||||
// check project auth
|
||||
Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName);
|
||||
Status resultStatus = (Status) checkResult.get(Constants.STATUS);
|
||||
if (resultStatus != Status.SUCCESS) {
|
||||
return checkResult;
|
||||
}
|
||||
processDefinitionVersionMapper.deleteByProcessDefinitionIdAndVersion(processDefinitionId, version);
|
||||
putMsg(result, Status.SUCCESS);
|
||||
return result;
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,331 @@
|
|||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
* contributor license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright ownership.
|
||||
* The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
* (the "License"); you may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.dolphinscheduler.api.service.impl;
|
||||
|
||||
import org.apache.dolphinscheduler.api.enums.Status;
|
||||
import org.apache.dolphinscheduler.api.service.BaseService;
|
||||
import org.apache.dolphinscheduler.api.service.TenantService;
|
||||
import org.apache.dolphinscheduler.api.utils.PageInfo;
|
||||
import org.apache.dolphinscheduler.api.utils.Result;
|
||||
import org.apache.dolphinscheduler.common.Constants;
|
||||
import org.apache.dolphinscheduler.common.utils.CollectionUtils;
|
||||
import org.apache.dolphinscheduler.common.utils.HadoopUtils;
|
||||
import org.apache.dolphinscheduler.common.utils.PropertyUtils;
|
||||
import org.apache.dolphinscheduler.common.utils.StringUtils;
|
||||
import org.apache.dolphinscheduler.dao.entity.ProcessDefinition;
|
||||
import org.apache.dolphinscheduler.dao.entity.ProcessInstance;
|
||||
import org.apache.dolphinscheduler.dao.entity.Tenant;
|
||||
import org.apache.dolphinscheduler.dao.entity.User;
|
||||
import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionMapper;
|
||||
import org.apache.dolphinscheduler.dao.mapper.ProcessInstanceMapper;
|
||||
import org.apache.dolphinscheduler.dao.mapper.TenantMapper;
|
||||
import org.apache.dolphinscheduler.dao.mapper.UserMapper;
|
||||
|
||||
import java.util.Date;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.stereotype.Service;
|
||||
import org.springframework.transaction.annotation.Transactional;
|
||||
|
||||
import com.baomidou.mybatisplus.core.metadata.IPage;
|
||||
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
|
||||
|
||||
/**
|
||||
* tenant service
|
||||
*/
|
||||
@Service
|
||||
public class TenantServiceImpl extends BaseService implements TenantService {
|
||||
|
||||
private static final Logger logger = LoggerFactory.getLogger(TenantServiceImpl.class);
|
||||
|
||||
@Autowired
|
||||
private TenantMapper tenantMapper;
|
||||
|
||||
@Autowired
|
||||
private ProcessInstanceMapper processInstanceMapper;
|
||||
|
||||
@Autowired
|
||||
private ProcessDefinitionMapper processDefinitionMapper;
|
||||
|
||||
@Autowired
|
||||
private UserMapper userMapper;
|
||||
|
||||
/**
|
||||
* create tenant
|
||||
*
|
||||
* @param loginUser login user
|
||||
* @param tenantCode tenant code
|
||||
* @param tenantName tenant name
|
||||
* @param queueId queue id
|
||||
* @param desc description
|
||||
* @return create result code
|
||||
* @throws Exception exception
|
||||
*/
|
||||
@Transactional(rollbackFor = Exception.class)
|
||||
public Map<String, Object> createTenant(User loginUser,
|
||||
String tenantCode,
|
||||
String tenantName,
|
||||
int queueId,
|
||||
String desc) throws Exception {
|
||||
|
||||
Map<String, Object> result = new HashMap<>(5);
|
||||
result.put(Constants.STATUS, false);
|
||||
if (checkAdmin(loginUser, result)) {
|
||||
return result;
|
||||
}
|
||||
|
||||
if (checkTenantExists(tenantCode)) {
|
||||
putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, tenantCode);
|
||||
return result;
|
||||
}
|
||||
|
||||
Tenant tenant = new Tenant();
|
||||
Date now = new Date();
|
||||
|
||||
if (!tenantCode.matches("^[0-9a-zA-Z_.-]{1,}$") || tenantCode.startsWith("-") || tenantCode.startsWith(".")) {
|
||||
putMsg(result, Status.VERIFY_TENANT_CODE_ERROR);
|
||||
return result;
|
||||
}
|
||||
tenant.setTenantCode(tenantCode);
|
||||
tenant.setTenantName(tenantName);
|
||||
tenant.setQueueId(queueId);
|
||||
tenant.setDescription(desc);
|
||||
tenant.setCreateTime(now);
|
||||
tenant.setUpdateTime(now);
|
||||
|
||||
// save
|
||||
tenantMapper.insert(tenant);
|
||||
|
||||
// if hdfs startup
|
||||
if (PropertyUtils.getResUploadStartupState()) {
|
||||
createTenantDirIfNotExists(tenantCode);
|
||||
}
|
||||
|
||||
putMsg(result, Status.SUCCESS);
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* query tenant list paging
|
||||
*
|
||||
* @param loginUser login user
|
||||
* @param searchVal search value
|
||||
* @param pageNo page number
|
||||
* @param pageSize page size
|
||||
* @return tenant list page
|
||||
*/
|
||||
public Map<String, Object> queryTenantList(User loginUser, String searchVal, Integer pageNo, Integer pageSize) {
|
||||
|
||||
Map<String, Object> result = new HashMap<>(5);
|
||||
if (checkAdmin(loginUser, result)) {
|
||||
return result;
|
||||
}
|
||||
|
||||
Page<Tenant> page = new Page<>(pageNo, pageSize);
|
||||
IPage<Tenant> tenantIPage = tenantMapper.queryTenantPaging(page, searchVal);
|
||||
PageInfo<Tenant> pageInfo = new PageInfo<>(pageNo, pageSize);
|
||||
pageInfo.setTotalCount((int) tenantIPage.getTotal());
|
||||
pageInfo.setLists(tenantIPage.getRecords());
|
||||
result.put(Constants.DATA_LIST, pageInfo);
|
||||
|
||||
putMsg(result, Status.SUCCESS);
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* updateProcessInstance tenant
|
||||
*
|
||||
* @param loginUser login user
|
||||
* @param id tennat id
|
||||
* @param tenantCode tennat code
|
||||
* @param tenantName tennat name
|
||||
* @param queueId queue id
|
||||
* @param desc description
|
||||
* @return update result code
|
||||
* @throws Exception exception
|
||||
*/
|
||||
public Map<String, Object> updateTenant(User loginUser, int id, String tenantCode, String tenantName, int queueId,
|
||||
String desc) throws Exception {
|
||||
|
||||
Map<String, Object> result = new HashMap<>(5);
|
||||
result.put(Constants.STATUS, false);
|
||||
|
||||
if (checkAdmin(loginUser, result)) {
|
||||
return result;
|
||||
}
|
||||
|
||||
Tenant tenant = tenantMapper.queryById(id);
|
||||
|
||||
if (tenant == null) {
|
||||
putMsg(result, Status.TENANT_NOT_EXIST);
|
||||
return result;
|
||||
}
|
||||
|
||||
// updateProcessInstance tenant
|
||||
/**
|
||||
* if the tenant code is modified, the original resource needs to be copied to the new tenant.
|
||||
*/
|
||||
if (!tenant.getTenantCode().equals(tenantCode)) {
|
||||
if (checkTenantExists(tenantCode)) {
|
||||
// if hdfs startup
|
||||
if (PropertyUtils.getResUploadStartupState()) {
|
||||
String resourcePath = HadoopUtils.getHdfsDataBasePath() + "/" + tenantCode + "/resources";
|
||||
String udfsPath = HadoopUtils.getHdfsUdfDir(tenantCode);
|
||||
//init hdfs resource
|
||||
HadoopUtils.getInstance().mkdir(resourcePath);
|
||||
HadoopUtils.getInstance().mkdir(udfsPath);
|
||||
}
|
||||
} else {
|
||||
putMsg(result, Status.TENANT_CODE_HAS_ALREADY_EXISTS);
|
||||
return result;
|
||||
}
|
||||
}
|
||||
|
||||
Date now = new Date();
|
||||
|
||||
if (StringUtils.isNotEmpty(tenantCode)) {
|
||||
tenant.setTenantCode(tenantCode);
|
||||
}
|
||||
|
||||
if (StringUtils.isNotEmpty(tenantName)) {
|
||||
tenant.setTenantName(tenantName);
|
||||
}
|
||||
|
||||
if (queueId != 0) {
|
||||
tenant.setQueueId(queueId);
|
||||
}
|
||||
tenant.setDescription(desc);
|
||||
tenant.setUpdateTime(now);
|
||||
tenantMapper.updateById(tenant);
|
||||
|
||||
result.put(Constants.STATUS, Status.SUCCESS);
|
||||
result.put(Constants.MSG, Status.SUCCESS.getMsg());
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* delete tenant
|
||||
*
|
||||
* @param loginUser login user
|
||||
* @param id tenant id
|
||||
* @return delete result code
|
||||
* @throws Exception exception
|
||||
*/
|
||||
@Transactional(rollbackFor = Exception.class)
|
||||
public Map<String, Object> deleteTenantById(User loginUser, int id) throws Exception {
|
||||
Map<String, Object> result = new HashMap<>(5);
|
||||
|
||||
if (checkAdmin(loginUser, result)) {
|
||||
return result;
|
||||
}
|
||||
|
||||
Tenant tenant = tenantMapper.queryById(id);
|
||||
if (tenant == null) {
|
||||
putMsg(result, Status.TENANT_NOT_EXIST);
|
||||
return result;
|
||||
}
|
||||
|
||||
List<ProcessInstance> processInstances = getProcessInstancesByTenant(tenant);
|
||||
if (CollectionUtils.isNotEmpty(processInstances)) {
|
||||
putMsg(result, Status.DELETE_TENANT_BY_ID_FAIL, processInstances.size());
|
||||
return result;
|
||||
}
|
||||
|
||||
List<ProcessDefinition> processDefinitions =
|
||||
processDefinitionMapper.queryDefinitionListByTenant(tenant.getId());
|
||||
if (CollectionUtils.isNotEmpty(processDefinitions)) {
|
||||
putMsg(result, Status.DELETE_TENANT_BY_ID_FAIL_DEFINES, processDefinitions.size());
|
||||
return result;
|
||||
}
|
||||
|
||||
List<User> userList = userMapper.queryUserListByTenant(tenant.getId());
|
||||
if (CollectionUtils.isNotEmpty(userList)) {
|
||||
putMsg(result, Status.DELETE_TENANT_BY_ID_FAIL_USERS, userList.size());
|
||||
return result;
|
||||
}
|
||||
|
||||
// if resource upload startup
|
||||
if (PropertyUtils.getResUploadStartupState()) {
|
||||
String tenantPath = HadoopUtils.getHdfsDataBasePath() + "/" + tenant.getTenantCode();
|
||||
|
||||
if (HadoopUtils.getInstance().exists(tenantPath)) {
|
||||
HadoopUtils.getInstance().delete(tenantPath, true);
|
||||
}
|
||||
}
|
||||
|
||||
tenantMapper.deleteById(id);
|
||||
processInstanceMapper.updateProcessInstanceByTenantId(id, -1);
|
||||
putMsg(result, Status.SUCCESS);
|
||||
return result;
|
||||
}
|
||||
|
||||
private List<ProcessInstance> getProcessInstancesByTenant(Tenant tenant) {
|
||||
return processInstanceMapper.queryByTenantIdAndStatus(tenant.getId(), Constants.NOT_TERMINATED_STATES);
|
||||
}
|
||||
|
||||
/**
|
||||
* query tenant list
|
||||
*
|
||||
* @param loginUser login user
|
||||
* @return tenant list
|
||||
*/
|
||||
public Map<String, Object> queryTenantList(User loginUser) {
|
||||
|
||||
Map<String, Object> result = new HashMap<>(5);
|
||||
|
||||
List<Tenant> resourceList = tenantMapper.selectList(null);
|
||||
result.put(Constants.DATA_LIST, resourceList);
|
||||
putMsg(result, Status.SUCCESS);
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* verify tenant code
|
||||
*
|
||||
* @param tenantCode tenant code
|
||||
* @return true if tenant code can user, otherwise return false
|
||||
*/
|
||||
public Result verifyTenantCode(String tenantCode) {
|
||||
Result result = new Result();
|
||||
if (checkTenantExists(tenantCode)) {
|
||||
putMsg(result, Status.TENANT_NAME_EXIST, tenantCode);
|
||||
} else {
|
||||
putMsg(result, Status.SUCCESS);
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* check tenant exists
|
||||
*
|
||||
* @param tenantCode tenant code
|
||||
* @return ture if the tenant code exists, otherwise return false
|
||||
*/
|
||||
private boolean checkTenantExists(String tenantCode) {
|
||||
List<Tenant> tenants = tenantMapper.queryByTenantCode(tenantCode);
|
||||
return CollectionUtils.isNotEmpty(tenants);
|
||||
}
|
||||
}
|
||||
|
|
@ -258,3 +258,7 @@ COPY_PROCESS_DEFINITION_NOTES= copy process definition notes
|
|||
MOVE_PROCESS_DEFINITION_NOTES= move process definition notes
|
||||
TARGET_PROJECT_ID= target project id
|
||||
IS_COPY = is copy
|
||||
DELETE_PROCESS_DEFINITION_VERSION_NOTES=delete process definition version
|
||||
QUERY_PROCESS_DEFINITION_VERSIONS_NOTES=query process definition versions
|
||||
SWITCH_PROCESS_DEFINITION_VERSION_NOTES=switch process definition version
|
||||
VERSION=version
|
||||
|
|
|
|||
|
|
@ -258,3 +258,7 @@ COPY_PROCESS_DEFINITION_NOTES= copy process definition notes
|
|||
MOVE_PROCESS_DEFINITION_NOTES= move process definition notes
|
||||
TARGET_PROJECT_ID= target project id
|
||||
IS_COPY = is copy
|
||||
DELETE_PROCESS_DEFINITION_VERSION_NOTES=delete process definition version
|
||||
QUERY_PROCESS_DEFINITION_VERSIONS_NOTES=query process definition versions
|
||||
SWITCH_PROCESS_DEFINITION_VERSION_NOTES=switch process definition version
|
||||
VERSION=version
|
||||
|
|
|
|||
|
|
@ -256,4 +256,7 @@ COPY_PROCESS_DEFINITION_NOTES= 复制工作流定义
|
|||
MOVE_PROCESS_DEFINITION_NOTES= 移动工作流定义
|
||||
TARGET_PROJECT_ID= 目标项目ID
|
||||
IS_COPY = 是否复制
|
||||
|
||||
DELETE_PROCESS_DEFINITION_VERSION_NOTES=删除流程历史版本
|
||||
QUERY_PROCESS_DEFINITION_VERSIONS_NOTES=查询流程历史版本信息
|
||||
SWITCH_PROCESS_DEFINITION_VERSION_NOTES=切换流程版本
|
||||
VERSION=版本号
|
||||
|
|
|
|||
|
|
@ -14,10 +14,11 @@
|
|||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.dolphinscheduler.api.controller;
|
||||
|
||||
import org.apache.dolphinscheduler.api.enums.Status;
|
||||
import org.apache.dolphinscheduler.api.service.ProcessDefinitionService;
|
||||
import org.apache.dolphinscheduler.api.service.ProcessDefinitionVersionService;
|
||||
import org.apache.dolphinscheduler.api.service.impl.ProcessDefinitionServiceImpl;
|
||||
import org.apache.dolphinscheduler.api.utils.PageInfo;
|
||||
import org.apache.dolphinscheduler.api.utils.Result;
|
||||
|
|
@ -25,8 +26,18 @@ import org.apache.dolphinscheduler.common.Constants;
|
|||
import org.apache.dolphinscheduler.common.enums.ReleaseState;
|
||||
import org.apache.dolphinscheduler.common.enums.UserType;
|
||||
import org.apache.dolphinscheduler.dao.entity.ProcessDefinition;
|
||||
import org.apache.dolphinscheduler.dao.entity.ProcessDefinitionVersion;
|
||||
import org.apache.dolphinscheduler.dao.entity.Resource;
|
||||
import org.apache.dolphinscheduler.dao.entity.User;
|
||||
|
||||
import java.text.MessageFormat;
|
||||
import java.util.ArrayList;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
import javax.servlet.http.HttpServletResponse;
|
||||
|
||||
import org.junit.Assert;
|
||||
import org.junit.Before;
|
||||
import org.junit.Test;
|
||||
|
|
@ -38,12 +49,6 @@ import org.mockito.junit.MockitoJUnitRunner;
|
|||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.springframework.mock.web.MockHttpServletResponse;
|
||||
import javax.servlet.http.HttpServletResponse;
|
||||
import java.text.MessageFormat;
|
||||
import java.util.ArrayList;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
/**
|
||||
* process definition controller test
|
||||
|
|
@ -59,10 +64,13 @@ public class ProcessDefinitionControllerTest {
|
|||
@Mock
|
||||
private ProcessDefinitionServiceImpl processDefinitionService;
|
||||
|
||||
@Mock
|
||||
private ProcessDefinitionVersionService processDefinitionVersionService;
|
||||
|
||||
protected User user;
|
||||
|
||||
@Before
|
||||
public void before(){
|
||||
public void before() {
|
||||
User loginUser = new User();
|
||||
loginUser.setId(1);
|
||||
loginUser.setUserType(UserType.GENERAL_USER);
|
||||
|
|
@ -73,7 +81,11 @@ public class ProcessDefinitionControllerTest {
|
|||
|
||||
@Test
|
||||
public void testCreateProcessDefinition() throws Exception {
|
||||
String json = "{\"globalParams\":[],\"tasks\":[{\"type\":\"SHELL\",\"id\":\"tasks-36196\",\"name\":\"ssh_test1\",\"params\":{\"resourceList\":[],\"localParams\":[],\"rawScript\":\"aa=\\\"1234\\\"\\necho ${aa}\"},\"desc\":\"\",\"runFlag\":\"NORMAL\",\"dependence\":{},\"maxRetryTimes\":\"0\",\"retryInterval\":\"1\",\"timeout\":{\"strategy\":\"\",\"interval\":null,\"enable\":false},\"taskInstancePriority\":\"MEDIUM\",\"workerGroupId\":-1,\"preTasks\":[]}],\"tenantId\":-1,\"timeout\":0}";
|
||||
String json = "{\"globalParams\":[],\"tasks\":[{\"type\":\"SHELL\",\"id\":\"tasks-36196\",\"name\""
|
||||
+ ":\"ssh_test1\",\"params\":{\"resourceList\":[],\"localParams\":[],\"rawScript\":\"aa=\\\"1234\\\"\\"
|
||||
+ "necho ${aa}\"},\"desc\":\"\",\"runFlag\":\"NORMAL\",\"dependence\":{},\"maxRetryTimes\":\"0\""
|
||||
+ ",\"retryInterval\":\"1\",\"timeout\":{\"strategy\":\"\",\"interval\":null,\"enable\":false},"
|
||||
+ "\"taskInstancePriority\":\"MEDIUM\",\"workerGroupId\":-1,\"preTasks\":[]}],\"tenantId\":-1,\"timeout\":0}";
|
||||
String locations = "{\"tasks-36196\":{\"name\":\"ssh_test1\",\"targetarr\":\"\",\"x\":141,\"y\":70}}";
|
||||
|
||||
String projectName = "test";
|
||||
|
|
@ -82,14 +94,14 @@ public class ProcessDefinitionControllerTest {
|
|||
String connects = "[]";
|
||||
Map<String, Object> result = new HashMap<>();
|
||||
putMsg(result, Status.SUCCESS);
|
||||
result.put("processDefinitionId",1);
|
||||
result.put("processDefinitionId", 1);
|
||||
|
||||
Mockito.when(processDefinitionService.createProcessDefinition(user, projectName, name, json,
|
||||
description, locations, connects)).thenReturn(result);
|
||||
|
||||
Result response = processDefinitionController.createProcessDefinition(user, projectName, name, json,
|
||||
locations, connects, description);
|
||||
Assert.assertEquals(Status.SUCCESS.getCode(),response.getCode().intValue());
|
||||
Assert.assertEquals(Status.SUCCESS.getCode(), response.getCode().intValue());
|
||||
}
|
||||
|
||||
private void putMsg(Map<String, Object> result, Status status, Object... statusParams) {
|
||||
|
|
@ -109,17 +121,21 @@ public class ProcessDefinitionControllerTest {
|
|||
String projectName = "test";
|
||||
String name = "dag_test";
|
||||
|
||||
Mockito.when(processDefinitionService.verifyProcessDefinitionName(user,projectName,name)).thenReturn(result);
|
||||
Mockito.when(processDefinitionService.verifyProcessDefinitionName(user, projectName, name)).thenReturn(result);
|
||||
|
||||
Result response = processDefinitionController.verifyProcessDefinitionName(user,projectName,name);
|
||||
Assert.assertEquals(Status.PROCESS_INSTANCE_EXIST.getCode(),response.getCode().intValue());
|
||||
Result response = processDefinitionController.verifyProcessDefinitionName(user, projectName, name);
|
||||
Assert.assertEquals(Status.PROCESS_INSTANCE_EXIST.getCode(), response.getCode().intValue());
|
||||
|
||||
}
|
||||
|
||||
@Test
|
||||
public void updateProcessDefinition() throws Exception {
|
||||
|
||||
String json = "{\"globalParams\":[],\"tasks\":[{\"type\":\"SHELL\",\"id\":\"tasks-36196\",\"name\":\"ssh_test1\",\"params\":{\"resourceList\":[],\"localParams\":[],\"rawScript\":\"aa=\\\"1234\\\"\\necho ${aa}\"},\"desc\":\"\",\"runFlag\":\"NORMAL\",\"dependence\":{},\"maxRetryTimes\":\"0\",\"retryInterval\":\"1\",\"timeout\":{\"strategy\":\"\",\"interval\":null,\"enable\":false},\"taskInstancePriority\":\"MEDIUM\",\"workerGroupId\":-1,\"preTasks\":[]}],\"tenantId\":-1,\"timeout\":0}";
|
||||
String json = "{\"globalParams\":[],\"tasks\":[{\"type\":\"SHELL\",\"id\":\"tasks-36196\",\"name\":\"ssh_test1\""
|
||||
+ ",\"params\":{\"resourceList\":[],\"localParams\":[],\"rawScript\":\"aa=\\\"1234\\\"\\necho ${aa}\"}"
|
||||
+ ",\"desc\":\"\",\"runFlag\":\"NORMAL\",\"dependence\":{},\"maxRetryTimes\":\"0\",\"retryInterval\""
|
||||
+ ":\"1\",\"timeout\":{\"strategy\":\"\",\"interval\":null,\"enable\":false},\"taskInstancePriority\""
|
||||
+ ":\"MEDIUM\",\"workerGroupId\":-1,\"preTasks\":[]}],\"tenantId\":-1,\"timeout\":0}";
|
||||
String locations = "{\"tasks-36196\":{\"name\":\"ssh_test1\",\"targetarr\":\"\",\"x\":141,\"y\":70}}";
|
||||
String projectName = "test";
|
||||
String name = "dag_test";
|
||||
|
|
@ -128,14 +144,14 @@ public class ProcessDefinitionControllerTest {
|
|||
int id = 1;
|
||||
Map<String, Object> result = new HashMap<>();
|
||||
putMsg(result, Status.SUCCESS);
|
||||
result.put("processDefinitionId",1);
|
||||
result.put("processDefinitionId", 1);
|
||||
|
||||
Mockito.when(processDefinitionService.updateProcessDefinition(user, projectName, id,name, json,
|
||||
Mockito.when(processDefinitionService.updateProcessDefinition(user, projectName, id, name, json,
|
||||
description, locations, connects)).thenReturn(result);
|
||||
|
||||
Result response = processDefinitionController.updateProcessDefinition(user, projectName, name,id, json,
|
||||
Result response = processDefinitionController.updateProcessDefinition(user, projectName, name, id, json,
|
||||
locations, connects, description);
|
||||
Assert.assertEquals(Status.SUCCESS.getCode(),response.getCode().intValue());
|
||||
Assert.assertEquals(Status.SUCCESS.getCode(), response.getCode().intValue());
|
||||
}
|
||||
|
||||
@Test
|
||||
|
|
@ -145,15 +161,19 @@ public class ProcessDefinitionControllerTest {
|
|||
Map<String, Object> result = new HashMap<>();
|
||||
putMsg(result, Status.SUCCESS);
|
||||
|
||||
Mockito.when(processDefinitionService.releaseProcessDefinition(user, projectName,id,ReleaseState.OFFLINE.ordinal())).thenReturn(result);
|
||||
Result response = processDefinitionController.releaseProcessDefinition(user, projectName,id,ReleaseState.OFFLINE.ordinal());
|
||||
Assert.assertEquals(Status.SUCCESS.getCode(),response.getCode().intValue());
|
||||
Mockito.when(processDefinitionService.releaseProcessDefinition(user, projectName, id, ReleaseState.OFFLINE.ordinal())).thenReturn(result);
|
||||
Result response = processDefinitionController.releaseProcessDefinition(user, projectName, id, ReleaseState.OFFLINE.ordinal());
|
||||
Assert.assertEquals(Status.SUCCESS.getCode(), response.getCode().intValue());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testQueryProcessDefinitionById() throws Exception {
|
||||
|
||||
String json = "{\"globalParams\":[],\"tasks\":[{\"type\":\"SHELL\",\"id\":\"tasks-36196\",\"name\":\"ssh_test1\",\"params\":{\"resourceList\":[],\"localParams\":[],\"rawScript\":\"aa=\\\"1234\\\"\\necho ${aa}\"},\"desc\":\"\",\"runFlag\":\"NORMAL\",\"dependence\":{},\"maxRetryTimes\":\"0\",\"retryInterval\":\"1\",\"timeout\":{\"strategy\":\"\",\"interval\":null,\"enable\":false},\"taskInstancePriority\":\"MEDIUM\",\"workerGroupId\":-1,\"preTasks\":[]}],\"tenantId\":-1,\"timeout\":0}";
|
||||
String json = "{\"globalParams\":[],\"tasks\":[{\"type\":\"SHELL\",\"id\":\"tasks-36196\",\"name\":\"ssh_test1"
|
||||
+ "\",\"params\":{\"resourceList\":[],\"localParams\":[],\"rawScript\":\"aa=\\\"1234\\\"\\necho ${aa}"
|
||||
+ "\"},\"desc\":\"\",\"runFlag\":\"NORMAL\",\"dependence\":{},\"maxRetryTimes\":\"0\",\"retryInterval\""
|
||||
+ ":\"1\",\"timeout\":{\"strategy\":\"\",\"interval\":null,\"enable\":false},\"taskInstancePriority\":"
|
||||
+ "\"MEDIUM\",\"workerGroupId\":-1,\"preTasks\":[]}],\"tenantId\":-1,\"timeout\":0}";
|
||||
String locations = "{\"tasks-36196\":{\"name\":\"ssh_test1\",\"targetarr\":\"\",\"x\":141,\"y\":70}}";
|
||||
String projectName = "test";
|
||||
String name = "dag_test";
|
||||
|
|
@ -174,10 +194,10 @@ public class ProcessDefinitionControllerTest {
|
|||
putMsg(result, Status.SUCCESS);
|
||||
result.put(Constants.DATA_LIST, processDefinition);
|
||||
|
||||
Mockito.when(processDefinitionService.queryProcessDefinitionById(user, projectName,id)).thenReturn(result);
|
||||
Result response = processDefinitionController.queryProcessDefinitionById(user, projectName,id);
|
||||
Mockito.when(processDefinitionService.queryProcessDefinitionById(user, projectName, id)).thenReturn(result);
|
||||
Result response = processDefinitionController.queryProcessDefinitionById(user, projectName, id);
|
||||
|
||||
Assert.assertEquals(Status.SUCCESS.getCode(),response.getCode().intValue());
|
||||
Assert.assertEquals(Status.SUCCESS.getCode(), response.getCode().intValue());
|
||||
}
|
||||
|
||||
@Test
|
||||
|
|
@ -190,10 +210,10 @@ public class ProcessDefinitionControllerTest {
|
|||
Map<String, Object> result = new HashMap<>();
|
||||
putMsg(result, Status.SUCCESS);
|
||||
|
||||
Mockito.when(processDefinitionService.batchCopyProcessDefinition(user,projectName,id,targetProjectId)).thenReturn(result);
|
||||
Result response = processDefinitionController.copyProcessDefinition(user, projectName,id,targetProjectId);
|
||||
Mockito.when(processDefinitionService.batchCopyProcessDefinition(user, projectName, id, targetProjectId)).thenReturn(result);
|
||||
Result response = processDefinitionController.copyProcessDefinition(user, projectName, id, targetProjectId);
|
||||
|
||||
Assert.assertEquals(Status.SUCCESS.getCode(),response.getCode().intValue());
|
||||
Assert.assertEquals(Status.SUCCESS.getCode(), response.getCode().intValue());
|
||||
}
|
||||
|
||||
@Test
|
||||
|
|
@ -206,35 +226,37 @@ public class ProcessDefinitionControllerTest {
|
|||
Map<String, Object> result = new HashMap<>();
|
||||
putMsg(result, Status.SUCCESS);
|
||||
|
||||
Mockito.when(processDefinitionService.batchMoveProcessDefinition(user,projectName,id,targetProjectId)).thenReturn(result);
|
||||
Result response = processDefinitionController.moveProcessDefinition(user, projectName,id,targetProjectId);
|
||||
Mockito.when(processDefinitionService.batchMoveProcessDefinition(user, projectName, id, targetProjectId)).thenReturn(result);
|
||||
Result response = processDefinitionController.moveProcessDefinition(user, projectName, id, targetProjectId);
|
||||
|
||||
Assert.assertEquals(Status.SUCCESS.getCode(),response.getCode().intValue());
|
||||
Assert.assertEquals(Status.SUCCESS.getCode(), response.getCode().intValue());
|
||||
}
|
||||
|
||||
|
||||
@Test
|
||||
public void testQueryProcessDefinitionList() throws Exception {
|
||||
|
||||
String projectName = "test";
|
||||
List<ProcessDefinition> resourceList = getDefinitionList();
|
||||
List<ProcessDefinition> resourceList = getDefinitionList();
|
||||
|
||||
Map<String, Object> result = new HashMap<>();
|
||||
putMsg(result, Status.SUCCESS);
|
||||
result.put(Constants.DATA_LIST, resourceList);
|
||||
|
||||
|
||||
Mockito.when(processDefinitionService.queryProcessDefinitionList(user, projectName)).thenReturn(result);
|
||||
Result response = processDefinitionController.queryProcessDefinitionList(user, projectName);
|
||||
|
||||
Assert.assertEquals(Status.SUCCESS.getCode(),response.getCode().intValue());
|
||||
Assert.assertEquals(Status.SUCCESS.getCode(), response.getCode().intValue());
|
||||
}
|
||||
|
||||
public List<ProcessDefinition> getDefinitionList(){
|
||||
public List<ProcessDefinition> getDefinitionList() {
|
||||
|
||||
List<ProcessDefinition> resourceList = new ArrayList<>();
|
||||
|
||||
String json = "{\"globalParams\":[],\"tasks\":[{\"type\":\"SHELL\",\"id\":\"tasks-36196\",\"name\":\"ssh_test1\",\"params\":{\"resourceList\":[],\"localParams\":[],\"rawScript\":\"aa=\\\"1234\\\"\\necho ${aa}\"},\"desc\":\"\",\"runFlag\":\"NORMAL\",\"dependence\":{},\"maxRetryTimes\":\"0\",\"retryInterval\":\"1\",\"timeout\":{\"strategy\":\"\",\"interval\":null,\"enable\":false},\"taskInstancePriority\":\"MEDIUM\",\"workerGroupId\":-1,\"preTasks\":[]}],\"tenantId\":-1,\"timeout\":0}";
|
||||
String json = "{\"globalParams\":[],\"tasks\":[{\"type\":\"SHELL\",\"id\":\"tasks-36196\",\"name\":\"ssh_test1"
|
||||
+ "\",\"params\":{\"resourceList\":[],\"localParams\":[],\"rawScript\":\"aa=\\\"1234\\\"\\necho ${aa}"
|
||||
+ "\"},\"desc\":\"\",\"runFlag\":\"NORMAL\",\"dependence\":{},\"maxRetryTimes\":\"0\",\"retryInterval"
|
||||
+ "\":\"1\",\"timeout\":{\"strategy\":\"\",\"interval\":null,\"enable\":false},\"taskInstancePriority\""
|
||||
+ ":\"MEDIUM\",\"workerGroupId\":-1,\"preTasks\":[]}],\"tenantId\":-1,\"timeout\":0}";
|
||||
String locations = "{\"tasks-36196\":{\"name\":\"ssh_test1\",\"targetarr\":\"\",\"x\":141,\"y\":70}}";
|
||||
String projectName = "test";
|
||||
String name = "dag_test";
|
||||
|
|
@ -266,7 +288,7 @@ public class ProcessDefinitionControllerTest {
|
|||
resourceList.add(processDefinition);
|
||||
resourceList.add(processDefinition2);
|
||||
|
||||
return resourceList;
|
||||
return resourceList;
|
||||
}
|
||||
|
||||
@Test
|
||||
|
|
@ -277,13 +299,13 @@ public class ProcessDefinitionControllerTest {
|
|||
Map<String, Object> result = new HashMap<>();
|
||||
putMsg(result, Status.SUCCESS);
|
||||
|
||||
Mockito.when(processDefinitionService.deleteProcessDefinitionById(user, projectName,id)).thenReturn(result);
|
||||
Result response = processDefinitionController.deleteProcessDefinitionById(user, projectName,id);
|
||||
Mockito.when(processDefinitionService.deleteProcessDefinitionById(user, projectName, id)).thenReturn(result);
|
||||
Result response = processDefinitionController.deleteProcessDefinitionById(user, projectName, id);
|
||||
|
||||
Assert.assertEquals(Status.SUCCESS.getCode(),response.getCode().intValue());
|
||||
Assert.assertEquals(Status.SUCCESS.getCode(), response.getCode().intValue());
|
||||
}
|
||||
|
||||
@Test
|
||||
@Test
|
||||
public void testGetNodeListByDefinitionId() throws Exception {
|
||||
String projectName = "test";
|
||||
int id = 1;
|
||||
|
|
@ -292,9 +314,9 @@ public class ProcessDefinitionControllerTest {
|
|||
putMsg(result, Status.SUCCESS);
|
||||
|
||||
Mockito.when(processDefinitionService.getTaskNodeListByDefinitionId(id)).thenReturn(result);
|
||||
Result response = processDefinitionController.getNodeListByDefinitionId(user,projectName,id);
|
||||
Result response = processDefinitionController.getNodeListByDefinitionId(user, projectName, id);
|
||||
|
||||
Assert.assertEquals(Status.SUCCESS.getCode(),response.getCode().intValue());
|
||||
Assert.assertEquals(Status.SUCCESS.getCode(), response.getCode().intValue());
|
||||
}
|
||||
|
||||
@Test
|
||||
|
|
@ -306,57 +328,57 @@ public class ProcessDefinitionControllerTest {
|
|||
putMsg(result, Status.SUCCESS);
|
||||
|
||||
Mockito.when(processDefinitionService.getTaskNodeListByDefinitionIdList(idList)).thenReturn(result);
|
||||
Result response = processDefinitionController.getNodeListByDefinitionIdList(user,projectName,idList);
|
||||
Result response = processDefinitionController.getNodeListByDefinitionIdList(user, projectName, idList);
|
||||
|
||||
Assert.assertEquals(Status.SUCCESS.getCode(),response.getCode().intValue());
|
||||
Assert.assertEquals(Status.SUCCESS.getCode(), response.getCode().intValue());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testQueryProcessDefinitionAllByProjectId() throws Exception{
|
||||
public void testQueryProcessDefinitionAllByProjectId() throws Exception {
|
||||
int projectId = 1;
|
||||
Map<String,Object> result = new HashMap<>();
|
||||
putMsg(result,Status.SUCCESS);
|
||||
Map<String, Object> result = new HashMap<>();
|
||||
putMsg(result, Status.SUCCESS);
|
||||
|
||||
Mockito.when(processDefinitionService.queryProcessDefinitionAllByProjectId(projectId)).thenReturn(result);
|
||||
Result response = processDefinitionController.queryProcessDefinitionAllByProjectId(user,projectId);
|
||||
Result response = processDefinitionController.queryProcessDefinitionAllByProjectId(user, projectId);
|
||||
|
||||
Assert.assertEquals(Status.SUCCESS.getCode(),response.getCode().intValue());
|
||||
Assert.assertEquals(Status.SUCCESS.getCode(), response.getCode().intValue());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testViewTree() throws Exception{
|
||||
public void testViewTree() throws Exception {
|
||||
String projectName = "test";
|
||||
int processId = 1;
|
||||
int limit = 2;
|
||||
Map<String,Object> result = new HashMap<>();
|
||||
putMsg(result,Status.SUCCESS);
|
||||
Map<String, Object> result = new HashMap<>();
|
||||
putMsg(result, Status.SUCCESS);
|
||||
|
||||
Mockito.when(processDefinitionService.viewTree(processId,limit)).thenReturn(result);
|
||||
Result response = processDefinitionController.viewTree(user,projectName,processId,limit);
|
||||
Mockito.when(processDefinitionService.viewTree(processId, limit)).thenReturn(result);
|
||||
Result response = processDefinitionController.viewTree(user, projectName, processId, limit);
|
||||
|
||||
Assert.assertEquals(Status.SUCCESS.getCode(),response.getCode().intValue());
|
||||
Assert.assertEquals(Status.SUCCESS.getCode(), response.getCode().intValue());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testQueryProcessDefinitionListPaging() throws Exception{
|
||||
public void testQueryProcessDefinitionListPaging() throws Exception {
|
||||
String projectName = "test";
|
||||
int pageNo = 1;
|
||||
int pageSize = 10;
|
||||
String searchVal = "";
|
||||
int userId = 1;
|
||||
|
||||
Map<String,Object> result = new HashMap<>();
|
||||
putMsg(result,Status.SUCCESS);
|
||||
result.put(Constants.DATA_LIST,new PageInfo<Resource>(1,10));
|
||||
Map<String, Object> result = new HashMap<>();
|
||||
putMsg(result, Status.SUCCESS);
|
||||
result.put(Constants.DATA_LIST, new PageInfo<Resource>(1, 10));
|
||||
|
||||
Mockito.when(processDefinitionService.queryProcessDefinitionListPaging(user,projectName, searchVal, pageNo, pageSize, userId)).thenReturn(result);
|
||||
Result response = processDefinitionController.queryProcessDefinitionListPaging(user,projectName,pageNo,searchVal,userId,pageSize);
|
||||
Mockito.when(processDefinitionService.queryProcessDefinitionListPaging(user, projectName, searchVal, pageNo, pageSize, userId)).thenReturn(result);
|
||||
Result response = processDefinitionController.queryProcessDefinitionListPaging(user, projectName, pageNo, searchVal, userId, pageSize);
|
||||
|
||||
Assert.assertEquals(Status.SUCCESS.getCode(),response.getCode().intValue());
|
||||
Assert.assertEquals(Status.SUCCESS.getCode(), response.getCode().intValue());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testBatchExportProcessDefinitionByIds() throws Exception{
|
||||
public void testBatchExportProcessDefinitionByIds() throws Exception {
|
||||
|
||||
String processDefinitionIds = "1,2";
|
||||
String projectName = "test";
|
||||
|
|
@ -365,4 +387,67 @@ public class ProcessDefinitionControllerTest {
|
|||
processDefinitionController.batchExportProcessDefinitionByIds(user, projectName, processDefinitionIds, response);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testQueryProcessDefinitionVersions() {
|
||||
String projectName = "test";
|
||||
Map<String, Object> resultMap = new HashMap<>();
|
||||
putMsg(resultMap, Status.SUCCESS);
|
||||
resultMap.put(Constants.DATA_LIST, new PageInfo<ProcessDefinitionVersion>(1, 10));
|
||||
Mockito.when(processDefinitionVersionService.queryProcessDefinitionVersions(
|
||||
user
|
||||
, projectName
|
||||
, 1
|
||||
, 10
|
||||
, 1))
|
||||
.thenReturn(resultMap);
|
||||
Result result = processDefinitionController.queryProcessDefinitionVersions(
|
||||
user
|
||||
, projectName
|
||||
, 1
|
||||
, 10
|
||||
, 1);
|
||||
|
||||
Assert.assertEquals(Status.SUCCESS.getCode(), (int) result.getCode());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testSwitchProcessDefinitionVersion() {
|
||||
String projectName = "test";
|
||||
Map<String, Object> resultMap = new HashMap<>();
|
||||
putMsg(resultMap, Status.SUCCESS);
|
||||
Mockito.when(processDefinitionService.switchProcessDefinitionVersion(
|
||||
user
|
||||
, projectName
|
||||
, 1
|
||||
, 10))
|
||||
.thenReturn(resultMap);
|
||||
Result result = processDefinitionController.switchProcessDefinitionVersion(
|
||||
user
|
||||
, projectName
|
||||
, 1
|
||||
, 10);
|
||||
|
||||
Assert.assertEquals(Status.SUCCESS.getCode(), (int) result.getCode());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testDeleteProcessDefinitionVersion() {
|
||||
String projectName = "test";
|
||||
Map<String, Object> resultMap = new HashMap<>();
|
||||
putMsg(resultMap, Status.SUCCESS);
|
||||
Mockito.when(processDefinitionVersionService.deleteByProcessDefinitionIdAndVersion(
|
||||
user
|
||||
, projectName
|
||||
, 1
|
||||
, 10))
|
||||
.thenReturn(resultMap);
|
||||
Result result = processDefinitionController.deleteProcessDefinitionVersion(
|
||||
user
|
||||
, projectName
|
||||
, 1
|
||||
, 10);
|
||||
|
||||
Assert.assertEquals(Status.SUCCESS.getCode(), (int) result.getCode());
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
|||
|
|
@ -18,7 +18,7 @@ package org.apache.dolphinscheduler.api.controller;
|
|||
|
||||
import org.apache.dolphinscheduler.api.enums.Status;
|
||||
import org.apache.dolphinscheduler.api.utils.Result;
|
||||
import org.apache.dolphinscheduler.common.utils.*;
|
||||
import org.apache.dolphinscheduler.common.utils.JSONUtils;
|
||||
import org.junit.Assert;
|
||||
import org.junit.Test;
|
||||
import org.slf4j.Logger;
|
||||
|
|
@ -33,6 +33,9 @@ import static org.springframework.test.web.servlet.request.MockMvcRequestBuilder
|
|||
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content;
|
||||
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
/**
|
||||
* users controller test
|
||||
*/
|
||||
|
|
@ -302,4 +305,22 @@ public class UsersControllerTest extends AbstractControllerTest{
|
|||
Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
|
||||
Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testBatchActivateUser() throws Exception {
|
||||
List<String> userNames = new ArrayList<>();
|
||||
userNames.add("user_sky_cxl");
|
||||
userNames.add("19990323");
|
||||
userNames.add("test_sky_post_11");
|
||||
String jsonUserNames = JSONUtils.toJsonString(userNames);
|
||||
MvcResult mvcResult = mockMvc.perform(post("/users/batch/activate")
|
||||
.header(SESSION_ID, sessionId)
|
||||
.contentType(MediaType.APPLICATION_JSON)
|
||||
.content(jsonUserNames))
|
||||
.andExpect(status().isOk())
|
||||
.andReturn();
|
||||
|
||||
Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
|
||||
Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue());
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -22,10 +22,14 @@ import org.apache.dolphinscheduler.common.Constants;
|
|||
import org.apache.dolphinscheduler.common.enums.DbConnectType;
|
||||
import org.apache.dolphinscheduler.common.enums.DbType;
|
||||
import org.apache.dolphinscheduler.common.enums.UserType;
|
||||
import org.apache.dolphinscheduler.common.utils.JSONUtils;
|
||||
import org.apache.dolphinscheduler.common.utils.PropertyUtils;
|
||||
import org.apache.dolphinscheduler.dao.datasource.DataSourceFactory;
|
||||
import org.apache.dolphinscheduler.dao.datasource.MySQLDataSource;
|
||||
import org.apache.dolphinscheduler.dao.entity.DataSource;
|
||||
import org.apache.dolphinscheduler.dao.entity.User;
|
||||
import org.apache.dolphinscheduler.dao.mapper.DataSourceMapper;
|
||||
import org.apache.dolphinscheduler.dao.mapper.DataSourceUserMapper;
|
||||
import org.junit.Assert;
|
||||
import org.junit.Test;
|
||||
import org.junit.runner.RunWith;
|
||||
|
|
@ -35,8 +39,6 @@ import org.mockito.Mockito;
|
|||
import org.powermock.api.mockito.PowerMockito;
|
||||
import org.powermock.core.classloader.annotations.PowerMockIgnore;
|
||||
import org.powermock.modules.junit4.PowerMockRunner;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
|
@ -45,16 +47,172 @@ import java.util.Map;
|
|||
@RunWith(PowerMockRunner.class)
|
||||
@PowerMockIgnore({"sun.security.*", "javax.net.*"})
|
||||
public class DataSourceServiceTest {
|
||||
private static final Logger logger = LoggerFactory.getLogger(DataSourceServiceTest.class);
|
||||
|
||||
@InjectMocks
|
||||
private DataSourceService dataSourceService;
|
||||
@Mock
|
||||
private DataSourceMapper dataSourceMapper;
|
||||
@Mock
|
||||
private DataSourceUserMapper datasourceUserMapper;
|
||||
|
||||
public void createDataSourceTest() {
|
||||
User loginUser = getAdminUser();
|
||||
|
||||
String dataSourceName = "dataSource01";
|
||||
String dataSourceDesc = "test dataSource";
|
||||
DbType dataSourceType = DbType.POSTGRESQL;
|
||||
String parameter = dataSourceService.buildParameter(dataSourceType, "172.16.133.200", "5432", "dolphinscheduler", null, "postgres", "", null, null);
|
||||
|
||||
// data source exits
|
||||
List<DataSource> dataSourceList = new ArrayList<>();
|
||||
DataSource dataSource = new DataSource();
|
||||
dataSource.setName(dataSourceName);
|
||||
dataSourceList.add(dataSource);
|
||||
PowerMockito.when(dataSourceMapper.queryDataSourceByName(dataSourceName.trim())).thenReturn(dataSourceList);
|
||||
Map<String, Object> dataSourceExitsResult = dataSourceService.createDataSource(loginUser, dataSourceName, dataSourceDesc, dataSourceType, parameter);
|
||||
Assert.assertEquals(Status.DATASOURCE_EXIST, dataSourceExitsResult.get(Constants.STATUS));
|
||||
|
||||
// data source exits
|
||||
PowerMockito.when(dataSourceMapper.queryDataSourceByName(dataSourceName.trim())).thenReturn(null);
|
||||
PowerMockito.when(dataSourceService.checkConnection(dataSourceType, parameter)).thenReturn(false);
|
||||
Map<String, Object> connectFailedResult = dataSourceService.createDataSource(loginUser, dataSourceName, dataSourceDesc, dataSourceType, parameter);
|
||||
Assert.assertEquals(Status.DATASOURCE_CONNECT_FAILED, connectFailedResult.get(Constants.STATUS));
|
||||
|
||||
// data source exits
|
||||
PowerMockito.when(dataSourceMapper.queryDataSourceByName(dataSourceName.trim())).thenReturn(null);
|
||||
PowerMockito.when(dataSourceService.checkConnection(dataSourceType, parameter)).thenReturn(true);
|
||||
PowerMockito.when(DataSourceFactory.getDatasource(dataSourceType, parameter)).thenReturn(null);
|
||||
Map<String, Object> notValidError = dataSourceService.createDataSource(loginUser, dataSourceName, dataSourceDesc, dataSourceType, parameter);
|
||||
Assert.assertEquals(Status.REQUEST_PARAMS_NOT_VALID_ERROR, notValidError.get(Constants.STATUS));
|
||||
|
||||
// success
|
||||
PowerMockito.when(dataSourceMapper.queryDataSourceByName(dataSourceName.trim())).thenReturn(null);
|
||||
PowerMockito.when(dataSourceService.checkConnection(dataSourceType, parameter)).thenReturn(true);
|
||||
PowerMockito.when(DataSourceFactory.getDatasource(dataSourceType, parameter)).thenReturn(JSONUtils.parseObject(parameter, MySQLDataSource.class));
|
||||
Map<String, Object> success = dataSourceService.createDataSource(loginUser, dataSourceName, dataSourceDesc, dataSourceType, parameter);
|
||||
Assert.assertEquals(Status.SUCCESS, success.get(Constants.STATUS));
|
||||
}
|
||||
|
||||
public void updateDataSourceTest() {
|
||||
User loginUser = getAdminUser();
|
||||
|
||||
int dataSourceId = 12;
|
||||
String dataSourceName = "dataSource01";
|
||||
String dataSourceDesc = "test dataSource";
|
||||
DbType dataSourceType = DbType.POSTGRESQL;
|
||||
String parameter = dataSourceService.buildParameter(dataSourceType, "172.16.133.200", "5432", "dolphinscheduler", null, "postgres", "", null, null);
|
||||
|
||||
// data source not exits
|
||||
PowerMockito.when(dataSourceMapper.selectById(dataSourceId)).thenReturn(null);
|
||||
Map<String, Object> resourceNotExits = dataSourceService.updateDataSource(dataSourceId, loginUser, dataSourceName, dataSourceDesc, dataSourceType, parameter);
|
||||
Assert.assertEquals(Status.RESOURCE_NOT_EXIST, resourceNotExits.get(Constants.STATUS));
|
||||
// user no operation perm
|
||||
DataSource dataSource = new DataSource();
|
||||
dataSource.setUserId(0);
|
||||
PowerMockito.when(dataSourceMapper.selectById(dataSourceId)).thenReturn(dataSource);
|
||||
Map<String, Object> userNoOperationPerm = dataSourceService.updateDataSource(dataSourceId, loginUser, dataSourceName, dataSourceDesc, dataSourceType, parameter);
|
||||
Assert.assertEquals(Status.USER_NO_OPERATION_PERM, userNoOperationPerm.get(Constants.STATUS));
|
||||
|
||||
// data source name exits
|
||||
dataSource.setUserId(-1);
|
||||
List<DataSource> dataSourceList = new ArrayList<>();
|
||||
dataSourceList.add(dataSource);
|
||||
PowerMockito.when(dataSourceMapper.selectById(dataSourceId)).thenReturn(dataSource);
|
||||
PowerMockito.when(dataSourceMapper.queryDataSourceByName(dataSourceName)).thenReturn(dataSourceList);
|
||||
Map<String, Object> dataSourceNameExist = dataSourceService.updateDataSource(dataSourceId, loginUser, dataSourceName, dataSourceDesc, dataSourceType, parameter);
|
||||
Assert.assertEquals(Status.DATASOURCE_EXIST, dataSourceNameExist.get(Constants.STATUS));
|
||||
|
||||
// data source connect failed
|
||||
PowerMockito.when(dataSourceMapper.selectById(dataSourceId)).thenReturn(dataSource);
|
||||
PowerMockito.when(dataSourceMapper.queryDataSourceByName(dataSourceName)).thenReturn(null);
|
||||
PowerMockito.when(dataSourceService.checkConnection(dataSourceType, parameter)).thenReturn(true);
|
||||
Map<String, Object> connectFailed = dataSourceService.updateDataSource(dataSourceId, loginUser, dataSourceName, dataSourceDesc, dataSourceType, parameter);
|
||||
Assert.assertEquals(Status.DATASOURCE_CONNECT_FAILED, connectFailed.get(Constants.STATUS));
|
||||
|
||||
//success
|
||||
PowerMockito.when(dataSourceMapper.selectById(dataSourceId)).thenReturn(dataSource);
|
||||
PowerMockito.when(dataSourceMapper.queryDataSourceByName(dataSourceName)).thenReturn(null);
|
||||
PowerMockito.when(dataSourceService.checkConnection(dataSourceType, parameter)).thenReturn(false);
|
||||
Map<String, Object> success = dataSourceService.updateDataSource(dataSourceId, loginUser, dataSourceName, dataSourceDesc, dataSourceType, parameter);
|
||||
Assert.assertEquals(Status.SUCCESS, connectFailed.get(Constants.STATUS));
|
||||
|
||||
}
|
||||
|
||||
@Test
|
||||
public void queryDataSourceListTest(){
|
||||
public void queryDataSourceListPagingTest() {
|
||||
User loginUser = getAdminUser();
|
||||
String searchVal = "";
|
||||
int pageNo = 1;
|
||||
int pageSize = 10;
|
||||
Map<String, Object> success = dataSourceService.queryDataSourceListPaging(loginUser, searchVal, pageNo, pageSize);
|
||||
Assert.assertEquals(Status.SUCCESS, success.get(Constants.STATUS));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void connectionTest() {
|
||||
int dataSourceId = -1;
|
||||
PowerMockito.when(dataSourceMapper.selectById(dataSourceId)).thenReturn(null);
|
||||
Assert.assertFalse(dataSourceService.connectionTest(dataSourceId));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void deleteTest() {
|
||||
User loginUser = getAdminUser();
|
||||
int dataSourceId = 1;
|
||||
Result result = new Result();
|
||||
|
||||
//resource not exist
|
||||
dataSourceService.putMsg(result, Status.RESOURCE_NOT_EXIST);
|
||||
PowerMockito.when(dataSourceMapper.selectById(dataSourceId)).thenReturn(null);
|
||||
Assert.assertEquals(result.getCode(), dataSourceService.delete(loginUser, dataSourceId).getCode());
|
||||
|
||||
// user no operation perm
|
||||
dataSourceService.putMsg(result, Status.USER_NO_OPERATION_PERM);
|
||||
DataSource dataSource = new DataSource();
|
||||
dataSource.setUserId(0);
|
||||
PowerMockito.when(dataSourceMapper.selectById(dataSourceId)).thenReturn(dataSource);
|
||||
Assert.assertEquals(result.getCode(), dataSourceService.delete(loginUser, dataSourceId).getCode());
|
||||
|
||||
// success
|
||||
dataSourceService.putMsg(result, Status.SUCCESS);
|
||||
dataSource.setUserId(-1);
|
||||
PowerMockito.when(dataSourceMapper.selectById(dataSourceId)).thenReturn(dataSource);
|
||||
Assert.assertEquals(result.getCode(), dataSourceService.delete(loginUser, dataSourceId).getCode());
|
||||
|
||||
}
|
||||
|
||||
@Test
|
||||
public void unauthDatasourceTest() {
|
||||
User loginUser = getAdminUser();
|
||||
int userId = -1;
|
||||
|
||||
//user no operation perm
|
||||
Map<String, Object> noOperationPerm = dataSourceService.unauthDatasource(loginUser, userId);
|
||||
Assert.assertEquals(Status.USER_NO_OPERATION_PERM, noOperationPerm.get(Constants.STATUS));
|
||||
|
||||
//success
|
||||
loginUser.setUserType(UserType.ADMIN_USER);
|
||||
Map<String, Object> success = dataSourceService.unauthDatasource(loginUser, userId);
|
||||
Assert.assertEquals(Status.SUCCESS, success.get(Constants.STATUS));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void authedDatasourceTest() {
|
||||
User loginUser = getAdminUser();
|
||||
int userId = -1;
|
||||
|
||||
//user no operation perm
|
||||
Map<String, Object> noOperationPerm = dataSourceService.authedDatasource(loginUser, userId);
|
||||
Assert.assertEquals(Status.USER_NO_OPERATION_PERM, noOperationPerm.get(Constants.STATUS));
|
||||
|
||||
//success
|
||||
loginUser.setUserType(UserType.ADMIN_USER);
|
||||
Map<String, Object> success = dataSourceService.authedDatasource(loginUser, userId);
|
||||
Assert.assertEquals(Status.SUCCESS, success.get(Constants.STATUS));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void queryDataSourceListTest() {
|
||||
User loginUser = new User();
|
||||
loginUser.setUserType(UserType.GENERAL_USER);
|
||||
Map<String, Object> map = dataSourceService.queryDataSourceList(loginUser, DbType.MYSQL.ordinal());
|
||||
|
|
@ -62,35 +220,34 @@ public class DataSourceServiceTest {
|
|||
}
|
||||
|
||||
@Test
|
||||
public void verifyDataSourceNameTest(){
|
||||
public void verifyDataSourceNameTest() {
|
||||
User loginUser = new User();
|
||||
loginUser.setUserType(UserType.GENERAL_USER);
|
||||
String dataSourceName = "dataSource1";
|
||||
PowerMockito.when(dataSourceMapper.queryDataSourceByName(dataSourceName)).thenReturn(getDataSourceList());
|
||||
Result result = dataSourceService.verifyDataSourceName(loginUser, dataSourceName);
|
||||
Assert.assertEquals(Status.DATASOURCE_EXIST.getMsg(),result.getMsg());
|
||||
Result result = dataSourceService.verifyDataSourceName(dataSourceName);
|
||||
Assert.assertEquals(Status.DATASOURCE_EXIST.getMsg(), result.getMsg());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void queryDataSourceTest(){
|
||||
public void queryDataSourceTest() {
|
||||
PowerMockito.when(dataSourceMapper.selectById(Mockito.anyInt())).thenReturn(null);
|
||||
Map<String, Object> result = dataSourceService.queryDataSource(Mockito.anyInt());
|
||||
Assert.assertEquals(((Status)result.get(Constants.STATUS)).getCode(),Status.RESOURCE_NOT_EXIST.getCode());
|
||||
Assert.assertEquals(((Status) result.get(Constants.STATUS)).getCode(), Status.RESOURCE_NOT_EXIST.getCode());
|
||||
|
||||
PowerMockito.when(dataSourceMapper.selectById(Mockito.anyInt())).thenReturn(getOracleDataSource());
|
||||
result = dataSourceService.queryDataSource(Mockito.anyInt());
|
||||
Assert.assertEquals(((Status)result.get(Constants.STATUS)).getCode(),Status.SUCCESS.getCode());
|
||||
Assert.assertEquals(((Status) result.get(Constants.STATUS)).getCode(), Status.SUCCESS.getCode());
|
||||
}
|
||||
|
||||
private List<DataSource> getDataSourceList() {
|
||||
|
||||
private List<DataSource> getDataSourceList(){
|
||||
|
||||
List<DataSource> dataSources = new ArrayList<>();
|
||||
List<DataSource> dataSources = new ArrayList<>();
|
||||
dataSources.add(getOracleDataSource());
|
||||
return dataSources;
|
||||
}
|
||||
|
||||
private DataSource getOracleDataSource(){
|
||||
private DataSource getOracleDataSource() {
|
||||
DataSource dataSource = new DataSource();
|
||||
dataSource.setName("test");
|
||||
dataSource.setNote("Note");
|
||||
|
|
@ -101,31 +258,40 @@ public class DataSourceServiceTest {
|
|||
}
|
||||
|
||||
@Test
|
||||
public void buildParameter(){
|
||||
String param = dataSourceService.buildParameter("","", DbType.ORACLE, "192.168.9.1","1521","im"
|
||||
,"","test","test", DbConnectType.ORACLE_SERVICE_NAME,"");
|
||||
public void buildParameter() {
|
||||
String param = dataSourceService.buildParameter(DbType.ORACLE, "192.168.9.1", "1521", "im"
|
||||
, "", "test", "test", DbConnectType.ORACLE_SERVICE_NAME, "");
|
||||
String expected = "{\"connectType\":\"ORACLE_SERVICE_NAME\",\"type\":\"ORACLE_SERVICE_NAME\",\"address\":\"jdbc:oracle:thin:@//192.168.9.1:1521\",\"database\":\"im\",\"jdbcUrl\":\"jdbc:oracle:thin:@//192.168.9.1:1521/im\",\"user\":\"test\",\"password\":\"test\"}";
|
||||
Assert.assertEquals(expected, param);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void buildParameterWithDecodePassword(){
|
||||
PropertyUtils.setValue(Constants.DATASOURCE_ENCRYPTION_ENABLE,"true");
|
||||
String param = dataSourceService.buildParameter("name","desc", DbType.MYSQL, "192.168.9.1","1521","im"
|
||||
,"","test","123456", null,"");
|
||||
public void buildParameterWithDecodePassword() {
|
||||
PropertyUtils.setValue(Constants.DATASOURCE_ENCRYPTION_ENABLE, "true");
|
||||
String param = dataSourceService.buildParameter(DbType.MYSQL, "192.168.9.1", "1521", "im"
|
||||
, "", "test", "123456", null, "");
|
||||
String expected = "{\"type\":null,\"address\":\"jdbc:mysql://192.168.9.1:1521\",\"database\":\"im\",\"jdbcUrl\":\"jdbc:mysql://192.168.9.1:1521/im\",\"user\":\"test\",\"password\":\"IUAjJCVeJipNVEl6TkRVMg==\"}";
|
||||
Assert.assertEquals(expected, param);
|
||||
|
||||
|
||||
PropertyUtils.setValue(Constants.DATASOURCE_ENCRYPTION_ENABLE,"false");
|
||||
param = dataSourceService.buildParameter("name","desc", DbType.MYSQL, "192.168.9.1","1521","im"
|
||||
,"","test","123456", null,"");
|
||||
PropertyUtils.setValue(Constants.DATASOURCE_ENCRYPTION_ENABLE, "false");
|
||||
param = dataSourceService.buildParameter(DbType.MYSQL, "192.168.9.1", "1521", "im"
|
||||
, "", "test", "123456", null, "");
|
||||
expected = "{\"type\":null,\"address\":\"jdbc:mysql://192.168.9.1:1521\",\"database\":\"im\",\"jdbcUrl\":\"jdbc:mysql://192.168.9.1:1521/im\",\"user\":\"test\",\"password\":\"123456\"}";
|
||||
Assert.assertEquals(expected, param);
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
/**
|
||||
* get Mock Admin User
|
||||
*
|
||||
* @return admin user
|
||||
*/
|
||||
private User getAdminUser() {
|
||||
User loginUser = new User();
|
||||
loginUser.setId(-1);
|
||||
loginUser.setUserName("admin");
|
||||
loginUser.setUserType(UserType.GENERAL_USER);
|
||||
return loginUser;
|
||||
}
|
||||
|
||||
}
|
||||
|
|
@ -14,6 +14,7 @@
|
|||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.dolphinscheduler.api.service;
|
||||
|
||||
import org.apache.dolphinscheduler.api.dto.ProcessMeta;
|
||||
|
|
@ -98,6 +99,9 @@ public class ProcessDefinitionServiceTest {
|
|||
@Mock
|
||||
private TaskInstanceMapper taskInstanceMapper;
|
||||
|
||||
@Mock
|
||||
private ProcessDefinitionVersionService processDefinitionVersionService;
|
||||
|
||||
private static final String SHELL_JSON = "{\n"
|
||||
+ " \"globalParams\": [\n"
|
||||
+ " \n"
|
||||
|
|
@ -390,7 +394,11 @@ public class ProcessDefinitionServiceTest {
|
|||
// instance exit
|
||||
ProcessDefinition definition = getProcessDefinition();
|
||||
definition.setLocations("{\"tasks-36196\":{\"name\":\"ssh_test1\",\"targetarr\":\"\",\"x\":141,\"y\":70}}");
|
||||
definition.setProcessDefinitionJson("{\"globalParams\":[],\"tasks\":[{\"type\":\"SHELL\",\"id\":\"tasks-36196\",\"name\":\"ssh_test1\",\"params\":{\"resourceList\":[],\"localParams\":[],\"rawScript\":\"aa=\\\"1234\\\"\\necho ${aa}\"},\"desc\":\"\",\"runFlag\":\"NORMAL\",\"dependence\":{},\"maxRetryTimes\":\"0\",\"retryInterval\":\"1\",\"timeout\":{\"strategy\":\"\",\"interval\":null,\"enable\":false},\"taskInstancePriority\":\"MEDIUM\",\"workerGroupId\":-1,\"preTasks\":[]}],\"tenantId\":-1,\"timeout\":0}");
|
||||
definition.setProcessDefinitionJson("{\"globalParams\":[],\"tasks\":[{\"type\":\"SHELL\",\"id\":\"tasks-36196\","
|
||||
+ "\"name\":\"ssh_test1\",\"params\":{\"resourceList\":[],\"localParams\":[],\"rawScript\":\"aa=\\\"1234"
|
||||
+ "\\\"\\necho ${aa}\"},\"desc\":\"\",\"runFlag\":\"NORMAL\",\"dependence\":{},\"maxRetryTimes\":\"0\","
|
||||
+ "\"retryInterval\":\"1\",\"timeout\":{\"strategy\":\"\",\"interval\":null,\"enable\":false},"
|
||||
+ "\"taskInstancePriority\":\"MEDIUM\",\"workerGroupId\":-1,\"preTasks\":[]}],\"tenantId\":-1,\"timeout\":0}");
|
||||
definition.setConnects("[]");
|
||||
|
||||
Mockito.when(processDefineMapper.selectById(46)).thenReturn(definition);
|
||||
|
|
@ -432,7 +440,11 @@ public class ProcessDefinitionServiceTest {
|
|||
|
||||
ProcessDefinition definition = getProcessDefinition();
|
||||
definition.setLocations("{\"tasks-36196\":{\"name\":\"ssh_test1\",\"targetarr\":\"\",\"x\":141,\"y\":70}}");
|
||||
definition.setProcessDefinitionJson("{\"globalParams\":[],\"tasks\":[{\"type\":\"SHELL\",\"id\":\"tasks-36196\",\"name\":\"ssh_test1\",\"params\":{\"resourceList\":[],\"localParams\":[],\"rawScript\":\"aa=\\\"1234\\\"\\necho ${aa}\"},\"desc\":\"\",\"runFlag\":\"NORMAL\",\"dependence\":{},\"maxRetryTimes\":\"0\",\"retryInterval\":\"1\",\"timeout\":{\"strategy\":\"\",\"interval\":null,\"enable\":false},\"taskInstancePriority\":\"MEDIUM\",\"workerGroupId\":-1,\"preTasks\":[]}],\"tenantId\":-1,\"timeout\":0}");
|
||||
definition.setProcessDefinitionJson("{\"globalParams\":[],\"tasks\":[{\"type\":\"SHELL\",\"id\":\"tasks-36196\""
|
||||
+ ",\"name\":\"ssh_test1\",\"params\":{\"resourceList\":[],\"localParams\":[],\"rawScript\":\"aa=\\\"1234"
|
||||
+ "\\\"\\necho ${aa}\"},\"desc\":\"\",\"runFlag\":\"NORMAL\",\"dependence\":{},\"maxRetryTimes\":\"0\","
|
||||
+ "\"retryInterval\":\"1\",\"timeout\":{\"strategy\":\"\",\"interval\":null,\"enable\":false},"
|
||||
+ "\"taskInstancePriority\":\"MEDIUM\",\"workerGroupId\":-1,\"preTasks\":[]}],\"tenantId\":-1,\"timeout\":0}");
|
||||
definition.setConnects("[]");
|
||||
|
||||
// check target project result == null
|
||||
|
|
@ -568,14 +580,14 @@ public class ProcessDefinitionServiceTest {
|
|||
|
||||
//FIXME has function exit code 1 when exception
|
||||
//process definition offline
|
||||
// List<Schedule> schedules = new ArrayList<>();
|
||||
// Schedule schedule = getSchedule();
|
||||
// schedules.add(schedule);
|
||||
// Mockito.when(scheduleMapper.selectAllByProcessDefineArray(new int[]{46})).thenReturn(schedules);
|
||||
// Mockito.when(scheduleMapper.updateById(schedule)).thenReturn(1);
|
||||
// Map<String, Object> offlineRes = processDefinitionService.releaseProcessDefinition(loginUser, "project_test1",
|
||||
// 46, ReleaseState.OFFLINE.getCode());
|
||||
// Assert.assertEquals(Status.SUCCESS, offlineRes.get(Constants.STATUS));
|
||||
// List<Schedule> schedules = new ArrayList<>();
|
||||
// Schedule schedule = getSchedule();
|
||||
// schedules.add(schedule);
|
||||
// Mockito.when(scheduleMapper.selectAllByProcessDefineArray(new int[]{46})).thenReturn(schedules);
|
||||
// Mockito.when(scheduleMapper.updateById(schedule)).thenReturn(1);
|
||||
// Map<String, Object> offlineRes = processDefinitionService.releaseProcessDefinition(loginUser, "project_test1",
|
||||
// 46, ReleaseState.OFFLINE.getCode());
|
||||
// Assert.assertEquals(Status.SUCCESS, offlineRes.get(Constants.STATUS));
|
||||
}
|
||||
|
||||
@Test
|
||||
|
|
@ -850,9 +862,12 @@ public class ProcessDefinitionServiceTest {
|
|||
String projectName = "project_test1";
|
||||
Project project = getProject(projectName);
|
||||
|
||||
ProcessDefinition processDefinition = getProcessDefinition();
|
||||
|
||||
Mockito.when(projectMapper.queryByName(projectName)).thenReturn(getProject(projectName));
|
||||
Mockito.when(projectService.checkProjectAndAuth(loginUser, project, projectName)).thenReturn(result);
|
||||
Mockito.when(processService.findProcessDefineById(1)).thenReturn(getProcessDefinition());
|
||||
Mockito.when(processService.findProcessDefineById(1)).thenReturn(processDefinition);
|
||||
Mockito.when(processDefinitionVersionService.addProcessDefinitionVersion(processDefinition)).thenReturn(1L);
|
||||
|
||||
String sqlDependentJson = "{\n"
|
||||
+ " \"globalParams\": [\n"
|
||||
|
|
|
|||
|
|
@ -0,0 +1,274 @@
|
|||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
* contributor license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright ownership.
|
||||
* The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
* (the "License"); you may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.dolphinscheduler.api.service;
|
||||
|
||||
import org.apache.dolphinscheduler.api.enums.Status;
|
||||
import org.apache.dolphinscheduler.api.service.impl.ProcessDefinitionVersionServiceImpl;
|
||||
import org.apache.dolphinscheduler.api.service.impl.ProjectServiceImpl;
|
||||
import org.apache.dolphinscheduler.api.utils.PageInfo;
|
||||
import org.apache.dolphinscheduler.common.Constants;
|
||||
import org.apache.dolphinscheduler.common.enums.UserType;
|
||||
import org.apache.dolphinscheduler.dao.entity.ProcessDefinition;
|
||||
import org.apache.dolphinscheduler.dao.entity.ProcessDefinitionVersion;
|
||||
import org.apache.dolphinscheduler.dao.entity.Project;
|
||||
import org.apache.dolphinscheduler.dao.entity.User;
|
||||
import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionVersionMapper;
|
||||
import org.apache.dolphinscheduler.dao.mapper.ProjectMapper;
|
||||
|
||||
import java.text.MessageFormat;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
|
||||
import org.junit.Assert;
|
||||
import org.junit.Test;
|
||||
import org.junit.runner.RunWith;
|
||||
import org.mockito.InjectMocks;
|
||||
import org.mockito.Mock;
|
||||
import org.mockito.Mockito;
|
||||
import org.mockito.junit.MockitoJUnitRunner;
|
||||
|
||||
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
|
||||
import com.google.common.collect.Lists;
|
||||
|
||||
@RunWith(MockitoJUnitRunner.class)
|
||||
public class ProcessDefinitionVersionServiceTest {
|
||||
|
||||
@InjectMocks
|
||||
private ProcessDefinitionVersionServiceImpl processDefinitionVersionService;
|
||||
|
||||
@Mock
|
||||
private ProcessDefinitionVersionMapper processDefinitionVersionMapper;
|
||||
|
||||
@Mock
|
||||
private ProjectMapper projectMapper;
|
||||
|
||||
@Mock
|
||||
private ProjectServiceImpl projectService;
|
||||
|
||||
@Test
|
||||
public void testAddProcessDefinitionVersion() {
|
||||
long expectedVersion = 5L;
|
||||
ProcessDefinition processDefinition = getProcessDefinition();
|
||||
Mockito.when(processDefinitionVersionMapper
|
||||
.queryMaxVersionByProcessDefinitionId(processDefinition.getId()))
|
||||
.thenReturn(expectedVersion);
|
||||
|
||||
long version = processDefinitionVersionService.addProcessDefinitionVersion(processDefinition);
|
||||
|
||||
Assert.assertEquals(expectedVersion + 1, version);
|
||||
}
|
||||
|
||||
@Test
|
||||
@SuppressWarnings("unchecked")
|
||||
public void testQueryProcessDefinitionVersions() {
|
||||
// pageNo <= 0
|
||||
int pageNo = -1;
|
||||
int pageSize = 10;
|
||||
int processDefinitionId = 66;
|
||||
|
||||
String projectName = "project_test1";
|
||||
User loginUser = new User();
|
||||
loginUser.setId(-1);
|
||||
loginUser.setUserType(UserType.GENERAL_USER);
|
||||
Map<String, Object> resultMap1 = processDefinitionVersionService.queryProcessDefinitionVersions(
|
||||
loginUser
|
||||
, projectName
|
||||
, pageNo
|
||||
, pageSize
|
||||
, processDefinitionId);
|
||||
Assert.assertEquals(Status.QUERY_PROCESS_DEFINITION_VERSIONS_PAGE_NO_OR_PAGE_SIZE_LESS_THAN_1_ERROR
|
||||
, resultMap1.get(Constants.STATUS));
|
||||
|
||||
// pageSize <= 0
|
||||
pageNo = 1;
|
||||
pageSize = -1;
|
||||
Map<String, Object> resultMap2 = processDefinitionVersionService.queryProcessDefinitionVersions(
|
||||
loginUser
|
||||
, projectName
|
||||
, pageNo
|
||||
, pageSize
|
||||
, processDefinitionId);
|
||||
Assert.assertEquals(Status.QUERY_PROCESS_DEFINITION_VERSIONS_PAGE_NO_OR_PAGE_SIZE_LESS_THAN_1_ERROR
|
||||
, resultMap2.get(Constants.STATUS));
|
||||
|
||||
Map<String, Object> res = new HashMap<>();
|
||||
putMsg(res, Status.PROJECT_NOT_FOUNT);
|
||||
Project project = getProject(projectName);
|
||||
Mockito.when(projectMapper.queryByName(projectName))
|
||||
.thenReturn(project);
|
||||
Mockito.when(projectService.checkProjectAndAuth(loginUser, project, projectName))
|
||||
.thenReturn(res);
|
||||
|
||||
// project auth fail
|
||||
pageNo = 1;
|
||||
pageSize = 10;
|
||||
Map<String, Object> resultMap3 = processDefinitionVersionService.queryProcessDefinitionVersions(
|
||||
loginUser
|
||||
, projectName
|
||||
, pageNo
|
||||
, pageSize
|
||||
, processDefinitionId);
|
||||
Assert.assertEquals(Status.PROJECT_NOT_FOUNT, resultMap3.get(Constants.STATUS));
|
||||
|
||||
putMsg(res, Status.SUCCESS);
|
||||
|
||||
Mockito.when(projectService.checkProjectAndAuth(loginUser, project, projectName))
|
||||
.thenReturn(res);
|
||||
|
||||
ProcessDefinitionVersion processDefinitionVersion = getProcessDefinitionVersion(getProcessDefinition());
|
||||
|
||||
Mockito.when(processDefinitionVersionMapper
|
||||
.queryProcessDefinitionVersionsPaging(Mockito.any(Page.class), Mockito.eq(processDefinitionId)))
|
||||
.thenReturn(new Page<ProcessDefinitionVersion>()
|
||||
.setRecords(Lists.newArrayList(processDefinitionVersion)));
|
||||
|
||||
Map<String, Object> resultMap4 = processDefinitionVersionService.queryProcessDefinitionVersions(
|
||||
loginUser
|
||||
, projectName
|
||||
, pageNo
|
||||
, pageSize
|
||||
, processDefinitionId);
|
||||
Assert.assertEquals(Status.SUCCESS, resultMap4.get(Constants.STATUS));
|
||||
Assert.assertEquals(processDefinitionVersion
|
||||
, ((PageInfo<ProcessDefinitionVersion>) resultMap4.get(Constants.DATA_LIST))
|
||||
.getLists().get(0));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testQueryByProcessDefinitionIdAndVersion() {
|
||||
|
||||
ProcessDefinitionVersion expectedProcessDefinitionVersion =
|
||||
getProcessDefinitionVersion(getProcessDefinition());
|
||||
|
||||
int processDefinitionId = 66;
|
||||
long version = 10;
|
||||
Mockito.when(processDefinitionVersionMapper.queryByProcessDefinitionIdAndVersion(processDefinitionId, version))
|
||||
.thenReturn(expectedProcessDefinitionVersion);
|
||||
|
||||
ProcessDefinitionVersion processDefinitionVersion = processDefinitionVersionService
|
||||
.queryByProcessDefinitionIdAndVersion(processDefinitionId, version);
|
||||
|
||||
Assert.assertEquals(expectedProcessDefinitionVersion, processDefinitionVersion);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testDeleteByProcessDefinitionIdAndVersion() {
|
||||
String projectName = "project_test1";
|
||||
int processDefinitionId = 66;
|
||||
long version = 10;
|
||||
Project project = getProject(projectName);
|
||||
Mockito.when(projectMapper.queryByName(projectName))
|
||||
.thenReturn(project);
|
||||
|
||||
User loginUser = new User();
|
||||
loginUser.setId(-1);
|
||||
loginUser.setUserType(UserType.GENERAL_USER);
|
||||
|
||||
// project auth fail
|
||||
Mockito.when(projectService.checkProjectAndAuth(loginUser, project, projectName))
|
||||
.thenReturn(new HashMap<>());
|
||||
|
||||
Map<String, Object> resultMap1 = processDefinitionVersionService.deleteByProcessDefinitionIdAndVersion(
|
||||
loginUser
|
||||
, projectName
|
||||
, processDefinitionId
|
||||
, version);
|
||||
|
||||
Assert.assertEquals(0, resultMap1.size());
|
||||
|
||||
Map<String, Object> res = new HashMap<>();
|
||||
putMsg(res, Status.SUCCESS);
|
||||
|
||||
Mockito.when(processDefinitionVersionMapper.deleteByProcessDefinitionIdAndVersion(processDefinitionId, version))
|
||||
.thenReturn(1);
|
||||
Mockito.when(projectService.checkProjectAndAuth(loginUser, project, projectName))
|
||||
.thenReturn(res);
|
||||
|
||||
Map<String, Object> resultMap2 = processDefinitionVersionService.deleteByProcessDefinitionIdAndVersion(
|
||||
loginUser
|
||||
, projectName
|
||||
, processDefinitionId
|
||||
, version);
|
||||
|
||||
Assert.assertEquals(Status.SUCCESS, resultMap2.get(Constants.STATUS));
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* get mock processDefinitionVersion by processDefinition
|
||||
*
|
||||
* @return processDefinitionVersion
|
||||
*/
|
||||
private ProcessDefinitionVersion getProcessDefinitionVersion(ProcessDefinition processDefinition) {
|
||||
return ProcessDefinitionVersion
|
||||
.newBuilder()
|
||||
.processDefinitionId(processDefinition.getId())
|
||||
.version(1)
|
||||
.processDefinitionJson(processDefinition.getProcessDefinitionJson())
|
||||
.description(processDefinition.getDescription())
|
||||
.locations(processDefinition.getLocations())
|
||||
.connects(processDefinition.getConnects())
|
||||
.timeout(processDefinition.getTimeout())
|
||||
.globalParams(processDefinition.getGlobalParams())
|
||||
.createTime(processDefinition.getUpdateTime())
|
||||
.receivers(processDefinition.getReceivers())
|
||||
.receiversCc(processDefinition.getReceiversCc())
|
||||
.resourceIds(processDefinition.getResourceIds())
|
||||
.build();
|
||||
}
|
||||
|
||||
/**
|
||||
* get mock processDefinition
|
||||
*
|
||||
* @return ProcessDefinition
|
||||
*/
|
||||
private ProcessDefinition getProcessDefinition() {
|
||||
|
||||
ProcessDefinition processDefinition = new ProcessDefinition();
|
||||
processDefinition.setId(66);
|
||||
processDefinition.setName("test_pdf");
|
||||
processDefinition.setProjectId(2);
|
||||
processDefinition.setTenantId(1);
|
||||
processDefinition.setDescription("");
|
||||
|
||||
return processDefinition;
|
||||
}
|
||||
|
||||
/**
|
||||
* get mock Project
|
||||
*
|
||||
* @param projectName projectName
|
||||
* @return Project
|
||||
*/
|
||||
private Project getProject(String projectName) {
|
||||
Project project = new Project();
|
||||
project.setId(1);
|
||||
project.setName(projectName);
|
||||
project.setUserId(1);
|
||||
return project;
|
||||
}
|
||||
|
||||
private void putMsg(Map<String, Object> result, Status status, Object... statusParams) {
|
||||
result.put(Constants.STATUS, status);
|
||||
if (statusParams != null && statusParams.length > 0) {
|
||||
result.put(Constants.MSG, MessageFormat.format(status.getMsg(), statusParams));
|
||||
} else {
|
||||
result.put(Constants.MSG, status.getMsg());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -14,6 +14,7 @@
|
|||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.dolphinscheduler.api.service;
|
||||
|
||||
import static org.mockito.ArgumentMatchers.eq;
|
||||
|
|
@ -87,6 +88,9 @@ public class ProcessInstanceServiceTest {
|
|||
@Mock
|
||||
ProcessDefinitionService processDefinitionService;
|
||||
|
||||
@Mock
|
||||
ProcessDefinitionVersionService processDefinitionVersionService;
|
||||
|
||||
@Mock
|
||||
ExecutorService execService;
|
||||
|
||||
|
|
@ -99,12 +103,11 @@ public class ProcessInstanceServiceTest {
|
|||
@Mock
|
||||
UsersService usersService;
|
||||
|
||||
private String shellJson = "{\"globalParams\":[],\"tasks\":[{\"type\":\"SHELL\",\"id\":\"tasks-9527\",\"name\":\"shell-1\"," +
|
||||
"\"params\":{\"resourceList\":[],\"localParams\":[],\"rawScript\":\"#!/bin/bash\\necho \\\"shell-1\\\"\"}," +
|
||||
"\"description\":\"\",\"runFlag\":\"NORMAL\",\"dependence\":{},\"maxRetryTimes\":\"0\",\"retryInterval\":\"1\"," +
|
||||
"\"timeout\":{\"strategy\":\"\",\"interval\":1,\"enable\":false},\"taskInstancePriority\":\"MEDIUM\"," +
|
||||
"\"workerGroupId\":-1,\"preTasks\":[]}],\"tenantId\":1,\"timeout\":0}";
|
||||
|
||||
private String shellJson = "{\"globalParams\":[],\"tasks\":[{\"type\":\"SHELL\",\"id\":\"tasks-9527\",\"name\":\"shell-1\","
|
||||
+ "\"params\":{\"resourceList\":[],\"localParams\":[],\"rawScript\":\"#!/bin/bash\\necho \\\"shell-1\\\"\"},"
|
||||
+ "\"description\":\"\",\"runFlag\":\"NORMAL\",\"dependence\":{},\"maxRetryTimes\":\"0\",\"retryInterval\":\"1\","
|
||||
+ "\"timeout\":{\"strategy\":\"\",\"interval\":1,\"enable\":false},\"taskInstancePriority\":\"MEDIUM\","
|
||||
+ "\"workerGroupId\":-1,\"preTasks\":[]}],\"tenantId\":1,\"timeout\":0}";
|
||||
|
||||
@Test
|
||||
public void testQueryProcessInstanceList() {
|
||||
|
|
@ -265,19 +268,16 @@ public class ProcessInstanceServiceTest {
|
|||
Assert.assertEquals(Status.SUCCESS, successRes.get(Constants.STATUS));
|
||||
}
|
||||
|
||||
|
||||
@Test
|
||||
public void testParseLogForDependentResult() {
|
||||
String logString = "[INFO] 2019-03-19 17:11:08.475 org.apache.dolphinscheduler.server.worker.log.TaskLogger:[172] - [taskAppId=TASK_223_10739_452334] dependent item complete :|| 223-ALL-day-last1Day,SUCCESS\n" +
|
||||
"[INFO] 2019-03-19 17:11:08.476 org.apache.dolphinscheduler.server.worker.runner.TaskScheduleThread:[172] - task : 223_10739_452334 exit status code : 0\n" +
|
||||
"[root@node2 current]# ";
|
||||
try {
|
||||
Map<String, DependResult> resultMap =
|
||||
processInstanceService.parseLogForDependentResult(logString);
|
||||
Assert.assertEquals(1, resultMap.size());
|
||||
} catch (IOException e) {
|
||||
|
||||
}
|
||||
public void testParseLogForDependentResult() throws IOException {
|
||||
String logString = "[INFO] 2019-03-19 17:11:08.475 org.apache.dolphinscheduler.server.worker.log.TaskLogger:[172]"
|
||||
+ " - [taskAppId=TASK_223_10739_452334] dependent item complete :|| 223-ALL-day-last1Day,SUCCESS\n"
|
||||
+ "[INFO] 2019-03-19 17:11:08.476 org.apache.dolphinscheduler.server.worker.runner.TaskScheduleThread:[172]"
|
||||
+ " - task : 223_10739_452334 exit status code : 0\n"
|
||||
+ "[root@node2 current]# ";
|
||||
Map<String, DependResult> resultMap =
|
||||
processInstanceService.parseLogForDependentResult(logString);
|
||||
Assert.assertEquals(1, resultMap.size());
|
||||
}
|
||||
|
||||
@Test
|
||||
|
|
@ -371,6 +371,7 @@ public class ProcessInstanceServiceTest {
|
|||
when(processService.getTenantForProcess(Mockito.anyInt(), Mockito.anyInt())).thenReturn(tenant);
|
||||
when(processService.updateProcessInstance(processInstance)).thenReturn(1);
|
||||
when(processDefinitionService.checkProcessNodeList(Mockito.any(), eq(shellJson))).thenReturn(result);
|
||||
when(processDefinitionVersionService.addProcessDefinitionVersion(processDefinition)).thenReturn(1L);
|
||||
Map<String, Object> processInstanceFinishRes = processInstanceService.updateProcessInstance(loginUser, projectName, 1,
|
||||
shellJson, "2020-02-21 00:00:00", true, Flag.YES, "", "");
|
||||
Assert.assertEquals(Status.UPDATE_PROCESS_INSTANCE_ERROR, processInstanceFinishRes.get(Constants.STATUS));
|
||||
|
|
@ -401,6 +402,7 @@ public class ProcessInstanceServiceTest {
|
|||
when(projectMapper.queryByName(projectName)).thenReturn(project);
|
||||
when(projectService.checkProjectAndAuth(loginUser, project, projectName)).thenReturn(result);
|
||||
when(processService.findProcessInstanceDetailById(1)).thenReturn(null);
|
||||
when(projectService.checkProjectAndAuth(loginUser, project, projectName)).thenReturn(result);
|
||||
Map<String, Object> processInstanceNullRes = processInstanceService.queryParentInstanceBySubId(loginUser, projectName, 1);
|
||||
Assert.assertEquals(Status.PROCESS_INSTANCE_NOT_EXIST, processInstanceNullRes.get(Constants.STATUS));
|
||||
|
||||
|
|
@ -559,5 +561,4 @@ public class ProcessInstanceServiceTest {
|
|||
}
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
|
|
@ -14,6 +14,7 @@
|
|||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.dolphinscheduler.api.service;
|
||||
|
||||
import org.apache.dolphinscheduler.api.enums.Status;
|
||||
|
|
@ -35,9 +36,7 @@ import java.util.HashMap;
|
|||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
import org.junit.After;
|
||||
import org.junit.Assert;
|
||||
import org.junit.Before;
|
||||
import org.junit.Test;
|
||||
import org.junit.runner.RunWith;
|
||||
import org.mockito.InjectMocks;
|
||||
|
|
@ -53,7 +52,6 @@ import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
|
|||
@RunWith(MockitoJUnitRunner.class)
|
||||
public class ProjectServiceTest {
|
||||
|
||||
|
||||
private static final Logger logger = LoggerFactory.getLogger(ProjectServiceTest.class);
|
||||
|
||||
@InjectMocks
|
||||
|
|
@ -73,17 +71,6 @@ public class ProjectServiceTest {
|
|||
|
||||
private String userName = "ProjectServiceTest";
|
||||
|
||||
@Before
|
||||
public void setUp() {
|
||||
|
||||
}
|
||||
|
||||
|
||||
@After
|
||||
public void after() {
|
||||
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testCreateProject() {
|
||||
|
||||
|
|
@ -105,7 +92,6 @@ public class ProjectServiceTest {
|
|||
logger.info(result.toString());
|
||||
Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
|
||||
|
||||
|
||||
}
|
||||
|
||||
@Test
|
||||
|
|
@ -148,6 +134,21 @@ public class ProjectServiceTest {
|
|||
logger.info(result.toString());
|
||||
Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
|
||||
|
||||
Map<String, Object> result2 = new HashMap<>();
|
||||
|
||||
result2 = projectService.checkProjectAndAuth(loginUser, null, projectName);
|
||||
Assert.assertEquals(Status.PROJECT_NOT_FOUNT, result2.get(Constants.STATUS));
|
||||
|
||||
Project project1 = getProject();
|
||||
// USER_NO_OPERATION_PROJECT_PERM
|
||||
project1.setUserId(2);
|
||||
result2 = projectService.checkProjectAndAuth(loginUser, project1, projectName);
|
||||
Assert.assertEquals(Status.USER_NO_OPERATION_PROJECT_PERM, result2.get(Constants.STATUS));
|
||||
|
||||
//success
|
||||
project1.setUserId(1);
|
||||
projectService.checkProjectAndAuth(loginUser, project1, projectName);
|
||||
|
||||
}
|
||||
|
||||
@Test
|
||||
|
|
@ -225,7 +226,6 @@ public class ProjectServiceTest {
|
|||
logger.info(result.toString());
|
||||
Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
|
||||
|
||||
|
||||
}
|
||||
|
||||
@Test
|
||||
|
|
@ -322,7 +322,6 @@ public class ProjectServiceTest {
|
|||
Assert.assertTrue(CollectionUtils.isNotEmpty(projects));
|
||||
}
|
||||
|
||||
|
||||
private Project getProject() {
|
||||
Project project = new Project();
|
||||
project.setId(1);
|
||||
|
|
@ -337,7 +336,6 @@ public class ProjectServiceTest {
|
|||
return list;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* create admin user
|
||||
*/
|
||||
|
|
@ -369,13 +367,11 @@ public class ProjectServiceTest {
|
|||
return list;
|
||||
}
|
||||
|
||||
|
||||
private String getDesc() {
|
||||
return "projectUserMapper.deleteProjectRelation(projectId,userId)projectUserMappe" +
|
||||
".deleteProjectRelation(projectId,userId)projectUserMappe" +
|
||||
"r.deleteProjectRelation(projectId,userId)projectUserMapper" +
|
||||
".deleteProjectRelation(projectId,userId)projectUserMapper.deleteProjectRelation(projectId,userId)";
|
||||
return "projectUserMapper.deleteProjectRelation(projectId,userId)projectUserMappe"
|
||||
+ ".deleteProjectRelation(projectId,userId)projectUserMappe"
|
||||
+ "r.deleteProjectRelation(projectId,userId)projectUserMapper"
|
||||
+ ".deleteProjectRelation(projectId,userId)projectUserMapper.deleteProjectRelation(projectId,userId)";
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
|
|
|
|||
|
|
@ -17,6 +17,7 @@
|
|||
|
||||
package org.apache.dolphinscheduler.api.service;
|
||||
|
||||
import static org.mockito.ArgumentMatchers.any;
|
||||
import static org.mockito.ArgumentMatchers.eq;
|
||||
import static org.mockito.Mockito.when;
|
||||
|
||||
|
|
@ -75,15 +76,11 @@ public class TaskInstanceServiceTest {
|
|||
@Mock
|
||||
TaskInstanceMapper taskInstanceMapper;
|
||||
|
||||
@Mock
|
||||
ProcessInstanceService processInstanceService;
|
||||
|
||||
@Mock
|
||||
UsersService usersService;
|
||||
|
||||
@Test
|
||||
public void queryTaskListPaging() {
|
||||
|
||||
String projectName = "project_test1";
|
||||
User loginUser = getAdminUser();
|
||||
Map<String, Object> result = new HashMap<>();
|
||||
|
|
@ -96,7 +93,6 @@ public class TaskInstanceServiceTest {
|
|||
"test_user", "2019-02-26 19:48:00", "2019-02-26 19:48:22", "", null, "", 1, 20);
|
||||
Assert.assertEquals(Status.PROJECT_NOT_FOUNT, proejctAuthFailRes.get(Constants.STATUS));
|
||||
|
||||
|
||||
//project
|
||||
putMsg(result, Status.SUCCESS, projectName);
|
||||
Project project = getProject(projectName);
|
||||
|
|
@ -134,6 +130,23 @@ public class TaskInstanceServiceTest {
|
|||
Map<String, Object> executorNullRes = taskInstanceService.queryTaskListPaging(loginUser, projectName, 1, "",
|
||||
"test_user", "2020-01-01 00:00:00", "2020-01-02 00:00:00", "", ExecutionStatus.SUCCESS, "192.168.xx.xx", 1, 20);
|
||||
Assert.assertEquals(Status.SUCCESS, executorNullRes.get(Constants.STATUS));
|
||||
|
||||
//start/end date null
|
||||
when(taskInstanceMapper.queryTaskInstanceListPaging(Mockito.any(Page.class), eq(project.getId()), eq(1), eq(""), eq(""),
|
||||
eq(0), Mockito.any(), eq("192.168.xx.xx"), any(), any())).thenReturn(pageReturn);
|
||||
Map<String, Object> executorNullDateRes = taskInstanceService.queryTaskListPaging(loginUser, projectName, 1, "",
|
||||
"", null, null, "", ExecutionStatus.SUCCESS, "192.168.xx.xx", 1, 20);
|
||||
Assert.assertEquals(Status.SUCCESS, executorNullDateRes.get(Constants.STATUS));
|
||||
|
||||
//start date error format
|
||||
when(taskInstanceMapper.queryTaskInstanceListPaging(Mockito.any(Page.class), eq(project.getId()), eq(1), eq(""), eq(""),
|
||||
eq(0), Mockito.any(), eq("192.168.xx.xx"), any(), any())).thenReturn(pageReturn);
|
||||
Map<String, Object> executorErrorStartDateRes = taskInstanceService.queryTaskListPaging(loginUser, projectName, 1, "",
|
||||
"", "error date", null, "", ExecutionStatus.SUCCESS, "192.168.xx.xx", 1, 20);
|
||||
Assert.assertEquals(Status.REQUEST_PARAMS_NOT_VALID_ERROR, executorErrorStartDateRes.get(Constants.STATUS));
|
||||
Map<String, Object> executorErrorEndDateRes = taskInstanceService.queryTaskListPaging(loginUser, projectName, 1, "",
|
||||
"", null, "error date", "", ExecutionStatus.SUCCESS, "192.168.xx.xx", 1, 20);
|
||||
Assert.assertEquals(Status.REQUEST_PARAMS_NOT_VALID_ERROR, executorErrorEndDateRes.get(Constants.STATUS));
|
||||
}
|
||||
|
||||
/**
|
||||
|
|
|
|||
|
|
@ -14,14 +14,11 @@
|
|||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.dolphinscheduler.api.service;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.Locale;
|
||||
import java.util.Map;
|
||||
|
||||
import org.apache.dolphinscheduler.api.enums.Status;
|
||||
import org.apache.dolphinscheduler.api.service.impl.TenantServiceImpl;
|
||||
import org.apache.dolphinscheduler.api.utils.PageInfo;
|
||||
import org.apache.dolphinscheduler.api.utils.Result;
|
||||
import org.apache.dolphinscheduler.common.Constants;
|
||||
|
|
@ -35,6 +32,12 @@ import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionMapper;
|
|||
import org.apache.dolphinscheduler.dao.mapper.ProcessInstanceMapper;
|
||||
import org.apache.dolphinscheduler.dao.mapper.TenantMapper;
|
||||
import org.apache.dolphinscheduler.dao.mapper.UserMapper;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.Locale;
|
||||
import java.util.Map;
|
||||
|
||||
import org.junit.Assert;
|
||||
import org.junit.Test;
|
||||
import org.junit.runner.RunWith;
|
||||
|
|
@ -54,54 +57,61 @@ public class TenantServiceTest {
|
|||
private static final Logger logger = LoggerFactory.getLogger(TenantServiceTest.class);
|
||||
|
||||
@InjectMocks
|
||||
private TenantService tenantService;
|
||||
private TenantServiceImpl tenantService;
|
||||
|
||||
@Mock
|
||||
private TenantMapper tenantMapper;
|
||||
|
||||
@Mock
|
||||
private ProcessDefinitionMapper processDefinitionMapper;
|
||||
|
||||
@Mock
|
||||
private ProcessInstanceMapper processInstanceMapper;
|
||||
|
||||
@Mock
|
||||
private UserMapper userMapper;
|
||||
|
||||
private String tenantCode = "TenantServiceTest";
|
||||
private String tenantName = "TenantServiceTest";
|
||||
private static final String tenantCode = "TenantServiceTest";
|
||||
|
||||
private static final String tenantName = "TenantServiceTest";
|
||||
|
||||
@Test
|
||||
public void testCreateTenant(){
|
||||
public void testCreateTenant() {
|
||||
|
||||
User loginUser = getLoginUser();
|
||||
Mockito.when(tenantMapper.queryByTenantCode(tenantCode)).thenReturn(getList());
|
||||
try {
|
||||
//check tenantCode
|
||||
Map<String, Object> result = tenantService.createTenant(getLoginUser(), "%!1111", tenantName, 1, "TenantServiceTest");
|
||||
Map<String, Object> result =
|
||||
tenantService.createTenant(getLoginUser(), "%!1111", tenantName, 1, "TenantServiceTest");
|
||||
logger.info(result.toString());
|
||||
Assert.assertEquals(Status.VERIFY_TENANT_CODE_ERROR,result.get(Constants.STATUS));
|
||||
Assert.assertEquals(Status.VERIFY_TENANT_CODE_ERROR, result.get(Constants.STATUS));
|
||||
|
||||
//check exist
|
||||
result = tenantService.createTenant(loginUser, tenantCode, tenantName, 1, "TenantServiceTest");
|
||||
logger.info(result.toString());
|
||||
Assert.assertEquals(Status.REQUEST_PARAMS_NOT_VALID_ERROR,result.get(Constants.STATUS));
|
||||
Assert.assertEquals(Status.REQUEST_PARAMS_NOT_VALID_ERROR, result.get(Constants.STATUS));
|
||||
|
||||
// success
|
||||
result = tenantService.createTenant(loginUser, "test", "test", 1, "TenantServiceTest");
|
||||
logger.info(result.toString());
|
||||
Assert.assertEquals(Status.SUCCESS,result.get(Constants.STATUS));
|
||||
|
||||
Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
|
||||
|
||||
} catch (Exception e) {
|
||||
logger.error("create tenant error",e);
|
||||
Assert.assertTrue(false);
|
||||
logger.error("create tenant error", e);
|
||||
Assert.fail();
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testQueryTenantListPage(){
|
||||
@SuppressWarnings("unchecked")
|
||||
public void testQueryTenantListPage() {
|
||||
|
||||
IPage<Tenant> page = new Page<>(1,10);
|
||||
IPage<Tenant> page = new Page<>(1, 10);
|
||||
page.setRecords(getList());
|
||||
page.setTotal(1L);
|
||||
Mockito.when(tenantMapper.queryTenantPaging(Mockito.any(Page.class), Mockito.eq("TenantServiceTest"))).thenReturn(page);
|
||||
Mockito.when(tenantMapper.queryTenantPaging(Mockito.any(Page.class), Mockito.eq("TenantServiceTest")))
|
||||
.thenReturn(page);
|
||||
Map<String, Object> result = tenantService.queryTenantList(getLoginUser(), "TenantServiceTest", 1, 10);
|
||||
logger.info(result.toString());
|
||||
PageInfo<Tenant> pageInfo = (PageInfo<Tenant>) result.get(Constants.DATA_LIST);
|
||||
|
|
@ -110,87 +120,71 @@ public class TenantServiceTest {
|
|||
}
|
||||
|
||||
@Test
|
||||
public void testUpdateTenant(){
|
||||
public void testUpdateTenant() {
|
||||
|
||||
Mockito.when(tenantMapper.queryById(1)).thenReturn(getTenant());
|
||||
try {
|
||||
// id not exist
|
||||
Map<String, Object> result = tenantService.updateTenant(getLoginUser(), 912222, tenantCode, tenantName, 1, "desc");
|
||||
Map<String, Object> result =
|
||||
tenantService.updateTenant(getLoginUser(), 912222, tenantCode, tenantName, 1, "desc");
|
||||
logger.info(result.toString());
|
||||
// success
|
||||
Assert.assertEquals(Status.TENANT_NOT_EXIST,result.get(Constants.STATUS));
|
||||
Assert.assertEquals(Status.TENANT_NOT_EXIST, result.get(Constants.STATUS));
|
||||
result = tenantService.updateTenant(getLoginUser(), 1, tenantCode, "TenantServiceTest001", 1, "desc");
|
||||
logger.info(result.toString());
|
||||
Assert.assertEquals(Status.SUCCESS,result.get(Constants.STATUS));
|
||||
Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
|
||||
} catch (Exception e) {
|
||||
logger.error("update tenant error",e);
|
||||
Assert.assertTrue(false);
|
||||
logger.error("update tenant error", e);
|
||||
Assert.fail();
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testDeleteTenantById(){
|
||||
public void testDeleteTenantById() {
|
||||
|
||||
Mockito.when(tenantMapper.queryById(1)).thenReturn(getTenant());
|
||||
Mockito.when(processInstanceMapper.queryByTenantIdAndStatus(1, Constants.NOT_TERMINATED_STATES)).thenReturn(getInstanceList());
|
||||
Mockito.when(processInstanceMapper.queryByTenantIdAndStatus(1, Constants.NOT_TERMINATED_STATES))
|
||||
.thenReturn(getInstanceList());
|
||||
Mockito.when(processDefinitionMapper.queryDefinitionListByTenant(2)).thenReturn(getDefinitionsList());
|
||||
Mockito.when( userMapper.queryUserListByTenant(3)).thenReturn(getUserList());
|
||||
Mockito.when(userMapper.queryUserListByTenant(3)).thenReturn(getUserList());
|
||||
|
||||
try {
|
||||
//TENANT_NOT_EXIST
|
||||
Map<String, Object> result = tenantService.deleteTenantById(getLoginUser(),12);
|
||||
Map<String, Object> result = tenantService.deleteTenantById(getLoginUser(), 12);
|
||||
logger.info(result.toString());
|
||||
Assert.assertEquals(Status.TENANT_NOT_EXIST,result.get(Constants.STATUS));
|
||||
Assert.assertEquals(Status.TENANT_NOT_EXIST, result.get(Constants.STATUS));
|
||||
|
||||
//DELETE_TENANT_BY_ID_FAIL
|
||||
result = tenantService.deleteTenantById(getLoginUser(),1);
|
||||
result = tenantService.deleteTenantById(getLoginUser(), 1);
|
||||
logger.info(result.toString());
|
||||
Assert.assertEquals(Status.DELETE_TENANT_BY_ID_FAIL,result.get(Constants.STATUS));
|
||||
Assert.assertEquals(Status.DELETE_TENANT_BY_ID_FAIL, result.get(Constants.STATUS));
|
||||
|
||||
//DELETE_TENANT_BY_ID_FAIL_DEFINES
|
||||
Mockito.when(tenantMapper.queryById(2)).thenReturn(getTenant(2));
|
||||
result = tenantService.deleteTenantById(getLoginUser(),2);
|
||||
result = tenantService.deleteTenantById(getLoginUser(), 2);
|
||||
logger.info(result.toString());
|
||||
Assert.assertEquals(Status.DELETE_TENANT_BY_ID_FAIL_DEFINES,result.get(Constants.STATUS));
|
||||
Assert.assertEquals(Status.DELETE_TENANT_BY_ID_FAIL_DEFINES, result.get(Constants.STATUS));
|
||||
|
||||
//DELETE_TENANT_BY_ID_FAIL_USERS
|
||||
Mockito.when(tenantMapper.queryById(3)).thenReturn(getTenant(3));
|
||||
result = tenantService.deleteTenantById(getLoginUser(),3);
|
||||
result = tenantService.deleteTenantById(getLoginUser(), 3);
|
||||
logger.info(result.toString());
|
||||
Assert.assertEquals(Status.DELETE_TENANT_BY_ID_FAIL_USERS,result.get(Constants.STATUS));
|
||||
Assert.assertEquals(Status.DELETE_TENANT_BY_ID_FAIL_USERS, result.get(Constants.STATUS));
|
||||
|
||||
// success
|
||||
Mockito.when(tenantMapper.queryById(4)).thenReturn(getTenant(4));
|
||||
result = tenantService.deleteTenantById(getLoginUser(),4);
|
||||
result = tenantService.deleteTenantById(getLoginUser(), 4);
|
||||
logger.info(result.toString());
|
||||
Assert.assertEquals(Status.SUCCESS,result.get(Constants.STATUS));
|
||||
Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
|
||||
} catch (Exception e) {
|
||||
logger.error("delete tenant error",e);
|
||||
Assert.assertTrue(false);
|
||||
logger.error("delete tenant error", e);
|
||||
Assert.fail();
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testQueryTenantList(){
|
||||
|
||||
Mockito.when( tenantMapper.selectList(null)).thenReturn(getList());
|
||||
Map<String, Object> result = tenantService.queryTenantList(getLoginUser());
|
||||
logger.info(result.toString());
|
||||
List<Tenant> tenantList = (List<Tenant>) result.get(Constants.DATA_LIST);
|
||||
Assert.assertTrue(CollectionUtils.isNotEmpty(tenantList));
|
||||
|
||||
Mockito.when( tenantMapper.queryByTenantCode("1")).thenReturn(getList());
|
||||
Map<String, Object> successRes = tenantService.queryTenantList("1");
|
||||
Assert.assertEquals(Status.SUCCESS,successRes.get(Constants.STATUS));
|
||||
|
||||
Mockito.when( tenantMapper.queryByTenantCode("1")).thenReturn(null);
|
||||
Map<String, Object> tenantNotExistRes = tenantService.queryTenantList("1");
|
||||
Assert.assertEquals(Status.TENANT_NOT_EXIST,tenantNotExistRes.get(Constants.STATUS));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testVerifyTenantCode(){
|
||||
public void testVerifyTenantCode() {
|
||||
|
||||
Mockito.when(tenantMapper.queryByTenantCode(tenantCode)).thenReturn(getList());
|
||||
// tenantCode not exist
|
||||
|
|
@ -209,12 +203,10 @@ public class TenantServiceTest {
|
|||
Assert.assertEquals(resultString, result.getMsg());
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* get user
|
||||
* @return
|
||||
*/
|
||||
private User getLoginUser(){
|
||||
private User getLoginUser() {
|
||||
|
||||
User loginUser = new User();
|
||||
loginUser.setUserType(UserType.ADMIN_USER);
|
||||
|
|
@ -223,9 +215,8 @@ public class TenantServiceTest {
|
|||
|
||||
/**
|
||||
* get list
|
||||
* @return
|
||||
*/
|
||||
private List<Tenant> getList(){
|
||||
private List<Tenant> getList() {
|
||||
List<Tenant> tenantList = new ArrayList<>();
|
||||
tenantList.add(getTenant());
|
||||
return tenantList;
|
||||
|
|
@ -233,16 +224,15 @@ public class TenantServiceTest {
|
|||
|
||||
/**
|
||||
* get tenant
|
||||
* @return
|
||||
*/
|
||||
private Tenant getTenant(){
|
||||
private Tenant getTenant() {
|
||||
return getTenant(1);
|
||||
}
|
||||
|
||||
/**
|
||||
* get tenant
|
||||
* @return
|
||||
*/
|
||||
private Tenant getTenant(int id){
|
||||
private Tenant getTenant(int id) {
|
||||
Tenant tenant = new Tenant();
|
||||
tenant.setId(id);
|
||||
tenant.setTenantCode(tenantCode);
|
||||
|
|
@ -250,25 +240,24 @@ public class TenantServiceTest {
|
|||
return tenant;
|
||||
}
|
||||
|
||||
private List<User> getUserList(){
|
||||
private List<User> getUserList() {
|
||||
List<User> userList = new ArrayList<>();
|
||||
userList.add(getLoginUser());
|
||||
return userList;
|
||||
}
|
||||
|
||||
private List<ProcessInstance> getInstanceList(){
|
||||
private List<ProcessInstance> getInstanceList() {
|
||||
List<ProcessInstance> processInstances = new ArrayList<>();
|
||||
ProcessInstance processInstance = new ProcessInstance();
|
||||
processInstances.add(processInstance);
|
||||
return processInstances;
|
||||
}
|
||||
|
||||
private List<ProcessDefinition> getDefinitionsList(){
|
||||
private List<ProcessDefinition> getDefinitionsList() {
|
||||
List<ProcessDefinition> processDefinitions = new ArrayList<>();
|
||||
ProcessDefinition processDefinition = new ProcessDefinition();
|
||||
processDefinitions.add(processDefinition);
|
||||
return processDefinitions;
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
|
|
|
|||
|
|
@ -42,14 +42,14 @@ import org.mockito.junit.MockitoJUnitRunner;
|
|||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
import static org.mockito.ArgumentMatchers.any;
|
||||
import static org.mockito.ArgumentMatchers.eq;
|
||||
import static org.mockito.Mockito.when;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
@RunWith(MockitoJUnitRunner.class)
|
||||
public class UsersServiceTest {
|
||||
private static final Logger logger = LoggerFactory.getLogger(UsersServiceTest.class);
|
||||
|
|
@ -528,6 +528,41 @@ public class UsersServiceTest {
|
|||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testBatchActivateUser() {
|
||||
User user = new User();
|
||||
user.setUserType(UserType.GENERAL_USER);
|
||||
List<String> userNames = new ArrayList<>();
|
||||
userNames.add("userTest0001");
|
||||
userNames.add("userTest0002");
|
||||
userNames.add("userTest0003~");
|
||||
userNames.add("userTest0004");
|
||||
|
||||
try {
|
||||
//not admin
|
||||
Map<String, Object> result = usersService.batchActivateUser(user, userNames);
|
||||
Assert.assertEquals(Status.USER_NO_OPERATION_PERM, result.get(Constants.STATUS));
|
||||
|
||||
//batch activate user names
|
||||
user.setUserType(UserType.ADMIN_USER);
|
||||
when(userMapper.queryByUserNameAccurately("userTest0001")).thenReturn(getUser());
|
||||
when(userMapper.queryByUserNameAccurately("userTest0002")).thenReturn(getDisabledUser());
|
||||
result = usersService.batchActivateUser(user, userNames);
|
||||
Map<String, Object> responseData = (Map<String, Object>) result.get(Constants.DATA_LIST);
|
||||
Map<String, Object> successData = (Map<String, Object>) responseData.get("success");
|
||||
int totalSuccess = (Integer) successData.get("sum");
|
||||
|
||||
Map<String, Object> failedData = (Map<String, Object>) responseData.get("failed");
|
||||
int totalFailed = (Integer) failedData.get("sum");
|
||||
|
||||
Assert.assertEquals(1, totalSuccess);
|
||||
Assert.assertEquals(3, totalFailed);
|
||||
Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
|
||||
} catch (Exception e) {
|
||||
Assert.assertTrue(false);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* get disabled user
|
||||
* @return
|
||||
|
|
|
|||
|
|
@ -0,0 +1,23 @@
|
|||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
* contributor license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright ownership.
|
||||
* The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
* (the "License"); you may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.dolphinscheduler.common.enums;
|
||||
|
||||
public enum AlertEvent {
|
||||
|
||||
SERVER_DOWN,TIME_OUT
|
||||
}
|
||||
|
|
@ -0,0 +1,23 @@
|
|||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
* contributor license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright ownership.
|
||||
* The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
* (the "License"); you may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.dolphinscheduler.common.enums;
|
||||
|
||||
public enum AlertWarnLevel {
|
||||
|
||||
MIDDLE,SERIOUS
|
||||
}
|
||||
|
|
@ -32,7 +32,7 @@ import java.io.IOException;
|
|||
import java.util.*;
|
||||
|
||||
import static com.fasterxml.jackson.databind.DeserializationFeature.*;
|
||||
|
||||
import static com.fasterxml.jackson.databind.MapperFeature.REQUIRE_SETTERS_FOR_GETTERS;
|
||||
|
||||
/**
|
||||
* json utils
|
||||
|
|
@ -48,6 +48,7 @@ public class JSONUtils {
|
|||
.configure(FAIL_ON_UNKNOWN_PROPERTIES, false)
|
||||
.configure(ACCEPT_EMPTY_ARRAY_AS_NULL_OBJECT, true)
|
||||
.configure(READ_UNKNOWN_ENUM_VALUES_AS_NULL, true)
|
||||
.configure(REQUIRE_SETTERS_FOR_GETTERS, true)
|
||||
.setTimeZone(TimeZone.getDefault())
|
||||
;
|
||||
|
||||
|
|
|
|||
|
|
@ -17,22 +17,25 @@
|
|||
|
||||
package org.apache.dolphinscheduler.dao;
|
||||
|
||||
import org.apache.dolphinscheduler.common.enums.AlertEvent;
|
||||
import org.apache.dolphinscheduler.common.enums.AlertStatus;
|
||||
import org.apache.dolphinscheduler.common.enums.AlertType;
|
||||
import org.apache.dolphinscheduler.common.enums.AlertWarnLevel;
|
||||
import org.apache.dolphinscheduler.common.enums.ShowType;
|
||||
import org.apache.dolphinscheduler.common.utils.JSONUtils;
|
||||
import org.apache.dolphinscheduler.common.utils.StringUtils;
|
||||
import org.apache.dolphinscheduler.dao.datasource.ConnectionFactory;
|
||||
import org.apache.dolphinscheduler.dao.entity.Alert;
|
||||
import org.apache.dolphinscheduler.dao.entity.ProcessAlertContent;
|
||||
import org.apache.dolphinscheduler.dao.entity.ProcessDefinition;
|
||||
import org.apache.dolphinscheduler.dao.entity.ProcessInstance;
|
||||
import org.apache.dolphinscheduler.dao.entity.ServerAlertContent;
|
||||
import org.apache.dolphinscheduler.dao.entity.User;
|
||||
import org.apache.dolphinscheduler.dao.mapper.AlertMapper;
|
||||
import org.apache.dolphinscheduler.dao.mapper.UserAlertGroupMapper;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Date;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.util.List;
|
||||
|
||||
import org.slf4j.Logger;
|
||||
|
|
@ -103,14 +106,12 @@ public class AlertDao extends AbstractBaseDao {
|
|||
*/
|
||||
public void sendServerStopedAlert(int alertgroupId, String host, String serverType) {
|
||||
Alert alert = new Alert();
|
||||
List<LinkedHashMap> serverStopList = new ArrayList<>(1);
|
||||
LinkedHashMap<String, String> serverStopedMap = new LinkedHashMap();
|
||||
serverStopedMap.put("type", serverType);
|
||||
serverStopedMap.put("host", host);
|
||||
serverStopedMap.put("event", "server down");
|
||||
serverStopedMap.put("warning level", "serious");
|
||||
serverStopList.add(serverStopedMap);
|
||||
String content = JSONUtils.toJsonString(serverStopList);
|
||||
List<ServerAlertContent> serverAlertContents = new ArrayList<>(1);
|
||||
ServerAlertContent serverStopAlertContent = ServerAlertContent.newBuilder().
|
||||
type(serverType).host(host).event(AlertEvent.SERVER_DOWN).warningLevel(AlertWarnLevel.SERIOUS).
|
||||
build();
|
||||
serverAlertContents.add(serverStopAlertContent);
|
||||
String content = JSONUtils.toJsonString(serverAlertContents);
|
||||
alert.setTitle("Fault tolerance warning");
|
||||
saveTaskTimeoutAlert(alert, content, alertgroupId, null, null);
|
||||
}
|
||||
|
|
@ -126,14 +127,15 @@ public class AlertDao extends AbstractBaseDao {
|
|||
String receivers = processDefinition.getReceivers();
|
||||
String receiversCc = processDefinition.getReceiversCc();
|
||||
Alert alert = new Alert();
|
||||
List<LinkedHashMap> processTimeoutList = new ArrayList<>(1);
|
||||
LinkedHashMap<String, String> processTimeoutMap = new LinkedHashMap();
|
||||
processTimeoutMap.put("id", String.valueOf(processInstance.getId()));
|
||||
processTimeoutMap.put("name", processInstance.getName());
|
||||
processTimeoutMap.put("event", "timeout");
|
||||
processTimeoutMap.put("warnLevel", "middle");
|
||||
processTimeoutList.add(processTimeoutMap);
|
||||
String content = JSONUtils.toJsonString(processTimeoutList);
|
||||
List<ProcessAlertContent> processAlertContentList = new ArrayList<>(1);
|
||||
ProcessAlertContent processAlertContent = ProcessAlertContent.newBuilder()
|
||||
.processId(processInstance.getId())
|
||||
.processName(processInstance.getName())
|
||||
.event(AlertEvent.TIME_OUT)
|
||||
.warningLevel(AlertWarnLevel.MIDDLE)
|
||||
.build();
|
||||
processAlertContentList.add(processAlertContent);
|
||||
String content = JSONUtils.toJsonString(processAlertContentList);
|
||||
alert.setTitle("Process Timeout Warn");
|
||||
saveTaskTimeoutAlert(alert, content, alertgroupId, receivers, receiversCc);
|
||||
}
|
||||
|
|
@ -169,16 +171,17 @@ public class AlertDao extends AbstractBaseDao {
|
|||
public void sendTaskTimeoutAlert(int alertgroupId, String receivers, String receiversCc, int processInstanceId,
|
||||
String processInstanceName, int taskId, String taskName) {
|
||||
Alert alert = new Alert();
|
||||
List<LinkedHashMap> taskTimeoutList = new ArrayList<>(1);
|
||||
LinkedHashMap<String, String> taskTimeoutMap = new LinkedHashMap();
|
||||
taskTimeoutMap.put("process instance id", String.valueOf(processInstanceId));
|
||||
taskTimeoutMap.put("process name", processInstanceName);
|
||||
taskTimeoutMap.put("task id", String.valueOf(taskId));
|
||||
taskTimeoutMap.put("task name", taskName);
|
||||
taskTimeoutMap.put("event", "timeout");
|
||||
taskTimeoutMap.put("warnLevel", "middle");
|
||||
taskTimeoutList.add(taskTimeoutMap);
|
||||
String content = JSONUtils.toJsonString(taskTimeoutList);
|
||||
List<ProcessAlertContent> processAlertContentList = new ArrayList<>(1);
|
||||
ProcessAlertContent processAlertContent = ProcessAlertContent.newBuilder()
|
||||
.processId(processInstanceId)
|
||||
.processName(processInstanceName)
|
||||
.taskId(taskId)
|
||||
.taskName(taskName)
|
||||
.event(AlertEvent.TIME_OUT)
|
||||
.warningLevel(AlertWarnLevel.MIDDLE)
|
||||
.build();
|
||||
processAlertContentList.add(processAlertContent);
|
||||
String content = JSONUtils.toJsonString(processAlertContentList);
|
||||
alert.setTitle("Task Timeout Warn");
|
||||
saveTaskTimeoutAlert(alert, content, alertgroupId, receivers, receiversCc);
|
||||
}
|
||||
|
|
@ -210,4 +213,5 @@ public class AlertDao extends AbstractBaseDao {
|
|||
public AlertMapper getAlertMapper() {
|
||||
return alertMapper;
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
|||
|
|
@ -0,0 +1,236 @@
|
|||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
* contributor license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright ownership.
|
||||
* The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
* (the "License"); you may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.dolphinscheduler.dao.entity;
|
||||
|
||||
import org.apache.dolphinscheduler.common.enums.AlertEvent;
|
||||
import org.apache.dolphinscheduler.common.enums.AlertWarnLevel;
|
||||
import org.apache.dolphinscheduler.common.enums.CommandType;
|
||||
import org.apache.dolphinscheduler.common.enums.ExecutionStatus;
|
||||
import org.apache.dolphinscheduler.common.enums.Flag;
|
||||
|
||||
import java.io.Serializable;
|
||||
import java.util.Date;
|
||||
|
||||
import com.fasterxml.jackson.annotation.JsonFormat;
|
||||
import com.fasterxml.jackson.annotation.JsonInclude;
|
||||
import com.fasterxml.jackson.annotation.JsonInclude.Include;
|
||||
import com.fasterxml.jackson.annotation.JsonProperty;
|
||||
|
||||
@JsonInclude(Include.NON_NULL)
|
||||
public class ProcessAlertContent implements Serializable {
|
||||
@JsonProperty("processId")
|
||||
private int processId;
|
||||
@JsonProperty("processName")
|
||||
private String processName;
|
||||
@JsonProperty("processType")
|
||||
private CommandType processType;
|
||||
@JsonProperty("processState")
|
||||
private ExecutionStatus processState;
|
||||
@JsonProperty("recovery")
|
||||
private Flag recovery;
|
||||
@JsonProperty("runTimes")
|
||||
private int runTimes;
|
||||
@JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss", timezone = "GMT+8")
|
||||
@JsonProperty("processStartTime")
|
||||
private Date processStartTime;
|
||||
@JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss", timezone = "GMT+8")
|
||||
@JsonProperty("processEndTime")
|
||||
private Date processEndTime;
|
||||
@JsonProperty("processHost")
|
||||
private String processHost;
|
||||
@JsonProperty("taskId")
|
||||
private int taskId;
|
||||
@JsonProperty("taskName")
|
||||
private String taskName;
|
||||
@JsonProperty("event")
|
||||
private AlertEvent event;
|
||||
@JsonProperty("warnLevel")
|
||||
private AlertWarnLevel warnLevel;
|
||||
@JsonProperty("taskType")
|
||||
private String taskType;
|
||||
@JsonProperty("retryTimes")
|
||||
private int retryTimes;
|
||||
@JsonProperty("taskState")
|
||||
private ExecutionStatus taskState;
|
||||
@JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss", timezone = "GMT+8")
|
||||
@JsonProperty("taskStartTime")
|
||||
private Date taskStartTime;
|
||||
@JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss", timezone = "GMT+8")
|
||||
@JsonProperty("taskEndTime")
|
||||
private Date taskEndTime;
|
||||
@JsonProperty("taskHost")
|
||||
private String taskHost;
|
||||
@JsonProperty("logPath")
|
||||
private String logPath;
|
||||
|
||||
private ProcessAlertContent(Builder builder) {
|
||||
this.processId = builder.processId;
|
||||
this.processName = builder.processName;
|
||||
this.processType = builder.processType;
|
||||
this.recovery = builder.recovery;
|
||||
this.processState = builder.processState;
|
||||
this.runTimes = builder.runTimes;
|
||||
this.processStartTime = builder.processStartTime;
|
||||
this.processEndTime = builder.processEndTime;
|
||||
this.processHost = builder.processHost;
|
||||
this.taskId = builder.taskId;
|
||||
this.taskName = builder.taskName;
|
||||
this.event = builder.event;
|
||||
this.warnLevel = builder.warnLevel;
|
||||
this.taskType = builder.taskType;
|
||||
this.taskState = builder.taskState;
|
||||
this.taskStartTime = builder.taskStartTime;
|
||||
this.taskEndTime = builder.taskEndTime;
|
||||
this.taskHost = builder.taskHost;
|
||||
this.logPath = builder.logPath;
|
||||
this.retryTimes = builder.retryTimes;
|
||||
|
||||
}
|
||||
|
||||
    /**
     * Entry point of the fluent builder API.
     *
     * @return a new, empty {@link Builder}
     */
    public static Builder newBuilder() {
        return new Builder();
    }
|
||||
|
||||
    /**
     * Fluent builder for {@code ProcessAlertContent}.
     *
     * <p>Collects process-level and task-level alert attributes; all setters
     * return {@code this} so calls can be chained, and {@link #build()}
     * produces the finished content object.
     */
    public static class Builder {

        // process-level attributes
        private int processId;
        private String processName;
        private CommandType processType;
        private Flag recovery;
        private ExecutionStatus processState;
        private int runTimes;
        private Date processStartTime;
        private Date processEndTime;
        private String processHost;
        // task-level attributes
        private int taskId;
        private String taskName;
        // alert metadata
        private AlertEvent event;
        private AlertWarnLevel warnLevel;
        private String taskType;
        private int retryTimes;
        private ExecutionStatus taskState;
        private Date taskStartTime;
        private Date taskEndTime;
        private String taskHost;
        private String logPath;

        public Builder processId(int processId) {
            this.processId = processId;
            return this;
        }

        public Builder processName(String processName) {
            this.processName = processName;
            return this;
        }

        public Builder processType(CommandType processType) {
            this.processType = processType;
            return this;
        }

        public Builder recovery(Flag recovery) {
            this.recovery = recovery;
            return this;
        }

        public Builder processState(ExecutionStatus processState) {
            this.processState = processState;
            return this;
        }

        public Builder runTimes(int runTimes) {
            this.runTimes = runTimes;
            return this;
        }

        public Builder processStartTime(Date processStartTime) {
            this.processStartTime = processStartTime;
            return this;
        }

        public Builder processEndTime(Date processEndTime) {
            this.processEndTime = processEndTime;
            return this;
        }

        public Builder processHost(String processHost) {
            this.processHost = processHost;
            return this;
        }

        public Builder taskId(int taskId) {
            this.taskId = taskId;
            return this;
        }

        public Builder taskName(String taskName) {
            this.taskName = taskName;
            return this;
        }

        public Builder event(AlertEvent event) {
            this.event = event;
            return this;
        }

        // NOTE: method is named warningLevel but backs the warnLevel field
        public Builder warningLevel(AlertWarnLevel warnLevel) {
            this.warnLevel = warnLevel;
            return this;
        }

        public Builder taskType(String taskType) {
            this.taskType = taskType;
            return this;
        }

        public Builder retryTimes(int retryTimes) {
            this.retryTimes = retryTimes;
            return this;
        }

        public Builder taskState(ExecutionStatus taskState) {
            this.taskState = taskState;
            return this;
        }

        public Builder taskStartTime(Date taskStartTime) {
            this.taskStartTime = taskStartTime;
            return this;
        }

        public Builder taskEndTime(Date taskEndTime) {
            this.taskEndTime = taskEndTime;
            return this;
        }

        public Builder taskHost(String taskHost) {
            this.taskHost = taskHost;
            return this;
        }

        public Builder logPath(String logPath) {
            this.logPath = logPath;
            return this;
        }

        /**
         * @return the finished, populated {@code ProcessAlertContent}
         */
        public ProcessAlertContent build() {
            return new ProcessAlertContent(this);
        }
    }
|
||||
}
|
||||
|
|
@ -14,24 +14,26 @@
|
|||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.dolphinscheduler.dao.entity;
|
||||
|
||||
import org.apache.dolphinscheduler.common.enums.Flag;
|
||||
import org.apache.dolphinscheduler.common.enums.ReleaseState;
|
||||
import org.apache.dolphinscheduler.common.process.Property;
|
||||
import org.apache.dolphinscheduler.common.utils.JSONUtils;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Date;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import com.baomidou.mybatisplus.annotation.IdType;
|
||||
import com.baomidou.mybatisplus.annotation.TableField;
|
||||
import com.baomidou.mybatisplus.annotation.TableId;
|
||||
import com.baomidou.mybatisplus.annotation.TableName;
|
||||
import com.baomidou.mybatisplus.core.toolkit.StringUtils;
|
||||
import com.fasterxml.jackson.annotation.JsonFormat;
|
||||
import org.apache.dolphinscheduler.common.enums.Flag;
|
||||
import org.apache.dolphinscheduler.common.enums.ReleaseState;
|
||||
import org.apache.dolphinscheduler.common.process.Property;
|
||||
import org.apache.dolphinscheduler.common.utils.*;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Date;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
|
||||
/**
|
||||
|
|
@ -54,7 +56,7 @@ public class ProcessDefinition {
|
|||
/**
|
||||
* version
|
||||
*/
|
||||
private int version;
|
||||
private long version;
|
||||
|
||||
/**
|
||||
* release state : online/offline
|
||||
|
|
@ -96,13 +98,13 @@ public class ProcessDefinition {
|
|||
/**
|
||||
* create time
|
||||
*/
|
||||
@JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss",timezone="GMT+8")
|
||||
@JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss", timezone = "GMT+8")
|
||||
private Date createTime;
|
||||
|
||||
/**
|
||||
* update time
|
||||
*/
|
||||
@JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss",timezone="GMT+8")
|
||||
@JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss", timezone = "GMT+8")
|
||||
private Date updateTime;
|
||||
|
||||
/**
|
||||
|
|
@ -182,11 +184,11 @@ public class ProcessDefinition {
|
|||
this.name = name;
|
||||
}
|
||||
|
||||
public int getVersion() {
|
||||
public long getVersion() {
|
||||
return version;
|
||||
}
|
||||
|
||||
public void setVersion(int version) {
|
||||
public void setVersion(long version) {
|
||||
this.version = version;
|
||||
}
|
||||
|
||||
|
|
@ -276,9 +278,9 @@ public class ProcessDefinition {
|
|||
}
|
||||
|
||||
public void setGlobalParams(String globalParams) {
|
||||
if (globalParams == null){
|
||||
if (globalParams == null) {
|
||||
this.globalParamList = new ArrayList<>();
|
||||
}else {
|
||||
} else {
|
||||
this.globalParamList = JSONUtils.toList(globalParams, Property.class);
|
||||
}
|
||||
this.globalParams = globalParams;
|
||||
|
|
@ -295,7 +297,7 @@ public class ProcessDefinition {
|
|||
|
||||
public Map<String, String> getGlobalParamMap() {
|
||||
if (globalParamMap == null && StringUtils.isNotEmpty(globalParams)) {
|
||||
List<Property> propList = JSONUtils.toList(globalParams,Property.class);
|
||||
List<Property> propList = JSONUtils.toList(globalParams, Property.class);
|
||||
globalParamMap = propList.stream().collect(Collectors.toMap(Property::getProp, Property::getValue));
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -0,0 +1,329 @@
|
|||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
* contributor license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright ownership.
|
||||
* The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
* (the "License"); you may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.dolphinscheduler.dao.entity;
|
||||
|
||||
import java.util.Date;
|
||||
|
||||
import com.baomidou.mybatisplus.annotation.IdType;
|
||||
import com.baomidou.mybatisplus.annotation.TableId;
|
||||
import com.baomidou.mybatisplus.annotation.TableName;
|
||||
import com.fasterxml.jackson.annotation.JsonFormat;
|
||||
|
||||
|
||||
/**
|
||||
* process definition version
|
||||
*/
|
||||
@TableName("t_ds_process_definition_version")
|
||||
public class ProcessDefinitionVersion {
|
||||
|
||||
/**
|
||||
* id
|
||||
*/
|
||||
@TableId(value = "id", type = IdType.AUTO)
|
||||
private int id;
|
||||
|
||||
/**
|
||||
* process definition id
|
||||
*/
|
||||
private int processDefinitionId;
|
||||
|
||||
/**
|
||||
* version
|
||||
*/
|
||||
private long version;
|
||||
|
||||
/**
|
||||
* definition json string
|
||||
*/
|
||||
private String processDefinitionJson;
|
||||
|
||||
/**
|
||||
* description
|
||||
*/
|
||||
private String description;
|
||||
|
||||
/**
|
||||
* receivers
|
||||
*/
|
||||
private String receivers;
|
||||
|
||||
/**
|
||||
* receivers cc
|
||||
*/
|
||||
private String receiversCc;
|
||||
|
||||
/**
|
||||
* process warning time out. unit: minute
|
||||
*/
|
||||
private int timeout;
|
||||
|
||||
/**
|
||||
* resource ids
|
||||
*/
|
||||
private String resourceIds;
|
||||
|
||||
/**
|
||||
* create time
|
||||
*/
|
||||
@JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss", timezone = "GMT+8")
|
||||
private Date createTime;
|
||||
|
||||
/**
|
||||
* user defined parameters
|
||||
*/
|
||||
private String globalParams;
|
||||
|
||||
/**
|
||||
* locations array for web
|
||||
*/
|
||||
private String locations;
|
||||
|
||||
/**
|
||||
* connects array for web
|
||||
*/
|
||||
private String connects;
|
||||
|
||||
public String getGlobalParams() {
|
||||
return globalParams;
|
||||
}
|
||||
|
||||
public void setGlobalParams(String globalParams) {
|
||||
this.globalParams = globalParams;
|
||||
}
|
||||
|
||||
public int getId() {
|
||||
return id;
|
||||
}
|
||||
|
||||
public void setId(int id) {
|
||||
this.id = id;
|
||||
}
|
||||
|
||||
public int getProcessDefinitionId() {
|
||||
return processDefinitionId;
|
||||
}
|
||||
|
||||
public void setProcessDefinitionId(int processDefinitionId) {
|
||||
this.processDefinitionId = processDefinitionId;
|
||||
}
|
||||
|
||||
public long getVersion() {
|
||||
return version;
|
||||
}
|
||||
|
||||
public void setVersion(long version) {
|
||||
this.version = version;
|
||||
}
|
||||
|
||||
public String getProcessDefinitionJson() {
|
||||
return processDefinitionJson;
|
||||
}
|
||||
|
||||
public void setProcessDefinitionJson(String processDefinitionJson) {
|
||||
this.processDefinitionJson = processDefinitionJson;
|
||||
}
|
||||
|
||||
public String getDescription() {
|
||||
return description;
|
||||
}
|
||||
|
||||
public void setDescription(String description) {
|
||||
this.description = description;
|
||||
}
|
||||
|
||||
public Date getCreateTime() {
|
||||
return createTime;
|
||||
}
|
||||
|
||||
public void setCreateTime(Date createTime) {
|
||||
this.createTime = createTime;
|
||||
}
|
||||
|
||||
public String getLocations() {
|
||||
return locations;
|
||||
}
|
||||
|
||||
public void setLocations(String locations) {
|
||||
this.locations = locations;
|
||||
}
|
||||
|
||||
public String getConnects() {
|
||||
return connects;
|
||||
}
|
||||
|
||||
public void setConnects(String connects) {
|
||||
this.connects = connects;
|
||||
}
|
||||
|
||||
public String getReceivers() {
|
||||
return receivers;
|
||||
}
|
||||
|
||||
public void setReceivers(String receivers) {
|
||||
this.receivers = receivers;
|
||||
}
|
||||
|
||||
public String getReceiversCc() {
|
||||
return receiversCc;
|
||||
}
|
||||
|
||||
public void setReceiversCc(String receiversCc) {
|
||||
this.receiversCc = receiversCc;
|
||||
}
|
||||
|
||||
public int getTimeout() {
|
||||
return timeout;
|
||||
}
|
||||
|
||||
public void setTimeout(int timeout) {
|
||||
this.timeout = timeout;
|
||||
}
|
||||
|
||||
public String getResourceIds() {
|
||||
return resourceIds;
|
||||
}
|
||||
|
||||
public void setResourceIds(String resourceIds) {
|
||||
this.resourceIds = resourceIds;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "ProcessDefinitionVersion{"
|
||||
+ "id=" + id
|
||||
+ ", processDefinitionId=" + processDefinitionId
|
||||
+ ", version=" + version
|
||||
+ ", processDefinitionJson='" + processDefinitionJson + '\''
|
||||
+ ", description='" + description + '\''
|
||||
+ ", globalParams='" + globalParams + '\''
|
||||
+ ", createTime=" + createTime
|
||||
+ ", locations='" + locations + '\''
|
||||
+ ", connects='" + connects + '\''
|
||||
+ ", receivers='" + receivers + '\''
|
||||
+ ", receiversCc='" + receiversCc + '\''
|
||||
+ ", timeout=" + timeout
|
||||
+ ", resourceIds='" + resourceIds + '\''
|
||||
+ '}';
|
||||
}
|
||||
|
||||
public static Builder newBuilder() {
|
||||
return new Builder();
|
||||
}
|
||||
|
||||
public static final class Builder {
|
||||
private int id;
|
||||
private int processDefinitionId;
|
||||
private long version;
|
||||
private String processDefinitionJson;
|
||||
private String description;
|
||||
private String globalParams;
|
||||
private Date createTime;
|
||||
private String locations;
|
||||
private String connects;
|
||||
private String receivers;
|
||||
private String receiversCc;
|
||||
private int timeout;
|
||||
private String resourceIds;
|
||||
|
||||
private Builder() {
|
||||
}
|
||||
|
||||
public Builder id(int id) {
|
||||
this.id = id;
|
||||
return this;
|
||||
}
|
||||
|
||||
public Builder processDefinitionId(int processDefinitionId) {
|
||||
this.processDefinitionId = processDefinitionId;
|
||||
return this;
|
||||
}
|
||||
|
||||
public Builder version(long version) {
|
||||
this.version = version;
|
||||
return this;
|
||||
}
|
||||
|
||||
public Builder processDefinitionJson(String processDefinitionJson) {
|
||||
this.processDefinitionJson = processDefinitionJson;
|
||||
return this;
|
||||
}
|
||||
|
||||
public Builder description(String description) {
|
||||
this.description = description;
|
||||
return this;
|
||||
}
|
||||
|
||||
public Builder globalParams(String globalParams) {
|
||||
this.globalParams = globalParams;
|
||||
return this;
|
||||
}
|
||||
|
||||
public Builder createTime(Date createTime) {
|
||||
this.createTime = createTime;
|
||||
return this;
|
||||
}
|
||||
|
||||
public Builder locations(String locations) {
|
||||
this.locations = locations;
|
||||
return this;
|
||||
}
|
||||
|
||||
public Builder connects(String connects) {
|
||||
this.connects = connects;
|
||||
return this;
|
||||
}
|
||||
|
||||
public Builder receivers(String receivers) {
|
||||
this.receivers = receivers;
|
||||
return this;
|
||||
}
|
||||
|
||||
public Builder receiversCc(String receiversCc) {
|
||||
this.receiversCc = receiversCc;
|
||||
return this;
|
||||
}
|
||||
|
||||
public Builder timeout(int timeout) {
|
||||
this.timeout = timeout;
|
||||
return this;
|
||||
}
|
||||
|
||||
public Builder resourceIds(String resourceIds) {
|
||||
this.resourceIds = resourceIds;
|
||||
return this;
|
||||
}
|
||||
|
||||
public ProcessDefinitionVersion build() {
|
||||
ProcessDefinitionVersion processDefinitionVersion = new ProcessDefinitionVersion();
|
||||
processDefinitionVersion.setId(id);
|
||||
processDefinitionVersion.setProcessDefinitionId(processDefinitionId);
|
||||
processDefinitionVersion.setVersion(version);
|
||||
processDefinitionVersion.setProcessDefinitionJson(processDefinitionJson);
|
||||
processDefinitionVersion.setDescription(description);
|
||||
processDefinitionVersion.setGlobalParams(globalParams);
|
||||
processDefinitionVersion.setCreateTime(createTime);
|
||||
processDefinitionVersion.setLocations(locations);
|
||||
processDefinitionVersion.setConnects(connects);
|
||||
processDefinitionVersion.setReceivers(receivers);
|
||||
processDefinitionVersion.setReceiversCc(receiversCc);
|
||||
processDefinitionVersion.setTimeout(timeout);
|
||||
processDefinitionVersion.setResourceIds(resourceIds);
|
||||
return processDefinitionVersion;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,85 @@
|
|||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
* contributor license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright ownership.
|
||||
* The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
* (the "License"); you may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.dolphinscheduler.dao.entity;
|
||||
|
||||
import org.apache.dolphinscheduler.common.enums.AlertEvent;
|
||||
import org.apache.dolphinscheduler.common.enums.AlertWarnLevel;
|
||||
|
||||
import com.fasterxml.jackson.annotation.JsonProperty;
|
||||
|
||||
public class ServerAlertContent {
|
||||
|
||||
/**
|
||||
* server type :master or worker
|
||||
*/
|
||||
@JsonProperty("type")
|
||||
final String type;
|
||||
@JsonProperty("host")
|
||||
final String host;
|
||||
@JsonProperty("event")
|
||||
final AlertEvent event;
|
||||
@JsonProperty("warningLevel")
|
||||
final AlertWarnLevel warningLevel;
|
||||
|
||||
private ServerAlertContent(Builder builder) {
|
||||
this.type = builder.type;
|
||||
this.host = builder.host;
|
||||
this.event = builder.event;
|
||||
this.warningLevel = builder.warningLevel;
|
||||
|
||||
}
|
||||
|
||||
public static Builder newBuilder() {
|
||||
return new Builder();
|
||||
}
|
||||
|
||||
public static class Builder {
|
||||
private String type;
|
||||
|
||||
private String host;
|
||||
|
||||
private AlertEvent event;
|
||||
|
||||
private AlertWarnLevel warningLevel;
|
||||
|
||||
public Builder type(String type) {
|
||||
this.type = type;
|
||||
return this;
|
||||
}
|
||||
|
||||
public Builder host(String host) {
|
||||
this.host = host;
|
||||
return this;
|
||||
}
|
||||
|
||||
public Builder event(AlertEvent event) {
|
||||
this.event = event;
|
||||
return this;
|
||||
}
|
||||
|
||||
public Builder warningLevel(AlertWarnLevel warningLevel) {
|
||||
this.warningLevel = warningLevel;
|
||||
return this;
|
||||
}
|
||||
|
||||
public ServerAlertContent build() {
|
||||
return new ServerAlertContent(this);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
|
@ -14,18 +14,21 @@
|
|||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.dolphinscheduler.dao.mapper;
|
||||
|
||||
import org.apache.dolphinscheduler.dao.entity.DefinitionGroupByUser;
|
||||
import org.apache.dolphinscheduler.dao.entity.ProcessDefinition;
|
||||
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
|
||||
import com.baomidou.mybatisplus.core.metadata.IPage;
|
||||
|
||||
import org.apache.ibatis.annotations.MapKey;
|
||||
import org.apache.ibatis.annotations.Param;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
|
||||
import com.baomidou.mybatisplus.core.metadata.IPage;
|
||||
|
||||
/**
|
||||
* process definition mapper interface
|
||||
*/
|
||||
|
|
@ -34,6 +37,7 @@ public interface ProcessDefinitionMapper extends BaseMapper<ProcessDefinition> {
|
|||
|
||||
/**
|
||||
* query process definition by name
|
||||
*
|
||||
* @param projectId projectId
|
||||
* @param name name
|
||||
* @return process definition
|
||||
|
|
@ -43,6 +47,7 @@ public interface ProcessDefinitionMapper extends BaseMapper<ProcessDefinition> {
|
|||
|
||||
/**
|
||||
* query process definition by id
|
||||
*
|
||||
* @param processDefineId processDefineId
|
||||
* @return process definition
|
||||
*/
|
||||
|
|
@ -50,6 +55,7 @@ public interface ProcessDefinitionMapper extends BaseMapper<ProcessDefinition> {
|
|||
|
||||
/**
|
||||
* process definition page
|
||||
*
|
||||
* @param page page
|
||||
* @param searchVal searchVal
|
||||
* @param userId userId
|
||||
|
|
@ -65,6 +71,7 @@ public interface ProcessDefinitionMapper extends BaseMapper<ProcessDefinition> {
|
|||
|
||||
/**
|
||||
* query all process definition list
|
||||
*
|
||||
* @param projectId projectId
|
||||
* @return process definition list
|
||||
*/
|
||||
|
|
@ -72,6 +79,7 @@ public interface ProcessDefinitionMapper extends BaseMapper<ProcessDefinition> {
|
|||
|
||||
/**
|
||||
* query process definition by ids
|
||||
*
|
||||
* @param ids ids
|
||||
* @return process definition list
|
||||
*/
|
||||
|
|
@ -79,6 +87,7 @@ public interface ProcessDefinitionMapper extends BaseMapper<ProcessDefinition> {
|
|||
|
||||
/**
|
||||
* query process definition by tenant
|
||||
*
|
||||
* @param tenantId tenantId
|
||||
* @return process definition list
|
||||
*/
|
||||
|
|
@ -86,6 +95,7 @@ public interface ProcessDefinitionMapper extends BaseMapper<ProcessDefinition> {
|
|||
|
||||
/**
|
||||
* count process definition group by user
|
||||
*
|
||||
* @param userId userId
|
||||
* @param projectIds projectIds
|
||||
* @param isAdmin isAdmin
|
||||
|
|
@ -98,6 +108,7 @@ public interface ProcessDefinitionMapper extends BaseMapper<ProcessDefinition> {
|
|||
|
||||
/**
|
||||
* list all resource ids
|
||||
*
|
||||
* @return resource ids list
|
||||
*/
|
||||
@MapKey("id")
|
||||
|
|
@ -105,8 +116,17 @@ public interface ProcessDefinitionMapper extends BaseMapper<ProcessDefinition> {
|
|||
|
||||
    /**
     * list all resource ids by user id
     *
     * @param userId id of the user owning the process definitions
     * @return resource ids list, keyed by the "id" column via {@code @MapKey}
     */
    @MapKey("id")
    List<Map<String, Object>> listResourcesByUser(@Param("userId") Integer userId);
|
||||
|
||||
    /**
     * update process definition version by process definitionId
     *
     * @param processDefinitionId process definition id
     * @param version new version number to store on the definition row
     */
    void updateVersionByProcessDefinitionId(@Param("processDefinitionId") int processDefinitionId, @Param("version") long version);
|
||||
}
|
||||
|
|
|
|||
|
|
@ -0,0 +1,69 @@
|
|||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
* contributor license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright ownership.
|
||||
* The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
* (the "License"); you may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.dolphinscheduler.dao.mapper;
|
||||
|
||||
import org.apache.dolphinscheduler.dao.entity.ProcessDefinitionVersion;
|
||||
|
||||
import org.apache.ibatis.annotations.Param;
|
||||
|
||||
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
|
||||
import com.baomidou.mybatisplus.core.metadata.IPage;
|
||||
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
|
||||
|
||||
/**
 * process definition version mapper interface
 */
public interface ProcessDefinitionVersionMapper extends BaseMapper<ProcessDefinitionVersion> {

    /**
     * query max version by process definition id
     *
     * <p>The backing SQL selects {@code max(version)}, so the result is
     * {@code null} when the definition has no recorded versions yet.
     *
     * @param processDefinitionId process definition id
     * @return the max version of this process definition id
     */
    Long queryMaxVersionByProcessDefinitionId(@Param("processDefinitionId") int processDefinitionId);

    /**
     * query the paging process definition version list by pagination info
     *
     * @param page pagination info
     * @param processDefinitionId process definition id
     * @return the paging process definition version list, newest version first
     */
    IPage<ProcessDefinitionVersion> queryProcessDefinitionVersionsPaging(Page<ProcessDefinitionVersion> page,
                                                                         @Param("processDefinitionId") int processDefinitionId);

    /**
     * query the certain process definition version info by process definition id and version number
     *
     * @param processDefinitionId process definition id
     * @param version version number
     * @return the process definition version info, or null when no such row exists
     */
    ProcessDefinitionVersion queryByProcessDefinitionIdAndVersion(@Param("processDefinitionId") int processDefinitionId, @Param("version") long version);

    /**
     * delete the certain process definition version by process definition id and version number
     *
     * @param processDefinitionId process definition id
     * @param version version number
     * @return delete result (number of rows affected)
     */
    int deleteByProcessDefinitionIdAndVersion(@Param("processDefinitionId") int processDefinitionId, @Param("version") long version);

}
|
||||
|
|
@ -102,4 +102,10 @@
|
|||
FROM t_ds_process_definition
|
||||
WHERE user_id = #{userId} and release_state = 1 and resource_ids is not null and resource_ids != ''
|
||||
</select>
|
||||
|
||||
    <!-- stamp a new version number onto one process definition row -->
    <update id="updateVersionByProcessDefinitionId">
        update t_ds_process_definition
        set version = #{version}
        where id = #{processDefinitionId}
    </update>
|
||||
</mapper>
|
||||
|
|
@ -0,0 +1,47 @@
|
|||
<?xml version="1.0" encoding="UTF-8" ?>
|
||||
<!--
|
||||
~ Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
~ contributor license agreements. See the NOTICE file distributed with
|
||||
~ this work for additional information regarding copyright ownership.
|
||||
~ The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
~ (the "License"); you may not use this file except in compliance with
|
||||
~ the License. You may obtain a copy of the License at
|
||||
~
|
||||
~ http://www.apache.org/licenses/LICENSE-2.0
|
||||
~
|
||||
~ Unless required by applicable law or agreed to in writing, software
|
||||
~ distributed under the License is distributed on an "AS IS" BASIS,
|
||||
~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
~ See the License for the specific language governing permissions and
|
||||
~ limitations under the License.
|
||||
-->
|
||||
|
||||
<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN" "http://mybatis.org/dtd/mybatis-3-mapper.dtd" >
<mapper namespace="org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionVersionMapper">
    <!-- highest version number recorded for a definition; null when none exist -->
    <select id="queryMaxVersionByProcessDefinitionId" resultType="java.lang.Long">
        select max(version)
        from t_ds_process_definition_version
        where process_definition_id = #{processDefinitionId}
    </select>

    <!-- all versions of one definition, newest first;
         NOTE(review): pagination appears to rely on the mybatis-plus page interceptor — confirm it is configured -->
    <select id="queryProcessDefinitionVersionsPaging" resultType="org.apache.dolphinscheduler.dao.entity.ProcessDefinitionVersion">
        select *
        from t_ds_process_definition_version
        where process_definition_id = #{processDefinitionId}
        order by version desc
    </select>

    <!-- single version row identified by definition id + version number -->
    <select id="queryByProcessDefinitionIdAndVersion" resultType="org.apache.dolphinscheduler.dao.entity.ProcessDefinitionVersion">
        select *
        from t_ds_process_definition_version
        where process_definition_id = #{processDefinitionId}
        and version = #{version}
    </select>

    <!-- remove one version row identified by definition id + version number -->
    <delete id="deleteByProcessDefinitionIdAndVersion">
        delete
        from t_ds_process_definition_version
        where process_definition_id = #{processDefinitionId}
        and version = #{version}
    </delete>
</mapper>
|
||||
|
|
@ -14,14 +14,22 @@
|
|||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.apache.dolphinscheduler.dao.mapper;
|
||||
|
||||
package org.apache.dolphinscheduler.dao.mapper;
|
||||
|
||||
import org.apache.dolphinscheduler.common.enums.ReleaseState;
|
||||
import org.apache.dolphinscheduler.common.enums.UserType;
|
||||
import com.baomidou.mybatisplus.core.metadata.IPage;
|
||||
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
|
||||
import org.apache.dolphinscheduler.dao.entity.*;
|
||||
import org.apache.dolphinscheduler.dao.entity.DefinitionGroupByUser;
|
||||
import org.apache.dolphinscheduler.dao.entity.ProcessDefinition;
|
||||
import org.apache.dolphinscheduler.dao.entity.Project;
|
||||
import org.apache.dolphinscheduler.dao.entity.Queue;
|
||||
import org.apache.dolphinscheduler.dao.entity.Tenant;
|
||||
import org.apache.dolphinscheduler.dao.entity.User;
|
||||
|
||||
import java.util.Date;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
import org.junit.Assert;
|
||||
import org.junit.Test;
|
||||
import org.junit.runner.RunWith;
|
||||
|
|
@ -31,9 +39,8 @@ import org.springframework.test.annotation.Rollback;
|
|||
import org.springframework.test.context.junit4.SpringRunner;
|
||||
import org.springframework.transaction.annotation.Transactional;
|
||||
|
||||
import java.util.Date;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import com.baomidou.mybatisplus.core.metadata.IPage;
|
||||
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
|
||||
|
||||
@RunWith(SpringRunner.class)
|
||||
@SpringBootTest
|
||||
|
|
@ -59,9 +66,10 @@ public class ProcessDefinitionMapperTest {
|
|||
|
||||
/**
|
||||
* insert
|
||||
*
|
||||
* @return ProcessDefinition
|
||||
*/
|
||||
private ProcessDefinition insertOne(){
|
||||
private ProcessDefinition insertOne() {
|
||||
//insertOne
|
||||
ProcessDefinition processDefinition = new ProcessDefinition();
|
||||
processDefinition.setName("def 1");
|
||||
|
|
@ -77,9 +85,10 @@ public class ProcessDefinitionMapperTest {
|
|||
|
||||
/**
|
||||
* insert
|
||||
*
|
||||
* @return ProcessDefinition
|
||||
*/
|
||||
private ProcessDefinition insertTwo(){
|
||||
private ProcessDefinition insertTwo() {
|
||||
//insertOne
|
||||
ProcessDefinition processDefinition = new ProcessDefinition();
|
||||
processDefinition.setName("def 2");
|
||||
|
|
@ -95,7 +104,7 @@ public class ProcessDefinitionMapperTest {
|
|||
* test update
|
||||
*/
|
||||
@Test
|
||||
public void testUpdate(){
|
||||
public void testUpdate() {
|
||||
//insertOne
|
||||
ProcessDefinition processDefinition = insertOne();
|
||||
//update
|
||||
|
|
@ -108,7 +117,7 @@ public class ProcessDefinitionMapperTest {
|
|||
* test delete
|
||||
*/
|
||||
@Test
|
||||
public void testDelete(){
|
||||
public void testDelete() {
|
||||
ProcessDefinition processDefinition = insertOne();
|
||||
int delete = processDefinitionMapper.deleteById(processDefinition.getId());
|
||||
Assert.assertEquals(1, delete);
|
||||
|
|
@ -175,8 +184,8 @@ public class ProcessDefinitionMapperTest {
|
|||
@Test
|
||||
public void testQueryDefineListPaging() {
|
||||
ProcessDefinition processDefinition = insertOne();
|
||||
Page<ProcessDefinition> page = new Page(1,3);
|
||||
IPage<ProcessDefinition> processDefinitionIPage = processDefinitionMapper.queryDefineListPaging(page, "def", 101, 1010,true);
|
||||
Page<ProcessDefinition> page = new Page(1, 3);
|
||||
IPage<ProcessDefinition> processDefinitionIPage = processDefinitionMapper.queryDefineListPaging(page, "def", 101, 1010, true);
|
||||
Assert.assertNotEquals(processDefinitionIPage.getTotal(), 0);
|
||||
}
|
||||
|
||||
|
|
@ -186,7 +195,7 @@ public class ProcessDefinitionMapperTest {
|
|||
@Test
|
||||
public void testQueryAllDefinitionList() {
|
||||
ProcessDefinition processDefinition = insertOne();
|
||||
List<ProcessDefinition> processDefinitionIPage = processDefinitionMapper.queryAllDefinitionList(1010);
|
||||
List<ProcessDefinition> processDefinitionIPage = processDefinitionMapper.queryAllDefinitionList(1010);
|
||||
Assert.assertNotEquals(processDefinitionIPage.size(), 0);
|
||||
}
|
||||
|
||||
|
|
@ -214,7 +223,7 @@ public class ProcessDefinitionMapperTest {
|
|||
@Test
|
||||
public void testCountDefinitionGroupByUser() {
|
||||
|
||||
User user= new User();
|
||||
User user = new User();
|
||||
user.setUserName("user1");
|
||||
user.setUserPassword("1");
|
||||
user.setEmail("xx@123.com");
|
||||
|
|
@ -239,7 +248,7 @@ public class ProcessDefinitionMapperTest {
|
|||
}
|
||||
|
||||
@Test
|
||||
public void listResourcesTest(){
|
||||
public void listResourcesTest() {
|
||||
ProcessDefinition processDefinition = insertOne();
|
||||
processDefinition.setResourceIds("3,5");
|
||||
processDefinition.setReleaseState(ReleaseState.ONLINE);
|
||||
|
|
@ -248,11 +257,22 @@ public class ProcessDefinitionMapperTest {
|
|||
}
|
||||
|
||||
@Test
|
||||
public void listResourcesByUserTest(){
|
||||
public void listResourcesByUserTest() {
|
||||
ProcessDefinition processDefinition = insertOne();
|
||||
processDefinition.setResourceIds("3,5");
|
||||
processDefinition.setReleaseState(ReleaseState.ONLINE);
|
||||
List<Map<String, Object>> maps = processDefinitionMapper.listResourcesByUser(processDefinition.getUserId());
|
||||
Assert.assertNotNull(maps);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testUpdateVersionByProcessDefinitionId() {
|
||||
long expectedVersion = 10;
|
||||
ProcessDefinition processDefinition = insertOne();
|
||||
processDefinition.setVersion(expectedVersion);
|
||||
processDefinitionMapper.updateVersionByProcessDefinitionId(
|
||||
processDefinition.getId(), processDefinition.getVersion());
|
||||
ProcessDefinition processDefinition1 = processDefinitionMapper.selectById(processDefinition.getId());
|
||||
Assert.assertEquals(expectedVersion, processDefinition1.getVersion());
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,172 @@
|
|||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
* contributor license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright ownership.
|
||||
* The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
* (the "License"); you may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.dolphinscheduler.dao.mapper;
|
||||
|
||||
import org.apache.dolphinscheduler.common.utils.StringUtils;
|
||||
import org.apache.dolphinscheduler.dao.entity.ProcessDefinitionVersion;
|
||||
|
||||
import java.util.Date;
|
||||
|
||||
import org.junit.Assert;
|
||||
import org.junit.Test;
|
||||
import org.junit.runner.RunWith;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.boot.test.context.SpringBootTest;
|
||||
import org.springframework.test.annotation.Rollback;
|
||||
import org.springframework.test.context.junit4.SpringRunner;
|
||||
import org.springframework.transaction.annotation.Transactional;
|
||||
|
||||
import com.baomidou.mybatisplus.core.metadata.IPage;
|
||||
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
|
||||
|
||||
@RunWith(SpringRunner.class)
|
||||
@SpringBootTest
|
||||
@Transactional
|
||||
@Rollback(true)
|
||||
public class ProcessDefinitionVersionMapperTest {
|
||||
|
||||
|
||||
@Autowired
|
||||
ProcessDefinitionMapper processDefinitionMapper;
|
||||
|
||||
@Autowired
|
||||
ProcessDefinitionVersionMapper processDefinitionVersionMapper;
|
||||
|
||||
@Autowired
|
||||
UserMapper userMapper;
|
||||
|
||||
@Autowired
|
||||
QueueMapper queueMapper;
|
||||
|
||||
@Autowired
|
||||
TenantMapper tenantMapper;
|
||||
|
||||
@Autowired
|
||||
ProjectMapper projectMapper;
|
||||
|
||||
/**
|
||||
* insert
|
||||
*
|
||||
* @return ProcessDefinition
|
||||
*/
|
||||
private ProcessDefinitionVersion insertOne() {
|
||||
// insertOne
|
||||
ProcessDefinitionVersion processDefinitionVersion
|
||||
= new ProcessDefinitionVersion();
|
||||
processDefinitionVersion.setProcessDefinitionId(66);
|
||||
processDefinitionVersion.setVersion(10);
|
||||
processDefinitionVersion.setProcessDefinitionJson(StringUtils.EMPTY);
|
||||
processDefinitionVersion.setDescription(StringUtils.EMPTY);
|
||||
processDefinitionVersion.setGlobalParams(StringUtils.EMPTY);
|
||||
processDefinitionVersion.setCreateTime(new Date());
|
||||
processDefinitionVersion.setLocations(StringUtils.EMPTY);
|
||||
processDefinitionVersion.setConnects(StringUtils.EMPTY);
|
||||
processDefinitionVersion.setReceivers(StringUtils.EMPTY);
|
||||
processDefinitionVersion.setReceiversCc(StringUtils.EMPTY);
|
||||
processDefinitionVersion.setTimeout(10);
|
||||
processDefinitionVersion.setResourceIds("1,2");
|
||||
processDefinitionVersionMapper.insert(processDefinitionVersion);
|
||||
return processDefinitionVersion;
|
||||
}
|
||||
|
||||
/**
|
||||
* insert
|
||||
*
|
||||
* @return ProcessDefinitionVersion
|
||||
*/
|
||||
private ProcessDefinitionVersion insertTwo() {
|
||||
// insertTwo
|
||||
ProcessDefinitionVersion processDefinitionVersion
|
||||
= new ProcessDefinitionVersion();
|
||||
processDefinitionVersion.setProcessDefinitionId(67);
|
||||
processDefinitionVersion.setVersion(11);
|
||||
processDefinitionVersion.setProcessDefinitionJson(StringUtils.EMPTY);
|
||||
processDefinitionVersion.setDescription(StringUtils.EMPTY);
|
||||
processDefinitionVersion.setGlobalParams(StringUtils.EMPTY);
|
||||
processDefinitionVersion.setCreateTime(new Date());
|
||||
processDefinitionVersion.setLocations(StringUtils.EMPTY);
|
||||
processDefinitionVersion.setConnects(StringUtils.EMPTY);
|
||||
processDefinitionVersion.setReceivers(StringUtils.EMPTY);
|
||||
processDefinitionVersion.setReceiversCc(StringUtils.EMPTY);
|
||||
processDefinitionVersion.setTimeout(10);
|
||||
processDefinitionVersion.setResourceIds("1,2");
|
||||
processDefinitionVersionMapper.insert(processDefinitionVersion);
|
||||
return processDefinitionVersion;
|
||||
}
|
||||
|
||||
/**
|
||||
* test insert
|
||||
*/
|
||||
@Test
|
||||
public void testInsert() {
|
||||
ProcessDefinitionVersion processDefinitionVersion = insertOne();
|
||||
Assert.assertTrue(processDefinitionVersion.getId() > 0);
|
||||
}
|
||||
|
||||
/**
|
||||
* test query
|
||||
*/
|
||||
@Test
|
||||
public void testQueryMaxVersionByProcessDefinitionId() {
|
||||
ProcessDefinitionVersion processDefinitionVersion = insertOne();
|
||||
|
||||
Long version = processDefinitionVersionMapper.queryMaxVersionByProcessDefinitionId(
|
||||
processDefinitionVersion.getProcessDefinitionId());
|
||||
// query
|
||||
Assert.assertEquals(10, (long) version);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testQueryProcessDefinitionVersionsPaging() {
|
||||
insertOne();
|
||||
insertTwo();
|
||||
|
||||
Page<ProcessDefinitionVersion> page = new Page<>(1, 3);
|
||||
|
||||
IPage<ProcessDefinitionVersion> processDefinitionVersionIPage =
|
||||
processDefinitionVersionMapper.queryProcessDefinitionVersionsPaging(page, 10);
|
||||
|
||||
Assert.assertTrue(processDefinitionVersionIPage.getSize() >= 2);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testDeleteByProcessDefinitionIdAndVersion() {
|
||||
ProcessDefinitionVersion processDefinitionVersion = insertOne();
|
||||
int i = processDefinitionVersionMapper.deleteByProcessDefinitionIdAndVersion(
|
||||
processDefinitionVersion.getProcessDefinitionId(), processDefinitionVersion.getVersion());
|
||||
Assert.assertEquals(1, i);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testQueryByProcessDefinitionIdAndVersion() {
|
||||
ProcessDefinitionVersion processDefinitionVersion1 = insertOne();
|
||||
ProcessDefinitionVersion processDefinitionVersion3 = processDefinitionVersionMapper.queryByProcessDefinitionIdAndVersion(
|
||||
processDefinitionVersion1.getProcessDefinitionId(), 10);
|
||||
|
||||
ProcessDefinitionVersion processDefinitionVersion2 = insertTwo();
|
||||
ProcessDefinitionVersion processDefinitionVersion4 = processDefinitionVersionMapper.queryByProcessDefinitionIdAndVersion(
|
||||
processDefinitionVersion2.getProcessDefinitionId(), 11);
|
||||
|
||||
Assert.assertEquals(processDefinitionVersion1.getProcessDefinitionId(),
|
||||
processDefinitionVersion3.getProcessDefinitionId());
|
||||
Assert.assertEquals(processDefinitionVersion2.getProcessDefinitionId(),
|
||||
processDefinitionVersion4.getProcessDefinitionId());
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
|
@ -18,10 +18,17 @@
|
|||
package org.apache.dolphinscheduler.remote;
|
||||
|
||||
import io.netty.bootstrap.Bootstrap;
|
||||
import io.netty.channel.*;
|
||||
import io.netty.channel.Channel;
|
||||
import io.netty.channel.ChannelFuture;
|
||||
import io.netty.channel.ChannelFutureListener;
|
||||
import io.netty.channel.ChannelInitializer;
|
||||
import io.netty.channel.ChannelOption;
|
||||
import io.netty.channel.EventLoopGroup;
|
||||
import io.netty.channel.epoll.EpollEventLoopGroup;
|
||||
import io.netty.channel.nio.NioEventLoopGroup;
|
||||
import io.netty.channel.socket.SocketChannel;
|
||||
import io.netty.channel.socket.nio.NioSocketChannel;
|
||||
|
||||
import org.apache.dolphinscheduler.remote.codec.NettyDecoder;
|
||||
import org.apache.dolphinscheduler.remote.codec.NettyEncoder;
|
||||
import org.apache.dolphinscheduler.remote.command.Command;
|
||||
|
|
@ -38,6 +45,8 @@ import org.apache.dolphinscheduler.remote.processor.NettyRequestProcessor;
|
|||
import org.apache.dolphinscheduler.remote.utils.Host;
|
||||
import org.apache.dolphinscheduler.remote.utils.CallerThreadExecutePolicy;
|
||||
import org.apache.dolphinscheduler.remote.utils.NamedThreadFactory;
|
||||
import org.apache.dolphinscheduler.remote.utils.NettyUtils;
|
||||
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
|
|
@ -47,7 +56,7 @@ import java.util.concurrent.atomic.AtomicBoolean;
|
|||
import java.util.concurrent.atomic.AtomicInteger;
|
||||
|
||||
/**
|
||||
* remoting netty client
|
||||
* remoting netty client
|
||||
*/
|
||||
public class NettyRemotingClient {
|
||||
|
||||
|
|
@ -59,7 +68,7 @@ public class NettyRemotingClient {
|
|||
private final Bootstrap bootstrap = new Bootstrap();
|
||||
|
||||
/**
|
||||
* encoder
|
||||
* encoder
|
||||
*/
|
||||
private final NettyEncoder encoder = new NettyEncoder();
|
||||
|
||||
|
|
@ -69,57 +78,69 @@ public class NettyRemotingClient {
|
|||
private final ConcurrentHashMap<Host, Channel> channels = new ConcurrentHashMap(128);
|
||||
|
||||
/**
|
||||
* started flag
|
||||
* started flag
|
||||
*/
|
||||
private final AtomicBoolean isStarted = new AtomicBoolean(false);
|
||||
|
||||
/**
|
||||
* worker group
|
||||
* worker group
|
||||
*/
|
||||
private final NioEventLoopGroup workerGroup;
|
||||
private final EventLoopGroup workerGroup;
|
||||
|
||||
/**
|
||||
* client config
|
||||
* client config
|
||||
*/
|
||||
private final NettyClientConfig clientConfig;
|
||||
|
||||
/**
|
||||
* saync semaphore
|
||||
* saync semaphore
|
||||
*/
|
||||
private final Semaphore asyncSemaphore = new Semaphore(200, true);
|
||||
|
||||
/**
|
||||
* callback thread executor
|
||||
* callback thread executor
|
||||
*/
|
||||
private final ExecutorService callbackExecutor;
|
||||
|
||||
/**
|
||||
* client handler
|
||||
* client handler
|
||||
*/
|
||||
private final NettyClientHandler clientHandler;
|
||||
|
||||
/**
|
||||
* response future executor
|
||||
* response future executor
|
||||
*/
|
||||
private final ScheduledExecutorService responseFutureExecutor;
|
||||
|
||||
/**
|
||||
* client init
|
||||
* client init
|
||||
*
|
||||
* @param clientConfig client config
|
||||
*/
|
||||
public NettyRemotingClient(final NettyClientConfig clientConfig){
|
||||
public NettyRemotingClient(final NettyClientConfig clientConfig) {
|
||||
this.clientConfig = clientConfig;
|
||||
this.workerGroup = new NioEventLoopGroup(clientConfig.getWorkerThreads(), new ThreadFactory() {
|
||||
private AtomicInteger threadIndex = new AtomicInteger(0);
|
||||
if (NettyUtils.useEpoll()) {
|
||||
this.workerGroup = new EpollEventLoopGroup(clientConfig.getWorkerThreads(), new ThreadFactory() {
|
||||
private AtomicInteger threadIndex = new AtomicInteger(0);
|
||||
|
||||
@Override
|
||||
public Thread newThread(Runnable r) {
|
||||
return new Thread(r, String.format("NettyClient_%d", this.threadIndex.incrementAndGet()));
|
||||
}
|
||||
});
|
||||
@Override
|
||||
public Thread newThread(Runnable r) {
|
||||
return new Thread(r, String.format("NettyClient_%d", this.threadIndex.incrementAndGet()));
|
||||
}
|
||||
});
|
||||
} else {
|
||||
this.workerGroup = new NioEventLoopGroup(clientConfig.getWorkerThreads(), new ThreadFactory() {
|
||||
private AtomicInteger threadIndex = new AtomicInteger(0);
|
||||
|
||||
@Override
|
||||
public Thread newThread(Runnable r) {
|
||||
return new Thread(r, String.format("NettyClient_%d", this.threadIndex.incrementAndGet()));
|
||||
}
|
||||
});
|
||||
}
|
||||
this.callbackExecutor = new ThreadPoolExecutor(5, 10, 1, TimeUnit.MINUTES,
|
||||
new LinkedBlockingQueue<>(1000), new NamedThreadFactory("CallbackExecutor", 10),
|
||||
new CallerThreadExecutePolicy());
|
||||
new LinkedBlockingQueue<>(1000), new NamedThreadFactory("CallbackExecutor", 10),
|
||||
new CallerThreadExecutePolicy());
|
||||
this.clientHandler = new NettyClientHandler(this, callbackExecutor);
|
||||
|
||||
this.responseFutureExecutor = Executors.newSingleThreadScheduledExecutor(new NamedThreadFactory("ResponseFutureExecutor"));
|
||||
|
|
@ -128,26 +149,26 @@ public class NettyRemotingClient {
|
|||
}
|
||||
|
||||
/**
|
||||
* start
|
||||
* start
|
||||
*/
|
||||
private void start(){
|
||||
private void start() {
|
||||
|
||||
this.bootstrap
|
||||
.group(this.workerGroup)
|
||||
.channel(NioSocketChannel.class)
|
||||
.option(ChannelOption.SO_KEEPALIVE, clientConfig.isSoKeepalive())
|
||||
.option(ChannelOption.TCP_NODELAY, clientConfig.isTcpNoDelay())
|
||||
.option(ChannelOption.SO_SNDBUF, clientConfig.getSendBufferSize())
|
||||
.option(ChannelOption.SO_RCVBUF, clientConfig.getReceiveBufferSize())
|
||||
.handler(new ChannelInitializer<SocketChannel>() {
|
||||
@Override
|
||||
public void initChannel(SocketChannel ch) throws Exception {
|
||||
ch.pipeline().addLast(
|
||||
new NettyDecoder(),
|
||||
clientHandler,
|
||||
encoder);
|
||||
}
|
||||
});
|
||||
.group(this.workerGroup)
|
||||
.channel(NioSocketChannel.class)
|
||||
.option(ChannelOption.SO_KEEPALIVE, clientConfig.isSoKeepalive())
|
||||
.option(ChannelOption.TCP_NODELAY, clientConfig.isTcpNoDelay())
|
||||
.option(ChannelOption.SO_SNDBUF, clientConfig.getSendBufferSize())
|
||||
.option(ChannelOption.SO_RCVBUF, clientConfig.getReceiveBufferSize())
|
||||
.handler(new ChannelInitializer<SocketChannel>() {
|
||||
@Override
|
||||
public void initChannel(SocketChannel ch) throws Exception {
|
||||
ch.pipeline().addLast(
|
||||
new NettyDecoder(),
|
||||
clientHandler,
|
||||
encoder);
|
||||
}
|
||||
});
|
||||
this.responseFutureExecutor.scheduleAtFixedRate(new Runnable() {
|
||||
@Override
|
||||
public void run() {
|
||||
|
|
@ -159,10 +180,11 @@ public class NettyRemotingClient {
|
|||
}
|
||||
|
||||
/**
|
||||
* async send
|
||||
* @param host host
|
||||
* @param command command
|
||||
* @param timeoutMillis timeoutMillis
|
||||
* async send
|
||||
*
|
||||
* @param host host
|
||||
* @param command command
|
||||
* @param timeoutMillis timeoutMillis
|
||||
* @param invokeCallback callback function
|
||||
* @throws InterruptedException
|
||||
* @throws RemotingException
|
||||
|
|
@ -182,22 +204,22 @@ public class NettyRemotingClient {
|
|||
* control concurrency number
|
||||
*/
|
||||
boolean acquired = this.asyncSemaphore.tryAcquire(timeoutMillis, TimeUnit.MILLISECONDS);
|
||||
if(acquired){
|
||||
if (acquired) {
|
||||
final ReleaseSemaphore releaseSemaphore = new ReleaseSemaphore(this.asyncSemaphore);
|
||||
|
||||
/**
|
||||
* response future
|
||||
*/
|
||||
final ResponseFuture responseFuture = new ResponseFuture(opaque,
|
||||
timeoutMillis,
|
||||
invokeCallback,
|
||||
releaseSemaphore);
|
||||
timeoutMillis,
|
||||
invokeCallback,
|
||||
releaseSemaphore);
|
||||
try {
|
||||
channel.writeAndFlush(command).addListener(new ChannelFutureListener(){
|
||||
channel.writeAndFlush(command).addListener(new ChannelFutureListener() {
|
||||
|
||||
@Override
|
||||
public void operationComplete(ChannelFuture future) throws Exception {
|
||||
if(future.isSuccess()){
|
||||
if (future.isSuccess()) {
|
||||
responseFuture.setSendOk(true);
|
||||
return;
|
||||
} else {
|
||||
|
|
@ -207,28 +229,29 @@ public class NettyRemotingClient {
|
|||
responseFuture.putResponse(null);
|
||||
try {
|
||||
responseFuture.executeInvokeCallback();
|
||||
} catch (Throwable ex){
|
||||
} catch (Throwable ex) {
|
||||
logger.error("execute callback error", ex);
|
||||
} finally{
|
||||
} finally {
|
||||
responseFuture.release();
|
||||
}
|
||||
}
|
||||
});
|
||||
} catch (Throwable ex){
|
||||
} catch (Throwable ex) {
|
||||
responseFuture.release();
|
||||
throw new RemotingException(String.format("send command to host: %s failed", host), ex);
|
||||
}
|
||||
} else{
|
||||
} else {
|
||||
String message = String.format("try to acquire async semaphore timeout: %d, waiting thread num: %d, total permits: %d",
|
||||
timeoutMillis, asyncSemaphore.getQueueLength(), asyncSemaphore.availablePermits());
|
||||
timeoutMillis, asyncSemaphore.getQueueLength(), asyncSemaphore.availablePermits());
|
||||
throw new RemotingTooMuchRequestException(message);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* sync send
|
||||
* @param host host
|
||||
* @param command command
|
||||
*
|
||||
* @param host host
|
||||
* @param command command
|
||||
* @param timeoutMillis timeoutMillis
|
||||
* @return command
|
||||
* @throws InterruptedException
|
||||
|
|
@ -244,7 +267,7 @@ public class NettyRemotingClient {
|
|||
channel.writeAndFlush(command).addListener(new ChannelFutureListener() {
|
||||
@Override
|
||||
public void operationComplete(ChannelFuture future) throws Exception {
|
||||
if(future.isSuccess()){
|
||||
if (future.isSuccess()) {
|
||||
responseFuture.setSendOk(true);
|
||||
return;
|
||||
} else {
|
||||
|
|
@ -259,10 +282,10 @@ public class NettyRemotingClient {
|
|||
* sync wait for result
|
||||
*/
|
||||
Command result = responseFuture.waitResponse();
|
||||
if(result == null){
|
||||
if(responseFuture.isSendOK()){
|
||||
if (result == null) {
|
||||
if (responseFuture.isSendOK()) {
|
||||
throw new RemotingTimeoutException(host.toString(), timeoutMillis, responseFuture.getCause());
|
||||
} else{
|
||||
} else {
|
||||
throw new RemotingException(host.toString(), responseFuture.getCause());
|
||||
}
|
||||
}
|
||||
|
|
@ -270,8 +293,9 @@ public class NettyRemotingClient {
|
|||
}
|
||||
|
||||
/**
|
||||
* send task
|
||||
* @param host host
|
||||
* send task
|
||||
*
|
||||
* @param host host
|
||||
* @param command command
|
||||
* @throws RemotingException
|
||||
*/
|
||||
|
|
@ -296,33 +320,35 @@ public class NettyRemotingClient {
|
|||
}
|
||||
|
||||
/**
|
||||
* register processor
|
||||
* register processor
|
||||
*
|
||||
* @param commandType command type
|
||||
* @param processor processor
|
||||
* @param processor processor
|
||||
*/
|
||||
public void registerProcessor(final CommandType commandType, final NettyRequestProcessor processor) {
|
||||
this.registerProcessor(commandType, processor, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* register processor
|
||||
* register processor
|
||||
*
|
||||
* @param commandType command type
|
||||
* @param processor processor
|
||||
* @param executor thread executor
|
||||
* @param processor processor
|
||||
* @param executor thread executor
|
||||
*/
|
||||
public void registerProcessor(final CommandType commandType, final NettyRequestProcessor processor, final ExecutorService executor) {
|
||||
this.clientHandler.registerProcessor(commandType, processor, executor);
|
||||
}
|
||||
|
||||
/**
|
||||
* get channel
|
||||
* get channel
|
||||
*
|
||||
* @param host
|
||||
* @return
|
||||
*/
|
||||
public Channel getChannel(Host host) {
|
||||
Channel channel = channels.get(host);
|
||||
if(channel != null && channel.isActive()){
|
||||
if (channel != null && channel.isActive()) {
|
||||
return channel;
|
||||
}
|
||||
return createChannel(host, true);
|
||||
|
|
@ -330,17 +356,18 @@ public class NettyRemotingClient {
|
|||
|
||||
/**
|
||||
* create channel
|
||||
* @param host host
|
||||
*
|
||||
* @param host host
|
||||
* @param isSync sync flag
|
||||
* @return channel
|
||||
*/
|
||||
public Channel createChannel(Host host, boolean isSync) {
|
||||
ChannelFuture future;
|
||||
try {
|
||||
synchronized (bootstrap){
|
||||
synchronized (bootstrap) {
|
||||
future = bootstrap.connect(new InetSocketAddress(host.getIp(), host.getPort()));
|
||||
}
|
||||
if(isSync){
|
||||
if (isSync) {
|
||||
future.sync();
|
||||
}
|
||||
if (future.isSuccess()) {
|
||||
|
|
@ -358,16 +385,16 @@ public class NettyRemotingClient {
|
|||
* close
|
||||
*/
|
||||
public void close() {
|
||||
if(isStarted.compareAndSet(true, false)){
|
||||
if (isStarted.compareAndSet(true, false)) {
|
||||
try {
|
||||
closeChannels();
|
||||
if(workerGroup != null){
|
||||
if (workerGroup != null) {
|
||||
this.workerGroup.shutdownGracefully();
|
||||
}
|
||||
if(callbackExecutor != null){
|
||||
if (callbackExecutor != null) {
|
||||
this.callbackExecutor.shutdownNow();
|
||||
}
|
||||
if(this.responseFutureExecutor != null){
|
||||
if (this.responseFutureExecutor != null) {
|
||||
this.responseFutureExecutor.shutdownNow();
|
||||
}
|
||||
} catch (Exception ex) {
|
||||
|
|
@ -378,9 +405,9 @@ public class NettyRemotingClient {
|
|||
}
|
||||
|
||||
/**
|
||||
* close channels
|
||||
* close channels
|
||||
*/
|
||||
private void closeChannels(){
|
||||
private void closeChannels() {
|
||||
for (Channel channel : this.channels.values()) {
|
||||
channel.close();
|
||||
}
|
||||
|
|
@ -389,11 +416,12 @@ public class NettyRemotingClient {
|
|||
|
||||
/**
|
||||
* close channel
|
||||
*
|
||||
* @param host host
|
||||
*/
|
||||
public void closeChannel(Host host){
|
||||
public void closeChannel(Host host) {
|
||||
Channel channel = this.channels.remove(host);
|
||||
if(channel != null){
|
||||
if (channel != null) {
|
||||
channel.close();
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -22,9 +22,12 @@ import io.netty.channel.ChannelFuture;
|
|||
import io.netty.channel.ChannelInitializer;
|
||||
import io.netty.channel.ChannelOption;
|
||||
import io.netty.channel.ChannelPipeline;
|
||||
import io.netty.channel.EventLoopGroup;
|
||||
import io.netty.channel.epoll.EpollEventLoopGroup;
|
||||
import io.netty.channel.nio.NioEventLoopGroup;
|
||||
import io.netty.channel.socket.nio.NioServerSocketChannel;
|
||||
import io.netty.channel.socket.nio.NioSocketChannel;
|
||||
|
||||
import org.apache.dolphinscheduler.remote.codec.NettyDecoder;
|
||||
import org.apache.dolphinscheduler.remote.codec.NettyEncoder;
|
||||
import org.apache.dolphinscheduler.remote.command.CommandType;
|
||||
|
|
@ -32,6 +35,8 @@ import org.apache.dolphinscheduler.remote.config.NettyServerConfig;
|
|||
import org.apache.dolphinscheduler.remote.handler.NettyServerHandler;
|
||||
import org.apache.dolphinscheduler.remote.processor.NettyRequestProcessor;
|
||||
import org.apache.dolphinscheduler.remote.utils.Constants;
|
||||
import org.apache.dolphinscheduler.remote.utils.NettyUtils;
|
||||
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
|
|
@ -42,44 +47,44 @@ import java.util.concurrent.atomic.AtomicBoolean;
|
|||
import java.util.concurrent.atomic.AtomicInteger;
|
||||
|
||||
/**
|
||||
* remoting netty server
|
||||
* remoting netty server
|
||||
*/
|
||||
public class NettyRemotingServer {
|
||||
|
||||
private final Logger logger = LoggerFactory.getLogger(NettyRemotingServer.class);
|
||||
|
||||
/**
|
||||
* server bootstrap
|
||||
* server bootstrap
|
||||
*/
|
||||
private final ServerBootstrap serverBootstrap = new ServerBootstrap();
|
||||
|
||||
/**
|
||||
* encoder
|
||||
* encoder
|
||||
*/
|
||||
private final NettyEncoder encoder = new NettyEncoder();
|
||||
|
||||
/**
|
||||
* default executor
|
||||
* default executor
|
||||
*/
|
||||
private final ExecutorService defaultExecutor = Executors.newFixedThreadPool(Constants.CPUS);
|
||||
|
||||
/**
|
||||
* boss group
|
||||
*/
|
||||
private final NioEventLoopGroup bossGroup;
|
||||
private final EventLoopGroup bossGroup;
|
||||
|
||||
/**
|
||||
* worker group
|
||||
* worker group
|
||||
*/
|
||||
private final NioEventLoopGroup workGroup;
|
||||
private final EventLoopGroup workGroup;
|
||||
|
||||
/**
|
||||
* server config
|
||||
* server config
|
||||
*/
|
||||
private final NettyServerConfig serverConfig;
|
||||
|
||||
/**
|
||||
* server handler
|
||||
* server handler
|
||||
*/
|
||||
private final NettyServerHandler serverHandler = new NettyServerHandler(this);
|
||||
|
||||
|
|
@ -89,59 +94,78 @@ public class NettyRemotingServer {
|
|||
private final AtomicBoolean isStarted = new AtomicBoolean(false);
|
||||
|
||||
/**
|
||||
* server init
|
||||
* server init
|
||||
*
|
||||
* @param serverConfig server config
|
||||
*/
|
||||
public NettyRemotingServer(final NettyServerConfig serverConfig){
|
||||
public NettyRemotingServer(final NettyServerConfig serverConfig) {
|
||||
this.serverConfig = serverConfig;
|
||||
if (NettyUtils.useEpoll()) {
|
||||
this.bossGroup = new EpollEventLoopGroup(1, new ThreadFactory() {
|
||||
private AtomicInteger threadIndex = new AtomicInteger(0);
|
||||
|
||||
this.bossGroup = new NioEventLoopGroup(1, new ThreadFactory() {
|
||||
private AtomicInteger threadIndex = new AtomicInteger(0);
|
||||
@Override
|
||||
public Thread newThread(Runnable r) {
|
||||
return new Thread(r, String.format("NettyServerBossThread_%d", this.threadIndex.incrementAndGet()));
|
||||
}
|
||||
});
|
||||
|
||||
@Override
|
||||
public Thread newThread(Runnable r) {
|
||||
return new Thread(r, String.format("NettyServerBossThread_%d", this.threadIndex.incrementAndGet()));
|
||||
}
|
||||
});
|
||||
this.workGroup = new EpollEventLoopGroup(serverConfig.getWorkerThread(), new ThreadFactory() {
|
||||
private AtomicInteger threadIndex = new AtomicInteger(0);
|
||||
|
||||
this.workGroup = new NioEventLoopGroup(serverConfig.getWorkerThread(), new ThreadFactory() {
|
||||
private AtomicInteger threadIndex = new AtomicInteger(0);
|
||||
@Override
|
||||
public Thread newThread(Runnable r) {
|
||||
return new Thread(r, String.format("NettyServerWorkerThread_%d", this.threadIndex.incrementAndGet()));
|
||||
}
|
||||
});
|
||||
} else {
|
||||
this.bossGroup = new NioEventLoopGroup(1, new ThreadFactory() {
|
||||
private AtomicInteger threadIndex = new AtomicInteger(0);
|
||||
|
||||
@Override
|
||||
public Thread newThread(Runnable r) {
|
||||
return new Thread(r, String.format("NettyServerWorkerThread_%d", this.threadIndex.incrementAndGet()));
|
||||
}
|
||||
});
|
||||
@Override
|
||||
public Thread newThread(Runnable r) {
|
||||
return new Thread(r, String.format("NettyServerBossThread_%d", this.threadIndex.incrementAndGet()));
|
||||
}
|
||||
});
|
||||
|
||||
this.workGroup = new NioEventLoopGroup(serverConfig.getWorkerThread(), new ThreadFactory() {
|
||||
private AtomicInteger threadIndex = new AtomicInteger(0);
|
||||
|
||||
@Override
|
||||
public Thread newThread(Runnable r) {
|
||||
return new Thread(r, String.format("NettyServerWorkerThread_%d", this.threadIndex.incrementAndGet()));
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* server start
|
||||
* server start
|
||||
*/
|
||||
public void start(){
|
||||
public void start() {
|
||||
if (isStarted.compareAndSet(false, true)) {
|
||||
this.serverBootstrap
|
||||
.group(this.bossGroup, this.workGroup)
|
||||
.channel(NioServerSocketChannel.class)
|
||||
.option(ChannelOption.SO_REUSEADDR, true)
|
||||
.option(ChannelOption.SO_BACKLOG, serverConfig.getSoBacklog())
|
||||
.childOption(ChannelOption.SO_KEEPALIVE, serverConfig.isSoKeepalive())
|
||||
.childOption(ChannelOption.TCP_NODELAY, serverConfig.isTcpNoDelay())
|
||||
.childOption(ChannelOption.SO_SNDBUF, serverConfig.getSendBufferSize())
|
||||
.childOption(ChannelOption.SO_RCVBUF, serverConfig.getReceiveBufferSize())
|
||||
.childHandler(new ChannelInitializer<NioSocketChannel>() {
|
||||
.group(this.bossGroup, this.workGroup)
|
||||
.channel(NioServerSocketChannel.class)
|
||||
.option(ChannelOption.SO_REUSEADDR, true)
|
||||
.option(ChannelOption.SO_BACKLOG, serverConfig.getSoBacklog())
|
||||
.childOption(ChannelOption.SO_KEEPALIVE, serverConfig.isSoKeepalive())
|
||||
.childOption(ChannelOption.TCP_NODELAY, serverConfig.isTcpNoDelay())
|
||||
.childOption(ChannelOption.SO_SNDBUF, serverConfig.getSendBufferSize())
|
||||
.childOption(ChannelOption.SO_RCVBUF, serverConfig.getReceiveBufferSize())
|
||||
.childHandler(new ChannelInitializer<NioSocketChannel>() {
|
||||
|
||||
@Override
|
||||
protected void initChannel(NioSocketChannel ch) throws Exception {
|
||||
initNettyChannel(ch);
|
||||
}
|
||||
});
|
||||
@Override
|
||||
protected void initChannel(NioSocketChannel ch) throws Exception {
|
||||
initNettyChannel(ch);
|
||||
}
|
||||
});
|
||||
|
||||
ChannelFuture future;
|
||||
try {
|
||||
future = serverBootstrap.bind(serverConfig.getListenPort()).sync();
|
||||
} catch (Exception e) {
|
||||
logger.error("NettyRemotingServer bind fail {}, exit",e.getMessage(), e);
|
||||
logger.error("NettyRemotingServer bind fail {}, exit", e.getMessage(), e);
|
||||
throw new RuntimeException(String.format("NettyRemotingServer bind %s fail", serverConfig.getListenPort()));
|
||||
}
|
||||
if (future.isSuccess()) {
|
||||
|
|
@ -155,11 +179,12 @@ public class NettyRemotingServer {
|
|||
}
|
||||
|
||||
/**
|
||||
* init netty channel
|
||||
* init netty channel
|
||||
*
|
||||
* @param ch socket channel
|
||||
* @throws Exception
|
||||
*/
|
||||
private void initNettyChannel(NioSocketChannel ch) throws Exception{
|
||||
private void initNettyChannel(NioSocketChannel ch) throws Exception {
|
||||
ChannelPipeline pipeline = ch.pipeline();
|
||||
pipeline.addLast("encoder", encoder);
|
||||
pipeline.addLast("decoder", new NettyDecoder());
|
||||
|
|
@ -167,27 +192,29 @@ public class NettyRemotingServer {
|
|||
}
|
||||
|
||||
/**
|
||||
* register processor
|
||||
* register processor
|
||||
*
|
||||
* @param commandType command type
|
||||
* @param processor processor
|
||||
* @param processor processor
|
||||
*/
|
||||
public void registerProcessor(final CommandType commandType, final NettyRequestProcessor processor) {
|
||||
this.registerProcessor(commandType, processor, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* register processor
|
||||
* register processor
|
||||
*
|
||||
* @param commandType command type
|
||||
* @param processor processor
|
||||
* @param executor thread executor
|
||||
* @param processor processor
|
||||
* @param executor thread executor
|
||||
*/
|
||||
public void registerProcessor(final CommandType commandType, final NettyRequestProcessor processor, final ExecutorService executor) {
|
||||
this.serverHandler.registerProcessor(commandType, processor, executor);
|
||||
}
|
||||
|
||||
/**
|
||||
* get default thread executor
|
||||
* get default thread executor
|
||||
*
|
||||
* @return thread executor
|
||||
*/
|
||||
public ExecutorService getDefaultExecutor() {
|
||||
|
|
@ -195,12 +222,12 @@ public class NettyRemotingServer {
|
|||
}
|
||||
|
||||
public void close() {
|
||||
if(isStarted.compareAndSet(true, false)){
|
||||
if (isStarted.compareAndSet(true, false)) {
|
||||
try {
|
||||
if(bossGroup != null){
|
||||
if (bossGroup != null) {
|
||||
this.bossGroup.shutdownGracefully();
|
||||
}
|
||||
if(workGroup != null){
|
||||
if (workGroup != null) {
|
||||
this.workGroup.shutdownGracefully();
|
||||
}
|
||||
defaultExecutor.shutdown();
|
||||
|
|
|
|||
|
|
@ -42,4 +42,14 @@ public class Constants {
|
|||
|
||||
public static final String LOCAL_ADDRESS = IPUtils.getFirstNoLoopbackIP4Address();
|
||||
|
||||
/**
|
||||
* netty epoll enable switch
|
||||
*/
|
||||
public static final String NETTY_EPOLL_ENABLE = System.getProperty("netty.epoll.enable");
|
||||
|
||||
/**
|
||||
* OS Name
|
||||
*/
|
||||
public static final String OS_NAME = System.getProperty("os.name");
|
||||
|
||||
}
|
||||
|
|
|
|||
|
|
@ -0,0 +1,42 @@
|
|||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
* contributor license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright ownership.
|
||||
* The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
* (the "License"); you may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.dolphinscheduler.remote.utils;
|
||||
|
||||
import io.netty.channel.epoll.Epoll;
|
||||
|
||||
/**
|
||||
* NettyUtils
|
||||
*/
|
||||
public class NettyUtils {
|
||||
|
||||
private NettyUtils() {
|
||||
}
|
||||
|
||||
public static boolean useEpoll() {
|
||||
String osName = Constants.OS_NAME;
|
||||
if (!osName.toLowerCase().contains("linux")) {
|
||||
return false;
|
||||
}
|
||||
if (!Epoll.isAvailable()) {
|
||||
return false;
|
||||
}
|
||||
String enableNettyEpoll = Constants.NETTY_EPOLL_ENABLE;
|
||||
return Boolean.parseBoolean(enableNettyEpoll);
|
||||
}
|
||||
|
||||
}
|
||||
|
|
@ -0,0 +1,36 @@
|
|||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
* contributor license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright ownership.
|
||||
* The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
* (the "License"); you may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.dolphinscheduler.remote;
|
||||
|
||||
import org.apache.dolphinscheduler.remote.utils.NettyUtils;
|
||||
|
||||
import org.junit.Assert;
|
||||
import org.junit.Test;
|
||||
|
||||
/**
|
||||
* NettyUtilTest
|
||||
*/
|
||||
public class NettyUtilTest {
|
||||
|
||||
@Test
|
||||
public void testUserEpoll() {
|
||||
System.setProperty("netty.epoll.enable", "false");
|
||||
Assert.assertFalse(NettyUtils.useEpoll());
|
||||
}
|
||||
|
||||
}
|
||||
|
|
@ -14,8 +14,20 @@
|
|||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.dolphinscheduler.server.master.registry;
|
||||
|
||||
import org.apache.dolphinscheduler.common.utils.DateUtils;
|
||||
import org.apache.dolphinscheduler.common.utils.NetUtils;
|
||||
import org.apache.dolphinscheduler.remote.utils.NamedThreadFactory;
|
||||
import org.apache.dolphinscheduler.server.master.config.MasterConfig;
|
||||
import org.apache.dolphinscheduler.server.registry.HeartBeatTask;
|
||||
import org.apache.dolphinscheduler.server.registry.ZookeeperRegistryCenter;
|
||||
|
||||
import org.apache.curator.framework.CuratorFramework;
|
||||
import org.apache.curator.framework.state.ConnectionState;
|
||||
import org.apache.curator.framework.state.ConnectionStateListener;
|
||||
|
||||
import java.util.Date;
|
||||
import java.util.concurrent.Executors;
|
||||
import java.util.concurrent.ScheduledExecutorService;
|
||||
|
|
@ -23,15 +35,6 @@ import java.util.concurrent.TimeUnit;
|
|||
|
||||
import javax.annotation.PostConstruct;
|
||||
|
||||
import org.apache.curator.framework.CuratorFramework;
|
||||
import org.apache.curator.framework.state.ConnectionState;
|
||||
import org.apache.curator.framework.state.ConnectionStateListener;
|
||||
import org.apache.dolphinscheduler.common.utils.DateUtils;
|
||||
import org.apache.dolphinscheduler.common.utils.NetUtils;
|
||||
import org.apache.dolphinscheduler.remote.utils.NamedThreadFactory;
|
||||
import org.apache.dolphinscheduler.server.master.config.MasterConfig;
|
||||
import org.apache.dolphinscheduler.server.registry.HeartBeatTask;
|
||||
import org.apache.dolphinscheduler.server.registry.ZookeeperRegistryCenter;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
|
|
@ -40,7 +43,7 @@ import org.springframework.stereotype.Service;
|
|||
import com.google.common.collect.Sets;
|
||||
|
||||
/**
|
||||
* master registry
|
||||
* master registry
|
||||
*/
|
||||
@Service
|
||||
public class MasterRegistry {
|
||||
|
|
@ -48,7 +51,7 @@ public class MasterRegistry {
|
|||
private final Logger logger = LoggerFactory.getLogger(MasterRegistry.class);
|
||||
|
||||
/**
|
||||
* zookeeper registry center
|
||||
* zookeeper registry center
|
||||
*/
|
||||
@Autowired
|
||||
private ZookeeperRegistryCenter zookeeperRegistryCenter;
|
||||
|
|
@ -65,19 +68,18 @@ public class MasterRegistry {
|
|||
private ScheduledExecutorService heartBeatExecutor;
|
||||
|
||||
/**
|
||||
* worker start time
|
||||
* master start time
|
||||
*/
|
||||
private String startTime;
|
||||
|
||||
|
||||
@PostConstruct
|
||||
public void init(){
|
||||
public void init() {
|
||||
this.startTime = DateUtils.dateToString(new Date());
|
||||
this.heartBeatExecutor = Executors.newSingleThreadScheduledExecutor(new NamedThreadFactory("HeartBeatExecutor"));
|
||||
}
|
||||
|
||||
/**
|
||||
* registry
|
||||
* registry
|
||||
*/
|
||||
public void registry() {
|
||||
String address = NetUtils.getHost();
|
||||
|
|
@ -86,12 +88,12 @@ public class MasterRegistry {
|
|||
zookeeperRegistryCenter.getZookeeperCachedOperator().getZkClient().getConnectionStateListenable().addListener(new ConnectionStateListener() {
|
||||
@Override
|
||||
public void stateChanged(CuratorFramework client, ConnectionState newState) {
|
||||
if(newState == ConnectionState.LOST){
|
||||
if (newState == ConnectionState.LOST) {
|
||||
logger.error("master : {} connection lost from zookeeper", address);
|
||||
} else if(newState == ConnectionState.RECONNECTED){
|
||||
} else if (newState == ConnectionState.RECONNECTED) {
|
||||
logger.info("master : {} reconnected to zookeeper", address);
|
||||
zookeeperRegistryCenter.getZookeeperCachedOperator().persistEphemeral(localNodePath, "");
|
||||
} else if(newState == ConnectionState.SUSPENDED){
|
||||
} else if (newState == ConnectionState.SUSPENDED) {
|
||||
logger.warn("master : {} connection SUSPENDED ", address);
|
||||
}
|
||||
}
|
||||
|
|
@ -103,36 +105,35 @@ public class MasterRegistry {
|
|||
Sets.newHashSet(getMasterPath()),
|
||||
zookeeperRegistryCenter);
|
||||
|
||||
this.heartBeatExecutor.scheduleAtFixedRate(heartBeatTask, masterHeartbeatInterval, masterHeartbeatInterval, TimeUnit.SECONDS);
|
||||
logger.info("master node : {} registry to ZK successfully with heartBeatInterval : {}s", address, masterHeartbeatInterval);
|
||||
this.heartBeatExecutor.scheduleAtFixedRate(heartBeatTask, 0, masterHeartbeatInterval, TimeUnit.SECONDS);
|
||||
logger.info("master node : {} registry to ZK path {} successfully with heartBeatInterval : {}s"
|
||||
, address, localNodePath, masterHeartbeatInterval);
|
||||
}
|
||||
|
||||
/**
|
||||
* remove registry info
|
||||
* remove registry info
|
||||
*/
|
||||
public void unRegistry() {
|
||||
String address = getLocalAddress();
|
||||
String localNodePath = getMasterPath();
|
||||
heartBeatExecutor.shutdownNow();
|
||||
zookeeperRegistryCenter.getZookeeperCachedOperator().remove(localNodePath);
|
||||
logger.info("master node : {} unRegistry to ZK.", address);
|
||||
logger.info("master node : {} unRegistry from ZK path {}."
|
||||
, address, localNodePath);
|
||||
}
|
||||
|
||||
/**
|
||||
* get master path
|
||||
* @return
|
||||
* get master path
|
||||
*/
|
||||
private String getMasterPath() {
|
||||
String address = getLocalAddress();
|
||||
String localNodePath = this.zookeeperRegistryCenter.getMasterPath() + "/" + address;
|
||||
return localNodePath;
|
||||
return this.zookeeperRegistryCenter.getMasterPath() + "/" + address;
|
||||
}
|
||||
|
||||
/**
|
||||
* get local address
|
||||
* @return
|
||||
* get local address
|
||||
*/
|
||||
private String getLocalAddress(){
|
||||
private String getLocalAddress() {
|
||||
|
||||
return NetUtils.getHost() + ":" + masterConfig.getListenPort();
|
||||
|
||||
|
|
|
|||
|
|
@ -21,18 +21,17 @@ import org.apache.dolphinscheduler.common.enums.AlertType;
|
|||
import org.apache.dolphinscheduler.common.enums.CommandType;
|
||||
import org.apache.dolphinscheduler.common.enums.ShowType;
|
||||
import org.apache.dolphinscheduler.common.enums.WarningType;
|
||||
import org.apache.dolphinscheduler.common.utils.DateUtils;
|
||||
import org.apache.dolphinscheduler.common.utils.JSONUtils;
|
||||
import org.apache.dolphinscheduler.dao.AlertDao;
|
||||
import org.apache.dolphinscheduler.dao.DaoFactory;
|
||||
import org.apache.dolphinscheduler.dao.entity.Alert;
|
||||
import org.apache.dolphinscheduler.dao.entity.ProcessAlertContent;
|
||||
import org.apache.dolphinscheduler.dao.entity.ProcessDefinition;
|
||||
import org.apache.dolphinscheduler.dao.entity.ProcessInstance;
|
||||
import org.apache.dolphinscheduler.dao.entity.TaskInstance;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Date;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.util.List;
|
||||
|
||||
import org.slf4j.Logger;
|
||||
|
|
@ -100,39 +99,40 @@ public class AlertManager {
|
|||
|
||||
String res = "";
|
||||
if (processInstance.getState().typeIsSuccess()) {
|
||||
List<LinkedHashMap> successTaskList = new ArrayList<>(1);
|
||||
LinkedHashMap<String, String> successTaskMap = new LinkedHashMap();
|
||||
successTaskMap.put("id", String.valueOf(processInstance.getId()));
|
||||
successTaskMap.put("name", processInstance.getName());
|
||||
successTaskMap.put("job type", getCommandCnName(processInstance.getCommandType()));
|
||||
successTaskMap.put("state", processInstance.getState().toString());
|
||||
successTaskMap.put("recovery", processInstance.getRecovery().toString());
|
||||
successTaskMap.put("run time", String.valueOf(processInstance.getRunTimes()));
|
||||
successTaskMap.put("start time", DateUtils.dateToString(processInstance.getStartTime()));
|
||||
successTaskMap.put("end time", DateUtils.dateToString(processInstance.getEndTime()));
|
||||
successTaskMap.put("host", processInstance.getHost());
|
||||
successTaskList.add(successTaskMap);
|
||||
List<ProcessAlertContent> successTaskList = new ArrayList<>(1);
|
||||
ProcessAlertContent processAlertContent = ProcessAlertContent.newBuilder()
|
||||
.processId(processInstance.getId())
|
||||
.processName(processInstance.getName())
|
||||
.processType(processInstance.getCommandType())
|
||||
.processState(processInstance.getState())
|
||||
.recovery(processInstance.getRecovery())
|
||||
.runTimes(processInstance.getRunTimes())
|
||||
.processStartTime(processInstance.getStartTime())
|
||||
.processEndTime(processInstance.getEndTime())
|
||||
.processHost(processInstance.getHost())
|
||||
.build();
|
||||
successTaskList.add(processAlertContent);
|
||||
res = JSONUtils.toJsonString(successTaskList);
|
||||
} else if (processInstance.getState().typeIsFailure()) {
|
||||
|
||||
List<LinkedHashMap> failedTaskList = new ArrayList<>();
|
||||
|
||||
List<ProcessAlertContent> failedTaskList = new ArrayList<>();
|
||||
for (TaskInstance task : taskInstances) {
|
||||
if (task.getState().typeIsSuccess()) {
|
||||
continue;
|
||||
}
|
||||
LinkedHashMap<String, String> failedTaskMap = new LinkedHashMap();
|
||||
failedTaskMap.put("process instance id", String.valueOf(processInstance.getId()));
|
||||
failedTaskMap.put("process instance name", processInstance.getName());
|
||||
failedTaskMap.put("task id", String.valueOf(task.getId()));
|
||||
failedTaskMap.put("task name", task.getName());
|
||||
failedTaskMap.put("task type", task.getTaskType());
|
||||
failedTaskMap.put("task state", task.getState().toString());
|
||||
failedTaskMap.put("task start time", DateUtils.dateToString(task.getStartTime()));
|
||||
failedTaskMap.put("task end time", DateUtils.dateToString(task.getEndTime()));
|
||||
failedTaskMap.put("host", task.getHost());
|
||||
failedTaskMap.put("log path", task.getLogPath());
|
||||
failedTaskList.add(failedTaskMap);
|
||||
ProcessAlertContent processAlertContent = ProcessAlertContent.newBuilder()
|
||||
.processId(processInstance.getId())
|
||||
.processName(processInstance.getName())
|
||||
.taskId(task.getId())
|
||||
.taskName(task.getName())
|
||||
.taskType(task.getTaskType())
|
||||
.taskState(task.getState())
|
||||
.taskStartTime(task.getStartTime())
|
||||
.taskEndTime(task.getEndTime())
|
||||
.taskHost(task.getHost())
|
||||
.logPath(task.getLogPath())
|
||||
.build();
|
||||
failedTaskList.add(processAlertContent);
|
||||
}
|
||||
res = JSONUtils.toJsonString(failedTaskList);
|
||||
}
|
||||
|
|
@ -149,15 +149,16 @@ public class AlertManager {
|
|||
*/
|
||||
private String getWorkerToleranceContent(ProcessInstance processInstance, List<TaskInstance> toleranceTaskList) {
|
||||
|
||||
List<LinkedHashMap<String, String>> toleranceTaskInstanceList = new ArrayList<>();
|
||||
List<ProcessAlertContent> toleranceTaskInstanceList = new ArrayList<>();
|
||||
|
||||
for (TaskInstance taskInstance : toleranceTaskList) {
|
||||
LinkedHashMap<String, String> toleranceWorkerContentMap = new LinkedHashMap();
|
||||
toleranceWorkerContentMap.put("process name", processInstance.getName());
|
||||
toleranceWorkerContentMap.put("task name", taskInstance.getName());
|
||||
toleranceWorkerContentMap.put("host", taskInstance.getHost());
|
||||
toleranceWorkerContentMap.put("task retry times", String.valueOf(taskInstance.getRetryTimes()));
|
||||
toleranceTaskInstanceList.add(toleranceWorkerContentMap);
|
||||
ProcessAlertContent processAlertContent = ProcessAlertContent.newBuilder()
|
||||
.processName(processInstance.getName())
|
||||
.taskName(taskInstance.getName())
|
||||
.taskHost(taskInstance.getHost())
|
||||
.retryTimes(taskInstance.getRetryTimes())
|
||||
.build();
|
||||
toleranceTaskInstanceList.add(processAlertContent);
|
||||
}
|
||||
return JSONUtils.toJsonString(toleranceTaskInstanceList);
|
||||
}
|
||||
|
|
|
|||
|
|
@ -14,53 +14,52 @@
|
|||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.dolphinscheduler.server.utils;
|
||||
|
||||
import java.nio.charset.StandardCharsets;
|
||||
import org.apache.dolphinscheduler.common.Constants;
|
||||
import org.apache.dolphinscheduler.common.utils.CommonUtils;
|
||||
import org.apache.dolphinscheduler.common.utils.FileUtils;
|
||||
import org.apache.dolphinscheduler.common.utils.LoggerUtils;
|
||||
import org.apache.dolphinscheduler.common.utils.OSUtils;
|
||||
import org.apache.dolphinscheduler.common.utils.StringUtils;
|
||||
import org.apache.commons.io.FileUtils;
|
||||
import org.apache.dolphinscheduler.remote.utils.Host;
|
||||
import org.apache.dolphinscheduler.server.entity.TaskExecutionContext;
|
||||
import org.apache.dolphinscheduler.service.log.LogClientService;
|
||||
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.IOException;
|
||||
import java.nio.charset.StandardCharsets;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.regex.Matcher;
|
||||
import java.util.regex.Pattern;
|
||||
|
||||
|
||||
/**
|
||||
* mainly used to get the start command line of a process
|
||||
* mainly used to get the start command line of a process.
|
||||
*/
|
||||
public class ProcessUtils {
|
||||
/**
|
||||
* logger
|
||||
* logger.
|
||||
*/
|
||||
private final static Logger logger = LoggerFactory.getLogger(ProcessUtils.class);
|
||||
private static final Logger logger = LoggerFactory.getLogger(ProcessUtils.class);
|
||||
|
||||
/**
|
||||
* Initialization regularization, solve the problem of pre-compilation performance,
|
||||
* avoid the thread safety problem of multi-thread operation
|
||||
* avoid the thread safety problem of multi-thread operation.
|
||||
*/
|
||||
private static final Pattern MACPATTERN = Pattern.compile("-[+|-]-\\s(\\d+)");
|
||||
|
||||
private static final Pattern WINDOWSATTERN = Pattern.compile("(\\d+)");
|
||||
|
||||
/**
|
||||
* build command line characters
|
||||
* build command line characters.
|
||||
* @param commandList command list
|
||||
* @return command
|
||||
* @throws IOException io exception
|
||||
*/
|
||||
public static String buildCommandStr(List<String> commandList) throws IOException {
|
||||
public static String buildCommandStr(List<String> commandList) {
|
||||
String cmdstr;
|
||||
String[] cmd = commandList.toArray(new String[commandList.size()]);
|
||||
SecurityManager security = System.getSecurityManager();
|
||||
|
|
@ -102,7 +101,6 @@ public class ProcessUtils {
|
|||
}
|
||||
}
|
||||
|
||||
|
||||
cmdstr = createCommandLine(
|
||||
|
||||
isShellFile(executablePath) ? VERIFICATION_CMD_BAT : VERIFICATION_WIN32, quoteString(executablePath), cmd);
|
||||
|
|
@ -111,13 +109,12 @@ public class ProcessUtils {
|
|||
}
|
||||
|
||||
/**
|
||||
* get executable path
|
||||
* get executable path.
|
||||
*
|
||||
* @param path path
|
||||
* @return executable path
|
||||
* @throws IOException io exception
|
||||
*/
|
||||
private static String getExecutablePath(String path) throws IOException {
|
||||
private static String getExecutablePath(String path) {
|
||||
boolean pathIsQuoted = isQuoted(true, path, "Executable name has embedded quote, split the arguments");
|
||||
|
||||
File fileToRun = new File(pathIsQuoted ? path.substring(1, path.length() - 1) : path);
|
||||
|
|
@ -125,7 +122,7 @@ public class ProcessUtils {
|
|||
}
|
||||
|
||||
/**
|
||||
* whether is shell file
|
||||
* whether is shell file.
|
||||
*
|
||||
* @param executablePath executable path
|
||||
* @return true if endsWith .CMD or .BAT
|
||||
|
|
@ -136,7 +133,7 @@ public class ProcessUtils {
|
|||
}
|
||||
|
||||
/**
|
||||
* quote string
|
||||
* quote string.
|
||||
*
|
||||
* @param arg argument
|
||||
* @return format arg
|
||||
|
|
@ -147,7 +144,7 @@ public class ProcessUtils {
|
|||
}
|
||||
|
||||
/**
|
||||
* get tokens from command
|
||||
* get tokens from command.
|
||||
*
|
||||
* @param command command
|
||||
* @return token string array
|
||||
|
|
@ -162,7 +159,7 @@ public class ProcessUtils {
|
|||
}
|
||||
|
||||
/**
|
||||
* Lazy Pattern
|
||||
* Lazy Pattern.
|
||||
*/
|
||||
private static class LazyPattern {
|
||||
// Escape-support version:
|
||||
|
|
@ -171,34 +168,29 @@ public class ProcessUtils {
|
|||
}
|
||||
|
||||
/**
|
||||
* verification cmd bat
|
||||
* verification cmd bat.
|
||||
*/
|
||||
private static final int VERIFICATION_CMD_BAT = 0;
|
||||
|
||||
/**
|
||||
* verification win32
|
||||
* verification win32.
|
||||
*/
|
||||
private static final int VERIFICATION_WIN32 = 1;
|
||||
|
||||
/**
|
||||
* verification legacy
|
||||
* verification legacy.
|
||||
*/
|
||||
private static final int VERIFICATION_LEGACY = 2;
|
||||
|
||||
/**
|
||||
* escape verification
|
||||
* escape verification.
|
||||
*/
|
||||
private static final char[][] ESCAPE_VERIFICATION = {{' ', '\t', '<', '>', '&', '|', '^'},
|
||||
|
||||
{' ', '\t', '<', '>'}, {' ', '\t'}};
|
||||
{' ', '\t', '<', '>'}, {' ', '\t'}};
|
||||
|
||||
/**
|
||||
* matcher
|
||||
*/
|
||||
private static Matcher matcher;
|
||||
|
||||
/**
|
||||
* create command line
|
||||
* create command line.
|
||||
* @param verificationType verification type
|
||||
* @param executablePath executable path
|
||||
* @param cmd cmd
|
||||
|
|
@ -227,7 +219,7 @@ public class ProcessUtils {
|
|||
}
|
||||
|
||||
/**
|
||||
* whether is quoted
|
||||
* whether is quoted.
|
||||
* @param noQuotesInside
|
||||
* @param arg
|
||||
* @param errorMessage
|
||||
|
|
@ -255,7 +247,7 @@ public class ProcessUtils {
|
|||
}
|
||||
|
||||
/**
|
||||
* whether needs escaping
|
||||
* whether needs escaping.
|
||||
*
|
||||
* @param verificationType verification type
|
||||
* @param arg arg
|
||||
|
|
@ -277,16 +269,14 @@ public class ProcessUtils {
|
|||
}
|
||||
|
||||
/**
|
||||
* kill yarn application
|
||||
* kill yarn application.
|
||||
*
|
||||
* @param appIds app id list
|
||||
* @param logger logger
|
||||
* @param tenantCode tenant code
|
||||
* @param executePath execute path
|
||||
* @throws IOException io exception
|
||||
*/
|
||||
public static void cancelApplication(List<String> appIds, Logger logger, String tenantCode,String executePath)
|
||||
throws IOException {
|
||||
public static void cancelApplication(List<String> appIds, Logger logger, String tenantCode, String executePath) {
|
||||
if (appIds.size() > 0) {
|
||||
String appid = appIds.get(appIds.size() - 1);
|
||||
String commandFile = String
|
||||
|
|
@ -324,17 +314,17 @@ public class ProcessUtils {
|
|||
}
|
||||
|
||||
/**
|
||||
* kill tasks according to different task types
|
||||
* kill tasks according to different task types.
|
||||
*
|
||||
* @param taskExecutionContext taskExecutionContext
|
||||
*/
|
||||
public static void kill(TaskExecutionContext taskExecutionContext) {
|
||||
try {
|
||||
int processId = taskExecutionContext.getProcessId();
|
||||
if(processId == 0 ){
|
||||
if (processId == 0) {
|
||||
logger.error("process kill failed, process id :{}, task id:{}",
|
||||
processId, taskExecutionContext.getTaskInstanceId());
|
||||
return ;
|
||||
return;
|
||||
}
|
||||
|
||||
String cmd = String.format("sudo kill -9 %s", getPidsStr(processId));
|
||||
|
|
@ -352,13 +342,13 @@ public class ProcessUtils {
|
|||
}
|
||||
|
||||
/**
|
||||
* get pids str
|
||||
* get pids str.
|
||||
*
|
||||
* @param processId process id
|
||||
* @return pids
|
||||
* @throws Exception exception
|
||||
*/
|
||||
public static String getPidsStr(int processId)throws Exception{
|
||||
public static String getPidsStr(int processId) throws Exception {
|
||||
StringBuilder sb = new StringBuilder();
|
||||
Matcher mat;
|
||||
// pstree pid get sub pids
|
||||
|
|
@ -370,14 +360,14 @@ public class ProcessUtils {
|
|||
mat = WINDOWSATTERN.matcher(pids);
|
||||
}
|
||||
|
||||
while (mat.find()){
|
||||
while (mat.find()) {
|
||||
sb.append(mat.group(1)).append(" ");
|
||||
}
|
||||
return sb.toString().trim();
|
||||
}
|
||||
|
||||
/**
|
||||
* find logs and kill yarn tasks
|
||||
* find logs and kill yarn tasks.
|
||||
*
|
||||
* @param taskExecutionContext taskExecutionContext
|
||||
*/
|
||||
|
|
@ -392,7 +382,7 @@ public class ProcessUtils {
|
|||
Constants.RPC_PORT,
|
||||
taskExecutionContext.getLogPath());
|
||||
} finally {
|
||||
if(logClient != null){
|
||||
if (logClient != null) {
|
||||
logClient.close();
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -312,14 +312,8 @@ public abstract class AbstractCommandExecutor {
|
|||
* @param commands process builder
|
||||
*/
|
||||
private void printCommand(List<String> commands) {
|
||||
String cmdStr;
|
||||
|
||||
try {
|
||||
cmdStr = ProcessUtils.buildCommandStr(commands);
|
||||
logger.info("task run command:\n{}", cmdStr);
|
||||
} catch (IOException e) {
|
||||
logger.error(e.getMessage(), e);
|
||||
}
|
||||
String cmdStr = ProcessUtils.buildCommandStr(commands);
|
||||
logger.info("task run command:\n{}", cmdStr);
|
||||
}
|
||||
|
||||
/**
|
||||
|
|
|
|||
|
|
@ -17,6 +17,8 @@
|
|||
|
||||
package org.apache.dolphinscheduler.server.master.registry;
|
||||
|
||||
import static org.apache.dolphinscheduler.common.Constants.HEARTBEAT_FOR_ZOOKEEPER_INFO_LENGTH;
|
||||
|
||||
import org.apache.dolphinscheduler.remote.utils.Constants;
|
||||
import org.apache.dolphinscheduler.server.master.config.MasterConfig;
|
||||
import org.apache.dolphinscheduler.server.registry.ZookeeperRegistryCenter;
|
||||
|
|
@ -24,6 +26,10 @@ import org.apache.dolphinscheduler.server.zk.SpringZKServer;
|
|||
import org.apache.dolphinscheduler.service.zk.CuratorZookeeperClient;
|
||||
import org.apache.dolphinscheduler.service.zk.ZookeeperCachedOperator;
|
||||
import org.apache.dolphinscheduler.service.zk.ZookeeperConfig;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
|
||||
import org.junit.Assert;
|
||||
import org.junit.Test;
|
||||
import org.junit.runner.RunWith;
|
||||
|
|
@ -31,10 +37,6 @@ import org.springframework.beans.factory.annotation.Autowired;
|
|||
import org.springframework.test.context.ContextConfiguration;
|
||||
import org.springframework.test.context.junit4.SpringRunner;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
|
||||
import static org.apache.dolphinscheduler.common.Constants.HEARTBEAT_FOR_ZOOKEEPER_INFO_LENGTH;
|
||||
/**
|
||||
* master registry test
|
||||
*/
|
||||
|
|
@ -65,6 +67,7 @@ public class MasterRegistryTest {
|
|||
|
||||
@Test
|
||||
public void testUnRegistry() throws InterruptedException {
|
||||
masterRegistry.init();
|
||||
masterRegistry.registry();
|
||||
TimeUnit.SECONDS.sleep(masterConfig.getMasterHeartbeatInterval() + 2); //wait heartbeat info write into zk node
|
||||
masterRegistry.unRegistry();
|
||||
|
|
|
|||
|
|
@ -40,11 +40,8 @@ public class ProcessUtilsTest {
|
|||
public void testBuildCommandStr() {
|
||||
List<String> commands = new ArrayList<>();
|
||||
commands.add("sudo");
|
||||
try {
|
||||
Assert.assertEquals(ProcessUtils.buildCommandStr(commands), "sudo");
|
||||
} catch (IOException e) {
|
||||
Assert.fail(e.getMessage());
|
||||
}
|
||||
Assert.assertEquals(ProcessUtils.buildCommandStr(commands), "sudo");
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
|||
|
|
@ -103,7 +103,7 @@
|
|||
</x-button>
|
||||
<x-button
|
||||
type="primary"
|
||||
v-tooltip.light="$t('Close')"
|
||||
v-tooltip.light="$t('Close')"
|
||||
icon="ans-icon-off"
|
||||
size="xsmall"
|
||||
data-container="body"
|
||||
|
|
@ -122,6 +122,17 @@
|
|||
>
|
||||
{{spinnerLoading ? 'Loading...' : $t('Save')}}
|
||||
</x-button>
|
||||
<x-button
|
||||
style="vertical-align: middle;"
|
||||
type="primary"
|
||||
size="xsmall"
|
||||
v-if="this.type !== 'instance' && this.urlParam.id !== null"
|
||||
:loading="spinnerLoading"
|
||||
@click="_version"
|
||||
icon="ans-icon-dependence"
|
||||
>
|
||||
{{spinnerLoading ? 'Loading...' : $t('Version Info')}}
|
||||
</x-button>
|
||||
</div>
|
||||
</div>
|
||||
<div class="scrollbar dag-container">
|
||||
|
|
@ -147,6 +158,7 @@
|
|||
import { findComponentDownward } from '@/module/util/'
|
||||
import disabledState from '@/module/mixin/disabledState'
|
||||
import { mapActions, mapState, mapMutations } from 'vuex'
|
||||
import mVersions from '../../projects/pages/definition/pages/list/_source/versions'
|
||||
|
||||
let eventModel
|
||||
|
||||
|
|
@ -176,7 +188,7 @@
|
|||
releaseState: String
|
||||
},
|
||||
methods: {
|
||||
...mapActions('dag', ['saveDAGchart', 'updateInstance', 'updateDefinition', 'getTaskState']),
|
||||
...mapActions('dag', ['saveDAGchart', 'updateInstance', 'updateDefinition', 'getTaskState', 'switchProcessDefinitionVersion', 'getProcessDefinitionVersionsPage', 'deleteProcessDefinitionVersion']),
|
||||
...mapMutations('dag', ['addTasks', 'cacheTasks', 'resetParams', 'setIsEditDag', 'setName', 'addConnects']),
|
||||
|
||||
// DAG automatic layout
|
||||
|
|
@ -196,7 +208,6 @@
|
|||
],
|
||||
Connector: 'Bezier',
|
||||
PaintStyle: { lineWidth: 2, stroke: '#456' }, // Connection style
|
||||
HoverPaintStyle: {stroke: '#ccc', strokeWidth: 3},
|
||||
ConnectionOverlays: [
|
||||
[
|
||||
'Arrow',
|
||||
|
|
@ -370,6 +381,12 @@
|
|||
this[this.type === 'instance' ? 'updateInstance' : 'updateDefinition'](this.urlParam.id).then(res => {
|
||||
this.$message.success(res.msg)
|
||||
this.spinnerLoading = false
|
||||
// Jump process definition
|
||||
if (this.type === 'instance') {
|
||||
this.$router.push({ path: `/projects/instance/list/${this.urlParam.id}?_t=${new Date().getTime()}` })
|
||||
} else {
|
||||
this.$router.push({ path: `/projects/definition/list/${this.urlParam.id}?_t=${new Date().getTime()}` })
|
||||
}
|
||||
resolve()
|
||||
}).catch(e => {
|
||||
this.$message.error(e.msg || '')
|
||||
|
|
@ -657,6 +674,135 @@
|
|||
if(eventModel && this.taskId == $id){
|
||||
eventModel.remove()
|
||||
}
|
||||
},
|
||||
|
||||
/**
|
||||
* query the process definition pagination version
|
||||
*/
|
||||
_version (item) {
|
||||
let self = this
|
||||
this.getProcessDefinitionVersionsPage({
|
||||
pageNo: 1,
|
||||
pageSize: 10,
|
||||
processDefinitionId: this.urlParam.id
|
||||
}).then(res => {
|
||||
let processDefinitionVersions = res.data.lists
|
||||
let total = res.data.totalCount
|
||||
let pageSize = res.data.pageSize
|
||||
let pageNo = res.data.currentPage
|
||||
if (this.versionsModel) {
|
||||
this.versionsModel.remove()
|
||||
}
|
||||
this.versionsModel = this.$drawer({
|
||||
direction: 'right',
|
||||
closable: true,
|
||||
showMask: true,
|
||||
escClose: true,
|
||||
render (h) {
|
||||
return h(mVersions, {
|
||||
on: {
|
||||
/**
|
||||
* switch version in process definition version list
|
||||
*
|
||||
* @param version the version user want to change
|
||||
* @param processDefinitionId the process definition id
|
||||
* @param fromThis fromThis
|
||||
*/
|
||||
mVersionSwitchProcessDefinitionVersion ({ version, processDefinitionId, fromThis }) {
|
||||
|
||||
self.$store.state.dag.isSwitchVersion = true
|
||||
|
||||
self.switchProcessDefinitionVersion({
|
||||
version: version,
|
||||
processDefinitionId: processDefinitionId
|
||||
}).then(res => {
|
||||
self.$message.success($t('Switch Version Successfully'))
|
||||
setTimeout(() => {
|
||||
fromThis.$destroy()
|
||||
self.versionsModel.remove()
|
||||
}, 0)
|
||||
self.$router.push({ path: `/projects/definition/list/${processDefinitionId}?_t=${new Date().getTime()}` })
|
||||
}).catch(e => {
|
||||
self.$store.state.dag.isSwitchVersion = false
|
||||
self.$message.error(e.msg || '')
|
||||
})
|
||||
},
|
||||
|
||||
/**
|
||||
* Paging event of process definition versions
|
||||
*
|
||||
* @param pageNo page number
|
||||
* @param pageSize page size
|
||||
* @param processDefinitionId the process definition id of page version
|
||||
* @param fromThis fromThis
|
||||
*/
|
||||
mVersionGetProcessDefinitionVersionsPage ({ pageNo, pageSize, processDefinitionId, fromThis }) {
|
||||
self.getProcessDefinitionVersionsPage({
|
||||
pageNo: pageNo,
|
||||
pageSize: pageSize,
|
||||
processDefinitionId: processDefinitionId
|
||||
}).then(res => {
|
||||
fromThis.processDefinitionVersions = res.data.lists
|
||||
fromThis.total = res.data.totalCount
|
||||
fromThis.pageSize = res.data.pageSize
|
||||
fromThis.pageNo = res.data.currentPage
|
||||
}).catch(e => {
|
||||
self.$message.error(e.msg || '')
|
||||
})
|
||||
},
|
||||
|
||||
/**
|
||||
* delete one version of process definition
|
||||
*
|
||||
* @param version the version need to delete
|
||||
* @param processDefinitionId the process definition id user want to delete
|
||||
* @param fromThis fromThis
|
||||
*/
|
||||
mVersionDeleteProcessDefinitionVersion ({ version, processDefinitionId, fromThis }) {
|
||||
self.deleteProcessDefinitionVersion({
|
||||
version: version,
|
||||
processDefinitionId: processDefinitionId
|
||||
}).then(res => {
|
||||
self.$message.success(res.msg || '')
|
||||
fromThis.$emit('mVersionGetProcessDefinitionVersionsPage', {
|
||||
pageNo: 1,
|
||||
pageSize: 10,
|
||||
processDefinitionId: processDefinitionId,
|
||||
fromThis: fromThis
|
||||
})
|
||||
}).catch(e => {
|
||||
self.$message.error(e.msg || '')
|
||||
})
|
||||
},
|
||||
|
||||
/**
|
||||
* remove this drawer
|
||||
*
|
||||
* @param fromThis
|
||||
*/
|
||||
close ({ fromThis }) {
|
||||
setTimeout(() => {
|
||||
fromThis.$destroy()
|
||||
self.versionsModel.remove()
|
||||
}, 0)
|
||||
}
|
||||
},
|
||||
props: {
|
||||
processDefinition: {
|
||||
id: self.urlParam.id,
|
||||
version: self.$store.state.dag.version
|
||||
},
|
||||
processDefinitionVersions: processDefinitionVersions,
|
||||
total: total,
|
||||
pageNo: pageNo,
|
||||
pageSize: pageSize
|
||||
}
|
||||
})
|
||||
}
|
||||
})
|
||||
}).catch(e => {
|
||||
this.$message.error(e.msg || '')
|
||||
})
|
||||
}
|
||||
},
|
||||
watch: {
|
||||
|
|
@ -685,7 +831,6 @@
|
|||
],
|
||||
Connector: 'Bezier',
|
||||
PaintStyle: { lineWidth: 2, stroke: '#456' }, // Connection style
|
||||
HoverPaintStyle: {stroke: '#ccc', strokeWidth: 3},
|
||||
ConnectionOverlays: [
|
||||
[
|
||||
'Arrow',
|
||||
|
|
|
|||
|
|
@ -48,7 +48,7 @@
|
|||
<m-list-box>
|
||||
<div slot="text">{{$t('Main jar package')}}</div>
|
||||
<div slot="content">
|
||||
<treeselect v-model="mainJar" :options="mainJarLists" :disable-branch-nodes="true" :normalizer="normalizer" :disabled="isDetails" :placeholder="$t('Please enter main jar package')">
|
||||
<treeselect v-model="mainJar" maxHeight="200" :options="mainJarLists" :disable-branch-nodes="true" :normalizer="normalizer" :disabled="isDetails" :placeholder="$t('Please enter main jar package')">
|
||||
<div slot="value-label" slot-scope="{ node }">{{ node.raw.fullName }}</div>
|
||||
</treeselect>
|
||||
</div>
|
||||
|
|
@ -158,7 +158,7 @@
|
|||
<m-list-box>
|
||||
<div slot="text">{{$t('Resources')}}</div>
|
||||
<div slot="content">
|
||||
<treeselect v-model="resourceList" :multiple="true" :options="mainJarList" :normalizer="normalizer" :disabled="isDetails" :value-consists-of="valueConsistsOf" :placeholder="$t('Please select resources')">
|
||||
<treeselect v-model="resourceList" :multiple="true" maxHeight="200" :options="mainJarList" :normalizer="normalizer" :disabled="isDetails" :value-consists-of="valueConsistsOf" :placeholder="$t('Please select resources')">
|
||||
<div slot="value-label" slot-scope="{ node }">{{ node.raw.fullName }}</div>
|
||||
</treeselect>
|
||||
</div>
|
||||
|
|
|
|||
|
|
@ -44,7 +44,7 @@
|
|||
<m-list-box>
|
||||
<div slot="text">{{$t('Main jar package')}}</div>
|
||||
<div slot="content">
|
||||
<treeselect v-model="mainJar" :options="mainJarLists" :disable-branch-nodes="true" :normalizer="normalizer" :value-consists-of="valueConsistsOf" :disabled="isDetails" :placeholder="$t('Please enter main jar package')">
|
||||
<treeselect v-model="mainJar" maxHeight="200" :options="mainJarLists" :disable-branch-nodes="true" :normalizer="normalizer" :value-consists-of="valueConsistsOf" :disabled="isDetails" :placeholder="$t('Please enter main jar package')">
|
||||
<div slot="value-label" slot-scope="{ node }">{{ node.raw.fullName }}</div>
|
||||
</treeselect>
|
||||
</div>
|
||||
|
|
@ -78,7 +78,7 @@
|
|||
<m-list-box>
|
||||
<div slot="text">{{$t('Resources')}}</div>
|
||||
<div slot="content">
|
||||
<treeselect v-model="resourceList" :multiple="true" :options="mainJarList" :normalizer="normalizer" :disabled="isDetails" :value-consists-of="valueConsistsOf" :placeholder="$t('Please select resources')">
|
||||
<treeselect v-model="resourceList" :multiple="true" maxHeight="200" :options="mainJarList" :normalizer="normalizer" :disabled="isDetails" :value-consists-of="valueConsistsOf" :placeholder="$t('Please select resources')">
|
||||
<div slot="value-label" slot-scope="{ node }">{{ node.raw.fullName }}</div>
|
||||
</treeselect>
|
||||
</div>
|
||||
|
|
|
|||
|
|
@ -31,7 +31,7 @@
|
|||
<m-list-box>
|
||||
<div slot="text">{{$t('Resources')}}</div>
|
||||
<div slot="content">
|
||||
<treeselect v-model="resourceList" :multiple="true" :options="resourceOptions" :normalizer="normalizer" :value-consists-of="valueConsistsOf" :disabled="isDetails" :placeholder="$t('Please select resources')">
|
||||
<treeselect v-model="resourceList" :multiple="true" maxHeight="200" :options="resourceOptions" :normalizer="normalizer" :value-consists-of="valueConsistsOf" :disabled="isDetails" :placeholder="$t('Please select resources')">
|
||||
<div slot="value-label" slot-scope="{ node }">{{ node.raw.fullName }}</div>
|
||||
</treeselect>
|
||||
<!-- <m-resources
|
||||
|
|
|
|||
|
|
@ -34,7 +34,7 @@
|
|||
<m-list-box>
|
||||
<div slot="text">{{$t('Resources')}}</div>
|
||||
<div slot="content">
|
||||
<treeselect v-model="resourceList" :multiple="true" :options="options" :normalizer="normalizer" :disabled="isDetails" :value-consists-of="valueConsistsOf" :placeholder="$t('Please select resources')">
|
||||
<treeselect v-model="resourceList" :multiple="true" maxHeight="200" :options="options" :normalizer="normalizer" :disabled="isDetails" :value-consists-of="valueConsistsOf" :placeholder="$t('Please select resources')">
|
||||
<div slot="value-label" slot-scope="{ node }">{{ node.raw.fullName }}</div>
|
||||
</treeselect>
|
||||
</div>
|
||||
|
|
|
|||
|
|
@ -63,7 +63,7 @@
|
|||
<m-list-box>
|
||||
<div slot="text">{{$t('Main jar package')}}</div>
|
||||
<div slot="content">
|
||||
<treeselect v-model="mainJar" :options="mainJarLists" :disable-branch-nodes="true" :normalizer="normalizer" :disabled="isDetails" :placeholder="$t('Please enter main jar package')">
|
||||
<treeselect v-model="mainJar" maxHeight="200" :options="mainJarLists" :disable-branch-nodes="true" :normalizer="normalizer" :disabled="isDetails" :placeholder="$t('Please enter main jar package')">
|
||||
<div slot="value-label" slot-scope="{ node }">{{ node.raw.fullName }}</div>
|
||||
</treeselect>
|
||||
</div>
|
||||
|
|
@ -169,7 +169,7 @@
|
|||
<m-list-box>
|
||||
<div slot="text">{{$t('Resources')}}</div>
|
||||
<div slot="content">
|
||||
<treeselect v-model="resourceList" :multiple="true" :options="mainJarList" :normalizer="normalizer" :value-consists-of="valueConsistsOf" :disabled="isDetails" :placeholder="$t('Please select resources')">
|
||||
<treeselect v-model="resourceList" :multiple="true" maxHeight="200" :options="mainJarList" :normalizer="normalizer" :value-consists-of="valueConsistsOf" :disabled="isDetails" :placeholder="$t('Please select resources')">
|
||||
<div slot="value-label" slot-scope="{ node }">{{ node.raw.fullName }}</div>
|
||||
</treeselect>
|
||||
</div>
|
||||
|
|
|
|||
|
|
@ -73,7 +73,7 @@
|
|||
<m-list-box>
|
||||
<div slot="text">{{$t('Resources')}}</div>
|
||||
<div slot="content">
|
||||
<treeselect v-model="resourceList" :disable-branch-nodes="true" :multiple="true" :options="options" :normalizer="normalizer" :disabled="isDetails" :value-consists-of="valueConsistsOf" :placeholder="$t('Please select resources')">
|
||||
<treeselect v-model="resourceList" maxHeight="200" :disable-branch-nodes="true" :multiple="true" :options="options" :normalizer="normalizer" :disabled="isDetails" :value-consists-of="valueConsistsOf" :placeholder="$t('Please select resources')">
|
||||
<div slot="value-label" slot-scope="{ node }">{{ node.raw.fullName }}</div>
|
||||
</treeselect>
|
||||
</div>
|
||||
|
|
|
|||
|
|
@ -72,7 +72,7 @@ Affirm.paramVerification = (name) => {
|
|||
}
|
||||
} else {
|
||||
// View history direct jump
|
||||
flag = name === 'projects-instance-details' ? true : !dagStore.isEditDag
|
||||
flag = name === 'projects-instance-details' ? true : (dagStore.isSwitchVersion || !dagStore.isEditDag)
|
||||
}
|
||||
return flag
|
||||
}
|
||||
|
|
|
|||
|
|
@ -100,13 +100,27 @@ const setSvgColor = (e, color) => {
|
|||
// Traverse clear all colors
|
||||
$('.jtk-connector').each((i, o) => {
|
||||
_.map($(o)[0].childNodes, v => {
|
||||
$(v).attr('fill', '#2d8cf0').attr('stroke', '#2d8cf0').attr('stroke-width', 2)
|
||||
if($(v).attr('fill') ==='#ccc') {
|
||||
$(v).attr('fill', '#2d8cf0')
|
||||
}
|
||||
if($(v).attr('fill') ==='#4caf50') {
|
||||
$(v).attr('fill','#4caf50').attr('stroke', '#4caf50').attr('stroke-width', 2)
|
||||
$(v).prev().attr('stroke', '#4caf50').attr('stroke-width', 2)
|
||||
} else if($(v).attr('fill') ==='#252d39') {
|
||||
$(v).attr('stroke', '#252d39').attr('stroke-width', 2)
|
||||
$(v).prev().attr('stroke', '#252d39').attr('stroke-width', 2)
|
||||
} else {
|
||||
$(v).attr('stroke', '#2d8cf0').attr('stroke-width', 2)
|
||||
}
|
||||
})
|
||||
})
|
||||
|
||||
// Add color to the selection
|
||||
_.map($(e.canvas)[0].childNodes, (v, i) => {
|
||||
$(v).attr('fill', color).attr('stroke', color)
|
||||
if($(v).attr('fill') ==='#2d8cf0') {
|
||||
$(v).attr('fill', '#ccc')
|
||||
}
|
||||
$(v).attr('stroke', '#ccc')
|
||||
if ($(v).attr('class')) {
|
||||
$(v).attr('stroke-width', 2)
|
||||
}
|
||||
|
|
|
|||
|
|
@ -99,6 +99,7 @@
|
|||
name: 'udp',
|
||||
data () {
|
||||
return {
|
||||
originalName: '',
|
||||
// dag name
|
||||
name: '',
|
||||
// dag description
|
||||
|
|
@ -169,12 +170,15 @@
|
|||
this.$emit('onUdp')
|
||||
}
|
||||
|
||||
// verify that the name exists
|
||||
this.store.dispatch('dag/verifDAGName', this.name).then(res => {
|
||||
if (this.originalName !== this.name) {
|
||||
this.store.dispatch('dag/verifDAGName', this.name).then(res => {
|
||||
_verif()
|
||||
}).catch(e => {
|
||||
this.$message.error(e.msg || '')
|
||||
})
|
||||
} else {
|
||||
_verif()
|
||||
}).catch(e => {
|
||||
this.$message.error(e.msg || '')
|
||||
})
|
||||
}
|
||||
},
|
||||
/**
|
||||
* Close the popup
|
||||
|
|
@ -196,6 +200,7 @@
|
|||
this.udpList = dag.globalParams
|
||||
this.udpListCache = dag.globalParams
|
||||
this.name = dag.name
|
||||
this.originalName = dag.name
|
||||
this.description = dag.description
|
||||
this.syncDefine = dag.syncDefine
|
||||
this.timeout = dag.timeout || 0
|
||||
|
|
|
|||
|
|
@ -95,6 +95,7 @@
|
|||
this.getMasterData().then(res => {
|
||||
this.masterList = _.map(res, (v, i) => {
|
||||
return _.assign(v, {
|
||||
id: v.host + "_" + v.id,
|
||||
resInfo: JSON.parse(v.resInfo)
|
||||
})
|
||||
})
|
||||
|
|
|
|||
|
|
@ -115,6 +115,7 @@
|
|||
this.getWorkerData().then(res => {
|
||||
this.workerList = _.map(res, (v, i) => {
|
||||
return _.assign(v, {
|
||||
id: v.host + "_" + v.id,
|
||||
resInfo: JSON.parse(v.resInfo)
|
||||
})
|
||||
})
|
||||
|
|
|
|||
|
|
@ -117,6 +117,7 @@
|
|||
</x-poptip>
|
||||
<x-button type="info" shape="circle" size="xsmall" data-toggle="tooltip" :title="$t('TreeView')" @click="_treeView(item)" icon="ans-icon-node"><!--{{$t('树形图')}}--></x-button>
|
||||
<x-button type="info" shape="circle" size="xsmall" data-toggle="tooltip" :title="$t('Export')" @click="_export(item)" icon="ans-icon-download"><!--{{$t('导出')}}--></x-button>
|
||||
<x-button type="info" shape="circle" size="xsmall" data-toggle="tooltip" :title="$t('Version Info')" @click="_version(item)" :disabled="item.releaseState === 'ONLINE'" icon="ans-icon-dependence"><!--{{$t('版本信息')}}--></x-button>
|
||||
|
||||
</td>
|
||||
</tr>
|
||||
|
|
@ -148,6 +149,7 @@
|
|||
import mTiming from './timing'
|
||||
import { mapActions } from 'vuex'
|
||||
import { publishStatus } from '@/conf/home/pages/dag/_source/config'
|
||||
import mVersions from './versions'
|
||||
|
||||
export default {
|
||||
name: 'definition-list',
|
||||
|
|
@ -164,7 +166,7 @@
|
|||
pageSize: Number
|
||||
},
|
||||
methods: {
|
||||
...mapActions('dag', ['editProcessState', 'getStartCheck', 'getReceiver', 'deleteDefinition', 'batchDeleteDefinition','exportDefinition','copyProcess']),
|
||||
...mapActions('dag', ['editProcessState', 'getStartCheck', 'getReceiver', 'deleteDefinition', 'batchDeleteDefinition', 'exportDefinition', 'getProcessDefinitionVersionsPage', 'copyProcess', 'switchProcessDefinitionVersion', 'deleteProcessDefinitionVersion']),
|
||||
...mapActions('security', ['getWorkerGroupsAll']),
|
||||
_rtPublishStatus (code) {
|
||||
return _.filter(publishStatus, v => v.code === code)[0].desc
|
||||
|
|
@ -334,6 +336,125 @@
|
|||
})
|
||||
},
|
||||
|
||||
_version (item) {
|
||||
let self = this
|
||||
this.getProcessDefinitionVersionsPage({
|
||||
pageNo: 1,
|
||||
pageSize: 10,
|
||||
processDefinitionId: item.id
|
||||
}).then(res => {
|
||||
let processDefinitionVersions = res.data.lists
|
||||
let total = res.data.totalCount
|
||||
let pageSize = res.data.pageSize
|
||||
let pageNo = res.data.currentPage
|
||||
if (this.versionsModel) {
|
||||
this.versionsModel.remove()
|
||||
}
|
||||
this.versionsModel = this.$drawer({
|
||||
direction: 'right',
|
||||
closable: true,
|
||||
showMask: true,
|
||||
escClose: true,
|
||||
render (h) {
|
||||
return h(mVersions, {
|
||||
on: {
|
||||
/**
|
||||
* switch version in process definition version list
|
||||
*
|
||||
* @param version the version user want to change
|
||||
* @param processDefinitionId the process definition id
|
||||
* @param fromThis fromThis
|
||||
*/
|
||||
mVersionSwitchProcessDefinitionVersion ({ version, processDefinitionId, fromThis }) {
|
||||
self.switchProcessDefinitionVersion({
|
||||
version: version,
|
||||
processDefinitionId: processDefinitionId
|
||||
}).then(res => {
|
||||
self.$message.success($t('Switch Version Successfully'))
|
||||
setTimeout(() => {
|
||||
fromThis.$destroy()
|
||||
self.versionsModel.remove()
|
||||
}, 0)
|
||||
self.$router.push({ path: `/projects/definition/list/${processDefinitionId}` })
|
||||
}).catch(e => {
|
||||
self.$message.error(e.msg || '')
|
||||
})
|
||||
},
|
||||
|
||||
/**
|
||||
* Paging event of process definition versions
|
||||
*
|
||||
* @param pageNo page number
|
||||
* @param pageSize page size
|
||||
* @param processDefinitionId the process definition id of page version
|
||||
* @param fromThis fromThis
|
||||
*/
|
||||
mVersionGetProcessDefinitionVersionsPage ({ pageNo, pageSize, processDefinitionId, fromThis }) {
|
||||
self.getProcessDefinitionVersionsPage({
|
||||
pageNo: pageNo,
|
||||
pageSize: pageSize,
|
||||
processDefinitionId: processDefinitionId
|
||||
}).then(res => {
|
||||
fromThis.processDefinitionVersions = res.data.lists
|
||||
fromThis.total = res.data.totalCount
|
||||
fromThis.pageSize = res.data.pageSize
|
||||
fromThis.pageNo = res.data.currentPage
|
||||
}).catch(e => {
|
||||
self.$message.error(e.msg || '')
|
||||
})
|
||||
},
|
||||
|
||||
/**
|
||||
* delete one version of process definition
|
||||
*
|
||||
* @param version the version need to delete
|
||||
* @param processDefinitionId the process definition id user want to delete
|
||||
* @param fromThis fromThis
|
||||
*/
|
||||
mVersionDeleteProcessDefinitionVersion ({ version, processDefinitionId, fromThis }) {
|
||||
self.deleteProcessDefinitionVersion({
|
||||
version: version,
|
||||
processDefinitionId: processDefinitionId
|
||||
}).then(res => {
|
||||
self.$message.success(res.msg || '')
|
||||
fromThis.$emit('mVersionGetProcessDefinitionVersionsPage', {
|
||||
pageNo: 1,
|
||||
pageSize: 10,
|
||||
processDefinitionId: processDefinitionId,
|
||||
fromThis: fromThis
|
||||
})
|
||||
}).catch(e => {
|
||||
self.$message.error(e.msg || '')
|
||||
})
|
||||
},
|
||||
|
||||
/**
|
||||
* remove this drawer
|
||||
*
|
||||
* @param fromThis
|
||||
*/
|
||||
close ({ fromThis }) {
|
||||
setTimeout(() => {
|
||||
fromThis.$destroy()
|
||||
self.versionsModel.remove()
|
||||
}, 0)
|
||||
}
|
||||
},
|
||||
props: {
|
||||
processDefinition: item,
|
||||
processDefinitionVersions: processDefinitionVersions,
|
||||
total: total,
|
||||
pageNo: pageNo,
|
||||
pageSize: pageSize
|
||||
}
|
||||
})
|
||||
}
|
||||
})
|
||||
}).catch(e => {
|
||||
this.$message.error(e.msg || '')
|
||||
})
|
||||
},
|
||||
|
||||
_batchExport () {
|
||||
this.exportDefinition({
|
||||
processDefinitionIds: this.strSelectIds,
|
||||
|
|
@ -423,6 +544,6 @@
|
|||
},
|
||||
mounted () {
|
||||
},
|
||||
components: { }
|
||||
components: { mVersions }
|
||||
}
|
||||
</script>
|
||||
|
|
|
|||
|
|
@ -0,0 +1,255 @@
|
|||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
* contributor license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright ownership.
|
||||
* The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
* (the "License"); you may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
<template>
|
||||
<div class="container">
|
||||
|
||||
<div class="title-box">
|
||||
<span class="name">{{$t('Version Info')}}</span>
|
||||
</div>
|
||||
|
||||
<div class="table-box" v-if="processDefinitionVersions.length > 0">
|
||||
<table class="fixed">
|
||||
<caption><!-- placeHolder --></caption>
|
||||
<tr>
|
||||
<th scope="col">
|
||||
<span>#</span>
|
||||
</th>
|
||||
<th scope="col" style="min-width: 40px">
|
||||
<span>Version</span>
|
||||
</th>
|
||||
<th scope="col" style="min-width: 200px;max-width: 300px;">
|
||||
<span>{{$t('Description')}}</span>
|
||||
</th>
|
||||
<th scope="col" style="min-width: 50px">
|
||||
<span>{{$t('Create Time')}}</span>
|
||||
</th>
|
||||
<th scope="col" style="min-width: 300px">
|
||||
<span>{{$t('Operation')}}</span>
|
||||
</th>
|
||||
</tr>
|
||||
<tr v-for="(item, $index) in processDefinitionVersions" :key="item.id">
|
||||
<td>
|
||||
<span>-</span>
|
||||
</td>
|
||||
<td>
|
||||
<span v-if="item.version">
|
||||
<span v-if="item.version === processDefinition.version" style="color: green"><strong>{{item.version}} {{$t('Current Version')}}</strong></span>
|
||||
<span v-else>{{item.version}}</span>
|
||||
</span>
|
||||
<span v-else>-</span>
|
||||
</td>
|
||||
<td>
|
||||
<span v-if="item.description">{{item.description}}</span>
|
||||
<span v-else>-</span>
|
||||
</td>
|
||||
<td>
|
||||
<span v-if="item.createTime">{{item.createTime}}</span>
|
||||
<span v-else>-</span>
|
||||
</td>
|
||||
<td>
|
||||
<x-poptip
|
||||
:ref="'poptip-switch-version-' + $index"
|
||||
placement="top-end"
|
||||
width="90">
|
||||
<p>{{$t('Confirm Switch To This Version?')}}</p>
|
||||
<div style="text-align: right; margin: 0;padding-top: 4px;">
|
||||
<x-button type="text" size="xsmall" shape="circle" @click="_closeSwitchVersion($index)">{{$t('Cancel')}}</x-button>
|
||||
<x-button type="primary" size="xsmall" shape="circle" @click="_mVersionSwitchProcessDefinitionVersion(item)">{{$t('Confirm')}}</x-button>
|
||||
</div>
|
||||
<template slot="reference">
|
||||
<x-button
|
||||
icon="ans-icon-dependence"
|
||||
type="primary"
|
||||
shape="circle"
|
||||
size="xsmall"
|
||||
:disabled="item.version === processDefinition.version"
|
||||
data-toggle="tooltip"
|
||||
:title="$t('Switch To This Version')">
|
||||
</x-button>
|
||||
</template>
|
||||
</x-poptip>
|
||||
<x-poptip
|
||||
:ref="'poptip-delete-' + $index"
|
||||
placement="top-end"
|
||||
width="90">
|
||||
<p>{{$t('Delete?')}}</p>
|
||||
<div style="text-align: right; margin: 0;padding-top: 4px;">
|
||||
<x-button type="text" size="xsmall" shape="circle" @click="_closeDelete($index)">{{$t('Cancel')}}</x-button>
|
||||
<x-button type="primary" size="xsmall" shape="circle" @click="_mVersionDeleteProcessDefinitionVersion(item,$index)">{{$t('Confirm')}}</x-button>
|
||||
</div>
|
||||
<template slot="reference">
|
||||
<x-button
|
||||
icon="ans-icon-trash"
|
||||
type="error"
|
||||
shape="circle"
|
||||
size="xsmall"
|
||||
:disabled="item.version === processDefinition.version"
|
||||
data-toggle="tooltip"
|
||||
:title="$t('delete')">
|
||||
</x-button>
|
||||
</template>
|
||||
</x-poptip>
|
||||
</td>
|
||||
</tr>
|
||||
</table>
|
||||
</div>
|
||||
|
||||
<div v-if="processDefinitionVersions.length === 0">
|
||||
<m-no-data><!----></m-no-data>
|
||||
</div>
|
||||
|
||||
<div v-if="processDefinitionVersions.length > 0">
|
||||
<div class="bottom-box">
|
||||
<x-button type="text" @click="_close()"> {{$t('Cancel')}} </x-button>
|
||||
<x-page :current="pageNo" :total="total" @on-change="_mVersionGetProcessDefinitionVersionsPage" small><!----></x-page>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
</div>
|
||||
</template>
|
||||
|
||||
<script>
|
||||
import mNoData from '@/module/components/noData/noData'
|
||||
|
||||
export default {
|
||||
name: 'versions',
|
||||
data () {
|
||||
return {
|
||||
tableHeaders: [
|
||||
{
|
||||
label: 'version',
|
||||
prop: 'version'
|
||||
},
|
||||
{
|
||||
label: 'createTime',
|
||||
prop: 'createTime'
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
props: {
|
||||
processDefinition: Object,
|
||||
processDefinitionVersions: Array,
|
||||
total: Number,
|
||||
pageNo: Number,
|
||||
pageSize: Number
|
||||
},
|
||||
methods: {
|
||||
/**
|
||||
* switch version in process definition version list
|
||||
*/
|
||||
_mVersionSwitchProcessDefinitionVersion (item) {
|
||||
this.$emit('mVersionSwitchProcessDefinitionVersion', {
|
||||
version: item.version,
|
||||
processDefinitionId: this.processDefinition.id,
|
||||
fromThis: this
|
||||
})
|
||||
},
|
||||
|
||||
/**
|
||||
* delete one version of process definition
|
||||
*/
|
||||
_mVersionDeleteProcessDefinitionVersion (item) {
|
||||
this.$emit('mVersionDeleteProcessDefinitionVersion', {
|
||||
version: item.version,
|
||||
processDefinitionId: this.processDefinition.id,
|
||||
fromThis: this
|
||||
})
|
||||
},
|
||||
|
||||
/**
|
||||
* Paging event of process definition versions
|
||||
*/
|
||||
_mVersionGetProcessDefinitionVersionsPage (val) {
|
||||
this.$emit('mVersionGetProcessDefinitionVersionsPage', {
|
||||
pageNo: val,
|
||||
pageSize: this.pageSize,
|
||||
processDefinitionId: this.processDefinition.id,
|
||||
fromThis: this
|
||||
})
|
||||
},
|
||||
|
||||
/**
|
||||
* Close the switch version layer
|
||||
*/
|
||||
_closeSwitchVersion (i) {
|
||||
if (i > 0) {
|
||||
this.$refs[`poptip-switch-version-${i}`][0].doClose()
|
||||
}
|
||||
},
|
||||
|
||||
/**
|
||||
* Close the delete layer
|
||||
*/
|
||||
_closeDelete (i) {
|
||||
if (i > 0) {
|
||||
this.$refs[`poptip-delete-${i}`][0].doClose()
|
||||
}
|
||||
},
|
||||
|
||||
/**
|
||||
* Close and destroy component and component internal events
|
||||
*/
|
||||
_close () {
|
||||
// flag Whether to delete a node this.$destroy()
|
||||
this.$emit('close', {
|
||||
fromThis: this
|
||||
})
|
||||
}
|
||||
},
|
||||
created () {},
|
||||
mounted () {},
|
||||
components: { mNoData }
|
||||
}
|
||||
</script>
|
||||
|
||||
<style lang="scss" rel="stylesheet/scss">
|
||||
.container {
|
||||
width: 500px;
|
||||
position: relative;
|
||||
.title-box {
|
||||
height: 61px;
|
||||
border-bottom: 1px solid #DCDEDC;
|
||||
position: relative;
|
||||
.name {
|
||||
position: absolute;
|
||||
left: 24px;
|
||||
top: 18px;
|
||||
font-size: 16px;
|
||||
}
|
||||
}
|
||||
.bottom-box {
|
||||
position: absolute;
|
||||
bottom: 0;
|
||||
left: 0;
|
||||
width: 100%;
|
||||
text-align: right;
|
||||
height: 60px;
|
||||
line-height: 60px;
|
||||
border-top: 1px solid #DCDEDC;
|
||||
background: #fff;
|
||||
.ans-page {
|
||||
display: inline-block;
|
||||
}
|
||||
}
|
||||
.table-box {
|
||||
overflow-y: scroll;
|
||||
height: calc(100vh - 61px);
|
||||
padding-bottom: 60px;
|
||||
}
|
||||
}
|
||||
</style>
|
||||
|
|
@ -72,7 +72,7 @@
|
|||
<m-list-box-f>
|
||||
<template slot="name"><strong>*</strong>{{$t('UDF Resources')}}</template>
|
||||
<template slot="content">
|
||||
<treeselect style="width:535px;float:left;" v-model="resourceId" :disable-branch-nodes="true" :options="udfResourceList" :disabled="isUpdate" :normalizer="normalizer" :placeholder="$t('Please select UDF resources directory')">
|
||||
<treeselect style="width:535px;float:left;" v-model="resourceId" maxHeight="200" :disable-branch-nodes="true" :options="udfResourceList" :disabled="isUpdate" :normalizer="normalizer" :placeholder="$t('Please select UDF resources directory')">
|
||||
<div slot="value-label" slot-scope="{ node }">{{ node.raw.fullName }}</div>
|
||||
</treeselect>
|
||||
<x-button type="primary" @click="_toggleUpdate" :disabled="upDisabled">{{$t('Upload Resources')}}</x-button>
|
||||
|
|
@ -81,7 +81,7 @@
|
|||
<m-list-box-f v-if="isUpdate">
|
||||
<template slot="name"><strong>*</strong>{{$t('UDF resources directory')}}</template>
|
||||
<template slot="content">
|
||||
<treeselect style="width:535px;float:left;" v-model="pid" @select="selTree" :options="udfResourceDirList" :normalizer="normalizer" :placeholder="$t('Please select UDF resources directory')">
|
||||
<treeselect style="width:535px;float:left;" v-model="pid" maxHeight="200" @select="selTree" :options="udfResourceDirList" :normalizer="normalizer" :placeholder="$t('Please select UDF resources directory')">
|
||||
<div slot="value-label" slot-scope="{ node }">{{ node.raw.fullName }}</div>
|
||||
</treeselect>
|
||||
</template>
|
||||
|
|
|
|||
|
|
@ -79,6 +79,46 @@ export default {
|
|||
})
|
||||
})
|
||||
},
|
||||
|
||||
/**
|
||||
* get process definition versions pagination info
|
||||
*/
|
||||
getProcessDefinitionVersionsPage ({ state }, payload) {
|
||||
return new Promise((resolve, reject) => {
|
||||
io.get(`projects/${state.projectName}/process/versions`, payload, res => {
|
||||
resolve(res)
|
||||
}).catch(e => {
|
||||
reject(e)
|
||||
})
|
||||
})
|
||||
},
|
||||
|
||||
/**
|
||||
* switch process definition version
|
||||
*/
|
||||
switchProcessDefinitionVersion ({ state }, payload) {
|
||||
return new Promise((resolve, reject) => {
|
||||
io.get(`projects/${state.projectName}/process/version/switch`, payload, res => {
|
||||
resolve(res)
|
||||
}).catch(e => {
|
||||
reject(e)
|
||||
})
|
||||
})
|
||||
},
|
||||
|
||||
/**
|
||||
* delete process definition version
|
||||
*/
|
||||
deleteProcessDefinitionVersion ({ state }, payload) {
|
||||
return new Promise((resolve, reject) => {
|
||||
io.get(`projects/${state.projectName}/process/version/delete`, payload, res => {
|
||||
resolve(res)
|
||||
}).catch(e => {
|
||||
reject(e)
|
||||
})
|
||||
})
|
||||
},
|
||||
|
||||
/**
|
||||
* Update process instance status
|
||||
*/
|
||||
|
|
@ -126,6 +166,8 @@ export default {
|
|||
state.connects = JSON.parse(res.data.connects)
|
||||
// locations
|
||||
state.locations = JSON.parse(res.data.locations)
|
||||
// version
|
||||
state.version = res.data.version
|
||||
// Process definition
|
||||
const processDefinitionJson = JSON.parse(res.data.processDefinitionJson)
|
||||
// tasks info
|
||||
|
|
|
|||
|
|
@ -24,10 +24,10 @@
|
|||
<x-button type="ghost" value="udfResource" @click="_ckUDf">{{$t('UDF resources')}}</x-button>
|
||||
</x-button-group>
|
||||
</div>
|
||||
<treeselect v-show="checkedValue=='fileResource'" v-model="selectFileSource" :multiple="true" :options="fileList" :normalizer="normalizer" :value-consists-of="valueConsistsOf" :placeholder="$t('Please select resources')">
|
||||
<treeselect v-show="checkedValue=='fileResource'" v-model="selectFileSource" :multiple="true" maxHeight="200" :options="fileList" :normalizer="normalizer" :value-consists-of="valueConsistsOf" :placeholder="$t('Please select resources')">
|
||||
<div slot="value-label" slot-scope="{ node }">{{ node.raw.fullName }}</div>
|
||||
</treeselect>
|
||||
<treeselect v-show="checkedValue=='udfResource'" v-model="selectUdfSource" :multiple="true" :options="udfList" :normalizer="normalizer" :value-consists-of="valueConsistsOf" :placeholder="$t('Please select resources')">
|
||||
<treeselect v-show="checkedValue=='udfResource'" v-model="selectUdfSource" :multiple="true" maxHeight="200" :options="udfList" :normalizer="normalizer" :value-consists-of="valueConsistsOf" :placeholder="$t('Please select resources')">
|
||||
<div slot="value-label" slot-scope="{ node }">{{ node.raw.fullName }}</div>
|
||||
</treeselect>
|
||||
<!-- <div class="select-list-box">
|
||||
|
|
|
|||
|
|
@ -135,6 +135,7 @@ export default {
|
|||
'Child Node': 'Child Node',
|
||||
'Please select a sub-Process': 'Please select a sub-Process',
|
||||
Edit: 'Edit',
|
||||
'Switch To This Version': 'Switch To This Version',
|
||||
'Datasource Name': 'Datasource Name',
|
||||
'Please enter datasource name': 'Please enter datasource name',
|
||||
IP: 'IP',
|
||||
|
|
@ -159,8 +160,11 @@ export default {
|
|||
'Create Time': 'Create Time',
|
||||
'Update Time': 'Update Time',
|
||||
Operation: 'Operation',
|
||||
'Current Version': 'Current Version',
|
||||
'Click to view': 'Click to view',
|
||||
'Delete?': 'Delete?',
|
||||
'Switch Version Successfully': 'Switch Version Successfully',
|
||||
'Confirm Switch To This Version?': 'Confirm Switch To This Version?',
|
||||
Confirm: 'Confirm',
|
||||
'Task status statistics': 'Task Status Statistics',
|
||||
Number: 'Number',
|
||||
|
|
@ -288,6 +292,7 @@ export default {
|
|||
Rename: 'Rename',
|
||||
Download: 'Download',
|
||||
Export: 'Export',
|
||||
'Version Info': 'Version Info',
|
||||
Submit: 'Submit',
|
||||
'Edit UDF Function': 'Edit UDF Function',
|
||||
type: 'type',
|
||||
|
|
@ -632,5 +637,6 @@ export default {
|
|||
'Directory detail': 'Directory detail',
|
||||
'Connection name': 'Connection name',
|
||||
'Current connection settings': 'Current connection settings',
|
||||
'Please save the DAG before formatting': 'Please save the DAG before formatting'
|
||||
'Please save the DAG before formatting': 'Please save the DAG before formatting',
|
||||
'Batch copy': 'Batch copy'
|
||||
}
|
||||
|
|
|
|||
|
|
@ -138,6 +138,7 @@ export default {
|
|||
'Child Node': '子节点',
|
||||
'Please select a sub-Process': '请选择子工作流',
|
||||
Edit: '编辑',
|
||||
'Switch To This Version': '切换到该版本',
|
||||
'Datasource Name': '数据源名称',
|
||||
'Please enter datasource name': '请输入数据源名称',
|
||||
IP: 'IP主机名',
|
||||
|
|
@ -162,8 +163,11 @@ export default {
|
|||
'Create Time': '创建时间',
|
||||
'Update Time': '更新时间',
|
||||
Operation: '操作',
|
||||
'Current Version': '当前版本',
|
||||
'Click to view': '点击查看',
|
||||
'Delete?': '确定删除吗?',
|
||||
'Switch Version Successfully': '切换版本成功',
|
||||
'Confirm Switch To This Version?': '确定切换到该版本吗?',
|
||||
Confirm: '确定',
|
||||
'Task status statistics': '任务状态统计',
|
||||
Number: '数量',
|
||||
|
|
@ -289,6 +293,7 @@ export default {
|
|||
Rename: '重命名',
|
||||
Download: '下载',
|
||||
Export: '导出',
|
||||
'Version Info': '版本信息',
|
||||
Submit: '提交',
|
||||
'Edit UDF Function': '编辑UDF函数',
|
||||
type: '类型',
|
||||
|
|
@ -632,5 +637,6 @@ export default {
|
|||
'Directory detail': '查看目录详情',
|
||||
'Connection name': '连线名',
|
||||
'Current connection settings': '当前连线设置',
|
||||
'Please save the DAG before formatting': '格式化前请先保存DAG'
|
||||
'Please save the DAG before formatting': '格式化前请先保存DAG',
|
||||
'Batch copy': '批量复制'
|
||||
}
|
||||
|
|
|
|||
|
|
@ -352,6 +352,10 @@ body::-webkit-scrollbar-thumb {
|
|||
.f-f18 {
|
||||
font-size:18px
|
||||
}
|
||||
.vue-treeselect__multi-value {
|
||||
max-height: 200px;
|
||||
overflow: auto;
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
|
|
|||
|
|
@ -444,4 +444,8 @@ public class BrowserCommon {
|
|||
public boolean ifTextExists(By locator, String text) {
|
||||
return wait.until(ExpectedConditions.textToBePresentInElementLocated(locator, text));
|
||||
}
|
||||
|
||||
public void flushPage() {
|
||||
driver.navigate().refresh();
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -17,6 +17,9 @@
|
|||
package org.apache.dolphinscheduler.data.project;
|
||||
|
||||
public class ProjectData {
|
||||
|
||||
public static final String CREATE_PROJECT_BUTTON = "创建项目";
|
||||
|
||||
// create project name
|
||||
public static final String PROJECT_NAME = "selenium_project_1";
|
||||
// create project description
|
||||
|
|
|
|||
|
|
@ -30,6 +30,9 @@ public class TimingData {
|
|||
|
||||
public static final String EDIT_Cc = "test.edit123qwe@qq.com";
|
||||
|
||||
public static final String WORKFLOW_TITLE = "工作流定义 - DolphinScheduler";
|
||||
public static final String TIMING_OFFLINE_STATE = "下线";
|
||||
|
||||
public static final String TIMING_ONLINE_STATE = "上线";
|
||||
|
||||
public static final String TIMING_TITLE = "定时任务列表 - DolphinScheduler";
|
||||
}
|
||||
|
|
|
|||
|
|
@ -20,6 +20,9 @@ public class WorkflowDefineData {
|
|||
/**
|
||||
* create workflow data
|
||||
*/
|
||||
|
||||
public static final String workflow_define = "工作流定义";
|
||||
|
||||
//input shell task name
|
||||
public static final String SHELL_TASK_NAME = "shell_task_selenium_1";
|
||||
|
||||
|
|
@ -74,4 +77,13 @@ public class WorkflowDefineData {
|
|||
|
||||
//input to add workflow global parameters value
|
||||
public static final String INPUT_ADD_WORKFLOW_GLOBAL_PARAMETERS_VALUES = "selenium_global_parameters_value_2";
|
||||
|
||||
/**
|
||||
* workflowDefine list
|
||||
*/
|
||||
public static final String WORKFLOW_ONLINE_STATE = "上线";
|
||||
|
||||
public static final String WORKFLOW_OFFLINE_STATE = "下线";
|
||||
|
||||
|
||||
}
|
||||
|
|
|
|||
|
|
@ -17,18 +17,14 @@
|
|||
package org.apache.dolphinscheduler.data.security;
|
||||
|
||||
public class AlertManageData {
|
||||
/**
|
||||
* Alert Name
|
||||
*/
|
||||
//Alert Name
|
||||
public static final String ALERT_NAME = "selenium_alert_Name";
|
||||
/**
|
||||
* Alert Type
|
||||
*/
|
||||
public static final String CREATE_ALERT = "创建告警组";
|
||||
|
||||
// Alert Type
|
||||
public static final String ALERT_TYPE = "邮件";
|
||||
|
||||
/**
|
||||
* Alert Description
|
||||
*/
|
||||
//Alert Description
|
||||
public static final String DESCRIPTION = "create alert test";
|
||||
|
||||
public static final String ALERT_MANAGE = "告警组管理 - DolphinScheduler";
|
||||
|
|
|
|||
|
|
@ -22,6 +22,8 @@ public class QueueManageData {
|
|||
*/
|
||||
public static final String QUEUE_NAME = "selenium_queue_name";
|
||||
|
||||
public static final String CREATE_QUEUE = "创建队列";
|
||||
|
||||
/**
|
||||
* Create Queue Value
|
||||
*/
|
||||
|
|
@ -30,12 +32,12 @@ public class QueueManageData {
|
|||
/**
|
||||
* Edit Queue Name
|
||||
*/
|
||||
public static final String EDIT_QUEUE_NAME = "_edit";
|
||||
public static final String EDIT_QUEUE_NAME = "selenium_queue_value_edit";
|
||||
|
||||
/**
|
||||
* Edit Queue Value
|
||||
*/
|
||||
public static final String EDIT_QUEUE_VALUE = "_edit";
|
||||
public static final String EDIT_QUEUE_VALUE = "selenium_queue_value_edit";
|
||||
|
||||
public static final String QUEUE_MANAGE = "队列管理 - DolphinScheduler";
|
||||
|
||||
|
|
|
|||
|
|
@ -18,7 +18,11 @@ package org.apache.dolphinscheduler.data.security;
|
|||
|
||||
public class TokenManageData {
|
||||
public static final String TOKEN_MANAGE = "令牌管理 - DolphinScheduler";
|
||||
public static final String CREATE_TOKEN = "创建令牌";
|
||||
|
||||
public static final String DATE = "2038-06-10 00:00:00";
|
||||
public static final String NAME = "admin";
|
||||
|
||||
|
||||
|
||||
}
|
||||
|
|
|
|||
|
|
@ -31,6 +31,9 @@ public class UserManageData {
|
|||
|
||||
public static final String USER_MANAGE = "用户管理 - DolphinScheduler";
|
||||
|
||||
public static final String CREATE_USER_BUTTON = "创建用户";
|
||||
|
||||
|
||||
/**
|
||||
* edit user
|
||||
*/
|
||||
|
|
|
|||
|
|
@ -20,6 +20,9 @@ import org.openqa.selenium.By;
|
|||
|
||||
public class ProcessInstanceLocator {
|
||||
// jump Process Instance page
|
||||
// process instance state is success
|
||||
public static final By PROCESS_INSTANCE_SUCCESS_STATE = By.xpath("//table/tr[2]/td[4]/span/em[@title['data-original-title']='成功']");
|
||||
|
||||
//click Process Instance name
|
||||
public static final By CLICK_PROCESS_INSTANCE_NAME = By.xpath("//div[4]/div/ul/li[2]");
|
||||
|
||||
|
|
|
|||
|
|
@ -34,6 +34,9 @@ public class ProjectLocator {
|
|||
//submit button
|
||||
public static final By SUBMIT_BUTTON = By.xpath("//div[3]/button[2]/span");
|
||||
|
||||
// project name
|
||||
public static final By LIST_PROJECT_NAME = By.xpath("//table/tr[2]/td[2]/span/a");
|
||||
|
||||
//delete project button
|
||||
public static final By DELETE_PROJECT_BUTTON = By.xpath("//div[3]/div[1]/div/table/tr[2]/td[9]/span/button");
|
||||
|
||||
|
|
|
|||
|
|
@ -45,17 +45,23 @@ public class TimingLocator {
|
|||
public static final By CLICK_CREATE_BUTTON = By.xpath("//div[12]/button[2]/span");
|
||||
|
||||
//edit timing
|
||||
public static final By TIMING_STATE = By.xpath("//table/tr[2]/td[9]/span");
|
||||
|
||||
public static final By CLICK_TIMING_MANAGEMENT_BUTTON = By.xpath("//tr[2]/td[10]/button[6]");
|
||||
|
||||
public static final By WORKFLOW_NAME = By.xpath("//table/tr[2]/td[2]/span/a");
|
||||
|
||||
public static final By CLICK_EDIT_TIMING_BUTTON = By.xpath("//tr[2]/td[10]/button[1]/i");
|
||||
|
||||
//online timing
|
||||
public static final By CLICK_ONLINE_TIMING_BUTTON = By.xpath("//td[10]/button[2]");
|
||||
public static final By TIMING_MANAGEMENT_TIMING_STATE = By.xpath("//table/tr[2]/td[7]/span");
|
||||
|
||||
public static final By CLICK_ONLINE_TIMING_BUTTON = By.xpath("//table/tr[2]/td[10]/button[@title['data-original-title']='上线']");
|
||||
|
||||
//offline timing
|
||||
public static final By CLICK_OFFLINE_TIMING_BUTTON = By.xpath("//div[2]/div[3]/div/div[2]/div[1]/table/tr[2]/td[10]/button[2]");
|
||||
public static final By CLICK_OFFLINE_TIMING_BUTTON = By.xpath("//table/tr[2]/td[10]/button[@title['data-original-title']='下线']");
|
||||
|
||||
//delete timing
|
||||
public static final By CLICK_DELETE_TIMING_BUTTON = By.xpath("//div[2]/div[3]/div/div[2]/div[1]/table/tr[2]/td[10]/span/button");
|
||||
public static final By CLICK_DELETE_TIMING_BUTTON = By.xpath("//table/tr[2]/td[10]/span/button");
|
||||
public static final By CLICK_CONFIRM_DELETE_TIMING_BUTTON = By.xpath("//div[2]/div/button[2]/span");
|
||||
}
|
||||
|
|
|
|||
|
|
@ -49,10 +49,10 @@ public class WorkflowDefineLocator {
|
|||
public static final By INPUT_SHELL_TASK_NAME = By.xpath("//input");
|
||||
|
||||
//click stop run type
|
||||
public static final By CLICK_STOP_RUN_TYPE = By.xpath("//label[2]/span/input");
|
||||
public static final By CLICK_STOP_RUN_TYPE = By.xpath("//label[2]/span[1]/input");
|
||||
|
||||
//click normal run type
|
||||
public static final By CLICK_NORMAL_RUN_TYPE = By.xpath("//span/input");
|
||||
public static final By CLICK_NORMAL_RUN_TYPE = By.xpath("//label[1]/span[1]/input");
|
||||
|
||||
//input shell task description
|
||||
public static final By INPUT_SHELL_TASK_DESCRIPTION = By.xpath("//label/div/textarea");
|
||||
|
|
@ -182,23 +182,24 @@ public class WorkflowDefineLocator {
|
|||
/**
|
||||
* online workflow
|
||||
*/
|
||||
public static final By WORKFLOW_STATE = By.xpath("//table/tr[2]/td[4]/span");
|
||||
|
||||
|
||||
// click online workflow button
|
||||
public static final By CLICK_ONLINE_WORKFLOW_BUTTON = By.xpath("//div[1]/div/table/tr[2]/td[10]/button[4]");
|
||||
public static final By CLICK_ONLINE_WORKFLOW_BUTTON = By.xpath("//button[@title['data-original-title']='上线']");
|
||||
|
||||
/**
|
||||
* offline workflow
|
||||
*/
|
||||
// click offline workflow button
|
||||
public static final By CLICK_OFFLINE_WORKFLOW_BUTTON = By.xpath("//div[1]/div/table/tr[2]/td[10]/button[4]");
|
||||
|
||||
public static final By CLICK_OFFLINE_WORKFLOW_BUTTON = By.xpath("//button[@title['data-original-title']='下线']");
|
||||
|
||||
/**
|
||||
* delete workflow
|
||||
*/
|
||||
//click delete workflow button
|
||||
public static final By DELETE_WORKFLOW_BOTTOM = By.xpath("//div[3]/div[1]/div/table/tr[2]/td[10]/span/button");
|
||||
public static final By DELETE_WORKFLOW_BOTTOM = By.xpath("//table/tr[2]/td[10]/span/button");
|
||||
|
||||
//click confirm delete workflow button
|
||||
public static final By CONFIRM_DELETE_WORKFLOW_BOTTOM = By.xpath("//div[2]/div/button[2]/span");
|
||||
|
||||
}
|
||||
|
|
|
|||
|
|
@ -28,8 +28,9 @@ public class AlertManageLocator {
|
|||
public static final By INPUT_ALERT_DESCRIPTION = By.xpath("//textarea");
|
||||
public static final By SUBMIT_ALERT = By.xpath("//div[3]/button[2]/span");
|
||||
|
||||
public static final By ALERT_NAME = By.xpath("//table/tr[2]/td[2]/span");
|
||||
|
||||
//delete alert locator
|
||||
public static final By DELETE_ALERT_BUTTON = By.xpath("//span/button");
|
||||
public static final By CONFIRM_DELETE_ALERT_BUTTON = By.xpath("//div[2]/div/button[2]/span");
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -26,6 +26,8 @@ public class QueueManageLocator {
|
|||
public static final By INPUT_QUEUE_VALUE = By.xpath("//div[2]/div[2]/div/input");
|
||||
public static final By SUBMIT_QUEUE = By.xpath("//button[2]/span");
|
||||
|
||||
public static final By LIST_QUEUE_NAME = By.xpath("//table/tr[2]/td[2]/span");
|
||||
|
||||
//edit queue locator
|
||||
public static final By CLICK_EDIT_QUEUE = By.xpath("//td[6]/button/i");
|
||||
}
|
||||
|
|
|
|||
|
|
@ -38,4 +38,6 @@ public class TenantManageLocator{
|
|||
public static final By DELETE_TENANT_BUTTON = By.xpath("//div[3]/div[1]/div/table/tr[2]/td[8]/span/button");
|
||||
|
||||
public static final By CONFIRM_DELETE_TENANT_BUTTON = By.xpath("//div[2]/div/button[2]/span");
|
||||
|
||||
public static final By TENANT_CODE_FIRST = By.xpath("//table/tr[2]/td[2]/span");
|
||||
}
|
||||
|
|
|
|||
|
|
@ -30,7 +30,11 @@ public class TokenManageLocator {
|
|||
|
||||
public static final By CLICK_SUBMIT_BUTTON = By.xpath("//div[3]/button[2]/span");
|
||||
|
||||
public static final By EDIT_TOKEN_BUTTON = By.xpath("//table/tr[2]/td[7]/button");
|
||||
|
||||
//edit token
|
||||
public static final By TOKEN = By.xpath("//table/tr[2]/td[1]/span");
|
||||
|
||||
public static final By CLICK_EDIT_BUTTON = By.xpath("//div[3]/div[1]/div/table/tr[2]/td[7]/button/i");
|
||||
|
||||
//delete token
|
||||
|
|
|
|||
|
|
@ -52,13 +52,15 @@ public class UserManageLocator {
|
|||
/**
|
||||
* edit user
|
||||
*/
|
||||
public static final By USER_NAME = By.xpath("//table/tr[2]/td[2]/span");
|
||||
|
||||
public static final By EDIT_GENERAL_USER_BUTTON = By.xpath("//div[3]/div[1]/div/table/tr[2]/td[11]/button");
|
||||
|
||||
public static final By EDIT_ADMIN_USER_BUTTON = By.xpath("//div[3]/div[1]/div/table/tr[3]/td[11]/button");
|
||||
/**
|
||||
* delete user
|
||||
*/
|
||||
public static final By DELETE_USER_BUTTON = By.xpath("//div[3]/div[1]/div/table/tr[2]/td[11]/span[2]/button");
|
||||
public static final By DELETE_USER_BUTTON = By.xpath("//table/tr[3]/td[11]/span[2]/button");
|
||||
|
||||
public static final By CONFIRM_DELETE_USER_BUTTON = By.xpath("//div[2]/div/button[2]/span");
|
||||
public static final By CONFIRM_DELETE_USER_BUTTON = By.xpath("//tr[3]/td[11]/span[2]/div/div[2]/div/button[2]/span");
|
||||
}
|
||||
|
|
|
|||
|
|
@ -34,6 +34,7 @@ public class ProcessInstancePage extends PageCommon {
|
|||
public boolean rerunWorkflowPage() throws InterruptedException {
|
||||
Thread.sleep(2000);
|
||||
clickTopElement(ProcessInstanceLocator.CLICK_PROCESS_INSTANCE_NAME);
|
||||
locateElement(ProcessInstanceLocator.PROCESS_INSTANCE_SUCCESS_STATE);
|
||||
clickTopElement(ProcessInstanceLocator.CLICK_RERUN_BUTTON);
|
||||
return ifTitleContains(ProcessInstanceData.PROCESS_INSTANCE_TITLE);
|
||||
}
|
||||
|
|
|
|||
|
|
@ -32,7 +32,6 @@ public class ProjectPage extends PageCommon {
|
|||
*/
|
||||
public boolean jumpProjectManagePage() throws InterruptedException {
|
||||
clickTopElement(ProjectLocator.PROJECT_MANAGE);
|
||||
Thread.sleep(TestConstant.ONE_THOUSAND);
|
||||
return ifTitleContains(ProjectData.PROJECT_TITLE);
|
||||
}
|
||||
|
||||
|
|
@ -42,7 +41,7 @@ public class ProjectPage extends PageCommon {
|
|||
* @return Whether to enter the specified page after create project
|
||||
*/
|
||||
public boolean createProject() throws InterruptedException {
|
||||
Thread.sleep(500);
|
||||
ifTextExists(ProjectLocator.CREATE_PROJECT_BUTTON,ProjectData.CREATE_PROJECT_BUTTON);
|
||||
clickElement(ProjectLocator.CREATE_PROJECT_BUTTON);
|
||||
|
||||
// input create project data
|
||||
|
|
@ -53,7 +52,7 @@ public class ProjectPage extends PageCommon {
|
|||
clickButton(ProjectLocator.SUBMIT_BUTTON);
|
||||
|
||||
// Whether to enter the specified page after submit
|
||||
return ifTitleContains(ProjectData.PROJECT_TITLE);
|
||||
return ifTextExists(ProjectLocator.LIST_PROJECT_NAME,ProjectData.PROJECT_NAME);
|
||||
}
|
||||
|
||||
/**
|
||||
|
|
|
|||
|
|
@ -18,7 +18,9 @@ package org.apache.dolphinscheduler.page.project;
|
|||
|
||||
import org.apache.dolphinscheduler.common.PageCommon;
|
||||
import org.apache.dolphinscheduler.data.project.RunWorkflowData;
|
||||
import org.apache.dolphinscheduler.data.project.WorkflowDefineData;
|
||||
import org.apache.dolphinscheduler.locator.project.RunWorkflowLocator;
|
||||
import org.apache.dolphinscheduler.locator.project.WorkflowDefineLocator;
|
||||
import org.openqa.selenium.WebDriver;
|
||||
|
||||
public class RunWorkflowPage extends PageCommon {
|
||||
|
|
@ -27,17 +29,17 @@ public class RunWorkflowPage extends PageCommon {
|
|||
}
|
||||
|
||||
public boolean runWorkflow() throws InterruptedException {
|
||||
// Determine whether the workflow status is online
|
||||
ifTextExists(WorkflowDefineLocator.WORKFLOW_STATE, WorkflowDefineData.WORKFLOW_ONLINE_STATE);
|
||||
|
||||
// click run workflow button
|
||||
System.out.println("Click run workflow button");
|
||||
Thread.sleep(1000);
|
||||
clickButton(RunWorkflowLocator.CLICK_RUN_WORKFLOW_BUTTON);
|
||||
Thread.sleep(1000);
|
||||
|
||||
clickElement(RunWorkflowLocator.SELECT_FAILURE_STRATEGY_END);
|
||||
clickElement(RunWorkflowLocator.SELECT_FAILURE_STRATEGY_CONTINUE);
|
||||
clickElement(RunWorkflowLocator.CLICK_NOTICE_STRATEGY);
|
||||
clickElement(RunWorkflowLocator.SELECT_NOTICE_STRATEGY);
|
||||
Thread.sleep(500);
|
||||
clickElement(RunWorkflowLocator.CLICK_PROCESS_PRIORITY);
|
||||
clickElement(RunWorkflowLocator.SELECT_PROCESS_PRIORITY_HIGHEST);
|
||||
clickElement(RunWorkflowLocator.CLICK_WORKER_GROUP);
|
||||
|
|
|
|||
|
|
@ -18,8 +18,9 @@ package org.apache.dolphinscheduler.page.project;
|
|||
|
||||
import org.apache.dolphinscheduler.common.PageCommon;
|
||||
import org.apache.dolphinscheduler.data.project.TimingData;
|
||||
import org.apache.dolphinscheduler.locator.project.RunWorkflowLocator;
|
||||
import org.apache.dolphinscheduler.data.project.WorkflowDefineData;
|
||||
import org.apache.dolphinscheduler.locator.project.TimingLocator;
|
||||
import org.apache.dolphinscheduler.locator.project.WorkflowDefineLocator;
|
||||
import org.openqa.selenium.WebDriver;
|
||||
|
||||
public class TimingPage extends PageCommon {
|
||||
|
|
@ -32,19 +33,19 @@ public class TimingPage extends PageCommon {
|
|||
* create timing
|
||||
*/
|
||||
public boolean createTiming() throws InterruptedException {
|
||||
// Determine whether the workflow status is online
|
||||
ifTextExists(WorkflowDefineLocator.WORKFLOW_STATE, WorkflowDefineData.WORKFLOW_ONLINE_STATE);
|
||||
|
||||
// click timing button
|
||||
System.out.println("Click timing button");
|
||||
Thread.sleep(1000);
|
||||
clickButton(TimingLocator.CLICK_TIMING_BUTTON);
|
||||
System.out.println("Click execution timing button");
|
||||
clickButton(TimingLocator.CLICK_EXECUTION_TIMING_BUTTON);
|
||||
Thread.sleep(1000);
|
||||
|
||||
clickElement(TimingLocator.SELECT_FAILURE_STRATEGY_END);
|
||||
clickElement(TimingLocator.SELECT_FAILURE_STRATEGY_CONTINUE);
|
||||
clickElement(TimingLocator.CLICK_NOTICE_STRATEGY);
|
||||
clickElement(TimingLocator.SELECT_NOTICE_STRATEGY);
|
||||
Thread.sleep(500);
|
||||
clickElement(TimingLocator.CLICK_PROCESS_PRIORITY);
|
||||
clickElement(TimingLocator.SELECT_PROCESS_PRIORITY);
|
||||
clickElement(TimingLocator.CLICK_WORKER_GROUP);
|
||||
|
|
@ -55,7 +56,7 @@ public class TimingPage extends PageCommon {
|
|||
sendInput(TimingLocator.INPUT_Cc,TimingData.Cc);
|
||||
clickButton(TimingLocator.CLICK_CREATE_BUTTON);
|
||||
|
||||
return ifTitleContains(TimingData.WORKFLOW_TITLE);
|
||||
return ifTextExists(TimingLocator.TIMING_STATE, TimingData.TIMING_OFFLINE_STATE);
|
||||
}
|
||||
|
||||
/**
|
||||
|
|
@ -64,20 +65,20 @@ public class TimingPage extends PageCommon {
|
|||
public boolean editTiming() throws InterruptedException {
|
||||
// click timing button
|
||||
System.out.println("Click timing management button");
|
||||
Thread.sleep(1000);
|
||||
clickButton(TimingLocator.CLICK_TIMING_MANAGEMENT_BUTTON);
|
||||
Thread.sleep(1000);
|
||||
|
||||
// Determine whether the workflow name exists
|
||||
ifTextExists(TimingLocator.WORKFLOW_NAME, WorkflowDefineData.INPUT_WORKFLOW_NAME);
|
||||
|
||||
System.out.println("Click edit timing button");
|
||||
clickButton(TimingLocator.CLICK_EDIT_TIMING_BUTTON);
|
||||
System.out.println("Click execution timing button");
|
||||
clickButton(TimingLocator.CLICK_EXECUTION_TIMING_BUTTON);
|
||||
Thread.sleep(1000);
|
||||
|
||||
clickElement(TimingLocator.SELECT_FAILURE_STRATEGY_END);
|
||||
clickElement(TimingLocator.SELECT_FAILURE_STRATEGY_CONTINUE);
|
||||
clickElement(TimingLocator.CLICK_NOTICE_STRATEGY);
|
||||
clickElement(TimingLocator.SELECT_NOTICE_STRATEGY);
|
||||
Thread.sleep(500);
|
||||
clickElement(TimingLocator.CLICK_PROCESS_PRIORITY);
|
||||
clickElement(TimingLocator.SELECT_PROCESS_PRIORITY);
|
||||
clickElement(TimingLocator.CLICK_WORKER_GROUP);
|
||||
|
|
@ -96,12 +97,15 @@ public class TimingPage extends PageCommon {
|
|||
* online timing
|
||||
*/
|
||||
public boolean onlineTiming() throws InterruptedException {
|
||||
flushPage();
|
||||
// Determine whether the timing is offline
|
||||
ifTextExists(TimingLocator.TIMING_MANAGEMENT_TIMING_STATE, TimingData.TIMING_OFFLINE_STATE);
|
||||
|
||||
// click online timing button
|
||||
System.out.println("Click online timing button");
|
||||
Thread.sleep(500);
|
||||
clickButton(TimingLocator.CLICK_ONLINE_TIMING_BUTTON);
|
||||
clickElement(TimingLocator.CLICK_ONLINE_TIMING_BUTTON);
|
||||
|
||||
return ifTitleContains(TimingData.TIMING_TITLE );
|
||||
return ifTextExists(TimingLocator.TIMING_MANAGEMENT_TIMING_STATE, TimingData.TIMING_ONLINE_STATE);
|
||||
}
|
||||
|
||||
|
||||
|
|
@ -109,12 +113,15 @@ public class TimingPage extends PageCommon {
|
|||
* offline timing
|
||||
*/
|
||||
public boolean offlineTiming() throws InterruptedException {
|
||||
flushPage();
|
||||
// Determine whether the timing is online
|
||||
ifTextExists(TimingLocator.TIMING_MANAGEMENT_TIMING_STATE, TimingData.TIMING_ONLINE_STATE);
|
||||
|
||||
// click offline timing button
|
||||
System.out.println("Click offline timing button");
|
||||
Thread.sleep(500);
|
||||
clickButton(TimingLocator.CLICK_OFFLINE_TIMING_BUTTON);
|
||||
clickElement(TimingLocator.CLICK_OFFLINE_TIMING_BUTTON);
|
||||
|
||||
return ifTitleContains(TimingData.TIMING_TITLE );
|
||||
return ifTextExists(TimingLocator.TIMING_MANAGEMENT_TIMING_STATE, TimingData.TIMING_OFFLINE_STATE);
|
||||
}
|
||||
|
||||
|
||||
|
|
@ -123,12 +130,14 @@ public class TimingPage extends PageCommon {
|
|||
* delete timing
|
||||
*/
|
||||
public boolean deleteTiming() throws InterruptedException {
|
||||
// Determine whether the timing is offline
|
||||
ifTextExists(TimingLocator.TIMING_MANAGEMENT_TIMING_STATE, TimingData.TIMING_OFFLINE_STATE);
|
||||
|
||||
// click offline timing button
|
||||
System.out.println("Click delete timing button");
|
||||
Thread.sleep(500);
|
||||
clickButton(TimingLocator.CLICK_DELETE_TIMING_BUTTON);
|
||||
clickButton(TimingLocator.CLICK_CONFIRM_DELETE_TIMING_BUTTON);
|
||||
|
||||
return ifTitleContains(TimingData.WORKFLOW_TITLE );
|
||||
return ifTextExists(TimingLocator.TIMING_STATE, "-");
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -18,7 +18,9 @@ package org.apache.dolphinscheduler.page.project;
|
|||
|
||||
import org.apache.dolphinscheduler.common.PageCommon;
|
||||
import org.apache.dolphinscheduler.constant.TestConstant;
|
||||
import org.apache.dolphinscheduler.data.project.ProjectData;
|
||||
import org.apache.dolphinscheduler.data.project.WorkflowDefineData;
|
||||
import org.apache.dolphinscheduler.locator.project.ProjectLocator;
|
||||
import org.apache.dolphinscheduler.locator.project.WorkflowDefineLocator;
|
||||
import org.openqa.selenium.WebDriver;
|
||||
|
||||
|
|
@ -32,9 +34,12 @@ public class WorkflowDefinePage extends PageCommon {
|
|||
*/
|
||||
|
||||
public boolean jumpWorkflowPage() throws InterruptedException {
|
||||
ifTextExists(ProjectLocator.LIST_PROJECT_NAME, ProjectData.PROJECT_NAME);
|
||||
|
||||
// click project name
|
||||
clickElement(WorkflowDefineLocator.CLICK_PROJECT_NAME);
|
||||
Thread.sleep(TestConstant.ONE_THOUSAND);
|
||||
|
||||
ifTextExists(WorkflowDefineLocator.CLICK_WORKFLOW_DEFINE,WorkflowDefineData.workflow_define);
|
||||
|
||||
System.out.println("Click on workflow define to jump to workflow define page");
|
||||
// click workflow define
|
||||
|
|
@ -126,7 +131,6 @@ public class WorkflowDefinePage extends PageCommon {
|
|||
|
||||
//click submit button
|
||||
clickElement(WorkflowDefineLocator.CLICK_SUBMIT_BUTTON);
|
||||
Thread.sleep(TestConstant.ONE_THOUSAND);
|
||||
System.out.println("Task node set up successfully");
|
||||
System.out.println("move to Dag Element ");
|
||||
moveToDragElement(WorkflowDefineLocator.MOUSE_MOVE_SHELL_AT_DAG,-300,-100);
|
||||
|
|
@ -185,7 +189,6 @@ public class WorkflowDefinePage extends PageCommon {
|
|||
|
||||
//delete workflow global parameters value
|
||||
clickElement(WorkflowDefineLocator.CLICK_DELETE_WORKFLOW_GLOBAL_PARAMETERS);
|
||||
Thread.sleep(TestConstant.ONE_THOUSAND);
|
||||
|
||||
//click add button
|
||||
System.out.println("submit workflow");
|
||||
|
|
@ -197,6 +200,9 @@ public class WorkflowDefinePage extends PageCommon {
|
|||
public boolean onlineWorkflow() throws InterruptedException {
|
||||
clickElement(WorkflowDefineLocator.CLICK_WORKFLOW_DEFINE);
|
||||
|
||||
// Determine whether the workflow status is offline
|
||||
ifTextExists(WorkflowDefineLocator.WORKFLOW_STATE,WorkflowDefineData.WORKFLOW_OFFLINE_STATE);
|
||||
|
||||
// click online button
|
||||
System.out.println("Click online workflow button");
|
||||
clickButton(WorkflowDefineLocator.CLICK_ONLINE_WORKFLOW_BUTTON);
|
||||
|
|
@ -207,9 +213,11 @@ public class WorkflowDefinePage extends PageCommon {
|
|||
public boolean offlineWorkflow() throws InterruptedException {
|
||||
clickElement(WorkflowDefineLocator.CLICK_WORKFLOW_DEFINE);
|
||||
|
||||
// Determine whether the workflow status is online
|
||||
ifTextExists(WorkflowDefineLocator.WORKFLOW_STATE,WorkflowDefineData.WORKFLOW_ONLINE_STATE);
|
||||
|
||||
// click offline button
|
||||
System.out.println("offline workflow");
|
||||
Thread.sleep(500);
|
||||
clickButton(WorkflowDefineLocator.CLICK_OFFLINE_WORKFLOW_BUTTON);
|
||||
|
||||
return ifTitleContains(WorkflowDefineData.WORKFLOW_TITLE);
|
||||
|
|
@ -218,8 +226,11 @@ public class WorkflowDefinePage extends PageCommon {
|
|||
|
||||
public boolean deleteWorkflow() throws InterruptedException {
|
||||
//click delete workflow
|
||||
Thread.sleep(500);
|
||||
clickElement(WorkflowDefineLocator.CLICK_WORKFLOW_DEFINE);
|
||||
|
||||
// Determine whether the workflow status is offline
|
||||
ifTextExists(WorkflowDefineLocator.WORKFLOW_STATE,WorkflowDefineData.WORKFLOW_OFFLINE_STATE);
|
||||
|
||||
clickButton(WorkflowDefineLocator.DELETE_WORKFLOW_BOTTOM);
|
||||
|
||||
//click confirm delete project
|
||||
|
|
|
|||
|
|
@ -31,22 +31,21 @@ public class AlertManagePage extends PageCommon {
|
|||
}
|
||||
|
||||
/**
|
||||
* createTenant
|
||||
* create alert
|
||||
*
|
||||
* @return Whether to enter the specified page after create tenant
|
||||
*/
|
||||
public boolean createAlert() throws InterruptedException {
|
||||
// click alert manage
|
||||
Thread.sleep(500);
|
||||
System.out.println("start click alert manage button");
|
||||
clickElement(AlertManageLocator.CLICK_ALERT_MANAGE);
|
||||
Thread.sleep(500);
|
||||
|
||||
//determine whether the create alert button exists
|
||||
ifTextExists(AlertManageLocator.CLICK_CREATE_ALERT,AlertManageData.CREATE_ALERT);
|
||||
|
||||
// click create alert button
|
||||
System.out.println("start click create alert button");
|
||||
clickElement(AlertManageLocator.CLICK_CREATE_ALERT);
|
||||
Thread.sleep(500);
|
||||
|
||||
// input alert data
|
||||
System.out.println("start input alert ");
|
||||
sendInput(AlertManageLocator.INPUT_ALERT_NAME, AlertManageData.ALERT_NAME);
|
||||
|
|
@ -61,15 +60,17 @@ public class AlertManagePage extends PageCommon {
|
|||
clickButton(AlertManageLocator.SUBMIT_ALERT);
|
||||
|
||||
// Whether to enter the specified page after submit
|
||||
return ifTitleContains(AlertManageData.ALERT_MANAGE);
|
||||
return ifTextExists(AlertManageLocator.ALERT_NAME, AlertManageData.ALERT_NAME);
|
||||
}
|
||||
|
||||
public boolean deleteAlert() throws InterruptedException {
|
||||
|
||||
// click user manage
|
||||
// click alert manage
|
||||
clickElement(AlertManageLocator.CLICK_ALERT_MANAGE);
|
||||
|
||||
// click delete user button
|
||||
ifTextExists(AlertManageLocator.ALERT_NAME, AlertManageData.ALERT_NAME);
|
||||
|
||||
// click delete alert button
|
||||
clickButton(AlertManageLocator.DELETE_ALERT_BUTTON);
|
||||
|
||||
// click confirm delete button
|
||||
|
|
|
|||
|
|
@ -37,15 +37,15 @@ public class QueueManagePage extends PageCommon {
|
|||
*/
|
||||
public boolean createQueue() throws InterruptedException {
|
||||
// click queue manage
|
||||
Thread.sleep(500);
|
||||
System.out.println("start click queue manage button");
|
||||
clickElement(QueueManageLocator.CLICK_QUEUE_MANAGE);
|
||||
Thread.sleep(500);
|
||||
|
||||
//determine whether the create queue button exists
|
||||
ifTextExists(QueueManageLocator.CLICK_CREATE_QUEUE,QueueManageData.CREATE_QUEUE);
|
||||
|
||||
// click create queue button
|
||||
System.out.println("start click create queue button");
|
||||
clickElement(QueueManageLocator.CLICK_CREATE_QUEUE);
|
||||
Thread.sleep(500);
|
||||
clickButton(QueueManageLocator.CLICK_CREATE_QUEUE);
|
||||
|
||||
// input queue data
|
||||
System.out.println("start input queue");
|
||||
|
|
@ -57,7 +57,7 @@ public class QueueManagePage extends PageCommon {
|
|||
clickButton(QueueManageLocator.SUBMIT_QUEUE);
|
||||
|
||||
// Whether to enter the specified page after submit
|
||||
return ifTitleContains(QueueManageData.QUEUE_MANAGE);
|
||||
return ifTextExists(QueueManageLocator.LIST_QUEUE_NAME, QueueManageData.QUEUE_NAME);
|
||||
}
|
||||
|
||||
|
||||
|
|
@ -68,22 +68,21 @@ public class QueueManagePage extends PageCommon {
|
|||
*/
|
||||
public boolean editQueue() throws InterruptedException {
|
||||
// click queue manage
|
||||
Thread.sleep(1000);
|
||||
clickElement(QueueManageLocator.CLICK_QUEUE_MANAGE);
|
||||
Thread.sleep(1000);
|
||||
|
||||
ifTextExists(QueueManageLocator.LIST_QUEUE_NAME, QueueManageData.QUEUE_NAME);
|
||||
|
||||
// click edit queue button
|
||||
clickElement(QueueManageLocator.CLICK_EDIT_QUEUE);
|
||||
Thread.sleep(1000);
|
||||
clickButton(QueueManageLocator.CLICK_EDIT_QUEUE);
|
||||
|
||||
// input queue data
|
||||
sendInput(QueueManageLocator.INPUT_QUEUE_NAME, QueueManageData.EDIT_QUEUE_NAME);
|
||||
sendInput(QueueManageLocator.INPUT_QUEUE_VALUE, QueueManageData.EDIT_QUEUE_VALUE);
|
||||
clearSendInput(QueueManageLocator.INPUT_QUEUE_NAME, QueueManageData.EDIT_QUEUE_NAME);
|
||||
clearSendInput(QueueManageLocator.INPUT_QUEUE_VALUE, QueueManageData.EDIT_QUEUE_VALUE);
|
||||
|
||||
// click button
|
||||
clickButton(QueueManageLocator.SUBMIT_QUEUE);
|
||||
|
||||
// Whether to enter the specified page after submit
|
||||
return ifTitleContains(QueueManageData.QUEUE_MANAGE);
|
||||
return ifTextExists(QueueManageLocator.LIST_QUEUE_NAME, QueueManageData.EDIT_QUEUE_NAME);
|
||||
}
|
||||
}
|
||||
|
|
|
|||
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue