Optimize workflow visibility after personnel/department changes: add a scheduled job

feature-LeeD-20221025
李栋 2 years ago
parent 9e927a8a8b
commit 0d8e3fc096

@@ -3,5 +3,5 @@ package com.api.signature;
import javax.ws.rs.Path;
@Path("/docSign")
public class GetDocSignatureWebAction extends com.engine.haojing.web.GetDocSignatureWebAction {
public class GetDocSignatureWebAction extends com.engine.signature.GetDocSignatureWebAction {
}

@@ -1,112 +0,0 @@
package com.customization.dito.intercept;
import com.engine.core.cfg.annotation.CommandDynamicProxy;
import com.engine.core.interceptor.AbstractCommandProxy;
import com.engine.core.interceptor.Command;
import com.engine.workflow.cmd.workflowPath.node.operatorSetting.DoSaveOperatorGroupInfoCmd;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import weaver.conn.RecordSet;
import weaver.general.TimeUtil;
import weaver.interfaces.dito.comInfo.PropBean;
import weaver.workflow.request.RequestCheckUser;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
/**
* @author Lee
*/
@CommandDynamicProxy(target = DoSaveOperatorGroupInfoCmd.class, desc = "流程创建权限保存后将有权限人员插入到新表") // desc: after workflow-creation permissions are saved, insert the authorized users into the new table
public class NodeSetCmdIntercept extends AbstractCommandProxy<Map<String, Object>> {
private final Log log = LogFactory.getLog(NodeSetCmdIntercept.class);
@Override
public Map<String, Object> execute(Command<Map<String, Object>> command) {
DoSaveOperatorGroupInfoCmd cmd = (DoSaveOperatorGroupInfoCmd) command;
String workflowIdString = (String) cmd.getParams().get("workflowId");
int workflowId = Integer.parseInt(workflowIdString);
Map<String, Object> result = nextExecute(cmd);
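// Run the proxied DoSaveOperatorGroupInfoCmd first so the operator settings are persisted before the sync below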
try {
// Start a thread to sync users into the intermediate table
new Thread(new Runnable() {
@Override
public void run() {
log.info(workflowId + "-DoSaveOperatorGroupInfoCmd-start-" + TimeUtil.getCurrentTimeString());
String userStatus = PropBean.getUfPropValue("userStatus");
String tableName = PropBean.getUfPropValue("workflowCreatorTableName");
// String userStatus = "0,1,2,3";
// String tableName = "uf_workflowcreator";
RecordSet rs = new RecordSet();
// First query the users already inserted for this workflow
String createrSql = "SELECT userid FROM " + tableName + " where workflowid=" + workflowId;
HashSet<Integer> existUserSet = new HashSet<>();
rs.execute(createrSql);
while (rs.next()) {
existUserSet.add(rs.getInt("userid"));
}
// SQL to query all users
String userSql = "SELECT id FROM HrmResource WHERE status IN (" + userStatus + ")";
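// Assumption: userStatus is a comma-separated list of HrmResource status codes, e.g. "0,1,2,3" (see the commented-out defaults above)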
// SQL to insert into the intermediate table
String insertSql = "INSERT INTO " + tableName + " (workflowid,userid)VALUES(?,?)";
HashSet<Integer> userIdSet = new HashSet<>();
rs.execute(userSql);
while (rs.next()) {
userIdSet.add(rs.getInt("id"));
}
// Set of users who have permission to create this workflow
HashSet<Integer> rightUserSet = new HashSet<>();
RequestCheckUser rcu = new RequestCheckUser();
userIdSet.forEach(userId -> {
rcu.resetParameter();
rcu.setUserid(userId);
rcu.setWorkflowid(workflowId);
rcu.setLogintype("1");
try {
rcu.checkUser();
} catch (Exception e) {
e.printStackTrace();
}
int hasRight = rcu.getHasright();
if (1 == hasRight) {
rightUserSet.add(userId);
}
});
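// rightUserSet now holds every queried user that RequestCheckUser reports as having creation rights for this workflow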
// Intermediate-table users that must be deleted in this run
HashSet<Integer> set = new HashSet<>(existUserSet);
HashSet<Integer> set2 = new HashSet<>(existUserSet);
set.retainAll(rightUserSet);
set2.removeAll(set);
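// set = existing users who still have rights (kept); set2 = existing users who lost rights (deleted below)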
String deleteSql = "delete from " + tableName + " where workflowid=? and userid=?";
for (Integer creator : set2) {
rs.executeUpdate(deleteSql, workflowId, creator);
}
// Remove users that are already in the table to avoid duplicate inserts
rightUserSet.removeAll(existUserSet);
List<List> batchList = new ArrayList<>();
for (Integer creator : rightUserSet) {
List<Integer> list = new ArrayList<>();
list.add(workflowId);
list.add(creator);
batchList.add(list);
}
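// Batch-insert the remaining newly authorized (workflowid, userid) pairs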
rs.executeBatchSql(insertSql, batchList);
log.info(workflowId + "-DoSaveOperatorGroupInfoCmd-end-" + TimeUtil.getCurrentTimeString());
}
}).start();
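// Brief pause before returning (original behavior); presumably gives the sync thread time to start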
Thread.sleep(3000);
} catch (Exception e) {
log.error("DoSaveOperatorGroupInfoCmd-error-" + e.getMessage());
}
return result;
}
}

@@ -1,23 +0,0 @@
package weaver.interfaces.dito.job;
import weaver.general.BaseBean;
import weaver.interfaces.schedule.BaseCronJob;
/**
* @version 1.0
* @Title ecology-9
* @Company
* @CreateDate 2022/11/14
* @Description ${description}
* @Author Lee
*/
public class WorkflowAuthModelingCronJobTest extends BaseCronJob {
@Override
public void execute() {
BaseBean bb = new BaseBean();
bb.writeLog("start WorkflowAuthModelingCronJobTest");
WorkflowAuthModelingUtil workflowAuthModelingUtil = new WorkflowAuthModelingUtil();
workflowAuthModelingUtil.executeData("");
}
}

@@ -1,102 +0,0 @@
package weaver.interfaces.dito.job;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import weaver.conn.RecordSet;
import weaver.general.TimeUtil;
import weaver.interfaces.dito.comInfo.PropBean;
import weaver.interfaces.schedule.BaseCronJob;
import weaver.workflow.request.RequestCheckUser;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
/**
* @version 1.0
* @Title ecology-9
* @Company
* @CreateDate 2023/3/30
* @Description
* @Author Lee
*/
public class WorkflowCreatorsCornJob extends BaseCronJob {
private static final Log log = LogFactory.getLog(WorkflowCreatorsCornJob.class);
@Override
public void execute() {
// Query all workflows from the intermediate table and re-check user permissions
RecordSet rs = new RecordSet();
rs.execute("SELECT DISTINCT workflowselect as workflowId FROM uf_workflow_auth");
HashSet<Integer> workflowIds = new HashSet<>();
while (rs.next()) {
workflowIds.add(rs.getInt("workflowId"));
}
// Query all users, used to check creation permission against each workflow
String userStatus = PropBean.getUfPropValue("userStatus");
String userSql = "SELECT id FROM HrmResource WHERE status IN (" + userStatus + ")";
HashSet<Integer> userIdSet = new HashSet<>();
rs.execute(userSql);
while (rs.next()) {
userIdSet.add(rs.getInt("id"));
}
String tableName = PropBean.getUfPropValue("workflowCreatorTableName");
// SQL to insert into the intermediate table
String insertSql = "INSERT INTO " + tableName + " (workflowid,userid)VALUES(?,?)";
// Re-check user permissions for each workflow
for (int workflowId : workflowIds) {
log.info(workflowId + "-WorkflowCreatorsCornJob-start-" + TimeUtil.getCurrentTimeString());
// First query the users already inserted for this workflow
String createrSql = "SELECT userid FROM " + tableName + " where workflowid=" + workflowId;
HashSet<Integer> existUserSet = new HashSet<>();
rs.execute(createrSql);
while (rs.next()) {
existUserSet.add(rs.getInt("userid"));
}
// Set of users who have permission to create this workflow
HashSet<Integer> rightUserSet = new HashSet<>();
RequestCheckUser rcu = new RequestCheckUser();
userIdSet.forEach(userId -> {
rcu.resetParameter();
rcu.setUserid(userId);
rcu.setWorkflowid(workflowId);
rcu.setLogintype("1");
try {
rcu.checkUser();
} catch (Exception e) {
e.printStackTrace();
}
int hasRight = rcu.getHasright();
if (1 == hasRight) {
rightUserSet.add(userId);
}
});
// Intermediate-table users that must be deleted in this run
HashSet<Integer> set = new HashSet<>(existUserSet);
HashSet<Integer> set2 = new HashSet<>(existUserSet);
set.retainAll(rightUserSet);
set2.removeAll(set);
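// Same reconciliation as in NodeSetCmdIntercept: set2 = users to delete, rightUserSet minus existUserSet = users to insert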
String deleteSql = "delete from " + tableName + " where workflowid=? and userid=?";
for (Integer creator : set2) {
rs.executeUpdate(deleteSql, workflowId, creator);
}
// Remove users that are already in the table to avoid duplicate inserts
rightUserSet.removeAll(existUserSet);
List<List> batchList = new ArrayList<>();
for (Integer creator : rightUserSet) {
List<Integer> list = new ArrayList<>();
list.add(workflowId);
list.add(creator);
batchList.add(list);
}
rs.executeBatchSql(insertSql, batchList);
log.info(workflowId + "-WorkflowCreatorsCornJob-end-" + TimeUtil.getCurrentTimeString());
}
}
}

@@ -10,6 +10,7 @@ import weaver.general.Util;
import weaver.interfaces.dito.comInfo.PropBean;
import weaver.interfaces.dito.util.HttpUtils;
import weaver.interfaces.schedule.BaseCronJob;
import weaver.workflow.request.DevWorkflowCreater;
import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;
@@ -53,7 +54,7 @@ public class WorkflowUserCronJob extends BaseCronJob {
E9idSet
*/
// For each workflow id, query all creator ids and collect them into a Map<String, Set>
// ShareManager shareManager = new ShareManager();
DevWorkflowCreater workflowCreater = new DevWorkflowCreater();
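// getWorkflowCreater(...) below now supplies the creator ids directly, replacing the commented-out workflowCreatorTableName query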
Set<String> baseWorkflowIdSet = new HashSet<>();
Map<String, Set<String>> baseWorkflowMap = new HashMap<>();
Map<String, String> baseWorkflowName = new HashMap<>();
@@ -69,13 +70,14 @@ public class WorkflowUserCronJob extends BaseCronJob {
continue;
}
baseWorkflowIdSet.add(workflowId);
// String userIds = shareManager.getWorkflowCreater(Integer.parseInt(workflowId));
String baseUserSql = "select userid from "+workflowCreatorTableName+ " where workflowid=?";
recordSet.executeQuery(baseUserSql,workflowId);
String userIds = workflowCreater.getWorkflowCreater(Integer.parseInt(workflowId));
// String baseUserSql = "select userid from "+workflowCreatorTableName+ " where workflowid=?";
// recordSet.executeQuery(baseUserSql,workflowId);
Set<String> userIdList = new HashSet<>();
while (recordSet.next()) {
userIdList.add(recordSet.getString("userid"));
}
Collections.addAll(userIdList, userIds.split(","));
// while (recordSet.next()) {
// userIdList.add(recordSet.getString("userid"));
// }
logger.info("workflowname-baseUserListSize-" + userIdList.size());
baseWorkflowMap.put(workflowId, userIdList);
baseWorkflowName.put(workflowId, workflowname);
