Merge branch 'main' of http://221.226.25.34:3000/shilei/haojing
commit
7db3854537
@@ -0,0 +1,102 @@
package weaver.interfaces.dito.job;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import weaver.conn.RecordSet;
import weaver.general.TimeUtil;
import weaver.interfaces.dito.comInfo.PropBean;
import weaver.interfaces.schedule.BaseCronJob;
import weaver.workflow.request.RequestCheckUser;

import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;

/**
 * @version 1.0
 * @Title ecology-9
 * @Company 泛微软件
 * @CreateDate 2023/3/30
 * @Description Refresh workflow creation permissions after personnel changes
 * @Author Lee
 */
public class WorkflowCreatorsCornJob extends BaseCronJob {

    private static final Log log = LogFactory.getLog(WorkflowCreatorsCornJob.class);

    @Override
    public void execute() {
        // Query all workflows from the intermediate table and re-check user permissions
        RecordSet rs = new RecordSet();
        rs.execute("SELECT DISTINCT workflowselect as workflowId FROM uf_workflow_auth");
        HashSet<Integer> workflowIds = new HashSet<>();
        while (rs.next()) {
            workflowIds.add(rs.getInt("workflowId"));
        }
        // Query all users, to check which of them may create each workflow
        String userStatus = PropBean.getUfPropValue("userStatus");
        String userSql = "SELECT id FROM HrmResource WHERE status IN (" + userStatus + ")";
        HashSet<Integer> userIdSet = new HashSet<>();
        rs.execute(userSql);
        while (rs.next()) {
            userIdSet.add(rs.getInt("id"));
        }

        String tableName = PropBean.getUfPropValue("workflowCreatorTableName");
        // SQL for inserting into the intermediate table
        String insertSql = "INSERT INTO " + tableName + " (workflowid,userid) VALUES (?,?)";

        // Re-check user permissions for each workflow
        for (int workflowId : workflowIds) {
            log.info(workflowId + "-WorkflowCreatorsCornJob-start-" + TimeUtil.getCurrentTimeString());
            // First query the users already inserted for this workflow
            String createrSql = "SELECT userid FROM " + tableName + " where workflowid=" + workflowId;
            HashSet<Integer> existUserSet = new HashSet<>();
            rs.execute(createrSql);
            while (rs.next()) {
                existUserSet.add(rs.getInt("userid"));
            }

            // Set of users who currently have permission to create this workflow
            HashSet<Integer> rightUserSet = new HashSet<>();
            RequestCheckUser rcu = new RequestCheckUser();
            userIdSet.forEach(userId -> {
                rcu.resetParameter();
                rcu.setUserid(userId);
                rcu.setWorkflowid(workflowId);
                rcu.setLogintype("1");
                try {
                    rcu.checkUser();
                } catch (Exception e) {
                    e.printStackTrace();
                }
                int hasRight = rcu.getHasright();
                if (1 == hasRight) {
                    rightUserSet.add(userId);
                }
            });
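            // Illustrative walk-through of the reconciliation below (made-up IDs, not real data):
            // if existUserSet = {1, 2, 3} and rightUserSet = {2, 3, 4}, then
            // set = {2, 3} (still authorized), set2 = {1} (lost permission, so deleted),
            // and after rightUserSet.removeAll(existUserSet) only {4} remains to be batch-inserted.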
            // Users that must be removed from the intermediate table in this run
            HashSet<Integer> set = new HashSet<>(existUserSet);
            HashSet<Integer> set2 = new HashSet<>(existUserSet);
            set.retainAll(rightUserSet);
            set2.removeAll(set);
            String deleteSql = "delete from " + tableName + " where workflowid=? and userid=?";
            for (Integer creator : set2) {
                rs.executeUpdate(deleteSql, workflowId, creator);
            }

            // Remove users that are already present, to avoid duplicate inserts
            rightUserSet.removeAll(existUserSet);
            List<List> batchList = new ArrayList<>();
            for (Integer creator : rightUserSet) {
                List<Integer> list = new ArrayList<>();
                list.add(workflowId);
                list.add(creator);
                batchList.add(list);
            }
            rs.executeBatchSql(insertSql, batchList);
            log.info(workflowId + "-WorkflowCreatorsCornJob-end-" + TimeUtil.getCurrentTimeString());
        }
    }
}