diff --git a/src/com/api/signature/GetDocSignatureWebAction.java b/src/com/api/signature/GetDocSignatureWebAction.java
index 7dc462b0..73341068 100644
--- a/src/com/api/signature/GetDocSignatureWebAction.java
+++ b/src/com/api/signature/GetDocSignatureWebAction.java
@@ -3,5 +3,5 @@ package com.api.signature;
 import javax.ws.rs.Path;
 
 @Path("/docSign")
-public class GetDocSignatureWebAction extends com.engine.haojing.web.GetDocSignatureWebAction {
+public class GetDocSignatureWebAction extends com.engine.signature.GetDocSignatureWebAction {
 }
diff --git a/src/com/customization/dito/intercept/NodeSetCmdIntercept.java b/src/com/customization/dito/intercept/NodeSetCmdIntercept.java
deleted file mode 100644
index 8b666e85..00000000
--- a/src/com/customization/dito/intercept/NodeSetCmdIntercept.java
+++ /dev/null
@@ -1,112 +0,0 @@
-package com.customization.dito.intercept;
-
-import com.engine.core.cfg.annotation.CommandDynamicProxy;
-import com.engine.core.interceptor.AbstractCommandProxy;
-import com.engine.core.interceptor.Command;
-import com.engine.workflow.cmd.workflowPath.node.operatorSetting.DoSaveOperatorGroupInfoCmd;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import weaver.conn.RecordSet;
-import weaver.general.TimeUtil;
-import weaver.interfaces.dito.comInfo.PropBean;
-import weaver.workflow.request.RequestCheckUser;
-
-import java.util.ArrayList;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-
-
-/**
- * @author Lee
- */
-@CommandDynamicProxy(target = DoSaveOperatorGroupInfoCmd.class, desc = "流程创建权限保存后将有权限人员插入到新表")
-public class NodeSetCmdIntercept extends AbstractCommandProxy<Map<String, Object>> {
-    private final Log log = LogFactory.getLog(NodeSetCmdIntercept.class);
-
-    @Override
-    public Map<String, Object> execute(Command<Map<String, Object>> command) {
-        DoSaveOperatorGroupInfoCmd cmd = (DoSaveOperatorGroupInfoCmd) command;
-        String workflowIdString = (String) cmd.getParams().get("workflowId");
-        int workflowId = Integer.parseInt(workflowIdString);
-        Map<String, Object> result = nextExecute(cmd);
-        try {
-            // Start a thread that syncs users into the intermediate table
-            new Thread(new Runnable() {
-                @Override
-                public void run() {
-                    log.info(workflowId + "-DoSaveOperatorGroupInfoCmd-start-" + TimeUtil.getCurrentTimeString());
-
-                    String userStatus = PropBean.getUfPropValue("userStatus");
-                    String tableName = PropBean.getUfPropValue("workflowCreatorTableName");
-//                    String userStatus = "0,1,2,3";
-//                    String tableName = "uf_workflowcreator";
-
-                    RecordSet rs = new RecordSet();
-                    // First query the users already inserted for this workflow
-                    String createrSql = "SELECT userid FROM " + tableName + " where workflowid=" + workflowId;
-                    HashSet<Integer> existUserSet = new HashSet<>();
-                    rs.execute(createrSql);
-                    while (rs.next()) {
-                        existUserSet.add(rs.getInt("userid"));
-                    }
-
-                    // SQL that selects all users
-                    String userSql = "SELECT id FROM HrmResource WHERE status IN (" + userStatus + ")";
-                    // SQL that inserts into the intermediate table
-                    String insertSql = "INSERT INTO " + tableName + " (workflowid,userid)VALUES(?,?)";
-
-                    HashSet<Integer> userIdSet = new HashSet<>();
-                    rs.execute(userSql);
-                    while (rs.next()) {
-                        userIdSet.add(rs.getInt("id"));
-                    }
-                    // Users who have permission to create this workflow
-                    HashSet<Integer> rightUserSet = new HashSet<>();
-                    RequestCheckUser rcu = new RequestCheckUser();
-                    userIdSet.forEach(userId -> {
-                        rcu.resetParameter();
-                        rcu.setUserid(userId);
-                        rcu.setWorkflowid(workflowId);
-                        rcu.setLogintype("1");
-                        try {
-                            rcu.checkUser();
-                        } catch (Exception e) {
-                            e.printStackTrace();
-                        }
-                        int hasRight = rcu.getHasright();
-                        if (1 == hasRight) {
-                            rightUserSet.add(userId);
-                        }
-                    });
-                    // Users to be removed from the intermediate table in this round
-                    HashSet<Integer> set = new HashSet<>(existUserSet);
-                    HashSet<Integer> set2 = new HashSet<>(existUserSet);
-                    set.retainAll(rightUserSet);
-                    set2.removeAll(set);
-                    String deleteSql = "delete from " + tableName + " where workflowid=? and userid=?";
-                    for (Integer creator : set2) {
-                        rs.executeUpdate(deleteSql, workflowId, creator);
-                    }
-
-                    // Remove users that are already inserted, to avoid duplicate inserts
-                    rightUserSet.removeAll(existUserSet);
-                    List<List<Object>> batchList = new ArrayList<>();
-                    for (Integer creator : rightUserSet) {
-                        List<Object> list = new ArrayList<>();
-                        list.add(workflowId);
-                        list.add(creator);
-                        batchList.add(list);
-                    }
-                    rs.executeBatchSql(insertSql, batchList);
-                    log.info(workflowId + "-DoSaveOperatorGroupInfoCmd-end-" + TimeUtil.getCurrentTimeString());
-                }
-            }).start();
-
-            Thread.sleep(3000);
-        } catch (Exception e) {
-            log.error("DoSaveOperatorGroupInfoCmd-error-" + e.getMessage());
-        }
-        return result;
-    }
-}
diff --git a/src/weaver/interfaces/dito/job/WorkflowAuthModelingCronJobTest.java b/src/weaver/interfaces/dito/job/WorkflowAuthModelingCronJobTest.java
deleted file mode 100644
index 9932d0d1..00000000
--- a/src/weaver/interfaces/dito/job/WorkflowAuthModelingCronJobTest.java
+++ /dev/null
@@ -1,23 +0,0 @@
-package weaver.interfaces.dito.job;
-
-import weaver.general.BaseBean;
-import weaver.interfaces.schedule.BaseCronJob;
-
-
-/**
- * @version 1.0
- * @Title ecology-9
- * @Company 泛微软件
- * @CreateDate 2022/11/14
- * @Description ${description}
- * @Author Lee
- */
-public class WorkflowAuthModelingCronJobTest extends BaseCronJob {
-    @Override
-    public void execute() {
-        BaseBean bb = new BaseBean();
-        bb.writeLog("start WorkflowAuthModelingCronJobTest");
-        WorkflowAuthModelingUtil workflowAuthModelingUtil = new WorkflowAuthModelingUtil();
-        workflowAuthModelingUtil.executeData("");
-    }
-}
diff --git a/src/weaver/interfaces/dito/job/WorkflowCreatorsCornJob.java b/src/weaver/interfaces/dito/job/WorkflowCreatorsCornJob.java
deleted file mode 100644
index 8419b11e..00000000
--- a/src/weaver/interfaces/dito/job/WorkflowCreatorsCornJob.java
+++ /dev/null
@@ -1,102 +0,0 @@
-package weaver.interfaces.dito.job;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import weaver.conn.RecordSet;
-import weaver.general.TimeUtil;
-import weaver.interfaces.dito.comInfo.PropBean;
-import weaver.interfaces.schedule.BaseCronJob;
-import weaver.workflow.request.RequestCheckUser;
-
-import java.util.ArrayList;
-import java.util.HashSet;
-import java.util.List;
-
-/**
- * @version 1.0
- * @Title ecology-9
- * @Company 泛微软件
- * @CreateDate 2023/3/30
- * @Description Refresh workflow creation permissions after personnel changes
- * @Author Lee
- */
-public class WorkflowCreatorsCornJob extends BaseCronJob {
-    private static final Log log = LogFactory.getLog(WorkflowCreatorsCornJob.class);
-
-    @Override
-    public void execute() {
-        // Query all workflows from the intermediate table and re-verify user permissions
-        RecordSet rs = new RecordSet();
-        rs.execute("SELECT DISTINCT workflowselect as workflowId FROM uf_workflow_auth");
-        HashSet<Integer> workflowIds = new HashSet<>();
-        while (rs.next()) {
-            workflowIds.add(rs.getInt("workflowId"));
-        }
-        // Query all users, used to check which of them may create each workflow
-        String userStatus = PropBean.getUfPropValue("userStatus");
-        String userSql = "SELECT id FROM HrmResource WHERE status IN (" + userStatus + ")";
-        HashSet<Integer> userIdSet = new HashSet<>();
-        rs.execute(userSql);
-        while (rs.next()) {
-            userIdSet.add(rs.getInt("id"));
-        }
-
-
-        String tableName = PropBean.getUfPropValue("workflowCreatorTableName");
-        // SQL that inserts into the intermediate table
-        String insertSql = "INSERT INTO " + tableName + " (workflowid,userid)VALUES(?,?)";
-
-        // Re-check user permissions for every workflow
-        for (int workflowId : workflowIds) {
"-WorkflowCreatorsCornJob-start-" + TimeUtil.getCurrentTimeString()); - //先查出该流程已经插入的人员 - String createrSql = "SELECT userid FROM " + tableName + " where workflowid=" + workflowId; - HashSet existUserSet = new HashSet<>(); - rs.execute(createrSql); - while (rs.next()) { - existUserSet.add(rs.getInt("userid")); - } - - //有权限创建该流程的人员集合 - HashSet rightUserSet = new HashSet<>(); - RequestCheckUser rcu = new RequestCheckUser(); - userIdSet.forEach(userId -> { - rcu.resetParameter(); - rcu.setUserid(userId); - rcu.setWorkflowid(workflowId); - rcu.setLogintype("1"); - try { - rcu.checkUser(); - } catch (Exception e) { - e.printStackTrace(); - } - int hasRight = rcu.getHasright(); - if (1 == hasRight) { - rightUserSet.add(userId); - } - }); - //本次需要删除的中间表人员 - HashSet set = new HashSet<>(existUserSet); - HashSet set2 = new HashSet<>(existUserSet); - set.retainAll(rightUserSet); - set2.removeAll(set); - String deleteSql = "delete from " + tableName + " where workflowid=? and userid=?"; - for (Integer creator : set2) { - rs.executeUpdate(deleteSql, workflowId, creator); - } - - //集合移除已经插入的人员,避免重复插入 - rightUserSet.removeAll(existUserSet); - List batchList = new ArrayList<>(); - for (Integer creator : rightUserSet) { - List list = new ArrayList<>(); - list.add(workflowId); - list.add(creator); - batchList.add(list); - } - rs.executeBatchSql(insertSql, batchList); - log.info(workflowId + "-WorkflowCreatorsCornJob-end-" + TimeUtil.getCurrentTimeString()); - } - } - -} diff --git a/src/weaver/interfaces/dito/job/WorkflowUserCronJob.java b/src/weaver/interfaces/dito/job/WorkflowUserCronJob.java index b2426e8b..ac4a7702 100644 --- a/src/weaver/interfaces/dito/job/WorkflowUserCronJob.java +++ b/src/weaver/interfaces/dito/job/WorkflowUserCronJob.java @@ -10,6 +10,7 @@ import weaver.general.Util; import weaver.interfaces.dito.comInfo.PropBean; import weaver.interfaces.dito.util.HttpUtils; import weaver.interfaces.schedule.BaseCronJob; +import weaver.workflow.request.DevWorkflowCreater; import java.io.UnsupportedEncodingException; import java.net.URLEncoder; @@ -53,7 +54,7 @@ public class WorkflowUserCronJob extends BaseCronJob { 查询出所有E9有效流程id,Set */ //根据流程id查询出所有创建人id,封装成Map -// ShareManager shareManager = new ShareManager(); + DevWorkflowCreater workflowCreater = new DevWorkflowCreater(); Set baseWorkflowIdSet = new HashSet<>(); Map> baseWorkflowMap = new HashMap<>(); Map baseWorkflowName = new HashMap<>(); @@ -69,13 +70,14 @@ public class WorkflowUserCronJob extends BaseCronJob { continue; } baseWorkflowIdSet.add(workflowId); -// String userIds = shareManager.getWorkflowCreater(Integer.parseInt(workflowId)); - String baseUserSql = "select userid from "+workflowCreatorTableName+ " where workflowid=?"; - recordSet.executeQuery(baseUserSql,workflowId); + String userIds = workflowCreater.getWorkflowCreater(Integer.parseInt(workflowId)); +// String baseUserSql = "select userid from "+workflowCreatorTableName+ " where workflowid=?"; +// recordSet.executeQuery(baseUserSql,workflowId); Set userIdList = new HashSet<>(); - while (recordSet.next()) { - userIdList.add(recordSet.getString("userid")); - } + Collections.addAll(userIdList, userIds.split(",")); +// while (recordSet.next()) { +// userIdList.add(recordSet.getString("userid")); +// } logger.info("workflowname-baseUserListSize-" + userIdList.size()); baseWorkflowMap.put(workflowId, userIdList); baseWorkflowName.put(workflowId, workflowname);