@@ -6,12 +6,16 @@ import com.platform.common.util.RedisUtils;
import com.platform.common.util.StringUtils;
import com.platform.dao.entity.remote.RemoteOpc;
import com.platform.dao.entity.remote.RemoteOpcLog;
+import com.platform.dao.enums.YesNoEnum;
import com.platform.dao.mapper.remote.RemoteOpcLogMapper;
import com.platform.dao.mapper.remote.RemoteOpcMapper;
import com.platform.opc.entity.OpcResult;
import com.platform.opc.util.OpcDAClient;
+import org.openscada.opc.lib.da.AddFailedException;
+import org.openscada.opc.lib.da.Item;
import lombok.AllArgsConstructor;
import lombok.extern.slf4j.Slf4j;
+import org.springframework.context.annotation.DependsOn;
import org.springframework.scheduling.annotation.Async;
import org.springframework.scheduling.annotation.EnableAsync;
import org.springframework.scheduling.annotation.EnableScheduling;
@@ -21,25 +25,78 @@ import org.springframework.util.CollectionUtils;
import tk.mybatis.mapper.weekend.Weekend;
import tk.mybatis.mapper.weekend.WeekendCriteria;

+import javax.annotation.PostConstruct;
import java.time.LocalDateTime;
import java.util.ArrayList;
import java.util.List;
+import java.util.Map;
+import java.util.stream.Collectors;

@Service("opcTaskService")
+@DependsOn({"beanUtils", "redisTemplate"})
@AllArgsConstructor
@Slf4j
@EnableScheduling // 1. Enable scheduled tasks
-@EnableAsync
public class OpcTaskService {

    private final RemoteOpcMapper remoteOpcMapper;
    private final RemoteOpcLogMapper remoteOpcLogMapper;
+    /**
+     * Initialize redis and the tag-point groups, and start the polling loop
+     */
+    @PostConstruct
+    public void initClients() {
+        log.info("Initialization started");
+        // Determine whether this is the dev environment or a local environment
+        // Start the groups: group by workshop line and select only the tag points already configured on the OPC server
+        Weekend<RemoteOpc> weekend = new Weekend<>(RemoteOpc.class);
+        WeekendCriteria<RemoteOpc, Object> weekendCriteria = weekend.weekendCriteria();
+        weekendCriteria.andEqualTo(RemoteOpc::getCreatedFlag, YesNoEnum.YES.getValue());
+        List<RemoteOpc> remoteOpcList = remoteOpcMapper.selectByExample(weekend);
+        log.info("remoteOpcList: " + remoteOpcList.size());
+        Map<String, List<RemoteOpc>> listMap = remoteOpcList.stream().collect(Collectors.groupingBy(RemoteOpc::getLine));
+        OpcDAClient.connect();
+        OpcDAClient.findAllItem();
+        List<RemoteOpc> remoteOpcFailList = new ArrayList<>();
+        AddFailedException exception = OpcDAClient.addGroupList(listMap);
+        if (exception != null) {
+            Map<String, Integer> failedItems = exception.getErrors();
+            Map<String, Item> addItems = exception.getItems();
+            if (failedItems != null) { // some items do not exist; update the corresponding tag-point records
+                for (Map.Entry<String, Integer> entry : failedItems.entrySet()) {
+                    RemoteOpc remoteOpc = new RemoteOpc();
+                    // Some tags look like PT_9836_AV, so entry.getKey().split("_")[0] cannot be used; find the last "_" instead
+                    int index = entry.getKey().lastIndexOf("_");
+                    remoteOpc.setPositionNum(entry.getKey());
+                    remoteOpc.setCreatedFlag(0);
+                    remoteOpc.setRemark("Tag point not found on the OPC server. Possible causes: 1) AV/DV configured incorrectly, 2) not configured on the OPC server");
+                    log.error("Tag point not found on the OPC server. key: " + remoteOpc.getPositionNum() + ", value: " + entry.getValue());
+                    remoteOpcFailList.add(remoteOpc);
+                }
+            }
+            if (addItems != null) { // items that were added successfully; update the corresponding tag-point records
+                for (Map.Entry<String, Item> entry : addItems.entrySet()) {
+                    RemoteOpc remoteOpc = new RemoteOpc();
+                    // Some tags look like PT_9836_AV, so entry.getKey().split("_")[0] cannot be used; find the last "_" instead
+                    // int index = entry.getKey().lastIndexOf("_");
+                    remoteOpc.setPositionNum(entry.getKey());
+                    remoteOpc.setCreatedFlag(1);
+                    remoteOpc.setRemark("Configured on the OPC server; AV/DV configuration is correct");
+                    log.error("Already configured on the OPC server. key: " + remoteOpc.getPositionNum() + ", value: " + entry.getValue());
+                    remoteOpcFailList.add(remoteOpc);
+                }
+            }
+        }
+        if (!CollectionUtils.isEmpty(remoteOpcFailList)) {
+            remoteOpcMapper.updateBatch(remoteOpcFailList);
+        }
+    }
+
    /**
     * 1: Fetch the data by group
     *    a: Save it to redis; the front-end page fetches the data from the database in real time, refreshing every 5 seconds
     */
-    @Async
    @Scheduled(fixedDelay = 5000) // every 5 seconds
    public void getValue() {
        log.info("Scheduled task started");
@@ -58,6 +115,9 @@ public class OpcTaskService {
                remoteOpcList.add(remoteOpc);
            }
            remoteOpcMapper.updateBatch(remoteOpcList);
+        } else {
+            log.info("Group initialization failed; restarting the groups");
+            initClients();
        }
        log.info("Scheduled task finished");
    }
@@ -69,7 +129,6 @@ public class OpcTaskService {
     * 1) For each tag point, check whether a record already exists for the current day; if it does, append to it
     * 2) Batch-write to the database, appending each day's data to a single row
     */
-    @Async
    @Scheduled(fixedDelay = 300000) // every 300 seconds: save data to the database every 5 minutes, keeping the daily volume under 7 million records
    public void saveValue() {
        log.info("Start reading redis 1");