@@ -1,17 +1,13 @@
 package com.platform.opc.servie;
-import com.alibaba.fastjson.JSON;
 import com.platform.common.util.RedisUtils;
-import com.platform.common.util.StringUtils;
 import com.platform.dao.entity.remote.RemoteOpc;
 import com.platform.dao.enums.YesNoEnum;
 import com.platform.dao.mapper.remote.RemoteOpcMapper;
-import com.platform.opc.entity.OpcResult;
 import com.platform.opc.util.OpcDAClient;
 import lombok.AllArgsConstructor;
 import lombok.extern.slf4j.Slf4j;
 import org.openscada.opc.lib.da.AddFailedException;
-import org.openscada.opc.lib.da.Group;
 import org.openscada.opc.lib.da.Item;
 import org.springframework.context.annotation.DependsOn;
 import org.springframework.scheduling.annotation.EnableScheduling;
@@ -22,20 +18,17 @@ import tk.mybatis.mapper.weekend.Weekend;
 import tk.mybatis.mapper.weekend.WeekendCriteria;
 
 import javax.annotation.PostConstruct;
-import java.math.BigDecimal;
-import java.time.LocalDateTime;
-import java.util.*;
-import java.util.concurrent.Executors;
-import java.util.concurrent.ScheduledExecutorService;
-import java.util.concurrent.TimeUnit;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
 import java.util.stream.Collectors;
 
-@Service("opcInitService")
+@Service("opcInit")
 @DependsOn({"beanUtils", "redisTemplate"})
 @AllArgsConstructor
 @Slf4j
 @EnableScheduling // 1. Enable scheduled tasks
-public class OpcInitService {
+public class OpcInit {
 
     private final RemoteOpcMapper remoteOpcMapper;
 
@@ -52,77 +45,8 @@ public class OpcInitService {
     @PostConstruct
     public void initAddAllItem() {
         RedisUtils.del(OpcDAClient.redis_ok);
-        RedisUtils.del(OpcDAClient.redis_opc_item_values);
         log.info("Start initializing groups");
         addGroupAndItems(findAllItems(true, null));
-        log.info("Start running group tasks - fetching data");
-        startFetchDataByGroupLine();
-    }
-
-    private void startFetchDataByGroupLine() {
-        // Create a thread pool whose core size equals the number of groups
-        // First check whether grouping has finished
-        String ok = RedisUtils.getString(OpcDAClient.redis_ok);
-        if(StringUtils.isNotBlank(ok)){
-            initAddAllItem();
-            return;
-        }
-        ScheduledExecutorService pool = Executors.newScheduledThreadPool(OpcDAClient.groupList.size());
-        for (int i = 0; i < OpcDAClient.groupList.size(); i++) {
-            Group group = OpcDAClient.groupList.get(i);
-            pool.scheduleAtFixedRate(new TimerTask() {
-                @Override
-                public void run() {
-                    try {
-                        log.info(Thread.currentThread().getName() + "-thread-" + group.getName() + new Date());
-                        saveValue(OpcDAClient.getItemValuesList(group));
-                    } catch (Exception e) {
-                        // Shut down the thread pool
-                        pool.shutdown();
-                        e.printStackTrace();
-                        log.error("Exception while fetching data in batch:", e);
-                    }
-                }
-            }, 1, 2, TimeUnit.SECONDS);
-        }
-        /*// Task 1: delayed 1 second, loops every 5 seconds
-        pool.scheduleAtFixedRate(new TimerTask() {
-            @Override
-            public void run() {
-                System.out.println(Thread.currentThread().getName() + "Thread A" + new Date());
-            }
-        }, 1, 1, TimeUnit.SECONDS);
-
-        // Task 2: delayed 3 seconds, loops every 5 seconds
-        pool.scheduleAtFixedRate(new TimerTask() {
-            @Override
-            public void run() {
-                System.out.println(Thread.currentThread().getName() + "Thread B" + new Date());
-            }
-        }, 1, 1, TimeUnit.SECONDS);*/
-        // Shut down the thread pool
-
-    }
-
-    private void saveValue(List<OpcResult> resultList){
-        if (!CollectionUtils.isEmpty(resultList)) {
-            log.info("Fetched count: " + resultList.size());
-            RedisUtils.setString(OpcDAClient.redis_opc_item_values, JSON.toJSONString(resultList));
-            // Update real-time data in the database
-            List<RemoteOpc> remoteOpcList = new ArrayList<>();
-            LocalDateTime localDateTime = LocalDateTime.now();
-            for (OpcResult result : resultList) {
-                RemoteOpc remoteOpc = new RemoteOpc();
-                remoteOpc.setResult(new BigDecimal(result.getValue()).setScale(2));
-                remoteOpc.setPositionNum(result.getId());
-                remoteOpc.setUpdateTime(localDateTime);
-                remoteOpcList.add(remoteOpc);
-            }
-            remoteOpcMapper.updateBatch(remoteOpcList);
-        } else {
-            log.info("Group initialization failed; waiting for the next grouping restart");
-        }
-
     }
 
     /**