
Merge branch 'demo_' of http://124.71.196.186:8888/hfxc226/hitch-manage into demo_

guarantee-lsq · 2 years ago
parent · commit 2fc0825421

+ 1 - 1
platform-dao/src/main/java/com/platform/dao/dto/sb/SbPositionDTO.java

@@ -53,7 +53,7 @@ public class SbPositionDTO extends BaseDTO implements Serializable {
     private String id;

     /**
-     * Code
+     * Code: tag number
      */
     private String no;


+ 1 - 2
platform-dao/src/main/resources/application-daoDev.yml

@@ -142,5 +142,4 @@ quartz:
 logging:
   level:
     com.platform.dao.mapper: DEBUG
-    org.activiti: ERROR
-    org.activiti.engine.impl.persistence.entity: DEBUG
+    org.activiti: DEBUG

+ 9 - 5
platform-dao/src/main/resources/application-daoProd.yml

@@ -3,7 +3,7 @@ spring:
     druid:
       master:
         driver-class-name: com.mysql.cj.jdbc.Driver
-        url: jdbc:mysql://192.168.16.222:3306/hitch-sb?useUnicode=true&characterEncoding=utf8&zeroDateTimeBehavior=convertToNull&useSSL=true&serverTimezone=GMT%2B8&autoReconnect=true&allowMultiQueries=true&removeAbandoned=true&removeAbandonedTimeout=60&logAbandoned=true
+        url: jdbc:mysql://192.168.16.222:3306/hitch-sb?useUnicode=true&characterEncoding=utf8&zeroDateTimeBehavior=convertToNull&useSSL=false&serverTimezone=GMT%2B8&autoReconnect=true&allowMultiQueries=true&removeAbandoned=true&removeAbandonedTimeout=60&logAbandoned=true
         username: root
         password: mydm888
         filters: wall,stat
@@ -32,8 +32,9 @@ spring:
         validation-query: SELECT 'x'
       slave:
         driver-class-name: com.mysql.cj.jdbc.Driver
-        url: jdbc:mysql://localhost:3306/hitch-sb?useUnicode=true&characterEncoding=utf8&zeroDateTimeBehavior=convertToNull&useSSL=true&serverTimezone=GMT%2B8&autoReconnect=true&allowMultiQueries=true
+        url: jdbc:mysql://localhost:3306/hitch-sb?useUnicode=true&characterEncoding=utf8&zeroDateTimeBehavior=convertToNull&useSSL=false&serverTimezone=GMT%2B8&autoReconnect=true&allowMultiQueries=true
         username: root
+        #password: mysql?123!@MYSQL
         password: mydm888
         filters: wall,stat
         filter:
@@ -88,6 +89,9 @@ spring:
         # interval between checks for idle connections that need to be closed, in milliseconds
         time-between-eviction-runs-millis: 60000
         validation-query: SELECT 'x'
+        stat-view-servlet:
+          login-password: platform8888
+          login-username: root
       stat-view-servlet:
         login-password: platform8888
         login-username: root
@@ -97,6 +101,7 @@ spring:
         temp:
           use_jdbc_metadata_defaults: false
         dialect: org.hibernate.dialect.MySQLDialect
+
 # mybatis (tk.mybatis) configuration
 mybatis:
   mapper-locations: classpath:mapper/**/*.xml
@@ -131,6 +136,5 @@ quartz:

 logging:
   level:
-    com.platform.dao.mapper: DEBUG
-    org.activiti: ERROR
-    org.activiti.engine.impl.persistence.entity: DEBUG
+    com.platform: INFO
+    org.activiti: INFO

+ 2 - 3
platform-dao/src/main/resources/application-daoTest.yml

@@ -136,6 +136,5 @@ quartz:

 logging:
   level:
-    com.platform.dao.mapper: DEBUG
-    org.activiti: ERROR
-    org.activiti.engine.impl.persistence.entity: DEBUG
+    com.platform: INFO
+    org.activiti: INFO

+ 1 - 1
platform-dao/src/main/resources/mapper/sb/SbPositionMapper.xml

@@ -26,7 +26,7 @@
             position.no like concat('%',#{keyword},'%'))
         </if>
         <if test="parentId != null and parentId != ''">
-            and ( position.parent_id = #{parentId} or position.id = #{parentId})
+            and position.parent_id = #{parentId}
         </if>
         <if test="parentCode != null and parentCode != ''">
             and position.code like concat(#{parentCode},'%')

+ 4 - 1
platform-opc/src/main/java/com/platform/opc/OpcApplication.java

@@ -1,16 +1,18 @@
 package com.platform.opc;

 import com.github.pagehelper.autoconfigure.PageHelperAutoConfiguration;
-import com.platform.opc.util.OpcDAClient;
 import lombok.extern.slf4j.Slf4j;
 import org.springframework.boot.CommandLineRunner;
 import org.springframework.boot.SpringApplication;
 import org.springframework.boot.autoconfigure.SpringBootApplication;
 import org.springframework.boot.autoconfigure.jdbc.DataSourceAutoConfiguration;
+import org.springframework.scheduling.annotation.EnableAsync;

 import java.util.TimeZone;

 /**
+ *
+ * Do not start quartz here: avoids conflicts with the device scheduled tasks
  * @Description
  * @Author chenli
  * @Date 2019/7/22
@@ -19,6 +21,7 @@ import java.util.TimeZone;
 @SpringBootApplication(scanBasePackages = {"com.platform.common","com.platform.dao", "com.platform.opc"}, exclude = {
         DataSourceAutoConfiguration.class, PageHelperAutoConfiguration.class})
 @Slf4j
+@EnableAsync
 public class OpcApplication implements CommandLineRunner {

     public static void main(String[] args) {

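The newly added @EnableAsync lets OpcTask hand each group read over to OpcService's @Async methods. Below is a minimal sketch of an explicitly bounded executor that could accompany this so the 2-second polling schedule cannot pile up threads if a group read stalls; the class name, bean, and pool sizes are assumptions and are not part of this commit.

// Hypothetical configuration, not part of this commit: registers a bounded pool
// for the @Async OPC reads. Names and sizes are assumptions.
package com.platform.opc;

import java.util.concurrent.Executor;

import org.springframework.context.annotation.Configuration;
import org.springframework.scheduling.annotation.AsyncConfigurer;
import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor;

@Configuration
public class OpcAsyncConfig implements AsyncConfigurer {

    @Override
    public Executor getAsyncExecutor() {
        ThreadPoolTaskExecutor executor = new ThreadPoolTaskExecutor();
        executor.setCorePoolSize(4);        // assumption: a handful of OPC groups read in parallel
        executor.setMaxPoolSize(8);
        executor.setQueueCapacity(100);     // queue further reads instead of spawning more threads
        executor.setThreadNamePrefix("opc-async-");
        executor.initialize();
        return executor;
    }
}

With such a bean in place, the getValue(Group) calls dispatched every 2 seconds by OpcTask would run on the opc-async- pool rather than whatever executor Spring selects by default.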
+ 7 - 24
platform-opc/src/main/java/com/platform/opc/servie/OpcInitService.java → platform-opc/src/main/java/com/platform/opc/servie/OpcInit.java

@@ -23,12 +23,12 @@ import java.util.List;
 import java.util.Map;
 import java.util.stream.Collectors;

-@Service("opcInitService")
+@Service("opcInit")
 @DependsOn({"beanUtils", "redisTemplate"})
 @AllArgsConstructor
 @Slf4j
 @EnableScheduling   // 1. enable scheduled tasks
-public class OpcInitService {
+public class OpcInit {

     private final RemoteOpcMapper remoteOpcMapper;

@@ -44,27 +44,11 @@ public class OpcInitService {
      */
     @PostConstruct
     public void initAddAllItem() {
-        RedisUtils.del(OpcDAClient.redis_opc_item_values);
+        RedisUtils.del(OpcDAClient.redis_ok);
         log.info("开始初始化分组");
         addGroupAndItems(findAllItems(true, null));
     }

-    /**
-     * 1: add newly created points to their groups
-     * a: the point was added earlier but not yet configured on the server, and the server has just been configured (set createdFlag = 1)
-     * b: a newly added point that is also configured on the server, createdFlag = 1
-     * c: a newly added point with createdFlag = 1, but the server turned out not to have it when it was added to the group; in that case update createdFlag = 0
-     * d: a point previously assigned to line 1 has been moved to line 2; it must be re-added to the new group and removed from the old one, otherwise both groups receive its data
-     * <p>
-     * Note: while reading, a key is written to mark that a read is in progress; points are not added during that time
-     */
-    /*@Scheduled(fixedDelay = 300000)  // every 5 minutes (300 s): sync the newly added points
-    public void addItems() {
-        RedisUtils.setString(OpcDAClient.redis_opc_update_flag, "1");
-        addGroupAndItems(findAllItems(true, null));
-        RedisUtils.del(OpcDAClient.redis_opc_update_flag);
-    }*/
-
     /**
      * Add a point immediately: after adding a point, click the "apply now" button; the addItems() method above must not run during that time
      * Cancel a point immediately: on the point, click the "stop collecting" button
@@ -72,14 +56,13 @@ public class OpcInitService {
      */
     @Scheduled(fixedDelay = 10000)
     public void addAndDelItems() {
-        log.info("开始更新分组和点位数据:");
         RedisUtils.setString(OpcDAClient.redis_opc_update_flag, "1");
-
         List<String> list = RedisUtils.getList(OpcDAClient.redis_opc_wait_add_list, 0, -1);
         if (!CollectionUtils.isEmpty(list)) {
             List<String> uniqueStr = list.stream().distinct().collect(Collectors.toList());
             if (!CollectionUtils.isEmpty(uniqueStr)) {
                 addGroupAndItems(findAllItems(false, uniqueStr));
+                log.info("开始添加新点位:");
             }
         }
         RedisUtils.del(OpcDAClient.redis_opc_wait_add_list);
@@ -91,13 +74,12 @@ public class OpcInitService {
                 List<RemoteOpc> uniqueStr = remoteOpcList.stream().distinct().collect(Collectors.toList());
                 if (!CollectionUtils.isEmpty(uniqueStr)) {
                     OpcDAClient.removeItems(remoteOpcList.stream().collect(Collectors.groupingBy(RemoteOpc::getLine)));
+                    log.info("开始移除删除点位:");
                 }
             }
         }
         RedisUtils.del(OpcDAClient.redis_opc_wait_remove_list);
-
         RedisUtils.del(OpcDAClient.redis_opc_update_flag);
-        log.info("结束更新分组和点位数据:");
     }

     /**
@@ -137,6 +119,7 @@ public class OpcInitService {
         List<RemoteOpc> remoteOpcFailList = new ArrayList<>();
         AddFailedException exception = OpcDAClient.addGroupList(listMap);
         if (exception != null) {
+            RedisUtils.setString(OpcDAClient.redis_ok, "0");
             Map<String, Integer> failedItems = exception.getErrors();
             Map<String, Item> addItems = exception.getItems();
             if (failedItems != null) {// some items do not exist; the corresponding point records must be updated
@@ -157,7 +140,7 @@ public class OpcInitService {
                     remoteOpc.setPositionNum(entry.getKey());
                     remoteOpc.setCreatedFlag(1);
                     remoteOpc.setRemark("opc server已配置,AV/DV配置正确");
-                    log.error("opc server已配置。key: " + remoteOpc.getPositionNum() + ", value: " + entry.getValue());
+                    // log.error("opc server已配置。key: " + remoteOpc.getPositionNum() + ", value: " + entry.getValue());
                     remoteOpcFailList.add(remoteOpc);
                 }
             }

+ 105 - 0
platform-opc/src/main/java/com/platform/opc/servie/OpcService.java

@@ -0,0 +1,105 @@
+package com.platform.opc.servie;
+
+import com.alibaba.fastjson.JSON;
+import com.platform.common.util.DateUtils;
+import com.platform.common.util.RedisUtils;
+import com.platform.common.util.StringUtils;
+import com.platform.dao.entity.remote.RemoteOpcLog;
+import com.platform.dao.mapper.remote.RemoteOpcLogMapper;
+import com.platform.dao.mapper.remote.RemoteOpcMapper;
+import com.platform.opc.entity.OpcResult;
+import com.platform.opc.util.OpcDAClient;
+import lombok.AllArgsConstructor;
+import lombok.extern.slf4j.Slf4j;
+import org.jinterop.dcom.common.JIException;
+import org.openscada.opc.lib.da.Group;
+import org.springframework.scheduling.annotation.Async;
+import org.springframework.scheduling.annotation.EnableScheduling;
+import org.springframework.scheduling.annotation.Scheduled;
+import org.springframework.stereotype.Service;
+import org.springframework.util.CollectionUtils;
+import tk.mybatis.mapper.weekend.Weekend;
+import tk.mybatis.mapper.weekend.WeekendCriteria;
+
+import java.math.BigDecimal;
+import java.time.LocalDateTime;
+import java.util.ArrayList;
+import java.util.Date;
+import java.util.List;
+import java.util.concurrent.Executors;
+import java.util.concurrent.ScheduledExecutorService;
+import java.util.stream.Collectors;
+
+@Service("opcService")
+@AllArgsConstructor
+@Slf4j
+public class OpcService {
+
+    private final RemoteOpcLogMapper remoteOpcLogMapper;
+
+    /**
+     * 1: fetch data per group
+     * a: save to redis; the front-end page reads the data in real time from the database, refreshing every 5 seconds
+     */
+    @Async
+    public void getValue(Group group) throws JIException {
+        log.info("拉取-" + group.getName());
+        log.info(Thread.currentThread().getName() + "-线程-" + group.getName() + new Date());
+        List<OpcResult> resultList = OpcDAClient.getItemValuesList(group);
+        RedisUtils.setString("opc-id-" + group.getName(), JSON.toJSONString(resultList));
+        log.info("结束拉取" + group.getName());
+    }
+
+    /**
+     *
+     */
+    @Async
+    public void saveValue(String id) {
+        log.info("保存-" + id);
+        String jsonStr = RedisUtils.getString(id);
+        if (StringUtils.isNotBlank(jsonStr)) {
+            List<OpcResult> resultList = JSON.parseArray(jsonStr, OpcResult.class);
+            List<RemoteOpcLog> addOpcLogList = new ArrayList<>();
+            List<RemoteOpcLog> updateRemoteOpcLogList = new ArrayList<>();
+            Weekend<RemoteOpcLog> weekend = new Weekend<>(RemoteOpcLog.class);
+            WeekendCriteria<RemoteOpcLog, Object> weekendCriteria = weekend.weekendCriteria();
+            // check whether a record already exists for the current day: append to it if so, otherwise insert
+            LocalDateTime time = LocalDateTime.now();
+            weekendCriteria.andIn(RemoteOpcLog::getPositionNum, resultList.stream().map(OpcResult::getId).collect(Collectors.toList()));
+            weekendCriteria.andEqualTo(RemoteOpcLog::getYear, time.getYear());
+            weekendCriteria.andEqualTo(RemoteOpcLog::getMonth, time.getMonthValue());
+            weekendCriteria.andEqualTo(RemoteOpcLog::getDay, time.getDayOfMonth());
+            List<RemoteOpcLog> checkList = remoteOpcLogMapper.selectByExample(weekend);
+            for (OpcResult result : resultList) {
+                RemoteOpcLog remoteOpcLog = new RemoteOpcLog();
+                remoteOpcLog.setPositionNum(result.getId());
+                remoteOpcLog.setResult(new BigDecimal(result.getValue()));
+                LocalDateTime localDateTime = DateUtils.strToLocalDateTime(result.getTime(), DateUtils.PATTERN_YMD_HMS);
+                remoteOpcLog.setCreatedTime(localDateTime);
+                remoteOpcLog.setYear(localDateTime.getYear());
+                remoteOpcLog.setMonth(localDateTime.getMonthValue());
+                remoteOpcLog.setDay(localDateTime.getDayOfMonth());
+                // remoteOpcLog.setHour(localDateTime.getHour());
+                // remoteOpcLog.setMinute(localDateTime.getMinute());
+                remoteOpcLog.setRemark(result.getTime().split(" ")[1] + "," + result.getValue() + ";");
+                List<RemoteOpcLog> findItemList = checkList.stream().filter(remoteOpcLog1 -> remoteOpcLog1.getPositionNum().equals(result.getId())).collect(Collectors.toList());
+                if (!CollectionUtils.isEmpty(findItemList) && findItemList.size() > 0) {
+                    updateRemoteOpcLogList.add(remoteOpcLog);
+                } else {
+                    addOpcLogList.add(remoteOpcLog);
+                }
+            }
+            if (!CollectionUtils.isEmpty(updateRemoteOpcLogList)) {
+                remoteOpcLogMapper.updateBatch(updateRemoteOpcLogList);
+                log.info("更新数量:" + resultList.size());
+            }
+            if (!CollectionUtils.isEmpty(addOpcLogList)) {
+                remoteOpcLogMapper.insertListforComplex(addOpcLogList);
+                log.info("写入数量:" + resultList.size());
+            }
+            log.info("写入/更新数据库数量:" + resultList.size());
+        }
+        log.info("结束保存-" + id);
+    }
+
+}

+ 59 - 0
platform-opc/src/main/java/com/platform/opc/servie/OpcTask.java

@@ -0,0 +1,59 @@
+package com.platform.opc.servie;
+
+import com.platform.common.util.RedisUtils;
+import com.platform.common.util.StringUtils;
+import com.platform.dao.mapper.remote.RemoteOpcLogMapper;
+import com.platform.opc.util.OpcDAClient;
+import lombok.AllArgsConstructor;
+import lombok.extern.slf4j.Slf4j;
+import org.jinterop.dcom.common.JIException;
+import org.openscada.opc.lib.da.Group;
+import org.springframework.scheduling.annotation.EnableScheduling;
+import org.springframework.scheduling.annotation.Scheduled;
+import org.springframework.stereotype.Service;
+
+import java.util.concurrent.Executors;
+import java.util.concurrent.ScheduledExecutorService;
+
+@Service("opcTask")
+@AllArgsConstructor
+@Slf4j
+@EnableScheduling   // 1. enable scheduled tasks
+public class OpcTask {
+
+    private final OpcService opcService;
+
+    /**
+     * 1: fetch data per group
+     * a: save to redis; the front-end page reads the data in real time from the database, refreshing every 5 seconds
+     */
+    @Scheduled(fixedDelay = 2000)  // every 2 seconds
+    public void getValue() throws JIException {
+        String key = RedisUtils.getString(OpcDAClient.redis_opc_update_flag);
+        if(StringUtils.isBlank(key)){
+            for (int i = 0; i < OpcDAClient.groupList.size(); i++) {
+                Group group = OpcDAClient.groupList.get(i);
+                opcService.getValue(group);
+            }
+        }
+    }
+
+    /**
+     * 1: persist the collected data
+     * b: start a new thread and write the queue to the database, run every 2 minutes
+     * c: each point keeps one record per day
+     * 1) loop over the points and check whether a record exists for the current day; if so, append to it
+     * 2) batch-write to the database; each day's data is appended to a single record
+     */
+    @Scheduled(fixedDelay = 300000)  // every 300 s: persist to the database every 5 minutes, keeping the daily volume under 7 million records
+    public void saveValue() throws JIException {
+        String key = RedisUtils.getString(OpcDAClient.redis_opc_update_flag);
+        if(StringUtils.isBlank(key)){
+            for (int i = 0; i < OpcDAClient.groupList.size(); i++) {
+                Group group = OpcDAClient.groupList.get(i);
+                opcService.saveValue(group.getName());
+            }
+        }
+    }
+
+}

+ 0 - 128
platform-opc/src/main/java/com/platform/opc/servie/OpcTaskService.java

@@ -1,128 +0,0 @@
-package com.platform.opc.servie;
-
-import com.alibaba.fastjson.JSON;
-import com.platform.common.util.DateUtils;
-import com.platform.common.util.RedisUtils;
-import com.platform.common.util.StringUtils;
-import com.platform.dao.dto.check.CheckProjectDTO;
-import com.platform.dao.entity.remote.RemoteOpc;
-import com.platform.dao.entity.remote.RemoteOpcLog;
-import com.platform.dao.mapper.remote.RemoteOpcLogMapper;
-import com.platform.dao.mapper.remote.RemoteOpcMapper;
-import com.platform.opc.entity.OpcResult;
-import com.platform.opc.util.OpcDAClient;
-import lombok.AllArgsConstructor;
-import lombok.extern.slf4j.Slf4j;
-import org.springframework.scheduling.annotation.EnableScheduling;
-import org.springframework.scheduling.annotation.Scheduled;
-import org.springframework.stereotype.Service;
-import org.springframework.util.CollectionUtils;
-import tk.mybatis.mapper.weekend.Weekend;
-import tk.mybatis.mapper.weekend.WeekendCriteria;
-
-import java.math.BigDecimal;
-import java.time.LocalDateTime;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.stream.Collectors;
-
-@Service("opcTaskService")
-@AllArgsConstructor
-@Slf4j
-@EnableScheduling   // 1. enable scheduled tasks
-public class OpcTaskService {
-
-    private final RemoteOpcMapper remoteOpcMapper;
-    private final RemoteOpcLogMapper remoteOpcLogMapper;
-
-    /**
-     * 1: fetch data per group
-     * a: save to redis; the front-end page reads the data in real time from the database, refreshing every 5 seconds
-     */
-    @Scheduled(fixedDelay = 2000)  // every 2 seconds
-    public void getValue() {
-        String key = RedisUtils.getString(OpcDAClient.redis_opc_update_flag);
-        if(StringUtils.isBlank(key)){
-            log.info("开始拉取数据");
-            List<OpcResult> resultList = OpcDAClient.getItemValuesList();
-            if (!CollectionUtils.isEmpty(resultList)) {
-                log.info("拉取数量:" + resultList.size());
-                RedisUtils.setString(OpcDAClient.redis_opc_item_values, JSON.toJSONString(resultList));
-                // update the real-time data in the database
-                List<RemoteOpc> remoteOpcList = new ArrayList<>();
-                LocalDateTime localDateTime = LocalDateTime.now();
-                for (OpcResult result : resultList) {
-                    RemoteOpc remoteOpc = new RemoteOpc();
-                    remoteOpc.setResult(new BigDecimal(result.getValue()));
-                    remoteOpc.setPositionNum(result.getId());
-                    remoteOpc.setUpdateTime(localDateTime);
-                    remoteOpcList.add(remoteOpc);
-                }
-                remoteOpcMapper.updateBatch(remoteOpcList);
-            } else {
-                log.info("初始化启动分组错误,等待下次重新启动分组");
-            }
-            log.info("结束拉取数据");
-        }
-    }
-
-    /**
-     * 1: persist the collected data
-     * b: start a new thread and write the queue to the database, run every 2 minutes
-     * c: each point keeps one record per day
-     * 1) loop over the points and check whether a record exists for the current day; if so, append to it
-     * 2) batch-write to the database; each day's data is appended to a single record
-     */
-    @Scheduled(fixedDelay = 300000)  // every 300 s: persist to the database every 5 minutes, keeping the daily volume under 7 million records
-    public void saveValue() {
-        String key = RedisUtils.getString(OpcDAClient.redis_opc_update_flag);
-        if(StringUtils.isBlank(key)){
-            log.info("开始保存点位");
-            String jsonStr = RedisUtils.getString(OpcDAClient.redis_opc_item_values);
-            if (StringUtils.isNotBlank(jsonStr)) {
-                List<OpcResult> resultList = JSON.parseArray(jsonStr, OpcResult.class);
-                List<RemoteOpcLog> addOpcLogList = new ArrayList<>();
-                List<RemoteOpcLog> updateRemoteOpcLogList = new ArrayList<>();
-                Weekend<RemoteOpcLog> weekend = new Weekend<>(RemoteOpcLog.class);
-                WeekendCriteria<RemoteOpcLog, Object> weekendCriteria = weekend.weekendCriteria();
-                // check whether a record already exists for the current day: append to it if so, otherwise insert
-                LocalDateTime time = LocalDateTime.now();
-                weekendCriteria.andIn(RemoteOpcLog::getPositionNum, resultList.stream().map(OpcResult::getId).collect(Collectors.toList()));
-                weekendCriteria.andEqualTo(RemoteOpcLog::getYear, time.getYear());
-                weekendCriteria.andEqualTo(RemoteOpcLog::getMonth, time.getMonthValue());
-                weekendCriteria.andEqualTo(RemoteOpcLog::getDay, time.getDayOfMonth());
-                List<RemoteOpcLog> checkList = remoteOpcLogMapper.selectByExample(weekend);
-                for (OpcResult result : resultList) {
-                    RemoteOpcLog remoteOpcLog = new RemoteOpcLog();
-                    remoteOpcLog.setPositionNum(result.getId());
-                    remoteOpcLog.setResult(new BigDecimal(result.getValue()));
-                    LocalDateTime localDateTime = DateUtils.strToLocalDateTime(result.getTime(), DateUtils.PATTERN_YMD_HMS);
-                    remoteOpcLog.setCreatedTime(localDateTime);
-                    remoteOpcLog.setYear(localDateTime.getYear());
-                    remoteOpcLog.setMonth(localDateTime.getMonthValue());
-                    remoteOpcLog.setDay(localDateTime.getDayOfMonth());
-                    // remoteOpcLog.setHour(localDateTime.getHour());
-                    // remoteOpcLog.setMinute(localDateTime.getMinute());
-                    remoteOpcLog.setRemark(result.getTime().split(" ")[1] + "," + result.getValue() + ";");
-                    List<RemoteOpcLog> findItemList = checkList.stream().filter(remoteOpcLog1 -> remoteOpcLog1.getPositionNum().equals(result.getId())).collect(Collectors.toList());
-                    if (!CollectionUtils.isEmpty(findItemList) && findItemList.size()>0) {
-                        updateRemoteOpcLogList.add(remoteOpcLog);
-                    } else {
-                        addOpcLogList.add(remoteOpcLog);
-                    }
-                }
-                if(!CollectionUtils.isEmpty(updateRemoteOpcLogList)){
-                    remoteOpcLogMapper.updateBatch(updateRemoteOpcLogList);
-                    log.info("更新数量:" + resultList.size());
-                }
-                if(!CollectionUtils.isEmpty(addOpcLogList)){
-                    remoteOpcLogMapper.insertListforComplex(addOpcLogList);
-                    log.info("写入数量:" + resultList.size());
-                }
-                log.info("写入/更新数据库数量:" + resultList.size());
-            }
-            log.info("结束保存点位");
-        }
-    }
-
-}

+ 39 - 3
platform-opc/src/main/java/com/platform/opc/util/OpcDAClient.java

@@ -29,6 +29,7 @@ import java.util.stream.Collectors;
 @Slf4j
 public class OpcDAClient {

+    public static String redis_ok = "redis_ok";
     public static String redis_opc_item_values = "redis_opc_item_values";
     public static String redis_opc_update_flag = "redis_opc_update_flag";
     // list of points waiting to be collected
@@ -42,7 +43,7 @@ public class OpcDAClient {
     public static String progId = "Hollysys.HOLLiASiComm.1";
     public static String tag_prefix = "Channel1.Device1.";
     private static Server server;
-    private static List<Group> groupList = new ArrayList<>();
+    public static List<Group> groupList = new ArrayList<>();
     // private static Item[][] itemArrList = null;
     private static Map<String, List<Item>> itemArrList = new HashMap<>();

@@ -184,7 +185,9 @@ public class OpcDAClient {
             // save the information of points that do not exist to the database
             Map<String, Integer> errorsItemMap = e.getErrors();
             // e.printStackTrace();
-            log.error("添加点位出错,有不存在的地位,等待下次启动添加", e);
+            log.error("添加点位出错,有不存在的点位,等待下次启动添加", e);
+            groupList = new ArrayList<>();
+            itemArrList = new HashMap<>();
             return e;
         } catch (DuplicateGroupException e) {
             e.printStackTrace();
@@ -235,7 +238,7 @@ public class OpcDAClient {
                         log.info("取消前数量:" + items.size());
                         log.info("取消后数量:" + itemResults.size());
                         // if the group has no points left, delete the group
-                        if(CollectionUtils.isEmpty(itemResults) || itemResults.size()==0){
+                        if (CollectionUtils.isEmpty(itemResults) || itemResults.size() == 0) {
                             groupList.remove(group);
                             itemArrList.remove(entry.getKey() + "");
                         }
@@ -279,6 +282,39 @@ public class OpcDAClient {
                     }
                 }
             }
+        } catch (Exception e) {
+            e.printStackTrace();
+            log.error("批量获取数据异常:", e);
+            return null;
+        }
+        return resultList;
+    }
+
+    /**
+     * fetch the data of a single group
+     * selected by workshop id
+     *
+     * @param group:
+     * @return
+     */
+    public static List<OpcResult> getItemValuesList(Group group) {
+        List<OpcResult> resultList = new ArrayList<>();
+        try {
+            log.info("获取分组的数据: 组名:" + group.getName());
+            Item[] items = itemArrList.get(group.getName()).toArray(new Item[0]);
+            log.info("获取分组的数据数量:" + items.length);
+            Map<Item, ItemState> resultMap = group.read(true, items);
+            //log.info("数据获取完成。数量:", resultMap.size() + ", 组序号:" + i);
+            for (Item item : resultMap.keySet()) {
+                OpcResult result = new OpcResult();
+                ItemState itemMap = resultMap.get(item);
+                String value = getVal(itemMap.getValue());
+                result.setId(item.getId());
+                result.setValue(value);
+                result.setTime(DateUtils.dateToString(itemMap.getTimestamp().getTime()));
+                // log.info("id: " + item.getId() + ", value: " + value + ", timestamp: " + itemMap.getTimestamp());
+                resultList.add(result);
+            }
         } catch (Exception e) {
             e.printStackTrace();
             log.error("批量获取数据异常:", e);

+ 1 - 1
platform-opc/src/main/resources/logback-spring.xml

@@ -91,7 +91,7 @@
     <!-- development environment -->
     <springProfile name="dev">
         <logger name="com.platform" level="INFO"/>
-        <logger name="com.platform.dao.mapper.upms.SysLogMapper" level="ERROR"/>
+        <logger name="com.platform.dao.mapper.upms.SysLogMapper" level="INFO"/>

         <root level="INFO">
             <appender-ref ref="stdout"/>

+ 1 - 1
platform-rest/src/main/java/com/platform/rest/controller/remote/RemoteOpcController.java

@@ -99,7 +99,7 @@ public class RemoteOpcController {
     @PreAuthorize("@pms.hasPermission('remote-opcs-edit')")
     public R addGroup(@PathVariable("positionNum") String positionNum) {
         RedisUtils.setListOne(redis_opc_wait_add_list, positionNum);
-        return new R<>("已加入待撤销列表,10秒后自动采集");
+        return new R<>("已加入待采集列表,10秒后自动采集");
     }

     /**

+ 1 - 1
platform-service/src/main/java/com/platform/service/bean/ScheduleUtils.java

@@ -37,7 +37,7 @@ public class ScheduleUtils {
         try {
             return (CronTrigger) scheduler.getTrigger(getTriggerKey(jobId));
         } catch (SchedulerException e) {
-            throw new BusinessException("获取定时任务CronTrigger出现异常");
+            throw new BusinessException("获取定时任务CronTrigger出现异常, jobId:" + jobId);
         }
     }


+ 5 - 5
platform-service/src/main/java/com/platform/service/repair/impl/RepairApplicationFormServiceImpl.java

@@ -1454,15 +1454,15 @@ public class RepairApplicationFormServiceImpl extends BaseServiceImpl<RepairAppl
             vo.setYear(map.get("searchStartTimeMonth").getYear());
             vo.setMonth(map.get("searchStartTimeMonth").getMonthValue());
             for (RepairApplicationFormVO repairApplicationForm : list) {
-                if (repairApplicationForm.getDealMinutes() == null) {
-                    continue;
-                }
-                if (type == 1) {
+//                if (repairApplicationForm.getDealMinutes() == null) {
+//                    continue;
+//                }
+              /*  if (type == 1) {
                     double minutes = repairApplicationForm.getDealMinutes();
                     if (minutes < 24.0) {
                         continue;
                     }
-                }
+                }*/
                 if (repairApplicationForm.getApplyTime().isAfter(map.get("searchStartTimeMonth")) && repairApplicationForm.getApplyTime().isBefore(map.get("searchEndTimeMonth"))) {
                     i++;
                     detailList.add(repairApplicationForm);

+ 1 - 1
platform-service/src/main/java/com/platform/service/sb/impl/SbInfoServiceImpl.java

@@ -2694,7 +2694,7 @@ public class SbInfoServiceImpl extends BaseServiceImpl<SbInfoMapper, SbInfo, SbI
             vo.setTotalNum(Integer.valueOf(String.valueOf(map.get("num"))));
             totalNum = totalNum + vo.getTotalNum();
             for (SbType sbType : typeList) {
-                if (map.get("typeId").equals(sbType.getId())) {
+                if (map.get("typeId")!=null && map.get("typeId").equals(sbType.getId())) {
                     vo.setTypeName(sbType.getName());
                     break;
                 }

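The added null check guards against statistics rows whose typeId is missing. A hedged alternative with the same effect is java.util.Objects.equals, which is null-safe on both sides; the helper below is only an illustration (name and shape are assumptions), not what the commit does.

import java.util.List;
import java.util.Map;
import java.util.Objects;

// Hypothetical helper, not part of this commit: resolves the type name for one
// statistics row. Objects.equals returns false when exactly one argument is null,
// so rows without a typeId are skipped instead of throwing a NullPointerException.
static String resolveTypeName(Map<String, Object> row, List<SbType> typeList) {
    for (SbType sbType : typeList) {
        if (Objects.equals(row.get("typeId"), sbType.getId())) {
            return sbType.getName();
        }
    }
    return null;
}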
+ 14 - 2
platform-service/src/main/java/com/platform/service/sb/impl/SbPositionServiceImpl.java

@@ -81,6 +81,13 @@ public class SbPositionServiceImpl extends BaseServiceImpl<SbPositionMapper, SbP
             String code = IdGeneratorUtils.getStrNum("", ++i);
             model.setCode(code);
         }*/
+        Weekend<SbPosition> weekend = new Weekend<>(SbPosition.class);
+        WeekendCriteria<SbPosition, Object> weekendCriteria = weekend.weekendCriteria();
+        weekendCriteria.andEqualTo(SbPosition::getNo, model.getNo());
+        SbPosition example = mapper.selectOneByExample(weekend);
+        if(example !=null){
+            throw new BusinessException("位号重复,请重新设置位号");
+        }
         SbPosition sbPosition = super.saveModelByDTO(model);
         if (parent != null) {
             mapper.updateByPrimaryKeySelective(new SbPosition().setId(parentId).setUpdateTime(LocalDateTime.now()));
@@ -90,9 +97,14 @@ public class SbPositionServiceImpl extends BaseServiceImpl<SbPositionMapper, SbP

     @Override
     public void modModelByDTO(SbPositionDTO model) {
-        SbPosition entity = mapper.selectByPrimaryKey(model.getId());
+        Weekend<SbPosition> weekend = new Weekend<>(SbPosition.class);
+        WeekendCriteria<SbPosition, Object> weekendCriteria = weekend.weekendCriteria();
+        weekendCriteria.andEqualTo(SbPosition::getNo, model.getNo());
+        SbPosition example = mapper.selectOneByExample(weekend);
+        if(example !=null && !example.getId().equals(model.getId())){
+            throw new BusinessException("位号重复,请重新设置位号");
+        }
         String parentId = model.getParentId();
-        SbPosition parent = mapper.selectByPrimaryKeyForUpdate(parentId);

         // if there was no code before, set the code
        /* if(StringUtils.isBlank(model.getCode())){
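Both saveModelByDTO and modModelByDTO now build the same Weekend criteria to reject a duplicate tag number (the no field). A hedged sketch of pulling that lookup into one private helper inside the same service follows; the helper name is an assumption and is not part of the commit.

// Hypothetical helper, not part of this commit: returns the position that already
// uses the given tag number, or null when the number is free.
private SbPosition findByNo(String no) {
    Weekend<SbPosition> weekend = new Weekend<>(SbPosition.class);
    WeekendCriteria<SbPosition, Object> criteria = weekend.weekendCriteria();
    criteria.andEqualTo(SbPosition::getNo, no);
    return mapper.selectOneByExample(weekend);
}

// Usage sketch: a match only counts as a duplicate when it is a different row,
// which covers both the save case (the new model's id is still null) and the modify case.
// SbPosition existing = findByNo(model.getNo());
// if (existing != null && !existing.getId().equals(model.getId())) {
//     throw new BusinessException("位号重复,请重新设置位号");
// }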
        /* if(StringUtils.isBlank(model.getCode())){