hfxc226 2 years ago
parent
commit
b118344c97

+ 2 - 0
platform-dao/src/main/java/com/platform/dao/mapper/remote/RemoteOpcLogMapper.java

@@ -3,6 +3,7 @@ package com.platform.dao.mapper.remote;
 import com.platform.dao.config.MyMapper;
 import com.platform.dao.dto.remote.RemoteOpcLogDTO;
 import com.platform.dao.entity.remote.RemoteOpcLog;
+import com.platform.dao.entity.store.OutStoreDetail;
 import com.platform.dao.vo.query.remote.RemoteOpcLogVO;
 import org.springframework.stereotype.Component;
 
@@ -24,4 +25,5 @@ public interface RemoteOpcLogMapper extends MyMapper<RemoteOpcLog> {
      */
     List<RemoteOpcLogVO> selectList(RemoteOpcLogDTO dto);
 
+    void updateBatch(List<RemoteOpcLog> list);
 }

+ 16 - 8
platform-dao/src/main/resources/mapper/remote/RemoteOpcLogMapper.xml

@@ -2,10 +2,7 @@
 <!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN" "http://mybatis.org/dtd/mybatis-3-mapper.dtd">
 <mapper namespace="com.platform.dao.mapper.remote.RemoteOpcLogMapper">
     <sql id="Base_Column_List">
-        opc_log
-        .
-        id
-        ,
+        opc_log.id,
                                      opc_log.sb_id,
                                      opc_log.sb_name,
                                      opc_log.position_num,
@@ -27,10 +24,7 @@
                                      opc_log.update_time
     </sql>
     <sql id="Ref_Column_List">
-        opc_log
-        .
-        sb_id
-        ,
+        opc_log.sb_id,
                                      opc_log.sb_name,
                                      opc_log.position_num,
                                      opc_log.type,
@@ -138,4 +132,18 @@
             </if>
         </where>
     </select>
+
+    <update id="updateBatch" parameterType="java.util.List">
+        <foreach collection="list" item="item" index="index" open="" close="" separator=";">
+            update t_remote_opc_log
+            <set>
+                remark=concat(remark,#{item.remark})
+            </set>
+            where position_num = #{item.positionNum}
+            and year = #{item.year}
+            and month = #{item.month}
+            and day = #{item.day}
+        </foreach>
+    </update>
+
 </mapper>
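
A note on the new updateBatch statement: with separator=";" the <foreach> renders one UPDATE per list entry and ships them to the driver as a single multi-statement call. Assuming the platform database is MySQL, Connector/J only accepts that when allowMultiQueries=true is set on the JDBC URL. Below is a minimal sketch of such a datasource; HikariCP, the host, schema and credentials are illustrative, not taken from this repository.

    import javax.sql.DataSource;

    import com.zaxxer.hikari.HikariConfig;
    import com.zaxxer.hikari.HikariDataSource;

    public class MultiQueryDataSourceSketch {
        public static DataSource build() {
            HikariConfig config = new HikariConfig();
            // allowMultiQueries=true lets the driver execute the ";"-separated
            // UPDATE statements rendered by <foreach separator=";">.
            config.setJdbcUrl("jdbc:mysql://localhost:3306/platform?allowMultiQueries=true");
            config.setUsername("platform");
            config.setPassword("secret");
            return new HikariDataSource(config);
        }
    }

Without that flag, each scheduled save would fail on the first ";" with a syntax error, so the existing datasource URL is worth checking before relying on updateBatch.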

+ 25 - 15
platform-opc/src/main/java/com/platform/opc/servie/OpcTaskService.java

@@ -1,8 +1,6 @@
 package com.platform.opc.servie;
 
 import com.alibaba.fastjson.JSON;
-import com.platform.common.constant.UpmsRedisKeyConstants;
-import com.platform.common.util.BeanUtils;
 import com.platform.common.util.DateUtils;
 import com.platform.common.util.RedisUtils;
 import com.platform.common.util.StringUtils;
@@ -10,27 +8,22 @@ import com.platform.dao.entity.remote.RemoteOpc;
 import com.platform.dao.entity.remote.RemoteOpcLog;
 import com.platform.dao.mapper.remote.RemoteOpcLogMapper;
 import com.platform.dao.mapper.remote.RemoteOpcMapper;
-import com.platform.dao.mapper.upms.SysDictMapper;
 import com.platform.opc.entity.OpcResult;
 import com.platform.opc.util.OpcDAClient;
 import lombok.AllArgsConstructor;
 import lombok.extern.slf4j.Slf4j;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.context.annotation.DependsOn;
 import org.springframework.scheduling.annotation.Async;
 import org.springframework.scheduling.annotation.EnableAsync;
 import org.springframework.scheduling.annotation.EnableScheduling;
 import org.springframework.scheduling.annotation.Scheduled;
 import org.springframework.stereotype.Service;
 import org.springframework.util.CollectionUtils;
+import tk.mybatis.mapper.weekend.Weekend;
+import tk.mybatis.mapper.weekend.WeekendCriteria;
 
-import javax.annotation.PostConstruct;
 import java.time.LocalDateTime;
 import java.util.ArrayList;
-import java.util.Collections;
 import java.util.List;
-import java.util.Map;
-import java.util.stream.Collectors;
 
 @Service("opcTaskService")
 @AllArgsConstructor
@@ -71,13 +64,13 @@ public class OpcTaskService {
 
     /**
      * 1: Save the collected data
-     * b: start a new thread that flushes the queue to the database, triggered once a minute
+     * b: start a new thread that flushes the queue to the database, triggered every 5 minutes
      * c: for each point, each record holds one day of data
      * 1) loop over the points and check whether a record already exists for the current day; if it does, append to it
-     * 2) batch write to the database
+     * 2) batch write to the database, appending each day's data into a single record
      */
     @Async
-    @Scheduled(fixedDelay = 60000)  // 60-second interval
+    @Scheduled(fixedDelay = 300000)  // 300-second interval: persist to the database every 5 minutes, keeping the daily volume under 7 million rows
     public void saveValue() {
         log.info("开始读取redis");
         String jsonStr = RedisUtils.getString("opcList");
@@ -87,6 +80,16 @@ public class OpcTaskService {
                 log.info("id: " + result.getId() + ", value: " + result.getValue() + ", time: " + result.getTime());
             }*/
             // persist to the database
+            // check whether a record for the current day already exists: if so, append to it, otherwise insert a new one
+            Weekend<RemoteOpcLog> weekend = new Weekend<>(RemoteOpcLog.class);
+            WeekendCriteria<RemoteOpcLog, Object> weekendCriteria = weekend.weekendCriteria();
+            OpcResult log = resultList.get(0);
+            LocalDateTime time = DateUtils.strToLocalDateTime(log.getTime(), DateUtils.PATTERN_YMD_HMS);
+            weekendCriteria.andEqualTo(RemoteOpcLog::getPositionNum, log.getId());
+            weekendCriteria.andEqualTo(RemoteOpcLog::getYear, time.getYear());
+            weekendCriteria.andEqualTo(RemoteOpcLog::getMonth, time.getMonthValue());
+            weekendCriteria.andEqualTo(RemoteOpcLog::getDay, time.getDayOfMonth());
+            int count = remoteOpcLogMapper.selectCountByExample(weekend);
             List<RemoteOpcLog> remoteOpcLogList = new ArrayList<>();
             for (OpcResult result : resultList) {
                 RemoteOpcLog remoteOpcLog = new RemoteOpcLog();
@@ -97,12 +100,19 @@ public class OpcTaskService {
                 remoteOpcLog.setYear(localDateTime.getYear());
                 remoteOpcLog.setMonth(localDateTime.getMonthValue());
                 remoteOpcLog.setDay(localDateTime.getDayOfMonth());
-                remoteOpcLog.setHour(localDateTime.getHour());
-                remoteOpcLog.setMinute(localDateTime.getMinute());
+                // remoteOpcLog.setHour(localDateTime.getHour());
+                // remoteOpcLog.setMinute(localDateTime.getMinute());
+                remoteOpcLog.setRemark(result.getTime() + "," + result.getValue() + ";");
                 remoteOpcLogList.add(remoteOpcLog);
                 //log.info("id: " + result.getId() + ", value: " + result.getValue() + ", time: " + result.getTime());
             }
-            remoteOpcLogMapper.insertListforComplex(remoteOpcLogList);
+
+            // TODO: if remoteOpcLogList holds more than 5000 entries, split it and persist in several batches
+            if (count > 0) {
+                remoteOpcLogMapper.updateBatch(remoteOpcLogList);
+            } else {
+                remoteOpcLogMapper.insertListforComplex(remoteOpcLogList);
+            }
         }
         log.info("结束读取redis");
     }
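
The TODO above points at chunking the write once remoteOpcLogList exceeds 5000 rows. A minimal sketch of how that could look inside OpcTaskService, assuming plain subList chunking and the already injected remoteOpcLogMapper (the helper name and constant are illustrative):

    private static final int BATCH_SIZE = 5000;

    private void saveInChunks(List<RemoteOpcLog> remoteOpcLogList, boolean appendToExisting) {
        for (int from = 0; from < remoteOpcLogList.size(); from += BATCH_SIZE) {
            int to = Math.min(from + BATCH_SIZE, remoteOpcLogList.size());
            List<RemoteOpcLog> chunk = remoteOpcLogList.subList(from, to);
            if (appendToExisting) {
                // rows for today already exist: append via the new batch update
                remoteOpcLogMapper.updateBatch(chunk);
            } else {
                // first write of the day: insert fresh rows
                remoteOpcLogMapper.insertListforComplex(chunk);
            }
        }
    }

saveValue could then replace its final if/else with saveInChunks(remoteOpcLogList, count > 0).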

+ 2 - 2
platform-opc/src/main/java/com/platform/opc/util/OpcDAClient.java

@@ -143,9 +143,9 @@ public class OpcDAClient {
                     Group group = server.addGroup(entry.getKey()+"");
                     List<RemoteOpc> list = entry.getValue();
                     List<String> itemIdList = list.stream().distinct().map(RemoteOpc::getPositionNum).collect(Collectors.toList());
-                    log.info("itemIdList: " + itemIdList.size());
+                    //log.info("itemIdList: " + itemIdList.size());
                     String[] items = itemIdList.toArray(new String[]{});
-                    log.info("items: " + items.length);
+                    //log.info("items: " + items.length);
                     Map<String, Item> groupItems = group.addItems(items);
                     Set itemSet = new HashSet(groupItems.values());
                     Item[] itemArr = new Item[itemSet.size()];