This commit is contained in:
Timer
2026-02-08 22:57:30 +08:00
parent fcf08b8b69
commit e847ff6626
10 changed files with 101 additions and 80551 deletions

View File

@ -69,10 +69,6 @@ public class FacilitiesCardController {
* 获取左侧树形数据 设施、管道通用
* @param request
* @param model
* @param page
* @param rows
* @param sort
* @param order
* @return
*/
@RequestMapping("/getTreeJson.do")

View File

@ -4,6 +4,7 @@ import com.serotonin.modbus4j.sero.util.queue.ByteQueue;
import com.sipai.controller.work.ReadAWriteUtil;
import com.sipai.dao.repository.MPointRepo;
import com.sipai.dao.scada.MPointDao;
import com.sipai.entity.base.Result;
import com.sipai.entity.enums.EnableTypeEnum;
import com.sipai.entity.enums.SourceTypeEnum;
import com.sipai.entity.scada.*;
@ -79,6 +80,8 @@ public class MPointService {
@Autowired
private CompanyService companyService;
@Resource
private MPointExpandService mPointExpandService;
@Resource
private UnitService unitService;
public MPoint selectById(String bizId, String id) {
@ -165,7 +168,7 @@ public class MPointService {
return res;
}
@Transactional
// @Transactional
public int save(String bizId, MPoint entity) {
int res = 0;
try {
@ -1273,9 +1276,17 @@ public class MPointService {
if (mPoint != null) {
if (flag != null && flag.equals("insert")) {
insertNum += this.mPointDao.insert(mPoint);
try {
//更新es
MPointES mPointES = MPointES.format(mPoint);
mPointRepo.save(mPointES);
} catch (Exception e) {
System.out.println("mPoint_save------------------" + e.getMessage());
}
}
if (flag != null && flag.equals("update")) {
updateNum += this.mPointDao.updateByPrimaryKeySelective(mPoint);
// updateNum += this.mPointDao.updateByPrimaryKeySelective(mPoint);
int result = this.update(mPoint.getBizid(), mPoint);
}
}
}
@ -1643,6 +1654,7 @@ public class MPointService {
}
return result;
}
/**
* xls表格导入
*

View File

@ -22,13 +22,16 @@ public class DataSourceInterceptor {
Object argus = argusObjects[0];
if (argus != null) {
DataSourceHolder.setDataSources(DataSources.valueOf("SCADA_"+argus.toString()));
// DataSourceHolder.setDataSources(DataSources.valueOf("SCADA_HFCG"));
} else {
DataSourceHolder.setDataSources(DataSources.valueOf("SCADA_YL"));
// DataSourceHolder.setDataSources(DataSources.valueOf("SCADA_YL"));
DataSourceHolder.setDataSources(DataSources.valueOf("SCADA_0533JS"));
}
//适应所有生产库存集中为一个数据库
// DataSourceHolder.setDataSources(DataSources.SCADA_0756ZH);
}
//@After("dataSourceScadaPointcut()")
public void after(JoinPoint jp) {
DataSourceHolder.reset();

View File

@ -29,7 +29,8 @@ public enum DataSources {
//增城
// MASTER, SCADA_020ZCYH, SCADA_020ZCZX, SCADA_020ZC, SCADA_fe6fe310097f4679b46b7a173578dd11,SCADA_f31c04e44c9548bdaeca9f54c9e982e2
MASTER, SCADA_HFCG
// MASTER, SCADA_HFCG
MASTER, SCADA_0533JS
// MASTER, SCADA_FS_SK11_C
// MASTER, SCADA_021HQWS
// MASTER, SCADA_HFST

View File

@ -1,5 +1,9 @@
#es.nodes=127.0.0.1
es.nodes=122.51.194.184
es.host=9200
es.name=elastic
es.nodes=127.0.0.1
#es.nodes=122.51.194.184
es.host=9300
#es.name=elastic
es.name=my-application
#es.name=elasticsearch-sipaiis

View File

@ -1,13 +1,14 @@
#\u5B9A\u4E49LOG\u8F93\u51FA\u7EA7\u522B
log4j.rootLogger=INFO, Console ,File
#log4j.encoding=UTF-8
log4j.encoding=GBK
log4j.encoding=UTF-8
#log4j.encoding=GBK
#\u5B9A\u4E49\u65E5\u5FD7\u8F93\u51FA\u76EE\u7684\u5730\u4E3A\u63A7\u5236\u53F0
log4j.appender.Console=org.apache.log4j.ConsoleAppender
log4j.appender.Console.Target=System.out
#log4j.appender.Console.encoding=UTF-8
log4j.appender.Console.encoding=GBK
#log4j.appender.Console.encoding=GBK
log4j.appender.Console.encoding=UTF-8
#\u53EF\u4EE5\u7075\u6D3B\u5730\u6307\u5B9A\u65E5\u5FD7\u8F93\u51FA\u683C\u5F0F\uFF0C\u4E0B\u9762\u4E00\u884C\u662F\u6307\u5B9A\u5177\u4F53\u7684\u683C\u5F0F
log4j.appender.Console.layout = org.apache.log4j.PatternLayout
log4j.appender.Console.layout.ConversionPattern=[%c] - %m%n

View File

@ -1,19 +1,19 @@
#redis配置
redis.host=122.51.194.184
# redis.host=127.0.0.1
#redis.host=122.51.194.184
redis.host=127.0.0.1
#单机single 集群cluster
redis.mode=single
redis.port=26739
# redis.port=6379
#redis.port=26739
redis.port=6379
redis.password=Aa112211
redis.maxIdle=100
redis.maxActive=300
redis.maxWait=1000
redis.testOnBorrow=true
redis.timeout=100000
# 需要加入缓存的类
# <20><><EFBFBD><EFBFBD>Ҫ<EFBFBD><D2AA><EFBFBD><EFBFBD><EBBBBA><EFBFBD><EFBFBD><EFBFBD><EFBFBD>?
targetNames=xxxRecordManager,xxxSetRecordManager,xxxStatisticsIdentificationManager
# 需要缓存的方法
# <20><><EFBFBD><EFBFBD>Ҫ<EFBFBD><D2AA><EFBFBD><EFBFBD>ķ<EFBFBD><C4B7><EFBFBD><EFBFBD>?
methodNames=
#设置缓存失效时间(秒) — NOTE(review): original GBK comment was garbled; reconstructed from byte fragments, confirm against source file
com.service.impl.xxxRecordManager= 60

File diff suppressed because one or more lines are too long

File diff suppressed because it is too large Load Diff

View File

@ -14,6 +14,7 @@
http://www.springframework.org/schema/mvc
http://www.springframework.org/schema/mvc/spring-mvc-4.1.xsd" default-lazy-init="true">
<import resource="activiti.cfg.xml"/>
<import resource="spring-mybatis-scada.xml"/>
<!-- 自动扫描 -->
<context:component-scan base-package="com.sipai"/>
<!-- 引入配置文件 -->
@ -135,45 +136,46 @@
<!-- 是否显示SQL语句 -->
<property name="statementExecutableSqlLogEnable" value="true"/>
</bean>
<bean id="SCADA_HFCG" class="com.alibaba.druid.pool.DruidDataSource" init-method="init"
destroy-method="close">
<property name="driverClassName" value="${driver}"/>
<property name="url" value="${scada-url}"/>
<property name="username" value="${scada-username}"/>
<property name="password" value="${scada-password}"/>
<!-- <bean id="SCADA_HFCG" class="com.alibaba.druid.pool.DruidDataSource" init-method="init"-->
<!-- destroy-method="close">-->
<!-- <property name="driverClassName" value="${driver}"/>-->
<!-- <property name="url" value="${scada-url}"/>-->
<!-- <property name="username" value="${scada-username}"/>-->
<!-- <property name="password" value="${scada-password}"/>-->
<!-- 配置初始化大小、最小、最大 -->
<property name="initialSize" value="8"/>
<property name="minIdle" value="5"/>
<property name="maxActive" value="19"/>
<!-- &lt;!&ndash; 配置初始化大小、最小、最大 &ndash;&gt;-->
<!-- <property name="initialSize" value="8"/>-->
<!-- <property name="minIdle" value="5"/>-->
<!-- <property name="maxActive" value="19"/>-->
<!-- 配置获取连接等待超时的时间 -->
<property name="maxWait" value="30000"/>
<!-- &lt;!&ndash; 配置获取连接等待超时的时间 &ndash;&gt;-->
<!-- <property name="maxWait" value="30000"/>-->
<!-- 配置间隔多久才进行一次检测,检测需要关闭的空闲连接,单位是毫秒 -->
<property name="timeBetweenEvictionRunsMillis" value="60000"/>
<!-- &lt;!&ndash; 配置间隔多久才进行一次检测,检测需要关闭的空闲连接,单位是毫秒 &ndash;&gt;-->
<!-- <property name="timeBetweenEvictionRunsMillis" value="60000"/>-->
<!-- 配置一个连接在池中最小生存的时间,单位是毫秒 -->
<property name="minEvictableIdleTimeMillis" value="300000"/>
<!-- &lt;!&ndash; 配置一个连接在池中最小生存的时间,单位是毫秒 &ndash;&gt;-->
<!-- <property name="minEvictableIdleTimeMillis" value="300000"/>-->
<property name="testWhileIdle" value="true"/>
<!-- <property name="testWhileIdle" value="true"/>-->
<!-- 这里建议配置为TRUE防止取到的连接不可用 -->
<property name="testOnBorrow" value="true"/>
<property name="testOnReturn" value="false"/>
<!-- &lt;!&ndash; 这里建议配置为TRUE防止取到的连接不可用 &ndash;&gt;-->
<!-- <property name="testOnBorrow" value="true"/>-->
<!-- <property name="testOnReturn" value="false"/>-->
<!-- 打开PSCache并且指定每个连接上PSCache的大小 -->
<property name="poolPreparedStatements" value="true"/>
<property name="maxPoolPreparedStatementPerConnectionSize"
value="20"/>
</bean>
<!-- &lt;!&ndash; 打开PSCache并且指定每个连接上PSCache的大小 &ndash;&gt;-->
<!-- <property name="poolPreparedStatements" value="true"/>-->
<!-- <property name="maxPoolPreparedStatementPerConnectionSize"-->
<!-- value="20"/>-->
<!-- </bean>-->
<bean id="dataSource" class="com.sipai.tools.DynamicDataSource">
<property name="defaultTargetDataSource" ref="dataSourceMaster"/>
<property name="targetDataSources">
<map key-type="com.sipai.tools.DataSources">
<entry key="MASTER" value-ref="dataSourceMaster"/>
<entry key="SCADA_HFCG" value-ref="SCADA_HFCG"/>
<!-- <entry key="SCADA_HFCG" value-ref="SCADA_HFCG"/>-->
<entry key="SCADA_0533JS" value-ref="SCADA_021HQWS"/>
<!-- 这里还可以加多个dataSource -->
</map>
</property>