---
# Development environment configuration
server:
  # HTTP port of the server; Spring Boot default is 8080
  port: 8092
  servlet:
    # Context path of the application
    context-path: /sipaiis_data
  tomcat:
    # URI encoding used by Tomcat
    uri-encoding: UTF-8

# DataSource Config
spring:
  redis:
    mode: single  # single-server mode (custom key read by project code, not a Spring property)
    host: 127.0.0.1
    port: 6379
    database: 0
    # Cluster node list; presumably only used when mode is switched to cluster — TODO confirm
    cluster:
      nodes: "172.16.242.44:6379,172.16.242.45:6379,172.16.242.46:6379,172.16.242.47:6379,172.16.242.48:6379,172.16.242.49:6379"
      max-redirects: 3
    # NOTE(review): credentials committed in plain text — move to env vars / a secret store
    password: Aa112211
    timeout: 100000  # presumably milliseconds — TODO confirm against the Redis client config
  data:
    elasticsearch:
      # cluster-name: elasticsearch-sipaiis
      cluster-name: my-application
      cluster-nodes: "127.0.0.1:9300"
  datasource:
    master:
      username: sa
      password: P76XB3nm36aMkN6n
      jdbc-url: "jdbc:sqlserver://122.51.194.184:1433;DatabaseName=SIPAIIS_WMS_HQAQ"
      driver-class-name: com.microsoft.sqlserver.jdbc.SQLServerDriver
      type: com.alibaba.druid.pool.DruidDataSource
    # production database
    js1:
      username: sa
      password: P76XB3nm36aMkN6n
      jdbc-url: "jdbc:sqlserver://122.51.194.184:1433;DatabaseName=EIP_PRD_HQWS"
      driver-class-name: com.microsoft.sqlserver.jdbc.SQLServerDriver
      type: com.alibaba.druid.pool.DruidDataSource
    # production database
    js2:
      username: sa
      password: P76XB3nm36aMkN6n
      jdbc-url: "jdbc:sqlserver://122.51.194.184:1433;DatabaseName=EIP_PRD_HQWS"
      driver-class-name: com.microsoft.sqlserver.jdbc.SQLServerDriver
      type: com.alibaba.druid.pool.DruidDataSource
    # production database
    js3:
      username: sa
      password: P76XB3nm36aMkN6n
      jdbc-url: "jdbc:sqlserver://122.51.194.184:1433;DatabaseName=EIP_PRD_HQWS"
      driver-class-name: com.microsoft.sqlserver.jdbc.SQLServerDriver
      type: com.alibaba.druid.pool.DruidDataSource
  kafka:
    # Kafka broker address (KRaft mode)
    bootstrap-servers: "192.168.10.17:9092"
    # Producer configuration
    producer:
      key-serializer: org.apache.kafka.common.serialization.StringSerializer
      value-serializer: org.apache.kafka.common.serialization.StringSerializer
      acks: all
      retries: 3
    # Consumer configuration
    consumer:
      group-id: my-group-2.2.0
      auto-offset-reset: earliest
      key-deserializer: org.apache.kafka.common.serialization.StringDeserializer
      value-deserializer: org.apache.kafka.common.serialization.StringDeserializer
      enable-auto-commit: false
      properties:
        # NOTE(review): Spring's own switch for this is spring.kafka.listener.missing-topics-fatal;
        # verify that this raw client property is actually honored
        missing.topics.fatal: false  # key fix point (original comment)
    listener:
      ack-mode: manual  # commit offsets manually
    # Admin client configuration (used for automatic topic creation)
    admin:
      fail-fast: true
      properties:
        # NOTE(review): auto.create.topics.enable is a broker-side setting; confirm it has any
        # effect when supplied to the admin client
        auto.create.topics.enable: true
    # Extra configuration adapting to Kafka 3.x
    properties:
      security.protocol: PLAINTEXT
      # interceptor.classes: org.apache.kafka.clients.interceptor.ProducerInterceptor
  # rabbitmq:
  #   host: 10.18.68.10:90
  #   port: 5672
  #   username: admin
  #   password: admin
  #   # virtual host; may be left unset to use the server default
  #   virtual-host: /
  main:
    allow-bean-definition-overriding: true  # allow overriding bean definitions with the same name

mybatis:
  mapper-locations: "classpath:mybatis/mapper/**/*.xml"
  type-aliases-package: com.sipai.entity

# showSql
logging:
  config: classpath:log4j2.xml
  # level:
  #   com:
  #     example:
  #       mapper: debug

# NOTE(review): Spring Boot binds Thymeleaf settings under spring.thymeleaf; at the top level this
# section is likely ignored — confirm the intended placement
thymeleaf:
  mode: HTML
  encoding: utf-8
  cache: false
  enabled: false
  prefix: classpath:/templates/

# PageHelper configuration (application.yml)
pagehelper:
  # Fixed: every configured datasource above is SQL Server (jdbc:sqlserver URLs, MS JDBC driver),
  # but the dialect was "mysql", which makes PageHelper emit LIMIT-based pagination SQL that
  # SQL Server rejects. Use "sqlserver2012" instead if the server supports OFFSET/FETCH.
  helper-dialect: sqlserver
  reasonable: true
  support-methods-arguments: true
  params: count=countSql

opcua:
  enabled: false  # whether the OPC UA feature is enabled; default is true
  server-url: "opc.tcp://132.120.136.19:49320"  # OPC UA server address and port
  # security-policy: Basic256Sha256
  # security-mode: SignAndEncrypt
  security-policy: "None"  # security policy set to None (quoted: "None" is a string, not null)
  security-mode: "None"  # security mode set to None
  username: "administrator"  # user name (optional)
  password: "sipai@64368180"  # password (optional)

app:
  max-points: 200  # maximum number of points per real-time data request

mqtt:
  # broker-address: "tcp://192.168.10.17:1883"
  # dashboard-01: "http://192.168.10.17:18083"
  # username: "admin"
  # password: "sipai@64368180"
  broker-address: "tcp://121.5.164.6:1883"
  dashboard-01: "http://121.5.164.6:18083"
  username: "dmbroker"
  password: "qwer1234"

# UDP service
udp:
  server:
    enabled: false  # whether the UDP service is enabled; default is true
    port: 5006  # UDP service port

scheduled:
  enabled: true  # set to false to temporarily disable the scheduled tasks