集中更新一次

This commit is contained in:
markilue 2024-03-20 21:17:46 +08:00
parent 684ad15222
commit 4a1d6ad72b
218 changed files with 39435 additions and 0 deletions

View File

@ -0,0 +1,33 @@
HELP.md
target/
!.mvn/wrapper/maven-wrapper.jar
!**/src/main/**/target/
!**/src/test/**/target/
### STS ###
.apt_generated
.classpath
.factorypath
.project
.settings
.springBeans
.sts4-cache
### IntelliJ IDEA ###
.idea
*.iws
*.iml
*.ipr
### NetBeans ###
/nbproject/private/
/nbbuild/
/dist/
/nbdist/
/.nb-gradle/
build/
!**/src/main/**/build/
!**/src/test/**/build/
### VS Code ###
.vscode/

View File

@ -0,0 +1,126 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<parent>
<artifactId>collecting</artifactId>
<groupId>com.cqu</groupId>
<version>0.0.1-SNAPSHOT</version>
</parent>
<modelVersion>4.0.0</modelVersion>
<artifactId>mqtt-collect-flume</artifactId>
<properties>
<maven.compiler.source>8</maven.compiler.source>
<maven.compiler.target>8</maven.compiler.target>
</properties>
<dependencies>
<!-- flume依赖-->
<dependency>
<groupId>org.apache.flume</groupId>
<artifactId>flume-ng-core</artifactId>
<version>1.9.0</version>
<scope>provided</scope>
</dependency>
<!-- https://mvnrepository.com/artifact/org.eclipse.paho/org.eclipse.paho.client.mqttv3 -->
<!-- mqtt依赖-->
<dependency>
<groupId>org.eclipse.paho</groupId>
<artifactId>org.eclipse.paho.client.mqttv3</artifactId>
<version>1.2.2</version>
</dependency>
<dependency>
<groupId>com.alibaba</groupId>
<artifactId>fastjson</artifactId>
<version>1.2.62</version>
</dependency>
<!--lombok用来简化实体类需要安装lombok插件-->
<dependency>
<groupId>org.projectlombok</groupId>
<artifactId>lombok</artifactId>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-beans</artifactId>
</dependency>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<scope>provided</scope>
</dependency>
<!-- &lt;!&ndash; https://mvnrepository.com/artifact/junit/junit &ndash;&gt;-->
<!-- <dependency>-->
<!-- <groupId>org.junit</groupId>-->
<!-- <artifactId>Test</artifactId>-->
<!-- <version>4.13.1</version>-->
<!-- <scope>test</scope>-->
<!-- </dependency>-->
<!-- hadoop依赖-->
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-client</artifactId>
<version>3.1.3</version>
</dependency>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-slf4j-impl</artifactId>
<version>2.12.0</version>
</dependency>
<!-- 统一flume和hadoop中guava版本-->
<dependency>
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
<version>27.0-jre</version>
</dependency>
<!--远程连接linux执行命令-->
<dependency>
<groupId>ch.ethz.ganymed</groupId>
<artifactId>ganymed-ssh2</artifactId>
<version>build210</version>
</dependency>
<dependency>
<groupId>org.apache.directory.studio</groupId>
<artifactId>org.apache.commons.lang</artifactId>
<version>2.6</version>
</dependency>
<dependency>
<groupId>com.jcraft</groupId>
<artifactId>jsch</artifactId>
<version>0.1.55</version>
<scope>compile</scope>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-assembly-plugin</artifactId>
<version>3.0.0</version>
<configuration>
<descriptorRefs>
<descriptorRef>jar-with-dependencies</descriptorRef>
</descriptorRefs>
</configuration>
<executions>
<execution>
<id>make-assembly</id>
<phase>package</phase>
<goals>
<goal>single</goal>
</goals>
</execution>
</executions>
</plugin>
</plugins>
</build>
</project>

View File

@ -0,0 +1,69 @@
#kafka-hdfs
#Name
a1.sources = r1
a1.channels = c1
a1.sinks = k1
#source
#本次使用kafka source
a1.sources.r1.type = org.apache.flume.source.kafka.KafkaSource
a1.sources.r1.kafka.bootstrap.servers = phm102:9092,phm103:9092,phm104:9092
a1.sources.r1.kafka.topics = phm_data
a1.sources.r1.kafka.consumer.group.id = phm
#将批次写入通道之前的最长时间
a1.sources.r1.batchDurationMillis = 2000
a1.sources.r1.interceptors = i1
a1.sources.r1.interceptors.i1.type = com.cqu.mqtt.interceptor.TimeStampInterceptor$MyBuilder
#file channel
a1.channels.c1.type = file
a1.channels.c1.dataDirs = /opt/module/flume-1.9.0/jobs/phm_data/filechannel
#写到file channel中时里面有已经去sink的和没去sink的,没去sink的会在内存中记录可以维护多少event的指针,以下配置就是配置这个容量
a1.channels.c1.capacity = 1000000
#事务容量,一定要大于kafka source 的batchsize
a1.channels.c1.transactionCapacity = 10000
#内存中的指针为了防止flume故障而丢失,会指定内存中的指针落盘会落到哪里
a1.channels.c1.checkpointDir = /opt/module/flume-1.9.0/jobs/phm_data/checkpoint
#落盘的备份
#a1.channels.c1.useDualCheckpoints = true
#a1.channels.c1.backupCheckpointDir = /opt/module/flume-1.9.0/jobs/checkpoint-bk
#最大的文件大小,当写在file中的文件达到一定程度,会换一个文件继续写,默认2146435071
a1.channels.c1.maxFileSize = 2146435071
#source 放进channel中时可能会放不进去,以下设置放不下去时等多久,超过之后仍然不等
a1.channels.c1.keep-alive = 5
a1.sinks.k1.type = hdfs
# TODO: mqtt_service path still needs to be revised
a1.sinks.k1.hdfs.path = hdfs://phm102:8020/origin_data/phm/log/phm_data/%Y-%m-%d
a1.sinks.k1.hdfs.filePrefix = log-
#a1.sinks.k1.hdfs.fileType = CompressedStream
#a1.sinks.k1.hdfs.codeC = gzip
#是否按照时间滚动文件夹
a1.sinks.k1.hdfs.round = true
#多少时间单位创建一个新的文件夹
a1.sinks.k1.hdfs.roundValue = 1
#重新定义时间单位
a1.sinks.k1.hdfs.roundUnit = hour
#是否使用本地时间戳
a1.sinks.k1.hdfs.useLocalTimeStamp = false
#积攒多少个Event才flush到HDFS一次
a1.sinks.k1.hdfs.batchSize = 10
#设置文件类型,可支持压缩
a1.sinks.k1.hdfs.fileType = DataStream
#
a1.sinks.k1.hdfs.rollInterval = 0
#设置每个文件的滚动大小
a1.sinks.k1.hdfs.rollSize = 134217700
#文件的滚动与Event数量无关
a1.sinks.k1.hdfs.rollCount = 100
#最小冗余数
a1.sinks.k1.hdfs.minBlockReplicas = 1
# 延长失联断开时间 5min
a1.sinks.k1.hdfs.callTimeout = 300000
#bind
a1.sources.r1.channels = c1
a1.sinks.k1.channel = c1

View File

@ -0,0 +1,22 @@
a1.sources = r1
a1.channels = c1
# a1.sinks = k1
a1.sources.r1.type = com.cqu.mqtt.service.MQTTSource
a1.sources.r1.host = phm102
a1.sources.r1.port = 1883
a1.sources.r1.topic = phm_data
a1.sources.r1.keepAliveInterval = 10000
a1.sources.r1.username = mqtt
a1.sources.r1.password = mqtt
a1.sources.r1.interceptors = i1
a1.sources.r1.interceptors.i1.type = com.cqu.mqtt.interceptor.ETLNotJsonInterceptor$MyBuilder
a1.channels.c1.type = org.apache.flume.channel.kafka.KafkaChannel
a1.channels.c1.kafka.bootstrap.servers = phm102:9092,phm103:9092,phm104:9092
a1.channels.c1.kafka.topic = phm_data
a1.channels.c1.parseAsFlumeEvent = false
a1.sources.r1.channels = c1

View File

@ -0,0 +1,22 @@
a1.sources = r1
a1.channels = c1
# a1.sinks = k1
a1.sources.r1.type = com.cqu.mqtt.service.MQTTSource
a1.sources.r1.host = 43.138.191.64
a1.sources.r1.port = 1883
a1.sources.r1.topic = phm_data
a1.sources.r1.keepAliveInterval = 10000
a1.sources.r1.username = mqtt
a1.sources.r1.password = mqtt
a1.sources.r1.interceptors = i1
a1.sources.r1.interceptors.i1.type = com.cqu.mqtt.interceptor.ETLNotJsonInterceptor$MyBuilder
a1.channels.c1.type = org.apache.flume.channel.kafka.KafkaChannel
a1.channels.c1.kafka.bootstrap.servers = hadoop102:9092,hadoop103:9092,hadoop104:9092
a1.channels.c1.kafka.topic = phm_data
a1.channels.c1.parseAsFlumeEvent = false
a1.sources.r1.channels = c1

View File

@ -0,0 +1,24 @@
package com.cqu.mqtt.entity;
import lombok.Data;
import java.util.UUID;
// Configuration holder describing one MQTT client; accessors are generated by Lombok's @Data.
@Data
public class MQTTClient {
    // Host of the MQTT broker to connect to.
    private String host = "119.91.214.52";
    // MQTT port; defaults to 1883 (the MQTT standard port).
    private Integer port=1883;
    // Client id; must differ from the publishing client's id
    // (a duplicate id makes the broker drop the older connection).
    private String client_id = UUID.randomUUID().toString();
    // Topic to subscribe to / publish on; the broker creates it on demand.
    private String topic;
    // Role of this client: "subscriber" or "publisher".
    private String status ="subscriber";
}

View File

@ -0,0 +1,21 @@
package com.cqu.mqtt.entity;
import lombok.Data;
// Connection options later copied onto Paho's MqttConnectOptions via Spring BeanUtils.
@Data
public class MQTTConnectOptions {
    // Whether the broker discards session state on disconnect.
    private boolean cleanSession =true;
    // Keep-alive interval passed to the broker.
    // NOTE(review): Paho's keepAliveInterval is in seconds — confirm 3000 (50 min) is intended.
    private Integer keepAliveInterval = 3000;
    // MQTT username.
    private String userName;
    // MQTT password (char[] as expected by Paho).
    private char[] password;
}

View File

@ -0,0 +1,22 @@
package com.cqu.mqtt.entity;
import com.jcraft.jsch.Channel;
import com.jcraft.jsch.ChannelSftp;
import com.jcraft.jsch.Session;
import lombok.Data;
/**
 * Holder bundling the three handles of one open SFTP connection so they can be
 * passed around and torn down together; accessors are generated by Lombok.
 *
 * @author xiaolong
 * @create 2022-05-28-16:29
 */
@Data
public class SFTP {
    private Session session;    // SSH session
    private Channel channel;    // connection channel opened on the session
    private ChannelSftp sftp;   // SFTP operations object (the channel cast to ChannelSftp)
}

View File

@ -0,0 +1,76 @@
package com.cqu.mqtt.interceptor;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONException;
import org.apache.flume.Context;
import org.apache.flume.Event;
import org.apache.flume.interceptor.Interceptor;
import java.nio.charset.StandardCharsets;
import java.util.Iterator;
import java.util.List;
/**
 * Flume interceptor that drops every event whose body is not valid JSON.
 * An event survives only if fastjson can parse its body.
 */
public class ETLNotJsonInterceptor implements Interceptor {

    @Override
    public void initialize() {
        // No state to set up.
    }

    /**
     * Validates a single event.
     *
     * @param event incoming event; its body is expected to be UTF-8 text
     * @return the event unchanged when the body parses as JSON, or
     *         {@code null} to signal that the event must be discarded
     */
    @Override
    public Event intercept(Event event) {
        String body = new String(event.getBody(), StandardCharsets.UTF_8);
        try {
            // fastjson throws JSONException on malformed/incomplete JSON.
            JSON.parseObject(body);
        } catch (JSONException e) {
            return null;
        }
        return event;
    }

    /**
     * Filters a batch in place: events rejected by {@link #intercept(Event)}
     * are removed. removeIf is the safe, idiomatic way to drop elements while
     * scanning (mutating the list inside a for-each would throw
     * ConcurrentModificationException).
     */
    @Override
    public List<Event> intercept(List<Event> events) {
        events.removeIf(event -> intercept(event) == null);
        return events;
    }

    @Override
    public void close() {
        // Nothing to release.
    }

    /** Builder hook referenced from the Flume agent configuration. */
    public static class MyBuilder implements Builder {

        @Override
        public Interceptor build() {
            return new ETLNotJsonInterceptor();
        }

        @Override
        public void configure(Context context) {
            // No configuration needed.
        }
    }
}

View File

@ -0,0 +1,88 @@
package com.cqu.mqtt.interceptor;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import org.apache.flume.Context;
import org.apache.flume.Event;
import org.apache.flume.interceptor.Interceptor;
import java.nio.charset.StandardCharsets;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.List;
/**
 * Flume interceptor that adds a "timestamp" header to every event, taken from
 * the JSON body's "timestamp" field (format yyyyMMddHHmmss) and converted to
 * epoch milliseconds — the form Flume's HDFS sink expects for time-based
 * path escaping (the collection time is already embedded in the JSON).
 */
public class TimeStampInterceptor implements Interceptor {

    @Override
    public void initialize() {
        // No state to set up.
    }

    /**
     * Adds the epoch-millis "timestamp" header to one event.
     *
     * @param event event whose body is a JSON string with a "timestamp" field
     * @return the same event; when the timestamp cannot be parsed the event is
     *         passed through unchanged (without the header)
     */
    @Override
    public Event intercept(Event event) {
        String body = new String(event.getBody(), StandardCharsets.UTF_8);
        JSONObject jsonObject = JSON.parseObject(body);
        String ts = jsonObject.getString("timestamp");
        // SimpleDateFormat is not thread-safe, so it is created per call
        // rather than shared as a field.
        SimpleDateFormat format = new SimpleDateFormat("yyyyMMddHHmmss");
        try {
            long epochMillis = format.parse(ts).getTime();
            event.getHeaders().put("timestamp", String.valueOf(epochMillis));
        } catch (ParseException e) {
            // Malformed timestamp: keep the event but leave the header off.
            e.printStackTrace();
        }
        return event;
    }

    @Override
    public List<Event> intercept(List<Event> events) {
        for (Event event : events) {
            intercept(event);
        }
        return events;
    }

    @Override
    public void close() {
        // Nothing to release.
    }

    /** Builder hook referenced from the Flume agent configuration. */
    public static class MyBuilder implements Builder {

        @Override
        public TimeStampInterceptor build() {
            return new TimeStampInterceptor();
        }

        @Override
        public void configure(Context context) {
            // No configuration needed.
        }
    }
}

View File

@ -0,0 +1,247 @@
package com.cqu.mqtt.service;
import com.cqu.mqtt.entity.MQTTClient;
import com.cqu.mqtt.entity.MQTTConnectOptions;
import org.apache.flume.Context;
import org.apache.flume.Event;
import org.apache.flume.EventDrivenSource;
import org.apache.flume.conf.Configurable;
import org.apache.flume.event.EventBuilder;
import org.apache.flume.source.AbstractSource;
import org.eclipse.paho.client.mqttv3.*;
import org.springframework.beans.BeanUtils;
import java.nio.charset.StandardCharsets;
import java.util.concurrent.TimeUnit;
/**
 * Custom Flume source that subscribes to an MQTT broker (Eclipse Paho) and
 * forwards every received MQTT message into the Flume channel.
 */
public class MQTTSource extends AbstractSource implements EventDrivenSource,Configurable {

    private SimpleMqttClient client = null;
    // Broker/client settings populated from the Flume agent configuration in configure().
    public MQTTClient MQTTObject=new MQTTClient();
    public MQTTConnectOptions connectOptionsObject =new MQTTConnectOptions();

    /**
     * Source initialization; the context carries every property set for this
     * source in the Flume agent file. Defaults are applied when a key is absent.
     * @param context Flume configuration context
     */
    @Override
    public void configure(Context context) {
        MQTTObject.setHost(context.getString("host","43.138.191.64"));
        MQTTObject.setPort(Integer.parseInt(context.getString("port","1883")));
        MQTTObject.setTopic(context.getString("topic","default"));
        connectOptionsObject.setCleanSession(Boolean.parseBoolean(context.getString("cleanSession","true")));
        connectOptionsObject.setKeepAliveInterval(Integer.parseInt(context.getString("keepAliveInterval","3000")));
        connectOptionsObject.setUserName(context.getString("username","mqtt"));
        connectOptionsObject.setPassword(context.getString("password","mqtt").toCharArray());
    }

    /**
     * AbstractSource lifecycle hook, runs when the Flume agent starts:
     * builds the MQTT client and begins the subscribe loop.
     */
    @Override
    public void start() {
        client = new SimpleMqttClient();
        client.runClient();
    }

    /**
     * AbstractSource lifecycle hook, runs when the Flume agent stops;
     * closes the MQTT connection.
     */
    @Override
    public void stop() {
        if (client != null) {
            System.out.println("生命周期结束,关闭mqtt");
            client.closeConn();
        }
    }

    /** Inner Paho client: connects, subscribes, and pushes messages into the channel. */
    public class SimpleMqttClient implements MqttCallback {

        MqttClient myClient;
        MqttConnectOptions connOpt;

        /**
         * Builds the Paho connect options from the configured values and
         * connects to the broker.
         * @return the topic handle for the configured topic
         */
        public MqttTopic getConfiguration(){
            connOpt=new MqttConnectOptions();
            // Copy matching properties (cleanSession, keepAliveInterval, credentials)
            // from our options object onto Paho's.
            BeanUtils.copyProperties(connectOptionsObject,connOpt);
            // Connect to Broker
            try {
                myClient = new MqttClient("tcp://"+MQTTObject.getHost()+":"+MQTTObject.getPort(), MQTTObject.getClient_id());
                myClient.setCallback(this);
                myClient.connect(connOpt);
                System.out.println("MQTT连接成功");
            } catch (MqttException e) {
                e.printStackTrace();
                // NOTE(review): exiting the JVM on a failed connect kills the
                // whole Flume agent — confirm this is intended.
                System.exit(-1);
            }
            System.out.println("Connected to " + MQTTObject.getHost()+":"+MQTTObject.getPort());
            MqttTopic topic =myClient.getTopic(MQTTObject.getTopic());
            return topic;
        }

        /**
         * Client entry point: connects to the broker, then either subscribes
         * (normal source role) or runs a small test publish loop.
         */
        public void runClient() {
            MqttTopic topic = getConfiguration();
            if ("subscriber".equals(MQTTObject.getStatus())) {
                try {
                    System.out.println("开始订阅信息");
                    int subQoS = 2;
                    // myClient.subscribe(myTopic, subQoS);
                    myClient.subscribe(MQTTObject.getTopic(), subQoS);
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }else if("publisher".equals(MQTTObject.getStatus())){
                //TODO publisher role is incomplete; publishes 10 test messages only
                System.out.println("开始发布消息");
                for (int i = 1; i <= 10; i++) {
                    String pubMsg = "{\"pubmsg\":" + i + "}";
                    int pubQoS = 2;
                    MqttMessage message = new MqttMessage(pubMsg.getBytes());
                    message.setQos(pubQoS);
                    message.setRetained(false);
                    // Publish the message
                    System.out.println("Publishing to topic \"" + MQTTObject
                            + "\" qos " + pubQoS);
                    MqttDeliveryToken token = null;
                    try {
                        // publish message to broker
                        token = topic.publish(message);
                        // Wait until the message has been delivered to the
                        // broker
                        token.waitForCompletion();
                        Thread.sleep(2000);
                    } catch (Exception e) {
                        e.printStackTrace();
                    }
                }
            }else {
                System.out.println("wrong status!");
            }
            // TODO disconnect handling is incomplete: the subscriber branch
            // below blocks this thread forever to keep the callback alive.
            try {
                // wait to ensure subscribed messages are delivered
                if ("subscriber".equals(MQTTObject.getStatus())) {
                    while (true) {
                        Thread.sleep(5000);
                    }
                }
                // myClient.disconnect();
            } catch (Exception e) {
                e.printStackTrace();
            } finally {
            }
        }

        /**
         * Paho callback invoked for every message on a subscribed topic;
         * wraps the raw payload in a Flume event and hands it to the channel processor.
         * @param s topic the message arrived on
         * @param mqttMessage received message
         * @throws Exception propagated to Paho on failure
         */
        @Override
        public void messageArrived(String s, MqttMessage mqttMessage) throws Exception {
            System.out.println("接收消息成功");
            // Map<String,String> header=new HashMap<>();
            // A topic header for Kafka was considered, but downstream consumers
            // (e.g. ML jobs) expect a bare JSON body, so no header is added.
            // header.put("topic",MQTTObject.getTopic());
            Event flumeEvent = EventBuilder.withBody(mqttMessage.getPayload());
            System.out.println("接收到的数据为:"+new String(mqttMessage.getPayload(), StandardCharsets.UTF_8));
            try {
                getChannelProcessor().processEvent(flumeEvent);
                System.out.println("消息到达到拦截器-------->");
            } catch (Exception e) {
                // TODO: handle exception
                System.out.println("消息发送失败");
                e.printStackTrace();
            }
        }

        /**
         * Paho callback invoked when delivery of a published message completes.
         * @param iMqttDeliveryToken token of the delivered message
         */
        @Override
        public void deliveryComplete(IMqttDeliveryToken iMqttDeliveryToken) {
            System.out.println("收到了一条数据");
            try {
                System.out.println(iMqttDeliveryToken.getMessage().getPayload());
            } catch (MqttException e) {
                e.printStackTrace();
            }
        }

        /**
         * Closes the MQTT connection if it is still open (called from stop()).
         */
        public void closeConn() {
            if (myClient != null) {
                if (myClient.isConnected()) {
                    try {
                        System.out.println("关闭mqtt");
                        myClient.disconnect();
                    } catch (MqttException e) {
                        // TODO Auto-generated catch block
                        e.printStackTrace();
                    }
                }
            }
        }

        /**
         * Paho callback invoked when the connection is lost; waits 5 seconds
         * and restarts the client. Note: this also fires immediately if a
         * publisher reuses the same client_id as this subscriber.
         * @param throwable cause of the disconnect
         */
        @Override
        public void connectionLost(Throwable throwable) {
            System.out.println("Connection lost!,please try start the client again!");
            System.out.println("五秒后尝试重启...");
            try {
                TimeUnit.SECONDS.sleep(5);
                runClient();
            } catch (InterruptedException e) {
                System.out.println("重启client失败");
                e.printStackTrace();
            }
        }
    }
}

View File

@ -0,0 +1,79 @@
package com.cqu.mqtt.utils;
//import org.junit.Test;
import org.junit.Test;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.Arrays;
/**
 * Generates per-topic Flume configuration files by copying the template
 * configs and substituting the topic name.
 *
 * @author xiaolong
 * @create 2022-05-27-12:25
 */
public class ConfUtils {

    /**
     * Copies the file at srcPath to destPath, replacing every occurrence of
     * the template topic "phm_data" with the given topic.
     *
     * The previous implementation streamed 1024-byte chunks and replaced
     * within each chunk, which silently missed occurrences of "phm_data"
     * straddling a chunk boundary and could corrupt multi-byte UTF-8
     * characters split across chunks. Reading the whole (small) config file
     * at once fixes both problems.
     *
     * @param srcPath  path of the template config file
     * @param destPath path of the generated config file
     * @param topic    topic name substituted for "phm_data"
     */
    public static void copyFile(String srcPath, String destPath, String topic) {
        try {
            byte[] raw = Files.readAllBytes(Paths.get(srcPath));
            String content = new String(raw, StandardCharsets.UTF_8);
            String replaced = content.replace("phm_data", topic);
            Files.write(Paths.get(destPath), replaced.getBytes(StandardCharsets.UTF_8));
            System.out.println("复制成功");
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /**
     * Generates both per-topic Flume configs (mqtt->kafka and kafka->hdfs)
     * from the two templates under src/main/java/com/cqu/mqtt/conf.
     *
     * @param topic topic name to bake into the generated configs
     */
    public static void genConf(String topic) {
        // TODO later switch these to HDFS paths
        String srcPath = "src/main/java/com/cqu/mqtt/conf/mqtt-flume-kafka.conf";
        String srcPath2 = "src/main/java/com/cqu/mqtt/conf/kafka-flume-hdfs.conf";
        String destPath = "src/main/java/com/cqu/mqtt/conf/" + topic + "/" + "mqtt-flume-kafka-" + topic + ".conf";
        String destPath2 = "src/main/java/com/cqu/mqtt/conf/" + topic + "-kafka-flume-hdfs.conf";
        copyFile(srcPath, destPath, topic);
        copyFile(srcPath2, destPath2, topic);
    }
}

View File

@ -0,0 +1,57 @@
package com.cqu.mqtt.utils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import java.io.IOException;
import java.net.URI;
/**
 * Uploads the generated per-topic Flume configs to HDFS.
 *
 * @author xiaolong
 * @create 2022-05-28-11:16
 */
public class HDFSUtils {
    // Shared HDFS handle; set by init() and released by close().
    // NOTE(review): fs is static but init() is an instance-level JUnit @Before,
    // so calling upload() outside a JUnit run leaves fs null (NPE) — confirm
    // the intended usage of this class as a utility vs. a test.
    private static FileSystem fs;

    /**
     * Opens the HDFS connection.
     * uri: HDFS entry point (hdfs://phm102:8020)
     * conf: sets replication factor 3 for files written by this client
     * user: "root" — the identity used for the file operations
     */
    @Before
    public void init() throws IOException, InterruptedException {
        // 1. connect to HDFS
        URI uri = URI.create("hdfs://phm102:8020");
        Configuration conf = new Configuration();
        conf.set("dfs.replication","3");
        String user = "root";
        fs = FileSystem.get(uri, conf, user);
    }

    @After
    public void close() throws IOException {
        // 3. release the connection
        fs.close();
    }

    /**
     * Copies the two generated Flume configs for the given topic from the
     * local conf directory to /origin_data/phm/conf on HDFS, overwriting any
     * existing copy (2nd arg true) and keeping the local file (1st arg false).
     */
    public static void upload(String topic){
        try {
            fs.copyFromLocalFile(false,true,
                    new Path("src/main/java/com/cqu/mqtt/conf/" + topic + "-mqtt-flume-kafka.conf"),
                    new Path("/origin_data/phm/conf/" + topic + "-mqtt-flume-kafka.conf"));
            fs.copyFromLocalFile(false,true,
                    new Path("src/main/java/com/cqu/mqtt/conf/" + topic + "-kafka-flume-hdfs.conf"),
                    new Path("/origin_data/phm/conf/" + topic + "-kafka-flume-hdfs.conf"));
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}

View File

@ -0,0 +1,17 @@
package com.cqu.mqtt.utils;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONException;
/**
 * JSON helpers built on fastjson.
 */
public class JSONUtils {

    /**
     * Checks whether a string is well-formed JSON.
     *
     * @param log candidate JSON text
     * @return true when fastjson can parse it, false otherwise
     */
    public static boolean isJSONValidate(String log){
        boolean wellFormed;
        try {
            JSON.parse(log);
            wellFormed = true;
        } catch (JSONException parseError) {
            wellFormed = false;
        }
        return wellFormed;
    }
}

View File

@ -0,0 +1,162 @@
package com.cqu.mqtt.utils;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.UnsupportedEncodingException;
import org.apache.commons.lang.StringUtils;
import ch.ethz.ssh2.Connection;
import ch.ethz.ssh2.Session;
import ch.ethz.ssh2.StreamGobbler;
/**
 * Remote-execution helper: logs in to a Linux host over SSH (ganymed-ssh2)
 * and runs shell commands, returning their output.
 *
 * @author xiaolong
 * @create 2022-05-28-12:13
 */
public class LinuxUtils {
    // Character encoding used to decode command output; defaults to UTF-8.
    // Mutable via setCharset(), so this cannot be final.
    private static String DEFAULT_CHARSET = "UTF-8";
    private Connection conn;
    private String ip;
    private String userName;
    private String userPwd;

    public LinuxUtils(String ip, String userName, String userPwd) {
        this.ip = ip;
        this.userName = userName;
        this.userPwd = userPwd;
    }

    public LinuxUtils() {
    }

    /**
     * Logs in to the remote host with the configured credentials.
     *
     * @author Ickes
     * @since V0.1
     * @return true when authentication succeeded, false otherwise
     */
    public Boolean login(){
        boolean flg=false;
        try {
            this.conn = new Connection(this.ip);
            this.conn.connect();  // open the TCP connection
            flg=this.conn.authenticateWithPassword(this.userName, this.userPwd);  // authenticate
        } catch (IOException e) {
            e.printStackTrace();
        }
        return flg;
    }

    /**
     * Runs a shell command or script on the remote host.
     *
     * @author Ickes
     * @param cmd command to execute
     * @return the command's stdout; when stdout is blank (which indicates the
     *         command failed) stderr is returned instead
     * @since V0.1
     */
    public String execute(String cmd){
        String result="";
        try {
            if(login()){
                Session session = conn.openSession();  // open one exec session
                try {
                    session.execCommand(cmd);
                    result = processStdout(session.getStdout(), DEFAULT_CHARSET);
                    // Blank stdout means the script failed; fall back to stderr.
                    if(StringUtils.isBlank(result)){
                        result = processStdout(session.getStderr(), DEFAULT_CHARSET);
                    }
                } finally {
                    // Close the session before its owning connection
                    // (the original closed the connection first).
                    session.close();
                    conn.close();
                }
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
        return result;
    }

    /**
     * Drains one output stream of the remote command into a string.
     *
     * @author Ickes
     * @param in input stream from the ssh session
     * @param charset encoding used to decode the bytes
     * @since V0.1
     * @return the decoded text, one '\n' appended per line
     */
    private String processStdout(InputStream in, String charset){
        StringBuffer buffer = new StringBuffer();
        // StreamGobbler buffers the remote stream so reading cannot deadlock;
        // try-with-resources guarantees the reader (and wrapped stream) is closed.
        try (BufferedReader br = new BufferedReader(
                new InputStreamReader(new StreamGobbler(in), charset))) {
            String line;
            while((line=br.readLine()) != null){
                buffer.append(line+"\n");
            }
        } catch (UnsupportedEncodingException e) {
            e.printStackTrace();
        } catch (IOException e) {
            e.printStackTrace();
        }
        return buffer.toString();
    }

    public static void setCharset(String charset) {
        DEFAULT_CHARSET = charset;
    }

    public Connection getConn() {
        return conn;
    }

    public void setConn(Connection conn) {
        this.conn = conn;
    }

    public String getIp() {
        return ip;
    }

    public void setIp(String ip) {
        this.ip = ip;
    }

    public String getUserName() {
        return userName;
    }

    public void setUserName(String userName) {
        this.userName = userName;
    }

    public String getUserPwd() {
        return userPwd;
    }

    public void setUserPwd(String userPwd) {
        this.userPwd = userPwd;
    }

    public static void main(String[] args) {
        LinuxUtils hadoop102=new LinuxUtils("192.168.58.102", "atguigu","ysl18398857235");
        LinuxUtils hadoop103=new LinuxUtils("192.168.58.103", "atguigu","ysl18398857235");
        // Start the two Flume agents remotely.
        // Fixed: "$FLUMW_HOME" typo, and the missing space before "1>" in the
        // second command (it previously produced "...console1>...").
        String result = hadoop102.execute("nohup flume-ng agent -c " +
                "$FLUME_HOME/conf -f $FLUME_HOME/jobs/mqttservice/mqtt-flume-kafka.conf -n a1 -Dflume.root.logger=INFO,console " +
                "1>$FLUME_HOME/logs/flume.log 2>&1 &");
        String result2 = hadoop103.execute("nohup flume-ng agent -c " +
                "$FLUME_HOME/conf -f $FLUME_HOME/jobs/mqttservice/kafka-flume-hdfs.conf -n a1 -Dflume.root.logger=INFO,console " +
                "1>$FLUME_HOME/logs/flume.log 2>&1 &");
        // Kill the agents by config-file name.
        String kill1 = hadoop102.execute("ps -ef | grep mqtt-flume-kafka.conf | grep -v grep | awk '{print $2}' | xargs -n1 kill -9");
        String kill2 = hadoop103.execute("ps -ef | grep kafka-flume-hdfs.conf | grep -v grep | awk '{print $2}' | xargs -n1 kill -9");
    }
}

View File

@ -0,0 +1,276 @@
package com.cqu.mqtt.utils;
import com.cqu.mqtt.entity.SFTP;
import com.jcraft.jsch.*;
import lombok.extern.slf4j.Slf4j;
import org.apache.log4j.LogManager;
import java.io.*;
import java.util.*;
/**
 * Static SFTP helpers (upload/download/delete/list/mkdir) built on JSch.
 * Each operation opens a fresh connection via getConnect() and tears it down
 * in a finally block.
 *
 * @author xiaolong
 * @create 2022-05-30-12:18
 */
@Slf4j // enables log.error() etc. via Lombok
public class SFTPUtils {

    /**
     * Connects to the SFTP server and fills the given holder with the
     * session, channel, and ChannelSftp handles.
     *
     * NOTE(review): host/port/user/password are hardcoded here, including a
     * plaintext password committed to source control — move to configuration
     * or a secret store. Also, on a failed connect this method logs the error
     * but continues, so the subsequent openChannel() call will fail again.
     */
    public static void getConnect(SFTP s) throws Exception {
        // Key password
        // String privateKey ="key";
        // /** Key file path */
        // String passphrase ="path";
        // Host
        // String host ="hadoop102";
        String host ="phm103";
        // Port
        int port =22;
        // Username
        // String username ="atguigu";
        String username ="root";
        // Password
        // String password ="ysl18398857235";
        String password ="YSLysl325@@@";
        Session session = null;
        Channel channel = null;
        ChannelSftp sftp = null;  // SFTP operations object
        JSch jsch = new JSch();
        // Key-based login: set the key info on jsch before getSession to enable it.
        // if (privateKey != null && !"".equals(privateKey)) {
        //     if (passphrase != null && "".equals(passphrase)) {
        //         // key protected by a passphrase
        //         jsch.addIdentity(privateKey, passphrase);
        //     } else {
        //         // key without a passphrase
        //         jsch.addIdentity(privateKey);
        //     }
        // }
        session = jsch.getSession(username, host, port);
        session.setPassword(password);
        Properties config = new Properties();
        config.put("StrictHostKeyChecking", "no"); // skip strict host-key verification
        session.setConfig(config);
        try {
            session.connect();
        } catch (Exception e) {
            if (session.isConnected())
                session.disconnect();
            log.error("连接服务器失败,请检查主机[" + host + "],端口[" + port
                    + "],用户名[" + username + "],端口[" + port
                    + "]是否正确,以上信息正确的情况下请检查网络连接是否正常或者请求被防火墙拒绝.");
        }
        channel = session.openChannel("sftp");
        try {
            // open the sftp channel
            channel.connect();
        } catch (Exception e) {
            if (channel.isConnected())
                channel.disconnect();
            log.error("连接服务器失败,请检查主机[" + host + "],端口[" + port
                    + "],用户名[" + username + "],密码是否正确,以上信息正确的情况下请检查网络连接是否正常或者请求被防火墙拒绝.");
        }
        sftp = (ChannelSftp) channel;
        s.setChannel(channel);
        s.setSession(session);
        s.setSftp(sftp);
    }

    /**
     * Tears down an SFTP connection: sftp operations object, then channel,
     * then session.
     */
    public static void disConn(Session session,Channel channel,ChannelSftp sftp)throws Exception{
        if(null != sftp){
            sftp.disconnect();
            sftp.exit();
            sftp = null;
        }
        if(null != channel){
            channel.disconnect();
            channel = null;
        }
        if(null != session){
            session.disconnect();
            session = null;
        }
    }

    /**
     * Uploads a local file into the given remote directory (relative to the
     * SFTP user's root unless full-disk access is configured). Creates the
     * directory when it does not exist.
     *
     * @param directory  remote target directory
     * @param uploadFile full path of the local file to upload
     */
    public static void upload(String directory, String uploadFile) throws Exception {
        SFTP s = new SFTP();
        getConnect(s);  // establish the connection
        Session session = s.getSession();
        Channel channel = s.getChannel();
        ChannelSftp sftp = s.getSftp();  // SFTP operations object
        try {
            try {
                sftp.cd(directory);  // enter the target directory
            } catch (SftpException sException) {
                if (sftp.SSH_FX_NO_SUCH_FILE == sException.id) {  // target directory missing
                    sftp.mkdir(directory);  // create it
                    sftp.cd(directory);     // then enter it
                }
            }
            File file = new File(uploadFile);
            InputStream in = new FileInputStream(file);
            sftp.put(in, file.getName());
            in.close();
        } catch(Exception e){
            throw new Exception(e.getMessage(), e);
        } finally{
            disConn(session, channel, sftp);
        }
    }

    /**
     * Downloads a remote file to a local directory, creating the local
     * directory when missing.
     *
     * @param directory    remote directory (relative to the SFTP root)
     * @param downloadFile remote file name to download
     * @param saveFile     local directory to save into
     */
    public static void download(String directory, String downloadFile, String saveFile) throws Exception {
        SFTP s = new SFTP();
        getConnect(s);  // establish the connection
        Session session = s.getSession();
        Channel channel = s.getChannel();
        ChannelSftp sftp = s.getSftp();  // SFTP operations object
        try {
            sftp.cd(directory);  // enter the remote directory
            File file = new File(saveFile);
            boolean bFile;
            bFile = false;
            bFile = file.exists();
            if (!bFile) {
                bFile = file.mkdirs();  // create the local directory
            }
            OutputStream out = new FileOutputStream(new File(saveFile, downloadFile));
            sftp.get(downloadFile, out);
            out.flush();
            out.close();
        } catch (Exception e) {
            throw new Exception(e.getMessage(), e);
        } finally {
            disConn(session, channel, sftp);
        }
    }

    /**
     * Deletes a remote file.
     * @param directory  directory containing the file
     * @param deleteFile file name to delete
     */
    public static void delete(String directory, String deleteFile) throws Exception {
        SFTP s=new SFTP();
        getConnect(s);  // establish the connection
        Session session = s.getSession();
        Channel channel = s.getChannel();
        ChannelSftp sftp = s.getSftp();  // SFTP operations object
        try {
            sftp.cd(directory);  // enter the parent directory of the target
            sftp.rm(deleteFile); // remove the file
        } catch (Exception e) {
            throw new Exception(e.getMessage(),e);
        } finally {
            disConn(session,channel,sftp);
        }
    }

    /**
     * Lists the file names in a remote directory, skipping "." and "..".
     * @param directory remote directory to list
     * @return list of file names
     * @throws Exception on connection or listing failure
     */
    public static List<String> listFiles(String directory) throws Exception {
        SFTP s=new SFTP();
        getConnect(s);  // establish the connection
        Session session = s.getSession();
        Channel channel = s.getChannel();
        ChannelSftp sftp = s.getSftp();  // SFTP operations object
        Vector fileList=null;
        List<String> fileNameList = new ArrayList<String>();
        fileList = sftp.ls(directory);  // every entry in the directory
        disConn(session,channel,sftp);
        for (Object o : fileList) {
            String fileName = ((ChannelSftp.LsEntry) o).getFilename();
            if (".".equals(fileName) || "..".equals(fileName)) {
                continue;
            }
            fileNameList.add(fileName);
        }
        return fileNameList;
    }

    /**
     * Creates a remote directory.
     * @param directory parent directory in which to create it
     * @param dir       name of the directory to create
     */
    public static void creatDir(String directory,String dir) throws Exception {
        SFTP s=new SFTP();
        getConnect(s);  // establish the connection
        Session session = s.getSession();
        Channel channel = s.getChannel();
        ChannelSftp sftp = s.getSftp();  // SFTP operations object
        try {
            sftp.cd(directory);
            sftp.mkdir(dir);
        } catch (Exception e) {
            throw new Exception(e.getMessage(),e);
        } finally {
            disConn(session,channel,sftp);
        }
    }

    /**
     * Changes into a remote directory (one level at a time).
     * @param directory directory to enter
     * @throws Exception on connection or cd failure
     */
    public static void cd(String directory)throws Exception {
        SFTP s=new SFTP();
        getConnect(s);  // establish the connection
        Session session = s.getSession();
        Channel channel = s.getChannel();
        ChannelSftp sftp = s.getSftp();  // SFTP operations object
        try {
            sftp.cd(directory);  // directories must be entered level by level
        } catch (Exception e) {
            throw new Exception(e.getMessage(),e);
        } finally {
            disConn(session,channel,sftp);
        }
    }
}

View File

@ -0,0 +1,16 @@
<?xml version="1.0" encoding="UTF-8"?>
<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
<configuration>
<property>
<name>dfs.replication</name>
<value>2</value>
</property>
<property>
<name>dfs.client.use.datanode.hostname</name>
<value>true</value>
<description>only config in clients</description>
</property>
</configuration>

View File

@ -0,0 +1,26 @@
<?xml version="1.0" encoding="UTF-8"?>
<Configuration>
<Appenders>
<Console name="STDOUT" target="SYSTEM_OUT">
<PatternLayout pattern="%d %-5p [%t] %C{2} (%F:%L) - %m%n"/>
</Console>
<RollingFile name="RollingFile" fileName="logs/strutslog1.log"
filePattern="logs/$${date:yyyy-MM}/app-%d{MM-dd-yyyy}-%i.log.gz">
<PatternLayout>
<Pattern>%d{MM-dd-yyyy} %p %c{1.} [%t] -%M-%L- %m%n</Pattern>
</PatternLayout>
<Policies>
<TimeBasedTriggeringPolicy />
<SizeBasedTriggeringPolicy size="1 KB"/>
</Policies>
<DefaultRolloverStrategy fileIndex="max" max="2"/>
</RollingFile>
</Appenders>
<Loggers>
<Logger name="com.opensymphony.xwork2" level="WARN"/>
<Logger name="org.apache.struts2" level="WARN"/>
<Root level="warn">
<AppenderRef ref="STDOUT"/>
</Root>
</Loggers>
</Configuration>

View File

@ -0,0 +1,115 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<parent>
<artifactId>common</artifactId>
<groupId>com.cqu</groupId>
<version>0.0.1-SNAPSHOT</version>
</parent>
<modelVersion>4.0.0</modelVersion>
<artifactId>common-linux</artifactId>
<dependencies>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-client</artifactId>
<version>3.1.3</version>
</dependency>
<!-- 这里不注释会和springboot里面的jar冲突-->
<!-- <dependency>-->
<!-- <groupId>org.apache.logging.log4j</groupId>-->
<!-- <artifactId>log4j-slf4j-impl</artifactId>-->
<!-- <version>2.12.0</version>-->
<!-- </dependency>-->
<!-- 统一flume和hadoop中guava版本-->
<dependency>
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
<version>27.0-jre</version>
</dependency>
<!--远程连接linux执行命令-->
<dependency>
<groupId>ch.ethz.ganymed</groupId>
<artifactId>ganymed-ssh2</artifactId>
<version>build210</version>
</dependency>
<dependency>
<groupId>org.apache.directory.studio</groupId>
<artifactId>org.apache.commons.lang</artifactId>
<version>2.6</version>
</dependency>
<!--http3远程工具-->
<dependency>
<groupId>com.squareup.okhttp3</groupId>
<artifactId>okhttp</artifactId>
<version>3.3.0</version>
<exclusions>
<exclusion>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
</exclusion>
</exclusions>
</dependency>
<!--连接SFTP服务器工具-->
<dependency>
<groupId>commons-net</groupId>
<artifactId>commons-net</artifactId>
<version>3.8.0</version>
</dependency>
<dependency>
<groupId>com.jcraft</groupId>
<artifactId>jsch</artifactId>
<version>0.1.55</version>
</dependency>
<dependency>
<groupId>com.cqu</groupId>
<artifactId>common-utils</artifactId>
<version>0.0.1-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<version>4.13.2</version>
<!-- <scope>compile</scope>-->
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-assembly-plugin</artifactId>
<version>3.0.0</version>
<configuration>
<descriptorRefs>
<descriptorRef>jar-with-dependencies</descriptorRef>
</descriptorRefs>
</configuration>
<executions>
<execution>
<id>make-assembly</id>
<phase>package</phase>
<goals>
<goal>single</goal>
</goals>
</execution>
</executions>
</plugin>
</plugins>
</build>
</project>

View File

@ -0,0 +1,16 @@
package com.cqu.linux.Exception;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
/**
 * Exception raised when a remote (SSH/FTP) login attempt fails.
 *
 * <p>Carries an application status code (e.g. an FTP reply code such as 530)
 * alongside the message. Unlike the previous Lombok-generated version, the
 * message is also passed to {@link Exception}, so stack traces and logging
 * frameworks render it without relying on generated accessors.</p>
 */
public class LoginErrorException extends Exception {

    /** Application-level status code, e.g. 404 or an FTP reply code. */
    private Integer code;

    /** Detail message; mirrors {@link #getMessage()}. */
    private String message;

    /** No-arg constructor kept for compatibility with the former Lombok API. */
    public LoginErrorException() {
        super();
    }

    /**
     * @param code    application status code
     * @param message human-readable failure description
     */
    public LoginErrorException(Integer code, String message) {
        super(message); // let Throwable carry the message too (printStackTrace/toString)
        this.code = code;
        this.message = message;
    }

    public Integer getCode() {
        return code;
    }

    public void setCode(Integer code) {
        this.code = code;
    }

    @Override
    public String getMessage() {
        return message;
    }

    public void setMessage(String message) {
        this.message = message;
    }
}

View File

@ -0,0 +1,32 @@
package com.cqu.linux.entity;
import ch.ethz.ssh2.Connection;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
/**
 * Holds the SSH connection settings for one Linux host: a ganymed-ssh2
 * {@link Connection} handle plus ip / username / password credentials.
 *
 * NOTE(review): default credentials are hard-coded below and should move to
 * external configuration — committing passwords to source control is a security risk.
 * NOTE(review): the default "ip" is an HDFS URI ("hdfs://phm102:8020"), not a plain
 * hostname — confirm which form ganymed's Connection actually expects.
 */
@Data
//@NoArgsConstructor
//@AllArgsConstructor
public class LinuxConnection {
// character-set constants used when decoding remote command output (default UTF-8)
public static final String DEFAULTCHART = "UTF-8";
public static final String GBKCHART = "GBK";
private Connection conn; // live SSH connection; null until login succeeds
private String ip = "hdfs://phm102:8020";
private String userName = "root";
private String password = "dingjiawen.123";
/** No-arg constructor: keeps the hard-coded defaults above. */
public LinuxConnection() {
}
/** Overrides the default host and credentials. */
public LinuxConnection(String ip, String userName, String password) {
this.ip = ip;
this.userName = userName;
this.password = password;
}
}

View File

@ -0,0 +1,22 @@
package com.cqu.linux.entity;
import com.jcraft.jsch.Channel;
import com.jcraft.jsch.ChannelSftp;
import com.jcraft.jsch.Session;
import lombok.Data;
/**
 * Simple holder for the three JSch objects that make up one live SFTP connection.
 * Populated by LinuxUtils#getConnect and torn down by LinuxUtils#disConn.
 *
 * @author xiaolong
 * @create 2022-05-28-16:29
 */
@Data
public class SFTP {
private Session session;// SSH session (authentication/transport)
private Channel channel;// channel opened on the session ("sftp" type)
private ChannelSftp sftp;// SFTP command handle used for cd/put/get/rm
}

View File

@ -0,0 +1,159 @@
package com.cqu.linux.utils;
import com.cqu.utils.utils.ReadResourcesFileUtil;
import org.apache.commons.io.FileUtils;
import java.io.*;
/**
 * Helpers that copy small template files while replacing the placeholder topic
 * name "phm_data" with a real topic, plus a recursive delete helper.
 */
public class CopyFileUtils {

    /**
     * Reads everything from {@code in} into memory and returns it as one String.
     * Buffering the whole stream (the templates are small) avoids the corruption
     * that per-1024-byte decoding caused when a multi-byte character — or the
     * "phm_data" token itself — straddled a chunk boundary.
     */
    private static String readFully(InputStream in) throws IOException {
        ByteArrayOutputStream buf = new ByteArrayOutputStream();
        byte[] buffer = new byte[1024];
        int len;
        while ((len = in.read(buffer)) != -1) {
            buf.write(buffer, 0, len);
        }
        return buf.toString();
    }

    /** Writes {@code content} with "phm_data" replaced by {@code topic} to {@code fos}. */
    private static void writeReplaced(String content, OutputStream fos, String topic) throws IOException {
        System.out.println(content);
        String newStr = content.replace("phm_data", topic);
        fos.write(newStr.getBytes(), 0, newStr.getBytes().length);
    }

    /** Creates the parent directory of {@code destFile} if it does not exist yet. */
    private static void ensureParentDir(File destFile) {
        // getParentFile() replaces substring(lastIndexOf("\\")), which only worked
        // with Windows path separators.
        File parent = destFile.getAbsoluteFile().getParentFile();
        if (parent != null) {
            System.out.println(parent.getAbsolutePath());
            if (!parent.exists()) {
                parent.mkdirs();
            }
        }
    }

    /**
     * Copies the file at {@code srcPath} to {@code destPath}, replacing every
     * occurrence of "phm_data" with {@code topic}; parent directories of the
     * destination are created as needed. I/O errors are printed, not rethrown.
     */
    public static void copyFile(String srcPath, String destPath, String topic) {
        try {
            File srcFile = new File(srcPath);
            File destFile = new File(destPath);
            ensureParentDir(destFile);
            try (FileInputStream fis = new FileInputStream(srcFile);
                 FileOutputStream fos = new FileOutputStream(destFile)) {
                writeReplaced(readFully(fis), fos, topic);
            }
            System.out.println("复制成功");
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /**
     * Same as {@link #copyFile} but the source is read from the classpath
     * (resources folder) via ReadResourcesFileUtil.
     *
     * @param srcPath  resource name on the classpath
     * @param destPath local destination path
     * @param topic    replacement for the "phm_data" placeholder
     */
    public static void copyFileByResource(String srcPath, String destPath, String topic) {
        File destFile = new File(destPath);
        // Create the destination directory BEFORE opening the output stream —
        // the original opened the FileOutputStream first, which fails whenever
        // the directory does not exist yet.
        ensureParentDir(destFile);
        try (FileOutputStream fos = new FileOutputStream(destFile);
             InputStream fis = ReadResourcesFileUtil.readBatchBackToInputSteam(srcPath)) {
            writeReplaced(readFully(fis), fos, topic);
            System.out.println("复制成功");
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /**
     * Copies between two already-open streams (used for HDFS), applying the same
     * topic replacement; both streams are closed when done, as before.
     */
    public static void copyDFSFile(InputStream fis, OutputStream fos, String topic) {
        try {
            writeReplaced(readFully(fis), fos, topic);
            System.out.println("复制成功");
        } catch (IOException e) {
            e.printStackTrace();
        } finally {
            try {
                if (fos != null) {
                    fos.close();
                }
            } catch (IOException e) {
                e.printStackTrace();
            }
            try {
                if (fis != null) {
                    fis.close();
                }
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    }

    /** Recursively deletes {@code srcFileName} if it exists (best-effort; errors are printed). */
    public static void deleteFile(String srcFileName) {
        File file = new File(srcFileName);
        if (file.exists()) {
            try {
                FileUtils.deleteDirectory(file);
                System.out.println("删除文件:" + srcFileName);
            } catch (IOException e) {
                System.out.println("删除文件失败,尝试重新删除");
                e.printStackTrace();
            }
        }
    }
}

View File

@ -0,0 +1,134 @@
package com.cqu.linux.utils;
import com.cqu.utils.utils.MyPropertiesUtil;
import com.cqu.utils.utils.ReadResourcesFileUtil;
import lombok.extern.slf4j.Slf4j;
import org.junit.Test;
import java.io.InputStream;
import java.util.Properties;
/**
 * Generates per-topic Flume configuration from the two bundled templates
 * (mqtt-flume-kafka.conf / kafka-flume-hdfs.conf), uploads them to the two
 * collector hosts over SFTP and starts the corresponding flume-ng agents.
 *
 * Typical entry point: {@link #execute(String)}.
 */
@Slf4j
public class FlumeUtils {
// All fields below are populated by execute() from flumeConfig.properties.
private static Properties properties;
private static String host1; // phm102 — runs the mqtt -> kafka agent
private static String username1;
private static String password1;
private static String host2; // phm103 — runs the kafka -> hdfs agent
private static String username2;
private static String password2;
private static String localPath; // template.base.localPath; assigned in uploadToLinux()
/**
 * Copies both templates locally, replacing the placeholder topic with {@code topic}.
 *
 * @param topic
 */
private static void copy(String topic) {
// TODO later switch to an HDFS path
String srcPath = "mqtt-flume-kafka.conf";
String srcPath2 = "kafka-flume-hdfs.conf";
String destPath = localPath + topic + "/mqtt-flume-kafka-" + topic + ".conf";
String destPath2 = localPath + topic + "/kafka-flume-hdfs-" + topic + ".conf";
// System.out.println(srcPath);
// System.out.println(srcPath2);
CopyFileUtils.copyFileByResource(srcPath, destPath, topic);
CopyFileUtils.copyFileByResource(srcPath2, destPath2, topic);
}
/**
 * The upload API consumes an InputStream, so the replaced file is written
 * locally first. TODO interim workaround: delete the local copy after upload.
 *
 * @param topic
 */
private static void delete(String topic) {
String destPath = localPath + topic;
String destPath2 = localPath + topic; // NOTE(review): identical to destPath — second delete is redundant
CopyFileUtils.deleteFile(destPath);
CopyFileUtils.deleteFile(destPath2);
}
/**
 * Uploads the generated config files to the two Linux hosts; on success the
 * local copies are removed.
 *
 * @param topic
 */
private static void uploadToLinux(String topic) {
localPath = properties.getProperty("template.base.localPath");
String linuxPath = properties.getProperty("template.base.linuxPath");
copy(topic);
boolean b = new LinuxUtils().InitLinuxConnection(host1, username1, password1).uploadFileBySFTP(localPath + topic, linuxPath, "mqtt-flume-kafka-" + topic + ".conf");
boolean c = new LinuxUtils().InitLinuxConnection(host2, username2, password2).uploadFileBySFTP(localPath + topic, linuxPath, "kafka-flume-hdfs-" + topic + ".conf");
if (b && c) {
System.out.println("上传成功");
delete(topic);
} else {
System.out.println("上传失败,请尝试重新上传");
}
}
/**
 * End-to-end: load credentials, generate and upload the configs, then start
 * both flume-ng agents remotely (each command's output is printed).
 */
public static void execute(String topic) {
//load the configuration file
properties = MyPropertiesUtil.load("flumeConfig.properties");
host1 = properties.getProperty("phm102.host");
username1 = properties.getProperty("phm102.username");
password1 = properties.getProperty("phm102.password");
host2 = properties.getProperty("phm103.host");
username2 = properties.getProperty("phm103.username");
password2 = properties.getProperty("phm103.password");
uploadToLinux(topic);
System.out.println("开启flume第一阶段");
String execute1 = new LinuxUtils().InitLinuxConnection(host1, username1, password1).execute("nohup flume-ng agent -c $FLUME_HOME/conf -f /opt/module/flume-1.9.0/jobs/phm/mqtt-flume-kafka-" + topic + ".conf -n a1 -Dflume.root.logger=INFO,console 1>$FLUME_HOME/logs/flume.log 2>&1 &");
System.out.println(execute1);
// log.debug(execute1);
System.out.println("开启flume第二阶段");
String execute2 = new LinuxUtils().InitLinuxConnection(host2, username2, password2).execute("nohup flume-ng agent -c $FLUME_HOME/conf -f /opt/module/flume-1.9.0/jobs/phm/kafka-flume-hdfs-" + topic + ".conf -n a1 -Dflume.root.logger=INFO,console 1>$FLUME_HOME/logs/flume.log 2>&1 &");
System.out.println(execute2);
// log.debug(execute2);
}
// NOTE(review): hard-coded demo topic; consider reading from args.
public static void main(String[] args) {
execute("ysl");
}
@Test
public void test(){
//load the configuration file
properties = MyPropertiesUtil.load("flumeConfig.properties");
host1 = properties.getProperty("phm102.host");
username1 = properties.getProperty("phm102.username");
System.out.println(username1);
}
@Test
public void test1(){
//load the configuration file
// File read = ReadResourcesFileUtil.read("flumeConfig.properties");
// InputStream read = ReadResourcesFileUtil.readBackToInputSteam("mqtt-flume-kafka.conf");
InputStream read = ReadResourcesFileUtil.readBatchBackToInputSteam("kafka-flume-hdfs.conf");
// InputStream read = ReadResourcesFileUtil.readBackToInputSteam("flumeConfig.properties");
}
}

View File

@ -0,0 +1,134 @@
package com.cqu.linux.utils;
import com.cqu.linux.entity.LinuxConnection;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;
import java.io.*;
import java.net.URI;
/**
 * Static helpers around a shared Hadoop {@link FileSystem} handle.
 * Call {@link #init()} (or {@link #init(String, String)}) once before any other
 * method, and {@link #close()} when finished.
 *
 * NOTE(review): "fs" is a single static handle with no synchronization — not safe
 * for concurrent init/close across threads.
 */
public class HDFSUtils {
private static FileSystem fs;
// NOTE(review): unused instance field — every method in this class is static.
private LinuxConnection linuxConnection = new LinuxConnection();
/**
 * *@param uri HDFS endpoint, e.g. hdfs://hadoop102:9820
 * *@param conf configuration object
 * *@param user user to operate as
 */
public static void init() throws IOException, InterruptedException {
//1. connect to HDFS (hard-coded dev endpoint and user)
URI uri = URI.create("hdfs://Ding202:8020");
Configuration conf = new Configuration();
conf.set("dfs.replication", "3");
String user = "dingjiawen";
fs = FileSystem.get(uri, conf, user);
}
/** Same as {@link #init()} but with an explicit endpoint URI and user name. */
public static void init(String ip, String username) throws IOException, InterruptedException {
//1. connect to HDFS
URI uri = URI.create(ip);
Configuration conf = new Configuration();
conf.set("dfs.replication", "3");
String user = username;
fs = FileSystem.get(uri, conf, user);
}
/** Releases the shared FileSystem handle. */
public static void close() throws IOException {
//3. release resources
fs.close();
}
/** Uploads the two generated flume conf files for {@code topic} from the local project tree. */
public static void uploadFromLocal(String topic) {
try {
fs.copyFromLocalFile(false, true,
new Path("src/main/java/com/cqu/mqtt/conf/" + topic + "-mqtt-flume-kafka.conf"),
new Path("/origin_data/phm/conf/" + topic + "-mqtt-flume-kafka.conf"));
fs.copyFromLocalFile(false, true,
new Path("src/main/java/com/cqu/mqtt/conf/" + topic + "-kafka-flume-hdfs.conf"),
new Path("/origin_data/phm/conf/" + topic + "-kafka-flume-hdfs.conf"));
} catch (IOException e) {
e.printStackTrace();
}
}
/**
 * HDFS-to-HDFS transfer: deletes {@code targetPath} if present, then copies
 * every file found recursively under {@code sourcePath} into it.
 * NOTE(review): FileUtil.copy is invoked with deleteSource=true, so source
 * files are MOVED, not copied — confirm that is the intent.
 */
public static void uploadFromHDFS(String sourcePath, String targetPath) {
// DFSInputStream inputStream=new DFSInputStream();
//delete the target if it exists, then transfer
Path dfsPath = new Path(targetPath);
Path findPath = new Path(sourcePath);
try {
if (fs.exists(dfsPath)) {
System.out.println("删除文件:" + fs.delete(dfsPath, true));
}
RemoteIterator<LocatedFileStatus> sourceFiles = fs.listFiles(findPath, true);
while (sourceFiles.hasNext()) {
Configuration conf = new Configuration();
conf.set("dfs.replication", "3");
FileUtil.copy(fs, sourceFiles.next().getPath(), fs, dfsPath, true, conf);
}
} catch (IOException e) {
e.printStackTrace();
}
}
/** Recursively deletes {@code path} if it exists. */
public static void delete(String path) {
// DFSInputStream inputStream=new DFSInputStream();
//delete the path if it exists
Path dfsPath = new Path(path);
try {
if (fs.exists(dfsPath)) {
System.out.println("删除文件:" + fs.delete(dfsPath, true));
}
} catch (IOException e) {
e.printStackTrace();
}
}
/**
 * Copies a flume template {@code fileName} from {@code sourcePath} to
 * {@code targetPath}, inserting "-topic" before the file extension and
 * replacing the placeholder topic inside the file (CopyFileUtils.copyDFSFile
 * closes both streams).
 */
public static void copyFlumeFile(String sourcePath, String targetPath, String fileName, String topic) {
String exName= fileName.substring(fileName.lastIndexOf("."));
String targetName = fileName.substring(0,fileName.lastIndexOf("."));
Path realTargetPath = new Path(targetPath+"/"+targetName+"-"+topic+exName);
Path realSourcePath = new Path(sourcePath +"/"+fileName);
try {
if (!fs.exists(realSourcePath)) {
System.out.println("需要拷贝的文件不存在");
return;
}
if (fs.exists(realTargetPath)) {
System.out.println("删除文件:" + fs.delete(realTargetPath, true));
System.out.println("开始重新生成");
}
FSDataInputStream inputStream = fs.open(realSourcePath);
FSDataOutputStream outputStream = fs.create(realTargetPath);
CopyFileUtils.copyDFSFile(inputStream,outputStream,topic);
} catch (IOException e) {
e.printStackTrace();
}
}
}

View File

@ -0,0 +1,77 @@
package com.cqu.linux.utils;
import okhttp3.*;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;
/**
 * Thin OkHttp wrapper exposing blocking GET/POST helpers that print the
 * response body and the round-trip time.
 */
public class HTTPUtils {

    // volatile is required for double-checked locking: without it another thread
    // may observe a partially-constructed OkHttpClient through the data race.
    private static volatile OkHttpClient client;

    private HTTPUtils() {
    }

    /** Lazily creates the shared OkHttpClient (double-checked locking). */
    public static OkHttpClient getInstance() {
        if (client == null) {
            synchronized (HTTPUtils.class) {
                if (client == null) {
                    client = new OkHttpClient();
                }
            }
        }
        return client;
    }

    /**
     * Sends {@code json} URL-encoded as the {@code param} query parameter of a
     * GET request and prints the response body plus elapsed time.
     *
     * @throws RuntimeException if the request fails (network/address problems)
     */
    public static void get(String url, String json) {
        String encodeJson = "";
        try {
            encodeJson = URLEncoder.encode(json, "utf-8");
        } catch (UnsupportedEncodingException e) {
            e.printStackTrace();
        }
        url = url + "?param=" + encodeJson;
        Request request = new Request.Builder()
                .url(url).get().build();
        Call call = HTTPUtils.getInstance().newCall(request);
        long start = System.currentTimeMillis();
        // try-with-resources closes the Response (and its body) — the original
        // leaked the connection whenever the body was not fully consumed.
        try (Response response = call.execute()) {
            long end = System.currentTimeMillis();
            System.out.println(response.body().string() + " used:" + (end - start) + " ms");
        } catch (IOException e) {
            e.printStackTrace();
            // keep the original message but preserve the cause for diagnostics
            throw new RuntimeException("发送失败...检查网络地址...", e);
        }
    }

    /**
     * POSTs {@code json} as an application/json body and prints the response
     * body plus elapsed time.
     *
     * @throws RuntimeException if the request fails (network/address problems)
     */
    public static void post(String url, String json) {
        System.out.println(json);
        RequestBody requestBody = RequestBody.create(MediaType.parse("application/json; charset=utf-8"), json);
        Request request = new Request.Builder()
                .url(url)
                .post(requestBody) //POST request
                .build();
        Call call = HTTPUtils.getInstance().newCall(request);
        long start = System.currentTimeMillis();
        try (Response response = call.execute()) {
            long end = System.currentTimeMillis();
            System.out.println(response.body().string() + " used:" + (end - start) + " ms");
        } catch (IOException e) {
            e.printStackTrace();
            throw new RuntimeException("发送失败...检查网络地址...", e);
        }
    }
}

View File

@ -0,0 +1,487 @@
package com.cqu.linux.utils;
import ch.ethz.ssh2.Connection;
import ch.ethz.ssh2.Session;
import ch.ethz.ssh2.StreamGobbler;
import com.cqu.linux.Exception.LoginErrorException;
import com.cqu.linux.entity.LinuxConnection;
import com.cqu.linux.entity.SFTP;
import com.jcraft.jsch.Channel;
import com.jcraft.jsch.ChannelSftp;
import com.jcraft.jsch.JSch;
import com.jcraft.jsch.SftpException;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.net.ftp.FTPClient;
import org.apache.commons.net.ftp.FTPClientConfig;
import org.apache.commons.net.ftp.FTPReply;
import org.springframework.data.convert.Jsr310Converters;
import java.io.*;
import java.net.SocketException;
import java.nio.file.Path;
import java.util.Properties;
/**
 * Utilities for operating on remote Linux hosts:
 *  - execute(String): run a shell command over SSH (ganymed-ssh2) and return its output
 *  - uploadFileBySFTP / download / delete: file transfer over JSch SFTP
 *  - uploadFileFTP / uploadFileToLinux: FTP-based uploads (require a vsftpd server)
 *
 * NOTE(review): connection settings live in a single STATIC field, so concurrent
 * use of multiple LinuxUtils instances will clobber each other's credentials.
 */
@Slf4j
public class LinuxUtils {
static LinuxConnection linuxConnection = new LinuxConnection();
// public LinuxUtils() {
// }
/** Stores host/credentials into the shared connection settings; returns this for chaining. */
public LinuxUtils InitLinuxConnection(String ip, String username, String password) {
linuxConnection.setIp(ip);
linuxConnection.setUserName(username);
linuxConnection.setPassword(password);
return this;
}
/**
 * Connect to the ftp/sftp server and populate the given SFTP holder with the
 * session, channel and ChannelSftp. Connection failures are only logged —
 * NOTE(review): on failure the method still proceeds with a dead session.
 */
private void getConnect(SFTP s) throws Exception {
// passphrase for the private key
// String privateKey ="key";
// /** path to the private key file */
// String passphrase ="path";
// host
String host =linuxConnection.getIp();
// port
int port =22;
// user name
String username =linuxConnection.getUserName();
//password
String password =linuxConnection.getPassword();
com.jcraft.jsch.Session session = null;
Channel channel = null;
ChannelSftp sftp = null;// sftp operations handle
JSch jsch = new JSch();
//key-based login: set the key information before jsch.getSession
// if (privateKey != null && !"".equals(privateKey)) {
// if (passphrase != null && "".equals(passphrase)) {
// //key protected by a passphrase
// jsch.addIdentity(privateKey, passphrase);
// } else {
// //key without a passphrase
// jsch.addIdentity(privateKey);
// }
// }
session = jsch.getSession(username, host, port);
session.setPassword(password);
Properties config = new Properties();
config.put("StrictHostKeyChecking", "no"); // skip host-key verification
session.setConfig(config);
// passive mode -> abandoned: with it set, either the connect or the transfer failed
//ftp.enterLocalPassiveMode();
// binary transfer type -> abandoned for the same reason
//ftp.setFileType(FTPClient.BINARY_FILE_TYPE);
try {
session.connect();
} catch (Exception e) {
if (session.isConnected())
session.disconnect();
log.error("连接服务器失败,请检查主机[" + host + "],端口[" + port
+ "],用户名[" + username + "],端口[" + port
+ "]是否正确,以上信息正确的情况下请检查网络连接是否正常或者请求被防火墙拒绝.");
}
channel = session.openChannel("sftp");
try {
channel.connect();
} catch (Exception e) {
if (channel.isConnected())
channel.disconnect();
log.error("连接服务器失败,请检查主机[" + host + "],端口[" + port
+ "],用户名[" + username + "],密码是否正确,以上信息正确的情况下请检查网络连接是否正常或者请求被防火墙拒绝.");
}
sftp = (ChannelSftp) channel;
s.setChannel(channel);
s.setSession(session);
s.setSftp(sftp);
}
/**
 * Tears down an SFTP connection: exits the sftp channel, then disconnects the
 * channel and finally the session (all null-safe).
 */
public static void disConn(com.jcraft.jsch.Session session, Channel channel, ChannelSftp sftp)throws Exception{
if(null != sftp){
sftp.disconnect();
sftp.exit();
sftp = null;
}
if(null != channel){
channel.disconnect();
channel = null;
}
if(null != session){
session.disconnect();
session = null;
}
}
/**
 * Logs in to the remote host over SSH (ganymed-ssh2) with the credentials held
 * in linuxConnection, storing the authenticated Connection back on success.
 * Failures are printed, not rethrown.
 *
 * @return this, for chaining
 * @author Ickes
 * @since V0.1
 */
private LinuxUtils login() {
String ip = linuxConnection.getIp();
Connection conn = linuxConnection.getConn();
String userName = linuxConnection.getUserName();
String password = linuxConnection.getPassword();
boolean flg = false;
try {
conn = new Connection(ip);
conn.connect();//connect
flg = conn.authenticateWithPassword(userName, password);//authenticate
if (!flg) {
System.err.println("认证失败,用户名或密码错误");
throw new LoginErrorException(404, "连接失败,用户名或密码错误");
}
linuxConnection.setConn(conn);
} catch (IOException | LoginErrorException e) {
e.printStackTrace();
}
return this;
}
/**
 * Remotely execute a shell script or command.
 *
 * @param cmd the command to run
 * @return the command's stdout, or its stderr if stdout was empty
 *         (an empty stdout is taken to mean the script failed)
 * @author Ickes
 * @since V0.1
 */
public String execute(String cmd) {
login();
String result = "";
try {
Connection conn = linuxConnection.getConn();
System.out.println("连接成功");
Session session = conn.openSession();//open a session
System.out.println("开始执行指定命令");
session.execCommand(cmd);//run the command
System.out.println("返回命令结果");
result = processStdout(session.getStdout(), linuxConnection.DEFAULTCHART);
//an empty stdout is treated as the script having failed
if (StringUtils.isBlank(result)) {
result = processStdout(session.getStderr(), linuxConnection.DEFAULTCHART);
}
conn.close();
session.close();
} catch (IOException e) {
e.printStackTrace();
}
return result;
}
/**
 * Drains a remote output stream into a single string.
 *
 * @param in      raw stream from the session
 * @param charset encoding used to decode it
 * @return the stream contents, one line per '\n'
 * @author Ickes
 * @since V0.1
 */
private String processStdout(InputStream in, String charset) {
InputStream stdout = new StreamGobbler(in);
StringBuffer buffer = new StringBuffer();
;
try {
BufferedReader br = new BufferedReader(new InputStreamReader(stdout, charset));
String line = null;
while ((line = br.readLine()) != null) {
buffer.append(line + "\n");
}
} catch (UnsupportedEncodingException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
return buffer.toString();
}
/**
 * Uploads a local file to the remote host over SFTP; creates the remote target
 * directory if it does not exist.
 *
 * @param sourcePath local directory containing the file
 * @param targetPath remote directory to upload into
 * @param filename   file name (same locally and remotely)
 * @return true on success, false otherwise
 */
public boolean uploadFileBySFTP(String sourcePath,String targetPath, String filename) {
boolean result;
//FTPClient ftp = new FTPClient();
SFTP s = new SFTP();
try {
getConnect(s);//establish the connection
} catch (Exception e) {
e.printStackTrace();
}
com.jcraft.jsch.Session session = s.getSession();
Channel channel = s.getChannel();
ChannelSftp sftp = s.getSftp();// sftp operations handle
try {
// enter the target directory, then upload
try {
sftp.cd(targetPath); //enter the directory
} catch (SftpException sException) {
if (sftp.SSH_FX_NO_SUCH_FILE == sException.id) { //the target path does not exist
sftp.mkdir(targetPath);//create the directory
sftp.cd(targetPath); //and enter it
}
}
File file = new File(sourcePath+"/"+filename);
FileInputStream fileInputStream = new FileInputStream(file);
// sftp.cd(basePath + filePath);
// sftp: put uploads, get downloads
sftp.put(fileInputStream, filename);
fileInputStream.close();
result = true;
} catch (Exception e) {
// log.error("FTP服务器 文件上传失败 失败原因:{}", e.getMessage(), e);
System.err.println("FTP服务器 文件上传失败 失败原因:"+e.getMessage());
e.printStackTrace();
result = false;
} finally {
try {
disConn(session,channel,sftp);
} catch (Exception e) {
System.err.println("FTP服务器 关闭失败 失败原因:"+e.getMessage());
//// log.error("FTP服务器 关闭失败 失败原因:{}", e.getMessage(), e);
}
// if (ftp.isConnected()) {
// try {
// ftp.disconnect();
// } catch (IOException e) {
// System.err.println("FTP服务器 关闭失败 失败原因:"+e.getMessage());
//// log.error("FTP服务器 关闭失败 失败原因:{}", e.getMessage(), e);
// }
// }
}
return result;
}
/**
 * TODO: not usable yet
 * @param url FTP server hostname
 * @param port FTP server port
 * @param username FTP login account
 * @param password FTP login password
 * @param path target directory on the FTP server
 * @param filename file name, e.g. 6403_APP_YYYYMMDD_channel_batch.txt
 * @param inputStream data to store
 * @return true on success
 */
private boolean uploadFileFTP(String url, int port, String username, String password,
String path, String filename, InputStream inputStream) {
boolean success;
FTPClient ftp = new FTPClient();
try {
int reply;
// connect to the FTP server
// with the default port you can simply call ftp.connect(url)
ftp.connect(url, port);
ftp.setControlEncoding("UTF-8");
// binary transfer type
ftp.setFileType(FTPClient.BINARY_FILE_TYPE);
new FTPClientConfig(FTPClientConfig.SYST_NT);
// log in
ftp.login(username, password);
// reply code of the login
reply = ftp.getReplyCode();
if (!FTPReply.isPositiveCompletion(reply)) {
ftp.disconnect();
System.err.println("FTP服务器 拒绝连接");
// log.info("FTP服务器 拒绝连接");
return false;
}
// change into the target directory
ftp.changeWorkingDirectory(path);
// store the file
ftp.storeFile(filename, inputStream);
inputStream.close();
ftp.logout();
success = true;
} catch (IOException e) {
e.printStackTrace();
System.out.println("FTP服务器 文件上传失败 失败原因"+e.getMessage());
// log.error("FTP服务器 文件上传失败 失败原因:{}", e.getMessage(), e);
return false;
} finally {
// if (ftp.isConnected()) {
// try {
// ftp.disconnect();
// } catch (IOException e) {
// e.printStackTrace();
// System.out.println("FTP服务器 关闭失败 失败原因"+e.getMessage());
//// log.error("FTP服务器 关闭失败 失败原因:{}", e.getMessage(), e);
// }
// }
}
return success;
}
/**
 * Alternative upload that requires a vsftpd server running on the target host.
 * @param hostname
 * @param port
 * @param username
 * @param password
 * @param sourcePath local directory of the file
 * @param linuxPath remote directory
 * @param fileName
 * @return the FTP reply code (230 on success; 530 on bad credentials/permissions)
 */
public Integer uploadFileToLinux(String hostname,int port,String username,String password,String sourcePath,String linuxPath,String fileName){
System.out.println(sourcePath+"\\"+fileName);
File imagefile = new File(sourcePath+"/"+fileName);
// String imagefileFileName = "gear_box_ysl.conf";
//create the ftp client
FTPClient ftpClient = new FTPClient();
ftpClient.setControlEncoding("UTF-8");
//String hostname = "192.168.118.202";
//int port = 21;
//String username = "root";
//String password = "dingjiawen.123";
try {
//connect to the ftp server
ftpClient.connect(hostname, port);
//log in
ftpClient.login(username, password);
int reply = ftpClient.getReplyCode();
System.out.println(reply);
//230 means success; 530 means wrong username/password or insufficient permission
if (!FTPReply.isPositiveCompletion(reply)) {
ftpClient.disconnect();
throw new LoginErrorException(reply,"连接失败,检查用户名,密码,端口,服务器是否开启");
}
ftpClient.setFileType(FTPClient.BINARY_FILE_TYPE);
//ftpClient.makeDirectory(linuxPath);//create the directory under root
String remoteFileName = linuxPath+"/"+fileName;
InputStream input = new FileInputStream(imagefile);
ftpClient.storeFile(remoteFileName, input);//without a path the file lands in the root directory
input.close();
ftpClient.logout();
return reply;
} catch (SocketException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
} catch (LoginErrorException e) {
e.printStackTrace();
} finally {
if (ftpClient.isConnected()) {
try {
ftpClient.disconnect();
} catch (IOException ioe) {
ioe.printStackTrace();
}
}
}
return 530;
}
/**
 * Downloads a file over SFTP.
 *
 * @param directory    remote directory, relative to the SFTP root
 * @param downloadFile file to download
 * @param saveFile     local directory to save into (created if missing)
 * @return true on success
 */
public Boolean download(String directory, String downloadFile, String saveFile) throws Exception {
boolean result = false;
SFTP s = new SFTP();
getConnect(s);//establish the connection
com.jcraft.jsch.Session session = s.getSession();
Channel channel = s.getChannel();
ChannelSftp sftp = s.getSftp();// sftp operations handle
try {
sftp.cd(directory); //enter the directory
File file = new File(saveFile);
boolean bFile;
bFile = false;
bFile = file.exists();
if (!bFile) {
bFile = file.mkdirs();//create the local directory
}
OutputStream out = new FileOutputStream(new File(saveFile, downloadFile));
sftp.get(downloadFile, out);
result=true;
out.flush();
out.close();
} catch (Exception e) {
throw new Exception(e.getMessage(), e);
} finally {
disConn(session, channel, sftp);
}
return result;
}
/**
 * Deletes a remote file over SFTP.
 * @param directory directory containing the file (enter the parent of the target)
 * @param deleteFile file to delete
 * @return true on success
 */
public Boolean delete(String directory, String deleteFile) throws Exception {
boolean result = false;
SFTP s=new SFTP();
getConnect(s);//establish the connection
com.jcraft.jsch.Session session = s.getSession();
Channel channel = s.getChannel();
ChannelSftp sftp = s.getSftp();// sftp operations handle
try {
sftp.cd(directory); //enter the parent of the target
sftp.rm(deleteFile);//delete it
result = true;
} catch (Exception e) {
throw new Exception(e.getMessage(),e);
} finally {
disConn(session,channel,sftp);
}
return result;
}
}

View File

@ -0,0 +1,69 @@
#kafka-hdfs
#Name
a1.sources = r1
a1.channels = c1
a1.sinks = k1
#source
#本次使用kafka source
a1.sources.r1.type = org.apache.flume.source.kafka.KafkaSource
a1.sources.r1.kafka.bootstrap.servers = phm102:9092,phm103:9092,phm104:9092
a1.sources.r1.kafka.topics = phm_data
a1.sources.r1.kafka.consumer.group.id = phm
#将批次写入通道之前的最长时间
a1.sources.r1.batchDurationMillis = 2000
a1.sources.r1.interceptors = i1
a1.sources.r1.interceptors.i1.type = com.cqu.mqtt.interceptor.TimeStampInterceptor$MyBuilder
#file channel
a1.channels.c1.type = file
a1.channels.c1.dataDirs = /opt/module/flume-1.9.0/jobs/phm_data/filechannel
#写到file channel中时里面有已经去sink的和没去sink的,没去sink的会在内存中记录可以维护多少event的指针,以下配置就是配置这个容量
a1.channels.c1.capacity = 1000000
#事务容量,一定要大于kafka source 的batchsize
a1.channels.c1.transactionCapacity = 10000
#内存中的指针为了防止flume故障而丢失,会指定内存中的指针落盘会落到哪里
a1.channels.c1.checkpointDir = /opt/module/flume-1.9.0/jobs/phm_data/checkpoint
#落盘的备份
#a1.channels.c1.useDualCheckpoints = true
#a1.channels.c1.backupCheckpointDir = /opt/module/flume-1.9.0/jobs/checkpoint-bk
#最大的文件大小,当写在file中的文件达到一定程度,会换一个文件继续写,默认2146435071
a1.channels.c1.maxFileSize = 2146435071
#source 放进channel中时可能会放不进去,以下设置放不下去时等多久,超过之后仍然不等
a1.channels.c1.keep-alive = 5
a1.sinks.k1.type = hdfs
# TODO mqtt_service待改
a1.sinks.k1.hdfs.path = hdfs://phm102:8020/origin_data/phm/log/phm_data/%Y-%m-%d
a1.sinks.k1.hdfs.filePrefix = log-
#a1.sinks.k1.hdfs.fileType = CompressedStream
#a1.sinks.k1.hdfs.codeC = gzip
#是否按照时间滚动文件夹
a1.sinks.k1.hdfs.round = true
#多少时间单位创建一个新的文件夹
a1.sinks.k1.hdfs.roundValue = 1
#重新定义时间单位
a1.sinks.k1.hdfs.roundUnit = hour
#是否使用本地时间戳
a1.sinks.k1.hdfs.useLocalTimeStamp = false
#积攒多少个Event才flush到HDFS一次
a1.sinks.k1.hdfs.batchSize = 10
#设置文件类型,可支持压缩
a1.sinks.k1.hdfs.fileType = DataStream
#
a1.sinks.k1.hdfs.rollInterval = 0
#设置每个文件的滚动大小
a1.sinks.k1.hdfs.rollSize = 134217700
#文件的滚动与Event数量无关
a1.sinks.k1.hdfs.rollCount = 100
#最小冗余数
a1.sinks.k1.hdfs.minBlockReplicas = 1
# 延长失联断开时间 5min
a1.sinks.k1.hdfs.callTimeout = 300000
#bind
a1.sources.r1.channels = c1
a1.sinks.k1.channel = c1

View File

@ -0,0 +1,22 @@
a1.sources = r1
a1.channels = c1
# a1.sinks = k1
a1.sources.r1.type = com.cqu.mqtt.service.MQTTSource
a1.sources.r1.host = phm102
a1.sources.r1.port = 1883
a1.sources.r1.topic = phm_data
a1.sources.r1.keepAliveInterval = 10000
a1.sources.r1.username = mqtt
a1.sources.r1.password = mqtt
a1.sources.r1.interceptors = i1
a1.sources.r1.interceptors.i1.type = com.cqu.mqtt.interceptor.ETLNotJsonInterceptor$MyBuilder
a1.channels.c1.type = org.apache.flume.channel.kafka.KafkaChannel
a1.channels.c1.kafka.bootstrap.servers = phm102:9092,phm103:9092,phm104:9092
a1.channels.c1.kafka.topic = phm_data
a1.channels.c1.parseAsFlumeEvent = false
a1.sources.r1.channels = c1

View File

@ -0,0 +1,28 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<parent>
<artifactId>common</artifactId>
<groupId>com.cqu</groupId>
<version>0.0.1-SNAPSHOT</version>
</parent>
<modelVersion>4.0.0</modelVersion>
<artifactId>common-utils</artifactId>
<dependencies>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<version>4.13.2</version>
<scope>compile</scope>
</dependency>
</dependencies>
<properties>
<maven.compiler.source>8</maven.compiler.source>
<maven.compiler.target>8</maven.compiler.target>
</properties>
</project>

View File

@ -0,0 +1,70 @@
package com.cqu.utils;
import lombok.Data;
import java.util.HashMap;
import java.util.Map;
/**
* @author ranmaoqi
* @time 2022-04-09 16:22
*/
/**
 * Standard API response wrapper: success flag, status code (see ResultCode),
 * human-readable message, and a free-form data map. Built fluently, e.g.
 * {@code Result.ok().data("user", u)}.
 *
 * @author ranmaoqi
 * @time 2022-04-09 16:22
 */
@Data
public class Result {
private Boolean success;
private Integer code;
private String message;
private Map<String, Object> data = new HashMap<String, Object>();
//constructor is private: instances can only be created via the factories below
private Result() {}
//use the static factory methods to obtain an instance
//success
public static Result ok() {
Result result = new Result();
result.setSuccess(true);
result.setCode(ResultCode.SUCCESS);
result.setMessage("成功");
return result;
}
//failure
public static Result error() {
Result result = new Result();
result.setSuccess(false);
result.setCode(ResultCode.ERROR);
result.setMessage("失败");
return result;
}
/** Overrides the success flag; returns this for chaining. */
public Result success(Boolean success){
this.setSuccess(success);
return this;
}
/** Overrides the message; returns this for chaining. */
public Result message(String message){
this.setMessage(message);
return this;
}
/** Overrides the status code; returns this for chaining. */
public Result code(Integer code){
this.setCode(code);
return this;
}
/** Adds one entry to the data map; returns this for chaining. */
public Result data(String key, Object value){
this.data.put(key, value);
return this;
}
/** Replaces the whole data map (does not merge); returns this for chaining. */
public Result data(Map<String, Object> map){
this.setData(map);
return this;
}
}

View File

@ -0,0 +1,12 @@
package com.cqu.utils;
/**
 * Status codes used by {@code Result}.
 *
 * @author ranmaoqi
 * @time 2022-04-09 16:19
 */
public class ResultCode {

    /** Operation completed successfully. */
    public static final Integer SUCCESS = 20000;

    /** Operation failed. */
    public static final Integer ERROR = 20001;

    /** Constants holder — not meant to be instantiated. */
    private ResultCode() {
    }
}

View File

@ -0,0 +1,109 @@
package com.cqu.utils.utils;
import org.junit.Test;
import java.io.*;
import java.net.URL;
import java.net.URLConnection;
import java.util.Enumeration;
/**
 * Helpers for reading files under this project's classpath {@code resources}
 * directory.
 *
 * <p>All methods hand an open {@link InputStream} to the caller, who is
 * responsible for closing it.
 *
 * @author dingjiawen
 * @since 2023-01-08
 */
public class ReadResourcesFileUtil {
    @Test
    public void test(){
    }

    /**
     * Opens a single classpath resource and returns its stream.
     * Works for properties/text style files; behaviour for other resource
     * types depends on the classloader.
     *
     * @param fileName resource name relative to the classpath root
     * @return an open stream, or {@code null} when the resource is missing
     *         or cannot be opened
     */
    public static InputStream readBackToInputSteam(String fileName){
        InputStream inputStream = null;
        try {
            ClassLoader classLoader = Thread.currentThread().getContextClassLoader();
            URL url = classLoader.getResource(fileName);
            if (url == null) {
                // Previously a missing resource fell through to
                // url.openConnection() and threw a NullPointerException.
                return null;
            }
            URLConnection urlConnection = url.openConnection();
            // Bypass the URL cache so a redeployed resource is re-read.
            urlConnection.setUseCaches(false);
            inputStream = urlConnection.getInputStream();
        } catch (IOException e) {
            e.printStackTrace();
        }
        return inputStream;
    }

    /**
     * Iterates over all classpath resources with the given name and returns
     * the stream of the LAST match; streams of earlier matches are closed
     * (the original implementation leaked them).
     *
     * @param fileName resource name relative to the classpath root
     * @return an open stream for the last match, or {@code null} when no
     *         resource was found or opening failed
     */
    public static InputStream readBatchBackToInputSteam(String fileName){
        InputStream inputStream = null;
        try {
            ClassLoader classLoader = Thread.currentThread().getContextClassLoader();
            Enumeration<URL> urls = classLoader.getResources(fileName);
            while (urls.hasMoreElements()){
                URL url = urls.nextElement();
                URLConnection urlConnection = url.openConnection();
                urlConnection.setUseCaches(false);
                if (inputStream != null) {
                    // Close the stream from the previous match; only the
                    // last one is handed to the caller.
                    inputStream.close();
                }
                inputStream = urlConnection.getInputStream();
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
        return inputStream;
    }

    /**
     * Reads and prints the first line of a classpath resource and returns a
     * {@link File} built from the bare name (NOTE(review): such a File does
     * not point at the actual resource — method kept private and unused).
     *
     * @param fileName resource name relative to the classpath root
     * @return a File constructed from {@code fileName}
     */
    private static File read(String fileName){
        try {
            ClassLoader classLoader = Thread.currentThread().getContextClassLoader();
            URL url = classLoader.getResource(fileName);
            if (url != null) {
                URLConnection urlConnection = url.openConnection();
                urlConnection.setUseCaches(false);
                // try-with-resources: the original leaked both the stream
                // and the reader.
                try (BufferedReader bufferedReader =
                             new BufferedReader(new InputStreamReader(urlConnection.getInputStream()))) {
                    String className = bufferedReader.readLine();
                    System.out.println(className);
                }
            }
        }catch (Exception e){
            e.printStackTrace();
        }
        return new File(fileName);
    }
}

View File

@ -0,0 +1,75 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<parent>
<artifactId>phm_parent</artifactId>
<groupId>com.cqu</groupId>
<version>0.0.1-SNAPSHOT</version>
</parent>
<modelVersion>4.0.0</modelVersion>
<artifactId>common</artifactId>
<packaging>pom</packaging>
<modules>
<module>common-utils</module>
<module>service_base</module>
<module>common-linux</module>
</modules>
<properties>
<maven.compiler.source>8</maven.compiler.source>
<maven.compiler.target>8</maven.compiler.target>
</properties>
<dependencies>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-web</artifactId>
<scope>provided </scope>
</dependency>
<!--mybatis-plus-->
<dependency>
<groupId>com.baomidou</groupId>
<artifactId>mybatis-plus-boot-starter</artifactId>
<scope>provided </scope>
</dependency>
<!--lombok用来简化实体类需要安装lombok插件-->
<dependency>
<groupId>org.projectlombok</groupId>
<artifactId>lombok</artifactId>
<scope>provided </scope>
</dependency>
<!--swagger-->
<dependency>
<groupId>io.springfox</groupId>
<artifactId>springfox-swagger2</artifactId>
<scope>provided </scope>
</dependency>
<dependency>
<groupId>io.springfox</groupId>
<artifactId>springfox-swagger-ui</artifactId>
<scope>provided </scope>
</dependency>
<!-- redis -->
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-data-redis</artifactId>
</dependency>
<!--spring2.X集成redis所需common-pool2-->
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-pool2</artifactId>
<version>2.6.0</version>
</dependency>
</dependencies>
</project>

View File

@ -0,0 +1,28 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<parent>
<artifactId>common</artifactId>
<groupId>com.cqu</groupId>
<version>0.0.1-SNAPSHOT</version>
</parent>
<modelVersion>4.0.0</modelVersion>
<artifactId>service_base</artifactId>
<properties>
<maven.compiler.source>8</maven.compiler.source>
<maven.compiler.target>8</maven.compiler.target>
</properties>
<dependencies>
<!--引入common-utils-->
<dependency>
<groupId>com.cqu</groupId>
<artifactId>common-utils</artifactId>
<version>0.0.1-SNAPSHOT</version>
</dependency>
</dependencies>
</project>

View File

@ -0,0 +1,40 @@
package com.cqu.base.config;
import com.google.common.base.Predicates;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import springfox.documentation.builders.ApiInfoBuilder;
import springfox.documentation.builders.PathSelectors;
import springfox.documentation.service.ApiInfo;
import springfox.documentation.service.Contact;
import springfox.documentation.spi.DocumentationType;
import springfox.documentation.spring.web.plugins.Docket;
import springfox.documentation.swagger2.annotations.EnableSwagger2;
/**
 * Swagger 2 configuration for the REST API documentation UI.
 *
 * @author ranmaoqi
 * @time 2022-04-09 17:32
 */
@Configuration
@EnableSwagger2
public class SwaggerConfig {

    /**
     * Docket for the "webApi" group; admin and error endpoints are excluded
     * from the generated documentation.
     */
    @Bean
    public Docket webApiConfig(){
        Docket docket = new Docket(DocumentationType.SWAGGER_2);
        return docket
                .groupName("webApi")
                .apiInfo(webApiInfo())
                .select()
                .paths(Predicates.not(PathSelectors.regex("/admin/.*")))
                .paths(Predicates.not(PathSelectors.regex("/error.*")))
                .build();
    }

    /** API metadata shown on the Swagger UI landing page. */
    private ApiInfo webApiInfo(){
        ApiInfoBuilder builder = new ApiInfoBuilder()
                .title("大型旋转机组健康管理软件API文档")
                .version("1.0");
        return builder.build();
    }
}

View File

@ -0,0 +1,35 @@
package com.cqu.base.exception;
import com.cqu.utils.Result;
import lombok.extern.slf4j.Slf4j;
import org.springframework.web.bind.annotation.ControllerAdvice;
import org.springframework.web.bind.annotation.ExceptionHandler;
import org.springframework.web.bind.annotation.ResponseBody;
/**
* 统一异常处理类
* @author ranmaoqi
* @time 2022-04-09 16:49
*/
@ControllerAdvice
@Slf4j
public class GlobalExceptionHandler {
//处理全部异常
@ExceptionHandler(Exception.class) //指定出现什么异常时执行
@ResponseBody //返回json格式数据
public Result error(Exception e) {
e.printStackTrace();
return Result.error().message("发生异常");
}
//处理自定义异常
@ExceptionHandler(PhmException.class)
@ResponseBody
public Result error(PhmException e) {
e.printStackTrace();
return Result.error().code(e.getCode()).message(e.getMsg());
}
}

View File

@ -0,0 +1,21 @@
package com.cqu.base.exception;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
/**
 * Custom business exception; its code and message are converted into an
 * error Result by the global exception handler.
 *
 * @author ranmaoqi
 * @time 2022-04-09 16:54
 */
@Data
@AllArgsConstructor
@NoArgsConstructor
public class PhmException extends RuntimeException {
    private Integer code; // business status code returned to the client
    private String msg; // error message returned to the client
}

View File

@ -0,0 +1,28 @@
package com.cqu.base.handler;
import com.baomidou.mybatisplus.core.handlers.MetaObjectHandler;
import org.apache.ibatis.reflection.MetaObject;
import org.springframework.stereotype.Component;
import java.util.Date;
/**
 * MyBatis-Plus meta-object handler: auto-fills the gmtCreate / gmtModified
 * audit fields on insert and update.
 *
 * @author ranmaoqi
 * @time 2022-04-11 11:33
 */
@Component
public class MyMetaObjectHandler implements MetaObjectHandler {
    // On insert, both audit fields start at "now".
    @Override
    public void insertFill(MetaObject metaObject) {
        this.setFieldValByName("gmtCreate", new Date(), metaObject);
        this.setFieldValByName("gmtModified", new Date(), metaObject);
    }
    // On update, only the modification timestamp is refreshed.
    @Override
    public void updateFill(MetaObject metaObject) {
        this.setFieldValByName("gmtModified", new Date(), metaObject);
    }
}

View File

@ -0,0 +1,32 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<parent>
<artifactId>phm_parent</artifactId>
<groupId>com.cqu</groupId>
<version>0.0.1-SNAPSHOT</version>
</parent>
<modelVersion>4.0.0</modelVersion>
<artifactId>gateway</artifactId>
<properties>
<maven.compiler.source>8</maven.compiler.source>
<maven.compiler.target>8</maven.compiler.target>
</properties>
<dependencies>
<!--网关-->
<dependency>
<groupId>org.springframework.cloud</groupId>
<artifactId>spring-cloud-starter-gateway</artifactId>
</dependency>
<!--服务注册-->
<dependency>
<groupId>org.springframework.cloud</groupId>
<artifactId>spring-cloud-starter-alibaba-nacos-discovery</artifactId>
</dependency>
</dependencies>
</project>

View File

@ -0,0 +1,23 @@
package com.cqu.gateway;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.context.annotation.ComponentScan;
/**
 * Boot entry point for the API gateway service.
 *
 * @Author: ranmaoqi
 * @Date: 2023/01/10 19:05
 */
@SpringBootApplication
/*
Without this annotation beans in the shared common modules are not found,
because component scanning defaults to the startup class's package and its
sub-packages only.
*/
@ComponentScan(basePackages = {"com.cqu"})
public class GatewayApplication {
    public static void main(String[] args) {
        SpringApplication.run(GatewayApplication.class, args);
    }
}

View File

@ -0,0 +1,31 @@
package com.cqu.gateway.config;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.web.cors.CorsConfiguration;
import org.springframework.web.cors.reactive.CorsWebFilter;
import org.springframework.web.cors.reactive.UrlBasedCorsConfigurationSource;
import org.springframework.web.util.pattern.PathPatternParser;
/**
 * Gateway configuration that installs a reactive CORS filter allowing any
 * origin, header and HTTP method on every route.
 *
 * @Author: ranmaoqi
 * @Date: 2023/3/22 14:42
 */
@Configuration
public class CorsConfig {

    /** Registers the catch-all CORS web filter for the gateway. */
    @Bean
    public CorsWebFilter corsFilter() {
        CorsConfiguration corsConfiguration = new CorsConfiguration();
        corsConfiguration.addAllowedOrigin("*");
        corsConfiguration.addAllowedHeader("*");
        corsConfiguration.addAllowedMethod("*");

        UrlBasedCorsConfigurationSource source =
                new UrlBasedCorsConfigurationSource(new PathPatternParser());
        source.registerCorsConfiguration("/**", corsConfiguration);
        return new CorsWebFilter(source);
    }
}

View File

@ -0,0 +1,239 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<modules>
<module>service</module>
<module>common</module>
<module>collecting</module>
<module>gateway</module>
<module>warehouse</module>
</modules>
<parent>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-parent</artifactId>
<version>2.2.1.RELEASE</version>
<relativePath/> <!-- lookup parent from repository -->
</parent>
<groupId>com.cqu</groupId>
<artifactId>phm_parent</artifactId>
<version>0.0.1-SNAPSHOT</version>
<name>phm_parent</name>
<description>phm_parent</description>
<!--一般父工程打包方式都是pom,因为父工程里面一般没有Java代码主要用于传递依赖-->
<packaging>pom</packaging>
<!--父工程不引入依赖,只作依赖的版本管理-->
<!--<dependencies>-->
<!-- <dependency>-->
<!-- <groupId>org.springframework.boot</groupId>-->
<!-- <artifactId>spring-boot-starter</artifactId>-->
<!-- </dependency>-->
<!-- <dependency>-->
<!-- <groupId>org.springframework.boot</groupId>-->
<!-- <artifactId>spring-boot-starter-test</artifactId>-->
<!-- <scope>test</scope>-->
<!-- </dependency>-->
<!--</dependencies>-->
<!--依赖版本-->
<properties>
<java.version>1.8</java.version>
<phm.version>0.0.1-SNAPSHOT</phm.version>
<mybatis-plus.version>3.0.5</mybatis-plus.version>
<velocity.version>2.0</velocity.version>
<swagger.version>2.7.0</swagger.version>
<aliyun.oss.version>2.8.3</aliyun.oss.version>
<jodatime.version>2.10.1</jodatime.version>
<poi.version>3.17</poi.version>
<commons-fileupload.version>1.3.1</commons-fileupload.version>
<commons-io.version>2.6</commons-io.version>
<httpclient.version>4.5.1</httpclient.version>
<jwt.version>0.7.0</jwt.version>
<aliyun-java-sdk-core.version>4.3.3</aliyun-java-sdk-core.version>
<aliyun-sdk-oss.version>3.1.0</aliyun-sdk-oss.version>
<aliyun-java-sdk-vod.version>2.15.2</aliyun-java-sdk-vod.version>
<aliyun-java-vod-upload.version>1.4.11</aliyun-java-vod-upload.version>
<aliyun-sdk-vod-upload.version>1.4.11</aliyun-sdk-vod-upload.version>
<fastjson.version>1.2.28</fastjson.version>
<gson.version>2.8.2</gson.version>
<json.version>20170516</json.version>
<commons-dbutils.version>1.7</commons-dbutils.version>
<canal.client.version>1.1.0</canal.client.version>
<docker.image.prefix>zx</docker.image.prefix>
<cloud-alibaba.version>0.2.2.RELEASE</cloud-alibaba.version>
</properties>
<!--依赖管理-->
<dependencyManagement>
<dependencies>
<!--Spring Cloud-->
<dependency>
<groupId>org.springframework.cloud</groupId>
<artifactId>spring-cloud-dependencies</artifactId>
<version>Hoxton.RELEASE</version>
<type>pom</type>
<scope>import</scope>
</dependency>
<dependency>
<groupId>org.springframework.cloud</groupId>
<artifactId>spring-cloud-alibaba-dependencies</artifactId>
<version>${cloud-alibaba.version}</version>
<type>pom</type>
<scope>import</scope>
</dependency>
<!--mybatis-plus 持久层-->
<dependency>
<groupId>com.baomidou</groupId>
<artifactId>mybatis-plus-boot-starter</artifactId>
<version>${mybatis-plus.version}</version>
</dependency>
<!-- velocity 模板引擎, Mybatis Plus 代码生成器需要 -->
<dependency>
<groupId>org.apache.velocity</groupId>
<artifactId>velocity-engine-core</artifactId>
<version>${velocity.version}</version>
</dependency>
<!--swagger-->
<dependency>
<groupId>io.springfox</groupId>
<artifactId>springfox-swagger2</artifactId>
<version>${swagger.version}</version>
</dependency>
<!--swagger ui-->
<dependency>
<groupId>io.springfox</groupId>
<artifactId>springfox-swagger-ui</artifactId>
<version>${swagger.version}</version>
</dependency>
<!--aliyunOSS-->
<dependency>
<groupId>com.aliyun.oss</groupId>
<artifactId>aliyun-sdk-oss</artifactId>
<version>${aliyun.oss.version}</version>
</dependency>
<!--日期时间工具-->
<dependency>
<groupId>joda-time</groupId>
<artifactId>joda-time</artifactId>
<version>${jodatime.version}</version>
</dependency>
<!--xls-->
<dependency>
<groupId>org.apache.poi</groupId>
<artifactId>poi</artifactId>
<version>${poi.version}</version>
</dependency>
<!--xlsx-->
<dependency>
<groupId>org.apache.poi</groupId>
<artifactId>poi-ooxml</artifactId>
<version>${poi.version}</version>
</dependency>
<!--文件上传-->
<dependency>
<groupId>commons-fileupload</groupId>
<artifactId>commons-fileupload</artifactId>
<version>${commons-fileupload.version}</version>
</dependency>
<!--commons-io-->
<dependency>
<groupId>commons-io</groupId>
<artifactId>commons-io</artifactId>
<version>${commons-io.version}</version>
</dependency>
<!--httpclient-->
<dependency>
<groupId>org.apache.httpcomponents</groupId>
<artifactId>httpclient</artifactId>
<version>${httpclient.version}</version>
</dependency>
<dependency>
<groupId>com.google.code.gson</groupId>
<artifactId>gson</artifactId>
<version>${gson.version}</version>
</dependency>
<!-- JWT -->
<dependency>
<groupId>io.jsonwebtoken</groupId>
<artifactId>jjwt</artifactId>
<version>${jwt.version}</version>
</dependency>
<!--aliyun-->
<dependency>
<groupId>com.aliyun</groupId>
<artifactId>aliyun-java-sdk-core</artifactId>
<version>${aliyun-java-sdk-core.version}</version>
</dependency>
<dependency>
<groupId>com.aliyun.oss</groupId>
<artifactId>aliyun-sdk-oss</artifactId>
<version>${aliyun-sdk-oss.version}</version>
</dependency>
<dependency>
<groupId>com.aliyun</groupId>
<artifactId>aliyun-java-sdk-vod</artifactId>
<version>${aliyun-java-sdk-vod.version}</version>
</dependency>
<dependency>
<groupId>com.aliyun</groupId>
<artifactId>aliyun-java-vod-upload</artifactId>
<version>${aliyun-java-vod-upload.version}</version>
</dependency>
<dependency>
<groupId>com.aliyun</groupId>
<artifactId>aliyun-sdk-vod-upload</artifactId>
<version>${aliyun-sdk-vod-upload.version}</version>
</dependency>
<dependency>
<groupId>com.alibaba</groupId>
<artifactId>fastjson</artifactId>
<version>${fastjson.version}</version>
</dependency>
<dependency>
<groupId>org.json</groupId>
<artifactId>json</artifactId>
<version>${json.version}</version>
</dependency>
<dependency>
<groupId>commons-dbutils</groupId>
<artifactId>commons-dbutils</artifactId>
<version>${commons-dbutils.version}</version>
</dependency>
<dependency>
<groupId>com.alibaba.otter</groupId>
<artifactId>canal.client</artifactId>
<version>${canal.client.version}</version>
</dependency>
</dependencies>
</dependencyManagement>
<!-- <build>-->
<!-- <plugins>-->
<!-- <plugin>-->
<!-- <groupId>org.springframework.boot</groupId>-->
<!-- <artifactId>spring-boot-maven-plugin</artifactId>-->
<!-- <version>2.2.1.RELEASE</version>-->
<!-- </plugin>-->
<!-- </plugins>-->
<!-- </build>-->
</project>

View File

@ -0,0 +1,150 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<parent>
<artifactId>phm_parent</artifactId>
<groupId>com.cqu</groupId>
<version>0.0.1-SNAPSHOT</version>
</parent>
<modelVersion>4.0.0</modelVersion>
<artifactId>service</artifactId>
<!--此模块是子模块用于引入依赖,它下面还有子子模块
此模块里没有java代码所以打包方式是pom-->
<packaging>pom</packaging>
<modules>
<module>service_device</module>
<module>service_data</module>
<module>service_data_interface</module>
</modules>
<properties>
<maven.compiler.source>8</maven.compiler.source>
<maven.compiler.target>8</maven.compiler.target>
</properties>
<dependencies>
<!--引入service-base、common-utils-->
<dependency>
<groupId>com.cqu</groupId>
<artifactId>service_base</artifactId>
<version>0.0.1-SNAPSHOT</version>
</dependency>
<!--暂时用不到,先注释掉,否则会报错-->
<!--<dependency>-->
<!-- <groupId>org.springframework.cloud</groupId>-->
<!-- <artifactId>spring-cloud-starter-netflix-ribbon</artifactId>-->
<!--</dependency>-->
<!--hystrix依赖主要是用 @HystrixCommand -->
<!--<dependency>-->
<!-- <groupId>org.springframework.cloud</groupId>-->
<!-- <artifactId>spring-cloud-starter-netflix-hystrix</artifactId>-->
<!--</dependency>-->
<!--服务注册-->
<dependency>
<groupId>org.springframework.cloud</groupId>
<artifactId>spring-cloud-starter-alibaba-nacos-discovery</artifactId>
</dependency>
<!--服务调用-->
<dependency>
<groupId>org.springframework.cloud</groupId>
<artifactId>spring-cloud-starter-openfeign</artifactId>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-web</artifactId>
</dependency>
<!--mybatis-plus-->
<dependency>
<groupId>com.baomidou</groupId>
<artifactId>mybatis-plus-boot-starter</artifactId>
</dependency>
<!--mysql-->
<dependency>
<groupId>mysql</groupId>
<artifactId>mysql-connector-java</artifactId>
<!-- <version>5.1.47</version>-->
</dependency>
<!-- velocity 模板引擎, Mybatis Plus 代码生成器需要 -->
<dependency>
<groupId>org.apache.velocity</groupId>
<artifactId>velocity-engine-core</artifactId>
</dependency>
<!--swagger(接口测试)-->
<dependency>
<groupId>io.springfox</groupId>
<artifactId>springfox-swagger2</artifactId>
</dependency>
<dependency>
<groupId>io.springfox</groupId>
<artifactId>springfox-swagger-ui</artifactId>
</dependency>
<!--lombok用来简化实体类需要安装lombok插件-->
<dependency>
<groupId>org.projectlombok</groupId>
<artifactId>lombok</artifactId>
</dependency>
<!--xls-->
<dependency>
<groupId>org.apache.poi</groupId>
<artifactId>poi</artifactId>
</dependency>
<dependency>
<groupId>org.apache.poi</groupId>
<artifactId>poi-ooxml</artifactId>
</dependency>
<dependency>
<groupId>commons-fileupload</groupId>
<artifactId>commons-fileupload</artifactId>
</dependency>
<!--httpclient-->
<dependency>
<groupId>org.apache.httpcomponents</groupId>
<artifactId>httpclient</artifactId>
</dependency>
<!--commons-io-->
<dependency>
<groupId>commons-io</groupId>
<artifactId>commons-io</artifactId>
</dependency>
<!--gson-->
<dependency>
<groupId>com.google.code.gson</groupId>
<artifactId>gson</artifactId>
</dependency>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<version>4.12</version>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-maven-plugin</artifactId>
</plugin>
</plugins>
</build>
</project>

View File

@ -0,0 +1,53 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<parent>
<artifactId>service</artifactId>
<groupId>com.cqu</groupId>
<version>0.0.1-SNAPSHOT</version>
</parent>
<modelVersion>4.0.0</modelVersion>
<artifactId>service_data</artifactId>
<properties>
<maven.compiler.source>8</maven.compiler.source>
<maven.compiler.target>8</maven.compiler.target>
</properties>
<dependencies>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-test</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>com.alibaba</groupId>
<artifactId>fastjson</artifactId>
</dependency>
</dependencies>
<!-- 项目打包时会将java目录中的*.xml文件也进行打包 -->
<build>
<resources>
<resource>
<directory>src/main/java</directory>
<includes>
<include>**/*.xml</include>
</includes>
<filtering>false</filtering>
</resource>
<resource>
<directory>src/main/resources</directory>
<includes>
<include>**/*.yml</include>
<include>**/*.properties</include>
</includes>
<filtering>false</filtering>
</resource>
</resources>
</build>
</project>

View File

@ -0,0 +1,19 @@
package com.cqu.data;
import org.mybatis.spring.annotation.MapperScan;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.cloud.openfeign.EnableFeignClients;
import org.springframework.context.annotation.ComponentScan;
@SpringBootApplication
// Scan the whole com.cqu tree so beans from the shared common modules are picked up.
@ComponentScan(basePackages = {"com.cqu"})
@EnableFeignClients
// Mapper scanning is configured in DataConfig via @MapperScan instead.
//@MapperScan({"com.cqu"})
public class DataApplication {
    // Boot entry point for the data service.
    public static void main(String[] args) {
        SpringApplication.run(DataApplication.class,args);
    }
}

View File

@ -0,0 +1,33 @@
package com.cqu.data.clients;
import com.cqu.data.entity.vo.Device;
import com.cqu.data.entity.vo.DeviceTypeDetail;
import org.springframework.cloud.openfeign.FeignClient;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;
/**
 * Feign client for the "service-device" microservice.
 *
 * <p>NOTE: on a Feign client interface the path-variable name must be given
 * explicitly — parameter names are not reliably retained in compiled
 * interfaces, so a bare {@code @PathVariable} fails when the client proxy is
 * built at runtime.
 *
 * @Author: ranmaoqi
 * @Date: 2023/01/12 11:31
 */
@FeignClient("service-device")
public interface DeviceClient {
    /**
     * Fetches the device-type detail entity.
     * @param id device-type detail id
     * @return the detail entity
     */
    @GetMapping("/device/device-type-detail/getDetailById/{id}")
    DeviceTypeDetail getDetailById(@PathVariable("id") String id);
    /**
     * Fetches the device entity.
     * @param deviceId device id
     * @return the device entity
     */
    @GetMapping("/device/device/getDeviceByDeviceId/{deviceId}")
    Device getDeviceByDeviceId(@PathVariable("deviceId") String deviceId);
}

View File

@ -0,0 +1,53 @@
package com.cqu.data.config;
import com.baomidou.mybatisplus.core.injector.ISqlInjector;
import com.baomidou.mybatisplus.extension.injector.LogicSqlInjector;
import com.baomidou.mybatisplus.extension.plugins.PaginationInterceptor;
import org.mybatis.spring.annotation.MapperScan;
import org.springframework.boot.SpringBootConfiguration;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.web.servlet.config.annotation.CorsRegistry;
import org.springframework.web.servlet.config.annotation.WebMvcConfigurer;
@Configuration
@MapperScan("com.cqu.data.mapper")
public class DataConfig {
    /**
     * Logic-delete plugin for MyBatis-Plus.
     * @return the SQL injector used by MyBatis-Plus
     */
    @Bean
    public ISqlInjector sqlInjector() {
        return new LogicSqlInjector();
    }
    /**
     * Pagination plugin required for MyBatis-Plus page queries.
     * @return the pagination interceptor
     */
    @Bean
    public PaginationInterceptor paginationInterceptor() {
        return new PaginationInterceptor();
    }
//
//    /**
//     * Global CORS configuration (disabled; presumably CORS is handled at
//     * the gateway instead — see CorsConfig in the gateway module).
//     * @param registry
//     */
//    @Override
//    public void addCorsMappings(CorsRegistry registry) {
//
//        registry.addMapping("/**")
//                .allowCredentials(true)
//                .allowedOrigins("http://localhost:9528") // replace with the front-end address
//                .allowedMethods("POST", "GET", "PUT", "OPTIONS", "DELETE")
//                .allowedHeaders("*")
//                .maxAge(3600);
//
//    }
}

View File

@ -0,0 +1,54 @@
package com.cqu.data.controller;
import com.cqu.data.entity.AccessService;
import com.cqu.data.entity.vo.ServiceFormVo;
import com.cqu.data.service.AccessServiceService;
import com.cqu.utils.Result;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.*;
/**
 * <p>
 * Front controller for the access-service table.
 * </p>
 *
 * @author ding
 * @since 2022-06-27
 */
@RestController
@RequestMapping("/data/access-service")
//@CrossOrigin
public class AccessServiceController {

    @Autowired
    private AccessServiceService accessServiceService;

    /**
     * Pages the access services attached to a device group.
     *
     * @param groupId device-group id
     * @param current current page number
     * @param size    page size
     * @return paged service list wrapped in Result
     */
    @GetMapping("/listServiceByGroupId/{groupId}/{current}/{size}")
    public Result listServiceByGroupId(@PathVariable String groupId,@PathVariable Integer current,@PathVariable Integer size){
        return accessServiceService.listServiceByGroupId(groupId,current,size);
    }

    /**
     * Saves a new access service under the given device group.
     *
     * @param deviceGroupId device-group id
     * @param serviceForm   service form payload
     * @return ok/error Result
     */
    @PostMapping("/addService/{deviceGroupId}")
    public Result addService(@PathVariable String deviceGroupId, @RequestBody ServiceFormVo serviceForm){
        // (leftover System.err debug print removed)
        boolean flag = accessServiceService.saveService(deviceGroupId,serviceForm);
        return flag ? Result.ok():Result.error();
    }

    /**
     * Looks an access service up by id.
     *
     * @param serviceId service id
     * @return the service wrapped in Result under key "service"
     */
    @GetMapping("/getServiceById/{serviceId}")
    public Result getServiceById(@PathVariable String serviceId){
        AccessService service = accessServiceService.getById(serviceId);
        return Result.ok().data("service",service);
    }
}

View File

@ -0,0 +1,23 @@
package com.cqu.data.controller;
import org.springframework.web.bind.annotation.CrossOrigin;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
/**
 * <p>
 * Front controller for the access-task table (no endpoints yet).
 * </p>
 *
 * @author ding
 * @since 2022-06-27
 */
@RestController
@RequestMapping("/data/access-task")
//@CrossOrigin
public class AccessTaskController {
}

View File

@ -0,0 +1,117 @@
package com.cqu.data.controller;
import com.cqu.data.entity.Dataset;
import com.cqu.data.service.DatasetService;
import com.cqu.utils.Result;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.*;
import java.util.List;
/**
 * <p>
 * Front controller for the dataset table.
 * </p>
 *
 * @author ding
 * @since 2022-06-27
 */
@RestController
@RequestMapping("/data/dataset")
//@CrossOrigin
public class DatasetController {
    @Autowired
    private DatasetService datasetService;
    /**
     * Pages the datasets under a device type.
     * @param detailId device-type id
     * @param current current page number
     * @param size page size
     * @return paged dataset list wrapped in Result
     */
    @GetMapping("/listDatasetByDetailId/{detailId}/{current}/{size}")
    public Result listDatasetByDetailId(@PathVariable String detailId, @PathVariable Integer current, @PathVariable Integer size) {
        return datasetService.listDatasetByDetailId(detailId, current, size);
    }
    /**
     * Creates a dataset under the given device type.
     * @param detailId device-type id
     * @param dataset dataset entity
     * @return ok/error Result
     */
    @PostMapping("/addDatasetByDetailId/{detailId}")
    public Result addDatasetByDetailId(@PathVariable String detailId, @RequestBody Dataset dataset) {
        return datasetService.addDatasetByDetailId(detailId, dataset);
        // boolean flag = datasetService.save(dataset.setDetailId(detailId));
        // return flag ? Result.ok() : Result.error();
    }
    /**
     * Looks a dataset up by id (used to echo data back into the edit form).
     * @param id dataset id
     * @return the dataset wrapped in Result under key "dataset"
     */
    @GetMapping("/getDatasetById/{id}")
    public Result getDatasetById(@PathVariable String id) {
        Dataset dataset = datasetService.getById(id);
        return Result.ok().data("dataset", dataset);
    }
    /**
     * Updates a dataset's information.
     * @param dataset dataset entity
     * @return ok/error Result
     */
    @PutMapping("/updateDataset")
    public Result updateDataset(@RequestBody Dataset dataset) {
        return datasetService.updateDataset(dataset);
        // boolean flag = datasetService.updateById(dataset);
        // return flag ? Result.ok() : Result.error();
    }
    /**
     * Deletes a dataset by id.
     * @param id dataset id
     * @return ok/error Result
     */
    @DeleteMapping("/deleteDatasetById/{id}")
    public Result deleteDatasetById(@PathVariable String id) {
        boolean flag = datasetService.removeById(id);
        return flag ? Result.ok() : Result.error();
    }
    /**
     * Batch-deletes the selected datasets.
     * @param datasetList dataset entities to remove
     * @return ok/error Result
     */
    @DeleteMapping("/deleteDatasetBatch")
    public Result deleteDatasetBatch(@RequestBody List<Dataset> datasetList) {
        return datasetService.deleteDatasetBatch(datasetList);
    }
    /**
     * Queries the data graph for a device type.
     * @param detailId device-type id
     * @return graph data wrapped in Result
     */
    @GetMapping("/getDataGraphByDetailId/{detailId}")
    public Result getDataGraphByDetailId(@PathVariable String detailId) {
        return datasetService.getDataGraphByDetailId(detailId);
    }
    /**
     * Queries the data graph for a concrete device.
     * @param deviceId device id
     * @return graph data wrapped in Result
     */
    @GetMapping("/getDataGraphByDeviceId/{deviceId}")
    public Result getDataGraphByDeviceId(@PathVariable String deviceId) {
        return datasetService.getDataGraphByDeviceId(deviceId);
    }
}

View File

@ -0,0 +1,95 @@
package com.cqu.data.controller;
import com.cqu.data.entity.Dataset;
import com.cqu.data.entity.DatasetItem;
import com.cqu.data.service.DatasetItemService;
import com.cqu.utils.Result;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.*;
import java.util.List;
/**
 * <p>
 * Front controller for the dataset-item (dataset detail) table.
 * </p>
 *
 * @author ding
 * @since 2022-06-27
 */
@RestController
@RequestMapping("/data/dataset-item")
//@CrossOrigin
public class DatasetItemController {

    @Autowired
    private DatasetItemService datasetItemService;

    /**
     * Pages the items of a dataset.
     * @param datasetId dataset id
     * @param current current page number
     * @param size page size
     * @return paged item list wrapped in Result
     */
    @GetMapping("/listDatasetItemByDatasetId/{datasetId}/{current}/{size}")
    public Result listDatasetItemByDatasetId(@PathVariable String datasetId, @PathVariable Integer current, @PathVariable Integer size) {
        return datasetItemService.listDatasetItemByDatasetId(datasetId, current, size);
    }

    /**
     * Creates an item under the dataset identified by datasetId.
     * @param datasetId dataset id
     * @param datasetItem item entity
     * @return ok/error Result
     */
    @PostMapping("/addDatasetItemByDatasetId/{datasetId}")
    public Result addDatasetItemByDatasetId(@PathVariable String datasetId, @RequestBody DatasetItem datasetItem) {
        // (leftover System.err debug print removed)
        return datasetItemService.addDatasetItemByDatasetId(datasetId, datasetItem);
    }

    /**
     * Looks an item up by id (used to echo data back into the edit form).
     * NOTE(review): Java method name says "Dataset" but it returns an item;
     * kept as-is because the HTTP mapping is what clients depend on.
     * @param id item id
     * @return the item wrapped in Result under key "datasetItem"
     */
    @GetMapping("/getDatasetItemById/{id}")
    public Result getDatasetById(@PathVariable String id) {
        DatasetItem datasetItem = datasetItemService.getById(id);
        return Result.ok().data("datasetItem", datasetItem);
    }

    /**
     * Updates an item.
     * @param datasetItem item entity
     * @return ok/error Result
     */
    @PutMapping("/updateDatasetItem")
    public Result updateDatasetItem(@RequestBody DatasetItem datasetItem) {
        return datasetItemService.updateDatasetItem(datasetItem);
    }

    /**
     * Deletes an item by id.
     * @param id item id
     * @return ok/error Result
     */
    @DeleteMapping("/deleteDatasetItemById/{id}")
    public Result deleteDatasetItemById(@PathVariable String id) {
        boolean flag = datasetItemService.removeById(id);
        return flag ? Result.ok() : Result.error();
    }

    /**
     * Batch-deletes the selected items.
     * @param datasetItemList item entities to remove
     * @return ok/error Result
     */
    @DeleteMapping("/deleteDatasetItemBatch")
    public Result deleteDatasetItemBatch(@RequestBody List<DatasetItem> datasetItemList) {
        return datasetItemService.deleteDatasetItemBatch(datasetItemList);
    }
}

View File

@ -0,0 +1,23 @@
package com.cqu.data.controller;
import org.springframework.web.bind.annotation.CrossOrigin;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
/**
 * <p>
 * REST controller for the kafka data-source table. No endpoints yet;
 * the mapping prefix is reserved for future use.
 * </p>
 *
 * @author ding
 * @since 2022-06-27
 */
@RestController
@RequestMapping("/data/source-type-kafka")
//@CrossOrigin
public class SourceTypeKafkaController {
}

View File

@ -0,0 +1,23 @@
package com.cqu.data.controller;
import org.springframework.web.bind.annotation.CrossOrigin;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
/**
 * <p>
 * REST controller for the liga data-source table. No endpoints yet;
 * the mapping prefix is reserved for future use.
 * </p>
 *
 * @author ding
 * @since 2022-06-27
 */
@RestController
@RequestMapping("/data/source-type-liga")
//@CrossOrigin
public class SourceTypeLigaController {
}

View File

@ -0,0 +1,23 @@
package com.cqu.data.controller;
import org.springframework.web.bind.annotation.CrossOrigin;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
/**
 * <p>
 * REST controller for the MQTT data-source table. No endpoints yet;
 * the mapping prefix is reserved for future use.
 * </p>
 *
 * @author ding
 * @since 2022-06-27
 */
@RestController
@RequestMapping("/data/source-type-mqtt")
//@CrossOrigin
public class SourceTypeMqttController {
}

View File

@ -0,0 +1,23 @@
package com.cqu.data.controller;
import org.springframework.web.bind.annotation.CrossOrigin;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
/**
 * <p>
 * REST controller for the OPC_UA data-source table. No endpoints yet;
 * the mapping prefix is reserved for future use.
 * </p>
 *
 * @author ding
 * @since 2022-06-27
 */
@RestController
@RequestMapping("/data/source-type-opc-ua")
//@CrossOrigin
public class SourceTypeOpcUaController {
}

View File

@ -0,0 +1,127 @@
package com.cqu.data.entity;
import com.baomidou.mybatisplus.annotation.*;
import java.util.Date;
import java.io.Serializable;
import com.cqu.base.exception.PhmException;
import com.cqu.data.entity.enumClass.SourceType;
import com.cqu.data.entity.enumClass.Status;
import com.cqu.utils.ResultCode;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import lombok.Data;
import lombok.EqualsAndHashCode;
import lombok.experimental.Accessors;
/**
* <p>
* 接入服务表 接入服务表
* </p>
*
* @author ding
* @since 2022-06-27
*/
@Data
@EqualsAndHashCode(callSuper = false)
@Accessors(chain = true)
@ApiModel(value="AccessService对象", description="接入服务表 接入服务表")
public class AccessService implements Serializable {
private static final long serialVersionUID = 1L;
@ApiModelProperty(value = "编号 主键编号")
@TableId(value = "id", type = IdType.ID_WORKER_STR)
private String id;
@ApiModelProperty(value = "设备组id 设备组id")
private String groupId;
@ApiModelProperty(value = "接入服务名称 接入服务名称")
private String name;
@ApiModelProperty(value = "数据源类型 数据源类型")
private Integer sourceType;
@ApiModelProperty(value = "数据源类型id 关联数据源类型表的id")
private String sourceTypeId;
@ApiModelProperty(value = "接入服务描述 接入服务描述")
private String description;
@ApiModelProperty(value = "数据包格式 数据包格式,0标准,1轻量默认0")
private Integer packetFormat;
@ApiModelProperty(value = "时间戳编码 时间戳编码默认为time")
private String timestampCode;
@ApiModelProperty(value = "时间戳精度 时间戳精度1秒,0毫秒默认1")
private Integer timestampAccurate;
@ApiModelProperty(value = "时间戳格式 时间戳格式枚举类0yyyyMMddHHmmss,默认为0")
private Integer timestampFormat;
@ApiModelProperty(value = "接入任务数量 接入任务数量")
private Integer taskNum;
@ApiModelProperty(value = "状态 状态枚举类0运行中等等")
@TableField("STATUS")
private Integer status;
@ApiModelProperty(value = "是否已被删除 是否已被删除")
@TableLogic
private Integer isDeleted;
@ApiModelProperty(value = "创建日期 创建时间")
@TableField(fill = FieldFill.INSERT)
private Date gmtCreate;
@ApiModelProperty(value = "修改时间 修改时间")
@TableField(fill = FieldFill.INSERT_UPDATE)
private Date gmtModified;
@TableField(exist = false)
private String sourceTypeName;
@TableField(exist = false)
private String statusName;
public void setSourceTypeName(){
if(this.getSourceType()==SourceType.内置MQTT){
this.sourceTypeName = "内置MQTT";
}else if(this.getSourceType()==SourceType.第三方MQTT){
this.sourceTypeName = "第三方MQTT";
}else if(this.getSourceType()==SourceType.Kafka){
this.sourceTypeName = "Kafka";
}else if(this.getSourceType()==SourceType.OPC_UA){
this.sourceTypeName = "OPC_UA";
}else if(this.getSourceType()==SourceType.Liga实时数据){
this.sourceTypeName = "Liga实时数据";
}else if(this.getSourceType()==SourceType.Liga历史数据){
this.sourceTypeName = "Liga历史数据";
}else if(this.getSourceType()==SourceType.Liga振动数据){
this.sourceTypeName = "Liga振动数据";
}else {
throw new PhmException(ResultCode.ERROR,"数据源未定义");
}
}
public void setStatusName(){
if(this.getStatus()==Status.运行中){
this.statusName = "运行中";
}else if(this.getSourceType()==Status.已停止){
this.statusName = "已停止";
}else if(this.getSourceType()==Status.等待中){
this.statusName = "等待中";
}else {
throw new PhmException(ResultCode.ERROR,"服务运行状态状态异常");
}
}
}

View File

@ -0,0 +1,74 @@
package com.cqu.data.entity;
import com.baomidou.mybatisplus.annotation.*;
import java.util.Date;
import java.io.Serializable;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import lombok.Data;
import lombok.EqualsAndHashCode;
import lombok.experimental.Accessors;
/**
 * <p>
 * Access-task table entity: one ingestion task running under an access
 * service, writing incoming data into a dataset.
 * </p>
 *
 * @author ding
 * @since 2022-06-27
 */
@Data
@EqualsAndHashCode(callSuper = false)
@Accessors(chain = true)
@ApiModel(value="AccessTask对象", description="接入任务表 接入任务表")
public class AccessTask implements Serializable {
private static final long serialVersionUID = 1L;
@ApiModelProperty(value = "编号 主键编号")
@TableId(value = "id", type = IdType.ID_WORKER_STR)
private String id;
@ApiModelProperty(value = "接入服务id 设备组id")
private String serviceId;
@ApiModelProperty(value = "接入任务名称 接入任务名称")
private String name;
// NOTE(review): "deivce" is a typo for "device"; renaming would change the
// Lombok accessors and the mapped column, so fix only with a coordinated migration.
@ApiModelProperty(value = "设备对象id 设备对象id")
private String deivceObjectId;
@ApiModelProperty(value = "部件id 部件id")
private String partId;
// NOTE(review): declared Integer although Dataset.id is a String — confirm intended type.
@ApiModelProperty(value = "数据集id 数据集id")
private Integer datasetId;
@ApiModelProperty(value = "描述 描述")
private String description;
// Numeric status code: 0 running, 1 stopped, etc. (see column comment).
@ApiModelProperty(value = "状态 状态,0运行1停止等")
@TableField("STATUS")
private Integer status;
@ApiModelProperty(value = "接受数据条数 接受数据条数")
private Integer dataNum;
// Logical-delete flag managed by MyBatis-Plus.
@ApiModelProperty(value = "是否已被删除 是否已被删除")
@TableLogic
private Integer isDeleted;
@ApiModelProperty(value = "创建人 创建人")
private String createPerson;
// Auto-filled on insert.
@ApiModelProperty(value = "创建日期 创建时间")
@TableField(fill = FieldFill.INSERT)
private Date gmtCreate;
// Auto-filled on insert and update.
@ApiModelProperty(value = "修改时间 修改时间")
@TableField(fill = FieldFill.INSERT_UPDATE)
private Date gmtModified;
}

View File

@ -0,0 +1,86 @@
package com.cqu.data.entity;
import com.baomidou.mybatisplus.annotation.*;
import java.util.Date;
import java.io.Serializable;
import com.cqu.data.entity.enumClass.DataType;
import com.cqu.data.entity.enumClass.DatasetType;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import lombok.Data;
import lombok.EqualsAndHashCode;
import lombok.experimental.Accessors;
/**
* <p>
* 数据集表
* </p>
*
* @author ding
* @since 2022-06-27
*/
@Data
@EqualsAndHashCode(callSuper = false)
@Accessors(chain = true)
@ApiModel(value="DataSets对象", description="数据集表")
public class Dataset implements Serializable {
private static final long serialVersionUID = 1L;
@ApiModelProperty(value = "编号")
@TableId(value = "id", type = IdType.ID_WORKER_STR)
private String id;
@ApiModelProperty(value = "设备型号id 设备型号id")
private String detailId;
@ApiModelProperty(value = "名称 名称")
private String name;
@ApiModelProperty(value = "描述 描述")
private String description;
@ApiModelProperty(value = "数据集类型")
private Integer datasetType;
@ApiModelProperty(value = "数据类型")
private Integer dataType;
@ApiModelProperty(value = "是否已被删除 是否已被删除")
@TableLogic
private Integer isDeleted;
@ApiModelProperty(value = "创建人 创建人")
private String createPerson;
@ApiModelProperty(value = "创建日期 创建时间")
@TableField(fill = FieldFill.INSERT)
private Date gmtCreate;
@ApiModelProperty(value = "修改时间 修改时间")
@TableField(fill = FieldFill.INSERT_UPDATE)
private Date gmtModified;
@TableField(exist = false)
private String datasetTypeName; //数据集类型的名字
@TableField(exist = false)
private String dataTypeName; //数据类型的名字
public void setDatasetTypeName() {
if (this.datasetType == DatasetType.iot数据) {
this.datasetTypeName = "iot数据";
}
}
public void setDataTypeName() {
if (this.dataType == DataType.指标数据) {
this.dataTypeName = "指标数据";
} else if (this.dataType == DataType.振动数据) {
this.dataTypeName = "振动数据";
}
}
}

View File

@ -0,0 +1,118 @@
package com.cqu.data.entity;
import com.baomidou.mybatisplus.annotation.*;
import java.util.Date;
import java.io.Serializable;
import com.cqu.data.entity.enumClass.DataType;
import com.cqu.data.entity.enumClass.DatasetItemType;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import lombok.Data;
import lombok.EqualsAndHashCode;
import lombok.experimental.Accessors;
/**
* <p>
* 数据集详情表
* </p>
*
* @author ding
* @since 2022-06-27
*/
@Data
@EqualsAndHashCode(callSuper = false)
@Accessors(chain = true)
@ApiModel(value="DataSetDetail对象", description="数据集详情表")
public class DatasetItem implements Serializable {
private static final long serialVersionUID = 1L;
@ApiModelProperty(value = "编号")
@TableId(value = "id", type = IdType.ID_WORKER_STR)
private String id;
@ApiModelProperty(value = "数据集id")
private String datasetId;
@ApiModelProperty(value = "名称 名称")
private String name;
@ApiModelProperty(value = "显示名称")
private String showName;
@ApiModelProperty(value = "编码")
private String code;
@ApiModelProperty(value = "数据类型, 枚举类")
private Integer type;
@ApiModelProperty(value = "是否一级预警")
private Boolean isFirstWarning;
@ApiModelProperty(value = "一级预警值上限")
private Double firstCeiling;
@ApiModelProperty(value = "一级预警值下限")
private Double firstFloor;
@ApiModelProperty(value = "是否二级预警")
private Boolean isSecondWarning;
@ApiModelProperty(value = "二级预警值上限")
private Double secondCeiling;
@ApiModelProperty(value = "二级预警值下限")
private Double secondFloor;
@ApiModelProperty(value = "数据单位")
private String unit;
@ApiModelProperty(value = "描述 描述")
private String description;
@ApiModelProperty(value = "是否已被删除 是否已被删除")
@TableLogic
private Integer isDeleted;
@ApiModelProperty(value = "创建人 创建人")
private String createPerson;
@ApiModelProperty(value = "创建日期 创建时间")
@TableField(fill = FieldFill.INSERT)
private Date gmtCreate;
@ApiModelProperty(value = "修改时间 修改时间")
@TableField(fill=FieldFill.INSERT_UPDATE)
private Date gmtModified;
@TableField(exist = false)
private String datasetItemTypeName; //数据项的参数类型
public void setDatasetItemTypeName() {
if (this.type == DatasetItemType.实数) {
this.datasetItemTypeName = "实数(double)";
} else if (this.type == DatasetItemType.整数) {
this.datasetItemTypeName = "整数(int)";
} else if (this.type == DatasetItemType.字符串) {
this.datasetItemTypeName = "字符串(string)";
} else if (this.type == DatasetItemType.布尔) {
this.datasetItemTypeName = "布尔(bool)";
} else if (this.type == DatasetItemType.实数数组) {
this.datasetItemTypeName = "实数数组(double_array)";
} else if (this.type == DatasetItemType.整数数组) {
this.datasetItemTypeName = "整数数组(int_array)";
} else if (this.type == DatasetItemType.字符串数组) {
this.datasetItemTypeName = "字符串数组(string_array)";
} else if (this.type == DatasetItemType.布尔数组) {
this.datasetItemTypeName = "布尔数组(bool_array)";
} else if (this.type == DatasetItemType.振动) {
this.datasetItemTypeName = "振动(vibration)";
} else if (this.type == DatasetItemType.时间) {
this.datasetItemTypeName = "时间(time)";
}
}
}

View File

@ -0,0 +1,62 @@
package com.cqu.data.entity;
import com.baomidou.mybatisplus.annotation.*;
import java.util.Date;
import java.io.Serializable;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import lombok.Data;
import lombok.EqualsAndHashCode;
import lombok.experimental.Accessors;
/**
 * <p>
 * Kafka data-source table entity: connection coordinates for a Kafka source.
 * </p>
 *
 * @author ding
 * @since 2022-06-27
 */
@Data
@EqualsAndHashCode(callSuper = false)
@Accessors(chain = true)
@ApiModel(value="SourceTypeKafka对象", description="数据源kafka表 数据源kafka表")
public class SourceTypeKafka implements Serializable {
private static final long serialVersionUID = 1L;
@ApiModelProperty(value = "编号 编号")
@TableId(value = "id", type = IdType.ID_WORKER_STR)
private String id;
// Constant label for this source type; not a database column.
@ApiModelProperty(value = "数据源类型名称 数据源类型名称")
@TableField(exist = false)
private String typeName = "Kafka";
@ApiModelProperty(value = "服务器地址 服务器地址host,如192.168.118.1")
@TableField("HOST")
private String host;
@ApiModelProperty(value = "服务器端口 服务器端口port,1-65535")
@TableField("PORT")
private String port;
@ApiModelProperty(value = "是否已被删除 是否已被删除")
@TableLogic
private Integer isDeleted;
@ApiModelProperty(value = "创建日期 创建时间")
@TableField(fill = FieldFill.INSERT)
private Date gmtCreate;
@ApiModelProperty(value = "修改时间 修改时间")
@TableField(fill = FieldFill.INSERT_UPDATE)
private Date gmtModified;
}

View File

@ -0,0 +1,54 @@
package com.cqu.data.entity;
import com.baomidou.mybatisplus.annotation.*;
import java.util.Date;
import java.io.Serializable;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import lombok.Data;
import lombok.EqualsAndHashCode;
import lombok.experimental.Accessors;
/**
 * <p>
 * Liga data-source table entity. Carries no connection settings of its own —
 * only identity and audit columns.
 * </p>
 *
 * @author ding
 * @since 2022-06-27
 */
@Data
@EqualsAndHashCode(callSuper = false)
@Accessors(chain = true)
@ApiModel(value="SourceTypeLiga对象", description="数据源liga表 数据源liga表")
public class SourceTypeLiga implements Serializable {
private static final long serialVersionUID = 1L;
@ApiModelProperty(value = "编号 编号")
@TableId(value = "id", type = IdType.ID_WORKER_STR)
private String id;
// Constant label for this source type; not a database column.
@ApiModelProperty(value = "数据源类型名称 数据源类型名称")
@TableField(exist = false)
private String typeName = "Liga";
@ApiModelProperty(value = "是否已被删除 是否已被删除")
@TableLogic
private Integer isDeleted;
@ApiModelProperty(value = "创建日期 创建时间")
@TableField(fill = FieldFill.INSERT)
private Date gmtCreate;
@ApiModelProperty(value = "修改时间 修改时间")
@TableField(fill = FieldFill.INSERT_UPDATE)
private Date gmtModified;
}

View File

@ -0,0 +1,102 @@
package com.cqu.data.entity;
import com.baomidou.mybatisplus.annotation.*;
import java.util.Date;
import java.io.Serializable;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import lombok.Data;
import lombok.EqualsAndHashCode;
import lombok.experimental.Accessors;
/**
 * <p>
 * MQTT data-source table entity: broker address, credentials and
 * encryption/connection options for an MQTT source.
 * </p>
 *
 * @author ding
 * @since 2022-06-27
 */
@Data
@EqualsAndHashCode(callSuper = false)
@Accessors(chain = true)
@ApiModel(value="SourceTypeMqtt对象", description="数据源MQTT表 数据源MQTT表")
public class SourceTypeMqtt implements Serializable {
private static final long serialVersionUID = 1L;
@ApiModelProperty(value = "编号 编号")
@TableId(value = "id", type = IdType.ID_WORKER_STR)
private String id;
// Constant label for this source type; not a database column.
@ApiModelProperty(value = "数据源类型名称 数据源类型名称,什么表就是什么名称")
@TableField(exist = false)
private String typeName = "Mqtt";
@ApiModelProperty(value = "是否是内置MQTT 是否是内置数据源0内置1第三方默认0")
private Integer isInner;
@ApiModelProperty(value = "是否加密 是否加密0不加密1加密默认为0")
private Boolean isEncryption;
// NOTE(review): "TSL" in the column name is presumably a typo for "TLS";
// the column mapping must stay as-is unless the schema is migrated.
@ApiModelProperty(value = "是否TSL通信 是否开启TSL通信0不开启1开启默认为0")
@TableField("is_TSL_connect")
private Boolean isTslConnect;
@ApiModelProperty(value = "加密类型 加密类型DES,AES默认DES")
private String encryptionType;
@ApiModelProperty(value = "加密秘钥 加密秘钥")
private String encryptionPassword;
@ApiModelProperty(value = "服务器地址 服务器地址host,如192.168.118.1")
@TableField("HOST")
private String host;
@ApiModelProperty(value = "服务器端口 服务器端口port,1-65535")
@TableField("PORT")
private String port;
@ApiModelProperty(value = "开启单一连接 是否开启单一连接0不开启1开启默认为0")
private Boolean isSingleConnect;
@ApiModelProperty(value = "mqtt的client_id mqtt的client_id")
private String clientId;
@ApiModelProperty(value = "是否匿名连接 是否开启匿名连接0不开启1开启默认为1")
private Boolean isAnonymousConnect;
@ApiModelProperty(value = "mqtt的用户名 mqtt的用户名")
private String username;
@ApiModelProperty(value = "mqtt的密码 mqtt的密码")
@TableField("PASSWORD")
private String password;
@ApiModelProperty(value = "连接串 连接串")
@TableField("CONNECT")
private String connect;
@ApiModelProperty(value = "证书 证书")
@TableField("certificate")
private String certificate;
@ApiModelProperty(value = "是否已被删除 是否已被删除")
@TableLogic
private Integer isDeleted;
@ApiModelProperty(value = "创建日期 创建时间")
@TableField(fill = FieldFill.INSERT)
private Date gmtCreate;
@ApiModelProperty(value = "修改时间 修改时间")
@TableField(fill = FieldFill.INSERT_UPDATE)
private Date gmtModified;
}

View File

@ -0,0 +1,71 @@
package com.cqu.data.entity;
import com.baomidou.mybatisplus.annotation.*;
import java.util.Date;
import java.io.Serializable;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import lombok.Data;
import lombok.EqualsAndHashCode;
import lombok.experimental.Accessors;
/**
 * <p>
 * OPC_UA data-source table entity: server address and credentials for an
 * OPC-UA source.
 * </p>
 *
 * @author ding
 * @since 2022-06-27
 */
@Data
@EqualsAndHashCode(callSuper = false)
@Accessors(chain = true)
@ApiModel(value="SourceTypeOpcUa对象", description="数据源OPC_UA表 数据源OPC_UA表")
public class SourceTypeOpcUa implements Serializable {
private static final long serialVersionUID = 1L;
@ApiModelProperty(value = "编号 编号")
@TableId(value = "id", type = IdType.ID_WORKER_STR)
private String id;
//    @ApiModelProperty(value = "接入服务id 接入服务id")
//    private String serviceId;
// Constant label for this source type; not a database column.
@ApiModelProperty(value = "数据源类型名称 数据源类型名称")
@TableField(exist = false)
private String typeName = "OPC-UA";
@ApiModelProperty(value = "服务器地址 服务器地址host,如192.168.118.1")
@TableField("HOST")
private String host;
@ApiModelProperty(value = "服务器端口 服务器端口port,1-65535")
@TableField("PORT")
private String port;
@ApiModelProperty(value = "是否匿名连接 是否开启匿名连接0不开启1开启默认为1")
private Boolean isAnonymousConnect;
@ApiModelProperty(value = "用户名")
private String username;
@ApiModelProperty(value = "密码 密码")
@TableField("PASSWORD")
private String password;
@ApiModelProperty(value = "是否已被删除 是否已被删除")
@TableLogic
private Integer isDeleted;
@ApiModelProperty(value = "创建日期 创建时间")
@TableField(fill = FieldFill.INSERT)
private Date gmtCreate;
@ApiModelProperty(value = "修改时间 修改时间")
@TableField(fill = FieldFill.INSERT_UPDATE)
private Date gmtModified;
}

View File

@ -0,0 +1,14 @@
package com.cqu.data.entity.enumClass;
/**
 * Data-type codes chosen when creating a dataset (metric vs. vibration data).
 * Plain Integer constants, not a real enum — compare with equals(), and keep
 * the numeric values stable because they are persisted.
 * @Author: ranmaoqi
 * @Date: 2022/11/8 15:24
 */
public class DataType {

    /** Metric (indicator) data. */
    public static final Integer 指标数据 = 0;

    /** Vibration data. */
    public static final Integer 振动数据 = 1;

    /** Constant holder — not instantiable. */
    private DataType() {
    }
}

View File

@ -0,0 +1,21 @@
package com.cqu.data.entity.enumClass;
/**
 * Value-type codes for a dataset item's parameter.
 * Plain Integer constants, not a real enum — compare with equals(), and keep
 * the numeric values stable because they are persisted.
 * @Author: ranmaoqi
 * @Date: 2022/11/8 16:46
 */
public class DatasetItemType {

    public static final Integer 实数 = 0;
    public static final Integer 整数 = 1;
    public static final Integer 字符串 = 2;
    public static final Integer 布尔 = 3;
    public static final Integer 实数数组 = 4;
    public static final Integer 整数数组 = 5;
    public static final Integer 字符串数组 = 6;
    public static final Integer 布尔数组 = 7;
    public static final Integer 振动 = 8;
    public static final Integer 时间 = 9;

    /** Constant holder — not instantiable. */
    private DatasetItemType() {
    }
}

View File

@ -0,0 +1,12 @@
package com.cqu.data.entity.enumClass;
/**
 * Dataset-category codes chosen when creating a dataset.
 * Plain Integer constants, not a real enum — compare with equals(), and keep
 * the numeric values stable because they are persisted.
 * @Author: ranmaoqi
 * @Date: 2022/11/8 15:21
 */
public class DatasetType {

    /** IoT data (currently the only category). */
    public static final Integer iot数据 = 0;

    /** Constant holder — not instantiable. */
    private DatasetType() {
    }
}

View File

@ -0,0 +1,18 @@
package com.cqu.data.entity.enumClass;
/**
 * Data-source type codes; new source types may be added later.
 * Plain Integer constants, not a real enum — compare with equals(), and keep
 * the numeric values stable because they are persisted.
 */
public class SourceType {

    public static final Integer 内置MQTT= 0;
    public static final Integer 第三方MQTT = 1;
    public static final Integer Kafka = 2;
    public static final Integer OPC_UA = 3;
    public static final Integer Liga实时数据 = 4;
    public static final Integer Liga历史数据 = 5;
    public static final Integer Liga振动数据 = 6;

    /** Constant holder — not instantiable. */
    private SourceType() {
    }
}

View File

@ -0,0 +1,20 @@
package com.cqu.data.entity.enumClass;
/**
 * Run-state codes for access services/tasks; new states may be added later.
 * Plain Integer constants, not a real enum — compare with equals(), and keep
 * the numeric values stable because they are persisted.
 */
public class Status {

    public static final Integer 运行中=0;
    public static final Integer 已停止=1;
    public static final Integer 等待中 =2;

    /** Constant holder — not instantiable. */
    private Status() {
    }
}

View File

@ -0,0 +1,84 @@
package com.cqu.data.entity.vo;
import com.baomidou.mybatisplus.annotation.*;
import com.fasterxml.jackson.annotation.JsonFormat;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import lombok.Data;
import lombok.EqualsAndHashCode;
import lombok.experimental.Accessors;
import java.io.Serializable;
import java.util.Date;
/**
 * <p>
 * Device table entity (view-object copy used by this module).
 * </p>
 *
 * @author ranmaoqi
 * @since 2022-05-18
 */
@Data
@EqualsAndHashCode(callSuper = false)
@Accessors(chain = true)
@ApiModel(value="Device对象", description="设备 设备")
public class Device implements Serializable {
private static final long serialVersionUID = 1L;
@ApiModelProperty(value = "编号")
@TableId(value = "id", type = IdType.ID_WORKER_STR)
private String id;
@ApiModelProperty(value = "设备组id")
private String groupId;
@ApiModelProperty(value = "设备标识")
private String name;
@ApiModelProperty(value = "设备类型")
private String typeId;
@ApiModelProperty(value = "设备型号")
private String detailId;
@ApiModelProperty(value = "设备编码")
private String deviceCode;
// Serialized as a date-only string in GMT+8.
@ApiModelProperty(value = "出厂日期")
@JsonFormat(timezone = "GMT+8", pattern = "yyyy-MM-dd")
private Date produceTime;
@ApiModelProperty(value = "东经")
private String eastLongitude;
@ApiModelProperty(value = "北纬")
private String northLatitude;
@ApiModelProperty(value = "描述")
private String description;
@ApiModelProperty(value = "是否已被删除")
@TableLogic
private Integer isDeleted;
@ApiModelProperty(value = "创建人")
private String createPerson;
@ApiModelProperty(value = "创建日期")
@TableField(fill = FieldFill.INSERT)
private Date gmtCreate;
@ApiModelProperty(value = "修改时间")
@TableField(fill = FieldFill.INSERT_UPDATE)
private Date gmtModified;
// Display names resolved from typeId/detailId; not persisted.
@TableField(exist = false)
private String typeName;
@TableField(exist = false)
private String detailName;
}

View File

@ -0,0 +1,70 @@
package com.cqu.data.entity.vo;
import com.baomidou.mybatisplus.annotation.*;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import lombok.Data;
import lombok.EqualsAndHashCode;
import lombok.experimental.Accessors;
import java.io.Serializable;
import java.util.Date;
/**
 * <p>
 * Device-model table entity (view-object copy used by this module).
 * </p>
 *
 * @author testjava
 * @since 2022-04-11
 */
@Data
@EqualsAndHashCode(callSuper = false)
@Accessors(chain = true)
@ApiModel(value="DeviceTypeDetail对象", description="设备型号 设备型号表")
public class DeviceTypeDetail implements Serializable {
private static final long serialVersionUID = 1L;
@ApiModelProperty(value = "编号")
@TableId(value = "id", type = IdType.ID_WORKER_STR)
private String id;
@ApiModelProperty(value = "设备类型id")
private String typeId;
@ApiModelProperty(value = "设备分组id")
private String typeGroupId;
@ApiModelProperty(value = "设备标识")
private String name;
@ApiModelProperty(value = "型号名称")
private String showName;
@ApiModelProperty(value = "规格类型")
private String specificationType;
@ApiModelProperty(value = "生产厂商")
private String manufacturer;
@ApiModelProperty(value = "描述")
private String description;
@ApiModelProperty(value = "是否已被删除")
@TableLogic
private Integer isDeleted;
@ApiModelProperty(value = "创建人")
private String createPerson;
@ApiModelProperty(value = "创建日期")
@TableField(fill = FieldFill.INSERT)
private Date gmtCreate;
@ApiModelProperty(value = "修改时间")
@TableField(fill = FieldFill.INSERT_UPDATE)
private Date gmtModified;
}

View File

@ -0,0 +1,18 @@
package com.cqu.data.entity.vo;
import lombok.Data;
import java.util.List;
/**
 * Graph payload (nodes plus links) returned for the structure/data graph view.
 * @Author: ranmaoqi
 * @Date: 2023/01/11 16:51
 */
@Data
public class GraphVo {
// Graph vertices.
List<NodesVo> nodes;
// Edges referencing node ids via source/target.
List<LinksVo> links;
}

View File

@ -0,0 +1,15 @@
package com.cqu.data.entity.vo;
import lombok.Data;
/**
 * One edge of the structure/data graph: source and target are node ids.
 * @Author: ranmaoqi
 * @Date: 2023/01/11 16:52
 */
@Data
public class LinksVo {
String source;
String target;
}

View File

@ -0,0 +1,20 @@
package com.cqu.data.entity.vo;
import lombok.Data;
/**
 * One node of the structure/data graph, carrying layout hints for the
 * front-end renderer (size, position, category index).
 * @Author: ranmaoqi
 * @Date: 2023/01/11 16:52
 */
@Data
public class NodesVo {
String id;
String name;
// Rendered marker size.
int symbolSize;
// Layout coordinates.
double x;
double y;
// Category index used for node grouping/coloring.
int category;
}

View File

@ -0,0 +1,28 @@
package com.cqu.data.entity.vo;
import io.swagger.models.auth.In;
import lombok.Data;
/**
 * Form payload submitted when creating an access service: service metadata
 * plus the connection/encryption settings of its data source.
 */
@Data
public class ServiceFormVo {
private String name;
// Numeric code; see com.cqu.data.entity.enumClass.SourceType.
private Integer sourceType;
private Boolean isEncryption;
private String encryptionType;
private String encryptionPassword;
private String host;
private String port;
private Boolean isSingleConnect;
private String clientId;
private Boolean isAnonymousConnect;
private String username;
private String password;
private Boolean isTslConnect;
private String description;
private Integer packetFormat;
private String timestampCode;
private Integer timestampAccurate;
private Integer timestampFormat;
}

View File

@ -0,0 +1,16 @@
package com.cqu.data.mapper;
import com.cqu.data.entity.AccessService;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
/**
 * <p>
 * Mapper interface for the access-service table (generic CRUD via BaseMapper).
 * </p>
 *
 * @author ding
 * @since 2022-06-27
 */
public interface AccessServiceMapper extends BaseMapper<AccessService> {
}

View File

@ -0,0 +1,16 @@
package com.cqu.data.mapper;
import com.cqu.data.entity.AccessTask;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
/**
 * <p>
 * Mapper interface for the access-task table (generic CRUD via BaseMapper).
 * </p>
 *
 * @author ding
 * @since 2022-06-27
 */
public interface AccessTaskMapper extends BaseMapper<AccessTask> {
}

View File

@ -0,0 +1,22 @@
package com.cqu.data.mapper;
import com.cqu.data.entity.DatasetItem;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import org.apache.ibatis.annotations.Mapper;
import java.util.List;
/**
 * <p>
 * Mapper interface for the dataset-item table.
 * </p>
 *
 * @author ding
 * @since 2022-06-27
 */
@Mapper
public interface DatasetItemMapper extends BaseMapper<DatasetItem> {
/**
 * Returns the display name (show_name) of every non-deleted item in the
 * given dataset; SQL lives in DatasetItemMapper.xml.
 * @param datasetId dataset id
 * @return list of item display names
 */
List<String> getItemNameByDatasetId(String datasetId);
}

View File

@ -0,0 +1,26 @@
package com.cqu.data.mapper;
import com.cqu.data.entity.Dataset;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import org.apache.ibatis.annotations.Mapper;
import java.util.List;
/**
 * <p>
 * Mapper interface for the dataset table.
 * </p>
 *
 * @author ding
 * @since 2022-06-27
 */
@Mapper
public interface DatasetMapper extends BaseMapper<Dataset> {
/**
 * Returns the ids of every non-deleted dataset under the given device
 * model; SQL lives in DatasetMapper.xml.
 * @param detailId device model id
 * @return list of dataset ids
 */
List<String> getDatasetIdsByDetailId(String detailId);
}

View File

@ -0,0 +1,16 @@
package com.cqu.data.mapper;
import com.cqu.data.entity.SourceTypeKafka;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
/**
 * <p>
 * Mapper interface for the kafka data-source table (generic CRUD via BaseMapper).
 * </p>
 *
 * @author ding
 * @since 2022-06-27
 */
public interface SourceTypeKafkaMapper extends BaseMapper<SourceTypeKafka> {
}

View File

@ -0,0 +1,16 @@
package com.cqu.data.mapper;
import com.cqu.data.entity.SourceTypeLiga;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
/**
 * <p>
 * Mapper interface for the liga data-source table (generic CRUD via BaseMapper).
 * </p>
 *
 * @author ding
 * @since 2022-06-27
 */
public interface SourceTypeLigaMapper extends BaseMapper<SourceTypeLiga> {
}

View File

@ -0,0 +1,16 @@
package com.cqu.data.mapper;
import com.cqu.data.entity.SourceTypeMqtt;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
/**
 * <p>
 * Mapper interface for the MQTT data-source table (generic CRUD via BaseMapper).
 * </p>
 *
 * @author ding
 * @since 2022-06-27
 */
public interface SourceTypeMqttMapper extends BaseMapper<SourceTypeMqtt> {
}

View File

@ -0,0 +1,16 @@
package com.cqu.data.mapper;
import com.cqu.data.entity.SourceTypeOpcUa;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
/**
 * <p>
 * Mapper interface for the OPC_UA data-source table (generic CRUD via BaseMapper).
 * </p>
 *
 * @author ding
 * @since 2022-06-27
 */
public interface SourceTypeOpcUaMapper extends BaseMapper<SourceTypeOpcUa> {
}

View File

@ -0,0 +1,5 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN" "http://mybatis.org/dtd/mybatis-3-mapper.dtd">
<mapper namespace="com.cqu.data.mapper.AccessServiceMapper">
</mapper>

View File

@ -0,0 +1,5 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN" "http://mybatis.org/dtd/mybatis-3-mapper.dtd">
<mapper namespace="com.cqu.data.mapper.AccessTaskMapper">
</mapper>

View File

@ -0,0 +1,10 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN" "http://mybatis.org/dtd/mybatis-3-mapper.dtd">
<mapper namespace="com.cqu.data.mapper.DatasetItemMapper">
    <!-- Display names (show_name) of all non-deleted items in one dataset. -->
    <select id="getItemNameByDatasetId" resultType="java.lang.String">
        SELECT show_name FROM `dataset_item`
        where dataset_id = #{datasetId} and is_deleted = 0
    </select>
</mapper>

View File

@ -0,0 +1,10 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN" "http://mybatis.org/dtd/mybatis-3-mapper.dtd">
<mapper namespace="com.cqu.data.mapper.DatasetMapper">
    <!-- Ids of all non-deleted datasets under one device model. -->
    <select id="getDatasetIdsByDetailId" resultType="java.lang.String">
        SELECT id FROM `dataset`
        where detail_id = #{detailId} and is_deleted = 0
    </select>
</mapper>

View File

@ -0,0 +1,5 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN" "http://mybatis.org/dtd/mybatis-3-mapper.dtd">
<mapper namespace="com.cqu.data.mapper.SourceTypeKafkaMapper">
</mapper>

View File

@ -0,0 +1,5 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN" "http://mybatis.org/dtd/mybatis-3-mapper.dtd">
<mapper namespace="com.cqu.data.mapper.SourceTypeLigaMapper">
</mapper>

View File

@ -0,0 +1,5 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN" "http://mybatis.org/dtd/mybatis-3-mapper.dtd">
<mapper namespace="com.cqu.data.mapper.SourceTypeMqttMapper">
</mapper>

View File

@ -0,0 +1,5 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN" "http://mybatis.org/dtd/mybatis-3-mapper.dtd">
<mapper namespace="com.cqu.data.mapper.SourceTypeOpcUaMapper">
</mapper>

View File

@ -0,0 +1,22 @@
package com.cqu.data.service;
import com.cqu.data.entity.AccessService;
import com.baomidou.mybatisplus.extension.service.IService;
import com.cqu.data.entity.vo.ServiceFormVo;
import com.cqu.utils.Result;
/**
 * <p>
 * Service interface for the access-service table.
 * </p>
 *
 * @author ding
 * @since 2022-06-27
 */
public interface AccessServiceService extends IService<AccessService> {
/**
 * Pages through the access services under one device group.
 * @param groupId device group id
 * @param current current page
 * @param size    number of rows per page
 * @return paged service list wrapped in a Result
 */
Result listServiceByGroupId(String groupId, Integer current, Integer size);
/**
 * Creates an access service from the submitted form under the given device group.
 * @param deviceGroupId device group id
 * @param serviceForm   form payload with service and data-source settings
 * @return whether the save succeeded
 */
boolean saveService(String deviceGroupId, ServiceFormVo serviceForm);
}

View File

@ -0,0 +1,16 @@
package com.cqu.data.service;
import com.cqu.data.entity.AccessTask;
import com.baomidou.mybatisplus.extension.service.IService;
/**
 * <p>
 * Service interface for the access-task table (generic CRUD via IService).
 * </p>
 *
 * @author ding
 * @since 2022-06-27
 */
public interface AccessTaskService extends IService<AccessTask> {
}

View File

@ -0,0 +1,49 @@
package com.cqu.data.service;
import com.cqu.data.entity.DatasetItem;
import com.baomidou.mybatisplus.extension.service.IService;
import com.cqu.utils.Result;
import java.util.List;
/**
 * <p>
 * Service interface for the dataset-item table.
 * </p>
 *
 * @author ding
 * @since 2022-06-27
 */
public interface DatasetItemService extends IService<DatasetItem> {
/**
 * Pages through the dataset items belonging to one dataset.
 * @param datasetId dataset id
 * @param current   current page
 * @param size      number of rows per page
 * @return paged item list wrapped in a Result
 */
Result listDatasetItemByDatasetId(String datasetId, Integer current, Integer size);
/**
 * Creates a new dataset item under the dataset identified by datasetId.
 * @param datasetId   id of the owning dataset
 * @param datasetItem item entity to insert
 * @return Result reporting success or failure
 */
Result addDatasetItemByDatasetId(String datasetId, DatasetItem datasetItem);
/**
 * Updates an existing dataset item.
 * @param datasetItem item entity carrying the id and new field values
 * @return Result reporting success or failure
 */
Result updateDatasetItem(DatasetItem datasetItem);
/**
 * Deletes the given dataset items in one batch (multi-select delete).
 * @param datasetItemList item entities selected for removal
 * @return Result reporting success or failure
 */
Result deleteDatasetItemBatch(List<DatasetItem> datasetItemList);
}

View File

@ -0,0 +1,63 @@
package com.cqu.data.service;
import com.cqu.data.entity.Dataset;
import com.baomidou.mybatisplus.extension.service.IService;
import com.cqu.utils.Result;
import java.util.List;
/**
 * <p>
 * Service interface for the dataset table.
 * </p>
 *
 * @author ding
 * @since 2022-06-27
 */
public interface DatasetService extends IService<Dataset> {
/**
 * Pages through the datasets attached to one device model.
 * @param detailId device model id
 * @param current  current page
 * @param size     number of rows per page
 * @return paged dataset list wrapped in a Result
 */
Result listDatasetByDetailId(String detailId, Integer current, Integer size);
/**
 * Creates a new dataset under the given device model.
 * @param detailId device model id
 * @param dataset  dataset entity to insert
 * @return Result reporting success or failure
 */
Result addDatasetByDetailId(String detailId, Dataset dataset);
/**
 * Updates an existing dataset.
 * @param dataset dataset entity carrying the id and new field values
 * @return Result reporting success or failure
 */
Result updateDataset(Dataset dataset);
/**
 * Deletes the given datasets in one batch (multi-select delete).
 * @param datasetList dataset entities selected for removal
 * @return Result reporting success or failure
 */
Result deleteDatasetBatch(List<Dataset> datasetList);
/**
 * Builds the data graph for one device model.
 * @param detailId device model id
 * @return Result carrying the graph payload
 */
Result getDataGraphByDetailId(String detailId);
/**
 * Builds the data graph for one device.
 * @param deviceId device id
 * @return Result carrying the graph payload
 */
Result getDataGraphByDeviceId(String deviceId);
}

View File

@ -0,0 +1,16 @@
package com.cqu.data.service;
import com.cqu.data.entity.SourceTypeKafka;
import com.baomidou.mybatisplus.extension.service.IService;
/**
 * <p>
 * Service interface for the kafka data-source table (generic CRUD via IService).
 * </p>
 *
 * @author ding
 * @since 2022-06-27
 */
public interface SourceTypeKafkaService extends IService<SourceTypeKafka> {
}

View File

@ -0,0 +1,16 @@
package com.cqu.data.service;
import com.cqu.data.entity.SourceTypeLiga;
import com.baomidou.mybatisplus.extension.service.IService;
/**
 * <p>
 * Service interface for the liga data-source table (generic CRUD via IService).
 * </p>
 *
 * @author ding
 * @since 2022-06-27
 */
public interface SourceTypeLigaService extends IService<SourceTypeLiga> {
}

View File

@ -0,0 +1,16 @@
package com.cqu.data.service;
import com.cqu.data.entity.SourceTypeMqtt;
import com.baomidou.mybatisplus.extension.service.IService;
/**
 * <p>
 * Service interface for the MQTT data-source table (generic CRUD via IService).
 * </p>
 *
 * @author ding
 * @since 2022-06-27
 */
public interface SourceTypeMqttService extends IService<SourceTypeMqtt> {
}

View File

@ -0,0 +1,16 @@
package com.cqu.data.service;
import com.cqu.data.entity.SourceTypeOpcUa;
import com.baomidou.mybatisplus.extension.service.IService;
/**
 * <p>
 * Service interface for the OPC_UA data-source table (generic CRUD via IService).
 * </p>
 *
 * @author ding
 * @since 2022-06-27
 */
public interface SourceTypeOpcUaService extends IService<SourceTypeOpcUa> {
}

Some files were not shown because too many files have changed in this diff Show More