Conflicts:
	GE_Migrating_data/pom.xml
	GE_Migrating_data/src/main/java/com/cqu/ge/ArchiveAPITest.java
	GE_Migrating_data/src/main/java/com/cqu/ge/Collector.java
	GE_Migrating_data/src/main/java/com/cqu/ge/CollectorServiceImplTest.java
	GE_Migrating_data/src/main/java/com/cqu/ge/DataServiceImplTest.java
	GE_Migrating_data/src/main/java/com/cqu/ge/TestHelper.java
markilue 2023-06-04 12:34:52 +08:00
commit 68e48d7ddb
410 changed files with 4419632 additions and 467 deletions

View File

@ -44,6 +44,11 @@
<artifactId>flume-kafka-source</artifactId>
<version>1.9.0</version>
</dependency>
<dependency>
<groupId>com.alibaba</groupId>
<artifactId>fastjson</artifactId>
<version>1.2.68</version>
</dependency>
</dependencies>

View File

@ -0,0 +1,70 @@
package com.atguigu.kafka.consumer;
import com.alibaba.fastjson.JSON;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import java.time.Duration;
import java.util.ArrayList;
import java.util.List;
import java.util.Properties;
/**
* Kafka consumer with automatic offset commits.
*/
public class KafkaConsumerDemoTest {
public static void main(String[] args) {
//0. Create the configuration object
Properties props = new Properties();
//location of the Kafka cluster
props.put("bootstrap.servers", "Ding202:9092");
//consumer group id
props.put("group.id", "suibian");
//commit offsets automatically
props.put("enable.auto.commit", "true");
//interval between offset commits
props.put("auto.commit.interval.ms", "1000");
//deserializers for keys and values
props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
//1. Create the consumer
KafkaConsumer<String, String> kafkaConsumer = new KafkaConsumer<String, String>(props);
//2. Subscribe to topics
//subscribe() takes a Collection of topics; a small list is enough here
List<String> topic = new ArrayList<String>();
topic.add("pykafka_demo");
// topic.add("hello");
//if a topic does not exist it is auto-created, but with the default of one partition and one replica
//topic.add("second");
kafkaConsumer.subscribe(topic);
//3. Consume data continuously
while (true){
//the consumer pulls data actively, so a poll timeout has to be supplied
//poll() returns a batch of records
ConsumerRecords<String, String> records = kafkaConsumer.poll(Duration.ofSeconds(2));
for (ConsumerRecord<String, String> record : records) {
//print every record
System.out.println("Consumed: "+record.topic()+
":"+record.partition()+
":"+record.offset()+
":"+record.key()+
":"+record.value());
System.out.println(JSON.toJSONString(JSON.parseObject(record.value())));
}
}
//close the consumer (never reached while the loop above is running)
//kafkaConsumer.close();
}
}
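For quick local testing of the consumer above, a minimal producer sketch could look like the following; it is not part of this commit, and it assumes the same Ding202:9092 broker and the pykafka_demo topic. It just feeds a JSON record that the fastjson parsing in the consumer expects.

package com.atguigu.kafka.producer;

import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;

import java.util.Properties;

public class KafkaProducerDemo {
    public static void main(String[] args) {
        Properties props = new Properties();
        // same broker as the consumer above (assumption)
        props.put("bootstrap.servers", "Ding202:9092");
        props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");

        KafkaProducer<String, String> producer = new KafkaProducer<>(props);
        // send one JSON payload so that JSON.parseObject(record.value()) in the consumer has something to parse
        producer.send(new ProducerRecord<>("pykafka_demo", "key1", "{\"msg\":\"hello\"}"));
        producer.flush();
        producer.close();
    }
}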

View File

@ -0,0 +1,97 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<parent>
<artifactId>rt-gmall-parent</artifactId>
<groupId>com.atguigu.rtgmall</groupId>
<version>1.0-SNAPSHOT</version>
</parent>
<modelVersion>4.0.0</modelVersion>
<artifactId>gmall-cdc</artifactId>
<properties>
<maven.compiler.source>8</maven.compiler.source>
<maven.compiler.target>8</maven.compiler.target>
</properties>
<dependencies>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-java</artifactId>
<version>1.12.0</version>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-streaming-java_2.12</artifactId>
<version>1.12.0</version>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-clients_2.12</artifactId>
<version>1.12.0</version>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-client</artifactId>
<version>3.1.3</version>
</dependency>
<dependency>
<groupId>mysql</groupId>
<artifactId>mysql-connector-java</artifactId>
<version>5.1.48</version>
</dependency>
<dependency>
<groupId>com.alibaba.ververica</groupId>
<artifactId>flink-connector-mysql-cdc</artifactId>
<version>1.2.0</version>
</dependency>
<dependency>
<groupId>com.alibaba</groupId>
<artifactId>fastjson</artifactId>
<version>1.2.75</version>
</dependency>
<!-- flink sql-->
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-table-planner-blink_2.12</artifactId>
<version>1.12.0</version>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-assembly-plugin</artifactId>
<version>3.0.0</version>
<configuration>
<descriptorRefs>
<descriptorRef>jar-with-dependencies</descriptorRef>
</descriptorRefs>
</configuration>
<executions>
<execution>
<id>make-assembly</id>
<phase>package</phase>
<goals>
<goal>single</goal>
</goals>
</execution>
</executions>
</plugin>
</plugins>
</build>
</project>

View File

@ -0,0 +1,66 @@
package com.atguigu.gmall.cdc;
import com.alibaba.ververica.cdc.connectors.mysql.table.StartupOptions;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.restartstrategy.RestartStrategies;
import org.apache.flink.runtime.state.filesystem.FsStateBackend;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.environment.CheckpointConfig;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import com.alibaba.ververica.cdc.debezium.StringDebeziumDeserializationSchema;
import com.alibaba.ververica.cdc.connectors.mysql.MySQLSource;
import org.apache.flink.streaming.api.functions.source.SourceFunction;
/**
*@BelongsProject: rt-gmall-parent
*@BelongsPackage: com.atguigu.gmall.cdc
*@Author: markilue
*@CreateTime: 2023-05-06 14:26
*@Description: TODO Read MySQL table changes dynamically with Flink CDC -- DataStream API
*@Version: 1.0
*/
public class FlinkCDC01_DS {
public static void main(String[] args) throws Exception {
StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
// enable checkpoint
// env.enableCheckpointing(3000);
env.setParallelism(1);
//TODO 2. Enable checkpointing. Flink CDC stores the binlog reading position as state in the checkpoint;
// to resume from where it left off, the job has to be restarted from a checkpoint or savepoint
//2.1 Enable checkpointing every 5 seconds and specify the checkpoint consistency semantics
env.enableCheckpointing(5000L, CheckpointingMode.EXACTLY_ONCE);
//2.2 Set the checkpoint timeout to 1 minute
env.getCheckpointConfig().setCheckpointTimeout(60000);
//2.3 Set the restart strategy used when recovering from a checkpoint
env.setRestartStrategy(RestartStrategies.fixedDelayRestart(2,2000L));
//2.4 Retain the last checkpoint when the job is cancelled
env.getCheckpointConfig().enableExternalizedCheckpoints(CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION);
//2.5 Set the state backend
env.setStateBackend(new FsStateBackend("hdfs://Ding202:8020/flinkCDC"));
//2.6 Set the user name used to access HDFS
System.setProperty("HADOOP_USER_NAME", "dingjiawen");
SourceFunction<String> mySqlSource = MySQLSource.<String>builder()
.hostname("Ding202")
.port(3306)
.databaseList("rt_gmall_realtime") // set captured database
.tableList("rt_gmall_realtime.t_user") // set captured table
.username("root")
.password("123456")
.startupOptions(StartupOptions.initial())
.deserializer(new StringDebeziumDeserializationSchema()) // converts SourceRecord to JSON String
.build();
env.addSource(mySqlSource)
.print();
env.execute("Print MySQL Snapshot + Binlog");
}
}
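Since the checkpoints above are retained on cancellation under hdfs://Ding202:8020/flinkCDC, resuming this job from one of them would typically be done through the Flink CLI. The line below is only a sketch: the job id, checkpoint number, and jar name are placeholders, not values from this commit.

flink run -s hdfs://Ding202:8020/flinkCDC/<job-id>/chk-<n> -c com.atguigu.gmall.cdc.FlinkCDC01_DS gmall-cdc-1.0-SNAPSHOT-jar-with-dependencies.jar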

View File

@ -0,0 +1,55 @@
package com.atguigu.gmall.cdc;
import com.alibaba.ververica.cdc.connectors.mysql.MySQLSource;
import com.alibaba.ververica.cdc.connectors.mysql.table.StartupOptions;
import com.alibaba.ververica.cdc.debezium.StringDebeziumDeserializationSchema;
import org.apache.flink.api.common.restartstrategy.RestartStrategies;
import org.apache.flink.runtime.state.filesystem.FsStateBackend;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.environment.CheckpointConfig;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.source.SourceFunction;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
/**
*@BelongsProject: rt-gmall-parent
*@BelongsPackage: com.atguigu.gmall.cdc
*@Author: markilue
*@CreateTime: 2023-05-06 14:26
*@Description: TODO Read MySQL table changes dynamically with Flink CDC -- SQL API
*@Version: 1.0
*/
public class FlinkCDC02_SQL {
public static void main(String[] args) throws Exception {
//TODO 1. Prepare the basic environment
//1.1 Stream execution environment
StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
//1.2 Table environment
StreamTableEnvironment tableEnvironment = StreamTableEnvironment.create(env);
//1.3 Set the parallelism
env.setParallelism(1);
//TODO 2. Declare the dynamic table backed by the MySQL CDC connector
tableEnvironment.executeSql("CREATE TABLE user_info (" +
" id INT NOT NULL," +
" name STRING" +
") WITH (" +
" 'connector' = 'mysql-cdc'," +
" 'hostname' = 'Ding202'," +
" 'port' = '3306'," +
" 'username' = 'root'," +
" 'password' = '123456'," +
" 'database-name' = 'rt_gmall_realtime'," +
" 'table-name' = 't_user'" +
")");
tableEnvironment.executeSql("select * from user_info").print();
env.execute("Print MySQL Snapshot + Binlog");
}
}

View File

@ -0,0 +1,131 @@
package com.atguigu.gmall.cdc;
import com.alibaba.fastjson.JSONObject;
import com.alibaba.ververica.cdc.connectors.mysql.MySQLSource;
import com.alibaba.ververica.cdc.connectors.mysql.table.StartupOptions;
import com.alibaba.ververica.cdc.debezium.DebeziumDeserializationSchema;
import com.alibaba.ververica.cdc.debezium.StringDebeziumDeserializationSchema;
import io.debezium.data.Envelope;
import org.apache.flink.api.common.restartstrategy.RestartStrategies;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.runtime.state.filesystem.FsStateBackend;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.environment.CheckpointConfig;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.source.SourceFunction;
import org.apache.flink.util.Collector;
import org.apache.kafka.connect.data.Field;
import org.apache.kafka.connect.data.Struct;
import org.apache.kafka.connect.source.SourceRecord;
/**
*@BelongsProject: rt-gmall-parent
*@BelongsPackage: com.atguigu.gmall.cdc
*@Author: markilue
*@CreateTime: 2023-05-06 14:26
*@Description:
* TODO Read MySQL table changes dynamically with Flink CDC -- DataStream API
* with a custom deserialization schema
*@Version: 1.0
*/
public class FlinkCDC03_CustomSchema {
public static void main(String[] args) throws Exception {
StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
env.setParallelism(1);
//TODO 2. Enable checkpointing. Flink CDC stores the binlog reading position as state in the checkpoint;
// to resume from where it left off, the job has to be restarted from a checkpoint or savepoint
// //2.1 Enable checkpointing every 5 seconds and specify the checkpoint consistency semantics
// env.enableCheckpointing(5000L, CheckpointingMode.EXACTLY_ONCE);
// //2.2 Set the checkpoint timeout to 1 minute
// env.getCheckpointConfig().setCheckpointTimeout(60000);
// //2.3 Set the restart strategy used when recovering from a checkpoint
// env.setRestartStrategy(RestartStrategies.fixedDelayRestart(2,2000L));
// //2.4 Retain the last checkpoint when the job is cancelled
// env.getCheckpointConfig().enableExternalizedCheckpoints(CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION);
// //2.5 Set the state backend
// env.setStateBackend(new FsStateBackend("hdfs://Ding202:8020/flinkCDC"));
// //2.6 Set the user name used to access HDFS
// System.setProperty("HADOOP_USER_NAME", "dingjiawen");
SourceFunction<String> mySqlSource = MySQLSource.<String>builder()
.hostname("Ding202")
.port(3306)
.databaseList("rt_gmall_realtime") // set captured database
.tableList("rt_gmall_realtime.t_user") // set captured table
.username("root")
.password("123456")
.startupOptions(StartupOptions.initial())
.deserializer(new MySchema()) // converts SourceRecord to JSON String
.build();
env.addSource(mySqlSource)
.print();
env.execute("Print MySQL Snapshot + Binlog");
}
}
/**
* Custom deserialization schema: convert the Debezium SourceRecord into a more compact JSON string.
*/
class MySchema implements DebeziumDeserializationSchema<String> {
/*
//shape of the Flink CDC record value
value=Struct{
after=Struct{
id=1,
name=ssss
},
source=Struct{
db=rt_gmall_realtime,
table=t_user
}
op=c
}
*/
@Override
public void deserialize(SourceRecord sourceRecord, Collector<String> collector) throws Exception {
Struct valueStruct = (Struct) sourceRecord.value();//use Kafka Connect's Struct: Debezium is used under the hood, and Debezium builds on Kafka Connect structs
Struct source = valueStruct.getStruct("source");
String database = source.getString("db");
String table = source.getString("table");
//operation type
String type = Envelope.operationFor(sourceRecord).toString().toLowerCase();//the Envelope enum maps the op code to its string name
if (type.equals("create")) {
type = "insert";
}
//collect the affected row data
JSONObject jsonObject = new JSONObject();
jsonObject.put("database", database);
jsonObject.put("table", table);
jsonObject.put("type", type);
JSONObject dataObject = new JSONObject();
Struct after = valueStruct.getStruct("after");
if(after!=null){
for (Field field : after.schema().fields()) {
String fieldName = field.name();
Object fieldValue = after.get(field);
dataObject.put(fieldName, fieldValue);
}
}
jsonObject.put("data",dataObject);
collector.collect(jsonObject.toJSONString());
}
@Override
public TypeInformation<String> getProducedType() {
return TypeInformation.of(String.class);
}
}
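For reference, for the insert shown in the comment above (id=1, name=ssss in rt_gmall_realtime.t_user), MySchema would emit roughly the following JSON string (field order may differ, since fastjson's JSONObject does not preserve insertion order by default):

{"database":"rt_gmall_realtime","table":"t_user","type":"insert","data":{"id":1,"name":"ssss"}}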

View File

@ -0,0 +1,33 @@
HELP.md
target/
!.mvn/wrapper/maven-wrapper.jar
!**/src/main/**/target/
!**/src/test/**/target/
### STS ###
.apt_generated
.classpath
.factorypath
.project
.settings
.springBeans
.sts4-cache
### IntelliJ IDEA ###
.idea
*.iws
*.iml
*.ipr
### NetBeans ###
/nbproject/private/
/nbbuild/
/dist/
/nbdist/
/.nb-gradle/
build/
!**/src/main/**/build/
!**/src/test/**/build/
### VS Code ###
.vscode/

View File

@ -0,0 +1,2 @@
distributionUrl=https://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/3.8.5/apache-maven-3.8.5-bin.zip
wrapperUrl=https://repo.maven.apache.org/maven2/org/apache/maven/wrapper/maven-wrapper/3.1.0/maven-wrapper-3.1.0.jar

View File

@ -0,0 +1,316 @@
#!/bin/sh
# ----------------------------------------------------------------------------
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# ----------------------------------------------------------------------------
# ----------------------------------------------------------------------------
# Maven Start Up Batch script
#
# Required ENV vars:
# ------------------
# JAVA_HOME - location of a JDK home dir
#
# Optional ENV vars
# -----------------
# M2_HOME - location of maven2's installed home dir
# MAVEN_OPTS - parameters passed to the Java VM when running Maven
# e.g. to debug Maven itself, use
# set MAVEN_OPTS=-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=8000
# MAVEN_SKIP_RC - flag to disable loading of mavenrc files
# ----------------------------------------------------------------------------
if [ -z "$MAVEN_SKIP_RC" ] ; then
if [ -f /usr/local/etc/mavenrc ] ; then
. /usr/local/etc/mavenrc
fi
if [ -f /etc/mavenrc ] ; then
. /etc/mavenrc
fi
if [ -f "$HOME/.mavenrc" ] ; then
. "$HOME/.mavenrc"
fi
fi
# OS specific support. $var _must_ be set to either true or false.
cygwin=false;
darwin=false;
mingw=false
case "`uname`" in
CYGWIN*) cygwin=true ;;
MINGW*) mingw=true;;
Darwin*) darwin=true
# Use /usr/libexec/java_home if available, otherwise fall back to /Library/Java/Home
# See https://developer.apple.com/library/mac/qa/qa1170/_index.html
if [ -z "$JAVA_HOME" ]; then
if [ -x "/usr/libexec/java_home" ]; then
export JAVA_HOME="`/usr/libexec/java_home`"
else
export JAVA_HOME="/Library/Java/Home"
fi
fi
;;
esac
if [ -z "$JAVA_HOME" ] ; then
if [ -r /etc/gentoo-release ] ; then
JAVA_HOME=`java-config --jre-home`
fi
fi
if [ -z "$M2_HOME" ] ; then
## resolve links - $0 may be a link to maven's home
PRG="$0"
# need this for relative symlinks
while [ -h "$PRG" ] ; do
ls=`ls -ld "$PRG"`
link=`expr "$ls" : '.*-> \(.*\)$'`
if expr "$link" : '/.*' > /dev/null; then
PRG="$link"
else
PRG="`dirname "$PRG"`/$link"
fi
done
saveddir=`pwd`
M2_HOME=`dirname "$PRG"`/..
# make it fully qualified
M2_HOME=`cd "$M2_HOME" && pwd`
cd "$saveddir"
# echo Using m2 at $M2_HOME
fi
# For Cygwin, ensure paths are in UNIX format before anything is touched
if $cygwin ; then
[ -n "$M2_HOME" ] &&
M2_HOME=`cygpath --unix "$M2_HOME"`
[ -n "$JAVA_HOME" ] &&
JAVA_HOME=`cygpath --unix "$JAVA_HOME"`
[ -n "$CLASSPATH" ] &&
CLASSPATH=`cygpath --path --unix "$CLASSPATH"`
fi
# For Mingw, ensure paths are in UNIX format before anything is touched
if $mingw ; then
[ -n "$M2_HOME" ] &&
M2_HOME="`(cd "$M2_HOME"; pwd)`"
[ -n "$JAVA_HOME" ] &&
JAVA_HOME="`(cd "$JAVA_HOME"; pwd)`"
fi
if [ -z "$JAVA_HOME" ]; then
javaExecutable="`which javac`"
if [ -n "$javaExecutable" ] && ! [ "`expr \"$javaExecutable\" : '\([^ ]*\)'`" = "no" ]; then
# readlink(1) is not available as standard on Solaris 10.
readLink=`which readlink`
if [ ! `expr "$readLink" : '\([^ ]*\)'` = "no" ]; then
if $darwin ; then
javaHome="`dirname \"$javaExecutable\"`"
javaExecutable="`cd \"$javaHome\" && pwd -P`/javac"
else
javaExecutable="`readlink -f \"$javaExecutable\"`"
fi
javaHome="`dirname \"$javaExecutable\"`"
javaHome=`expr "$javaHome" : '\(.*\)/bin'`
JAVA_HOME="$javaHome"
export JAVA_HOME
fi
fi
fi
if [ -z "$JAVACMD" ] ; then
if [ -n "$JAVA_HOME" ] ; then
if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
# IBM's JDK on AIX uses strange locations for the executables
JAVACMD="$JAVA_HOME/jre/sh/java"
else
JAVACMD="$JAVA_HOME/bin/java"
fi
else
JAVACMD="`\\unset -f command; \\command -v java`"
fi
fi
if [ ! -x "$JAVACMD" ] ; then
echo "Error: JAVA_HOME is not defined correctly." >&2
echo " We cannot execute $JAVACMD" >&2
exit 1
fi
if [ -z "$JAVA_HOME" ] ; then
echo "Warning: JAVA_HOME environment variable is not set."
fi
CLASSWORLDS_LAUNCHER=org.codehaus.plexus.classworlds.launcher.Launcher
# traverses directory structure from process work directory to filesystem root
# first directory with .mvn subdirectory is considered project base directory
find_maven_basedir() {
if [ -z "$1" ]
then
echo "Path not specified to find_maven_basedir"
return 1
fi
basedir="$1"
wdir="$1"
while [ "$wdir" != '/' ] ; do
if [ -d "$wdir"/.mvn ] ; then
basedir=$wdir
break
fi
# workaround for JBEAP-8937 (on Solaris 10/Sparc)
if [ -d "${wdir}" ]; then
wdir=`cd "$wdir/.."; pwd`
fi
# end of workaround
done
echo "${basedir}"
}
# concatenates all lines of a file
concat_lines() {
if [ -f "$1" ]; then
echo "$(tr -s '\n' ' ' < "$1")"
fi
}
BASE_DIR=`find_maven_basedir "$(pwd)"`
if [ -z "$BASE_DIR" ]; then
exit 1;
fi
##########################################################################################
# Extension to allow automatically downloading the maven-wrapper.jar from Maven-central
# This allows using the maven wrapper in projects that prohibit checking in binary data.
##########################################################################################
if [ -r "$BASE_DIR/.mvn/wrapper/maven-wrapper.jar" ]; then
if [ "$MVNW_VERBOSE" = true ]; then
echo "Found .mvn/wrapper/maven-wrapper.jar"
fi
else
if [ "$MVNW_VERBOSE" = true ]; then
echo "Couldn't find .mvn/wrapper/maven-wrapper.jar, downloading it ..."
fi
if [ -n "$MVNW_REPOURL" ]; then
jarUrl="$MVNW_REPOURL/org/apache/maven/wrapper/maven-wrapper/3.1.0/maven-wrapper-3.1.0.jar"
else
jarUrl="https://repo.maven.apache.org/maven2/org/apache/maven/wrapper/maven-wrapper/3.1.0/maven-wrapper-3.1.0.jar"
fi
while IFS="=" read key value; do
case "$key" in (wrapperUrl) jarUrl="$value"; break ;;
esac
done < "$BASE_DIR/.mvn/wrapper/maven-wrapper.properties"
if [ "$MVNW_VERBOSE" = true ]; then
echo "Downloading from: $jarUrl"
fi
wrapperJarPath="$BASE_DIR/.mvn/wrapper/maven-wrapper.jar"
if $cygwin; then
wrapperJarPath=`cygpath --path --windows "$wrapperJarPath"`
fi
if command -v wget > /dev/null; then
if [ "$MVNW_VERBOSE" = true ]; then
echo "Found wget ... using wget"
fi
if [ -z "$MVNW_USERNAME" ] || [ -z "$MVNW_PASSWORD" ]; then
wget "$jarUrl" -O "$wrapperJarPath" || rm -f "$wrapperJarPath"
else
wget --http-user=$MVNW_USERNAME --http-password=$MVNW_PASSWORD "$jarUrl" -O "$wrapperJarPath" || rm -f "$wrapperJarPath"
fi
elif command -v curl > /dev/null; then
if [ "$MVNW_VERBOSE" = true ]; then
echo "Found curl ... using curl"
fi
if [ -z "$MVNW_USERNAME" ] || [ -z "$MVNW_PASSWORD" ]; then
curl -o "$wrapperJarPath" "$jarUrl" -f
else
curl --user $MVNW_USERNAME:$MVNW_PASSWORD -o "$wrapperJarPath" "$jarUrl" -f
fi
else
if [ "$MVNW_VERBOSE" = true ]; then
echo "Falling back to using Java to download"
fi
javaClass="$BASE_DIR/.mvn/wrapper/MavenWrapperDownloader.java"
# For Cygwin, switch paths to Windows format before running javac
if $cygwin; then
javaClass=`cygpath --path --windows "$javaClass"`
fi
if [ -e "$javaClass" ]; then
if [ ! -e "$BASE_DIR/.mvn/wrapper/MavenWrapperDownloader.class" ]; then
if [ "$MVNW_VERBOSE" = true ]; then
echo " - Compiling MavenWrapperDownloader.java ..."
fi
# Compiling the Java class
("$JAVA_HOME/bin/javac" "$javaClass")
fi
if [ -e "$BASE_DIR/.mvn/wrapper/MavenWrapperDownloader.class" ]; then
# Running the downloader
if [ "$MVNW_VERBOSE" = true ]; then
echo " - Running MavenWrapperDownloader.java ..."
fi
("$JAVA_HOME/bin/java" -cp .mvn/wrapper MavenWrapperDownloader "$MAVEN_PROJECTBASEDIR")
fi
fi
fi
fi
##########################################################################################
# End of extension
##########################################################################################
export MAVEN_PROJECTBASEDIR=${MAVEN_BASEDIR:-"$BASE_DIR"}
if [ "$MVNW_VERBOSE" = true ]; then
echo $MAVEN_PROJECTBASEDIR
fi
MAVEN_OPTS="$(concat_lines "$MAVEN_PROJECTBASEDIR/.mvn/jvm.config") $MAVEN_OPTS"
# For Cygwin, switch paths to Windows format before running java
if $cygwin; then
[ -n "$M2_HOME" ] &&
M2_HOME=`cygpath --path --windows "$M2_HOME"`
[ -n "$JAVA_HOME" ] &&
JAVA_HOME=`cygpath --path --windows "$JAVA_HOME"`
[ -n "$CLASSPATH" ] &&
CLASSPATH=`cygpath --path --windows "$CLASSPATH"`
[ -n "$MAVEN_PROJECTBASEDIR" ] &&
MAVEN_PROJECTBASEDIR=`cygpath --path --windows "$MAVEN_PROJECTBASEDIR"`
fi
# Provide a "standardized" way to retrieve the CLI args that will
# work with both Windows and non-Windows executions.
MAVEN_CMD_LINE_ARGS="$MAVEN_CONFIG $@"
export MAVEN_CMD_LINE_ARGS
WRAPPER_LAUNCHER=org.apache.maven.wrapper.MavenWrapperMain
exec "$JAVACMD" \
$MAVEN_OPTS \
$MAVEN_DEBUG_OPTS \
-classpath "$MAVEN_PROJECTBASEDIR/.mvn/wrapper/maven-wrapper.jar" \
"-Dmaven.home=${M2_HOME}" \
"-Dmaven.multiModuleProjectDirectory=${MAVEN_PROJECTBASEDIR}" \
${WRAPPER_LAUNCHER} $MAVEN_CONFIG "$@"

View File

@ -0,0 +1,188 @@
@REM ----------------------------------------------------------------------------
@REM Licensed to the Apache Software Foundation (ASF) under one
@REM or more contributor license agreements. See the NOTICE file
@REM distributed with this work for additional information
@REM regarding copyright ownership. The ASF licenses this file
@REM to you under the Apache License, Version 2.0 (the
@REM "License"); you may not use this file except in compliance
@REM with the License. You may obtain a copy of the License at
@REM
@REM https://www.apache.org/licenses/LICENSE-2.0
@REM
@REM Unless required by applicable law or agreed to in writing,
@REM software distributed under the License is distributed on an
@REM "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
@REM KIND, either express or implied. See the License for the
@REM specific language governing permissions and limitations
@REM under the License.
@REM ----------------------------------------------------------------------------
@REM ----------------------------------------------------------------------------
@REM Maven Start Up Batch script
@REM
@REM Required ENV vars:
@REM JAVA_HOME - location of a JDK home dir
@REM
@REM Optional ENV vars
@REM M2_HOME - location of maven2's installed home dir
@REM MAVEN_BATCH_ECHO - set to 'on' to enable the echoing of the batch commands
@REM MAVEN_BATCH_PAUSE - set to 'on' to wait for a keystroke before ending
@REM MAVEN_OPTS - parameters passed to the Java VM when running Maven
@REM e.g. to debug Maven itself, use
@REM set MAVEN_OPTS=-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=8000
@REM MAVEN_SKIP_RC - flag to disable loading of mavenrc files
@REM ----------------------------------------------------------------------------
@REM Begin all REM lines with '@' in case MAVEN_BATCH_ECHO is 'on'
@echo off
@REM set title of command window
title %0
@REM enable echoing by setting MAVEN_BATCH_ECHO to 'on'
@if "%MAVEN_BATCH_ECHO%" == "on" echo %MAVEN_BATCH_ECHO%
@REM set %HOME% to equivalent of $HOME
if "%HOME%" == "" (set "HOME=%HOMEDRIVE%%HOMEPATH%")
@REM Execute a user defined script before this one
if not "%MAVEN_SKIP_RC%" == "" goto skipRcPre
@REM check for pre script, once with legacy .bat ending and once with .cmd ending
if exist "%USERPROFILE%\mavenrc_pre.bat" call "%USERPROFILE%\mavenrc_pre.bat" %*
if exist "%USERPROFILE%\mavenrc_pre.cmd" call "%USERPROFILE%\mavenrc_pre.cmd" %*
:skipRcPre
@setlocal
set ERROR_CODE=0
@REM To isolate internal variables from possible post scripts, we use another setlocal
@setlocal
@REM ==== START VALIDATION ====
if not "%JAVA_HOME%" == "" goto OkJHome
echo.
echo Error: JAVA_HOME not found in your environment. >&2
echo Please set the JAVA_HOME variable in your environment to match the >&2
echo location of your Java installation. >&2
echo.
goto error
:OkJHome
if exist "%JAVA_HOME%\bin\java.exe" goto init
echo.
echo Error: JAVA_HOME is set to an invalid directory. >&2
echo JAVA_HOME = "%JAVA_HOME%" >&2
echo Please set the JAVA_HOME variable in your environment to match the >&2
echo location of your Java installation. >&2
echo.
goto error
@REM ==== END VALIDATION ====
:init
@REM Find the project base dir, i.e. the directory that contains the folder ".mvn".
@REM Fallback to current working directory if not found.
set MAVEN_PROJECTBASEDIR=%MAVEN_BASEDIR%
IF NOT "%MAVEN_PROJECTBASEDIR%"=="" goto endDetectBaseDir
set EXEC_DIR=%CD%
set WDIR=%EXEC_DIR%
:findBaseDir
IF EXIST "%WDIR%"\.mvn goto baseDirFound
cd ..
IF "%WDIR%"=="%CD%" goto baseDirNotFound
set WDIR=%CD%
goto findBaseDir
:baseDirFound
set MAVEN_PROJECTBASEDIR=%WDIR%
cd "%EXEC_DIR%"
goto endDetectBaseDir
:baseDirNotFound
set MAVEN_PROJECTBASEDIR=%EXEC_DIR%
cd "%EXEC_DIR%"
:endDetectBaseDir
IF NOT EXIST "%MAVEN_PROJECTBASEDIR%\.mvn\jvm.config" goto endReadAdditionalConfig
@setlocal EnableExtensions EnableDelayedExpansion
for /F "usebackq delims=" %%a in ("%MAVEN_PROJECTBASEDIR%\.mvn\jvm.config") do set JVM_CONFIG_MAVEN_PROPS=!JVM_CONFIG_MAVEN_PROPS! %%a
@endlocal & set JVM_CONFIG_MAVEN_PROPS=%JVM_CONFIG_MAVEN_PROPS%
:endReadAdditionalConfig
SET MAVEN_JAVA_EXE="%JAVA_HOME%\bin\java.exe"
set WRAPPER_JAR="%MAVEN_PROJECTBASEDIR%\.mvn\wrapper\maven-wrapper.jar"
set WRAPPER_LAUNCHER=org.apache.maven.wrapper.MavenWrapperMain
set DOWNLOAD_URL="https://repo.maven.apache.org/maven2/org/apache/maven/wrapper/maven-wrapper/3.1.0/maven-wrapper-3.1.0.jar"
FOR /F "usebackq tokens=1,2 delims==" %%A IN ("%MAVEN_PROJECTBASEDIR%\.mvn\wrapper\maven-wrapper.properties") DO (
IF "%%A"=="wrapperUrl" SET DOWNLOAD_URL=%%B
)
@REM Extension to allow automatically downloading the maven-wrapper.jar from Maven-central
@REM This allows using the maven wrapper in projects that prohibit checking in binary data.
if exist %WRAPPER_JAR% (
if "%MVNW_VERBOSE%" == "true" (
echo Found %WRAPPER_JAR%
)
) else (
if not "%MVNW_REPOURL%" == "" (
SET DOWNLOAD_URL="%MVNW_REPOURL%/org/apache/maven/wrapper/maven-wrapper/3.1.0/maven-wrapper-3.1.0.jar"
)
if "%MVNW_VERBOSE%" == "true" (
echo Couldn't find %WRAPPER_JAR%, downloading it ...
echo Downloading from: %DOWNLOAD_URL%
)
powershell -Command "&{"^
"$webclient = new-object System.Net.WebClient;"^
"if (-not ([string]::IsNullOrEmpty('%MVNW_USERNAME%') -and [string]::IsNullOrEmpty('%MVNW_PASSWORD%'))) {"^
"$webclient.Credentials = new-object System.Net.NetworkCredential('%MVNW_USERNAME%', '%MVNW_PASSWORD%');"^
"}"^
"[Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12; $webclient.DownloadFile('%DOWNLOAD_URL%', '%WRAPPER_JAR%')"^
"}"
if "%MVNW_VERBOSE%" == "true" (
echo Finished downloading %WRAPPER_JAR%
)
)
@REM End of extension
@REM Provide a "standardized" way to retrieve the CLI args that will
@REM work with both Windows and non-Windows executions.
set MAVEN_CMD_LINE_ARGS=%*
%MAVEN_JAVA_EXE% ^
%JVM_CONFIG_MAVEN_PROPS% ^
%MAVEN_OPTS% ^
%MAVEN_DEBUG_OPTS% ^
-classpath %WRAPPER_JAR% ^
"-Dmaven.multiModuleProjectDirectory=%MAVEN_PROJECTBASEDIR%" ^
%WRAPPER_LAUNCHER% %MAVEN_CONFIG% %*
if ERRORLEVEL 1 goto error
goto end
:error
set ERROR_CODE=1
:end
@endlocal & set ERROR_CODE=%ERROR_CODE%
if not "%MAVEN_SKIP_RC%"=="" goto skipRcPost
@REM check for post script, once with legacy .bat ending and once with .cmd ending
if exist "%USERPROFILE%\mavenrc_post.bat" call "%USERPROFILE%\mavenrc_post.bat"
if exist "%USERPROFILE%\mavenrc_post.cmd" call "%USERPROFILE%\mavenrc_post.cmd"
:skipRcPost
@REM pause the script if MAVEN_BATCH_PAUSE is set to 'on'
if "%MAVEN_BATCH_PAUSE%"=="on" pause
if "%MAVEN_TERMINATE_CMD%"=="on" exit %ERROR_CODE%
cmd /C exit /B %ERROR_CODE%

View File

@ -0,0 +1,69 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-parent</artifactId>
<version>2.6.8</version>
<relativePath/> <!-- lookup parent from repository -->
</parent>
<groupId>com.atguigu.rtgmall</groupId>
<artifactId>gmall-logger</artifactId>
<version>0.0.1-SNAPSHOT</version>
<name>gmall-logger</name>
<description>Demo project for Spring Boot</description>
<properties>
<java.version>1.8</java.version>
</properties>
<dependencies>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-web</artifactId>
</dependency>
<dependency>
<groupId>org.springframework.kafka</groupId>
<artifactId>spring-kafka</artifactId>
</dependency>
<dependency>
<groupId>org.projectlombok</groupId>
<artifactId>lombok</artifactId>
<optional>true</optional>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-test</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.springframework.kafka</groupId>
<artifactId>spring-kafka-test</artifactId>
<scope>test</scope>
</dependency>
</dependencies>
<build>
<plugins>
<!-- required so that the module can be packaged -->
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-resources-plugin</artifactId>
<version>3.1.0</version>
</plugin>
<plugin>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-maven-plugin</artifactId>
<configuration>
<excludes>
<exclude>
<groupId>org.projectlombok</groupId>
<artifactId>lombok</artifactId>
</exclude>
</excludes>
</configuration>
</plugin>
</plugins>
</build>
</project>

View File

@ -0,0 +1,13 @@
package com.atguigu.rtgmall;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
@SpringBootApplication
public class GmallLoggerApplication {
public static void main(String[] args) {
SpringApplication.run(GmallLoggerApplication.class, args);
}
}
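The gmall-logger module in this commit only ships the Spring Boot entry point. A controller that accepts a log line over HTTP and forwards it to Kafka, which is what the spring-kafka dependency suggests, would typically look like the sketch below. The mapping path, request parameter, and the topic name ods_base_log are assumptions, not part of this commit; the Kafka bootstrap servers are expected to come from application.properties.

package com.atguigu.rtgmall.controller;

import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;

@Slf4j
@RestController
public class LoggerController {

    @Autowired
    private KafkaTemplate<String, String> kafkaTemplate;

    // Receive one JSON log line (like the sample records further down),
    // write it through the logger, and forward it to Kafka.
    @RequestMapping("/applog")
    public String appLog(@RequestParam("param") String jsonLog) {
        log.info(jsonLog);
        kafkaTemplate.send("ods_base_log", jsonLog); // topic name is an assumption
        return "success";
    }
}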

View File

@ -0,0 +1,125 @@
{"common":{"ar":"420000","ba":"Huawei","ch":"huawei","is_new":"1","md":"Huawei P30","mid":"mid_6","os":"Android 11.0","uid":"21","vc":"v2.1.134"},"start":{"entry":"icon","loading_time":2116,"open_ad_id":4,"open_ad_ms":3162,"open_ad_skip_ms":0},"ts":1655279132000}
{"common":{"ar":"420000","ba":"Huawei","ch":"huawei","is_new":"1","md":"Huawei P30","mid":"mid_6","os":"Android 11.0","uid":"21","vc":"v2.1.134"},"displays":[{"display_type":"activity","item":"1","item_type":"activity_id","order":1,"pos_id":4},{"display_type":"query","item":"2","item_type":"sku_id","order":2,"pos_id":4},{"display_type":"query","item":"4","item_type":"sku_id","order":3,"pos_id":1},{"display_type":"query","item":"6","item_type":"sku_id","order":4,"pos_id":5},{"display_type":"query","item":"8","item_type":"sku_id","order":5,"pos_id":3},{"display_type":"promotion","item":"2","item_type":"sku_id","order":6,"pos_id":5},{"display_type":"query","item":"5","item_type":"sku_id","order":7,"pos_id":3},{"display_type":"query","item":"7","item_type":"sku_id","order":8,"pos_id":3},{"display_type":"query","item":"9","item_type":"sku_id","order":9,"pos_id":5},{"display_type":"query","item":"5","item_type":"sku_id","order":10,"pos_id":4}],"page":{"during_time":9542,"page_id":"home"},"ts":1655279132000}
{"actions":[{"action_id":"get_coupon","item":"1","item_type":"coupon_id","ts":1655279136319}],"common":{"ar":"420000","ba":"Huawei","ch":"huawei","is_new":"1","md":"Huawei P30","mid":"mid_6","os":"Android 11.0","uid":"21","vc":"v2.1.134"},"displays":[{"display_type":"query","item":"5","item_type":"sku_id","order":1,"pos_id":4},{"display_type":"promotion","item":"2","item_type":"sku_id","order":2,"pos_id":4},{"display_type":"promotion","item":"9","item_type":"sku_id","order":3,"pos_id":1},{"display_type":"promotion","item":"9","item_type":"sku_id","order":4,"pos_id":2},{"display_type":"promotion","item":"2","item_type":"sku_id","order":5,"pos_id":3},{"display_type":"query","item":"4","item_type":"sku_id","order":6,"pos_id":2},{"display_type":"query","item":"9","item_type":"sku_id","order":7,"pos_id":5},{"display_type":"query","item":"3","item_type":"sku_id","order":8,"pos_id":2},{"display_type":"query","item":"1","item_type":"sku_id","order":9,"pos_id":1}],"page":{"during_time":8638,"item":"3","item_type":"sku_id","last_page_id":"home","page_id":"good_detail","source_type":"query"},"ts":1655279132000}
{"common":{"ar":"310000","ba":"iPhone","ch":"Appstore","is_new":"0","md":"iPhone 8","mid":"mid_12","os":"iOS 13.3.1","uid":"35","vc":"v2.1.132"},"start":{"entry":"icon","loading_time":18029,"open_ad_id":20,"open_ad_ms":8125,"open_ad_skip_ms":0},"ts":1655279134000}
{"common":{"ar":"310000","ba":"iPhone","ch":"Appstore","is_new":"0","md":"iPhone 8","mid":"mid_12","os":"iOS 13.3.1","uid":"35","vc":"v2.1.132"},"displays":[{"display_type":"activity","item":"2","item_type":"activity_id","order":1,"pos_id":4},{"display_type":"promotion","item":"9","item_type":"sku_id","order":2,"pos_id":4},{"display_type":"query","item":"8","item_type":"sku_id","order":3,"pos_id":5},{"display_type":"query","item":"5","item_type":"sku_id","order":4,"pos_id":4},{"display_type":"query","item":"3","item_type":"sku_id","order":5,"pos_id":2},{"display_type":"promotion","item":"2","item_type":"sku_id","order":6,"pos_id":3},{"display_type":"query","item":"2","item_type":"sku_id","order":7,"pos_id":5},{"display_type":"query","item":"10","item_type":"sku_id","order":8,"pos_id":1},{"display_type":"query","item":"7","item_type":"sku_id","order":9,"pos_id":3}],"page":{"during_time":11196,"page_id":"home"},"ts":1655279134000}
{"actions":[{"action_id":"cart_add","item":"7","item_type":"sku_id","ts":1655279135904}],"common":{"ar":"310000","ba":"iPhone","ch":"Appstore","is_new":"0","md":"iPhone 8","mid":"mid_12","os":"iOS 13.3.1","uid":"35","vc":"v2.1.132"},"displays":[{"display_type":"query","item":"1","item_type":"sku_id","order":1,"pos_id":2},{"display_type":"promotion","item":"8","item_type":"sku_id","order":2,"pos_id":3},{"display_type":"query","item":"2","item_type":"sku_id","order":3,"pos_id":3},{"display_type":"query","item":"6","item_type":"sku_id","order":4,"pos_id":2},{"display_type":"query","item":"7","item_type":"sku_id","order":5,"pos_id":2},{"display_type":"query","item":"2","item_type":"sku_id","order":6,"pos_id":1},{"display_type":"query","item":"6","item_type":"sku_id","order":7,"pos_id":2},{"display_type":"query","item":"8","item_type":"sku_id","order":8,"pos_id":1},{"display_type":"promotion","item":"10","item_type":"sku_id","order":9,"pos_id":4}],"page":{"during_time":3809,"item":"7","item_type":"sku_id","last_page_id":"home","page_id":"good_detail","source_type":"promotion"},"ts":1655279134000}
{"common":{"ar":"440000","ba":"Xiaomi","ch":"web","is_new":"1","md":"Xiaomi Mix2 ","mid":"mid_10","os":"Android 11.0","uid":"18","vc":"v2.1.132"},"start":{"entry":"notice","loading_time":9350,"open_ad_id":13,"open_ad_ms":5748,"open_ad_skip_ms":4498},"ts":1655279134000}
{"common":{"ar":"440000","ba":"Xiaomi","ch":"web","is_new":"1","md":"Xiaomi Mix2 ","mid":"mid_10","os":"Android 11.0","uid":"18","vc":"v2.1.132"},"displays":[{"display_type":"activity","item":"1","item_type":"activity_id","order":1,"pos_id":5},{"display_type":"recommend","item":"4","item_type":"sku_id","order":2,"pos_id":4},{"display_type":"query","item":"7","item_type":"sku_id","order":3,"pos_id":5},{"display_type":"query","item":"9","item_type":"sku_id","order":4,"pos_id":3},{"display_type":"query","item":"6","item_type":"sku_id","order":5,"pos_id":2},{"display_type":"query","item":"1","item_type":"sku_id","order":6,"pos_id":5},{"display_type":"query","item":"5","item_type":"sku_id","order":7,"pos_id":2},{"display_type":"query","item":"10","item_type":"sku_id","order":8,"pos_id":3},{"display_type":"promotion","item":"9","item_type":"sku_id","order":9,"pos_id":1},{"display_type":"query","item":"7","item_type":"sku_id","order":10,"pos_id":5},{"display_type":"query","item":"9","item_type":"sku_id","order":11,"pos_id":3}],"page":{"during_time":17565,"page_id":"home"},"ts":1655279134000}
{"common":{"ar":"310000","ba":"Xiaomi","ch":"xiaomi","is_new":"0","md":"Xiaomi 9","mid":"mid_19","os":"Android 10.0","uid":"42","vc":"v2.1.134"},"start":{"entry":"icon","loading_time":14187,"open_ad_id":14,"open_ad_ms":3161,"open_ad_skip_ms":0},"ts":1655279135000}
{"common":{"ar":"310000","ba":"Xiaomi","ch":"xiaomi","is_new":"0","md":"Xiaomi 9","mid":"mid_19","os":"Android 10.0","uid":"42","vc":"v2.1.134"},"displays":[{"display_type":"activity","item":"2","item_type":"activity_id","order":1,"pos_id":2},{"display_type":"activity","item":"2","item_type":"activity_id","order":2,"pos_id":2},{"display_type":"query","item":"9","item_type":"sku_id","order":3,"pos_id":3},{"display_type":"promotion","item":"9","item_type":"sku_id","order":4,"pos_id":2},{"display_type":"query","item":"8","item_type":"sku_id","order":5,"pos_id":1},{"display_type":"query","item":"10","item_type":"sku_id","order":6,"pos_id":4},{"display_type":"promotion","item":"3","item_type":"sku_id","order":7,"pos_id":1},{"display_type":"query","item":"3","item_type":"sku_id","order":8,"pos_id":1},{"display_type":"query","item":"7","item_type":"sku_id","order":9,"pos_id":4},{"display_type":"query","item":"2","item_type":"sku_id","order":10,"pos_id":3}],"page":{"during_time":8255,"page_id":"home"},"ts":1655279135000}
{"common":{"ar":"310000","ba":"Xiaomi","ch":"xiaomi","is_new":"0","md":"Xiaomi 9","mid":"mid_19","os":"Android 10.0","uid":"42","vc":"v2.1.134"},"page":{"during_time":4119,"last_page_id":"home","page_id":"search"},"ts":1655279135000}
{"common":{"ar":"310000","ba":"Xiaomi","ch":"xiaomi","is_new":"0","md":"Xiaomi 9","mid":"mid_19","os":"Android 10.0","uid":"42","vc":"v2.1.134"},"displays":[{"display_type":"promotion","item":"3","item_type":"sku_id","order":1,"pos_id":5},{"display_type":"query","item":"5","item_type":"sku_id","order":2,"pos_id":3},{"display_type":"promotion","item":"1","item_type":"sku_id","order":3,"pos_id":4},{"display_type":"query","item":"9","item_type":"sku_id","order":4,"pos_id":2},{"display_type":"query","item":"9","item_type":"sku_id","order":5,"pos_id":2},{"display_type":"query","item":"10","item_type":"sku_id","order":6,"pos_id":1},{"display_type":"recommend","item":"5","item_type":"sku_id","order":7,"pos_id":4},{"display_type":"query","item":"1","item_type":"sku_id","order":8,"pos_id":3},{"display_type":"query","item":"5","item_type":"sku_id","order":9,"pos_id":4}],"page":{"during_time":10562,"item":"图书","item_type":"keyword","last_page_id":"search","page_id":"good_list"},"ts":1655279135000}
{"actions":[{"action_id":"get_coupon","item":"2","item_type":"coupon_id","ts":1655279143500}],"common":{"ar":"310000","ba":"Xiaomi","ch":"xiaomi","is_new":"0","md":"Xiaomi 9","mid":"mid_19","os":"Android 10.0","uid":"42","vc":"v2.1.134"},"displays":[{"display_type":"query","item":"10","item_type":"sku_id","order":1,"pos_id":1},{"display_type":"promotion","item":"7","item_type":"sku_id","order":2,"pos_id":2},{"display_type":"query","item":"5","item_type":"sku_id","order":3,"pos_id":1},{"display_type":"query","item":"9","item_type":"sku_id","order":4,"pos_id":3},{"display_type":"recommend","item":"8","item_type":"sku_id","order":5,"pos_id":2},{"display_type":"promotion","item":"2","item_type":"sku_id","order":6,"pos_id":4}],"page":{"during_time":17001,"item":"8","item_type":"sku_id","last_page_id":"good_list","page_id":"good_detail","source_type":"query"},"ts":1655279135000}
{"common":{"ar":"310000","ba":"Xiaomi","ch":"xiaomi","is_new":"0","md":"Xiaomi 9","mid":"mid_19","os":"Android 10.0","uid":"42","vc":"v2.1.134"},"page":{"during_time":5954,"last_page_id":"good_detail","page_id":"login"},"ts":1655279135000}
{"actions":[{"action_id":"get_coupon","item":"1","item_type":"coupon_id","ts":1655279135616}],"common":{"ar":"310000","ba":"Xiaomi","ch":"xiaomi","is_new":"0","md":"Xiaomi 9","mid":"mid_19","os":"Android 10.0","uid":"42","vc":"v2.1.134"},"displays":[{"display_type":"query","item":"6","item_type":"sku_id","order":1,"pos_id":5},{"display_type":"promotion","item":"2","item_type":"sku_id","order":2,"pos_id":5},{"display_type":"recommend","item":"4","item_type":"sku_id","order":3,"pos_id":3},{"display_type":"recommend","item":"7","item_type":"sku_id","order":4,"pos_id":4},{"display_type":"query","item":"10","item_type":"sku_id","order":5,"pos_id":4},{"display_type":"recommend","item":"6","item_type":"sku_id","order":6,"pos_id":5}],"page":{"during_time":1232,"item":"8","item_type":"sku_id","last_page_id":"login","page_id":"good_detail","source_type":"query"},"ts":1655279135000}
{"actions":[{"action_id":"cart_minus_num","item":"6","item_type":"sku_id","ts":1655279144814}],"common":{"ar":"310000","ba":"Xiaomi","ch":"xiaomi","is_new":"0","md":"Xiaomi 9","mid":"mid_19","os":"Android 10.0","uid":"42","vc":"v2.1.134"},"page":{"during_time":19629,"last_page_id":"good_detail","page_id":"cart"},"ts":1655279135000}
{"common":{"ar":"310000","ba":"Xiaomi","ch":"xiaomi","is_new":"0","md":"Xiaomi 9","mid":"mid_19","os":"Android 10.0","uid":"42","vc":"v2.1.134"},"page":{"during_time":7666,"item":"2,6","item_type":"sku_ids","last_page_id":"cart","page_id":"trade"},"ts":1655279135000}
{"common":{"ar":"310000","ba":"Xiaomi","ch":"xiaomi","is_new":"0","md":"Xiaomi 9","mid":"mid_19","os":"Android 10.0","uid":"42","vc":"v2.1.134"},"page":{"during_time":3828,"item":"3,6,8","item_type":"sku_ids","last_page_id":"trade","page_id":"payment"},"ts":1655279135000}
{"common":{"ar":"110000","ba":"iPhone","ch":"Appstore","is_new":"1","md":"iPhone Xs","mid":"mid_20","os":"iOS 13.3.1","uid":"38","vc":"v2.1.134"},"start":{"entry":"icon","loading_time":3586,"open_ad_id":1,"open_ad_ms":3511,"open_ad_skip_ms":0},"ts":1655279136000}
{"common":{"ar":"110000","ba":"iPhone","ch":"Appstore","is_new":"1","md":"iPhone Xs","mid":"mid_20","os":"iOS 13.3.1","uid":"38","vc":"v2.1.134"},"displays":[{"display_type":"activity","item":"2","item_type":"activity_id","order":1,"pos_id":4},{"display_type":"query","item":"7","item_type":"sku_id","order":2,"pos_id":2},{"display_type":"query","item":"1","item_type":"sku_id","order":3,"pos_id":3},{"display_type":"recommend","item":"10","item_type":"sku_id","order":4,"pos_id":3},{"display_type":"query","item":"7","item_type":"sku_id","order":5,"pos_id":1},{"display_type":"query","item":"1","item_type":"sku_id","order":6,"pos_id":1},{"display_type":"query","item":"2","item_type":"sku_id","order":7,"pos_id":2},{"display_type":"query","item":"5","item_type":"sku_id","order":8,"pos_id":1},{"display_type":"query","item":"2","item_type":"sku_id","order":9,"pos_id":3}],"page":{"during_time":12076,"page_id":"home"},"ts":1655279136000}
{"common":{"ar":"110000","ba":"iPhone","ch":"Appstore","is_new":"1","md":"iPhone Xs","mid":"mid_20","os":"iOS 13.3.1","uid":"38","vc":"v2.1.134"},"page":{"during_time":7600,"last_page_id":"home","page_id":"search"},"ts":1655279136000}
{"common":{"ar":"110000","ba":"iPhone","ch":"Appstore","is_new":"1","md":"iPhone Xs","mid":"mid_20","os":"iOS 13.3.1","uid":"38","vc":"v2.1.134"},"displays":[{"display_type":"query","item":"1","item_type":"sku_id","order":1,"pos_id":3},{"display_type":"promotion","item":"2","item_type":"sku_id","order":2,"pos_id":4},{"display_type":"promotion","item":"10","item_type":"sku_id","order":3,"pos_id":1},{"display_type":"promotion","item":"4","item_type":"sku_id","order":4,"pos_id":5}],"page":{"during_time":19058,"item":"小米盒子","item_type":"keyword","last_page_id":"search","page_id":"good_list"},"ts":1655279136000}
{"actions":[{"action_id":"favor_add","item":"5","item_type":"sku_id","ts":1655279139470},{"action_id":"get_coupon","item":"1","item_type":"coupon_id","ts":1655279142940}],"common":{"ar":"110000","ba":"iPhone","ch":"Appstore","is_new":"1","md":"iPhone Xs","mid":"mid_20","os":"iOS 13.3.1","uid":"38","vc":"v2.1.134"},"displays":[{"display_type":"promotion","item":"7","item_type":"sku_id","order":1,"pos_id":5},{"display_type":"query","item":"10","item_type":"sku_id","order":2,"pos_id":2},{"display_type":"recommend","item":"7","item_type":"sku_id","order":3,"pos_id":2},{"display_type":"query","item":"5","item_type":"sku_id","order":4,"pos_id":1}],"page":{"during_time":10411,"item":"5","item_type":"sku_id","last_page_id":"good_list","page_id":"good_detail","source_type":"promotion"},"ts":1655279136000}
{"common":{"ar":"110000","ba":"iPhone","ch":"Appstore","is_new":"1","md":"iPhone Xs","mid":"mid_20","os":"iOS 13.3.1","uid":"38","vc":"v2.1.134"},"page":{"during_time":2665,"last_page_id":"good_detail","page_id":"login"},"ts":1655279136000}
{"common":{"ar":"110000","ba":"iPhone","ch":"Appstore","is_new":"1","md":"iPhone Xs","mid":"mid_20","os":"iOS 13.3.1","uid":"38","vc":"v2.1.134"},"displays":[{"display_type":"promotion","item":"10","item_type":"sku_id","order":1,"pos_id":5},{"display_type":"query","item":"6","item_type":"sku_id","order":2,"pos_id":4},{"display_type":"promotion","item":"10","item_type":"sku_id","order":3,"pos_id":3},{"display_type":"query","item":"1","item_type":"sku_id","order":4,"pos_id":2},{"display_type":"query","item":"7","item_type":"sku_id","order":5,"pos_id":1},{"display_type":"query","item":"7","item_type":"sku_id","order":6,"pos_id":5},{"display_type":"query","item":"8","item_type":"sku_id","order":7,"pos_id":5}],"page":{"during_time":14956,"item":"5","item_type":"sku_id","last_page_id":"login","page_id":"good_detail","source_type":"recommend"},"ts":1655279136000}
{"common":{"ar":"110000","ba":"iPhone","ch":"Appstore","is_new":"1","md":"iPhone Xs","mid":"mid_20","os":"iOS 13.3.1","uid":"38","vc":"v2.1.134"},"page":{"during_time":13085,"last_page_id":"good_detail","page_id":"cart"},"ts":1655279136000}
{"common":{"ar":"110000","ba":"iPhone","ch":"Appstore","is_new":"1","md":"iPhone Xs","mid":"mid_20","os":"iOS 13.3.1","uid":"38","vc":"v2.1.134"},"page":{"during_time":2924,"item":"10","item_type":"sku_ids","last_page_id":"cart","page_id":"trade"},"ts":1655279136000}
{"common":{"ar":"110000","ba":"iPhone","ch":"Appstore","is_new":"1","md":"iPhone Xs","mid":"mid_20","os":"iOS 13.3.1","uid":"38","vc":"v2.1.134"},"page":{"during_time":5939,"item":"7","item_type":"sku_ids","last_page_id":"trade","page_id":"payment"},"ts":1655279136000}
{"common":{"ar":"310000","ba":"iPhone","ch":"Appstore","is_new":"1","md":"iPhone X","mid":"mid_11","os":"iOS 13.3.1","uid":"40","vc":"v2.1.134"},"start":{"entry":"icon","loading_time":19763,"open_ad_id":9,"open_ad_ms":8038,"open_ad_skip_ms":5987},"ts":1655279137000}
{"common":{"ar":"310000","ba":"iPhone","ch":"Appstore","is_new":"1","md":"iPhone X","mid":"mid_11","os":"iOS 13.3.1","uid":"40","vc":"v2.1.134"},"displays":[{"display_type":"activity","item":"1","item_type":"activity_id","order":1,"pos_id":4},{"display_type":"activity","item":"2","item_type":"activity_id","order":2,"pos_id":4},{"display_type":"query","item":"1","item_type":"sku_id","order":3,"pos_id":3},{"display_type":"recommend","item":"2","item_type":"sku_id","order":4,"pos_id":2},{"display_type":"query","item":"10","item_type":"sku_id","order":5,"pos_id":1},{"display_type":"query","item":"4","item_type":"sku_id","order":6,"pos_id":2}],"page":{"during_time":3624,"page_id":"home"},"ts":1655279137000}
{"actions":[{"action_id":"get_coupon","item":"1","item_type":"coupon_id","ts":1655279145089}],"common":{"ar":"310000","ba":"iPhone","ch":"Appstore","is_new":"1","md":"iPhone X","mid":"mid_11","os":"iOS 13.3.1","uid":"40","vc":"v2.1.134"},"displays":[{"display_type":"query","item":"7","item_type":"sku_id","order":1,"pos_id":5},{"display_type":"promotion","item":"9","item_type":"sku_id","order":2,"pos_id":5},{"display_type":"query","item":"2","item_type":"sku_id","order":3,"pos_id":2},{"display_type":"query","item":"8","item_type":"sku_id","order":4,"pos_id":1},{"display_type":"query","item":"6","item_type":"sku_id","order":5,"pos_id":4}],"page":{"during_time":16179,"item":"7","item_type":"sku_id","last_page_id":"home","page_id":"good_detail","source_type":"recommend"},"ts":1655279137000}
{"common":{"ar":"230000","ba":"Huawei","ch":"360","is_new":"0","md":"Huawei P30","mid":"mid_16","os":"Android 11.0","uid":"45","vc":"v2.1.134"},"start":{"entry":"icon","loading_time":9088,"open_ad_id":18,"open_ad_ms":9437,"open_ad_skip_ms":0},"ts":1655279137000}
{"common":{"ar":"230000","ba":"Huawei","ch":"360","is_new":"0","md":"Huawei P30","mid":"mid_16","os":"Android 11.0","uid":"45","vc":"v2.1.134"},"displays":[{"display_type":"activity","item":"1","item_type":"activity_id","order":1,"pos_id":3},{"display_type":"promotion","item":"9","item_type":"sku_id","order":2,"pos_id":4},{"display_type":"promotion","item":"7","item_type":"sku_id","order":3,"pos_id":4},{"display_type":"promotion","item":"5","item_type":"sku_id","order":4,"pos_id":2},{"display_type":"query","item":"10","item_type":"sku_id","order":5,"pos_id":4}],"page":{"during_time":15488,"page_id":"home"},"ts":1655279137000}
{"common":{"ar":"230000","ba":"Huawei","ch":"360","is_new":"0","md":"Huawei P30","mid":"mid_16","os":"Android 11.0","uid":"45","vc":"v2.1.134"},"page":{"during_time":7551,"last_page_id":"home","page_id":"search"},"ts":1655279137000}
{"common":{"ar":"230000","ba":"Huawei","ch":"360","is_new":"0","md":"Huawei P30","mid":"mid_16","os":"Android 11.0","uid":"45","vc":"v2.1.134"},"displays":[{"display_type":"query","item":"3","item_type":"sku_id","order":1,"pos_id":4},{"display_type":"query","item":"5","item_type":"sku_id","order":2,"pos_id":5},{"display_type":"query","item":"1","item_type":"sku_id","order":3,"pos_id":5},{"display_type":"promotion","item":"7","item_type":"sku_id","order":4,"pos_id":5},{"display_type":"query","item":"6","item_type":"sku_id","order":5,"pos_id":1},{"display_type":"query","item":"1","item_type":"sku_id","order":6,"pos_id":5},{"display_type":"promotion","item":"3","item_type":"sku_id","order":7,"pos_id":4},{"display_type":"query","item":"10","item_type":"sku_id","order":8,"pos_id":2},{"display_type":"recommend","item":"2","item_type":"sku_id","order":9,"pos_id":5},{"display_type":"promotion","item":"7","item_type":"sku_id","order":10,"pos_id":5}],"page":{"during_time":18119,"item":"小米盒子","item_type":"keyword","last_page_id":"search","page_id":"good_list"},"ts":1655279137000}
{"actions":[{"action_id":"favor_add","item":"10","item_type":"sku_id","ts":1655279142854},{"action_id":"get_coupon","item":"2","item_type":"coupon_id","ts":1655279148708}],"common":{"ar":"230000","ba":"Huawei","ch":"360","is_new":"0","md":"Huawei P30","mid":"mid_16","os":"Android 11.0","uid":"45","vc":"v2.1.134"},"displays":[{"display_type":"query","item":"9","item_type":"sku_id","order":1,"pos_id":2},{"display_type":"query","item":"8","item_type":"sku_id","order":2,"pos_id":3},{"display_type":"query","item":"10","item_type":"sku_id","order":3,"pos_id":1},{"display_type":"query","item":"6","item_type":"sku_id","order":4,"pos_id":2}],"page":{"during_time":17562,"item":"10","item_type":"sku_id","last_page_id":"good_list","page_id":"good_detail","source_type":"recommend"},"ts":1655279137000}
{"common":{"ar":"230000","ba":"Huawei","ch":"360","is_new":"0","md":"Huawei P30","mid":"mid_16","os":"Android 11.0","uid":"45","vc":"v2.1.134"},"page":{"during_time":15834,"last_page_id":"good_detail","page_id":"login"},"ts":1655279137000}
{"actions":[{"action_id":"favor_add","item":"9","item_type":"sku_id","ts":1655279143423}],"common":{"ar":"230000","ba":"Huawei","ch":"360","is_new":"0","md":"Huawei P30","mid":"mid_16","os":"Android 11.0","uid":"45","vc":"v2.1.134"},"displays":[{"display_type":"query","item":"6","item_type":"sku_id","order":1,"pos_id":5},{"display_type":"query","item":"5","item_type":"sku_id","order":2,"pos_id":2},{"display_type":"query","item":"6","item_type":"sku_id","order":3,"pos_id":1},{"display_type":"query","item":"1","item_type":"sku_id","order":4,"pos_id":4},{"display_type":"promotion","item":"2","item_type":"sku_id","order":5,"pos_id":3},{"display_type":"query","item":"2","item_type":"sku_id","order":6,"pos_id":4},{"display_type":"promotion","item":"4","item_type":"sku_id","order":7,"pos_id":5},{"display_type":"promotion","item":"6","item_type":"sku_id","order":8,"pos_id":5},{"display_type":"promotion","item":"7","item_type":"sku_id","order":9,"pos_id":5},{"display_type":"query","item":"10","item_type":"sku_id","order":10,"pos_id":3}],"page":{"during_time":12847,"item":"9","item_type":"sku_id","last_page_id":"login","page_id":"good_detail","source_type":"promotion"},"ts":1655279137000}
{"actions":[{"action_id":"cart_remove","item":"9","item_type":"sku_id","ts":1655279144267}],"common":{"ar":"230000","ba":"Huawei","ch":"360","is_new":"0","md":"Huawei P30","mid":"mid_16","os":"Android 11.0","uid":"45","vc":"v2.1.134"},"page":{"during_time":14534,"last_page_id":"good_detail","page_id":"cart"},"ts":1655279137000}
{"common":{"ar":"230000","ba":"Huawei","ch":"360","is_new":"0","md":"Huawei P30","mid":"mid_16","os":"Android 11.0","uid":"45","vc":"v2.1.134"},"page":{"during_time":19066,"item":"7,10","item_type":"sku_ids","last_page_id":"cart","page_id":"trade"},"ts":1655279137000}
{"common":{"ar":"230000","ba":"Huawei","ch":"360","is_new":"0","md":"Huawei P30","mid":"mid_16","os":"Android 11.0","uid":"45","vc":"v2.1.134"},"page":{"during_time":17424,"item":"3,7","item_type":"sku_ids","last_page_id":"trade","page_id":"payment"},"ts":1655279137000}
{"common":{"ar":"440000","ba":"Xiaomi","ch":"xiaomi","is_new":"0","md":"Xiaomi 9","mid":"mid_19","os":"Android 10.0","uid":"49","vc":"v2.1.132"},"start":{"entry":"icon","loading_time":15593,"open_ad_id":13,"open_ad_ms":5913,"open_ad_skip_ms":0},"ts":1655279138000}
{"common":{"ar":"440000","ba":"Xiaomi","ch":"xiaomi","is_new":"0","md":"Xiaomi 9","mid":"mid_19","os":"Android 10.0","uid":"49","vc":"v2.1.132"},"displays":[{"display_type":"activity","item":"2","item_type":"activity_id","order":1,"pos_id":2},{"display_type":"recommend","item":"6","item_type":"sku_id","order":2,"pos_id":5},{"display_type":"query","item":"1","item_type":"sku_id","order":3,"pos_id":1},{"display_type":"recommend","item":"10","item_type":"sku_id","order":4,"pos_id":3},{"display_type":"promotion","item":"7","item_type":"sku_id","order":5,"pos_id":4},{"display_type":"promotion","item":"10","item_type":"sku_id","order":6,"pos_id":1},{"display_type":"query","item":"6","item_type":"sku_id","order":7,"pos_id":2},{"display_type":"promotion","item":"7","item_type":"sku_id","order":8,"pos_id":3},{"display_type":"recommend","item":"7","item_type":"sku_id","order":9,"pos_id":2},{"display_type":"query","item":"4","item_type":"sku_id","order":10,"pos_id":5}],"page":{"during_time":2996,"page_id":"home"},"ts":1655279138000}
{"common":{"ar":"440000","ba":"Xiaomi","ch":"xiaomi","is_new":"0","md":"Xiaomi 9","mid":"mid_19","os":"Android 10.0","uid":"49","vc":"v2.1.132"},"err":{"error_code":1473,"msg":" Exception in thread \\ java.net.SocketTimeoutException\\n \\tat com.atgugu.gmall2020.mock.log.bean.AppError.main(AppError.java:xxxxxx)"},"page":{"during_time":5097,"last_page_id":"home","page_id":"mine"},"ts":1655279138000}
{"common":{"ar":"440000","ba":"Xiaomi","ch":"xiaomi","is_new":"0","md":"Xiaomi 9","mid":"mid_19","os":"Android 10.0","uid":"49","vc":"v2.1.132"},"page":{"during_time":8882,"last_page_id":"mine","page_id":"orders_unpaid"},"ts":1655279138000}
{"actions":[{"action_id":"get_coupon","item":"3","item_type":"coupon_id","ts":1655279146452}],"common":{"ar":"440000","ba":"Xiaomi","ch":"xiaomi","is_new":"0","md":"Xiaomi 9","mid":"mid_19","os":"Android 10.0","uid":"49","vc":"v2.1.132"},"displays":[{"display_type":"recommend","item":"3","item_type":"sku_id","order":1,"pos_id":3},{"display_type":"query","item":"2","item_type":"sku_id","order":2,"pos_id":1},{"display_type":"query","item":"9","item_type":"sku_id","order":3,"pos_id":1},{"display_type":"recommend","item":"8","item_type":"sku_id","order":4,"pos_id":2},{"display_type":"query","item":"10","item_type":"sku_id","order":5,"pos_id":5},{"display_type":"query","item":"2","item_type":"sku_id","order":6,"pos_id":5},{"display_type":"query","item":"7","item_type":"sku_id","order":7,"pos_id":3},{"display_type":"promotion","item":"1","item_type":"sku_id","order":8,"pos_id":1},{"display_type":"query","item":"2","item_type":"sku_id","order":9,"pos_id":2},{"display_type":"query","item":"6","item_type":"sku_id","order":10,"pos_id":1}],"page":{"during_time":16905,"item":"3","item_type":"sku_id","last_page_id":"orders_unpaid","page_id":"good_detail","source_type":"promotion"},"ts":1655279138000}
{"common":{"ar":"440000","ba":"Xiaomi","ch":"xiaomi","is_new":"0","md":"Xiaomi 9","mid":"mid_19","os":"Android 10.0","uid":"49","vc":"v2.1.132"},"displays":[{"display_type":"query","item":"3","item_type":"sku_id","order":1,"pos_id":2},{"display_type":"query","item":"4","item_type":"sku_id","order":2,"pos_id":2},{"display_type":"query","item":"7","item_type":"sku_id","order":3,"pos_id":2},{"display_type":"query","item":"6","item_type":"sku_id","order":4,"pos_id":5},{"display_type":"promotion","item":"5","item_type":"sku_id","order":5,"pos_id":2},{"display_type":"query","item":"5","item_type":"sku_id","order":6,"pos_id":4},{"display_type":"promotion","item":"8","item_type":"sku_id","order":7,"pos_id":1},{"display_type":"query","item":"1","item_type":"sku_id","order":8,"pos_id":2},{"display_type":"promotion","item":"8","item_type":"sku_id","order":9,"pos_id":4},{"display_type":"recommend","item":"10","item_type":"sku_id","order":10,"pos_id":1}],"page":{"during_time":9651,"item":"4","item_type":"sku_id","last_page_id":"good_detail","page_id":"good_spec","source_type":"activity"},"ts":1655279138000}
{"common":{"ar":"440000","ba":"Xiaomi","ch":"xiaomi","is_new":"0","md":"Xiaomi 9","mid":"mid_19","os":"Android 10.0","uid":"49","vc":"v2.1.132"},"page":{"during_time":8606,"item":"3","item_type":"sku_id","last_page_id":"good_spec","page_id":"comment","source_type":"activity"},"ts":1655279138000}
{"common":{"ar":"440000","ba":"Xiaomi","ch":"xiaomi","is_new":"0","md":"Xiaomi 9","mid":"mid_19","os":"Android 10.0","uid":"49","vc":"v2.1.132"},"page":{"during_time":10147,"item":"1,9","item_type":"sku_ids","last_page_id":"comment","page_id":"trade"},"ts":1655279138000}
{"common":{"ar":"440000","ba":"Xiaomi","ch":"xiaomi","is_new":"0","md":"Xiaomi 9","mid":"mid_19","os":"Android 10.0","uid":"49","vc":"v2.1.132"},"page":{"during_time":3322,"item":"1","item_type":"sku_ids","last_page_id":"trade","page_id":"payment"},"ts":1655279138000}
{"common":{"ar":"370000","ba":"iPhone","ch":"Appstore","is_new":"1","md":"iPhone Xs Max","mid":"mid_2","os":"iOS 13.2.3","uid":"24","vc":"v2.1.134"},"start":{"entry":"notice","loading_time":5388,"open_ad_id":15,"open_ad_ms":9018,"open_ad_skip_ms":8891},"ts":1655279139000}
{"common":{"ar":"370000","ba":"iPhone","ch":"Appstore","is_new":"1","md":"iPhone Xs Max","mid":"mid_2","os":"iOS 13.2.3","uid":"24","vc":"v2.1.134"},"displays":[{"display_type":"activity","item":"1","item_type":"activity_id","order":1,"pos_id":1},{"display_type":"activity","item":"1","item_type":"activity_id","order":2,"pos_id":1},{"display_type":"query","item":"2","item_type":"sku_id","order":3,"pos_id":2},{"display_type":"query","item":"7","item_type":"sku_id","order":4,"pos_id":4},{"display_type":"query","item":"1","item_type":"sku_id","order":5,"pos_id":3},{"display_type":"promotion","item":"6","item_type":"sku_id","order":6,"pos_id":5},{"display_type":"promotion","item":"10","item_type":"sku_id","order":7,"pos_id":4},{"display_type":"recommend","item":"6","item_type":"sku_id","order":8,"pos_id":1},{"display_type":"query","item":"3","item_type":"sku_id","order":9,"pos_id":5},{"display_type":"promotion","item":"4","item_type":"sku_id","order":10,"pos_id":2},{"display_type":"query","item":"8","item_type":"sku_id","order":11,"pos_id":2}],"page":{"during_time":12294,"page_id":"home"},"ts":1655279139000}
{"common":{"ar":"370000","ba":"iPhone","ch":"Appstore","is_new":"1","md":"iPhone Xs Max","mid":"mid_2","os":"iOS 13.2.3","uid":"24","vc":"v2.1.134"},"page":{"during_time":1484,"last_page_id":"home","page_id":"mine"},"ts":1655279139000}
{"common":{"ar":"370000","ba":"iPhone","ch":"Appstore","is_new":"1","md":"iPhone Xs Max","mid":"mid_2","os":"iOS 13.2.3","uid":"24","vc":"v2.1.134"},"page":{"during_time":2271,"last_page_id":"mine","page_id":"orders_unpaid"},"ts":1655279139000}
{"actions":[{"action_id":"get_coupon","item":"1","item_type":"coupon_id","ts":1655279146468}],"common":{"ar":"370000","ba":"iPhone","ch":"Appstore","is_new":"1","md":"iPhone Xs Max","mid":"mid_2","os":"iOS 13.2.3","uid":"24","vc":"v2.1.134"},"displays":[{"display_type":"query","item":"9","item_type":"sku_id","order":1,"pos_id":2},{"display_type":"query","item":"1","item_type":"sku_id","order":2,"pos_id":5},{"display_type":"query","item":"6","item_type":"sku_id","order":3,"pos_id":3},{"display_type":"query","item":"3","item_type":"sku_id","order":4,"pos_id":4},{"display_type":"recommend","item":"10","item_type":"sku_id","order":5,"pos_id":5}],"page":{"during_time":14936,"item":"1","item_type":"sku_id","last_page_id":"orders_unpaid","page_id":"good_detail","source_type":"query"},"ts":1655279139000}
{"common":{"ar":"370000","ba":"iPhone","ch":"Appstore","is_new":"1","md":"iPhone Xs Max","mid":"mid_2","os":"iOS 13.2.3","uid":"24","vc":"v2.1.134"},"displays":[{"display_type":"promotion","item":"2","item_type":"sku_id","order":1,"pos_id":5},{"display_type":"promotion","item":"5","item_type":"sku_id","order":2,"pos_id":5},{"display_type":"recommend","item":"9","item_type":"sku_id","order":3,"pos_id":1},{"display_type":"promotion","item":"6","item_type":"sku_id","order":4,"pos_id":3}],"page":{"during_time":16768,"item":"1","item_type":"sku_id","last_page_id":"good_detail","page_id":"good_spec","source_type":"query"},"ts":1655279139000}
{"common":{"ar":"370000","ba":"iPhone","ch":"Appstore","is_new":"1","md":"iPhone Xs Max","mid":"mid_2","os":"iOS 13.2.3","uid":"24","vc":"v2.1.134"},"page":{"during_time":14951,"item":"7","item_type":"sku_id","last_page_id":"good_spec","page_id":"comment","source_type":"query"},"ts":1655279139000}
{"common":{"ar":"370000","ba":"iPhone","ch":"Appstore","is_new":"1","md":"iPhone Xs Max","mid":"mid_2","os":"iOS 13.2.3","uid":"24","vc":"v2.1.134"},"page":{"during_time":6208,"item":"2,4,7","item_type":"sku_ids","last_page_id":"comment","page_id":"trade"},"ts":1655279139000}
{"common":{"ar":"370000","ba":"iPhone","ch":"Appstore","is_new":"1","md":"iPhone Xs Max","mid":"mid_2","os":"iOS 13.2.3","uid":"24","vc":"v2.1.134"},"page":{"during_time":7648,"item":"4","item_type":"sku_ids","last_page_id":"trade","page_id":"payment"},"ts":1655279139000}
{"common":{"ar":"370000","ba":"iPhone","ch":"Appstore","is_new":"1","md":"iPhone Xs","mid":"mid_10","os":"iOS 13.3.1","uid":"27","vc":"v2.1.134"},"start":{"entry":"icon","loading_time":11717,"open_ad_id":5,"open_ad_ms":2065,"open_ad_skip_ms":1121},"ts":1655279140000}
{"common":{"ar":"370000","ba":"iPhone","ch":"Appstore","is_new":"1","md":"iPhone Xs","mid":"mid_10","os":"iOS 13.3.1","uid":"27","vc":"v2.1.134"},"displays":[{"display_type":"activity","item":"1","item_type":"activity_id","order":1,"pos_id":5},{"display_type":"activity","item":"1","item_type":"activity_id","order":2,"pos_id":5},{"display_type":"query","item":"5","item_type":"sku_id","order":3,"pos_id":1},{"display_type":"query","item":"3","item_type":"sku_id","order":4,"pos_id":1},{"display_type":"query","item":"3","item_type":"sku_id","order":5,"pos_id":4},{"display_type":"query","item":"1","item_type":"sku_id","order":6,"pos_id":2},{"display_type":"query","item":"3","item_type":"sku_id","order":7,"pos_id":2},{"display_type":"query","item":"8","item_type":"sku_id","order":8,"pos_id":1},{"display_type":"query","item":"7","item_type":"sku_id","order":9,"pos_id":3},{"display_type":"query","item":"6","item_type":"sku_id","order":10,"pos_id":1}],"page":{"during_time":5945,"page_id":"home"},"ts":1655279140000}
{"actions":[{"action_id":"favor_add","item":"3","item_type":"sku_id","ts":1655279144864},{"action_id":"get_coupon","item":"2","item_type":"coupon_id","ts":1655279149728}],"common":{"ar":"370000","ba":"iPhone","ch":"Appstore","is_new":"1","md":"iPhone Xs","mid":"mid_10","os":"iOS 13.3.1","uid":"27","vc":"v2.1.134"},"displays":[{"display_type":"query","item":"10","item_type":"sku_id","order":1,"pos_id":4},{"display_type":"query","item":"8","item_type":"sku_id","order":2,"pos_id":3},{"display_type":"query","item":"7","item_type":"sku_id","order":3,"pos_id":1},{"display_type":"promotion","item":"2","item_type":"sku_id","order":4,"pos_id":5},{"display_type":"query","item":"8","item_type":"sku_id","order":5,"pos_id":5},{"display_type":"query","item":"9","item_type":"sku_id","order":6,"pos_id":4},{"display_type":"query","item":"5","item_type":"sku_id","order":7,"pos_id":1}],"page":{"during_time":14594,"item":"3","item_type":"sku_id","last_page_id":"home","page_id":"good_detail","source_type":"query"},"ts":1655279140000}
{"common":{"ar":"110000","ba":"Xiaomi","ch":"xiaomi","is_new":"1","md":"Xiaomi 9","mid":"mid_7","os":"Android 11.0","uid":"10","vc":"v2.1.134"},"start":{"entry":"icon","loading_time":13131,"open_ad_id":8,"open_ad_ms":5355,"open_ad_skip_ms":1309},"ts":1655279141000}
{"common":{"ar":"110000","ba":"Xiaomi","ch":"xiaomi","is_new":"1","md":"Xiaomi 9","mid":"mid_7","os":"Android 11.0","uid":"10","vc":"v2.1.134"},"displays":[{"display_type":"activity","item":"1","item_type":"activity_id","order":1,"pos_id":1},{"display_type":"activity","item":"2","item_type":"activity_id","order":2,"pos_id":1},{"display_type":"query","item":"8","item_type":"sku_id","order":3,"pos_id":3},{"display_type":"promotion","item":"5","item_type":"sku_id","order":4,"pos_id":5},{"display_type":"query","item":"6","item_type":"sku_id","order":5,"pos_id":1},{"display_type":"recommend","item":"3","item_type":"sku_id","order":6,"pos_id":3},{"display_type":"promotion","item":"1","item_type":"sku_id","order":7,"pos_id":1}],"page":{"during_time":17125,"page_id":"home"},"ts":1655279141000}
{"common":{"ar":"110000","ba":"Xiaomi","ch":"xiaomi","is_new":"1","md":"Xiaomi 9","mid":"mid_7","os":"Android 11.0","uid":"10","vc":"v2.1.134"},"displays":[{"display_type":"recommend","item":"7","item_type":"sku_id","order":1,"pos_id":1},{"display_type":"promotion","item":"5","item_type":"sku_id","order":2,"pos_id":4},{"display_type":"promotion","item":"6","item_type":"sku_id","order":3,"pos_id":1},{"display_type":"query","item":"9","item_type":"sku_id","order":4,"pos_id":3},{"display_type":"query","item":"5","item_type":"sku_id","order":5,"pos_id":1},{"display_type":"query","item":"3","item_type":"sku_id","order":6,"pos_id":2}],"page":{"during_time":18147,"item":"口红","item_type":"keyword","last_page_id":"home","page_id":"good_list"},"ts":1655279141000}
{"actions":[{"action_id":"get_coupon","item":"1","item_type":"coupon_id","ts":1655279142242}],"common":{"ar":"110000","ba":"Xiaomi","ch":"xiaomi","is_new":"1","md":"Xiaomi 9","mid":"mid_7","os":"Android 11.0","uid":"10","vc":"v2.1.134"},"displays":[{"display_type":"query","item":"1","item_type":"sku_id","order":1,"pos_id":3},{"display_type":"promotion","item":"4","item_type":"sku_id","order":2,"pos_id":1},{"display_type":"recommend","item":"9","item_type":"sku_id","order":3,"pos_id":4},{"display_type":"promotion","item":"3","item_type":"sku_id","order":4,"pos_id":4},{"display_type":"recommend","item":"7","item_type":"sku_id","order":5,"pos_id":4},{"display_type":"query","item":"1","item_type":"sku_id","order":6,"pos_id":5},{"display_type":"promotion","item":"10","item_type":"sku_id","order":7,"pos_id":4},{"display_type":"query","item":"3","item_type":"sku_id","order":8,"pos_id":3},{"display_type":"query","item":"7","item_type":"sku_id","order":9,"pos_id":2},{"display_type":"promotion","item":"3","item_type":"sku_id","order":10,"pos_id":4}],"page":{"during_time":2484,"item":"8","item_type":"sku_id","last_page_id":"good_list","page_id":"good_detail","source_type":"activity"},"ts":1655279141000}
{"common":{"ar":"110000","ba":"Xiaomi","ch":"xiaomi","is_new":"1","md":"Xiaomi 9","mid":"mid_7","os":"Android 11.0","uid":"10","vc":"v2.1.134"},"page":{"during_time":19404,"last_page_id":"good_detail","page_id":"cart"},"ts":1655279141000}
{"common":{"ar":"110000","ba":"Xiaomi","ch":"xiaomi","is_new":"1","md":"Xiaomi 9","mid":"mid_7","os":"Android 11.0","uid":"10","vc":"v2.1.134"},"page":{"during_time":13879,"item":"2,3","item_type":"sku_ids","last_page_id":"cart","page_id":"trade"},"ts":1655279141000}
{"common":{"ar":"110000","ba":"Xiaomi","ch":"xiaomi","is_new":"1","md":"Xiaomi 9","mid":"mid_7","os":"Android 11.0","uid":"10","vc":"v2.1.134"},"page":{"during_time":10617,"item":"1,4,5","item_type":"sku_ids","last_page_id":"trade","page_id":"payment"},"ts":1655279141000}
{"common":{"ar":"370000","ba":"Xiaomi","ch":"xiaomi","is_new":"1","md":"Xiaomi 10 Pro ","mid":"mid_14","os":"Android 8.1","uid":"32","vc":"v2.1.134"},"start":{"entry":"icon","loading_time":9468,"open_ad_id":9,"open_ad_ms":1444,"open_ad_skip_ms":1313},"ts":1655279141000}
{"common":{"ar":"370000","ba":"Xiaomi","ch":"xiaomi","is_new":"1","md":"Xiaomi 10 Pro ","mid":"mid_14","os":"Android 8.1","uid":"32","vc":"v2.1.134"},"displays":[{"display_type":"activity","item":"2","item_type":"activity_id","order":1,"pos_id":3},{"display_type":"promotion","item":"4","item_type":"sku_id","order":2,"pos_id":4},{"display_type":"promotion","item":"4","item_type":"sku_id","order":3,"pos_id":5},{"display_type":"query","item":"2","item_type":"sku_id","order":4,"pos_id":1},{"display_type":"promotion","item":"7","item_type":"sku_id","order":5,"pos_id":5},{"display_type":"query","item":"10","item_type":"sku_id","order":6,"pos_id":1}],"page":{"during_time":18898,"page_id":"home"},"ts":1655279141000}
{"common":{"ar":"230000","ba":"iPhone","ch":"Appstore","is_new":"1","md":"iPhone Xs","mid":"mid_7","os":"iOS 13.2.9","uid":"34","vc":"v2.1.134"},"start":{"entry":"icon","loading_time":6046,"open_ad_id":4,"open_ad_ms":4840,"open_ad_skip_ms":2385},"ts":1655281316000}
{"common":{"ar":"230000","ba":"iPhone","ch":"Appstore","is_new":"1","md":"iPhone Xs","mid":"mid_7","os":"iOS 13.2.9","uid":"34","vc":"v2.1.134"},"displays":[{"display_type":"activity","item":"2","item_type":"activity_id","order":1,"pos_id":4},{"display_type":"query","item":"2","item_type":"sku_id","order":2,"pos_id":1},{"display_type":"promotion","item":"5","item_type":"sku_id","order":3,"pos_id":1},{"display_type":"query","item":"2","item_type":"sku_id","order":4,"pos_id":1},{"display_type":"promotion","item":"3","item_type":"sku_id","order":5,"pos_id":2},{"display_type":"query","item":"7","item_type":"sku_id","order":6,"pos_id":3},{"display_type":"promotion","item":"9","item_type":"sku_id","order":7,"pos_id":5},{"display_type":"query","item":"1","item_type":"sku_id","order":8,"pos_id":2},{"display_type":"promotion","item":"4","item_type":"sku_id","order":9,"pos_id":4},{"display_type":"recommend","item":"7","item_type":"sku_id","order":10,"pos_id":5},{"display_type":"promotion","item":"9","item_type":"sku_id","order":11,"pos_id":4}],"page":{"during_time":6963,"page_id":"home"},"ts":1655281316000}
{"common":{"ar":"230000","ba":"iPhone","ch":"Appstore","is_new":"1","md":"iPhone Xs","mid":"mid_7","os":"iOS 13.2.9","uid":"34","vc":"v2.1.134"},"displays":[{"display_type":"recommend","item":"4","item_type":"sku_id","order":1,"pos_id":4},{"display_type":"query","item":"2","item_type":"sku_id","order":2,"pos_id":5},{"display_type":"recommend","item":"2","item_type":"sku_id","order":3,"pos_id":1},{"display_type":"query","item":"6","item_type":"sku_id","order":4,"pos_id":4},{"display_type":"promotion","item":"2","item_type":"sku_id","order":5,"pos_id":4}],"page":{"during_time":8478,"item":"8","item_type":"sku_id","last_page_id":"home","page_id":"good_detail","source_type":"query"},"ts":1655281316000}
{"common":{"ar":"230000","ba":"Xiaomi","ch":"huawei","is_new":"0","md":"Xiaomi 9","mid":"mid_12","os":"Android 11.0","uid":"39","vc":"v2.1.134"},"start":{"entry":"icon","loading_time":11117,"open_ad_id":8,"open_ad_ms":5592,"open_ad_skip_ms":4065},"ts":1655281318000}
{"common":{"ar":"230000","ba":"Xiaomi","ch":"huawei","is_new":"0","md":"Xiaomi 9","mid":"mid_12","os":"Android 11.0","uid":"39","vc":"v2.1.134"},"displays":[{"display_type":"activity","item":"2","item_type":"activity_id","order":1,"pos_id":1},{"display_type":"activity","item":"1","item_type":"activity_id","order":2,"pos_id":1},{"display_type":"query","item":"6","item_type":"sku_id","order":3,"pos_id":1},{"display_type":"query","item":"7","item_type":"sku_id","order":4,"pos_id":5},{"display_type":"query","item":"1","item_type":"sku_id","order":5,"pos_id":3},{"display_type":"query","item":"3","item_type":"sku_id","order":6,"pos_id":4}],"page":{"during_time":13780,"page_id":"home"},"ts":1655281318000}
{"common":{"ar":"230000","ba":"Xiaomi","ch":"huawei","is_new":"0","md":"Xiaomi 9","mid":"mid_12","os":"Android 11.0","uid":"39","vc":"v2.1.134"},"page":{"during_time":16748,"last_page_id":"home","page_id":"search"},"ts":1655281318000}
{"common":{"ar":"230000","ba":"Xiaomi","ch":"huawei","is_new":"0","md":"Xiaomi 9","mid":"mid_12","os":"Android 11.0","uid":"39","vc":"v2.1.134"},"displays":[{"display_type":"query","item":"2","item_type":"sku_id","order":1,"pos_id":3},{"display_type":"query","item":"7","item_type":"sku_id","order":2,"pos_id":5},{"display_type":"query","item":"9","item_type":"sku_id","order":3,"pos_id":3},{"display_type":"query","item":"10","item_type":"sku_id","order":4,"pos_id":2},{"display_type":"query","item":"2","item_type":"sku_id","order":5,"pos_id":3},{"display_type":"promotion","item":"7","item_type":"sku_id","order":6,"pos_id":3},{"display_type":"query","item":"2","item_type":"sku_id","order":7,"pos_id":4}],"page":{"during_time":7077,"item":"ps5","item_type":"keyword","last_page_id":"search","page_id":"good_list"},"ts":1655281318000}
{"actions":[{"action_id":"get_coupon","item":"1","item_type":"coupon_id","ts":1655281325191}],"common":{"ar":"230000","ba":"Xiaomi","ch":"huawei","is_new":"0","md":"Xiaomi 9","mid":"mid_12","os":"Android 11.0","uid":"39","vc":"v2.1.134"},"displays":[{"display_type":"query","item":"2","item_type":"sku_id","order":1,"pos_id":2},{"display_type":"query","item":"6","item_type":"sku_id","order":2,"pos_id":5},{"display_type":"promotion","item":"2","item_type":"sku_id","order":3,"pos_id":3},{"display_type":"query","item":"3","item_type":"sku_id","order":4,"pos_id":1},{"display_type":"query","item":"4","item_type":"sku_id","order":5,"pos_id":4}],"page":{"during_time":14382,"item":"1","item_type":"sku_id","last_page_id":"good_list","page_id":"good_detail","source_type":"recommend"},"ts":1655281318000}
{"common":{"ar":"230000","ba":"Xiaomi","ch":"huawei","is_new":"0","md":"Xiaomi 9","mid":"mid_12","os":"Android 11.0","uid":"39","vc":"v2.1.134"},"page":{"during_time":6152,"last_page_id":"good_detail","page_id":"login"},"ts":1655281318000}
{"actions":[{"action_id":"favor_add","item":"1","item_type":"sku_id","ts":1655281324407},{"action_id":"get_coupon","item":"2","item_type":"coupon_id","ts":1655281330814}],"common":{"ar":"230000","ba":"Xiaomi","ch":"huawei","is_new":"0","md":"Xiaomi 9","mid":"mid_12","os":"Android 11.0","uid":"39","vc":"v2.1.134"},"displays":[{"display_type":"query","item":"5","item_type":"sku_id","order":1,"pos_id":3},{"display_type":"promotion","item":"5","item_type":"sku_id","order":2,"pos_id":4},{"display_type":"query","item":"9","item_type":"sku_id","order":3,"pos_id":3},{"display_type":"query","item":"6","item_type":"sku_id","order":4,"pos_id":2},{"display_type":"query","item":"3","item_type":"sku_id","order":5,"pos_id":3},{"display_type":"query","item":"2","item_type":"sku_id","order":6,"pos_id":3},{"display_type":"promotion","item":"4","item_type":"sku_id","order":7,"pos_id":4},{"display_type":"promotion","item":"6","item_type":"sku_id","order":8,"pos_id":2},{"display_type":"promotion","item":"4","item_type":"sku_id","order":9,"pos_id":4},{"display_type":"query","item":"3","item_type":"sku_id","order":10,"pos_id":4}],"page":{"during_time":19222,"item":"1","item_type":"sku_id","last_page_id":"login","page_id":"good_detail","source_type":"activity"},"ts":1655281318000}
{"common":{"ar":"230000","ba":"Xiaomi","ch":"huawei","is_new":"0","md":"Xiaomi 9","mid":"mid_12","os":"Android 11.0","uid":"39","vc":"v2.1.134"},"page":{"during_time":16715,"last_page_id":"good_detail","page_id":"cart"},"ts":1655281318000}
{"common":{"ar":"230000","ba":"Xiaomi","ch":"huawei","is_new":"0","md":"Xiaomi 9","mid":"mid_12","os":"Android 11.0","uid":"39","vc":"v2.1.134"},"page":{"during_time":11667,"item":"5","item_type":"sku_ids","last_page_id":"cart","page_id":"trade"},"ts":1655281318000}
{"common":{"ar":"230000","ba":"Xiaomi","ch":"huawei","is_new":"0","md":"Xiaomi 9","mid":"mid_12","os":"Android 11.0","uid":"39","vc":"v2.1.134"},"page":{"during_time":10886,"item":"4","item_type":"sku_ids","last_page_id":"trade","page_id":"payment"},"ts":1655281318000}
{"common":{"ar":"310000","ba":"Xiaomi","ch":"oppo","is_new":"0","md":"Xiaomi Mix2 ","mid":"mid_17","os":"Android 9.0","uid":"39","vc":"v2.1.134"},"start":{"entry":"notice","loading_time":5266,"open_ad_id":20,"open_ad_ms":7358,"open_ad_skip_ms":4762},"ts":1655281319000}
{"common":{"ar":"310000","ba":"Xiaomi","ch":"oppo","is_new":"0","md":"Xiaomi Mix2 ","mid":"mid_17","os":"Android 9.0","uid":"39","vc":"v2.1.134"},"displays":[{"display_type":"activity","item":"1","item_type":"activity_id","order":1,"pos_id":2},{"display_type":"activity","item":"2","item_type":"activity_id","order":2,"pos_id":2},{"display_type":"promotion","item":"4","item_type":"sku_id","order":3,"pos_id":2},{"display_type":"promotion","item":"10","item_type":"sku_id","order":4,"pos_id":1},{"display_type":"recommend","item":"6","item_type":"sku_id","order":5,"pos_id":3},{"display_type":"recommend","item":"10","item_type":"sku_id","order":6,"pos_id":1},{"display_type":"query","item":"6","item_type":"sku_id","order":7,"pos_id":2},{"display_type":"promotion","item":"3","item_type":"sku_id","order":8,"pos_id":5}],"page":{"during_time":17236,"page_id":"home"},"ts":1655281319000}
{"actions":[{"action_id":"favor_add","item":"4","item_type":"sku_id","ts":1655281321935},{"action_id":"get_coupon","item":"2","item_type":"coupon_id","ts":1655281324870}],"common":{"ar":"310000","ba":"Xiaomi","ch":"oppo","is_new":"0","md":"Xiaomi Mix2 ","mid":"mid_17","os":"Android 9.0","uid":"39","vc":"v2.1.134"},"displays":[{"display_type":"promotion","item":"3","item_type":"sku_id","order":1,"pos_id":1},{"display_type":"promotion","item":"2","item_type":"sku_id","order":2,"pos_id":1},{"display_type":"promotion","item":"5","item_type":"sku_id","order":3,"pos_id":3},{"display_type":"query","item":"8","item_type":"sku_id","order":4,"pos_id":3}],"page":{"during_time":8805,"item":"4","item_type":"sku_id","last_page_id":"home","page_id":"good_detail","source_type":"activity"},"ts":1655281319000}
{"common":{"ar":"230000","ba":"iPhone","ch":"Appstore","is_new":"0","md":"iPhone X","mid":"mid_14","os":"iOS 13.2.3","uid":"29","vc":"v2.1.134"},"start":{"entry":"icon","loading_time":4249,"open_ad_id":8,"open_ad_ms":9162,"open_ad_skip_ms":3451},"ts":1655281320000}
{"common":{"ar":"230000","ba":"iPhone","ch":"Appstore","is_new":"0","md":"iPhone X","mid":"mid_14","os":"iOS 13.2.3","uid":"29","vc":"v2.1.134"},"displays":[{"display_type":"activity","item":"2","item_type":"activity_id","order":1,"pos_id":2},{"display_type":"activity","item":"2","item_type":"activity_id","order":2,"pos_id":2},{"display_type":"recommend","item":"3","item_type":"sku_id","order":3,"pos_id":5},{"display_type":"query","item":"7","item_type":"sku_id","order":4,"pos_id":1},{"display_type":"query","item":"6","item_type":"sku_id","order":5,"pos_id":1},{"display_type":"query","item":"1","item_type":"sku_id","order":6,"pos_id":4},{"display_type":"query","item":"10","item_type":"sku_id","order":7,"pos_id":1},{"display_type":"promotion","item":"4","item_type":"sku_id","order":8,"pos_id":3},{"display_type":"query","item":"6","item_type":"sku_id","order":9,"pos_id":5}],"page":{"during_time":13921,"page_id":"home"},"ts":1655281320000}
{"actions":[{"action_id":"get_coupon","item":"1","item_type":"coupon_id","ts":1655281326699}],"common":{"ar":"230000","ba":"iPhone","ch":"Appstore","is_new":"0","md":"iPhone X","mid":"mid_14","os":"iOS 13.2.3","uid":"29","vc":"v2.1.134"},"displays":[{"display_type":"promotion","item":"5","item_type":"sku_id","order":1,"pos_id":4},{"display_type":"promotion","item":"4","item_type":"sku_id","order":2,"pos_id":1},{"display_type":"promotion","item":"10","item_type":"sku_id","order":3,"pos_id":5},{"display_type":"query","item":"8","item_type":"sku_id","order":4,"pos_id":1},{"display_type":"query","item":"4","item_type":"sku_id","order":5,"pos_id":1},{"display_type":"recommend","item":"4","item_type":"sku_id","order":6,"pos_id":1},{"display_type":"query","item":"9","item_type":"sku_id","order":7,"pos_id":3},{"display_type":"query","item":"10","item_type":"sku_id","order":8,"pos_id":2}],"page":{"during_time":13399,"item":"3","item_type":"sku_id","last_page_id":"home","page_id":"good_detail","source_type":"recommend"},"ts":1655281320000}
{"common":{"ar":"110000","ba":"iPhone","ch":"Appstore","is_new":"0","md":"iPhone X","mid":"mid_12","os":"iOS 13.2.9","uid":"4","vc":"v2.0.1"},"start":{"entry":"icon","loading_time":15894,"open_ad_id":18,"open_ad_ms":9284,"open_ad_skip_ms":0},"ts":1655281320000}
{"common":{"ar":"110000","ba":"iPhone","ch":"Appstore","is_new":"0","md":"iPhone X","mid":"mid_12","os":"iOS 13.2.9","uid":"4","vc":"v2.0.1"},"displays":[{"display_type":"activity","item":"2","item_type":"activity_id","order":1,"pos_id":1},{"display_type":"promotion","item":"2","item_type":"sku_id","order":2,"pos_id":5},{"display_type":"query","item":"6","item_type":"sku_id","order":3,"pos_id":5},{"display_type":"query","item":"5","item_type":"sku_id","order":4,"pos_id":4},{"display_type":"promotion","item":"9","item_type":"sku_id","order":5,"pos_id":1},{"display_type":"query","item":"6","item_type":"sku_id","order":6,"pos_id":3},{"display_type":"promotion","item":"10","item_type":"sku_id","order":7,"pos_id":2}],"page":{"during_time":8547,"page_id":"home"},"ts":1655281320000}
{"common":{"ar":"110000","ba":"iPhone","ch":"Appstore","is_new":"0","md":"iPhone X","mid":"mid_12","os":"iOS 13.2.9","uid":"4","vc":"v2.0.1"},"page":{"during_time":19952,"last_page_id":"home","page_id":"search"},"ts":1655281320000}
{"common":{"ar":"110000","ba":"iPhone","ch":"Appstore","is_new":"0","md":"iPhone X","mid":"mid_12","os":"iOS 13.2.9","uid":"4","vc":"v2.0.1"},"displays":[{"display_type":"query","item":"6","item_type":"sku_id","order":1,"pos_id":1},{"display_type":"query","item":"1","item_type":"sku_id","order":2,"pos_id":5},{"display_type":"query","item":"4","item_type":"sku_id","order":3,"pos_id":2},{"display_type":"query","item":"9","item_type":"sku_id","order":4,"pos_id":3},{"display_type":"promotion","item":"7","item_type":"sku_id","order":5,"pos_id":4},{"display_type":"query","item":"1","item_type":"sku_id","order":6,"pos_id":2},{"display_type":"query","item":"8","item_type":"sku_id","order":7,"pos_id":3},{"display_type":"recommend","item":"5","item_type":"sku_id","order":8,"pos_id":4}],"page":{"during_time":16423,"item":"ps5","item_type":"keyword","last_page_id":"search","page_id":"good_list"},"ts":1655281320000}
{"actions":[{"action_id":"get_coupon","item":"1","item_type":"coupon_id","ts":1655281323559}],"common":{"ar":"110000","ba":"iPhone","ch":"Appstore","is_new":"0","md":"iPhone X","mid":"mid_12","os":"iOS 13.2.9","uid":"4","vc":"v2.0.1"},"displays":[{"display_type":"query","item":"5","item_type":"sku_id","order":1,"pos_id":4},{"display_type":"query","item":"10","item_type":"sku_id","order":2,"pos_id":4},{"display_type":"promotion","item":"7","item_type":"sku_id","order":3,"pos_id":2},{"display_type":"query","item":"1","item_type":"sku_id","order":4,"pos_id":5},{"display_type":"query","item":"2","item_type":"sku_id","order":5,"pos_id":2}],"page":{"during_time":7118,"item":"5","item_type":"sku_id","last_page_id":"good_list","page_id":"good_detail","source_type":"promotion"},"ts":1655281320000}
{"common":{"ar":"110000","ba":"iPhone","ch":"Appstore","is_new":"0","md":"iPhone X","mid":"mid_12","os":"iOS 13.2.9","uid":"4","vc":"v2.0.1"},"page":{"during_time":3560,"last_page_id":"good_detail","page_id":"login"},"ts":1655281320000}
{"actions":[{"action_id":"get_coupon","item":"2","item_type":"coupon_id","ts":1655281322290}],"common":{"ar":"110000","ba":"iPhone","ch":"Appstore","is_new":"0","md":"iPhone X","mid":"mid_12","os":"iOS 13.2.9","uid":"4","vc":"v2.0.1"},"displays":[{"display_type":"query","item":"6","item_type":"sku_id","order":1,"pos_id":5},{"display_type":"query","item":"5","item_type":"sku_id","order":2,"pos_id":5},{"display_type":"query","item":"10","item_type":"sku_id","order":3,"pos_id":5},{"display_type":"promotion","item":"9","item_type":"sku_id","order":4,"pos_id":3},{"display_type":"recommend","item":"9","item_type":"sku_id","order":5,"pos_id":5},{"display_type":"query","item":"3","item_type":"sku_id","order":6,"pos_id":2},{"display_type":"recommend","item":"7","item_type":"sku_id","order":7,"pos_id":1},{"display_type":"query","item":"3","item_type":"sku_id","order":8,"pos_id":1},{"display_type":"promotion","item":"10","item_type":"sku_id","order":9,"pos_id":3}],"page":{"during_time":4580,"item":"2","item_type":"sku_id","last_page_id":"login","page_id":"good_detail","source_type":"promotion"},"ts":1655281320000}
{"actions":[{"action_id":"cart_add_num","item":"10","item_type":"sku_id","ts":1655281320967}],"common":{"ar":"110000","ba":"iPhone","ch":"Appstore","is_new":"0","md":"iPhone X","mid":"mid_12","os":"iOS 13.2.9","uid":"4","vc":"v2.0.1"},"err":{"error_code":3074,"msg":" Exception in thread \\ java.net.SocketTimeoutException\\n \\tat com.atgugu.gmall2020.mock.log.bean.AppError.main(AppError.java:xxxxxx)"},"page":{"during_time":1935,"last_page_id":"good_detail","page_id":"cart"},"ts":1655281320000}
{"common":{"ar":"110000","ba":"iPhone","ch":"Appstore","is_new":"0","md":"iPhone X","mid":"mid_12","os":"iOS 13.2.9","uid":"4","vc":"v2.0.1"},"page":{"during_time":14693,"item":"4","item_type":"sku_ids","last_page_id":"cart","page_id":"trade"},"ts":1655281320000}
{"common":{"ar":"110000","ba":"iPhone","ch":"Appstore","is_new":"0","md":"iPhone X","mid":"mid_12","os":"iOS 13.2.9","uid":"4","vc":"v2.0.1"},"page":{"during_time":4302,"item":"5","item_type":"sku_ids","last_page_id":"trade","page_id":"payment"},"ts":1655281320000}
{"common":{"ar":"530000","ba":"Xiaomi","ch":"vivo","is_new":"0","md":"Xiaomi 10 Pro ","mid":"mid_4","os":"Android 11.0","uid":"5","vc":"v2.1.132"},"start":{"entry":"icon","loading_time":8448,"open_ad_id":10,"open_ad_ms":4130,"open_ad_skip_ms":1305},"ts":1655281321000}
{"common":{"ar":"530000","ba":"Xiaomi","ch":"vivo","is_new":"0","md":"Xiaomi 10 Pro ","mid":"mid_4","os":"Android 11.0","uid":"5","vc":"v2.1.132"},"displays":[{"display_type":"activity","item":"2","item_type":"activity_id","order":1,"pos_id":1},{"display_type":"query","item":"6","item_type":"sku_id","order":2,"pos_id":3},{"display_type":"recommend","item":"4","item_type":"sku_id","order":3,"pos_id":4},{"display_type":"query","item":"2","item_type":"sku_id","order":4,"pos_id":3},{"display_type":"promotion","item":"8","item_type":"sku_id","order":5,"pos_id":4},{"display_type":"query","item":"5","item_type":"sku_id","order":6,"pos_id":1},{"display_type":"query","item":"4","item_type":"sku_id","order":7,"pos_id":5},{"display_type":"promotion","item":"5","item_type":"sku_id","order":8,"pos_id":2},{"display_type":"query","item":"5","item_type":"sku_id","order":9,"pos_id":1},{"display_type":"promotion","item":"8","item_type":"sku_id","order":10,"pos_id":1},{"display_type":"promotion","item":"9","item_type":"sku_id","order":11,"pos_id":4}],"page":{"during_time":17188,"page_id":"home"},"ts":1655281321000}
{"common":{"ar":"530000","ba":"Xiaomi","ch":"vivo","is_new":"0","md":"Xiaomi 10 Pro ","mid":"mid_4","os":"Android 11.0","uid":"5","vc":"v2.1.132"},"page":{"during_time":19812,"last_page_id":"home","page_id":"search"},"ts":1655281321000}
{"common":{"ar":"530000","ba":"Xiaomi","ch":"vivo","is_new":"0","md":"Xiaomi 10 Pro ","mid":"mid_4","os":"Android 11.0","uid":"5","vc":"v2.1.132"},"displays":[{"display_type":"query","item":"2","item_type":"sku_id","order":1,"pos_id":2},{"display_type":"query","item":"9","item_type":"sku_id","order":2,"pos_id":4},{"display_type":"promotion","item":"8","item_type":"sku_id","order":3,"pos_id":4},{"display_type":"query","item":"8","item_type":"sku_id","order":4,"pos_id":1}],"page":{"during_time":13735,"item":"图书","item_type":"keyword","last_page_id":"search","page_id":"good_list"},"ts":1655281321000}
{"actions":[{"action_id":"get_coupon","item":"3","item_type":"coupon_id","ts":1655281326117}],"common":{"ar":"530000","ba":"Xiaomi","ch":"vivo","is_new":"0","md":"Xiaomi 10 Pro ","mid":"mid_4","os":"Android 11.0","uid":"5","vc":"v2.1.132"},"displays":[{"display_type":"promotion","item":"4","item_type":"sku_id","order":1,"pos_id":3},{"display_type":"query","item":"2","item_type":"sku_id","order":2,"pos_id":4},{"display_type":"promotion","item":"3","item_type":"sku_id","order":3,"pos_id":2},{"display_type":"query","item":"6","item_type":"sku_id","order":4,"pos_id":1},{"display_type":"recommend","item":"8","item_type":"sku_id","order":5,"pos_id":5},{"display_type":"query","item":"8","item_type":"sku_id","order":6,"pos_id":3},{"display_type":"query","item":"9","item_type":"sku_id","order":7,"pos_id":1}],"page":{"during_time":10235,"item":"10","item_type":"sku_id","last_page_id":"good_list","page_id":"good_detail","source_type":"query"},"ts":1655281321000}
{"common":{"ar":"530000","ba":"Xiaomi","ch":"vivo","is_new":"0","md":"Xiaomi 10 Pro ","mid":"mid_4","os":"Android 11.0","uid":"5","vc":"v2.1.132"},"page":{"during_time":17524,"last_page_id":"good_detail","page_id":"login"},"ts":1655281321000}
{"actions":[{"action_id":"favor_add","item":"10","item_type":"sku_id","ts":1655281324637},{"action_id":"get_coupon","item":"2","item_type":"coupon_id","ts":1655281328274}],"common":{"ar":"530000","ba":"Xiaomi","ch":"vivo","is_new":"0","md":"Xiaomi 10 Pro ","mid":"mid_4","os":"Android 11.0","uid":"5","vc":"v2.1.132"},"displays":[{"display_type":"query","item":"8","item_type":"sku_id","order":1,"pos_id":5},{"display_type":"promotion","item":"3","item_type":"sku_id","order":2,"pos_id":2},{"display_type":"query","item":"8","item_type":"sku_id","order":3,"pos_id":3},{"display_type":"query","item":"6","item_type":"sku_id","order":4,"pos_id":3},{"display_type":"query","item":"1","item_type":"sku_id","order":5,"pos_id":1},{"display_type":"query","item":"10","item_type":"sku_id","order":6,"pos_id":1},{"display_type":"query","item":"3","item_type":"sku_id","order":7,"pos_id":5}],"page":{"during_time":10913,"item":"10","item_type":"sku_id","last_page_id":"login","page_id":"good_detail","source_type":"activity"},"ts":1655281321000}
{"common":{"ar":"530000","ba":"Xiaomi","ch":"vivo","is_new":"0","md":"Xiaomi 10 Pro ","mid":"mid_4","os":"Android 11.0","uid":"5","vc":"v2.1.132"},"page":{"during_time":6681,"last_page_id":"good_detail","page_id":"cart"},"ts":1655281321000}
{"common":{"ar":"530000","ba":"Xiaomi","ch":"vivo","is_new":"0","md":"Xiaomi 10 Pro ","mid":"mid_4","os":"Android 11.0","uid":"5","vc":"v2.1.132"},"page":{"during_time":8132,"item":"2,3,5","item_type":"sku_ids","last_page_id":"cart","page_id":"trade"},"ts":1655281321000}
{"common":{"ar":"530000","ba":"Xiaomi","ch":"vivo","is_new":"0","md":"Xiaomi 10 Pro ","mid":"mid_4","os":"Android 11.0","uid":"5","vc":"v2.1.132"},"page":{"during_time":6369,"item":"2,8,10","item_type":"sku_ids","last_page_id":"trade","page_id":"payment"},"ts":1655281321000}
{"common":{"ar":"440000","ba":"Honor","ch":"xiaomi","is_new":"1","md":"Honor 20s","mid":"mid_14","os":"Android 11.0","uid":"9","vc":"v2.1.134"},"start":{"entry":"icon","loading_time":13898,"open_ad_id":16,"open_ad_ms":3327,"open_ad_skip_ms":0},"ts":1655281322000}
{"common":{"ar":"440000","ba":"Honor","ch":"xiaomi","is_new":"1","md":"Honor 20s","mid":"mid_14","os":"Android 11.0","uid":"9","vc":"v2.1.134"},"displays":[{"display_type":"activity","item":"2","item_type":"activity_id","order":1,"pos_id":3},{"display_type":"query","item":"3","item_type":"sku_id","order":2,"pos_id":4},{"display_type":"query","item":"5","item_type":"sku_id","order":3,"pos_id":4},{"display_type":"query","item":"6","item_type":"sku_id","order":4,"pos_id":1},{"display_type":"promotion","item":"4","item_type":"sku_id","order":5,"pos_id":2},{"display_type":"query","item":"7","item_type":"sku_id","order":6,"pos_id":2},{"display_type":"query","item":"3","item_type":"sku_id","order":7,"pos_id":1},{"display_type":"recommend","item":"3","item_type":"sku_id","order":8,"pos_id":2}],"page":{"during_time":5066,"page_id":"home"},"ts":1655281322000}
{"common":{"ar":"420000","ba":"Xiaomi","ch":"vivo","is_new":"1","md":"Xiaomi 10 Pro ","mid":"mid_13","os":"Android 10.0","uid":"31","vc":"v2.1.134"},"err":{"error_code":3443,"msg":" Exception in thread \\ java.net.SocketTimeoutException\\n \\tat com.atgugu.gmall2020.mock.log.bean.AppError.main(AppError.java:xxxxxx)"},"start":{"entry":"icon","loading_time":19980,"open_ad_id":15,"open_ad_ms":2899,"open_ad_skip_ms":1368},"ts":1655281323000}
{"common":{"ar":"420000","ba":"Xiaomi","ch":"vivo","is_new":"1","md":"Xiaomi 10 Pro ","mid":"mid_13","os":"Android 10.0","uid":"31","vc":"v2.1.134"},"displays":[{"display_type":"activity","item":"1","item_type":"activity_id","order":1,"pos_id":5},{"display_type":"query","item":"1","item_type":"sku_id","order":2,"pos_id":5},{"display_type":"query","item":"5","item_type":"sku_id","order":3,"pos_id":3},{"display_type":"query","item":"2","item_type":"sku_id","order":4,"pos_id":1},{"display_type":"query","item":"9","item_type":"sku_id","order":5,"pos_id":5},{"display_type":"promotion","item":"5","item_type":"sku_id","order":6,"pos_id":5},{"display_type":"query","item":"5","item_type":"sku_id","order":7,"pos_id":1},{"display_type":"query","item":"3","item_type":"sku_id","order":8,"pos_id":5},{"display_type":"recommend","item":"2","item_type":"sku_id","order":9,"pos_id":3}],"page":{"during_time":13666,"page_id":"home"},"ts":1655281323000}
{"actions":[{"action_id":"favor_add","item":"8","item_type":"sku_id","ts":1655281329042},{"action_id":"get_coupon","item":"2","item_type":"coupon_id","ts":1655281335084}],"common":{"ar":"420000","ba":"Xiaomi","ch":"vivo","is_new":"1","md":"Xiaomi 10 Pro ","mid":"mid_13","os":"Android 10.0","uid":"31","vc":"v2.1.134"},"displays":[{"display_type":"query","item":"1","item_type":"sku_id","order":1,"pos_id":1},{"display_type":"query","item":"1","item_type":"sku_id","order":2,"pos_id":2},{"display_type":"query","item":"5","item_type":"sku_id","order":3,"pos_id":2},{"display_type":"promotion","item":"2","item_type":"sku_id","order":4,"pos_id":1},{"display_type":"recommend","item":"3","item_type":"sku_id","order":5,"pos_id":1},{"display_type":"query","item":"8","item_type":"sku_id","order":6,"pos_id":3},{"display_type":"query","item":"7","item_type":"sku_id","order":7,"pos_id":5}],"page":{"during_time":18126,"item":"8","item_type":"sku_id","last_page_id":"home","page_id":"good_detail","source_type":"promotion"},"ts":1655281323000}
{"common":{"ar":"370000","ba":"Huawei","ch":"oppo","is_new":"1","md":"Huawei Mate 30","mid":"mid_20","os":"Android 11.0","uid":"47","vc":"v2.1.132"},"start":{"entry":"icon","loading_time":2289,"open_ad_id":19,"open_ad_ms":2126,"open_ad_skip_ms":0},"ts":1655281323000}
{"common":{"ar":"370000","ba":"Huawei","ch":"oppo","is_new":"1","md":"Huawei Mate 30","mid":"mid_20","os":"Android 11.0","uid":"47","vc":"v2.1.132"},"displays":[{"display_type":"activity","item":"2","item_type":"activity_id","order":1,"pos_id":2},{"display_type":"query","item":"8","item_type":"sku_id","order":2,"pos_id":3},{"display_type":"query","item":"4","item_type":"sku_id","order":3,"pos_id":3},{"display_type":"query","item":"8","item_type":"sku_id","order":4,"pos_id":4},{"display_type":"query","item":"2","item_type":"sku_id","order":5,"pos_id":1}],"page":{"during_time":17345,"page_id":"home"},"ts":1655281323000}
{"common":{"ar":"370000","ba":"Huawei","ch":"oppo","is_new":"1","md":"Huawei Mate 30","mid":"mid_20","os":"Android 11.0","uid":"47","vc":"v2.1.132"},"displays":[{"display_type":"query","item":"5","item_type":"sku_id","order":1,"pos_id":5},{"display_type":"query","item":"7","item_type":"sku_id","order":2,"pos_id":1},{"display_type":"query","item":"10","item_type":"sku_id","order":3,"pos_id":5},{"display_type":"query","item":"3","item_type":"sku_id","order":4,"pos_id":1},{"display_type":"query","item":"5","item_type":"sku_id","order":5,"pos_id":3}],"err":{"error_code":1382,"msg":" Exception in thread \\ java.net.SocketTimeoutException\\n \\tat com.atgugu.gmall2020.mock.log.bean.AppError.main(AppError.java:xxxxxx)"},"page":{"during_time":10983,"item":"电视","item_type":"keyword","last_page_id":"home","page_id":"good_list"},"ts":1655281323000}
{"actions":[{"action_id":"favor_add","item":"8","item_type":"sku_id","ts":1655281328853},{"action_id":"get_coupon","item":"3","item_type":"coupon_id","ts":1655281334706}],"common":{"ar":"370000","ba":"Huawei","ch":"oppo","is_new":"1","md":"Huawei Mate 30","mid":"mid_20","os":"Android 11.0","uid":"47","vc":"v2.1.132"},"displays":[{"display_type":"query","item":"8","item_type":"sku_id","order":1,"pos_id":2},{"display_type":"query","item":"1","item_type":"sku_id","order":2,"pos_id":3},{"display_type":"promotion","item":"6","item_type":"sku_id","order":3,"pos_id":5},{"display_type":"query","item":"1","item_type":"sku_id","order":4,"pos_id":2}],"page":{"during_time":17560,"item":"8","item_type":"sku_id","last_page_id":"good_list","page_id":"good_detail","source_type":"recommend"},"ts":1655281323000}
{"common":{"ar":"370000","ba":"Huawei","ch":"oppo","is_new":"1","md":"Huawei Mate 30","mid":"mid_20","os":"Android 11.0","uid":"47","vc":"v2.1.132"},"page":{"during_time":4680,"last_page_id":"good_detail","page_id":"cart"},"ts":1655281323000}
{"common":{"ar":"370000","ba":"Huawei","ch":"oppo","is_new":"1","md":"Huawei Mate 30","mid":"mid_20","os":"Android 11.0","uid":"47","vc":"v2.1.132"},"page":{"during_time":12706,"item":"1,4,10","item_type":"sku_ids","last_page_id":"cart","page_id":"trade"},"ts":1655281323000}
{"common":{"ar":"370000","ba":"Huawei","ch":"oppo","is_new":"1","md":"Huawei Mate 30","mid":"mid_20","os":"Android 11.0","uid":"47","vc":"v2.1.132"},"page":{"during_time":8631,"item":"1,5","item_type":"sku_ids","last_page_id":"trade","page_id":"payment"},"ts":1655281323000}
{"common":{"ar":"440000","ba":"Xiaomi","ch":"web","is_new":"1","md":"Xiaomi Mix2 ","mid":"mid_6","os":"Android 11.0","uid":"43","vc":"v2.0.1"},"start":{"entry":"icon","loading_time":5597,"open_ad_id":16,"open_ad_ms":6587,"open_ad_skip_ms":5736},"ts":1655281324000}
{"common":{"ar":"440000","ba":"Xiaomi","ch":"web","is_new":"1","md":"Xiaomi Mix2 ","mid":"mid_6","os":"Android 11.0","uid":"43","vc":"v2.0.1"},"displays":[{"display_type":"activity","item":"1","item_type":"activity_id","order":1,"pos_id":2},{"display_type":"activity","item":"2","item_type":"activity_id","order":2,"pos_id":2},{"display_type":"promotion","item":"9","item_type":"sku_id","order":3,"pos_id":3},{"display_type":"promotion","item":"5","item_type":"sku_id","order":4,"pos_id":2},{"display_type":"recommend","item":"1","item_type":"sku_id","order":5,"pos_id":4},{"display_type":"query","item":"9","item_type":"sku_id","order":6,"pos_id":1}],"page":{"during_time":5689,"page_id":"home"},"ts":1655281324000}
{"common":{"ar":"440000","ba":"Xiaomi","ch":"web","is_new":"1","md":"Xiaomi Mix2 ","mid":"mid_6","os":"Android 11.0","uid":"43","vc":"v2.0.1"},"page":{"during_time":1730,"last_page_id":"home","page_id":"search"},"ts":1655281324000}

View File

@ -0,0 +1,5 @@
server.port=8082
# Kafka cluster bootstrap servers
spring.kafka.bootstrap-servers = Ding202:9092,Ding203:9092,Ding204:9092
spring.kafka.producer.key-serializer= org.apache.kafka.common.serialization.StringSerializer
spring.kafka.producer.value-serializer=org.apache.kafka.common.serialization.StringSerializer
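These properties set the web port, point Spring Kafka at the three brokers, and use String serializers for both key and value. Below is a minimal sketch of a producer service built on the KafkaTemplate that Spring Boot auto-configures from these properties; the class name and the topic name gmall_log are assumptions for illustration, not taken from this commit.

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.stereotype.Service;

// Sketch only: publishes a raw log line through the KafkaTemplate built from
// the spring.kafka.* properties above. The topic name is a placeholder.
@Service
public class LogProducerSketch {

    @Autowired
    private KafkaTemplate<String, String> kafkaTemplate;

    public void send(String logLine) {
        // Key is left null; the configured StringSerializer handles the value.
        kafkaTemplate.send("gmall_log", logLine);
    }
}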

View File

@ -26,7 +26,7 @@
<appender-ref ref="console" />
</logger>
<root level="error" additivity="false">
<root level="info" additivity="false">
<appender-ref ref="console" />
</root>
</configuration>

View File

@ -0,0 +1,13 @@
package com.atguigu.rtgmall;
import org.junit.jupiter.api.Test;
import org.springframework.boot.test.context.SpringBootTest;
@SpringBootTest
class GmallLoggerApplicationTests {
@Test
void contextLoads() {
}
}

View File

@ -0,0 +1,33 @@
HELP.md
target/
!.mvn/wrapper/maven-wrapper.jar
!**/src/main/**/target/
!**/src/test/**/target/
### STS ###
.apt_generated
.classpath
.factorypath
.project
.settings
.springBeans
.sts4-cache
### IntelliJ IDEA ###
.idea
*.iws
*.iml
*.ipr
### NetBeans ###
/nbproject/private/
/nbbuild/
/dist/
/nbdist/
/.nb-gradle/
build/
!**/src/main/**/build/
!**/src/test/**/build/
### VS Code ###
.vscode/

View File

@ -0,0 +1,18 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
distributionUrl=https://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/3.8.7/apache-maven-3.8.7-bin.zip
wrapperUrl=https://repo.maven.apache.org/maven2/org/apache/maven/wrapper/maven-wrapper/3.1.1/maven-wrapper-3.1.1.jar

View File

@ -0,0 +1,316 @@
#!/bin/sh
# ----------------------------------------------------------------------------
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# ----------------------------------------------------------------------------
# ----------------------------------------------------------------------------
# Maven Start Up Batch script
#
# Required ENV vars:
# ------------------
# JAVA_HOME - location of a JDK home dir
#
# Optional ENV vars
# -----------------
# M2_HOME - location of maven2's installed home dir
# MAVEN_OPTS - parameters passed to the Java VM when running Maven
# e.g. to debug Maven itself, use
# set MAVEN_OPTS=-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=8000
# MAVEN_SKIP_RC - flag to disable loading of mavenrc files
# ----------------------------------------------------------------------------
if [ -z "$MAVEN_SKIP_RC" ] ; then
if [ -f /usr/local/etc/mavenrc ] ; then
. /usr/local/etc/mavenrc
fi
if [ -f /etc/mavenrc ] ; then
. /etc/mavenrc
fi
if [ -f "$HOME/.mavenrc" ] ; then
. "$HOME/.mavenrc"
fi
fi
# OS specific support. $var _must_ be set to either true or false.
cygwin=false;
darwin=false;
mingw=false
case "`uname`" in
CYGWIN*) cygwin=true ;;
MINGW*) mingw=true;;
Darwin*) darwin=true
# Use /usr/libexec/java_home if available, otherwise fall back to /Library/Java/Home
# See https://developer.apple.com/library/mac/qa/qa1170/_index.html
if [ -z "$JAVA_HOME" ]; then
if [ -x "/usr/libexec/java_home" ]; then
export JAVA_HOME="`/usr/libexec/java_home`"
else
export JAVA_HOME="/Library/Java/Home"
fi
fi
;;
esac
if [ -z "$JAVA_HOME" ] ; then
if [ -r /etc/gentoo-release ] ; then
JAVA_HOME=`java-config --jre-home`
fi
fi
if [ -z "$M2_HOME" ] ; then
## resolve links - $0 may be a link to maven's home
PRG="$0"
# need this for relative symlinks
while [ -h "$PRG" ] ; do
ls=`ls -ld "$PRG"`
link=`expr "$ls" : '.*-> \(.*\)$'`
if expr "$link" : '/.*' > /dev/null; then
PRG="$link"
else
PRG="`dirname "$PRG"`/$link"
fi
done
saveddir=`pwd`
M2_HOME=`dirname "$PRG"`/..
# make it fully qualified
M2_HOME=`cd "$M2_HOME" && pwd`
cd "$saveddir"
# echo Using m2 at $M2_HOME
fi
# For Cygwin, ensure paths are in UNIX format before anything is touched
if $cygwin ; then
[ -n "$M2_HOME" ] &&
M2_HOME=`cygpath --unix "$M2_HOME"`
[ -n "$JAVA_HOME" ] &&
JAVA_HOME=`cygpath --unix "$JAVA_HOME"`
[ -n "$CLASSPATH" ] &&
CLASSPATH=`cygpath --path --unix "$CLASSPATH"`
fi
# For Mingw, ensure paths are in UNIX format before anything is touched
if $mingw ; then
[ -n "$M2_HOME" ] &&
M2_HOME="`(cd "$M2_HOME"; pwd)`"
[ -n "$JAVA_HOME" ] &&
JAVA_HOME="`(cd "$JAVA_HOME"; pwd)`"
fi
if [ -z "$JAVA_HOME" ]; then
javaExecutable="`which javac`"
if [ -n "$javaExecutable" ] && ! [ "`expr \"$javaExecutable\" : '\([^ ]*\)'`" = "no" ]; then
# readlink(1) is not available as standard on Solaris 10.
readLink=`which readlink`
if [ ! `expr "$readLink" : '\([^ ]*\)'` = "no" ]; then
if $darwin ; then
javaHome="`dirname \"$javaExecutable\"`"
javaExecutable="`cd \"$javaHome\" && pwd -P`/javac"
else
javaExecutable="`readlink -f \"$javaExecutable\"`"
fi
javaHome="`dirname \"$javaExecutable\"`"
javaHome=`expr "$javaHome" : '\(.*\)/bin'`
JAVA_HOME="$javaHome"
export JAVA_HOME
fi
fi
fi
if [ -z "$JAVACMD" ] ; then
if [ -n "$JAVA_HOME" ] ; then
if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
# IBM's JDK on AIX uses strange locations for the executables
JAVACMD="$JAVA_HOME/jre/sh/java"
else
JAVACMD="$JAVA_HOME/bin/java"
fi
else
JAVACMD="`\\unset -f command; \\command -v java`"
fi
fi
if [ ! -x "$JAVACMD" ] ; then
echo "Error: JAVA_HOME is not defined correctly." >&2
echo " We cannot execute $JAVACMD" >&2
exit 1
fi
if [ -z "$JAVA_HOME" ] ; then
echo "Warning: JAVA_HOME environment variable is not set."
fi
CLASSWORLDS_LAUNCHER=org.codehaus.plexus.classworlds.launcher.Launcher
# traverses directory structure from process work directory to filesystem root
# first directory with .mvn subdirectory is considered project base directory
find_maven_basedir() {
if [ -z "$1" ]
then
echo "Path not specified to find_maven_basedir"
return 1
fi
basedir="$1"
wdir="$1"
while [ "$wdir" != '/' ] ; do
if [ -d "$wdir"/.mvn ] ; then
basedir=$wdir
break
fi
# workaround for JBEAP-8937 (on Solaris 10/Sparc)
if [ -d "${wdir}" ]; then
wdir=`cd "$wdir/.."; pwd`
fi
# end of workaround
done
echo "${basedir}"
}
# concatenates all lines of a file
concat_lines() {
if [ -f "$1" ]; then
echo "$(tr -s '\n' ' ' < "$1")"
fi
}
BASE_DIR=`find_maven_basedir "$(pwd)"`
if [ -z "$BASE_DIR" ]; then
exit 1;
fi
##########################################################################################
# Extension to allow automatically downloading the maven-wrapper.jar from Maven-central
# This allows using the maven wrapper in projects that prohibit checking in binary data.
##########################################################################################
if [ -r "$BASE_DIR/.mvn/wrapper/maven-wrapper.jar" ]; then
if [ "$MVNW_VERBOSE" = true ]; then
echo "Found .mvn/wrapper/maven-wrapper.jar"
fi
else
if [ "$MVNW_VERBOSE" = true ]; then
echo "Couldn't find .mvn/wrapper/maven-wrapper.jar, downloading it ..."
fi
if [ -n "$MVNW_REPOURL" ]; then
jarUrl="$MVNW_REPOURL/org/apache/maven/wrapper/maven-wrapper/3.1.0/maven-wrapper-3.1.0.jar"
else
jarUrl="https://repo.maven.apache.org/maven2/org/apache/maven/wrapper/maven-wrapper/3.1.0/maven-wrapper-3.1.0.jar"
fi
while IFS="=" read key value; do
case "$key" in (wrapperUrl) jarUrl="$value"; break ;;
esac
done < "$BASE_DIR/.mvn/wrapper/maven-wrapper.properties"
if [ "$MVNW_VERBOSE" = true ]; then
echo "Downloading from: $jarUrl"
fi
wrapperJarPath="$BASE_DIR/.mvn/wrapper/maven-wrapper.jar"
if $cygwin; then
wrapperJarPath=`cygpath --path --windows "$wrapperJarPath"`
fi
if command -v wget > /dev/null; then
if [ "$MVNW_VERBOSE" = true ]; then
echo "Found wget ... using wget"
fi
if [ -z "$MVNW_USERNAME" ] || [ -z "$MVNW_PASSWORD" ]; then
wget "$jarUrl" -O "$wrapperJarPath" || rm -f "$wrapperJarPath"
else
wget --http-user=$MVNW_USERNAME --http-password=$MVNW_PASSWORD "$jarUrl" -O "$wrapperJarPath" || rm -f "$wrapperJarPath"
fi
elif command -v curl > /dev/null; then
if [ "$MVNW_VERBOSE" = true ]; then
echo "Found curl ... using curl"
fi
if [ -z "$MVNW_USERNAME" ] || [ -z "$MVNW_PASSWORD" ]; then
curl -o "$wrapperJarPath" "$jarUrl" -f
else
curl --user $MVNW_USERNAME:$MVNW_PASSWORD -o "$wrapperJarPath" "$jarUrl" -f
fi
else
if [ "$MVNW_VERBOSE" = true ]; then
echo "Falling back to using Java to download"
fi
javaClass="$BASE_DIR/.mvn/wrapper/MavenWrapperDownloader.java"
# For Cygwin, switch paths to Windows format before running javac
if $cygwin; then
javaClass=`cygpath --path --windows "$javaClass"`
fi
if [ -e "$javaClass" ]; then
if [ ! -e "$BASE_DIR/.mvn/wrapper/MavenWrapperDownloader.class" ]; then
if [ "$MVNW_VERBOSE" = true ]; then
echo " - Compiling MavenWrapperDownloader.java ..."
fi
# Compiling the Java class
("$JAVA_HOME/bin/javac" "$javaClass")
fi
if [ -e "$BASE_DIR/.mvn/wrapper/MavenWrapperDownloader.class" ]; then
# Running the downloader
if [ "$MVNW_VERBOSE" = true ]; then
echo " - Running MavenWrapperDownloader.java ..."
fi
("$JAVA_HOME/bin/java" -cp .mvn/wrapper MavenWrapperDownloader "$MAVEN_PROJECTBASEDIR")
fi
fi
fi
fi
##########################################################################################
# End of extension
##########################################################################################
export MAVEN_PROJECTBASEDIR=${MAVEN_BASEDIR:-"$BASE_DIR"}
if [ "$MVNW_VERBOSE" = true ]; then
echo $MAVEN_PROJECTBASEDIR
fi
MAVEN_OPTS="$(concat_lines "$MAVEN_PROJECTBASEDIR/.mvn/jvm.config") $MAVEN_OPTS"
# For Cygwin, switch paths to Windows format before running java
if $cygwin; then
[ -n "$M2_HOME" ] &&
M2_HOME=`cygpath --path --windows "$M2_HOME"`
[ -n "$JAVA_HOME" ] &&
JAVA_HOME=`cygpath --path --windows "$JAVA_HOME"`
[ -n "$CLASSPATH" ] &&
CLASSPATH=`cygpath --path --windows "$CLASSPATH"`
[ -n "$MAVEN_PROJECTBASEDIR" ] &&
MAVEN_PROJECTBASEDIR=`cygpath --path --windows "$MAVEN_PROJECTBASEDIR"`
fi
# Provide a "standardized" way to retrieve the CLI args that will
# work with both Windows and non-Windows executions.
MAVEN_CMD_LINE_ARGS="$MAVEN_CONFIG $@"
export MAVEN_CMD_LINE_ARGS
WRAPPER_LAUNCHER=org.apache.maven.wrapper.MavenWrapperMain
exec "$JAVACMD" \
$MAVEN_OPTS \
$MAVEN_DEBUG_OPTS \
-classpath "$MAVEN_PROJECTBASEDIR/.mvn/wrapper/maven-wrapper.jar" \
"-Dmaven.home=${M2_HOME}" \
"-Dmaven.multiModuleProjectDirectory=${MAVEN_PROJECTBASEDIR}" \
${WRAPPER_LAUNCHER} $MAVEN_CONFIG "$@"

View File

@ -0,0 +1,188 @@
@REM ----------------------------------------------------------------------------
@REM Licensed to the Apache Software Foundation (ASF) under one
@REM or more contributor license agreements. See the NOTICE file
@REM distributed with this work for additional information
@REM regarding copyright ownership. The ASF licenses this file
@REM to you under the Apache License, Version 2.0 (the
@REM "License"); you may not use this file except in compliance
@REM with the License. You may obtain a copy of the License at
@REM
@REM https://www.apache.org/licenses/LICENSE-2.0
@REM
@REM Unless required by applicable law or agreed to in writing,
@REM software distributed under the License is distributed on an
@REM "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
@REM KIND, either express or implied. See the License for the
@REM specific language governing permissions and limitations
@REM under the License.
@REM ----------------------------------------------------------------------------
@REM ----------------------------------------------------------------------------
@REM Maven Start Up Batch script
@REM
@REM Required ENV vars:
@REM JAVA_HOME - location of a JDK home dir
@REM
@REM Optional ENV vars
@REM M2_HOME - location of maven2's installed home dir
@REM MAVEN_BATCH_ECHO - set to 'on' to enable the echoing of the batch commands
@REM MAVEN_BATCH_PAUSE - set to 'on' to wait for a keystroke before ending
@REM MAVEN_OPTS - parameters passed to the Java VM when running Maven
@REM e.g. to debug Maven itself, use
@REM set MAVEN_OPTS=-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=8000
@REM MAVEN_SKIP_RC - flag to disable loading of mavenrc files
@REM ----------------------------------------------------------------------------
@REM Begin all REM lines with '@' in case MAVEN_BATCH_ECHO is 'on'
@echo off
@REM set title of command window
title %0
@REM enable echoing by setting MAVEN_BATCH_ECHO to 'on'
@if "%MAVEN_BATCH_ECHO%" == "on" echo %MAVEN_BATCH_ECHO%
@REM set %HOME% to equivalent of $HOME
if "%HOME%" == "" (set "HOME=%HOMEDRIVE%%HOMEPATH%")
@REM Execute a user defined script before this one
if not "%MAVEN_SKIP_RC%" == "" goto skipRcPre
@REM check for pre script, once with legacy .bat ending and once with .cmd ending
if exist "%USERPROFILE%\mavenrc_pre.bat" call "%USERPROFILE%\mavenrc_pre.bat" %*
if exist "%USERPROFILE%\mavenrc_pre.cmd" call "%USERPROFILE%\mavenrc_pre.cmd" %*
:skipRcPre
@setlocal
set ERROR_CODE=0
@REM To isolate internal variables from possible post scripts, we use another setlocal
@setlocal
@REM ==== START VALIDATION ====
if not "%JAVA_HOME%" == "" goto OkJHome
echo.
echo Error: JAVA_HOME not found in your environment. >&2
echo Please set the JAVA_HOME variable in your environment to match the >&2
echo location of your Java installation. >&2
echo.
goto error
:OkJHome
if exist "%JAVA_HOME%\bin\java.exe" goto init
echo.
echo Error: JAVA_HOME is set to an invalid directory. >&2
echo JAVA_HOME = "%JAVA_HOME%" >&2
echo Please set the JAVA_HOME variable in your environment to match the >&2
echo location of your Java installation. >&2
echo.
goto error
@REM ==== END VALIDATION ====
:init
@REM Find the project base dir, i.e. the directory that contains the folder ".mvn".
@REM Fallback to current working directory if not found.
set MAVEN_PROJECTBASEDIR=%MAVEN_BASEDIR%
IF NOT "%MAVEN_PROJECTBASEDIR%"=="" goto endDetectBaseDir
set EXEC_DIR=%CD%
set WDIR=%EXEC_DIR%
:findBaseDir
IF EXIST "%WDIR%"\.mvn goto baseDirFound
cd ..
IF "%WDIR%"=="%CD%" goto baseDirNotFound
set WDIR=%CD%
goto findBaseDir
:baseDirFound
set MAVEN_PROJECTBASEDIR=%WDIR%
cd "%EXEC_DIR%"
goto endDetectBaseDir
:baseDirNotFound
set MAVEN_PROJECTBASEDIR=%EXEC_DIR%
cd "%EXEC_DIR%"
:endDetectBaseDir
IF NOT EXIST "%MAVEN_PROJECTBASEDIR%\.mvn\jvm.config" goto endReadAdditionalConfig
@setlocal EnableExtensions EnableDelayedExpansion
for /F "usebackq delims=" %%a in ("%MAVEN_PROJECTBASEDIR%\.mvn\jvm.config") do set JVM_CONFIG_MAVEN_PROPS=!JVM_CONFIG_MAVEN_PROPS! %%a
@endlocal & set JVM_CONFIG_MAVEN_PROPS=%JVM_CONFIG_MAVEN_PROPS%
:endReadAdditionalConfig
SET MAVEN_JAVA_EXE="%JAVA_HOME%\bin\java.exe"
set WRAPPER_JAR="%MAVEN_PROJECTBASEDIR%\.mvn\wrapper\maven-wrapper.jar"
set WRAPPER_LAUNCHER=org.apache.maven.wrapper.MavenWrapperMain
set DOWNLOAD_URL="https://repo.maven.apache.org/maven2/org/apache/maven/wrapper/maven-wrapper/3.1.0/maven-wrapper-3.1.0.jar"
FOR /F "usebackq tokens=1,2 delims==" %%A IN ("%MAVEN_PROJECTBASEDIR%\.mvn\wrapper\maven-wrapper.properties") DO (
IF "%%A"=="wrapperUrl" SET DOWNLOAD_URL=%%B
)
@REM Extension to allow automatically downloading the maven-wrapper.jar from Maven-central
@REM This allows using the maven wrapper in projects that prohibit checking in binary data.
if exist %WRAPPER_JAR% (
if "%MVNW_VERBOSE%" == "true" (
echo Found %WRAPPER_JAR%
)
) else (
if not "%MVNW_REPOURL%" == "" (
SET DOWNLOAD_URL="%MVNW_REPOURL%/org/apache/maven/wrapper/maven-wrapper/3.1.0/maven-wrapper-3.1.0.jar"
)
if "%MVNW_VERBOSE%" == "true" (
echo Couldn't find %WRAPPER_JAR%, downloading it ...
echo Downloading from: %DOWNLOAD_URL%
)
powershell -Command "&{"^
"$webclient = new-object System.Net.WebClient;"^
"if (-not ([string]::IsNullOrEmpty('%MVNW_USERNAME%') -and [string]::IsNullOrEmpty('%MVNW_PASSWORD%'))) {"^
"$webclient.Credentials = new-object System.Net.NetworkCredential('%MVNW_USERNAME%', '%MVNW_PASSWORD%');"^
"}"^
"[Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12; $webclient.DownloadFile('%DOWNLOAD_URL%', '%WRAPPER_JAR%')"^
"}"
if "%MVNW_VERBOSE%" == "true" (
echo Finished downloading %WRAPPER_JAR%
)
)
@REM End of extension
@REM Provide a "standardized" way to retrieve the CLI args that will
@REM work with both Windows and non-Windows executions.
set MAVEN_CMD_LINE_ARGS=%*
%MAVEN_JAVA_EXE% ^
%JVM_CONFIG_MAVEN_PROPS% ^
%MAVEN_OPTS% ^
%MAVEN_DEBUG_OPTS% ^
-classpath %WRAPPER_JAR% ^
"-Dmaven.multiModuleProjectDirectory=%MAVEN_PROJECTBASEDIR%" ^
%WRAPPER_LAUNCHER% %MAVEN_CONFIG% %*
if ERRORLEVEL 1 goto error
goto end
:error
set ERROR_CODE=1
:end
@endlocal & set ERROR_CODE=%ERROR_CODE%
if not "%MAVEN_SKIP_RC%"=="" goto skipRcPost
@REM check for post script, once with legacy .bat ending and once with .cmd ending
if exist "%USERPROFILE%\mavenrc_post.bat" call "%USERPROFILE%\mavenrc_post.bat"
if exist "%USERPROFILE%\mavenrc_post.cmd" call "%USERPROFILE%\mavenrc_post.cmd"
:skipRcPost
@REM pause the script if MAVEN_BATCH_PAUSE is set to 'on'
if "%MAVEN_BATCH_PAUSE%"=="on" pause
if "%MAVEN_TERMINATE_CMD%"=="on" exit %ERROR_CODE%
cmd /C exit /B %ERROR_CODE%

View File

@ -0,0 +1,74 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-parent</artifactId>
<version>2.7.11</version>
<relativePath/> <!-- lookup parent from repository -->
</parent>
<groupId>com.atguigu.rtgmall</groupId>
<artifactId>gmall-publisher</artifactId>
<version>0.0.1-SNAPSHOT</version>
<name>gmall-publisher</name>
<description>gmall-publisher</description>
<properties>
<java.version>1.8</java.version>
</properties>
<dependencies>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-jdbc</artifactId>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-web</artifactId>
</dependency>
<dependency>
<groupId>org.mybatis.spring.boot</groupId>
<artifactId>mybatis-spring-boot-starter</artifactId>
<version>2.1.3</version>
</dependency>
<dependency>
<groupId>org.projectlombok</groupId>
<artifactId>lombok</artifactId>
<optional>true</optional>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-test</artifactId>
<scope>test</scope>
<exclusions>
<exclusion>
<groupId>org.junit.vintage</groupId>
<artifactId>junit-vintage-engine</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-lang3</artifactId>
<version>3.11</version>
</dependency>
<dependency>
<groupId>ru.yandex.clickhouse</groupId>
<artifactId>clickhouse-jdbc</artifactId>
<version>0.1.55</version>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-maven-plugin</artifactId>
</plugin>
</plugins>
</build>
</project>

View File

@ -0,0 +1,15 @@
package com.atguigu.rtgmall;
import org.mybatis.spring.annotation.MapperScan;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
@SpringBootApplication
@MapperScan(basePackages = "com.atguigu.rtgmall.mapper")
public class GmallPublisherApplication {
public static void main(String[] args) {
SpringApplication.run(GmallPublisherApplication.class, args);
}
}

View File

@ -0,0 +1,27 @@
package com.atguigu.rtgmall.beans;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
/**
*@BelongsProject: rt-gmall-parent
*@BelongsPackage: com.atguigu.rtgmall.beans
*@Author: markilue
*@CreateTime: 2023-05-15 15:00
*@Description: TODO 关键词统计实体类
*@Version: 1.0
*/
@Data
@AllArgsConstructor
@NoArgsConstructor
public class KeywordStats {
private String stt;
private String edt;
private String keyword;
private Long ct;
private String ts;
}

View File

@ -0,0 +1,54 @@
package com.atguigu.rtgmall.beans;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
import java.math.BigDecimal;
/**
* Author: Felix
* Date: 2021/8/16
* Desc: 商品交易额统计实体类
*/
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
public class ProductStats {
String stt;
String edt;
Long sku_id;
String sku_name;
BigDecimal sku_price;
Long spu_id;
String spu_name;
Long tm_id ;
String tm_name;
Long category3_id ;
String category3_name ;
@Builder.Default
Long display_ct=0L;
@Builder.Default
Long click_ct=0L;
@Builder.Default
Long cart_ct=0L;
@Builder.Default
Long order_sku_num=0L;
@Builder.Default
BigDecimal order_amount=BigDecimal.ZERO;
@Builder.Default
Long order_ct=0L;
@Builder.Default
BigDecimal payment_amount=BigDecimal.ZERO;
@Builder.Default
Long refund_ct=0L;
@Builder.Default
BigDecimal refund_amount=BigDecimal.ZERO;
@Builder.Default
Long comment_ct=0L;
@Builder.Default
Long good_comment_ct=0L ;
Long ts;
}
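/*
 * Builder usage sketch (illustrative values, not part of the original source):
 * ProductStats.builder().sku_id(1L).order_amount(new BigDecimal("99.00")).build()
 * leaves every field marked @Builder.Default (click_ct, order_ct, refund_amount, ...)
 * at its 0L / BigDecimal.ZERO initial value instead of null.
 */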

View File

@ -0,0 +1,29 @@
package com.atguigu.rtgmall.beans;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
import java.math.BigDecimal;
/**
*@BelongsProject: rt-gmall-parent
*@BelongsPackage: com.atguigu.rtgmall.beans
*@Author: markilue
*@CreateTime: 2023-05-12 21:29
*@Description: TODO 地区交易额统计实体类
*@Version: 1.0
*/
@AllArgsConstructor
@Data
@NoArgsConstructor
public class ProvinceStats {
private String stt;
private String edt;
private String province_id;
private String province_name;
private BigDecimal order_amount;
private String ts;
}

View File

@ -0,0 +1,72 @@
package com.atguigu.rtgmall.beans;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
import java.math.BigDecimal;
import java.math.RoundingMode;
/**
*@BelongsProject: rt-gmall-parent
*@BelongsPackage: com.atguigu.rtgmall.beans
*@Author: markilue
*@CreateTime: 2023-05-15 13:41
*@Description: TODO 访客统计实体类
*
*@Version: 1.0
*/
@Data
@AllArgsConstructor
@NoArgsConstructor
public class VisitorStats {
private String stt;
private String edt;
private String vc;
private String ch;
private String ar;
private String is_new;
private Long uv_ct = 0L;
private Long pv_ct = 0L;
private Long sv_ct = 0L;
private Long uj_ct = 0L;
private Long dur_sum = 0L;
private Long new_uv = 0L;
private Long ts;
private int hr;
//bounce rate = bounce count * 100 / session count
public BigDecimal getUjRate() {
if (sv_ct != 0L) {
return BigDecimal.valueOf(uj_ct)
.multiply(BigDecimal.valueOf(100))
.divide(BigDecimal.valueOf(sv_ct), 2, RoundingMode.HALF_UP);
} else {
return BigDecimal.ZERO;
}
}
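// e.g. (illustrative values) uj_ct = 3 and sv_ct = 12 give 3 * 100 / 12 = 25.00 (%), rounded HALF_UP to two decimals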
//average dwell time per session (seconds) = total dwell time of the day (ms) / session count of the day / 1000
public BigDecimal getDurPerSv() {
if (sv_ct != 0L) {
return BigDecimal.valueOf(dur_sum)
.divide(BigDecimal.valueOf(sv_ct), 0, RoundingMode.HALF_UP)
.divide(BigDecimal.valueOf(1000), 1, RoundingMode.HALF_UP);
} else {
return BigDecimal.ZERO;
}
}
//average pages per session = total page views of the day / session count of the day
public BigDecimal getPvPerSv() {
if (sv_ct != 0L) {
return BigDecimal.valueOf(pv_ct)
.divide(BigDecimal.valueOf(sv_ct), 2, RoundingMode.HALF_UP);
} else {
return BigDecimal.ZERO;
}
}
}

View File

@ -0,0 +1,378 @@
package com.atguigu.rtgmall.controller;
import com.atguigu.rtgmall.beans.KeywordStats;
import com.atguigu.rtgmall.beans.ProductStats;
import com.atguigu.rtgmall.beans.ProvinceStats;
import com.atguigu.rtgmall.beans.VisitorStats;
import com.atguigu.rtgmall.service.KeywordStatsService;
import com.atguigu.rtgmall.service.ProductStatsService;
import com.atguigu.rtgmall.service.ProvinceStatsService;
import com.atguigu.rtgmall.service.VisitorStatsService;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.time.DateFormatUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
import java.math.BigDecimal;
import java.util.*;
/**
* Author: Felix
* Date: 2021/8/16
* Desc: 大屏展示的控制层
*/
@RestController
@RequestMapping("/api/sugar")
public class SugarController {
@Autowired
private ProductStatsService productStatsService;
@Autowired
private ProvinceStatsService provinceStatsService;
@Autowired
private VisitorStatsService visitorStatsService;
@Autowired
private KeywordStatsService keywordStatsService;
@RequestMapping("/gmv")
public String getGMV(@RequestParam(value = "date", defaultValue = "0") Integer date) {
if (date == 0) {
date = now();
}
//调用service获取总交易额
BigDecimal gmv = productStatsService.getGMV(date);
String json = "{\"status\": 0,\"data\": " + gmv + "}";
return json;
}
@RequestMapping("/tm")
public String getProductStatsByTm(@RequestParam(value = "date", defaultValue = "0") Integer date, @RequestParam(value = "limit", defaultValue = "20") Integer limit) {
if (date == 0) {
date = now();
}
/*
{
"status": 0,
"data": {
"categories": ["苹果","三星", "华为","oppo","vivo","小米98"],
"series": [{
"name": "商品品牌",
"data": [5101,6100,6371,5682,5430,9533]
}]
}
}
*/
//call the service layer to get per-brand order amounts
List<ProductStats> productStatsList = productStatsService.getProductStatsByTm(date, limit);
//walk the result list and collect brand names and order amounts into two separate lists
List<String> tmList = new ArrayList<>();
List<BigDecimal> amountList = new ArrayList<>();
for (ProductStats productStats : productStatsList) {
tmList.add(productStats.getTm_name());
amountList.add(productStats.getOrder_amount());
}
String json = "{" +
" \"status\": 0," +
" \"data\": {" +
" \"categories\": [\"" + StringUtils.join(tmList, "\",\"") + "\"]," +
" \"series\": [{" +
" \"name\": \"商品品牌\"," +
" \"data\": [" + StringUtils.join(amountList, ",") + "]" +
" }]" +
" }" +
" }";
return json;
}
// @RequestMapping("/category3")
// public String getProductStatsByCategory3(@RequestParam(value = "date", defaultValue = "0") Integer date, @RequestParam(value = "limit", defaultValue = "20") Integer limit) {
// if (date == 0) {
// date = now();
// }
// /*
// {"status": 0,"data": [{"name": "PC","value": 97},{"name": "iOS","value": 50,}]}
// */
// //调用service获取品牌交易额
// List<ProductStats> productStatsList = productStatsService.getProductStatsByCategory3(date, limit);
//
// //对查询结果进行遍历,获取品牌以及品牌交易额,将品牌以及品牌交易额分别封装到独立的list集合中
// StringBuilder json = new StringBuilder("{\"status\": 0,\"data\": [");
//
// for (int i = 0; i < productStatsList.size(); i++) {
// ProductStats productStats = productStatsList.get(i);
// json.append("{\"name\": \"" + productStats.getCategory3_name() + "\",\"value\": " + productStats.getOrder_amount() + "}");
// if (i < productStatsList.size() - 1) {
// json.append(",");
// }
// }
//
// json.append("]}");
//
// return json.toString();
// }
@RequestMapping("/category3")
public Object getProductStatsByCategory3(@RequestParam(value = "date", defaultValue = "0") Integer date, @RequestParam(value = "limit", defaultValue = "20") Integer limit) {
if (date == 0) {
date = now();
}
/*
{"status": 0,"data": [{"name": "PC","value": 97},{"name": "iOS","value": 50,}]}
*/
//call the service layer to get per-category-3 order amounts
List<ProductStats> productStatsList = productStatsService.getProductStatsByCategory3(date, limit);
HashMap resMap = new HashMap();
List dateList = new ArrayList();
for (ProductStats productStats : productStatsList) {
HashMap dateMap = new HashMap();
dateMap.put("name", productStats.getCategory3_name());
dateMap.put("value", productStats.getOrder_amount());
dateList.add(dateMap);
}
resMap.put("status", 0);
resMap.put("data", dateList);
return resMap;
}
@RequestMapping("/spu")
public String getProductStatsBySPU(@RequestParam(value = "date", defaultValue = "0") Integer date, @RequestParam(value = "limit", defaultValue = "20") Integer limit) {
if (date == 0) {
date = now();
}
//call the service layer to get per-SPU order amounts and order counts
List<ProductStats> productStatsList = productStatsService.getProductStatsBySPU(date, limit);
//walk the result list and build the table rows (name / amount / ct) expected by Sugar
StringBuilder json = new StringBuilder("{ " +
" \"status\": 0, " +
" \"data\": { " +
" \"columns\": [ " +
" { " +
" \"name\": \"商品名称\", " +
" \"id\": \"name\" " +
" }, " +
" { " +
" \"name\": \"交易额\", " +
" \"id\": \"amount\" " +
" }, " +
" { " +
" \"name\": \"订单数\", " +
" \"id\": \"ct\" " +
" } " +
" ], " +
" \"rows\": [");
for (int i = 0; i < productStatsList.size(); i++) {
ProductStats productStats = productStatsList.get(i);
// System.out.println(productStats.getSpu_name()+":"+productStats.getOrder_amount()+":"+productStats.getOrder_ct());
json.append("{\"name\": \"" + productStats.getSpu_name() + "\",\"amount\": " + productStats.getOrder_amount() + ",\"ct\": " + productStats.getOrder_ct() + "}");
if (i < productStatsList.size() - 1) {
json.append(",");
}
}
json.append("]}}");
return json.toString();
}
@RequestMapping("province")
public String getProvinceStats(@RequestParam(value = "date", defaultValue = "0") Integer date) {
if (date == 0) {
date = now();
}
List<ProvinceStats> provinceStatsList = provinceStatsService.getProvinceStats(date);
StringBuilder json = new StringBuilder("{\"status\": 0,\"data\": {\"mapData\": [");
for (int i = 0; i < provinceStatsList.size(); i++) {
ProvinceStats provinceStats = provinceStatsList.get(i);
json.append("{\"name\": \"" + provinceStats.getProvince_name() + "\",\"value\": " + provinceStats.getOrder_amount() + "}");
if (i < provinceStatsList.size() - 1) {
json.append(",");
}
}
json.append("],\"valueName\": \"省份交易额\"}}");
return json.toString();
}
@RequestMapping("visitor")
public String getVisitorStats(@RequestParam(value = "date", defaultValue = "0") Integer date) {
if (date == 0) {
date = now();
}
List<VisitorStats> visitorStatsList = visitorStatsService.getVisitorStats(date);
//构建新老用户
VisitorStats newVisitor = new VisitorStats();
VisitorStats oldVisitor = new VisitorStats();
for (VisitorStats visitorStats : visitorStatsList) {
if ("1".equals(visitorStats.getIs_new())) {
//新访客
newVisitor = visitorStats;
} else {
//老访客
oldVisitor = visitorStats;
}
}
String json = "{ " +
" \"status\": 0, " +
" \"data\": { " +
" \"columns\": [" +
" { " +
" \"name\": \"类别\", " +
" \"id\": \"type\" " +
" }, " +
" { " +
" \"name\": \"新用户\", " +
" \"id\": \"new\" " +
" }, " +
" { " +
" \"name\": \"老用户\", " +
" \"id\": \"old\" " +
" }" +
" ], " +
" \"rows\": [ " +
" { " +
" \"type\": \"用户数(人)\", " +
" \"new\": " + newVisitor.getUv_ct() + ", " +
" \"old\": " + oldVisitor.getUv_ct() +
" }, " +
" { " +
" \"type\": \"总访问页面(次)\", " +
" \"new\": " + newVisitor.getPv_ct() + ", " +
" \"old\": " + oldVisitor.getPv_ct() +
" }, " +
" { " +
" \"type\": \"跳出率(%)\", " +
" \"new\": " + newVisitor.getUjRate() + ", " +
" \"old\": " + oldVisitor.getUjRate() +
" }, " +
" { " +
" \"type\": \"平均在线时长(秒)\", " +
" \"new\": " + newVisitor.getDurPerSv() + ", " +
" \"old\": " + oldVisitor.getDurPerSv() +
" }, " +
" { " +
" \"type\": \"平均访问页面数(次)\", " +
" \"new\": " + newVisitor.getPvPerSv() + ", " +
" \"old\": " + oldVisitor.getPvPerSv() +
" }" +
" ]}}";
return json;
}
@RequestMapping("hr")
public String getVisitorStatsByHr(@RequestParam(value = "date", defaultValue = "0") Integer date) {
if (date == 0) {
date = now();
}
//从Service中获取分时统计的访问情况
List<VisitorStats> visitorStatsList = visitorStatsService.getVisitorStatsByHr(date);
//arrays with one slot per hour of the day (24 buckets)
Long[] PVArr = new Long[24];
Long[] UVArr = new Long[24];
Long[] newUVArr = new Long[24];
Arrays.fill(PVArr, 0L);
Arrays.fill(UVArr, 0L);
Arrays.fill(newUVArr, 0L);
String[] category = {"00", "01", "02", "03", "04", "05", "06", "07", "08", "09", "10", "11", "12", "13", "14", "15", "16", "17", "18", "19", "20", "21", "22", "23"};
//copy each result into the array slot for its hour
for (VisitorStats visitorStats : visitorStatsList) {
//fill the slots indexed by the record's hour
PVArr[visitorStats.getHr()] = visitorStats.getPv_ct();
UVArr[visitorStats.getHr()] = visitorStats.getUv_ct();
newUVArr[visitorStats.getHr()] = visitorStats.getNew_uv();
}
String pvString = StringUtils.join(PVArr, ",");
String uvString = StringUtils.join(UVArr, ",");
String newUvString = StringUtils.join(newUVArr, ",");
String categoryString = "\"" + StringUtils.join(category, "\",\"") + "\"";
String json = "{ " +
" \"status\": 0, " +
" \"data\": { " +
" \"categories\": [" + categoryString + "], " +
" \"series\": [ " +
" { " +
" \"name\": \"UV\", " +
" \"data\": [" + uvString + "] " +
" }, " +
" { " +
" \"name\": \"PV\", " +
" \"data\": [" + pvString + "] " +
" }, " +
" { " +
" \"name\": \"newUV\", " +
" \"data\": [" + newUvString + "] " +
" } " +
" ] " +
" } " +
"}";
return json;
}
@RequestMapping("keyword")
public String getKeywordStats(@RequestParam(value = "date", defaultValue = "0") Integer date, @RequestParam(value = "limit", defaultValue = "20") Integer limit) {
if (date == 0) {
date = now();
}
List<KeywordStats> keywordStatsList = keywordStatsService.getKeywordStats(date, limit);
StringBuilder json = new StringBuilder("{ " +
" \"status\": 0, " +
" \"data\": [");
for (int i = 0; i < keywordStatsList.size(); i++) {
KeywordStats keywordStats = keywordStatsList.get(i);
json.append("{ " +
" \"name\": \"" + keywordStats.getKeyword() + "\", " +
" \"value\": " + keywordStats.getCt() + " " +
" }");
if (i < keywordStatsList.size() - 1) {
json.append(",");
}
}
json.append("]}");
return json.toString();
}
//获取当前日期
private Integer now() {
String yyyyMMdd = DateFormatUtils.format(new Date(), "yyyyMMdd");
return Integer.valueOf(yyyyMMdd);
}
}
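/*
 * Example call (illustrative): GET /api/sugar/gmv?date=20230511 answers with
 * {"status": 0, "data": 258910.00}, where "data" is the day's total order_amount from
 * ProductStatsService.getGMV(); when the date parameter is omitted, now() substitutes
 * the current day in yyyyMMdd form. The amount shown is a made-up sample value.
 */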

View File

@ -0,0 +1,234 @@
// /api/sugar/tm -- order amount by brand
{
"status": 0,
"data": {
"categories": [
"苹果",
"小米",
"华为",
"Redmi",
"索芙特"
],
"series": [
{
"name": "商品品牌",
"data": [
258910.00,
153974.00,
127240.00,
24681.00,
3439.00
]
}
]
}
}
// /api/sugar/category3 -- category share (pie chart)
{
"status": 0,
"msg": "",
"data": [
{
"name": "PC",
"value": 97
},
{
"name": "iOS",
"value": 50
}
]
}
// /api/sugar/spu -- SPU table
{
"status": 0,
"data": {
"columns": [
{
"name": "商品名称",
"id": "name"
},
{
"name": "交易额",
"id": "amount"
},
{
"name": "订单数",
"id": "ct"
}
],
"rows": [
{
"name": "北京总部",
"amount": 1,
"cr": 1
},
{
"name": "北京总部",
"amount": 1,
"cr": 1
}
]
}
}
// /api/sugar/province -- map data
{
"status": 0,
"data": {
"mapData": [
{
"name": "北京",
"value": 8985
},
{
"name": "天津",
"value": 8616
}
],
"valueName": "省份交易额"
}
}
// /api/sugar/visitor -- new vs. returning visitors (table)
{
"status": 0,
"data": {
"total": 13,
"columns": [
{
"name": "类别",
"id": "type"
},
{
"name": "新用户",
"id": "new"
},
{
"name": "老用户",
"id": "old"
}
],
"rows": [
{
"type": "用户数(人)",
"new": 200,
"old": 500
},
{
"type": "总访问页面(次)",
"new": 200,
"old": 500
},
{
"type": "跳出率(%)",
"new": 200,
"old": 500
},
{
"type": "平均在线时长(秒)",
"new": 200,
"old": 500
},
{
"type": "平均访问页面数(次)",
"new": 200,
"old": 500
}
]
}
}
{
"status": 0,
"data": {
"columns": [
{
"name": "类别",
"id": "type"
},
{
"name": "新用户",
"id": "new"
},
{
"name": "老用户",
"id": "old"
}
],
"rows": [
{
"type": "用户数(人)",
"new": 8,
"old": 6
},
{
"type": "总访问页面(次)",
"new": 80,
"old": 69
},
{
"type": "跳出率(%)",
"new": 0.00,
"old": 0.00
},
{
"type": "平均在线时长(秒)",
"new": 52.2,
"old": 55.2
},
{
"type": "平均访问页面数(次)",
"new": 5.33,
"old": 4.93
}
]
}
}
// /api/sugar/hr -- hourly UV/PV line chart
{
"status": 0,
"data": {
"categories": [
"00",
"01",
"02",
...,
"23"
],
"series": [
{
"name": "UV",
"data": [
2891,
4166,
...,
2625
]
},
{
"name": "PV",
"data": [
6289,
8964,
...,
5030
]
},
{
"name": "newUV",
"data": [
6289,
8964,
...,
5030
]
}
]
}
}
// /api/sugar/keyword -- 3d word cloud sample
{"status": 0,"msg": "", "data": [{"name": "海门", "value": 1}, {"name": "鄂尔多斯", "value": 1}]}

View File

@ -0,0 +1,33 @@
package com.atguigu.rtgmall.mapper;
import com.atguigu.rtgmall.beans.KeywordStats;
import org.apache.ibatis.annotations.Param;
import org.apache.ibatis.annotations.Select;
import java.util.List;
/**
*@BelongsProject: rt-gmall-parent
*@BelongsPackage: com.atguigu.rtgmall.mapper
*@Author: markilue
*@CreateTime: 2023-05-15 15:01
*@Description: TODO 关键词统计Mapper接口
*@Version: 1.0
*/
public interface KeywordStatsMapper {
@Select("select keyword, " +
" sum(keyword_stats.ct * " +
" multiIf( " +
" source = 'SEARCH', 10, " +
" source = 'ORDER', 5, " +
" source = 'CART', 2, " +
" source = 'CLICK', 1, 0 " +
" )) ct " +
"from keyword_stats " +
"where toYYYYMMDD(stt) = #{date} " +
"group by keyword " +
"order by sum (keyword_stats.ct) desc " +
"limit #{limit}")
List<KeywordStats> selectKeywordStats(@Param("date") Integer date,@Param("limit") Integer limit);
}

View File

@ -0,0 +1,43 @@
package com.atguigu.rtgmall.mapper;
import com.atguigu.rtgmall.beans.ProductStats;
import org.apache.ibatis.annotations.Param;
import org.apache.ibatis.annotations.Select;
import java.math.BigDecimal;
import java.util.List;
/**
* Author: Felix
* Date: 2021/8/16
* Desc: 商品统计Mapper接口
*/
public interface ProductStatsMapper {
//获取某天商品的总交易额
@Select("select sum(order_amount) order_amount from product_stats where toYYYYMMDD(stt)=#{date}")
BigDecimal selectGMV(Integer date);
@Select("select tm_id,tm_name,sum(order_amount) order_amount from product_stats " +
"where toYYYYMMDD(stt) = #{date} " +
"group by tm_id,tm_name " +
"having order_amount > 0 " +
"order by order_amount desc limit #{limit}")
List<ProductStats> selectProductStatsByTm(@Param("date") Integer date, @Param("limit") Integer limit);
@Select("select category3_id,category3_name,sum(order_amount) order_amount from product_stats " +
"where toYYYYMMDD(stt)=#{date} " +
"group by category3_id,category3_name " +
"having order_amount>0 " +
"order by order_amount desc limit #{limit} ")
List<ProductStats> selectProductStatsByCategory3(@Param("date") Integer date, @Param("limit") Integer limit);
@Select("select spu_id, spu_name, sum(order_amount) order_amount, sum(order_ct) order_ct " +
"from product_stats " +
"where toYYYYMMDD(stt) = #{date} " +
"group by spu_id, spu_name " +
"having order_amount > 0 " +
"order by order_amount desc, order_ct desc limit #{limit} ")
List<ProductStats> selectProductStatsBySPU(@Param("date") Integer date, @Param("limit") Integer limit);
}

View File

@ -0,0 +1,26 @@
package com.atguigu.rtgmall.mapper;
import com.atguigu.rtgmall.beans.ProvinceStats;
import org.apache.ibatis.annotations.Select;
import java.util.List;
/**
*@BelongsProject: rt-gmall-parent
*@BelongsPackage: com.atguigu.rtgmall.mapper
*@Author: markilue
*@CreateTime: 2023-05-12 21:29
*@Description: TODO 地区统计Mapper
*@Version: 1.0
*/
public interface ProvinceStatsMapper {
@Select("select province_id,province_name,sum(order_amount) order_amount " +
"from province_stats " +
"where toYYYYMMDD(stt) = #{date} " +
"group by province_id, province_name")
List<ProvinceStats> selectProvinceStats(Integer date);
}

View File

@ -0,0 +1,33 @@
package com.atguigu.rtgmall.mapper;
import com.atguigu.rtgmall.beans.VisitorStats;
import org.apache.ibatis.annotations.Select;
import java.util.List;
/**
*@BelongsProject: rt-gmall-parent
*@BelongsPackage: com.atguigu.rtgmall.mapper
*@Author: markilue
*@CreateTime: 2023-05-15 13:40
*@Description: TODO 访客统计mapper
*@Version: 1.0
*/
public interface VisitorStatsMapper {
@Select("select " +
" is_new,sum(uv_ct) uv_ct,sum(pv_ct) pv_ct,sum(sv_ct) sv_ct,sum(uj_ct) uj_ct,sum(dur_sum) dur_sum " +
"from " +
" visitor_stats " +
"where toYYYYMMDD(stt)=#{date} " +
"group by " +
" is_new")
List<VisitorStats> selectVisitorStats(Integer date);
@Select("select toHour(stt) hr,sum(uv_ct) uv_ct,sum(pv_ct) pv_ct,sum(if(is_new='1',visitor_stats.uv_ct,0)) new_uv\n" +
"from visitor_stats " +
"group by hr")
List<VisitorStats> selectVisitorStatsByHr(Integer date);
}

View File

@ -0,0 +1,72 @@
-- Order amount by brand
select tm_id, tm_name, sum(order_amount) order_amount
from product_stats
where toYYYYMMDD(stt) = 20230511
group by tm_id, tm_name
having order_amount > 0
order by order_amount desc limit 5;
-- Order amount share by category-3
select category3_id, category3_name, sum(order_amount) order_amount
from product_stats
where toYYYYMMDD(stt) = 20230511
group by category3_id, category3_name
having order_amount > 0
order by order_amount desc limit 5;
-- Order amount by SPU
select spu_id, spu_name, sum(order_amount) order_amount, sum(order_ct) order_ct
from product_stats
where toYYYYMMDD(stt) = 20230511
group by spu_id, spu_name
having order_amount > 0
order by order_amount desc, order_ct desc limit 5;
-- Order amount by province
select province_id, province_name, sum(order_amount) order_amount
from province_stats
where toYYYYMMDD(stt) = 20230511
group by province_id, province_name
-- Visitor stats split by new/returning flag
select is_new,
sum(uv_ct) uv_ct,
sum(pv_ct) pv_ct,
sum(sv_ct) sv_ct,
sum(uj_ct) uj_ct,
sum(dur_sum) dur_sum
from visitor_stats
where toYYYYMMDD(stt) = 20230511
group by is_new;
-- Hourly visitor stats
-- note: qualify the column as visitor_stats.uv_ct inside if(), otherwise the reference is ambiguous
select toHour(stt) hr,
sum(uv_ct) uv_ct,
sum(pv_ct) pv_ct,
sum(if(is_new = '1', visitor_stats.uv_ct, 0)) new_uv
from visitor_stats
group by hr;
-- Keyword stats (weighted by source)
select keyword,
sum(keyword_stats.ct *
multiIf(
source = 'SEARCH', 10,
source = 'ORDER', 5,
source = 'CART', 2,
source = 'CLICK', 1, 0
)) ct
from keyword_stats
where toYYYYMMDD(stt) = 20230512
group by keyword
order by sum (keyword_stats.ct) desc
limit 5;
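-- worked example of the weighting above (illustrative counts):
-- 2 SEARCH hits, 1 ORDER and 3 CLICKs score 2*10 + 1*5 + 3*1 = 28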

View File

@ -0,0 +1,18 @@
package com.atguigu.rtgmall.service;
import com.atguigu.rtgmall.beans.KeywordStats;
import java.util.List;
/**
*@BelongsProject: rt-gmall-parent
*@BelongsPackage: com.atguigu.rtgmall.service
*@Author: markilue
*@CreateTime: 2023-05-15 15:09
*@Description: TODO 关键词统计service
*@Version: 1.0
*/
public interface KeywordStatsService {
List<KeywordStats> getKeywordStats(Integer date,Integer limit);
}

View File

@ -0,0 +1,30 @@
package com.atguigu.rtgmall.service;
import com.atguigu.rtgmall.beans.ProductStats;
import java.math.BigDecimal;
import java.util.List;
/**
* Author: Felix
* Date: 2021/8/16
* Desc: 商品统计Service接口
*/
public interface ProductStatsService {
//获取某天的总交易额
BigDecimal getGMV(Integer date);
//获取品牌成交额
List<ProductStats> getProductStatsByTm(Integer date, Integer limit);
List<ProductStats> getProductStatsByCategory3(Integer date, Integer limit);
//获取spu成交额
List<ProductStats> getProductStatsBySPU(Integer date, Integer limit);
}

View File

@ -0,0 +1,19 @@
package com.atguigu.rtgmall.service;
import com.atguigu.rtgmall.beans.ProvinceStats;
import java.util.List;
/**
*@BelongsProject: rt-gmall-parent
*@BelongsPackage: com.atguigu.rtgmall.service
*@Author: markilue
*@CreateTime: 2023-05-12 21:32
*@Description: TODO 地区统计接口
*@Version: 1.0
*/
public interface ProvinceStatsService {
//地区交易额
List<ProvinceStats> getProvinceStats(Integer date);
}

View File

@ -0,0 +1,19 @@
package com.atguigu.rtgmall.service;
import com.atguigu.rtgmall.beans.VisitorStats;
import java.util.List;
/**
*@BelongsProject: rt-gmall-parent
*@BelongsPackage: com.atguigu.rtgmall.service
*@Author: markilue
*@CreateTime: 2023-05-15 13:47
*@Description: TODO 访客统计service
*@Version: 1.0
*/
public interface VisitorStatsService {
List<VisitorStats> getVisitorStats(Integer date);
List<VisitorStats> getVisitorStatsByHr(Integer date);
}

View File

@ -0,0 +1,28 @@
package com.atguigu.rtgmall.service.impl;
import com.atguigu.rtgmall.beans.KeywordStats;
import com.atguigu.rtgmall.mapper.KeywordStatsMapper;
import com.atguigu.rtgmall.service.KeywordStatsService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import java.util.List;
/**
*@BelongsProject: rt-gmall-parent
*@BelongsPackage: com.atguigu.rtgmall.service.impl
*@Author: markilue
*@CreateTime: 2023-05-15 15:10
*@Description: TODO 关键词统计服务实现类
*@Version: 1.0
*/
@Service
public class KeywordStatsServiceImpl implements KeywordStatsService {
@Autowired
private KeywordStatsMapper keywordStatsMapper;
@Override
public List<KeywordStats> getKeywordStats(Integer date, Integer limit) {
return keywordStatsMapper.selectKeywordStats(date, limit);
}
}

View File

@ -0,0 +1,44 @@
package com.atguigu.rtgmall.service.impl;
import com.atguigu.rtgmall.beans.ProductStats;
import com.atguigu.rtgmall.mapper.ProductStatsMapper;
import com.atguigu.rtgmall.service.ProductStatsService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import java.math.BigDecimal;
import java.util.List;
/**
* Author: Felix
* Date: 2021/8/16
* Desc: 商品统计Service接口实现类
*/
@Service
public class ProductStatsServiceImpl implements ProductStatsService {
@Autowired
private ProductStatsMapper productStatsMapper;
@Override
public BigDecimal getGMV(Integer date) {
return productStatsMapper.selectGMV(date);
}
@Override
public List<ProductStats> getProductStatsByTm(Integer date, Integer limit) {
return productStatsMapper.selectProductStatsByTm(date, limit);
}
@Override
public List<ProductStats> getProductStatsByCategory3(Integer date, Integer limit) {
return productStatsMapper.selectProductStatsByCategory3(date,limit);
}
@Override
public List<ProductStats> getProductStatsBySPU(Integer date, Integer limit) {
return productStatsMapper.selectProductStatsBySPU(date, limit);
}
}

View File

@ -0,0 +1,31 @@
package com.atguigu.rtgmall.service.impl;
import com.atguigu.rtgmall.beans.ProvinceStats;
import com.atguigu.rtgmall.mapper.ProvinceStatsMapper;
import com.atguigu.rtgmall.service.ProvinceStatsService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import java.util.List;
/**
*@BelongsProject: rt-gmall-parent
*@BelongsPackage: com.atguigu.rtgmall.service.impl
*@Author: markilue
*@CreateTime: 2023-05-12 21:32
*@Description: TODO 地区统计接口实现类
*@Version: 1.0
*/
@Service
public class ProvinceStatsServiceImpl implements ProvinceStatsService {
@Autowired
private ProvinceStatsMapper provinceStatsMapper;
@Override
public List<ProvinceStats> getProvinceStats(Integer date) {
return provinceStatsMapper.selectProvinceStats(date);
}
}

View File

@ -0,0 +1,34 @@
package com.atguigu.rtgmall.service.impl;
import com.atguigu.rtgmall.beans.VisitorStats;
import com.atguigu.rtgmall.mapper.VisitorStatsMapper;
import com.atguigu.rtgmall.service.VisitorStatsService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import java.util.List;
/**
*@BelongsProject: rt-gmall-parent
*@BelongsPackage: com.atguigu.rtgmall.service.impl
*@Author: markilue
*@CreateTime: 2023-05-15 13:48
*@Description: TODO 访客主题服务实现类
*@Version: 1.0
*/
@Service
public class VisitorStatsServiceImpl implements VisitorStatsService {
@Autowired
private VisitorStatsMapper visitorStatsMapper;
@Override
public List<VisitorStats> getVisitorStats(Integer date) {
return visitorStatsMapper.selectVisitorStats(date);
}
@Override
public List<VisitorStats> getVisitorStatsByHr(Integer date) {
return visitorStatsMapper.selectVisitorStatsByHr(date);
}
}

View File

@ -0,0 +1,4 @@
server.port=8069
spring.datasource.driver-class-name=ru.yandex.clickhouse.ClickHouseDriver
spring.datasource.url=jdbc:clickhouse://Ding202:8123/rt_gmall

View File

@ -0,0 +1,13 @@
package com.atguigu.rtgmall;
import org.junit.jupiter.api.Test;
import org.springframework.boot.test.context.SpringBootTest;
@SpringBootTest
class GmallPublisherApplicationTests {
@Test
void contextLoads() {
}
}

View File

@ -0,0 +1,24 @@
package com.atguigu.rtgmall;
/**
*@BelongsProject: rt-gmall-parent
*@BelongsPackage: com.atguigu.rtgmall
*@Author: markilue
*@CreateTime: 2023-05-12 21:07
*@Description: TODO
*@Version: 1.0
*/
public class test1 {
public static void main(String[] args) {
String date ="20230511";
String limit ="5";
String sql = "select spu_id,spu_name,sum(order_amount) order_amount,sum(order_ct) order_ct from product_stats " +
"where toYYYYMMDD(stt)="+date+" " +
"group by spu_id,spu_name " +
"having order_amount > 0 " +
"order by order_amount desc,order_ct desc limit "+limit+" ";
System.out.println(sql);
}
}

View File

@ -25,12 +25,14 @@
<groupId>org.apache.flink</groupId>
<artifactId>flink-java</artifactId>
<version>${flink.version}</version>
<scope>provided</scope>
</dependency>
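<!-- scope "provided": these jars are supplied by the Flink runtime on the cluster, so they are excluded from the fat jar built for submission -->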
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-streaming-java_${scala.version}</artifactId>
<version>${flink.version}</version>
<scope>provided</scope>
</dependency>
<dependency>
@ -43,18 +45,21 @@
<groupId>org.apache.flink</groupId>
<artifactId>flink-clients_${scala.version}</artifactId>
<version>${flink.version}</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-cep_${scala.version}</artifactId>
<version>${flink.version}</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-json</artifactId>
<version>${flink.version}</version>
<scope>provided</scope>
</dependency>
<dependency>
@ -67,26 +72,130 @@
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-client</artifactId>
<version>${hadoop.version}</version>
<scope>provided</scope>
</dependency>
<!-- Flink logs through slf4j (a logging facade); log4j is used here as the concrete implementation -->
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
<version>1.7.25</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
<version>1.7.25</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-to-slf4j</artifactId>
<version>2.14.0</version>
<scope>provided</scope>
</dependency>
<!-- Lombok -->
<dependency>
<groupId>org.projectlombok</groupId>
<artifactId>lombok</artifactId>
<version>1.18.14</version>
<scope>provided</scope>
</dependency>
<!-- Flink MySQL CDC connector -->
<dependency>
<groupId>mysql</groupId>
<artifactId>mysql-connector-java</artifactId>
<version>5.1.47</version>
</dependency>
<dependency>
<groupId>com.alibaba.ververica</groupId>
<artifactId>flink-connector-mysql-cdc</artifactId>
<version>1.2.0</version>
</dependency>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<version>4.13.2</version>
<scope>compile</scope>
</dependency>
<!-- commons-beanutils: Apache utility library that makes it easy to read and write JavaBean properties -->
<dependency>
<groupId>commons-beanutils</groupId>
<artifactId>commons-beanutils</artifactId>
<version>1.9.3</version>
</dependency>
<dependency>
<groupId>org.apache.phoenix</groupId>
<artifactId>phoenix-spark</artifactId>
<version>5.0.0-HBase-2.0</version>
<exclusions>
<exclusion>
<groupId>org.glassfish</groupId>
<artifactId>javax.el</artifactId>
</exclusion>
</exclusions>
</dependency>
<!-- Redis client -->
<dependency>
<groupId>redis.clients</groupId>
<artifactId>jedis</artifactId>
<version>3.3.0</version>
</dependency>
<!-- ClickHouse JDBC driver -->
<dependency>
<groupId>ru.yandex.clickhouse</groupId>
<artifactId>clickhouse-jdbc</artifactId>
<version>0.3.0</version>
<exclusions>
<exclusion>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-databind</artifactId>
</exclusion>
<exclusion>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-core</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-connector-jdbc_${scala.version}</artifactId>
<version>${flink.version}</version>
</dependency>
<!--flink sql-->
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-table-api-java-bridge_${scala.version}</artifactId>
<version>${flink.version}</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-table-planner-blink_${scala.version}</artifactId>
<version>${flink.version}</version>
<scope>provided</scope>
</dependency>
<!-- IK Chinese word segmenter -->
<dependency>
<groupId>com.janeluo</groupId>
<artifactId>ikanalyzer</artifactId>
<version>2012_u6</version>
</dependency>
</dependencies>
<build>
@ -114,5 +223,4 @@
</build>
</project>

View File

@ -0,0 +1,39 @@
package com.atguigu.gmall.realtime;
import org.junit.Test;
/**
*@BelongsProject: rt-gmall-parent
*@BelongsPackage: com.atguigu.gmall.realtime
*@Author: markilue
*@CreateTime: 2023-05-06 21:14
*@Description: TODO
*@Version: 1.0
*/
public class Test1 {
@Test
public void test() {
String sinTable = "base_trademark";
String sinkPK = "id";
String sinkColumn = "id,tm_name";
String sinExtend = "";
checkTable(sinTable,sinkPK,sinkColumn,sinExtend);
}
private void checkTable(String sinkTable, String sinkPk, String sinkColumns, String sinkExtend) {
StringBuilder sql = new StringBuilder();
sql.append("create table if not exist " + sinkTable + "(");
String[] columns = sinkColumns.split(",");
for (String column : columns) {
sql.append(column + " String,");
}
sql.append("PRIMARY KEY (" + sinkPk + "))");
System.out.println(sql.toString());
//create table if not exists base_trademark(id String,tm_name String,PRIMARY KEY (id))
}
}

View File

@ -0,0 +1,46 @@
package com.atguigu.gmall.realtime.app.dwd;
import org.apache.flink.api.common.restartstrategy.RestartStrategies;
import org.apache.flink.runtime.state.filesystem.FsStateBackend;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.environment.CheckpointConfig;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
/**
*@BelongsProject: rt-gmall-parent
*@BelongsPackage: com.atguigu.gmall.realtime.app.dwd
*@Author: markilue
*@CreateTime: 2023-05-10 20:06
*@Description: TODO 抽取基类:模板方法设计模式
*@Version: 1.0
*/
public abstract class Base {
public void entry() throws Exception {
//TODO 1. Prepare the basic environment
//1.1 stream execution environment
StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
//1.2 parallelism
env.setParallelism(4);
//TODO 2. Checkpoint settings
//2.1 enable checkpointing
env.enableCheckpointing(5000L, CheckpointingMode.EXACTLY_ONCE);
//2.2 checkpoint timeout
env.getCheckpointConfig().setCheckpointTimeout(60000L);
//2.3 restart strategy
env.setRestartStrategy(RestartStrategies.fixedDelayRestart(3, 3000L));
//2.4 keep externalized checkpoints when the job is cancelled
env.getCheckpointConfig().enableExternalizedCheckpoints(CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION);
//2.5 state backend: memory / filesystem / RocksDB
env.setStateBackend(new FsStateBackend("hdfs://Ding202:8020/rt_gmall/gmall"));
//2.6 user for HDFS access
System.setProperty("HADOOP_USER_NAME", "dingjiawen");
execute(env);
env.execute();
}
public abstract void execute(StreamExecutionEnvironment environment);
}
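/*
 * Minimal usage sketch of the template-method base class above. DemoLogApp and the
 * sample elements are hypothetical: a real job implements execute() with its own
 * stream logic, while entry() wires up the environment, checkpointing and env.execute().
 */
class DemoLogApp extends Base {
    public static void main(String[] args) throws Exception {
        new DemoLogApp().entry();
    }

    @Override
    public void execute(StreamExecutionEnvironment env) {
        // entry() has already configured parallelism, checkpointing and the state backend
        env.fromElements("a", "b", "c").print();
    }
}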

View File

@ -0,0 +1,146 @@
package com.atguigu.gmall.realtime.app.dwd;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.alibaba.ververica.cdc.connectors.mysql.MySQLSource;
import com.alibaba.ververica.cdc.connectors.mysql.table.StartupOptions;
import com.alibaba.ververica.cdc.debezium.StringDebeziumDeserializationSchema;
import com.atguigu.gmall.realtime.app.func.DimSink;
import com.atguigu.gmall.realtime.app.func.MyDeserializationSchemaFunction;
import com.atguigu.gmall.realtime.app.func.TableProcessFunction;
import com.atguigu.gmall.realtime.beans.TableProcess;
import com.atguigu.gmall.realtime.utils.MyKafkaUtils;
import com.mysql.cj.xdevapi.Table;
import org.apache.flink.api.common.restartstrategy.RestartStrategies;
import org.apache.flink.api.common.state.MapState;
import org.apache.flink.api.common.state.MapStateDescriptor;
import org.apache.flink.runtime.state.filesystem.FsStateBackend;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.datastream.*;
import org.apache.flink.streaming.api.environment.CheckpointConfig;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.co.BroadcastProcessFunction;
import org.apache.flink.streaming.api.functions.sink.SinkFunction;
import org.apache.flink.streaming.api.functions.source.SourceFunction;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.streaming.connectors.kafka.KafkaSerializationSchema;
import org.apache.flink.util.Collector;
import org.apache.flink.util.OutputTag;
import org.apache.kafka.clients.producer.ProducerRecord;
import javax.annotation.Nullable;
/**
*@BelongsProject: rt-gmall-parent
*@BelongsPackage: com.atguigu.gmall.realtime.app.dwd
*@Author: markilue
*@CreateTime: 2023-05-05 21:13
*@Description: TODO 业务数据动态分流
*@Version: 1.0
*/
public class BaseDBApp {
public static void main(String[] args) throws Exception {
//TODO 1. Prepare the basic environment
//1.1 流处理环境
StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
//1.2 设置并行度
env.setParallelism(4);
//TODO 2.检查点设置
//2.1 开启检查点
env.enableCheckpointing(5000L, CheckpointingMode.EXACTLY_ONCE);
//2.2 设置检查点超时时间
env.getCheckpointConfig().setCheckpointTimeout(60000L);
//2.3 设置重启策略
env.setRestartStrategy(RestartStrategies.fixedDelayRestart(3, 3000L));
//2.4 设置job取消后,检查点是否保存
env.getCheckpointConfig().enableExternalizedCheckpoints(CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION);
//2.5 设置状态后端 内存/文件系统/RocksDB
env.setStateBackend(new FsStateBackend("hdfs://Ding202:8020/rt_gmall/gmall"));
//2.6 指定操作HDFS用户
System.setProperty("HADOOP_USER_NAME", "dingjiawen");
//TODO 3.从kafka中读取数据
//3.1 声明消费主题以及消费者组
String topic = "ods_base_db_m";
String groupId = "base_db_app_group";
//3.2 获取消费者对象
FlinkKafkaConsumer<String> kafkaSource = MyKafkaUtils.getKafkaSource(topic, groupId);
//3.3 读取数据 封装流
DataStreamSource<String> kafkaDs = env.addSource(kafkaSource);
//TODO 4.对数据类型进行转换 String ->JSONObject
SingleOutputStreamOperator<JSONObject> jsonObjDs = kafkaDs.map(JSON::parseObject);
//TODO 5.简单ETL
SingleOutputStreamOperator<JSONObject> filterDs = jsonObjDs.filter(
jsonObject -> {
boolean flag = jsonObject.getString("table") != null
&& jsonObject.getString("table").length() > 0
&& jsonObject.getJSONObject("data") != null
&& jsonObject.getString("data").length() > 3;
return flag;
}
);
// filterDs.print(">>>>");
//TODO 6.使用FlinkCDC读取配置表数据
//6.1 获取datasource
SourceFunction<String> mySqlSource = MySQLSource.<String>builder()
.hostname("Ding202")
.port(3306)
.databaseList("rt_gmall_realtime") // set captured database
.tableList("rt_gmall_realtime.table_process") // set captured table
.username("root")
.password("123456")
.startupOptions(StartupOptions.initial())
.deserializer(new MyDeserializationSchemaFunction()) // converts SourceRecord to JSON String
.build();
//6.2 读取数据封装流
DataStreamSource<String> mysqlDS = env.addSource(mySqlSource);
//6.3 broadcast the config stream so every parallel instance of the main stream can read it (broadcast state is always a map <k,v>)
MapStateDescriptor<String, TableProcess> mapStateDescriptor = new MapStateDescriptor<>("table_process", String.class, TableProcess.class);//describes the type that will be broadcast
BroadcastStream<String> broadcastDS = mysqlDS.broadcast(mapStateDescriptor);//turn the config stream into a broadcast stream
//6.4 connect the non-broadcast (business) stream with the broadcast config stream
BroadcastConnectedStream<JSONObject, String> connectDS = filterDs.connect(broadcastDS);
//TODO 7. Dynamic split: dimension data goes to the dim side output, fact data stays in the main stream
//declare the tag for the dimension side output
OutputTag<JSONObject> dimTag = new OutputTag<JSONObject>("dimTag") {
};
SingleOutputStreamOperator<JSONObject> realDS = connectDS.process(
new TableProcessFunction(dimTag, mapStateDescriptor)
);
//获取维度侧输出流
DataStream<JSONObject> dimDS = realDS.getSideOutput(dimTag);
dimDS.print(">>>>");
realDS.print("####");
//TODO 8. Write the dimension side output to HBase via Phoenix
dimDS.addSink(new DimSink());
//TODO 9. Write the main (fact) stream back to the Kafka dwd layer
//different tables have to go to different topics, so a fixed-topic sink does not fit here
//to keep exactly-once guarantees we still use Flink's own Kafka producer, via the schema-based sink below
realDS.addSink(MyKafkaUtils.getKafkaSinkBySchema(new KafkaSerializationSchema<JSONObject>() {
@Override
public ProducerRecord<byte[], byte[]> serialize(JSONObject jsonObject, @Nullable Long aLong) {
String topic = jsonObject.getString("sink_table");
return new ProducerRecord<byte[], byte[]>(topic, jsonObject.getJSONObject("data").toJSONString().getBytes());
}
}));
env.execute();
}
}
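/*
 * Routing sketch (illustrative, table names are examples only): TableProcessFunction
 * matches each record against the broadcast config; records flagged as dimensions
 * (e.g. base_trademark) go to the dim side output and are written to HBase via Phoenix
 * by DimSink, while fact records (e.g. order_detail) stay in the main stream and are
 * written to the Kafka topic named by their "sink_table" field, as serialize() above shows.
 */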

View File

@ -1,20 +1,33 @@
package com.atguigu.gmall.realtime.app.dwd;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.atguigu.gmall.realtime.utils.MyKafkaUtils;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.functions.RichMapFunction;
import org.apache.flink.api.common.restartstrategy.RestartStrategies;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.runtime.state.filesystem.FsStateBackend;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.CheckpointConfig;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer;
import org.apache.flink.util.Collector;
import org.apache.flink.util.OutputTag;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import java.text.SimpleDateFormat;
import java.util.Properties;
/**
@ -24,6 +37,19 @@ import java.util.Properties;
* display (exposure) logs go to the display side output
* page logs go to the page side output (the main stream)
* each stream is then written back to its own Kafka dwd topic
*
* Execution flow of the log splitting job:
* - processes that must be running:
* zk, kafka, [hdfs], logger, BaseLogApp
* - run the jar that simulates log generation
* - the generated logs are sent to Nginx
* - Nginx forwards the requests to the log-collection services on Ding202, 203 and 204
* - the collection services write the data to disk and send it to the kafka topic ods_base_log
* - BaseLogApp reads from ods_base_log and then:
* > converts the structure: String -> JSONObject
* > repairs the is_new flag (key by mid, fix with keyed state)
* > splits the stream by log type
* > writes the split streams to different Kafka dwd topics
*/
public class BaseLogApp {
@ -34,23 +60,22 @@ public class BaseLogApp {
StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
//1.2 设置并行度
env.setParallelism(4); //这里需要与kafka对应的分区相对应
env.setParallelism(2); //这里需要与kafka对应的分区相对应
//TODO 2.检查点相关设置
//2.1 开启检查点
env.enableCheckpointing(5000L, CheckpointingMode.EXACTLY_ONCE);
//2.1 设置检查点超时时间
//2.2 设置检查点超时时间
env.getCheckpointConfig().setCheckpointTimeout(60000);
//2.1 设置重启策略 这里是固定次数的重启 ->3秒钟重启一次一共最多重启3次
env.setRestartStrategy(RestartStrategies.fixedDelayRestart(3,3000L));
//2.1 设置job取消后检查点是否保留 这里设置保留检查点->
//2.3 设置重启策略 这里是固定次数的重启 ->3秒钟重启一次一共最多重启3次
env.setRestartStrategy(RestartStrategies.fixedDelayRestart(3, 3000L));
//2.4 设置job取消后检查点是否保留 这里设置保留检查点->
env.getCheckpointConfig().enableExternalizedCheckpoints(CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION);
//2.1 设置状态后端 即设置检查点的存储位置 内存|文件系统|RocksDB RocksDB类似于Redis会在内存中存一份也会进行落盘
//2.5 设置状态后端 即设置检查点的存储位置 内存|文件系统|RocksDB RocksDB类似于Redis会在内存中存一份也会进行落盘
env.setStateBackend(new FsStateBackend("hdfs://Ding202:8020/rt_gmall/gmall"));
//2 .6 指定操作HDFS的用户
System.setProperty("HADOOP_USER_NAME","dingjiawen");
//2.6 指定操作HDFS的用户
System.setProperty("HADOOP_USER_NAME", "dingjiawen");
//TODO 3.从kafka中读取数据
//3.1 声明消费的主题
@ -79,32 +104,140 @@ public class BaseLogApp {
//方法的默认实现
// kafkaDS.map(JSON::parseObject);
jsonObjectDs.print(">>>");
// jsonObjectDs.print(">>>");
//TODO 5. Repair the new/returning visitor flag -- Flink keyed state
//5.1 key the stream by device id (mid); see log.json for the record layout
KeyedStream<JSONObject, String> keyedDS = jsonObjectDs.keyBy(
jsonObject -> jsonObject.getJSONObject("common").getString("mid")
);
//5.2 repair -- the record itself is kept, only its is_new field may change, so map() is enough
SingleOutputStreamOperator<JSONObject> jsonWithNewDs = keyedDS.map(
new RichMapFunction<JSONObject, JSONObject>() {
//note: the state cannot be initialized at declaration time, because getRuntimeContext() is not available yet
private ValueState<String> lastVisitDateState;
private SimpleDateFormat sdf;//created once in open(); building it inside map() would allocate a new object per record
@Override
public void open(Configuration parameters) throws Exception {
lastVisitDateState = getRuntimeContext().getState(new ValueStateDescriptor<String>("lastVisitDatState", String.class));
sdf = new SimpleDateFormat("yyyyMMdd");
}
@Override
public JSONObject map(JSONObject jsonObject) throws Exception {
//获取新老访客状态
String isNew = jsonObject.getJSONObject("common").getString("is_new");
if ("1".equals(isNew)) {
//如果是新访客状态才有可能需要进行修复;老访客不需要修复
String lastVisitDate = lastVisitDateState.value();
String curVisitDate = sdf.format(jsonObject.getLong("ts"));
//判断上次访问是否为空
if (lastVisitDate != null && !"".equals(lastVisitDate)) {
//访问过,修复
//判断是否在同一天访问
if (!lastVisitDate.equals(curVisitDate)) {
isNew = "0";
jsonObject.getJSONObject("common").put("is_new", isNew);
}
} else {
//当前是第一次访问
lastVisitDateState.update(curVisitDate);
}
}
return jsonObject;
}
}
);
//TODO 6. Split the stream by log type -- define side-output tags and route records to different side outputs
//6.1 declare the side-output tags
//start logs -> start side output
OutputTag<String> startTag = new OutputTag<String>("start") {
};//the anonymous subclass {} is required; a plain OutputTag would erase the generic type (see the parent constructor)
//display (exposure) logs -> display side output
OutputTag<String> displayTag = new OutputTag<String>("display") {
};//same reason: keep the generic type via an anonymous subclass
//page logs -> main stream
//6.2 do the split with a process() operator, which exposes the Context needed to emit to side outputs
SingleOutputStreamOperator<String> pageDs = jsonWithNewDs.process(
new ProcessFunction<JSONObject, String>() {
@Override
public void processElement(JSONObject jsonObject, ProcessFunction<JSONObject, String>.Context context, Collector<String> collector) throws Exception {
//获取启动jsonObject
JSONObject startObject = jsonObject.getJSONObject("start");
String jsonString = jsonObject.toJSONString();
//判断是否为启动日志
if (startObject != null && startObject.size() > 0) {
//是启动日志
context.output(startTag, jsonString);
} else {
//如果不是启动日志则都是页面日志放入主流
collector.collect(jsonString);
//判断是否是曝光日志
JSONArray displays = jsonObject.getJSONArray("displays");
if (displays != null && displays.size() > 0) {
//获取时间戳和页面id
String ts = jsonObject.getString("ts");
String pageId = jsonObject.getJSONObject("page").getString("page_id");
//有曝光信息遍历数组获取每一条曝光数据
for (int i = 0; i < displays.size(); i++) {
JSONObject displayJSONObject = displays.getJSONObject(i);
//enrich each display entry and emit it to the display side output
//attach the timestamp and the page id
displayJSONObject.put("ts", ts);
displayJSONObject.put("page_id", pageId);
context.output(displayTag, displayJSONObject.toJSONString());
}
}
}
}
}
);
//6.3 获取不同流数据 输出测试
DataStream<String> startDs = pageDs.getSideOutput(startTag);
DataStream<String> displayDS = pageDs.getSideOutput(displayTag);
pageDs.print(">>>>");
startDs.print("####");
displayDS.print("&&&&");
//TODO 7. Write each stream to its own dwd topic in Kafka
// why not write to Kafka directly inside the process function instead of via sinks?
// because that would break exactly-once semantics unless we implemented two-phase commit ourselves
pageDs.addSink(
MyKafkaUtils.getKafkaSink("dwd_page_log")
);
startDs.addSink(
MyKafkaUtils.getKafkaSink("dwd_start_log")
);
displayDS.addSink(
MyKafkaUtils.getKafkaSink("dwd_display_log")
);
env.execute();
}
}
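/*
 * Worked example of the is_new repair above (illustrative values): the first event for
 * mid_12 arrives with is_new = "1" and an empty lastVisitDateState, so the state is set
 * to that day's yyyyMMdd and the flag stays "1"; if another is_new = "1" event for the
 * same mid arrives on a later day, the stored date differs from the current one and the
 * flag is rewritten to "0" before the record moves on to the type-based split.
 */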

View File

@ -0,0 +1,294 @@
// start log sample
{
"common": {
"ar": "370000",
"uid": "31",
"os": "Android 11.0",
"ch": "xiaomi",
"is_new": "0",
"md": "Xiaomi 9",
"mid": "mid_12",
"vc": "v2.1.111",
"ba": "Xiaomi"
},
"start": {
"entry": "icon",
"open_ad_skip_ms": 0,
"open_ad_ms": 1662,
"loading_time": 2531,
"open_ad_id": 4
},
"ts": 1683276802000
}
// page log sample (with displays)
{
"common": {
"ar": "370000",
"uid": "31",
"os": "Android 11.0",
"ch": "xiaomi",
"is_new": "0",
"md": "Xiaomi 9",
"mid": "mid_12",
"vc": "v2.1.111",
"ba": "Xiaomi"
},
"page": {
"page_id": "home",
"during_time": 15127
},
"displays": [
{
"display_type": "activity",
"item": "1",
"item_type": "activity_id",
"pos_id": 5,
"order": 1
},
{
"display_type": "activity",
"item": "2",
"item_type": "activity_id",
"pos_id": 5,
"order": 2
},
{
"display_type": "query",
"item": "8",
"item_type": "sku_id",
"pos_id": 4,
"order": 3
},
{
"display_type": "query",
"item": "5",
"item_type": "sku_id",
"pos_id": 3,
"order": 4
},
{
"display_type": "promotion",
"item": "8",
"item_type": "sku_id",
"pos_id": 4,
"order": 5
},
{
"display_type": "query",
"item": "5",
"item_type": "sku_id",
"pos_id": 4,
"order": 6
},
{
"display_type": "promotion",
"item": "1",
"item_type": "sku_id",
"pos_id": 1,
"order": 7
},
{
"display_type": "query",
"item": "4",
"item_type": "sku_id",
"pos_id": 2,
"order": 8
}
],
"ts": 1683276802000
}
{
"common": {
"ar": "370000",
"uid": "31",
"os": "Android 11.0",
"ch": "xiaomi",
"is_new": "0",
"md": "Xiaomi 9",
"mid": "mid_12",
"vc": "v2.1.111",
"ba": "Xiaomi"
},
"page": {
"page_id": "good_detail",
"item": "8",
"during_time": 15379,
"item_type": "sku_id",
"last_page_id": "home",
"source_type": "recommend"
},
"displays": [
{
"display_type": "query",
"item": "7",
"item_type": "sku_id",
"pos_id": 2,
"order": 1
},
{
"display_type": "query",
"item": "10",
"item_type": "sku_id",
"pos_id": 3,
"order": 2
},
{
"display_type": "promotion",
"item": "3",
"item_type": "sku_id",
"pos_id": 2,
"order": 3
},
{
"display_type": "query",
"item": "8",
"item_type": "sku_id",
"pos_id": 3,
"order": 4
},
{
"display_type": "recommend",
"item": "3",
"item_type": "sku_id",
"pos_id": 4,
"order": 5
},
{
"display_type": "query",
"item": "4",
"item_type": "sku_id",
"pos_id": 4,
"order": 6
},
{
"display_type": "query",
"item": "7",
"item_type": "sku_id",
"pos_id": 4,
"order": 7
},
{
"display_type": "promotion",
"item": "10",
"item_type": "sku_id",
"pos_id": 2,
"order": 8
},
{
"display_type": "promotion",
"item": "10",
"item_type": "sku_id",
"pos_id": 5,
"order": 9
}
],
"actions": [
{
"item": "8",
"action_id": "favor_add",
"item_type": "sku_id",
"ts": 1683276809689
}
],
"ts": 1683276802000
}
//json
{
"database": "rt_gmall",
"xid": 26161,
"data": {
"sku_num": "1",
"create_time": "2023-05-05 21:31:47",
"sku_id": 6,
"order_price": 1299.00,
"source_type": "2404",
"img_url": "http://47.93.148.192:8080/group1/M00/00/01/rBHu8l-rgJqAHPnoAAF9hoDNfsc505.jpg",
"sku_name": "Redmi 10X 4G Helio G85游戏芯 4800万超清四摄 5020mAh大电量 小孔全面屏 128GB大存储 8GB+128GB 冰雾白 游戏智能手机 小米 红米",
"id": 86497,
"source_id": 2,
"order_id": 28748,
"split_total_amount": 1299.00
},
"xoffset": 523,
"type": "insert",
"table": "order_detail",
"ts": 1683293507
}
{
"database": "rt_gmall",
"xid": 26161,
"data": {
"order_status": "1001",
"id": 74863,
"order_id": 28744,
"operate_time": "2023-05-05 21:31:47"
},
"xoffset": 529,
"type": "insert",
"table": "order_status_log",
"ts": 1683293507
}
{
"page": {
"page_id": "good_detail",
"item": "7",
"during_time": 14474,
"item_type": "sku_id",
"last_page_id": "good_list",
"source_type": "promotion"
},
"displays": [
{
"display_type": "query",
"item": "8",
"item_type": "sku_id",
"pos_id": 2,
"order": 1
},
{
"display_type": "promotion",
"item": "2",
"item_type": "sku_id",
"pos_id": 1,
"order": 2
},
{
"display_type": "query",
"item": "2",
"item_type": "sku_id",
"pos_id": 3,
"order": 3
},
{
"display_type": "promotion",
"item": "2",
"item_type": "sku_id",
"pos_id": 3,
"order": 4
},
{
"display_type": "recommend",
"item": "7",
"item_type": "sku_id",
"pos_id": 4,
"order": 5
},
{
"display_type": "promotion",
"item": "3",
"item_type": "sku_id",
"pos_id": 5,
"order": 6
}
],
"ts": 1683788701000
}

View File

@ -0,0 +1,310 @@
package com.atguigu.gmall.realtime.app.dwm;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.atguigu.gmall.realtime.app.func.DimAsyncFunction;
import com.atguigu.gmall.realtime.beans.OrderDetail;
import com.atguigu.gmall.realtime.beans.OrderInfo;
import com.atguigu.gmall.realtime.beans.OrderWide;
import com.atguigu.gmall.realtime.utils.MyKafkaUtils;
import org.apache.avro.Schema;
import org.apache.flink.api.common.eventtime.SerializableTimestampAssigner;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.functions.RichMapFunction;
import org.apache.flink.api.common.restartstrategy.RestartStrategies;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.runtime.state.filesystem.FsStateBackend;
import org.apache.flink.streaming.api.datastream.AsyncDataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.CheckpointConfig;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.async.AsyncFunction;
import org.apache.flink.streaming.api.functions.async.ResultFuture;
import org.apache.flink.streaming.api.functions.co.ProcessJoinFunction;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.util.Collector;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.time.Duration;
import java.util.Date;
import java.util.concurrent.TimeUnit;
/**
*@BelongsProject: rt-gmall-parent
*@BelongsPackage: com.atguigu.gmall.realtime.app.dwm
*@Author: markilue
*@CreateTime: 2023-05-09 14:33
*@Description: TODO order wide-table preparation
* -- Processes that need to be running:
* zk, hdfs, hbase, kafka, maxwell, redis
* BaseDBApp, OrderWideApp
* -- Execution flow:
* > run the jar that simulates the business data
* > it inserts the generated business data into the MySQL business database
* > MySQL writes the changed rows to its binlog
* > maxwell reads the binlog, wraps the data as JSON strings and sends them to the kafka ods topic ods_base_db_m
* > BaseDBApp reads from the ods_base_db_m topic and splits the stream
* & fact data ---- written back to the kafka dwd topics
* & dimension data ---- saved to the dimension tables in phoenix
* > OrderWideApp reads the order and order-detail data from the dwd topics
* > the order and order-detail streams are joined with intervalJoin
* > the user dimension is joined onto the order wide table
* * basic dimension join
* * optimization 1: side cache (Redis)
* * optimization 2: async I/O
*
*@Version: 1.0
*/
public class OrderWideApp {
public static void main(String[] args) throws Exception {
//TODO 1. set up the stream environment
StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
env.setParallelism(4);
//TODO 2. checkpoint settings
env.enableCheckpointing(5000L);
env.getCheckpointConfig().setCheckpointTimeout(3000L);
env.getCheckpointConfig().enableExternalizedCheckpoints(CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION);
env.setRestartStrategy(RestartStrategies.fixedDelayRestart(3, 5000L));
env.setStateBackend(new FsStateBackend("hdfs://Ding202:8020/rt_gmall/gmall"));
System.setProperty("HADOOP_USER_NAME", "dingjiawen");
//TODO 3. read data from kafka
String orderInfoTopic = "dwd_order_info";
String groupId = "order_wide_app_group";
DataStreamSource<String> orderInfoStrDS = env.addSource(
MyKafkaUtils.getKafkaSource(orderInfoTopic, groupId)
);
String orderDetailTopic = "dwd_order_detail";
DataStreamSource<String> orderDetailStrDS = env.addSource(
MyKafkaUtils.getKafkaSource(orderDetailTopic, groupId)
);
//TODO 4. convert the stream element type: String -> entity object
SingleOutputStreamOperator<OrderInfo> orderInfoDS = orderInfoStrDS.map(
new RichMapFunction<String, OrderInfo>() {
private SimpleDateFormat sdf;
@Override
public void open(Configuration parameters) throws Exception {
sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
}
@Override
public OrderInfo map(String s) throws Exception {
OrderInfo orderInfo = JSON.parseObject(s, OrderInfo.class);
long create_ts = sdf.parse(orderInfo.getCreate_time()).getTime();
orderInfo.setCreate_ts(create_ts);
return orderInfo;
}
}
);
SingleOutputStreamOperator<OrderDetail> orderDetailDS = orderDetailStrDS.map(
new RichMapFunction<String, OrderDetail>() {
private SimpleDateFormat sdf;
@Override
public void open(Configuration parameters) throws Exception {
sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
}
@Override
public OrderDetail map(String s) throws Exception {
OrderDetail orderDetail = JSON.parseObject(s, OrderDetail.class);
long create_ts = sdf.parse(orderDetail.getCreate_time()).getTime();
orderDetail.setCreate_ts(create_ts);
return orderDetail;
}
}
);
// orderInfoDS.print(">>>>");
// orderDetailDS.print("####");
//TODO 5. assign watermarks and extract the event-time field
//order stream
SingleOutputStreamOperator<OrderInfo> orderInfoWithWatermarkDS = orderInfoDS.assignTimestampsAndWatermarks(
WatermarkStrategy.<OrderInfo>forBoundedOutOfOrderness(Duration.ofSeconds(3))
.withTimestampAssigner(
new SerializableTimestampAssigner<OrderInfo>() {
@Override
public long extractTimestamp(OrderInfo orderInfo, long l) {
return orderInfo.getCreate_ts();
}
}
)
);
//order-detail stream
SingleOutputStreamOperator<OrderDetail> orderDetailWithWatermarkDS = orderDetailDS.assignTimestampsAndWatermarks(
WatermarkStrategy.<OrderDetail>forBoundedOutOfOrderness(Duration.ofSeconds(3))
.withTimestampAssigner(
new SerializableTimestampAssigner<OrderDetail>() {
@Override
public long extractTimestamp(OrderDetail orderDetail, long l) {
return orderDetail.getCreate_ts();
}
}
)
);
//TODO 6. key both streams by the join field ------- order_id
KeyedStream<OrderInfo, Long> orderInfoKeyedDS = orderInfoWithWatermarkDS.keyBy(OrderInfo::getId);
KeyedStream<OrderDetail, Long> orderDetailKeyedDS = orderDetailWithWatermarkDS.keyBy(OrderDetail::getOrder_id);
//TODO 7. join the two streams with intervalJoin
SingleOutputStreamOperator<OrderWide> orderWideDS = orderInfoKeyedDS
.intervalJoin(orderDetailKeyedDS)
.between(Time.seconds(-5), Time.seconds(5))
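//interval-join semantics: an order with create_ts = t is matched with every order-detail whose
//create_ts lies in [t - 5s, t + 5s]; details outside that interval are dropped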
.process(
new ProcessJoinFunction<OrderInfo, OrderDetail, OrderWide>() {
@Override
public void processElement(OrderInfo orderInfo, OrderDetail orderDetail, ProcessJoinFunction<OrderInfo, OrderDetail, OrderWide>.Context context, Collector<OrderWide> collector) throws Exception {
collector.collect(new OrderWide(orderInfo, orderDetail));
}
}
);
// orderWideDS.print(">>>");
//TODO 8. join the user dimension -> done asynchronously
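//DimAsyncFunction is not part of this diff; from its use here it is assumed to be a RichAsyncFunction
//that asynchronously looks up one row of the named Phoenix dimension table by getKey(), optionally via
//the Redis side cache mentioned in the class comment, and then passes the JSONObject to join() for enrichment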
SingleOutputStreamOperator<OrderWide> orderWideWithUserDS = AsyncDataStream.unorderedWait(
orderWideDS,
new DimAsyncFunction<OrderWide>("DIM_USER_INFO") {
@Override
public void join(OrderWide orderWide, JSONObject dimJsonObj) throws ParseException {
String gender = dimJsonObj.getString("GENDER");
//compute the age from the birthday
String birthday = dimJsonObj.getString("BIRTHDAY");//e.g. 1972-06-15
SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd");
Date birthdayDate = sdf.parse(birthday);
long betweenMs = System.currentTimeMillis() - birthdayDate.getTime();
long ageLong = betweenMs / 1000L / 60L / 60L / 24L / 365L;
int age = (int) ageLong;
//dimension join: copy the looked-up fields onto the wide record
orderWide.setUser_gender(gender);
orderWide.setUser_age(age);
}
@Override
public String getKey(OrderWide orderWide) {
return orderWide.getUser_id().toString();
}
},
60,
TimeUnit.SECONDS
);
//TODO 9. join the province (region) dimension
SingleOutputStreamOperator<OrderWide> orderWideWithProvinceDS = AsyncDataStream.unorderedWait(
orderWideWithUserDS,
new DimAsyncFunction<OrderWide>("DIM_BASE_PROVINCE") {
@Override
public void join(OrderWide orderWide, JSONObject dimJsonObj) throws Exception {
orderWide.setProvince_3166_2_code(dimJsonObj.getString("ISO_3166_2"));
orderWide.setProvince_area_code(dimJsonObj.getString("AREA_CODE"));
orderWide.setProvince_name(dimJsonObj.getString("NAME"));
orderWide.setProvince_iso_code(dimJsonObj.getString("ISO_CODE"));
}
@Override
public String getKey(OrderWide input) {
return input.getProvince_id().toString();
}
},
60, TimeUnit.SECONDS
);
//TODO 10. join the SKU dimension
SingleOutputStreamOperator<OrderWide> orderWideWithSkuDS = AsyncDataStream.unorderedWait(
orderWideWithProvinceDS,
new DimAsyncFunction<OrderWide>("DIM_SKU_INFO") {
@Override
public void join(OrderWide orderWide, JSONObject jsonObject) throws Exception {
orderWide.setSku_name(jsonObject.getString("SKU_NAME"));
orderWide.setCategory3_id(jsonObject.getLong("CATEGORY3_ID"));
orderWide.setSpu_id(jsonObject.getLong("SPU_ID"));
orderWide.setTm_id(jsonObject.getLong("TM_ID"));
}
@Override
public String getKey(OrderWide orderWide) {
return String.valueOf(orderWide.getSku_id());
}
}, 60, TimeUnit.SECONDS);
//TODO 11. join the SPU dimension
SingleOutputStreamOperator<OrderWide> orderWideWithSpuDS = AsyncDataStream.unorderedWait(
orderWideWithSkuDS,
new DimAsyncFunction<OrderWide>("DIM_SPU_INFO") {
@Override
public void join(OrderWide orderWide, JSONObject jsonObject) throws Exception {
orderWide.setSpu_name(jsonObject.getString("SPU_NAME"));
}
@Override
public String getKey(OrderWide orderWide) {
return String.valueOf(orderWide.getSpu_id());
}
}, 60, TimeUnit.SECONDS);
//TODO 12. join the category3 dimension
SingleOutputStreamOperator<OrderWide> orderWideWithCategory3DS = AsyncDataStream.unorderedWait(
orderWideWithSpuDS,
new DimAsyncFunction<OrderWide>("DIM_BASE_CATEGORY3") {
@Override
public void join(OrderWide orderWide, JSONObject jsonObject) throws Exception {
orderWide.setCategory3_name(jsonObject.getString("NAME"));
}
@Override
public String getKey(OrderWide orderWide) {
return String.valueOf(orderWide.getCategory3_id());
}
}, 60, TimeUnit.SECONDS);
//TODO 13. join the trademark (brand) dimension
SingleOutputStreamOperator<OrderWide> orderWideWithTmDS = AsyncDataStream.unorderedWait(
orderWideWithCategory3DS,
new DimAsyncFunction<OrderWide>("DIM_BASE_TRADEMARK") {
@Override
public void join(OrderWide orderWide, JSONObject jsonObject) throws Exception {
orderWide.setTm_name(jsonObject.getString("TM_NAME"));
}
@Override
public String getKey(OrderWide orderWide) {
return String.valueOf(orderWide.getTm_id());
}
}, 60, TimeUnit.SECONDS);
orderWideWithTmDS.print(">>>>>");
// orderWideWithTmDS.print(">>>>>");
//TODO 14. write the order wide-table data back to the kafka topic dwm_order_wide
//JSON.parseObject(jsonStr): parse the string into a JSONObject
//JSON.parseObject(jsonStr, Class): parse the string into an object of the given class
//JSON.toJSONString(obj): serialize the object into a JSON string
orderWideWithTmDS.map(JSON::toJSONString).addSink(
MyKafkaUtils.getKafkaSink("dwm_order_wide")
);
env.execute();
}
}

View File

@ -0,0 +1,130 @@
package com.atguigu.gmall.realtime.app.dwm;
import com.alibaba.fastjson.JSON;
import com.atguigu.gmall.realtime.beans.OrderWide;
import com.atguigu.gmall.realtime.beans.PaymentInfo;
import com.atguigu.gmall.realtime.beans.PaymentWide;
import com.atguigu.gmall.realtime.utils.DateTimeUtils;
import com.atguigu.gmall.realtime.utils.MyKafkaUtils;
import org.apache.flink.api.common.eventtime.SerializableTimestampAssigner;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.restartstrategy.RestartStrategies;
import org.apache.flink.runtime.state.filesystem.FsStateBackend;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.CheckpointConfig;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.co.ProcessJoinFunction;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.util.Collector;
import java.time.Duration;
/**
*@BelongsProject: rt-gmall-parent
*@BelongsPackage: com.atguigu.gmall.realtime.app.dwm
*@Author: markilue
*@CreateTime: 2023-05-10 19:03
*@Description: TODO payment wide-table preparation
*@Version: 1.0
*/
public class PaymentWideApp {
public static void main(String[] args) throws Exception {
//TODO 1. create the stream environment
StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
env.setParallelism(4);
//TODO 2. checkpoint configuration
env.enableCheckpointing(5000L, CheckpointingMode.EXACTLY_ONCE);
env.getCheckpointConfig().enableExternalizedCheckpoints(CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION);
env.getCheckpointConfig().setCheckpointTimeout(60000L);
env.setRestartStrategy(RestartStrategies.fixedDelayRestart(3, 3000L));
env.setStateBackend(new FsStateBackend("hdfs://Ding202:8020/rt_gmall/gmall"));
System.setProperty("HADOOP_USER_NAME", "dingjiawen");
//TODO 3. read data from kafka (the dwm_order_wide and dwd_payment_info topics)
String paymentTopic = "dwd_payment_info";
String orderWideTopic = "dwm_order_wide";
String groupId = "payment_wide_app_group";
DataStreamSource<String> paymentStrDS = env.addSource(
MyKafkaUtils.getKafkaSource(paymentTopic, groupId)
);
DataStreamSource<String> orderWideStrDS = env.addSource(
MyKafkaUtils.getKafkaSource(orderWideTopic, groupId)
);
//TODO 4. convert the data format into PaymentInfo and OrderWide objects
SingleOutputStreamOperator<PaymentInfo> paymentDS = paymentStrDS.map(
new MapFunction<String, PaymentInfo>() {
@Override
public PaymentInfo map(String s) throws Exception {
return JSON.parseObject(s, PaymentInfo.class);
}
}
);
SingleOutputStreamOperator<OrderWide> orderWideDS = orderWideStrDS.map(
orderWideStr -> JSON.parseObject(orderWideStr, OrderWide.class)
);
// paymentDS.print(">>>>");
// orderWideDS.print("$$$$");
//TODO 5. assign watermarks
SingleOutputStreamOperator<PaymentInfo> paymentInfoWithWatermarkDS = paymentDS.assignTimestampsAndWatermarks(
WatermarkStrategy.<PaymentInfo>forBoundedOutOfOrderness(Duration.ofSeconds(3))
.withTimestampAssigner(
new SerializableTimestampAssigner<PaymentInfo>() {
@Override
public long extractTimestamp(PaymentInfo paymentInfo, long l) {
return DateTimeUtils.toTs(paymentInfo.getCallback_time());
}
}
)
);
SingleOutputStreamOperator<OrderWide> orderWideWithWatermarkDS = orderWideDS.assignTimestampsAndWatermarks(
WatermarkStrategy.<OrderWide>forBoundedOutOfOrderness(Duration.ofSeconds(3))
.withTimestampAssigner(
new SerializableTimestampAssigner<OrderWide>() {
@Override
public long extractTimestamp(OrderWide orderWide, long l) {
return DateTimeUtils.toTs(orderWide.getCreate_time());
}
}
)
);
//TODO 6. key both streams by the join field (order_id)
KeyedStream<PaymentInfo, Long> paymentInfoKeyedDS = paymentInfoWithWatermarkDS.keyBy(PaymentInfo::getOrder_id);
KeyedStream<OrderWide, Long> orderWideKeyedDS = orderWideWithWatermarkDS.keyBy(OrderWide::getOrder_id);
//TODO 7. join the two streams
SingleOutputStreamOperator<PaymentWide> paymentWideDS = paymentInfoKeyedDS
.intervalJoin(orderWideKeyedDS)
.between(Time.seconds(-1800), Time.seconds(0))//look back half an hour
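//interval-join semantics: a payment with callback time t is matched with order-wide records whose
//create_time lies in [t - 1800s, t], i.e. the order was created within the 30 minutes before the payment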
.process(
new ProcessJoinFunction<PaymentInfo, OrderWide, PaymentWide>() {
@Override
public void processElement(PaymentInfo paymentInfo, OrderWide orderWide, ProcessJoinFunction<PaymentInfo, OrderWide, PaymentWide>.Context context, Collector<PaymentWide> collector) throws Exception {
collector.collect(new PaymentWide(paymentInfo, orderWide));
}
}
);
paymentWideDS.print(">>>>");
//TODO 8. write the payment wide-table data to the kafka topic dwm_payment_wide
paymentWideDS
.map(paymentWide -> JSON.toJSONString(paymentWide))
.addSink(
MyKafkaUtils.getKafkaSink("dwm_payment_wide")
);
env.execute();
}
}

View File

@ -0,0 +1,185 @@
package com.atguigu.gmall.realtime.app.dwm;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.atguigu.gmall.realtime.utils.MyKafkaUtils;
import org.apache.flink.api.common.functions.RichFilterFunction;
import org.apache.flink.api.common.restartstrategy.RestartStrategies;
import org.apache.flink.api.common.state.StateTtlConfig;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.api.common.time.Time;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.runtime.state.filesystem.FsStateBackend;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.CheckpointConfig;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import java.text.SimpleDateFormat;
/**
*@BelongsProject: rt-gmall-parent
*@BelongsPackage: com.atguigu.gmall.realtime.app.dwm
*@Author: markilue
*@CreateTime: 2023-05-08 16:56
*@Description:
* TODO unique visitor (UV) calculation
* Processes that need to be running:
* zk, kafka, logger, hadoop, the log-simulator jar, UniqueVisitorApp, BaseLogApp
* Execution flow:
* 1. run the jar that simulates log generation
* 2. the simulator sends the generated log data to nginx for load balancing
* 3. nginx forwards the requests to the three log-collection services
* 4. the three log-collection servers receive the log data and send it to the kafka topic ods_base_log
* 5. the BaseLogApp application reads from ods_base_log and splits the stream
* > start-up logs --dwd_start_log
* > display logs --dwd_display_log
* > page logs --dwd_page_log
* 6. UniqueVisitorApp reads from the dwd_page_log topic
* > filters the pv (page-view) data
* > keys the stream by mid
* > filters the data with a filter operator
* > the filter keeps the last visit date in a state variable
* >
*
*@Version: 1.0
*/
public class UniqueVisitorApp {
public static void main(String[] args) throws Exception {
//TODO 1. basic environment setup
//1.1 stream processing environment
StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
//1.2 set the parallelism
env.setParallelism(1);
//TODO 2. checkpoint settings
//2.1 enable checkpointing
env.enableCheckpointing(5000L, CheckpointingMode.EXACTLY_ONCE);
//2.2 set the checkpoint timeout
env.getCheckpointConfig().setCheckpointTimeout(60000);
//2.3 set the restart strategy
env.setRestartStrategy(RestartStrategies.fixedDelayRestart(3, 3000L));
//2.4 decide whether checkpoints are retained after the job is cancelled
env.getCheckpointConfig().enableExternalizedCheckpoints(CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION);
//2.5 set the state backend: memory / file system / RocksDB
env.setStateBackend(new FsStateBackend("hdfs://Ding202:8020/rt_gmall/gmall"));
//2.6 specify the user that accesses HDFS
System.setProperty("HADOOP_USER_NAME", "dingjiawen");
//TODO 3. read data from kafka
//3.1 declare the topic to consume and the consumer group
String topic = "dwd_page_log";
String groupId = "unique_visitor_app_group";
//3.2 get the kafka consumer object
FlinkKafkaConsumer<String> kafkaSource = MyKafkaUtils.getKafkaSource(topic, groupId);
//3.3 read the data and wrap it into a stream
DataStreamSource<String> kafkaDS = env.addSource(kafkaSource);
// kafkaDS.print(">>>");
//TODO 4. convert the consumed data: String -> JSONObject
SingleOutputStreamOperator<JSONObject> jsonDS = kafkaDS.map(JSON::parseObject);
// jsonDS.print(">>>>");
//TODO 5. key the stream by device id (mid)
/*
{
"common": {
"ar": "420000",
"uid": "49",
"os": "iOS 13.2.3",
"ch": "Appstore",
"is_new": "1",
"md": "iPhone Xs",
"mid": "mid_13",
"vc": "v2.1.134",
"ba": "iPhone"
},
"page": {
"page_id": "payment",
"item": "1,8",
"during_time": 1329,
"item_type": "sku_ids",
"last_page_id": "trade"
},
"ts": 1683278828000
}
*/
KeyedStream<JSONObject, String> keyedDS = jsonDS.keyBy(jsonObject -> jsonObject.getJSONObject("common").getString("mid"));
//TODO 6. the filtering logic
SingleOutputStreamOperator<JSONObject> filterDs = keyedDS.filter(
new RichFilterFunction<JSONObject>() {
//state variable that stores the last visit date
private ValueState<String> lastVisitDateState;
//date formatting helper
private SimpleDateFormat sdf;
@Override
public void open(Configuration parameters) throws Exception {
sdf = new SimpleDateFormat("yyyyMMdd");
//note: UV can be extended to daily active users; the state only has to tell whether the device already visited today, so it is useless once the day is over
//therefore the state TTL is set to 1 day
ValueStateDescriptor<String> valueStateDescriptor = new ValueStateDescriptor<>("lastVisitDate", String.class);
StateTtlConfig ttlConfig = StateTtlConfig.newBuilder(Time.days(1))
.setUpdateType(StateTtlConfig.UpdateType.OnCreateAndWrite)//the TTL timer is refreshed on create and write (the default)
.setStateVisibility(StateTtlConfig.StateVisibility.NeverReturnExpired)//never return expired state, even if it has not been garbage-collected yet (also the default)
.build();//builder pattern
valueStateDescriptor.enableTimeToLive(ttlConfig);
lastVisitDateState = getRuntimeContext().getState(valueStateDescriptor);
}
@Override
public boolean filter(JSONObject jsonObject) throws Exception {
//if the page was reached from another page it is not a visit entry, filter it out
String lastPageId = jsonObject.getJSONObject("page").getString("last_page_id");
if (lastPageId != null && lastPageId.length() > 0) {
return false;
}
//get the last visit date from state
String lastPageDate = lastVisitDateState.value();
String curVisitDate = sdf.format(jsonObject.getLong("ts"));
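//e.g. with the JVM in UTC+8, ts = 1683278828000 formats to "20230505", so two visits on the same
//calendar day produce the same string and the second one is filtered out below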
if (lastPageDate != null && lastPageDate.length() > 0 && lastPageDate.equals(curVisitDate)) {
//already visited today
return false;
} else {
//not visited yet today: update the state and keep the record
lastVisitDateState.update(curVisitDate);
return true;
}
}
}
);
filterDs.print(">>>>");
//TODO 7. write the filtered UV data back to the dwm layer in kafka
filterDs.map(jsonObject -> jsonObject.toJSONString()).addSink(
MyKafkaUtils.getKafkaSink("dwm_unique_visitor")
);//the JSONObject must be converted to a String before addSink
env.execute();
}
}

View File

@ -0,0 +1,229 @@
package com.atguigu.gmall.realtime.app.dwm;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.atguigu.gmall.realtime.utils.MyKafkaUtils;
import org.apache.flink.api.common.eventtime.SerializableTimestampAssigner;
import org.apache.flink.api.common.eventtime.WatermarkGenerator;
import org.apache.flink.api.common.eventtime.WatermarkGeneratorSupplier;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.restartstrategy.RestartStrategies;
import org.apache.flink.cep.*;
import org.apache.flink.cep.functions.PatternProcessFunction;
import org.apache.flink.cep.functions.TimedOutPartialMatchHandler;
import org.apache.flink.cep.pattern.Pattern;
import org.apache.flink.cep.pattern.conditions.SimpleCondition;
import org.apache.flink.runtime.state.filesystem.FsStateBackend;
import org.apache.flink.streaming.api.datastream.*;
import org.apache.flink.streaming.api.environment.CheckpointConfig;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.util.Collector;
import org.apache.flink.util.OutputTag;
import java.util.List;
import java.util.Map;
/**
*@BelongsProject: rt-gmall-parent
*@BelongsPackage: com.atguigu.gmall.realtime.app.dwm
*@Author: markilue
*@CreateTime: 2023-05-08 20:17
*@Description: TODO user jump (bounce) detail statistics
*@Version: 1.0
*/
public class UserJumpDetailApp {
public static void main(String[] args) throws Exception {
//TODO 1. create the stream environment
StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
env.setParallelism(4);
//TODO 2. checkpoint settings
env.enableCheckpointing(5000L);
env.getCheckpointConfig().setCheckpointTimeout(60000);
env.getCheckpointConfig().enableExternalizedCheckpoints(CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION);
env.setRestartStrategy(RestartStrategies.fixedDelayRestart(3, 3000L));
env.setStateBackend(new FsStateBackend("hdfs://Ding202:8020/rt_gmall/gmall"));
System.setProperty("HADOOP_USER_NAME", "dingjiawen");
//TODO 3. read data from kafka
String topic = "dwd_page_log";
String groupId = "user_jump_detail_app_group";
DataStreamSource<String> kafkaDS = env.addSource(MyKafkaUtils.getKafkaSource(topic, groupId));
// DataStream<String> kafkaDS = env
// .fromElements(
// "{\"common\":{\"mid\":\"101\"},\"page\":{\"page_id\":\"home\"},\"ts\":10000} ",
// "{\"common\":{\"mid\":\"102\"},\"page\":{\"page_id\":\"home\"},\"ts\":12000}",
// "{\"common\":{\"mid\":\"102\"},\"page\":{\"page_id\":\"good_list\",\"last_page_id\":" +
// "\"home\"},\"ts\":15000} ",
// "{\"common\":{\"mid\":\"102\"},\"page\":{\"page_id\":\"good_list\",\"last_page_id\":" +
// "\"detail\"},\"ts\":30000} "
// );
//TODO 4.map String ->JSONObject
SingleOutputStreamOperator<JSONObject> jsonDS = kafkaDS.map(JSON::parseObject);
// jsonDS.print(">>>");
//TODO 5. assign watermarks and extract the event-time field
SingleOutputStreamOperator<JSONObject> jsonObjWithWatermarkDS = jsonDS.assignTimestampsAndWatermarks(
WatermarkStrategy.<JSONObject>forMonotonousTimestamps()
.withTimestampAssigner(
new SerializableTimestampAssigner<JSONObject>() {
@Override
public long extractTimestamp(JSONObject jsonObject, long l) {
return jsonObject.getLong("ts");
}
}
)
);
jsonObjWithWatermarkDS.print("###");
//TODO 6. key the stream by mid
KeyedStream<JSONObject, String> keyedDS = jsonObjWithWatermarkDS.keyBy(jsonObject -> jsonObject.getJSONObject("common").getString("mid"));
/*
{
"common": {
"ar": "370000",
"uid": "44",
"os": "iOS 13.3.1",
"ch": "Appstore",
"is_new": "1",
"md": "iPhone X",
"mid": "mid_13",
"vc": "v2.1.132",
"ba": "iPhone"
},
"page": {
"page_id": "cart",
"during_time": 14540,
"last_page_id": "good_detail"
},
"ts": 1683289583000
}
*/
//TODO 7. define the CEP pattern
Pattern<JSONObject, JSONObject> pattern = Pattern.<JSONObject>begin("first").where(
new SimpleCondition<JSONObject>() {
@Override
public boolean filter(JSONObject jsonObject) {
//condition 1: last_page_id is empty, i.e. this is the entry page of a visit
String lastPage = jsonObject.getJSONObject("page").getString("last_page_id");
return lastPage == null || lastPage.length() == 0;
}
}
).next("second").where(
new SimpleCondition<JSONObject>() {
//the visit opened another page of the site
@Override
public boolean filter(JSONObject jsonObject) {
//in fact, as soon as a second event reaches this method the visit has a second page view, so this could simply return true
String pageId = jsonObject.getJSONObject("page").getString("page_id");
return pageId != null && pageId.length() > 0;
}
}
).within(Time.seconds(10));//10 seconds; data that times out (no following page view) is put into the side-output stream
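//a jump (bounce) is therefore a visit whose first page (no last_page_id) is not followed by another
//page view within 10 seconds; CEP reports such visits as timed-out partial matches, extracted below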
//TODO 8. apply the pattern to the stream
PatternStream<JSONObject> patternDS = CEP.pattern(keyedDS, pattern);
//TODO 9. extract data from the pattern stream
//9.1 define the side-output tag; FlinkCEP puts timed-out partial matches into the side output
OutputTag<String> timeoutTag = new OutputTag<String>("timeout") {
};
//9.2 extract the data
//option 1: a PatternProcessFunction
// patternDS.process(
// new MyPatternProcessFunction(outputTag)
// );
//option 2 --results can only be passed through the method return values
// patternDS.select(
// timeoutTag,
// //handle timed-out data
// new PatternTimeoutFunction<JSONObject, String>() {
// @Override
// public String timeout(Map<String, List<JSONObject>> map, long l) throws Exception {
// return null;
// }
// },
// //handle non-timed-out (matched) data
// new PatternSelectFunction<JSONObject, String>() {
// @Override
// public String select(Map<String, List<JSONObject>> map) throws Exception {
// return null;
// }
// }
// );
//option 3 --results can be emitted through a Collector
SingleOutputStreamOperator<String> timeoutDS = patternDS.flatSelect(
timeoutTag,
//handle timed-out data
new PatternFlatTimeoutFunction<JSONObject, String>() {
@Override
public void timeout(Map<String, List<JSONObject>> map, long l, Collector<String> collector) throws Exception {
//the timeout case is exactly the jump (bounce) we want to count
List<JSONObject> jsonObjectList = map.get("first");
for (JSONObject jsonObject : jsonObjectList) {
collector.collect(jsonObject.toJSONString());
}
}
},
//handle fully matched data
new PatternFlatSelectFunction<JSONObject, String>() {
@Override
public void flatSelect(Map<String, List<JSONObject>> map, Collector<String> collector) throws Exception {
//fully matched data means a normal page-to-page jump, which is out of scope here
}
}
);
//9.3 get the timed-out (jump) data from the side output
DataStream<String> jumpDS = timeoutDS.getSideOutput(timeoutTag);
jumpDS.print(">>>>");
//TODO 10. write the jump details to the dwm topic in kafka
DataStreamSink<String> dwm_user_jump_detailDS = jumpDS.addSink(
MyKafkaUtils.getKafkaSink("dwm_user_jump_detail")
);
env.execute();
}
}
class MyPatternProcessFunction extends PatternProcessFunction<JSONObject, JSONObject> implements TimedOutPartialMatchHandler<JSONObject> {
private OutputTag<String> outputTag;
public MyPatternProcessFunction(OutputTag outputTag) {
this.outputTag = outputTag;
}
//handle matched data
@Override
public void processMatch(Map<String, List<JSONObject>> match, Context ctx, Collector<JSONObject> out) throws Exception {
//the map holds <name of the pattern step, list of events that matched that step>
}
@Override
public void processTimedOutMatch(Map<String, List<JSONObject>> match, Context ctx) throws Exception {
List<JSONObject> jsonObjectList = match.get("first");
for (JSONObject jsonObject : jsonObjectList) {
ctx.output(outputTag, jsonObject.toJSONString());
}
}
}

View File

@ -0,0 +1,104 @@
package com.atguigu.gmall.realtime.app.dws;
import com.atguigu.gmall.realtime.app.func.KeywordUDTF;
import com.atguigu.gmall.realtime.beans.GmallConstant;
import com.atguigu.gmall.realtime.beans.KeywordStats;
import com.atguigu.gmall.realtime.utils.ClickhouseUtils;
import com.atguigu.gmall.realtime.utils.MyKafkaUtils;
import org.apache.flink.api.common.restartstrategy.RestartStrategies;
import org.apache.flink.runtime.state.filesystem.FsStateBackend;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.CheckpointConfig;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.TableResult;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
/**
*@BelongsProject: rt-gmall-parent
*@BelongsPackage: com.atguigu.gmall.realtime.app.dws
*@Author: markilue
*@CreateTime: 2023-05-12 14:27
*@Description: TODO keyword statistics (DWS)
*@Version: 1.0
*/
public class KeywordStatsApp {
public static void main(String[] args) throws Exception {
//TODO 1. basic environment setup
StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
EnvironmentSettings settings = EnvironmentSettings.newInstance().inStreamingMode().build();
StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env, settings);
env.setParallelism(2);
//TODO 2. checkpoint settings
env.enableCheckpointing(5000L);
env.getCheckpointConfig().setCheckpointTimeout(5000L);
env.getCheckpointConfig().enableExternalizedCheckpoints(CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION);
env.setRestartStrategy(RestartStrategies.fixedDelayRestart(3, 3000L));
env.setStateBackend(new FsStateBackend("hdfs://Ding202:8020/rt_gmall/gmall"));
System.setProperty("HADOOP_USER_NAME", "dingjiawen");
//TODO register the custom UDTF function (ik_analyze)
tableEnv.createTemporarySystemFunction("ik_analyze", KeywordUDTF.class);
//TODO 3. read data from kafka
String topic = "dwd_page_log";
String groupId = "keyword_stats_app_group";
String createSQl = "create table page_view" +
"(" +
" common MAP<STRING, STRING>," +
" page MAP<STRING, STRING>," +
" ts BIGINT," +
" rowtime AS TO_TIMESTAMP(FROM_UNIXTIME(ts/1000,'yyyy-MM-dd HH:mm:ss'))," +
" WATERMARK FOR rowtime as rowtime - INTERVAL '3' SECOND" +
")" +
" WITH (" + MyKafkaUtils.getKafkaDDL(topic, groupId) + ")";
System.out.println(createSQl);
tableEnv.executeSql(createSQl);
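//MyKafkaUtils.getKafkaDDL(topic, groupId) is not part of this diff; it is assumed to return the kafka
//SQL-connector options for the WITH clause, roughly:
//  'connector' = 'kafka', 'topic' = '<topic>', 'properties.bootstrap.servers' = '<brokers>',
//  'properties.group.id' = '<groupId>', 'scan.startup.mode' = 'latest-offset', 'format' = 'json'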
//TODO 4. filter out the rows of the dynamic table that represent search behaviour
String filterSQL = "select " +
" page['item'] fullword,rowtime " +
"from" +
" page_view " +
"where " +
" page['page_id'] = 'good_list' and page['item'] is not null ";
// System.out.println(filterSQL);
Table fullwordTable = tableEnv.sqlQuery(filterSQL);
//TODO 5. use the custom UDTF to split the search text into keywords
//concatenating the Table object into the SQL string effectively registers fullwordTable under a generated name
Table keywordTable = tableEnv.sqlQuery("select rowtime ,keyword from " + fullwordTable + ", LATERAL TABLE(ik_analyze(fullword)) AS T(keyword)");
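//KeywordUDTF is not shown in this diff; judging from its registration as "ik_analyze" it is assumed to
//be a TableFunction<Row> annotated with @FunctionHint(output = @DataTypeHint("ROW<keyword STRING>"))
//that segments the full search text with the IK analyzer and calls collect(Row.of(word)) once per keyword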
//TODO 6. group, window and aggregate
String selectSQL ="select DATE_FORMAT(TUMBLE_START(rowtime, INTERVAL '10' SECOND), 'yyyy-MM-dd HH:mm:ss') as stt," +
" DATE_FORMAT(TUMBLE_END(rowtime, INTERVAL '10' SECOND), 'yyyy-MM-dd HH:mm:ss') as edt," +
" keyword," +
" count(*) ct," +
" '" + GmallConstant.KEYWORD_SEARCH + "' source," +
" UNIX_TIMESTAMP() * 1000 ts " +
"from " + keywordTable + " " +
"group by " +
" TUMBLE(rowtime, INTERVAL '10' SECOND)," +
" keyword";
System.out.println(selectSQL);
Table resTable = tableEnv.sqlQuery(selectSQL);
//TODO 7. convert the table into a stream
DataStream<KeywordStats> keywordStatsDS = tableEnv.toAppendStream(resTable, KeywordStats.class);
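//toAppendStream is valid here because a TUMBLE window emits each group exactly once when the window
//closes, so the result table is append-only (no retractions)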
keywordStatsDS.print(">>>>");
//TODO 8. write the stream into ClickHouse (CK)
keywordStatsDS.addSink(
ClickhouseUtils.getJdbcSink("insert into keyword_stats(keyword,ct,source,stt,edt,ts) values(?,?,?,?,?,?)")
);
env.execute();
}
}

View File

@ -0,0 +1,426 @@
package com.atguigu.gmall.realtime.app.dws;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.atguigu.gmall.realtime.app.func.DimAsyncFunction;
import com.atguigu.gmall.realtime.beans.GmallConstant;
import com.atguigu.gmall.realtime.beans.OrderWide;
import com.atguigu.gmall.realtime.beans.PaymentWide;
import com.atguigu.gmall.realtime.beans.ProductStats;
import com.atguigu.gmall.realtime.utils.ClickhouseUtils;
import com.atguigu.gmall.realtime.utils.DateTimeUtils;
import com.atguigu.gmall.realtime.utils.MyKafkaUtils;
import org.apache.flink.api.common.eventtime.SerializableTimestampAssigner;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.api.common.functions.RichMapFunction;
import org.apache.flink.api.common.restartstrategy.RestartStrategies;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.runtime.state.filesystem.FsStateBackend;
import org.apache.flink.streaming.api.datastream.*;
import org.apache.flink.streaming.api.environment.CheckpointConfig;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.streaming.api.functions.windowing.ProcessWindowFunction;
import org.apache.flink.streaming.api.windowing.assigners.TumblingProcessingTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.util.Collector;
import java.time.Duration;
import java.util.Collections;
import java.util.Date;
import java.util.HashSet;
import java.util.concurrent.TimeUnit;
/**
*@BelongsProject: rt-gmall-parent
*@BelongsPackage: com.atguigu.gmall.realtime.app.dws
*@Author: markilue
*@CreateTime: 2023-05-11 15:47
*@Description: TODO product topic statistics (DWS)
*
*@Version: 1.0
*/
public class ProductStatsApp {
public static void main(String[] args) throws Exception {
//TODO 1. set up the stream environment
StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
env.setParallelism(4);
//TODO 2. checkpoint settings
env.enableCheckpointing(5000L);
env.getCheckpointConfig().setCheckpointTimeout(3000L);
env.getCheckpointConfig().enableExternalizedCheckpoints(CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION);
env.setRestartStrategy(RestartStrategies.fixedDelayRestart(3, 5000L));
env.setStateBackend(new FsStateBackend("hdfs://Ding202:8020/rt_gmall/gmall"));
System.setProperty("HADOOP_USER_NAME", "dingjiawen");
//TODO 3. read data from kafka (7 topics)
//3.1 declare the consumer topics and the consumer group
String groupId = "product_stats_app";
String pageViewSourceTopic = "dwd_page_log";
String orderWideSourceTopic = "dwm_order_wide";
String paymentWideSourceTopic = "dwm_payment_wide";
String cartInfoSourceTopic = "dwd_cart_info";
String favorInfoSourceTopic = "dwd_favor_info";
String refundInfoSourceTopic = "dwd_order_refund_info";
String commentInfoSourceTopic = "dwd_comment_info";
//create the consumer objects and wrap them into streams
DataStreamSource<String> pageViewSourceStrDS = env.addSource(
MyKafkaUtils.getKafkaSource(pageViewSourceTopic, groupId)
);
DataStreamSource<String> cartInfoSourceStrDS = env.addSource(
MyKafkaUtils.getKafkaSource(cartInfoSourceTopic, groupId)
);
DataStreamSource<String> favorInfoSourceStrDS = env.addSource(
MyKafkaUtils.getKafkaSource(favorInfoSourceTopic, groupId)
);
DataStreamSource<String> refundInfoSourceStrDS = env.addSource(
MyKafkaUtils.getKafkaSource(refundInfoSourceTopic, groupId)
);
DataStreamSource<String> commentInfoSourceStrDS = env.addSource(
MyKafkaUtils.getKafkaSource(commentInfoSourceTopic, groupId)
);
DataStreamSource<String> orderWideSourceStrDS = env.addSource(
MyKafkaUtils.getKafkaSource(orderWideSourceTopic, groupId)
);
DataStreamSource<String> paymentWideSourceStrDS = env.addSource(
MyKafkaUtils.getKafkaSource(paymentWideSourceTopic, groupId)
);
//TODO 4. convert the stream elements: jsonStr -> ProductStats
//a page-log record can carry both a click and several displays, so map (exactly one output per input) does not fit; process is used instead
//4.1 convert the click and display stream
SingleOutputStreamOperator<ProductStats> clickAndDisplayStatsDS = pageViewSourceStrDS.process(
/*
{
"page": {
"page_id": "good_detail",
"item": "7",
"during_time": 14474,
"item_type": "sku_id",
"last_page_id": "good_list",
"source_type": "promotion"
},
"displays": [
{
"display_type": "query",
"item": "8",
"item_type": "sku_id",
"pos_id": 2,
"order": 1
},
],
"ts": 1683788701000
}
*/
new ProcessFunction<String, ProductStats>() {
@Override
public void processElement(String jsonStr, ProcessFunction<String, ProductStats>.Context context, Collector<ProductStats> collector) throws Exception {
JSONObject jsonObject = JSON.parseObject(jsonStr);
Long ts = jsonObject.getLong("ts");
//check whether the record is a click
JSONObject page = jsonObject.getJSONObject("page");
String pageId = page.getString("page_id");
if ("good_detail".equals(pageId)) {
//if the page of this log record is the product detail page, the record is treated as a click
Long skuId = page.getLong("item");
ProductStats productStats = ProductStats.builder()
.sku_id(skuId)
.click_ct(1L)
.ts(ts)
.build();
collector.collect(productStats);
}
//check whether the record carries displays (exposures)
JSONArray displays = jsonObject.getJSONArray("displays");
if (displays != null && displays.size() > 0) {
//if the displays array is not empty, the page has display records; iterate over all of them
for (int i = 0; i < displays.size(); i++) {
JSONObject displayJsonObj = displays.getJSONObject(i);
//only count displays whose item is a product (sku)
if ("sku_id".equals(displayJsonObj.getString("item_type"))) {
Long itemId = displayJsonObj.getLong("item");
ProductStats productStats = ProductStats.builder()
.sku_id(itemId)
.display_ct(1L)
.ts(ts)
.build();
collector.collect(productStats);
}
}
}
}
}
);
//4.2 convert the favor (add-to-favorites) stream
SingleOutputStreamOperator<ProductStats> favorInfoStatsDS = favorInfoSourceStrDS.map(
new MapFunction<String, ProductStats>() {
@Override
public ProductStats map(String jsonStr) throws Exception {
JSONObject jsonObject = JSON.parseObject(jsonStr);
ProductStats productStats = ProductStats.builder()
.sku_id(jsonObject.getLong("sku_id"))
.favor_ct(1L)
.ts(DateTimeUtils.toTs(jsonObject.getString("create_time")))
.build();
return productStats;
}
}
);
//4.3 convert the add-to-cart stream
SingleOutputStreamOperator<ProductStats> cartInfoStatsDS = cartInfoSourceStrDS.map(
new MapFunction<String, ProductStats>() {
@Override
public ProductStats map(String jsonStr) throws Exception {
JSONObject jsonObject = JSON.parseObject(jsonStr);
ProductStats productStats = ProductStats.builder()
.sku_id(jsonObject.getLong("sku_id"))
.cart_ct(1L)
.ts(DateTimeUtils.toTs(jsonObject.getString("create_time")))
.build();
return productStats;
}
}
);
//4.4 convert the refund stream (note: refund orders are counted, not items, so the order ids are collected into a set for deduplication)
SingleOutputStreamOperator<ProductStats> refundInfoStatsDS = refundInfoSourceStrDS.map(
new MapFunction<String, ProductStats>() {
@Override
public ProductStats map(String jsonStr) throws Exception {
JSONObject jsonObject = JSON.parseObject(jsonStr);
ProductStats productStats = ProductStats.builder()
.sku_id(jsonObject.getLong("sku_id"))
.refundOrderIdSet(new HashSet(Collections.singleton(jsonObject.getLong("order_id"))))
.refund_amount(jsonObject.getBigDecimal("refund_amount"))
.ts(DateTimeUtils.toTs(jsonObject.getString("create_time")))
.build();
return productStats;
}
}
);
//4.5 convert the comment (appraisal) stream
SingleOutputStreamOperator<ProductStats> commentInfoStatsDS = commentInfoSourceStrDS.map(
new MapFunction<String, ProductStats>() {
@Override
public ProductStats map(String jsonStr) throws Exception {
JSONObject jsonObject = JSON.parseObject(jsonStr);
Long goodCt = GmallConstant.APPRAISE_GOOD.equals(jsonObject.getString("appraise")) ? 1L : 0L;
ProductStats productStats = ProductStats.builder()
.sku_id(jsonObject.getLong("sku_id"))
.comment_ct(1L)
.good_comment_ct(goodCt)
.ts(DateTimeUtils.toTs(jsonObject.getString("create_time")))
.build();
return productStats;
}
}
);
//4.6 convert the order wide-table stream
SingleOutputStreamOperator<ProductStats> orderWideStatsDS = orderWideSourceStrDS.map(
new MapFunction<String, ProductStats>() {
@Override
public ProductStats map(String jsonStr) throws Exception {
OrderWide orderWide = JSON.parseObject(jsonStr, OrderWide.class);
ProductStats productStats = ProductStats.builder()
.sku_id(orderWide.getSku_id())
.order_sku_num(orderWide.getSku_num())
.order_amount(orderWide.getSplit_total_amount())
.ts(DateTimeUtils.toTs(orderWide.getCreate_time()))
.orderIdSet(new HashSet(Collections.singleton(orderWide.getOrder_id())))
.build();
return productStats;
}
}
);
//4.7 convert the payment wide-table stream
SingleOutputStreamOperator<ProductStats> paymentWideStatsDS = paymentWideSourceStrDS.map(
new MapFunction<String, ProductStats>() {
@Override
public ProductStats map(String jsonStr) throws Exception {
PaymentWide paymentWide = JSON.parseObject(jsonStr, PaymentWide.class);
ProductStats productStats = ProductStats.builder()
.sku_id(paymentWide.getSku_id())
.payment_amount(paymentWide.getSplit_total_amount())
.paidOrderIdSet(new HashSet(Collections.singleton(paymentWide.getOrder_id())))
.ts(DateTimeUtils.toTs(paymentWide.getCallback_time()))
.build();
return productStats;
}
}
);
//TODO 5. merge the different streams with union
DataStream<ProductStats> unionDS = clickAndDisplayStatsDS.union(
favorInfoStatsDS,
cartInfoStatsDS,
refundInfoStatsDS,
commentInfoStatsDS,
orderWideStatsDS,
paymentWideStatsDS
);
// unionDS.print(">>>>>>");
//TODO 6. assign watermarks and extract the event-time field
SingleOutputStreamOperator<ProductStats> productStatsWithWatermarkDS = unionDS.
assignTimestampsAndWatermarks(
WatermarkStrategy.<ProductStats>forBoundedOutOfOrderness(Duration.ofSeconds(3))
.withTimestampAssigner(
new SerializableTimestampAssigner<ProductStats>() {
@Override
public long extractTimestamp(ProductStats productStats, long l) {
// System.out.println(productStats);
return productStats.getTs();
}
}
)
);
//TODO 7. key the stream; note: only the order and payment wide tables already carry product dimension data, the other streams do not, so sku_id is used as the grouping key
KeyedStream<ProductStats, Long> keyedDS = productStatsWithWatermarkDS.keyBy(ProductStats::getSku_id);
//TODO 8. open the window
WindowedStream<ProductStats, Long, TimeWindow> windowDS = keyedDS.window(TumblingProcessingTimeWindows.of(Time.seconds(10)));
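//note: TumblingProcessingTimeWindows is used here although watermarks were assigned in step 6;
//with processing-time windows the event-time field and the watermark are effectively unused for windowing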
//TODO 9. aggregate
SingleOutputStreamOperator<ProductStats> reduceDS = windowDS.reduce(
new ReduceFunction<ProductStats>() {
@Override
public ProductStats reduce(ProductStats productStats1, ProductStats productStats2) throws Exception {
productStats1.setDisplay_ct(productStats1.getDisplay_ct() + productStats2.getDisplay_ct());
productStats1.setClick_ct(productStats1.getClick_ct() + productStats2.getClick_ct());
productStats1.setCart_ct(productStats1.getCart_ct() + productStats2.getCart_ct());
productStats1.setFavor_ct(productStats1.getFavor_ct() + productStats2.getFavor_ct());
productStats1.setOrder_amount(productStats1.getOrder_amount().add(productStats2.getOrder_amount()));
productStats1.getOrderIdSet().addAll(productStats2.getOrderIdSet());
productStats1.setOrder_ct(productStats1.getOrderIdSet().size() + 0L);
productStats1.setOrder_sku_num(productStats1.getOrder_sku_num() + productStats2.getOrder_sku_num());
productStats1.setPayment_amount(productStats1.getPayment_amount().add(productStats2.getPayment_amount()));
productStats1.getRefundOrderIdSet().addAll(productStats2.getRefundOrderIdSet());
productStats1.setRefund_order_ct(productStats1.getRefundOrderIdSet().size() + 0L);
productStats1.setRefund_amount(productStats1.getRefund_amount().add(productStats2.getRefund_amount()));
productStats1.getPaidOrderIdSet().addAll(productStats2.getPaidOrderIdSet());
productStats1.setPaid_order_ct(productStats1.getPaidOrderIdSet().size() + 0L);
productStats1.setComment_ct(productStats1.getComment_ct() + productStats2.getComment_ct());
productStats1.setGood_comment_ct(productStats1.getGood_comment_ct() + productStats2.getGood_comment_ct());
return productStats1;
}
},
new ProcessWindowFunction<ProductStats, ProductStats, Long, TimeWindow>() {
@Override
public void process(Long aLong, ProcessWindowFunction<ProductStats, ProductStats, Long, TimeWindow>.Context context, Iterable<ProductStats> iterable, Collector<ProductStats> collector) throws Exception {
for (ProductStats productStats : iterable) {
long start = context.window().getStart();
long end = context.window().getEnd();
productStats.setStt(DateTimeUtils.toYMDHMS(new Date(start)));
productStats.setEdt(DateTimeUtils.toYMDHMS(new Date(end)));
productStats.setTs(System.currentTimeMillis());
collector.collect(productStats);
}
}
}
);
//TODO 10. fill in the product dimension information
//10.1 fill in the SKU dimension
SingleOutputStreamOperator<ProductStats> productStatsWithSKUDS = AsyncDataStream.unorderedWait(
reduceDS,
new DimAsyncFunction<ProductStats>("DIM_SKU_INFO") {
@Override
public void join(ProductStats input, JSONObject dimJsonObj) throws Exception {
input.setSku_name(dimJsonObj.getString("SKU_NAME"));
input.setSku_price(dimJsonObj.getBigDecimal("PRICE"));
input.setSpu_id(dimJsonObj.getLong("SPU_ID"));
input.setTm_id(dimJsonObj.getLong("TM_ID"));
input.setCategory3_id(dimJsonObj.getLong("CATEGORY3_ID"));
}
@Override
public String getKey(ProductStats input) {
return input.getSku_id().toString();
}
},
60, TimeUnit.SECONDS
);
//10.2 fill in the SPU dimension
SingleOutputStreamOperator<ProductStats> productStatsWithSpuDS =
AsyncDataStream.unorderedWait(productStatsWithSKUDS,
new DimAsyncFunction<ProductStats>("DIM_SPU_INFO") {
@Override
public void join(ProductStats productStats, JSONObject jsonObject) throws Exception {
productStats.setSpu_name(jsonObject.getString("SPU_NAME"));
}
@Override
public String getKey(ProductStats productStats) {
return String.valueOf(productStats.getSpu_id());
}
}, 60, TimeUnit.SECONDS);
//10.3 fill in the category dimension
SingleOutputStreamOperator<ProductStats> productStatsWithCategory3DS =
AsyncDataStream.unorderedWait(productStatsWithSpuDS,
new DimAsyncFunction<ProductStats>("DIM_BASE_CATEGORY3") {
@Override
public void join(ProductStats productStats, JSONObject jsonObject) throws Exception {
productStats.setCategory3_name(jsonObject.getString("NAME"));
}
@Override
public String getKey(ProductStats productStats) {
return String.valueOf(productStats.getCategory3_id());
}
}, 60, TimeUnit.SECONDS);
//10.4 fill in the trademark (brand) dimension
SingleOutputStreamOperator<ProductStats> productStatsWithTmDS =
AsyncDataStream.unorderedWait(productStatsWithCategory3DS,
new DimAsyncFunction<ProductStats>("DIM_BASE_TRADEMARK") {
@Override
public void join(ProductStats productStats, JSONObject jsonObject) throws Exception {
productStats.setTm_name(jsonObject.getString("TM_NAME"));
}
@Override
public String getKey(ProductStats productStats) {
return String.valueOf(productStats.getTm_id());
}
}, 60, TimeUnit.SECONDS);
productStatsWithTmDS.print(">>>>>");
//TODO 11. write the results to clickhouse
productStatsWithTmDS.addSink(
ClickhouseUtils.getJdbcSink("insert into product_stats values(?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)")
);
env.execute();
}
}

View File

@ -0,0 +1,102 @@
package com.atguigu.gmall.realtime.app.dws;
import com.atguigu.gmall.realtime.beans.ProvinceStats;
import com.atguigu.gmall.realtime.utils.ClickhouseUtils;
import com.atguigu.gmall.realtime.utils.MyKafkaUtils;
import org.apache.flink.api.common.restartstrategy.RestartStrategies;
import org.apache.flink.runtime.state.filesystem.FsStateBackend;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.CheckpointConfig;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
/**
*@BelongsProject: rt-gmall-parent
*@BelongsPackage: com.atguigu.gmall.realtime.app.dws
*@Author: markilue
*@CreateTime: 2023-05-11 19:51
*@Description: TODO province (region) topic statistics --SQL
*@Version: 1.0
*/
public class ProvinceStatsApp {
public static void main(String[] args) throws Exception {
//TODO 1. environment setup
//1.1 stream processing environment
StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
//1.2 table execution environment
EnvironmentSettings settings = EnvironmentSettings.newInstance().inStreamingMode().build();//streaming mode
StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env, settings);
//1.3 set the parallelism
env.setParallelism(4);
//TODO 2. checkpoint settings
env.enableCheckpointing(5000L);
env.getCheckpointConfig().setCheckpointTimeout(5000L);
env.getCheckpointConfig().enableExternalizedCheckpoints(CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION);
env.setRestartStrategy(RestartStrategies.fixedDelayRestart(3, 3000L));
env.setStateBackend(new FsStateBackend("hdfs://Ding202:8020/rt_gmall/gmall"));
System.setProperty("HADOOP_USER_NAME", "dingjiawen");
//TODO 3. read from the source (kafka) and convert it into a dynamic table
//read from the order wide-table topic
String topic = "dwm_order_wide";
String groupId = "province_stats_app_group";
String createSQL = "CREATE TABLE order_wide (" +
" province_id BIGINT," +
" province_name STRING," +
" province_area_code STRING," +
" province_iso_code STRING," +
" province_3166_2_code STRING," +
" order_id STRING," +
" split_total_amount DOUBLE," +
" create_time STRING," +
" rowtime as TO_TIMESTAMP(create_time) ," +
" WATERMARK FOR rowtime as rowtime - INTERVAL '3' SECOND " +
") WITH (" + MyKafkaUtils.getKafkaDDL(topic, groupId) + ")";
// System.out.println(createSQL);
tableEnv.executeSql(createSQL);
//TODO 4. group, window and aggregate
String selectSQL = "select " +
" DATE_FORMAT(TUMBLE_START(rowtime,INTERVAL '10' SECOND),'yyyy-MM-dd HH:mm:ss') as stt," +
" DATE_FORMAT(TUMBLE_END(rowtime,INTERVAL '10' SECOND),'yyyy-MM-dd HH:mm:ss') as edt," +
" province_id," +
" province_name," +
" province_area_code area_code," +
" province_iso_code iso_code," +
" province_3166_2_code iso_3166_2," +
" count(distinct order_id) order_count," +
" sum(split_total_amount) order_amount," +
" UNIX_TIMESTAMP() * 1000 as ts" +
" from " +
" order_wide" +
" group by " +
" TUMBLE(rowtime,INTERVAL '10' SECOND)," +
" province_id," +
" province_name," +
" province_area_code," +
" province_iso_code," +
" province_3166_2_code";
// System.out.println(selectSQL);
Table orderWideTable = tableEnv.sqlQuery(selectSQL);
//TODO 5. convert the dynamic table into a stream
DataStream<ProvinceStats> provinceStatsDS = tableEnv.toAppendStream(orderWideTable, ProvinceStats.class);
provinceStatsDS.print(">>>>>");
//TODO 6. write the stream into Clickhouse
provinceStatsDS.addSink(
ClickhouseUtils.getJdbcSink("insert into province_stats values(?,?,?,?,?,?,?,?,?,?)")
);
env.execute();
}
}

View File

@ -0,0 +1,299 @@
package com.atguigu.gmall.realtime.app.dws;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.atguigu.gmall.realtime.beans.VisitorStats;
import com.atguigu.gmall.realtime.utils.ClickhouseUtils;
import com.atguigu.gmall.realtime.utils.DateTimeUtils;
import com.atguigu.gmall.realtime.utils.MyKafkaUtils;
import org.apache.flink.api.common.eventtime.SerializableTimestampAssigner;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.api.common.restartstrategy.RestartStrategies;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.api.java.tuple.Tuple4;
import org.apache.flink.connector.jdbc.JdbcConnectionOptions;
import org.apache.flink.connector.jdbc.JdbcExecutionOptions;
import org.apache.flink.connector.jdbc.JdbcSink;
import org.apache.flink.connector.jdbc.JdbcStatementBuilder;
import org.apache.flink.runtime.state.filesystem.FsStateBackend;
import org.apache.flink.streaming.api.datastream.*;
import org.apache.flink.streaming.api.environment.CheckpointConfig;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.windowing.ProcessWindowFunction;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.util.Collector;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.time.Duration;
import java.util.Date;
/**
*@BelongsProject: rt-gmall-parent
*@BelongsPackage: com.atguigu.gmall.realtime.app.dws
*@Author: markilue
*@CreateTime: 2023-05-10 21:04
*@Description: TODO visitor topic statistics (DWS)
* Test flow:
* -Processes that need to be running:
* zk,kafka,hdfs,logger.sh,clickhouse,
* BaseLogApp,UniqueVisitorApp,UserJumpDetailApp
*@Version: 1.0
*/
public class VisitorStatsApp {
public static void main(String[] args) throws Exception {
//TODO 1. set up the stream environment
StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
env.setParallelism(4);
//TODO 2. checkpoint settings
env.enableCheckpointing(5000L);
env.getCheckpointConfig().setCheckpointTimeout(3000L);
env.getCheckpointConfig().enableExternalizedCheckpoints(CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION);
env.setRestartStrategy(RestartStrategies.fixedDelayRestart(3, 5000L));
env.setStateBackend(new FsStateBackend("hdfs://Ding202:8020/rt_gmall/gmall"));
System.setProperty("HADOOP_USER_NAME", "dingjiawen");
//TODO 3. read data from kafka
String groupId = "visitor_stats_app_group";
String pageLogTopic = "dwd_page_log";
DataStreamSource<String> pageLogStrDS = env.addSource(
MyKafkaUtils.getKafkaSource(pageLogTopic, groupId)
);
String uniqueVisitTopic = "dwm_unique_visitor";
DataStreamSource<String> uniqueVisitStrDS = env.addSource(
MyKafkaUtils.getKafkaSource(uniqueVisitTopic, groupId)
);
String userJumpDetailDetailTopic = "dwm_user_jump_detail";
DataStreamSource<String> userJumpDetailDetailStrDS = env.addSource(
MyKafkaUtils.getKafkaSource(userJumpDetailDetailTopic, groupId)
);
// pageLogStrDS.print(">>>>");
// uniqueVisitStrDS.print("$$$$");
// userJumpDetailDetailStrDS.print("&&&&");
//TODO 4. convert the stream elements and wrap them into streams: String -> VisitorStats
//4.1 convert the data of the dwd_page_log stream
SingleOutputStreamOperator<VisitorStats> pvStateDS = pageLogStrDS.map(
/*
{
"common": {
"ar": "440000",
"uid": "9",
"os": "Android 11.0",
"ch": "vivo",
"is_new": "1",
"md": "Xiaomi 9",
"mid": "mid_10",
"vc": "v2.1.132",
"ba": "Xiaomi"
},
"page": {
"page_id": "cart",
"during_time": 2832,
"last_page_id": "good_detail"
},
"ts": 1683724788000
}
*/
new MapFunction<String, VisitorStats>() {
@Override
public VisitorStats map(String s) throws Exception {
JSONObject jsonObject = JSON.parseObject(s);
JSONObject commonJsonObj = jsonObject.getJSONObject("common");
JSONObject pageJsonObj = jsonObject.getJSONObject("page");
VisitorStats visitorStats = new VisitorStats(
"",
"",
commonJsonObj.getString("vc"),
commonJsonObj.getString("ch"),
commonJsonObj.getString("ar"),
commonJsonObj.getString("is_new"),
0L,
1L,
0L,
0L,
pageJsonObj.getLong("during_time"),
jsonObject.getLong("ts")
);
//check whether this is a new session (no last_page_id)
String lastPageId = pageJsonObj.getString("last_page_id");
if (lastPageId == null || lastPageId.length() == 0) {
visitorStats.setSv_ct(1L);
}
return visitorStats;
}
}
);
//4.2 convert the data of the dwm_unique_visitor stream
SingleOutputStreamOperator<VisitorStats> uvStateDS = uniqueVisitStrDS.map(
/*
{
"common": {
"ar": "440000",
"uid": "9",
"os": "Android 11.0",
"ch": "vivo",
"is_new": "1",
"md": "Xiaomi 9",
"mid": "mid_10",
"vc": "v2.1.132",
"ba": "Xiaomi"
},
"page": {
"page_id": "cart",
"during_time": 2832,
"last_page_id": "good_detail"
},
"ts": 1683724788000
}
*/
new MapFunction<String, VisitorStats>() {
@Override
public VisitorStats map(String s) throws Exception {
JSONObject jsonObject = JSON.parseObject(s);
JSONObject commonJsonObj = jsonObject.getJSONObject("common");
JSONObject pageJsonObj = jsonObject.getJSONObject("page");
VisitorStats visitorStats = new VisitorStats(
"",
"",
commonJsonObj.getString("vc"),
commonJsonObj.getString("ch"),
commonJsonObj.getString("ar"),
commonJsonObj.getString("is_new"),
1L,
0L,
0L,
0L,
0L,
jsonObject.getLong("ts")
);
return visitorStats;
}
}
);
//4.3 convert the data of the dwm_user_jump_detail stream
SingleOutputStreamOperator<VisitorStats> ujdStateDS = userJumpDetailDetailStrDS.map(
new MapFunction<String, VisitorStats>() {
@Override
public VisitorStats map(String s) throws Exception {
JSONObject jsonObject = JSON.parseObject(s);
JSONObject commonJsonObj = jsonObject.getJSONObject("common");
JSONObject pageJsonObj = jsonObject.getJSONObject("page");
VisitorStats visitorStats = new VisitorStats(
"",
"",
commonJsonObj.getString("vc"),
commonJsonObj.getString("ch"),
commonJsonObj.getString("ar"),
commonJsonObj.getString("is_new"),
0L,
0L,
0L,
1L,
0L,
jsonObject.getLong("ts")
);
//判断是否为新的会话
String lastPageId = pageJsonObj.getString("last_page_id");
if (lastPageId == null || lastPageId.length() == 0) {
visitorStats.setSv_ct(1L);
}
return visitorStats;
}
}
);
//TODO 5. Union the three converted streams into one stream
DataStream<VisitorStats> unionDS = pvStateDS.union(uvStateDS, ujdStateDS);
// unionDS.print(">>>>>>");
//TODO 6.指定watermark以及提取事件时间字段
SingleOutputStreamOperator<VisitorStats> visitorStatWithWatermarkDS = unionDS
.assignTimestampsAndWatermarks(
WatermarkStrategy.<VisitorStats>forBoundedOutOfOrderness(Duration.ofSeconds(3))
.withTimestampAssigner(
new SerializableTimestampAssigner<VisitorStats>() {
@Override
public long extractTimestamp(VisitorStats visitorStats, long l) {
return visitorStats.getTs();
}
}
)
);
//TODO 7. Key the stream by its dimensions (version, channel, area, new/returning visitor flag), so the grouping key is a Tuple4
KeyedStream<VisitorStats, Tuple4<String, String, String, String>> keyedDS = visitorStatWithWatermarkDS.keyBy(
new KeySelector<VisitorStats, Tuple4<String, String, String, String>>() {
@Override
public Tuple4<String, String, String, String> getKey(VisitorStats visitorStats) throws Exception {
return Tuple4.of(
visitorStats.getVc(),
visitorStats.getCh(),
visitorStats.getAr(),
visitorStats.getIs_new());
}
}
);
//TODO 8. Window the keyed data: each group gets its own independent tumbling window, and groups do not affect each other
WindowedStream<VisitorStats, Tuple4<String, String, String, String>, TimeWindow> windowDS = keyedDS.window(TumblingEventTimeWindows.of(Time.seconds(10)));
//TODO 9.聚合计算 对窗口中的数据进行聚合计算
SingleOutputStreamOperator<VisitorStats> reduceDS = windowDS.reduce(
new ReduceFunction<VisitorStats>() {
@Override
public VisitorStats reduce(VisitorStats visitorStats1, VisitorStats visitorStats2) throws Exception {
//度量值两两聚合
visitorStats1.setPv_ct(visitorStats1.getPv_ct() + visitorStats2.getPv_ct());
visitorStats1.setUv_ct(visitorStats1.getUv_ct() + visitorStats2.getUv_ct());
visitorStats1.setUj_ct(visitorStats1.getUj_ct() + visitorStats2.getUj_ct());
visitorStats1.setSv_ct(visitorStats1.getSv_ct() + visitorStats2.getSv_ct());
visitorStats1.setDur_sum(visitorStats1.getDur_sum() + visitorStats2.getDur_sum());
return visitorStats1;
}
},
new ProcessWindowFunction<VisitorStats, VisitorStats, Tuple4<String, String, String, String>, TimeWindow>() {
@Override
public void process(Tuple4<String, String, String, String> tuple4, ProcessWindowFunction<VisitorStats, VisitorStats, Tuple4<String, String, String, String>, TimeWindow>.Context context, Iterable<VisitorStats> iterable, Collector<VisitorStats> collector) throws Exception {
//补全时间字段
long start = context.window().getStart();
long end = context.window().getEnd();
for (VisitorStats visitorStats : iterable) {
visitorStats.setStt(DateTimeUtils.toYMDHMS(new Date(start)));
visitorStats.setEdt(DateTimeUtils.toYMDHMS(new Date(end)));
visitorStats.setTs(System.currentTimeMillis());
//将处理之后的数据发送到下游
collector.collect(visitorStats);
}
}
}
);
reduceDS.print(">>>>>");
//TODO 10.将聚合统计之后的数据写到clickhouse
DataStreamSink<VisitorStats> visitorStatsDataStreamSink = reduceDS.addSink(
ClickhouseUtils.getJdbcSink("insert into visitor_stats values(?,?,?,?,?,?,?,?,?,?,?,?)")
);
env.execute();
}
}
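The job above is driven entirely by the three Kafka topics. For a quick local smoke test, the page-log source can be swapped for a bounded element source; the sketch below is only an illustration under that assumption and reuses the sample record quoted in the step-4 comment.
// Local-test sketch: replace the Kafka page-log source with a bounded element source
// so the map/keyBy/window/reduce chain can be exercised without a Kafka cluster.
DataStreamSource<String> pageLogStrDS = env.fromElements(
        "{\"common\":{\"ar\":\"440000\",\"ch\":\"vivo\",\"is_new\":\"1\",\"vc\":\"v2.1.132\"},"
                + "\"page\":{\"page_id\":\"cart\",\"during_time\":2832,\"last_page_id\":\"good_detail\"},"
                + "\"ts\":1683724788000}");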

View File

@ -0,0 +1,129 @@
-- 省份订单表
create table order_wide
(
province_id BIGINT,
province_name STRING,
province_area_code STRING,
province_iso_code STRING,
province_3166_2_code STRING,
order_id STRING,
split_total_amount DOUBLE,
create_time STRING,
rowtime as TO_TIMESTAMP(create_time),
WATERMARK FOR rowtime as rowtime - INTERVAL '3' SECOND
)
WITH (
'connector' = 'kafka',
'topic' = 'dwm_order_wide',
'properties.bootstrap.servers' = 'Ding202:9092,Ding203:9092,Ding204:9092',
'properties.group.id' = 'province_stats_app_group',
'scan.startup.mode' = 'latest-offset',
'format' = 'json')
-- 开窗聚合计算
select DATE_FORMAT(TUMBLE_START(rowtime, INTERVAL '10' SECOND), 'yyyy-MM-dd HH:mm:ss') as stt,
DATE_FORMAT(TUMBLE_END(rowtime, INTERVAL '10' SECOND), 'yyyy-MM-dd HH:mm:ss') as edt,
province_id,
province_name,
province_area_code area_code,
province_iso_code iso_code,
province_3166_2_code iso_3166_2,
count(distinct order_id) order_count,
sum(split_total_amount) order_amount,
UNIX_TIMESTAMP() * 1000 ts
from order_wide
group by TUMBLE(rowtime, INTERVAL '10' SECOND),
province_id,
province_name,
province_area_code,
province_iso_code,
province_3166_2_code
-- 关键词主题统计动态表
create table page_view
(
common MAP<STRING,
STRING>,
page MAP<STRING,
STRING>,
ts BIGINT,
rowtime AS TO_TIMESTAMP(FROM_UNIXTIME(ts/1000,'yyyy-MM-dd HH:mm:ss')),
WATERMARK FOR rowtime as rowtime - INTERVAL '3' SECOND
)
WITH (
'connector' = 'kafka',
'topic' = 'dwm_page_log',
'properties.bootstrap.servers' = 'Ding202:9092,Ding203:9092,Ding204:9092',
'properties.group.id' = 'keyword_stats_app_group',
'scan.startup.mode' = 'latest-offset',
'format' = 'json')
-- 将动态表中表示搜索行为的记录过滤出来
select page['item'] fullword,
rowtime
from page_view
where page['page_id'] = 'good_list'
and page['item'] is not null
-- Use the UDTF to split the full search phrase into individual keywords
/*
    Before the split there is one row per search phrase:
        fullword                 rowtime
        <full search phrase>     20210814
    T is the alias of the temporary table produced by LATERAL TABLE and keyword is its output column:
        select rowtime, keyword from fullwordTable, LATERAL TABLE((fullword)) AS T(keyword)
    After the split there is one row per keyword, all sharing the original rowtime:
        keyword      rowtime
        <keyword1>   20210814
        <keyword2>   20210814
        <keyword3>   20210814
*/
select rowtime, keyword
from fullwordTable,
LATERAL TABLE((fullword)) AS T(keyword)
-- 分组开窗聚合计算
select DATE_FORMAT(TUMBLE_START(rowtime, INTERVAL '10' SECOND), 'yyyy-MM-dd HH:mm:ss') as stt,
DATE_FORMAT(TUMBLE_END(rowtime, INTERVAL '10' SECOND), 'yyyy-MM-dd HH:mm:ss') as edt,
keyword,
count(*) ct,
'KEYWORD_SEARCH' source,
UNIX_TIMESTAMP() * 1000 ts
from order_wide
group by TUMBLE(rowtime, INTERVAL '10' SECOND),
keyword
select DATE_FORMAT(TUMBLE_START(rowtime, INTERVAL '10' SECOND), 'yyyy-MM-dd HH:mm:ss') as stt,
DATE_FORMAT(TUMBLE_END(rowtime, INTERVAL '10' SECOND), 'yyyy-MM-dd HH:mm:ss') as edt,
keyword,
count(*) ct,
'SEARCH' source,
UNIX_TIMESTAMP() * 1000 ts
from UnnamedTable$1
group by TUMBLE(rowtime, INTERVAL '10' SECOND), keyword
create table page_view
(
common MAP<STRING,STRING>,
page MAP<STRING,STRING>,
ts BIGINT,
rowtime AS TO_TIMESTAMP(FROM_UNIXTIME(ts/1000,'yyyy-MM-dd HH:mm:ss')),
WATERMARK FOR rowtime as rowtime - INTERVAL '3' SECOND
)
WITH (
'connector' = 'kafka',
'topic' = 'dwd_page_log',
'properties.bootstrap.servers' = 'Ding202:9092,Ding203:9092,Ding204:9092',
'properties.group.id' = 'keyword_stats_app_group',
'scan.startup.mode' = 'latest-offset',
'format' = 'json')
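These statements are meant to be submitted through the Table API. The Java wiring sketch below is an assumption-level illustration: it presumes a StreamExecutionEnvironment named env, imports for StreamTableEnvironment, Table and Row, and reuses MyKafkaUtils.getKafkaDDL from this repository; the topic and group id are the ones used above.
// Wiring sketch: register the order_wide source table, run the province aggregation, print the result.
StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);
tableEnv.executeSql("create table order_wide ("
        + " province_id BIGINT, province_name STRING, province_area_code STRING,"
        + " province_iso_code STRING, province_3166_2_code STRING, order_id STRING,"
        + " split_total_amount DOUBLE, create_time STRING,"
        + " rowtime as TO_TIMESTAMP(create_time),"
        + " WATERMARK FOR rowtime as rowtime - INTERVAL '3' SECOND"
        + ") WITH (" + MyKafkaUtils.getKafkaDDL("dwm_order_wide", "province_stats_app_group") + ")");
Table provinceStatsTable = tableEnv.sqlQuery(
        "select DATE_FORMAT(TUMBLE_START(rowtime, INTERVAL '10' SECOND), 'yyyy-MM-dd HH:mm:ss') stt,"
        + " DATE_FORMAT(TUMBLE_END(rowtime, INTERVAL '10' SECOND), 'yyyy-MM-dd HH:mm:ss') edt,"
        + " province_id, province_name, province_area_code area_code,"
        + " province_iso_code iso_code, province_3166_2_code iso_3166_2,"
        + " count(distinct order_id) order_count, sum(split_total_amount) order_amount,"
        + " UNIX_TIMESTAMP() * 1000 ts"
        + " from order_wide"
        + " group by TUMBLE(rowtime, INTERVAL '10' SECOND), province_id, province_name,"
        + " province_area_code, province_iso_code, province_3166_2_code");
// A TUMBLE window aggregation is append-only, so the result converts to a plain DataStream.
tableEnv.toAppendStream(provinceStatsTable, Row.class).print(">>>>");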

View File

@ -0,0 +1,79 @@
package com.atguigu.gmall.realtime.app.func;
import com.alibaba.fastjson.JSONObject;
import com.atguigu.gmall.realtime.utils.DimUtils;
import com.atguigu.gmall.realtime.utils.ThreadPoolUtils;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.functions.async.AsyncFunction;
import org.apache.flink.streaming.api.functions.async.ResultFuture;
import org.apache.flink.streaming.api.functions.async.RichAsyncFunction;
import java.text.ParseException;
import java.util.Collections;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.ThreadPoolExecutor;
/**
*@BelongsProject: rt-gmall-parent
*@BelongsPackage: com.atguigu.gmall.realtime.app.func
*@Author: markilue
*@CreateTime: 2023-05-10 15:17
*@Description: TODO Asynchronous dimension join
* Template method pattern:
* the parent class defines the skeleton of the algorithm and defers the concrete steps to its subclasses;
* each subclass can supply its own implementation without changing the parent's skeleton
*@Version: 1.0
*/
public abstract class DimAsyncFunction<T> extends RichAsyncFunction<T, T> implements DimJoinFunction<T> {
ExecutorService executorService;
private String tableName;
public DimAsyncFunction(String tableName) {
this.tableName = tableName;
}
@Override
public void open(Configuration parameters) throws Exception {
//创建线程池对象
executorService = ThreadPoolUtils.getInstance();
}
//发送异步请求,完成维度关联
//通过创建多线程的方式 发送异步请求
//asyncInvoke每处理一条流中的数据 都会执行一次
@Override
public void asyncInvoke(T input, ResultFuture<T> resultFuture) throws Exception {
//通过线程池对象获取线程
executorService.submit(
new Runnable() {
//TODO 在run中的代码就是异步维度关联的操作
@Override
public void run() {
try {
long start = System.currentTimeMillis();
//从对象获取维度关联的key
String key = getKey(input);
//根据key到维度表中获取维度对象
JSONObject dimJsonObj = DimUtils.getDimInfoWithCache(tableName, key);
//把维度对象的属性赋值给流中对象属性(维度关联)
if (dimJsonObj != null) {
join(input, dimJsonObj);
}
long end = System.currentTimeMillis();
System.out.println("维度异步查询耗时:" + (end - start) + "毫秒");
resultFuture.complete(Collections.singleton(input));//将input转换为一个集合向下游传递
} catch (Exception e) {
e.printStackTrace();
System.out.println("维度异步查询发生异常");
}
}
}
);
}
}
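A minimal usage sketch of the template-method pattern described above: an anonymous subclass supplies getKey and join, and the function is attached with Flink's AsyncDataStream. The stream variable, dimension table name and column name are assumptions for illustration only.
// Sketch (assumed names): asynchronously enrich an OrderWide stream with user dimension data.
SingleOutputStreamOperator<OrderWide> orderWideWithUserDS = AsyncDataStream.unorderedWait(
        orderWideDS,
        new DimAsyncFunction<OrderWide>("DIM_USER_INFO") {
            @Override
            public String getKey(OrderWide orderWide) {
                // the dimension table is looked up by its primary key
                return String.valueOf(orderWide.getUser_id());
            }
            @Override
            public void join(OrderWide orderWide, JSONObject dimJsonObj) {
                // copy the needed dimension attributes onto the stream element
                orderWide.setUser_gender(dimJsonObj.getString("GENDER"));
            }
        },
        60, TimeUnit.SECONDS);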

View File

@ -0,0 +1,22 @@
package com.atguigu.gmall.realtime.app.func;
import com.alibaba.fastjson.JSONObject;
import java.text.ParseException;
/**
*@BelongsProject: rt-gmall-parent
*@BelongsPackage: com.atguigu.gmall.realtime.app.func
*@Author: markilue
*@CreateTime: 2023-05-10 16:24
*@Description: TODO
*@Version: 1.0
*/
public interface DimJoinFunction<T> {
//怎么进行关联
void join(T input, JSONObject dimJsonObj) throws Exception;
//关联的id
String getKey(T input);
}

View File

@ -0,0 +1,79 @@
package com.atguigu.gmall.realtime.app.func;
import com.alibaba.fastjson.JSONObject;
import com.atguigu.gmall.realtime.common.GmallConfig;
import com.atguigu.gmall.realtime.utils.DimUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.functions.sink.RichSinkFunction;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.util.Map;
import java.util.Set;
/**
*@BelongsProject: rt-gmall-parent
*@BelongsPackage: com.atguigu.gmall.realtime.app.func
*@Author: markilue
*@CreateTime: 2023-05-08 14:45
*@Description: TODO 将维度侧输出流的数据写到Hbase(phoenix)
*@Version: 1.0
*/
public class DimSink extends RichSinkFunction<JSONObject> {
//声明连接对象
private Connection connection;
@Override
public void open(Configuration parameters) throws Exception {
Class.forName("org.apache.phoenix.jdbc.PhoenixDriver");
connection = DriverManager.getConnection(GmallConfig.PHOENIX_SERVER);
}
@Override
public void invoke(JSONObject jsonObject, Context context) {
//{"database":"rt_gmall","xid":17654,"data":{"tm_name":"dadaaa","id":13},"commit":true,"sink_table":"dim_base_trademark","type":"insert","table":"base_trademark","ts":1683527538}
//获取维度表表名
String tableName = jsonObject.getString("sink_table");
JSONObject data = jsonObject.getJSONObject("data");
//拼接插入语句 upsert into 表空间.表名 values(xx,xx,xx)
String upsertSQL = generateSQL(tableName, data);
System.out.println("向phoenix维度表中插入数据:" + upsertSQL);
//创建数据库对象
try (PreparedStatement ps = connection.prepareStatement(upsertSQL)) {
ps.executeUpdate();
//TODO Note: commit explicitly (MySQL auto-commits by default, while Phoenix defaults to manual commit)
connection.commit();
} catch (SQLException e) {
e.printStackTrace();
throw new RuntimeException("向phoenix维度表中插入数据失败");
}
//如果当前维度数据做的是删除或者修改数据,那么清空redis中缓存的维度数据
if (jsonObject.getString("type").equals("update") || jsonObject.getString("type").equals("delete")) {
//那么清空Redis中缓存的维度数据
DimUtils.deleteCached(tableName, data.getString("id"));
}
}
private String generateSQL(String tableName, JSONObject data) {
String key = StringUtils.join(data.keySet(), ",");
String value = StringUtils.join(data.values(), "','");
String sql = "upsert into " + GmallConfig.HBASE_SCHEMA + "." + tableName
+ "(" + key + ")" +
" values('" + value + "')";
//upsert into GMALL_REALTIME.dim_base_trademark(tm_name,id) values('asdas','12')
return sql;
}
}

View File

@ -0,0 +1,38 @@
package com.atguigu.gmall.realtime.app.func;
import com.atguigu.gmall.realtime.utils.KeywordUtils;
import org.apache.flink.table.annotation.DataTypeHint;
import org.apache.flink.table.annotation.FunctionHint;
import org.apache.flink.table.functions.TableFunction;
import org.apache.flink.types.Row;
import java.util.List;
/**
*@BelongsProject: rt-gmall-parent
*@BelongsPackage: com.atguigu.gmall.realtime.app.func
*@Author: markilue
*@CreateTime: 2023-05-12 14:19
*@Description: TODO Custom Flink UDTF that splits a full search phrase into multiple keywords
*@Version: 1.0
*/
@FunctionHint(output = @DataTypeHint("ROW<word STRING>")) //返回值有几列,列的类型是什么;Row表示返回值的结果
public class KeywordUDTF extends TableFunction<Row> {
//从参数的个数和类型,判断调用哪一个eval函数
public void eval(String text) {
List<String> keywordList = KeywordUtils.analyze(text);
for (String keyword : keywordList) {
collect(Row.of(keyword));
}
}
// public void eval(String str,int a) {
// for (String s : str.split(" ")) {
// // use collect(...) to emit a row
// collect(Row.of(s, s.length()));
// }
// }
}
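A sketch of how this UDTF could be registered and called from Flink SQL; the registered name ik_analyze and the table/column names are assumptions, and the output column follows the @FunctionHint declaration above.
// Register the UDTF, then expand every full search phrase into one row per keyword.
tableEnv.createTemporarySystemFunction("ik_analyze", KeywordUDTF.class);
Table keywordTable = tableEnv.sqlQuery(
        "select rowtime, keyword"
        + " from fullwordTable, LATERAL TABLE(ik_analyze(fullword)) AS T(keyword)");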

View File

@ -0,0 +1,57 @@
package com.atguigu.gmall.realtime.app.func;
import com.alibaba.fastjson.JSONObject;
import com.alibaba.ververica.cdc.debezium.DebeziumDeserializationSchema;
import io.debezium.data.Envelope;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.util.Collector;
import org.apache.kafka.connect.data.Field;
import org.apache.kafka.connect.data.Struct;
import org.apache.kafka.connect.source.SourceRecord;
/**
*@BelongsProject: rt-gmall-parent
*@BelongsPackage: com.atguigu.gmall.realtime.app.func
*@Author: markilue
*@CreateTime: 2023-05-06 17:39
*@Description: TODO 自定义反序列化器
*@Version: 1.0
*/
public class MyDeserializationSchemaFunction implements DebeziumDeserializationSchema<String> {
@Override
public void deserialize(SourceRecord sourceRecord, Collector collector) throws Exception {
Struct valueStruct = (Struct) sourceRecord.value();//use Kafka Connect's Struct here: Flink CDC is built on Debezium, and Debezium represents change records as Kafka Connect Structs
Struct source = valueStruct.getStruct("source");
String database = source.getString("db");
String table = source.getString("table");
//类型
String type = Envelope.operationFor(sourceRecord).toString().toLowerCase();//内部通过枚举类将op转为对应的string
if (type.equals("create")) {
type = "insert";
}
//获取影响的数据data
JSONObject jsonObject = new JSONObject();
jsonObject.put("database", database);
jsonObject.put("table", table);
jsonObject.put("type", type);
JSONObject dataObject = new JSONObject();
Struct after = valueStruct.getStruct("after");
if(after!=null){
for (Field field : after.schema().fields()) {
String fieldName = field.name();
Object fieldValue = after.get(field);
dataObject.put(fieldName, fieldValue);
}
}
jsonObject.put("data",dataObject);
collector.collect(jsonObject.toJSONString());
}
@Override
public TypeInformation<String> getProducedType() {
return TypeInformation.of(String.class);
}
}
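A sketch of plugging this deserializer into the Flink CDC MySQL source; the connection details (host, port, credentials, database and table) are placeholder assumptions for illustration.
// Sketch (assumed connection details): a MySQL CDC source that emits the JSON strings
// produced by MyDeserializationSchemaFunction.
SourceFunction<String> mysqlCdcSource = MySQLSource.<String>builder()
        .hostname("Ding202")
        .port(3306)
        .databaseList("rt_gmall_realtime")
        .tableList("rt_gmall_realtime.table_process")
        .username("root")
        .password("123456")
        .deserializer(new MyDeserializationSchemaFunction())
        .startupOptions(StartupOptions.initial())
        .build();
DataStreamSource<String> configDS = env.addSource(mysqlCdcSource);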

View File

@ -0,0 +1,199 @@
package com.atguigu.gmall.realtime.app.func;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.atguigu.gmall.realtime.beans.TableProcess;
import com.atguigu.gmall.realtime.common.GmallConfig;
import org.apache.flink.api.common.state.BroadcastState;
import org.apache.flink.api.common.state.MapStateDescriptor;
import org.apache.flink.api.common.state.ReadOnlyBroadcastState;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.functions.co.BroadcastProcessFunction;
import org.apache.flink.util.Collector;
import org.apache.flink.util.OutputTag;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.util.*;
/**
*@BelongsProject: rt-gmall-parent
*@BelongsPackage: com.atguigu.gmall.realtime.app.func
*@Author: markilue
*@CreateTime: 2023-05-06 18:07
*@Description: TODO 动态分流实现
*@Version: 1.0
*/
public class TableProcessFunction extends BroadcastProcessFunction<JSONObject, String, JSONObject> {
private OutputTag<JSONObject> dimTag;
private MapStateDescriptor<String, TableProcess> mapStateDescriptor;
//声明连接对象
private Connection connection;
public TableProcessFunction(OutputTag<JSONObject> dimTag, MapStateDescriptor<String, TableProcess> mapStateDescriptor) {
this.dimTag = dimTag;
this.mapStateDescriptor = mapStateDescriptor;
}
//创建时只执行一次,非常适合注册驱动和创建连接的操作
@Override
public void open(Configuration parameters) throws Exception {
Class.forName("org.apache.phoenix.jdbc.PhoenixDriver");
connection = DriverManager.getConnection(GmallConfig.PHOENIX_SERVER);
}
//处理业务流中数据 maxwell从业务数据库中采集到的数据
//In theory dimension data must arrive before fact data: without users, orders and the like there can be no facts.
//This can be enforced operationally, e.g. start the Flink CDC config job first and Maxwell afterwards.
@Override
public void processElement(JSONObject jsonObject, BroadcastProcessFunction<JSONObject, String, JSONObject>.ReadOnlyContext readOnlyContext, Collector<JSONObject> collector) throws Exception {
String table = jsonObject.getString("table");
String type = jsonObject.getString("type");
//注意:maxwell可以对历史数据进行处理,这时候type为bootstrap-insert,这时候需要修复
if (type.equals("bootstrap-insert")) {
type = "insert";
jsonObject.put("type", type);
}
String key = table + ":" + type;
//获取状态(只读)
ReadOnlyBroadcastState<String, TableProcess> broadcastState = readOnlyContext.getBroadcastState(mapStateDescriptor);
//从状态中获取配置信息
TableProcess tableProcess = broadcastState.get(key);
if (tableProcess != null) {
//在配置表中找到了该操作对应的配置
//判断是事实数据还是维度数据
String sinkTable = tableProcess.getSinkTable();
jsonObject.put("sink_table", sinkTable);
//Before passing the record downstream, drop the fields that are not needed
//Filtering idea: read the retained columns from the config table and filter the attributes of data against them
JSONObject data = jsonObject.getJSONObject("data");
String sinkColumns = tableProcess.getSinkColumns();
filterColumns(data, sinkColumns);
String sinkType = tableProcess.getSinkType();
if (sinkType.equals(TableProcess.SINK_TYPE_HBASE)) {
//是维度数据 放到维度侧输出流汇总
readOnlyContext.output(dimTag, jsonObject);
} else if (sinkType.equals(TableProcess.SINK_TYPE_KAFKA)) {
//事实数据 放入主流中
collector.collect(jsonObject);
}
} else {
//在配置表中没有该操作对应的配置
System.out.println("No this Key in TableProcess:" + key);
}
}
//过滤字段
private void filterColumns(JSONObject data, String sinkColumns) {
//data : {"tm_name":"ASDAD","logo_url":"FDSFS","id":17}
//sinkColumns : id,tm_name
String[] columns = sinkColumns.split(",");
HashSet<String> columnSet = new HashSet<>(Arrays.asList(columns));
Set<Map.Entry<String, Object>> entries = data.entrySet();
entries.removeIf(entry -> !columnSet.contains(entry.getKey()));
}
//处理广播流中的数据 flinkCDC从Mysql中读取配置信息
//s: {"database":"rt_gmall_realtime","data":{"name":"ssss","id":1},"type":"insert","table":"t_user"}
@Override
public void processBroadcastElement(String s, BroadcastProcessFunction<JSONObject, String, JSONObject>.Context context, Collector<JSONObject> collector) throws Exception {
//获取广播状态
BroadcastState<String, TableProcess> broadcastState = context.getBroadcastState(mapStateDescriptor);
//json格式字符串转为对象
JSONObject jsonObject = JSONObject.parseObject(s);
//获取配置表中一条配置信息
TableProcess tableProcess = JSON.parseObject(jsonObject.getString("data"), TableProcess.class);
String sourceTable = tableProcess.getSourceTable();
String operateType = tableProcess.getOperateType();
//数据类型 kafka-事实 hbase维度
String sinkType = tableProcess.getSinkType();
//主键
String sinkPk = tableProcess.getSinkPk();
//指定保留字段
String sinkColumns = tableProcess.getSinkColumns();
//指定输出目的地
String sinkTable = tableProcess.getSinkTable();
//指定建表扩展语句
String sinkExtend = tableProcess.getSinkExtend();
//如果说 读取到的配置信息是维度数据的话那么提前在Hbase中创建维度表
if (sinkType.equals(TableProcess.SINK_TYPE_HBASE) && "insert".equals(operateType)) {
//only for dimension data on insert; on update the table already exists, so there is no need to create it again
checkTable(sinkTable, sinkPk, sinkColumns, sinkExtend);
}
//拼接key
String key = sourceTable + ":" + operateType;
//将配置信息放在状态中
broadcastState.put(key, tableProcess);
}
//While processing a config record, create the dimension table in advance: create table if not exists schema.table(column dataType)
private void checkTable(String tableName, String pk, String fields, String ext) throws SQLException {
//对主键进行空值处理
if (pk == null) {
pk = "id";
}
//对建表扩展进行空值处理
if (ext == null) {
ext = "";
}
StringBuilder sql = new StringBuilder("create table if not exists " + GmallConfig.HBASE_SCHEMA + "." + tableName + "(");
String[] columns = fields.split(",");
for (int i = 0; i < columns.length; i++) {
String column = columns[i];
//判断是否为主键
if (column.equals(pk)) {
sql.append(column + " varchar primary key");
} else {
sql.append(column + " varchar");
}
//判断是否加,
if (i < columns.length - 1) {
sql.append(",");
}
}
sql.append(") " + ext);
System.out.println("建表语句:" + sql);
PreparedStatement ps = null;
try {
//创建数据库操作对象
ps = connection.prepareStatement(sql.toString());
//执行sql
ps.execute();
} catch (SQLException e) {
e.printStackTrace();
throw new RuntimeException("phoenix建表失败");
} finally {
//释放资源
if (ps != null) {
ps.close();
}
}
}
}
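A wiring sketch for the broadcast join implemented above; configDS (the Flink CDC config stream) and businessDS (the Maxwell business stream, already parsed to JSONObject) are assumed variable names.
// Sketch: broadcast the config stream and connect it with the business stream.
MapStateDescriptor<String, TableProcess> mapStateDescriptor =
        new MapStateDescriptor<>("table_process_state", String.class, TableProcess.class);
OutputTag<JSONObject> dimTag = new OutputTag<JSONObject>("dim") { };
BroadcastStream<String> broadcastDS = configDS.broadcast(mapStateDescriptor);
SingleOutputStreamOperator<JSONObject> factDS = businessDS
        .connect(broadcastDS)
        .process(new TableProcessFunction(dimTag, mapStateDescriptor));
// dimension records go to the side output and on to Phoenix, fact records stay in the main stream
factDS.getSideOutput(dimTag).addSink(new DimSink());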

View File

@ -0,0 +1,79 @@
package com.atguigu.gmall.realtime.beans;
/**
*@BelongsProject: rt-gmall-parent
*@BelongsPackage: com.atguigu.gmall.realtime.beans
*@Author: markilue
*@CreateTime: 2023-05-11 16:45
*@Description: TODO 电商业务常量
*@Version: 1.0
*/
public class GmallConstant {
//10 单据状态
public static final String ORDER_STATUS_UNPAID = "1001"; //未支付
public static final String ORDER_STATUS_PAID = "1002"; //已支付
public static final String ORDER_STATUS_CANCEL = "1003";//已取消
public static final String ORDER_STATUS_FINISH = "1004";//已完成
public static final String ORDER_STATUS_REFUND = "1005";//退款中
public static final String ORDER_STATUS_REFUND_DONE = "1006";//退款完成
//11 支付状态
public static final String PAYMENT_TYPE_ALIPAY = "1101";//支付宝
public static final String PAYMENT_TYPE_WECHAT = "1102";//微信
public static final String PAYMENT_TYPE_UNION = "1103";//银联
//12 评价
public static final String APPRAISE_GOOD = "1201";// 好评
public static final String APPRAISE_SOSO = "1202";// 中评
public static final String APPRAISE_BAD = "1203";// 差评
public static final String APPRAISE_AUTO = "1204";// 自动
//13 退货原因
public static final String REFUND_REASON_BAD_GOODS = "1301";// 质量问题
public static final String REFUND_REASON_WRONG_DESC = "1302";// 商品描述与实际描述不一致
public static final String REFUND_REASON_SALE_OUT = "1303";// 缺货
public static final String REFUND_REASON_SIZE_ISSUE = "1304";// 号码不合适
public static final String REFUND_REASON_MISTAKE = "1305";// 拍错
public static final String REFUND_REASON_NO_REASON = "1306";// 不想买了
public static final String REFUND_REASON_OTHER = "1307";// 其他
//14 购物券状态
public static final String COUPON_STATUS_UNUSED = "1401";// 未使用
public static final String COUPON_STATUS_USING = "1402";// 使用中
public static final String COUPON_STATUS_USED = "1403";// 已使用
//15退款类型
public static final String REFUND_TYPE_ONLY_MONEY = "1501";// 仅退款
public static final String REFUND_TYPE_WITH_GOODS = "1502";// 退货退款
//24来源类型
public static final String SOURCE_TYPE_QUREY = "2401";// 用户查询
public static final String SOURCE_TYPE_PROMOTION = "2402";// 商品推广
public static final String SOURCE_TYPE_AUTO_RECOMMEND = "2403";// 智能推荐
public static final String SOURCE_TYPE_ACTIVITY = "2404";// 促销活动
//购物券范围
public static final String COUPON_RANGE_TYPE_CATEGORY3 = "3301";//
public static final String COUPON_RANGE_TYPE_TRADEMARK = "3302";//
public static final String COUPON_RANGE_TYPE_SPU = "3303";//
//购物券类型
public static final String COUPON_TYPE_MJ = "3201";//满减
public static final String COUPON_TYPE_DZ = "3202";// 满量打折
public static final String COUPON_TYPE_DJ = "3203";// 代金券
public static final String ACTIVITY_RULE_TYPE_MJ = "3101";
public static final String ACTIVITY_RULE_TYPE_DZ = "3102";
public static final String ACTIVITY_RULE_TYPE_ZK = "3103";
public static final String KEYWORD_SEARCH = "SEARCH";
public static final String KEYWORD_CLICK = "CLICK";
public static final String KEYWORD_CART = "CART";
public static final String KEYWORD_ORDER = "ORDER";
}

View File

@ -0,0 +1,25 @@
package com.atguigu.gmall.realtime.beans;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
/**
*@BelongsProject: rt-gmall-parent
*@BelongsPackage: com.atguigu.gmall.realtime.beans
*@Author: markilue
*@CreateTime: 2023-05-12 15:24
*@Description: TODO 关键词实体类
*@Version: 1.0
*/
@Data
@AllArgsConstructor
@NoArgsConstructor
public class KeywordStats {
private String keyword;
private Long ct;
private String source;
private String stt;
private String edt;
private Long ts;
}

View File

@ -0,0 +1,30 @@
package com.atguigu.gmall.realtime.beans;
import lombok.Data;
import java.math.BigDecimal;
/**
*@BelongsProject: rt-gmall-parent
*@BelongsPackage: com.atguigu.gmall.realtime.beans
*@Author: markilue
*@CreateTime: 2023-05-09 14:17
*@Description: TODO 订单明细对象
*@Version: 1.0
*/
@Data
public class OrderDetail {
Long id;
Long order_id;
Long sku_id;
BigDecimal order_price;
Long sku_num;
String sku_name;
String create_time;
BigDecimal split_total_amount;
BigDecimal split_activity_amount;
BigDecimal split_coupon_amount;
Long create_ts;
}

View File

@ -0,0 +1,34 @@
package com.atguigu.gmall.realtime.beans;
import lombok.Data;
import java.math.BigDecimal;
/**
*@BelongsProject: rt-gmall-parent
*@BelongsPackage: com.atguigu.gmall.realtime.beans
*@Author: markilue
*@CreateTime: 2023-05-09 14:16
*@Description: TODO 订单对象
*@Version: 1.0
*/
@Data
public class OrderInfo {
Long id;
Long province_id;
String order_status;
Long user_id;
BigDecimal total_amount;
BigDecimal activity_reduce_amount;
BigDecimal coupon_reduce_amount;
BigDecimal original_total_amount;
BigDecimal feight_fee;
String expire_time;
String create_time;
String operate_time;
String create_date; // 把其他字段处理得到
String create_hour;
Long create_ts;
}

View File

@ -0,0 +1,116 @@
package com.atguigu.gmall.realtime.beans;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
import org.apache.commons.lang3.ObjectUtils;
import java.math.BigDecimal;
/**
*@BelongsProject: rt-gmall-parent
*@BelongsPackage: com.atguigu.gmall.realtime.beans
*@Author: markilue
*@CreateTime: 2023-05-09 15:35
*@Description: TODO 订单宽表实体类
*@Version: 1.0
*/
@Data
@AllArgsConstructor
public class OrderWide {
Long detail_id;
Long order_id;
Long sku_id;
BigDecimal order_price;
Long sku_num;
String sku_name;
Long province_id;
String order_status;
Long user_id;
BigDecimal total_amount;
BigDecimal activity_reduce_amount;
BigDecimal coupon_reduce_amount;
BigDecimal original_total_amount;
BigDecimal feight_fee;
BigDecimal split_feight_fee;
BigDecimal split_activity_amount;
BigDecimal split_coupon_amount;
BigDecimal split_total_amount;
String expire_time;
String create_time;
String operate_time;
String create_date; // 把其他字段处理得到
String create_hour;
String province_name;//查询维表得到
String province_area_code;
String province_iso_code;
String province_3166_2_code;
Integer user_age;
String user_gender;
Long spu_id; //作为维度数据 要关联进来
Long tm_id;
Long category3_id;
String spu_name;
String tm_name;
String category3_name;
public OrderWide(OrderInfo orderInfo, OrderDetail orderDetail) {
mergeOrderInfo(orderInfo);
mergeOrderDetail(orderDetail);
}
public void mergeOrderInfo(OrderInfo orderInfo) {
if (orderInfo != null) {
this.order_id = orderInfo.id;
this.order_status = orderInfo.order_status;
this.create_time = orderInfo.create_time;
this.create_date = orderInfo.create_date;
this.activity_reduce_amount = orderInfo.activity_reduce_amount;
this.coupon_reduce_amount = orderInfo.coupon_reduce_amount;
this.original_total_amount = orderInfo.original_total_amount;
this.feight_fee = orderInfo.feight_fee;
this.total_amount = orderInfo.total_amount;
this.province_id = orderInfo.province_id;
this.user_id = orderInfo.user_id;
}
}
public void mergeOrderDetail(OrderDetail orderDetail) {
if (orderDetail != null) {
this.detail_id = orderDetail.id;
this.sku_id = orderDetail.sku_id;
this.sku_name = orderDetail.sku_name;
this.order_price = orderDetail.order_price;
this.sku_num = orderDetail.sku_num;
this.split_activity_amount = orderDetail.split_activity_amount;
this.split_coupon_amount = orderDetail.split_coupon_amount;
this.split_total_amount = orderDetail.split_total_amount;
}
}
public void mergeOtherOrderWide(OrderWide otherOrderWide) {
this.order_status = ObjectUtils.firstNonNull(this.order_status, otherOrderWide.order_status);
this.create_time = ObjectUtils.firstNonNull(this.create_time, otherOrderWide.create_time);
this.create_date = ObjectUtils.firstNonNull(this.create_date, otherOrderWide.create_date);
this.coupon_reduce_amount = ObjectUtils.firstNonNull(this.coupon_reduce_amount, otherOrderWide.coupon_reduce_amount);
this.activity_reduce_amount = ObjectUtils.firstNonNull(this.activity_reduce_amount, otherOrderWide.activity_reduce_amount);
this.original_total_amount = ObjectUtils.firstNonNull(this.original_total_amount, otherOrderWide.original_total_amount);
this.feight_fee = ObjectUtils.firstNonNull(this.feight_fee, otherOrderWide.feight_fee);
this.total_amount = ObjectUtils.firstNonNull(this.total_amount, otherOrderWide.total_amount);
this.user_id = ObjectUtils.<Long>firstNonNull(this.user_id, otherOrderWide.user_id);
this.sku_id = ObjectUtils.firstNonNull(this.sku_id, otherOrderWide.sku_id);
this.sku_name = ObjectUtils.firstNonNull(this.sku_name, otherOrderWide.sku_name);
this.order_price = ObjectUtils.firstNonNull(this.order_price, otherOrderWide.order_price);
this.sku_num = ObjectUtils.firstNonNull(this.sku_num, otherOrderWide.sku_num);
this.split_activity_amount = ObjectUtils.firstNonNull(this.split_activity_amount, otherOrderWide.split_activity_amount);
this.split_coupon_amount = ObjectUtils.firstNonNull(this.split_coupon_amount, otherOrderWide.split_coupon_amount);
this.split_total_amount = ObjectUtils.firstNonNull(this.split_total_amount, otherOrderWide.split_total_amount);
}
}
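The OrderWide(OrderInfo, OrderDetail) constructor above is meant to be fed by a stream-to-stream join; below is a sketch using Flink's interval join, where the keyed stream names and the five-second bound are assumptions.
// Sketch (assumed names): join OrderInfo and OrderDetail keyed by order id within an
// event-time interval and build one OrderWide per matching pair.
SingleOutputStreamOperator<OrderWide> orderWideDS = orderInfoKeyedDS
        .intervalJoin(orderDetailKeyedDS)
        .between(Time.seconds(-5), Time.seconds(5))
        .process(new ProcessJoinFunction<OrderInfo, OrderDetail, OrderWide>() {
            @Override
            public void processElement(OrderInfo orderInfo, OrderDetail orderDetail,
                                       Context ctx, Collector<OrderWide> out) {
                out.collect(new OrderWide(orderInfo, orderDetail));
            }
        });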

View File

@ -0,0 +1,27 @@
package com.atguigu.gmall.realtime.beans;
import lombok.Data;
import java.math.BigDecimal;
/**
*@BelongsProject: rt-gmall-parent
*@BelongsPackage: com.atguigu.gmall.realtime.beans
*@Author: markilue
*@CreateTime: 2023-05-10 18:57
*@Description: TODO 订单信息实体类
*@Version: 1.0
*/
@Data
public class PaymentInfo {
Long id;
Long order_id;
Long user_id;
BigDecimal total_amount;
String subject;
String payment_type;
String create_time;
String callback_time;
}

View File

@ -0,0 +1,95 @@
package com.atguigu.gmall.realtime.beans;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
import org.apache.commons.beanutils.BeanUtils;
import java.lang.reflect.InvocationTargetException;
import java.math.BigDecimal;
/**
*@BelongsProject: rt-gmall-parent
*@BelongsPackage: com.atguigu.gmall.realtime.beans
*@Author: markilue
*@CreateTime: 2023-05-10 18:58
*@Description: TODO 订单宽表实体类
*@Version: 1.0
*/
@Data
@AllArgsConstructor
@NoArgsConstructor
public class PaymentWide {
Long payment_id;
String subject;
String payment_type;
String payment_create_time;
String callback_time;
Long detail_id;
Long order_id;
Long sku_id;
BigDecimal order_price;
Long sku_num;
String sku_name;
Long province_id;
String order_status;
Long user_id;
BigDecimal total_amount;
BigDecimal activity_reduce_amount;
BigDecimal coupon_reduce_amount;
BigDecimal original_total_amount;
BigDecimal feight_fee;
BigDecimal split_feight_fee;
BigDecimal split_activity_amount;
BigDecimal split_coupon_amount;
BigDecimal split_total_amount;
String order_create_time;
String province_name;//查询维表得到
String province_area_code;
String province_iso_code;
String province_3166_2_code;
Integer user_age;
String user_gender;
Long spu_id; //作为维度数据 要关联进来
Long tm_id;
Long category3_id;
String spu_name;
String tm_name;
String category3_name;
public PaymentWide(PaymentInfo paymentInfo, OrderWide orderWide) {
mergeOrderWide(orderWide);
mergePaymentInfo(paymentInfo);
}
public void mergePaymentInfo(PaymentInfo paymentInfo) {
if (paymentInfo != null) {
try {
BeanUtils.copyProperties(this, paymentInfo);
payment_create_time = paymentInfo.create_time;
payment_id = paymentInfo.id;
} catch (IllegalAccessException e) {
e.printStackTrace();
} catch (InvocationTargetException e) {
e.printStackTrace();
}
}
}
public void mergeOrderWide(OrderWide orderWide) {
if (orderWide != null) {
try {
BeanUtils.copyProperties(this, orderWide);
order_create_time = orderWide.create_time;
} catch (IllegalAccessException e) {
e.printStackTrace();
} catch (InvocationTargetException e) {
e.printStackTrace();
}
}
}
}

View File

@ -0,0 +1,94 @@
package com.atguigu.gmall.realtime.beans;
import lombok.Builder;
import lombok.Data;
import java.math.BigDecimal;
import java.util.HashSet;
import java.util.Set;
/**
*@BelongsProject: rt-gmall-parent
*@BelongsPackage: com.atguigu.gmall.realtime.beans
*@Author: markilue
*@CreateTime: 2023-05-11 15:38
*@Description: TODO 商品统计实体类
*
* @Builder注解
* 可以使用构造者方式创建对象给属性赋值
* @Builder.Default
* 在使用构造者方式给属性赋值的时候属性的初始值会丢失
* 该注解的作用就是修复这个问题
* 例如我们在属性上赋值了初始值为0L如果不加这个注解通过构造者创建的对象属性值会变为null
*@Version: 1.0
*/
@Data
@Builder //构造者设计模式
public class ProductStats {
String stt;//窗口起始时间
String edt; //窗口结束时间
Long sku_id; //sku编号
String sku_name;//sku名称
BigDecimal sku_price; //sku单价
Long spu_id; //spu编号
String spu_name;//spu名称
Long tm_id; //品牌编号
String tm_name;//品牌名称
Long category3_id;//品类编号
String category3_name;//品类名称
@Builder.Default
Long display_ct = 0L; //曝光数
@Builder.Default
Long click_ct = 0L; //点击数
@Builder.Default
Long favor_ct = 0L; //收藏数
@Builder.Default
Long cart_ct = 0L; //添加购物车数
@Builder.Default
Long order_sku_num = 0L; //下单商品个数
@Builder.Default //下单商品金额
BigDecimal order_amount = BigDecimal.ZERO;
@Builder.Default
Long order_ct = 0L; //订单数
@Builder.Default //支付金额
BigDecimal payment_amount = BigDecimal.ZERO;
@Builder.Default
Long paid_order_ct = 0L; //支付订单数
@Builder.Default
Long refund_order_ct = 0L; //退款订单数
@Builder.Default
BigDecimal refund_amount = BigDecimal.ZERO;
@Builder.Default
Long comment_ct = 0L;//评论数
@Builder.Default
Long good_comment_ct = 0L; //好评数
@Builder.Default
@TransientSink
Set orderIdSet = new HashSet(); //用于统计订单数
@Builder.Default
@TransientSink
Set paidOrderIdSet = new HashSet(); //用于统计支付订单数
@Builder.Default
@TransientSink
Set refundOrderIdSet = new HashSet();//用于退款支付订单数
Long ts; //统计时间戳
}

View File

@ -0,0 +1,47 @@
package com.atguigu.gmall.realtime.beans;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
import java.math.BigDecimal;
import java.util.Date;
/**
*@BelongsProject: rt-gmall-parent
*@BelongsPackage: com.atguigu.gmall.realtime.beans
*@Author: markilue
*@CreateTime: 2023-05-11 20:51
*@Description: TODO 地区统计宽表实体类:
*@Version: 1.0
*/
@Data
@AllArgsConstructor
@NoArgsConstructor
public class ProvinceStats {
private String stt;
private String edt;
private Long province_id;
private String province_name;
private String area_code;
private String iso_code;
private String iso_3166_2;
private BigDecimal order_amount;
private Long order_count;
private Long ts;
public ProvinceStats(OrderWide orderWide){
province_id = orderWide.getProvince_id();
order_amount = orderWide.getSplit_total_amount();
province_name=orderWide.getProvince_name();
area_code=orderWide.getProvince_area_code();
iso_3166_2=orderWide.getProvince_3166_2_code();
iso_code=orderWide.getProvince_iso_code();
order_count = 1L;
ts=new Date().getTime();
}
}

View File

@ -0,0 +1,35 @@
package com.atguigu.gmall.realtime.beans;
import lombok.Data;
/**
*@BelongsProject: rt-gmall-parent
*@BelongsPackage: com.atguigu.gmall.realtime.beans
*@Author: markilue
*@CreateTime: 2023-05-06 14:07
*@Description: TODO 配置表实体类
*@Version: 1.0
*/
@Data
public class TableProcess {
//动态分流Sink常量 改为小写和脚本一致
public static final String SINK_TYPE_HBASE = "hbase";
public static final String SINK_TYPE_KAFKA = "kafka";
public static final String SINK_TYPE_CK = "clickhouse";
//来源表
String sourceTable;
//操作类型 insert,update,delete
String operateType;
//输出类型 hbase kafka
String sinkType;
//输出表(主题)
String sinkTable;
//输出字段
String sinkColumns;
//主键字段
String sinkPk;
//建表扩展
String sinkExtend;
}

View File

@ -0,0 +1,19 @@
package com.atguigu.gmall.realtime.beans;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
*@BelongsProject: rt-gmall-parent
*@BelongsPackage: com.atguigu.gmall.realtime.beans
*@Author: markilue
*@CreateTime: 2023-05-11 14:39
*@Description: TODO Marker annotation: when writing a bean to ClickHouse, fields carrying this annotation are skipped and not persisted
*@Version: 1.0
*/
@Target(ElementType.FIELD)//注解加的位置
@Retention(RetentionPolicy.RUNTIME)//注解生效的时间
public @interface TransientSink {
}

View File

@ -0,0 +1,43 @@
package com.atguigu.gmall.realtime.beans;
import lombok.AllArgsConstructor;
import lombok.Data;
/**
*@BelongsProject: rt-gmall-parent
*@BelongsPackage: com.atguigu.gmall.realtime.beans
*@Author: markilue
*@CreateTime: 2023-05-10 21:11
*@Description: TODO 访客主题实体类 包含各个维度和度量(需要类中的属性顺序和clickhouse表中的顺序一致)因为后续需要用反射去获取值
*@Version: 1.0
*/
@Data
@AllArgsConstructor
public class VisitorStats {
//统计开始时间
private String stt;
//统计结束时间
private String edt;
//维度版本
private String vc;
//维度渠道
private String ch;
//维度地区
private String ar;
//维度新老用户标识
private String is_new;
//度量独立访客数
private Long uv_ct = 0L;
//度量页面访问数
private Long pv_ct = 0L;
//度量 进入次数
private Long sv_ct = 0L;
//度量 跳出次数
private Long uj_ct = 0L;
//度量 持续访问时间
private Long dur_sum = 0L;
//统计时间
private Long ts;
}

View File

@ -0,0 +1,16 @@
package com.atguigu.gmall.realtime.common;
/**
*@BelongsProject: rt-gmall-parent
*@BelongsPackage: com.atguigu.gmall.realtime.common
*@Author: markilue
*@CreateTime: 2023-05-08 13:31
*@Description: TODO 实时数仓项目常量类
*@Version: 1.0
*/
public class GmallConfig {
public static final String HBASE_SCHEMA = "GMALL_REALTIME";//Hbase库名
public static final String PHOENIX_SERVER = "jdbc:phoenix:Ding202,Ding203,Ding204:2181";//Phoenix连接
public static final String CLICKHOUSE_URL = "jdbc:clickhouse://Ding202:8123/rt_gmall";//ClickHouse connection
}

View File

@ -0,0 +1,72 @@
package com.atguigu.gmall.realtime.utils;
import com.atguigu.gmall.realtime.beans.TransientSink;
import com.atguigu.gmall.realtime.beans.VisitorStats;
import com.atguigu.gmall.realtime.common.GmallConfig;
import org.apache.flink.connector.jdbc.JdbcConnectionOptions;
import org.apache.flink.connector.jdbc.JdbcExecutionOptions;
import org.apache.flink.connector.jdbc.JdbcSink;
import org.apache.flink.connector.jdbc.JdbcStatementBuilder;
import org.apache.flink.streaming.api.functions.sink.SinkFunction;
import java.lang.reflect.AnnotatedType;
import java.lang.reflect.Field;
import java.sql.PreparedStatement;
import java.sql.SQLException;
/**
*@BelongsProject: rt-gmall-parent
*@BelongsPackage: com.atguigu.gmall.realtime.utils
*@Author: markilue
*@CreateTime: 2023-05-11 14:02
*@Description: TODO 操作clickhouse工具类
*@Version: 1.0
*/
public class ClickhouseUtils {
public static <T> SinkFunction<T> getJdbcSink(String sql) {
SinkFunction<T> sinkFunction = JdbcSink.<T>sink(
sql,
new JdbcStatementBuilder<T>() {
@Override
public void accept(PreparedStatement preparedStatement, T obj) throws SQLException {
//obj即为流中的一条数据 获取流中对象obj的属性值,赋值给?占位符
//获取流中对象所属类的属性
Field[] fields = obj.getClass().getDeclaredFields();
int index = 1;
for (Field field : fields) {
//对象属性数组进行遍历 获取每一个属性
//判断该属性是否有TransientSink注解修饰
TransientSink transientSink = field.getAnnotation(TransientSink.class);
if (transientSink != null) {//the field is marked @TransientSink, so skip it and do not bind a value
continue;
}
//设置私有属性的访问权限
field.setAccessible(true);
try {
//获取对象属性值
Object filedValue = field.get(obj);
//将属性的值赋值给?占位符
preparedStatement.setObject(index++, filedValue);//顺序从1开始
} catch (IllegalAccessException e) {
e.printStackTrace();
}
}
}
},
new JdbcExecutionOptions.Builder()//构造者设计模式-通过内部类对象帮助构造外部对象:https://www.jianshu.com/p/7b52c5e1b6ce
.withBatchSize(5)//batch the inserts for efficiency (each parallel subtask flushes once it has buffered 5 records)
// .withBatchIntervalMs(2000)//超时时间 如果超过2s也插入
// .withMaxRetries(3)//插入失败之后,重试次数
.build(),
new JdbcConnectionOptions.JdbcConnectionOptionsBuilder() //构造者设计模式:https://www.jianshu.com/p/7b52c5e1b6ce
.withDriverName("ru.yandex.clickhouse.ClickHouseDriver")
.withUrl(GmallConfig.CLICKHOUSE_URL)
.build()
);
return sinkFunction;
}
}

View File

@ -0,0 +1,48 @@
package com.atguigu.gmall.realtime.utils;
import com.ctc.wstx.osgi.WstxBundleActivator;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.time.LocalDateTime;
import java.time.ZoneId;
import java.time.ZoneOffset;
import java.time.format.DateTimeFormatter;
import java.util.Date;
/**
*@BelongsProject: rt-gmall-parent
*@BelongsPackage: com.atguigu.gmall.realtime.utils
*@Author: markilue
*@CreateTime: 2023-05-10 19:49
*@Description: TODO 日期转换工具类
* SimpleDateFormat是线程不安全的
* 在JDK1.8之后,使用线程安全的DateTimeFormatter类替换
*@Version: 1.0
*/
public class DateTimeUtils {
// private static SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
private static DateTimeFormatter dtf = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss");
//将日期对象转换为字符串
public static String toYMDHMS(Date date) {
LocalDateTime localDateTime = LocalDateTime.ofInstant(date.toInstant(), ZoneId.systemDefault());
return dtf.format(localDateTime);
}
//将字符串日期转换为时间毫秒数
public static Long toTs(String dateStr) {
//SimpleDateFormat: Date Calendar
//DateTimeFormatter: LocalDateTime instant
LocalDateTime localDateTime = LocalDateTime.parse(dateStr, dtf);
Long ts = localDateTime.toInstant(ZoneOffset.of("+8")).toEpochMilli();
return ts;
}
public static void main(String[] args) {
System.out.println(ZoneId.systemDefault());
System.out.println(new Date(1683728230000L));
}
}
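A small round-trip illustration of the two helpers; the instant is arbitrary, and the round trip is exact when the JVM default zone is GMT+8, matching the +8 offset hard-coded in toTs.
// Format an instant to "yyyy-MM-dd HH:mm:ss", then parse the string back to epoch milliseconds.
Date date = new Date(1683728230000L);
String formatted = DateTimeUtils.toYMDHMS(date);  // e.g. "2023-05-10 22:17:10" under GMT+8
Long ts = DateTimeUtils.toTs(formatted);          // 1683728230000L again (second precision)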

View File

@ -0,0 +1,136 @@
package com.atguigu.gmall.realtime.utils;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import org.apache.flink.api.java.tuple.Tuple2;
import redis.clients.jedis.Jedis;
import java.util.List;
/**
*@BelongsProject: rt-gmall-parent
*@BelongsPackage: com.atguigu.gmall.realtime.utils
*@Author: markilue
*@CreateTime: 2023-05-09 17:47
*@Description: TODO 查询维度数据工具类
*@Version: 1.0
*/
public class DimUtils {
public static JSONObject getDimInfoWithCache(String tableName, String id) {
return getDimInfoWithCache(tableName, Tuple2.of("id", id));
}
//使用旁路缓存 对维度查询进行优化
//redis缓存: type:string key: dim:表名:主键值1_主键值2 ttl:1天(失效时间)
public static JSONObject getDimInfoWithCache(String tableName, Tuple2<String, String>... colNameAndValue) {
//拼接查询Redis的key
//拼接查询维度sql
StringBuilder selectDimSql = new StringBuilder("select * from " + tableName + " where ");
StringBuilder redisKey = new StringBuilder("dim:" + tableName.toLowerCase() + ":");
for (int i = 0; i < colNameAndValue.length; i++) {
String colName = colNameAndValue[i].f0;
String colValue = colNameAndValue[i].f1;
selectDimSql.append(colName + "='" + colValue + "'");
redisKey.append(colValue);
if (i < colNameAndValue.length - 1) {
selectDimSql.append(" and ");
redisKey.append("_");
}
}
//根据key到redis中查询redis缓存数据
//声明操作Redis客户端
Jedis jedis = null;
//声明变量 用于接受从Redis中查询出的缓存数据
String jsonStr = null;
//声明变量 用于处理返回的维度对象
JSONObject dimInfoJsonObj = null;
try {
jedis = MyRedisUtils.getJedis();
jsonStr = jedis.get(redisKey.toString());
} catch (Exception e) {
e.printStackTrace();
System.out.println("从Redis中查询维度数据发生了异常");//catch让程序继续执行而不至于报错
}
//check whether the dimension data was found in the Redis cache
if (jsonStr != null && jsonStr.length() > 0) {
//从redis中查到了维度的缓存数据,将缓存的维度字符串转换为json对象
dimInfoJsonObj = JSON.parseObject(jsonStr);
} else {
//缓存中没有查询到数据,到phoenix数据库中查询
System.out.println("查询维度的sql:" + selectDimSql);
//底层还是调用封装的查询phoenix表的数据方法
List<JSONObject> resultList = MyPhoenixUtils.queryList(selectDimSql.toString(), JSONObject.class);
if (resultList != null && resultList.size() > 0) {
dimInfoJsonObj = resultList.get(0);//因为是根据维度数据的主键查询,一般维度数据只有一条
//将从Phoenix中查询到的维度数据放入redis缓存中
if (jedis != null) {
jedis.setex(redisKey.toString(), 3600 * 24, dimInfoJsonObj.toJSONString());//设置超时时间
}
} else {
System.out.println("维度数据没有找到" + selectDimSql);
}
}
//释放连接
if (jedis != null) {
jedis.close();
System.out.println("------关闭redis连接-------");
}
return dimInfoJsonObj;
}
//从phoenix表中查询维度数据(封装成json) {"ID":"13","TM_NAME":"adsdf"}
//查询条件可能有多个(联合主键),用可变长的tuple来操作
public static JSONObject getDimInfoNocache(String tableName, Tuple2<String, String>... colNameAndValue) {
//拼接查询维度sql
StringBuilder selectDimSql = new StringBuilder("select * from " + tableName + " where ");
for (int i = 0; i < colNameAndValue.length; i++) {
String colName = colNameAndValue[i].f0;
String colValue = colNameAndValue[i].f1;
selectDimSql.append(colName + "='" + colValue + "'");
if (i < colNameAndValue.length - 1) {
selectDimSql.append(" and ");
}
}
System.out.println("查询维度的sql:" + selectDimSql);
//底层还是调用封装的查询phoenix表的数据方法
List<JSONObject> resultList = MyPhoenixUtils.queryList(selectDimSql.toString(), JSONObject.class);
JSONObject dimInfoJsonObj = null;
if (resultList != null && resultList.size() > 0) {
dimInfoJsonObj = resultList.get(0);//因为是根据维度数据的主键查询,一般维度数据只有一条
} else {
System.out.println("维度数据没有找到" + selectDimSql);
}
return dimInfoJsonObj;
}
//根据redis的key,删除缓存
public static void deleteCached(String tableName, String id) {
String redisKey = "dim:" + tableName.toLowerCase() + ":" + id;
try {
Jedis jedis = MyRedisUtils.getJedis();
jedis.del(redisKey);
jedis.close();
} catch (Exception e) {
e.printStackTrace();
System.out.println("删除redis缓存发生了异常");
}
}
public static void main(String[] args) {
// System.out.println(DimUtils.getDimInfoNocache("dim_base_trademark", Tuple2.of("id", "13")));
System.out.println(DimUtils.getDimInfoWithCache("dim_base_trademark", "13"));
}
}

View File

@ -0,0 +1,48 @@
package com.atguigu.gmall.realtime.utils;
import org.wltea.analyzer.core.IKSegmenter;
import org.wltea.analyzer.core.Lexeme;
import java.io.IOException;
import java.io.StringReader;
import java.util.ArrayList;
import java.util.List;
/**
*@BelongsProject: rt-gmall-parent
*@BelongsPackage: com.atguigu.gmall.realtime.utils
*@Author: markilue
*@CreateTime: 2023-05-11 21:55
*@Description: TODO Word segmentation with the IK analyzer
*@Version: 1.0
*/
public class KeywordUtils {
//分词方法
public static List<String> analyze(String text) {
StringReader reader = new StringReader(text);
//useSmart:是否使用智能分词策略
//非智能分词:细粒度输出所有可能的切分结果
//智能分词:合并数词和量词对分词结果进行歧义判断
IKSegmenter ikSegmenter = new IKSegmenter(reader, true);
List<String> resultList = new ArrayList<>();
try {
Lexeme lexeme = null;
while ((lexeme = ikSegmenter.next()) != null) {
//不等于null则还有词可以分
resultList.add(lexeme.getLexemeText());
}
} catch (IOException e) {
e.printStackTrace();
}
return resultList;
}
public static void main(String[] args) {
String text = "Apple iPhoneXSMax (A2104) 256GB 深空灰色 移动联通电信4G手机 双卡双待";
System.out.println(KeywordUtils.analyze(text));
}
}

View File

@ -3,8 +3,13 @@ package com.atguigu.gmall.realtime.utils;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer;
import org.apache.flink.streaming.connectors.kafka.KafkaSerializationSchema;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import javax.annotation.Nullable;
import java.util.Properties;
/**
@ -12,16 +17,64 @@ import java.util.Properties;
*/
public class MyKafkaUtils {
private static final String KAFKA_SERVER = "Ding202:9092,Ding203:9092,Ding204:9092";
private static final String DEFAULT_TOPIC = "dwd_default_topic";
//获取kafka的消费者
public static FlinkKafkaConsumer<String> getKafkaSource(String topic,String groupId) {
public static FlinkKafkaConsumer<String> getKafkaSource(String topic, String groupId) {
Properties properties = new Properties();
properties.setProperty(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, KAFKA_SERVER);
properties.setProperty(ConsumerConfig.GROUP_ID_CONFIG, groupId);
return new FlinkKafkaConsumer<String>(topic, new SimpleStringSchema(), properties);
}
//获取kafka的生产者
// public static FlinkKafkaProducer<String> getKafkaSink(String topic) {
// Properties properties = new Properties();
// properties.setProperty(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, KAFKA_SERVER);
//// properties.setProperty(ConsumerConfig.GROUP_ID_CONFIG, groupId);
//
//
// return new FlinkKafkaProducer<String>(topic, new SimpleStringSchema(), properties);//从构造方法来看只能保证至少一次
// }
public static FlinkKafkaProducer<String> getKafkaSink(String topic) {
Properties properties = new Properties();
properties.setProperty(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, KAFKA_SERVER);
properties.setProperty(ProducerConfig.TRANSACTION_TIMEOUT_CONFIG, 1000 * 60 * 15 + "");//15分钟事务超时
// properties.setProperty(ConsumerConfig.GROUP_ID_CONFIG, groupId);
// return new FlinkKafkaProducer<String>(topic, new SimpleStringSchema(), properties);//从构造方法来看只能保证至少一次
return new FlinkKafkaProducer<String>(topic, new KafkaSerializationSchema<String>() {
@Override
public ProducerRecord<byte[], byte[]> serialize(String s, @Nullable Long aLong) {
return new ProducerRecord<byte[], byte[]>(topic, s.toString().getBytes());
}
}, properties, FlinkKafkaProducer.Semantic.EXACTLY_ONCE);//this constructor takes a Semantic argument, so the sink runs with exactly-once guarantees
}
public static <T> FlinkKafkaProducer<T> getKafkaSinkBySchema(KafkaSerializationSchema<T> kafkaSerializationSchema) {
Properties properties = new Properties();
properties.setProperty(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, KAFKA_SERVER);
properties.setProperty(ProducerConfig.TRANSACTION_TIMEOUT_CONFIG, 1000 * 60 * 15 + "");//15分钟事务超时
// properties.setProperty(ConsumerConfig.GROUP_ID_CONFIG, groupId);
return new FlinkKafkaProducer<T>(DEFAULT_TOPIC, kafkaSerializationSchema, properties, FlinkKafkaProducer.Semantic.EXACTLY_ONCE);
}
public static String getKafkaDDL(String topic, String groupId) {
String connector = " 'connector' = 'kafka'," +
" 'topic' = '" + topic + "'," +
" 'properties.bootstrap.servers' = '" + KAFKA_SERVER + "'," +
" 'properties.group.id' = '" + groupId + "'," +
" 'scan.startup.mode' = 'latest-offset'," +
" 'format' = 'json'";
return connector;
}
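A usage sketch of getKafkaSinkBySchema, the dynamic-topic sink that pairs with TableProcessFunction: each fact record carries its destination topic in the sink_table field. factDS is an assumed variable name.
// Sketch: route every fact record to the Kafka topic named in its sink_table field.
FlinkKafkaProducer<JSONObject> dynamicKafkaSink = MyKafkaUtils.getKafkaSinkBySchema(
        new KafkaSerializationSchema<JSONObject>() {
            @Override
            public ProducerRecord<byte[], byte[]> serialize(JSONObject jsonObj, @Nullable Long timestamp) {
                return new ProducerRecord<>(
                        jsonObj.getString("sink_table"),
                        jsonObj.getJSONObject("data").toJSONString().getBytes());
            }
        });
factDS.addSink(dynamicKafkaSink);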

View File

@ -0,0 +1,99 @@
package com.atguigu.gmall.realtime.utils;
import com.alibaba.fastjson.JSONObject;
import com.atguigu.gmall.realtime.common.GmallConfig;
import org.apache.commons.beanutils.BeanUtils;
import java.sql.*;
import java.util.ArrayList;
import java.util.List;
/**
*@BelongsProject: rt-gmall-parent
*@BelongsPackage: com.atguigu.gmall.realtime.utils
*@Author: markilue
*@CreateTime: 2023-05-06 20:57
*@Description:
* TODO 编写Phoenix工具类,执行sql
* 从Phoenix表中查询数据
*@Version: 1.0
*/
public class MyPhoenixUtils {
private static Connection connection;
//执行查询SQL将查询结果集封装T类型对象放到List中
public static <T> List<T> queryList(String sql, Class<T> clazz) {
if (connection == null) {
//注册驱动
initConnection();
}
List<T> result = new ArrayList<>();
//创建数据库操作对象
PreparedStatement preparedStatement = null;
ResultSet resultSet = null;
try {
preparedStatement = connection.prepareStatement(sql);
//执行sql语句
resultSet = preparedStatement.executeQuery();
//获取查询结果集的元数据信息
ResultSetMetaData metaData = resultSet.getMetaData();
//处理结果集
while (resultSet.next()) {
//创建要封装的对象
T obj = clazz.newInstance();
for (int i = 1; i <= metaData.getColumnCount(); i++) {//注意:JDBC列的索引从1开始
//根据元数据获取列名
String columnName = metaData.getColumnName(i);
//给对象的属性赋值
BeanUtils.setProperty(obj, columnName, resultSet.getObject(i));
}
//将封装的对象放到List集合中
result.add(obj);
}
} catch (Exception e) {
e.printStackTrace();
} finally {
//释放资源
if (resultSet != null) {
try {
resultSet.close();
} catch (SQLException e) {
e.printStackTrace();
}
}
if (preparedStatement != null) {
try {
preparedStatement.close();
} catch (SQLException e) {
e.printStackTrace();
}
}
}
return result;
}
private static void initConnection() {
try {
Class.forName("org.apache.phoenix.jdbc.PhoenixDriver");
//获取连接
connection = DriverManager.getConnection(GmallConfig.PHOENIX_SERVER);
connection.setSchema(GmallConfig.HBASE_SCHEMA);//设置操作的表空间
} catch (Exception e) {
throw new RuntimeException(e);
}
}
public static void main(String[] args) {
List<JSONObject> jsonObjects = queryList("select * from dim_base_trademark", JSONObject.class);
System.out.println(jsonObjects);
}
}

View File

@ -0,0 +1,56 @@
package com.atguigu.gmall.realtime.utils;
import redis.clients.jedis.Jedis;
import redis.clients.jedis.JedisPool;
import redis.clients.jedis.JedisPoolConfig;
/**
*@BelongsProject: rt-gmall-parent
*@BelongsPackage: com.atguigu.gmall.realtime.utils
*@Author: markilue
*@CreateTime: 2023-05-09 19:51
*@Description: TODO 获取操作Redis的Java客户端Jedis
*@Version: 1.0
*/
public class MyRedisUtils {
//声明JedisPool连接池
private static JedisPool jedisPool;
public static Jedis getJedis() {
if (jedisPool == null) {
initJedisPool();
}
System.out.println("-----获取Redis连接--------");
return jedisPool.getResource();
}
//初始化连接池对象
private static void initJedisPool() {
//连接池配置对象
JedisPoolConfig jedisPoolConfig = new JedisPoolConfig();
//最大连接数
jedisPoolConfig.setMaxTotal(100);
//每次在连接的时候是否进行ping pong测试
jedisPoolConfig.setTestOnBorrow(true);
//连接耗尽是否等待
jedisPoolConfig.setBlockWhenExhausted(true);
//最大等待时间
jedisPoolConfig.setMaxWaitMillis(2000);
//最小空闲连接数
jedisPoolConfig.setMinIdle(5);
//maximum number of idle connections kept in the pool (idle connections above this number are destroyed)
jedisPoolConfig.setMaxIdle(5);
jedisPool = new JedisPool(jedisPoolConfig, "Ding202", 6379, 10000);
}
public static void main(String[] args) {
Jedis jedis = getJedis();
String pong = jedis.ping();
System.out.println(pong);
}
}

View File

@ -0,0 +1,43 @@
package com.atguigu.gmall.realtime.utils;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
/**
*@BelongsProject: rt-gmall-parent
*@BelongsPackage: com.atguigu.gmall.realtime.utils
*@Author: markilue
*@CreateTime: 2023-05-10 14:59
*@Description: TODO 线程池工具类
*@Version: 1.0
*/
public class ThreadPoolUtils {
public static volatile ThreadPoolExecutor threadPool;
public static final int corePoolSize = 4;
public static final int maximumPoolSize = 16;
public static final long keepAliveTime = 300;
public static final TimeUnit unit = TimeUnit.SECONDS;
public static final BlockingQueue<Runnable> workQueue = new LinkedBlockingQueue<Runnable>(Integer.MAX_VALUE);
public static ThreadPoolExecutor getInstance() {
if (threadPool == null) {
synchronized (ThreadPoolExecutor.class) {
System.out.println("---开辟线程池---");
if (threadPool == null) {
threadPool = new ThreadPoolExecutor(
corePoolSize,
maximumPoolSize,
keepAliveTime,
unit,
workQueue
);
}
}
}
return threadPool;
}
}

View File

@ -0,0 +1,40 @@
<?xml version="1.0"?>
<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
<configuration>
<property>
<name>hbase.rootdir</name>
<value>hdfs://Ding202:8020/hbase</value>
</property>
<property>
<name>hbase.cluster.distributed</name>
<value>true</value>
</property>
<property>
<name>hbase.zookeeper.quorum</name>
<value>Ding202,Ding203,Ding204</value>
</property>
<property>
<name>hbase.unsafe.stream.capability.enforce</name>
<value>false</value>
</property>
<property>
<name>hbase.wal.provider</name>
<value>filesystem</value>
</property>
<property>
<name>phoenix.schema.isNamespaceMappingEnabled</name>
<value>true</value>
</property>
<property>
<name>phoenix.schema.mapSystemTablesToNamespace</name>
<value>true</value>
</property>
</configuration>

View File

@ -1,4 +1,4 @@
log4j.rootLogger=warn,stdout
log4j.rootLogger=error,stdout
log4j.appender.stdout=org.apache.log4j.ConsoleAppender
log4j.appender.stdout.target=System.out
log4j.appender.stdout.layout=org.apache.log4j.PatternLayout

View File

@ -0,0 +1,18 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>com.atguigu.rtgmall</groupId>
<artifactId>rt-gmall-parent</artifactId>
<packaging>pom</packaging>
<version>1.0-SNAPSHOT</version>
<modules>
<module>gmall-realtime</module>
<module>gmall-logger</module>
<module>gmall-cdc</module>
</modules>
</project>

View File

@ -16,12 +16,24 @@
<version>4.12</version>
</dependency>
<!-- <dependency>-->
<!-- <groupId>com.cqu.ge</groupId>-->
<!-- <artifactId>GE_Migrating_data</artifactId>-->
<!-- <version>1.0-SNAPSHOT</version>-->
<!-- </dependency>-->
<dependency>
<groupId>com.cqu</groupId>
<artifactId>ge</artifactId>
<version>1.0.0</version>
</dependency>
<!-- https://mvnrepository.com/artifact/com.google.code.gson/gson -->
<dependency>
<groupId>com.google.code.gson</groupId>
<artifactId>gson</artifactId>
<version>2.8.5</version>
</dependency>
<!-- <dependency>-->
<!-- <groupId>com.cqu</groupId>-->
<!-- <artifactId>ge</artifactId>-->

View File

@ -5,6 +5,43 @@ package com.cqu.ge;
* and open the template in the editor.
*/
import com.cqu.ge.TestHelper;
import com.ge.ip.hds.historian.API.ArchiveService;
import com.ge.ip.hds.historian.API.ArchiveServiceImpl;
import com.ge.ip.hds.historian.API.CollectorService;
import com.ge.ip.hds.historian.API.CollectorServiceImpl;
import com.ge.ip.hds.historian.API.ConfigurationManager;
import com.ge.ip.hds.historian.API.DataStoreService;
import com.ge.ip.hds.historian.API.DataStoreServiceImpl;
import com.ge.ip.hds.historian.API.TagService;
import com.ge.ip.hds.historian.API.TagServiceImpl;
import com.ge.ip.hds.historian.DataContracts.Archive;
import com.ge.ip.hds.historian.DataContracts.ArchiveStatistics;
import com.ge.ip.hds.historian.DataContracts.CollectorProperty;
import com.ge.ip.hds.historian.DataContracts.DataStore;
import com.ge.ip.hds.historian.DataContracts.DataStoreState;
import com.ge.ip.hds.historian.DataContracts.ErrorCode;
import com.ge.ip.hds.historian.DataContracts.HistorianOperationException;
import com.ge.ip.hds.historian.DataContracts.NativeDataType;
import com.ge.ip.hds.historian.DataContracts.TagProperty;
//import com.ge.ip.hds.historianjavaapitest.ReadClass;
import java.io.File;
import java.util.Date;
import java.util.List;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import static junit.framework.Assert.assertEquals;
import static junit.framework.Assert.assertEquals;
import org.junit.After;
import org.junit.AfterClass;
import static org.junit.Assert.*;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import com.ge.ip.hds.historian.API.ArchiveService;
import com.ge.ip.hds.historian.API.ArchiveServiceImpl;
import com.ge.ip.hds.historian.API.ConfigurationManager;
@ -124,7 +161,11 @@ public class ArchiveAPITest {
ArchiveServiceImpl instance = new ArchiveServiceImpl();
Archive result = instance.GetArchive(createdArchive.getName(), "User");
<<<<<<< HEAD
// assertEquals(createdArchive.getName(), result.getName());
=======
assertEquals(createdArchive.getName(), result.getName());
>>>>>>> 74afd287d8c69e5aa5c2fc7ef4c6d7b90beb6455
th.deleteArchive(createdArchive.getName());
@ -187,7 +228,11 @@ public class ArchiveAPITest {
ArchiveServiceImpl instance = new ArchiveServiceImpl();
Archive result = instance.AddArchive(archive);
<<<<<<< HEAD
// assertEquals(archive.getName(), result.getName());
=======
assertEquals(archive.getName(), result.getName());
>>>>>>> 74afd287d8c69e5aa5c2fc7ef4c6d7b90beb6455
TestHelper th = new TestHelper();
th.deleteArchive(archive.getName());

View File

@ -147,11 +147,15 @@ public class Collector {
System.out.println("UpdateCollector");
TestHelper th = new TestHelper();
CollectorProperty cp = th.createCollector("NewCollector");
// CollectorProperty cp = th.createCollector("NewCollector");
String collectorName = cp.getName();
// String collectorName = cp.getName();
String collectorName = "NewCollector";
CollectorServiceImpl instance = new CollectorServiceImpl();
CollectorProperty cp = instance.GetCollector(collectorName);
cp.setComment("test comment");
CollectorProperty result = instance.UpdateCollector(collectorName, cp);

View File

@ -84,7 +84,7 @@ public class CollectorServiceImplTest {
found = false;
}
instance.DeleteCollector(cp.getName(), true);
// instance.DeleteCollector(cp.getName(), true);
assertTrue(found);
}

View File

@ -30,19 +30,15 @@ import com.ge.ip.hds.historian.DataContracts.TimeStampDeterminedByType;
import java.io.File;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Calendar;
import java.util.Date;
import java.util.List;
import java.util.Locale;
import java.util.TimeZone;
import java.util.UUID;
import java.util.*;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import org.junit.After;
import org.junit.AfterClass;
import static org.junit.Assert.*;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
@ -54,139 +50,140 @@ import org.w3c.dom.NodeList;
public class DataServiceImplTest {
public DataServiceImplTest() throws Exception {
public DataServiceImplTest() throws Exception {
// this.newDoc = ob.Read();
}
}
// ReadClass ob = new ReadClass();
Document newDoc;
// ReadClass ob = new ReadClass();
Document newDoc;
@BeforeClass
public static void setUpClass() {
}
@BeforeClass
public static void setUpClass() {
}
@AfterClass
public static void tearDownClass() {
}
@AfterClass
public static void tearDownClass() {
}
/**
* Configuration class: sets the location of HistorianWebServiceCppBridge.dll
* historianServer: sets the server location; whether it runs in cluster mode or standalone depends on how it was configured at install time
*/
@Before
public void setUp() {
String cppBringPath = "C:\\Program Files\\Proficy\\Proficy Historian\\x86\\Java API\\HistorianWebServiceCppBridge.dll";
String historianServer = "localhost";
/**
* Configuration class: sets the location of HistorianWebServiceCppBridge.dll
* historianServer: sets the server location; whether it runs in cluster mode or standalone depends on how it was configured at install time
*/
@Before
public void setUp() {
String cppBringPath = "C:\\Program Files\\Proficy\\Proficy Historian\\x86\\Java API\\HistorianWebServiceCppBridge.dll";
String historianServer = "localhost";
ConfigurationManager.getInstance().Initialize(cppBringPath, historianServer, "", "", 10000, 10000, true, false);
ConfigurationManager.getInstance().Initialize(cppBringPath, historianServer, "", "", 10000, 10000, true, false);
}
}
@After
public void tearDown() {
}
@After
public void tearDown() {
}
// TODO add test methods here.
// The methods must be annotated with annotation @Test. For example:
//
/**
* Test of GetCurrentData method, of class DataServiceImpl.
*/
/**
* Test of fetching the current data
* @throws Exception
*/
@Test
public void testGetCurrentData_self() throws Exception {
// TODO add test methods here.
// The methods must be annotated with annotation @Test. For example:
//
/**
* Test of GetCurrentData method, of class DataServiceImpl.
*/
/**
* Test of fetching the current data
*
* @throws Exception
*/
@Test
public void testGetCurrentData_self() throws Exception {
System.out.println("GetCurrentData");
TestHelper th = new TestHelper();
System.out.println("GetCurrentData");
TestHelper th = new TestHelper();
TagProperty tagCreated = th.createTag("TagForSample1", "Float");
String tagName1 = tagCreated.getName();
TagProperty tagCreated = th.createTag("TagForSample1", "Float");
String tagName1 = tagCreated.getName();
tagCreated = th.createTag("TagForSample2", "Float");
String tagName2 = tagCreated.getName();
tagCreated = th.createTag("TagForSample2", "Float");
String tagName2 = tagCreated.getName();
// String tagName1 = "AirInletDP";
// String tagName2 = "AirInletDP_2";
DataService dataService = new DataServiceImpl() {
};
DataSample dataSample = th.createDataSample("UTC", "100",QualityStatus.Uncertain);
DataService dataService = new DataServiceImpl() {
};
DataSample dataSample = th.createDataSample("UTC", "100", QualityStatus.Uncertain);
dataService.CreateTagSample(tagName1, new ArrayList<DataSample>(Arrays.asList(dataSample)));
dataService.CreateTagSample(tagName2, new ArrayList<DataSample>(Arrays.asList(dataSample)));
dataService.CreateTagSample(tagName1, new ArrayList<DataSample>(Arrays.asList(dataSample)));
dataService.CreateTagSample(tagName2, new ArrayList<DataSample>(Arrays.asList(dataSample)));
List<String> tagNames = new ArrayList<String>();
tagNames.add(tagName1);
tagNames.add(tagName2);
List<String> tagNames = new ArrayList<String>();
tagNames.add(tagName1);
tagNames.add(tagName2);
DataServiceImpl instance = new DataServiceImpl();
List<TagSamples> result = instance.GetCurrentData(tagNames);
DataServiceImpl instance = new DataServiceImpl();
List<TagSamples> result = instance.GetCurrentData(tagNames);
th.deleteTag(tagName1);
th.deleteTag(tagName2);
th.deleteTag(tagName1);
th.deleteTag(tagName2);
assertEquals(tagName1, result.get(0).getTagName());
assertEquals(tagName2, result.get(1).getTagName());
assertEquals(tagName1, result.get(0).getTagName());
assertEquals(tagName2, result.get(1).getTagName());
if (result.get(0).getTagName().equals(tagName1) && result.get(1).getTagName().equals(tagName2)) {
if (result.get(0).getTagName().equals(tagName1) && result.get(1).getTagName().equals(tagName2)) {
DataSample[] datasamples1 = result.get(0).getSamples();
DataSample[] datasamples2 = result.get(1).getSamples();
System.out.println(datasamples1);
System.out.println(datasamples2);
System.out.println("datasamples1[0].getValue():"+datasamples1[0].getValue());
System.out.println("datasamples1[0].getValue():"+datasamples2[0].getValue());
if (datasamples1[0].getValue().equals(tagName1) && datasamples2[0].getValue().equals(tagName2)) {
System.out.println("ll:"+datasamples1[0]);
System.out.println("dd:"+datasamples1[1]);
assertNull(null);
}
} else {
fail();
}
DataSample[] datasamples1 = result.get(0).getSamples();
DataSample[] datasamples2 = result.get(1).getSamples();
System.out.println(datasamples1);
System.out.println(datasamples2);
System.out.println("datasamples1[0].getValue():" + datasamples1[0].getValue());
System.out.println("datasamples1[0].getValue():" + datasamples2[0].getValue());
if (datasamples1[0].getValue().equals(tagName1) && datasamples2[0].getValue().equals(tagName2)) {
System.out.println("ll:" + datasamples1[0]);
System.out.println("dd:" + datasamples1[1]);
assertNull(null);
}
} else {
fail();
}
// datastoreService.DeleteDataStore(datastoreName);
// TODO review the generated test code and remove the default call to fail.
// datastoreService.DeleteDataStore(datastoreName);
// TODO review the generated test code and remove the default call to fail.
// fail("The test case is a prototype.");
}
}
@Test
public void testGetCurrentData() throws Exception {
@Test
public void testGetCurrentData() throws Exception {
System.out.println("GetCurrentData");
TestHelper th = new TestHelper();
System.out.println("GetCurrentData");
TestHelper th = new TestHelper();
TagProperty tagCreated = th.createTag("TagForSample1", "Float");
String tagName1 = tagCreated.getName();
TagProperty tagCreated = th.createTag("TagForSample1", "Float");
String tagName1 = tagCreated.getName();
tagCreated = th.createTag("TagForSample2", "Float");
String tagName2 = tagCreated.getName();
tagCreated = th.createTag("TagForSample2", "Float");
String tagName2 = tagCreated.getName();
DataService dataService = new DataServiceImpl() {
};
DataSample dataSample = th.createDataSample("UTC", "100",QualityStatus.Uncertain);
DataService dataService = new DataServiceImpl() {
};
DataSample dataSample = th.createDataSample("UTC", "100", QualityStatus.Uncertain);
dataService.CreateTagSample(tagName1, new ArrayList<DataSample>(Arrays.asList(dataSample)));
dataService.CreateTagSample(tagName2, new ArrayList<DataSample>(Arrays.asList(dataSample)));
dataService.CreateTagSample(tagName1, new ArrayList<DataSample>(Arrays.asList(dataSample)));
dataService.CreateTagSample(tagName2, new ArrayList<DataSample>(Arrays.asList(dataSample)));
List<String> tagNames = new ArrayList<String>();
tagNames.add(tagName1);
tagNames.add(tagName2);
List<String> tagNames = new ArrayList<String>();
tagNames.add(tagName1);
tagNames.add(tagName2);
DataServiceImpl instance = new DataServiceImpl();
List<TagSamples> result = instance.GetCurrentData(tagNames);
DataServiceImpl instance = new DataServiceImpl();
List<TagSamples> result = instance.GetCurrentData(tagNames);
th.deleteTag(tagName1);
th.deleteTag(tagName2);
th.deleteTag(tagName1);
th.deleteTag(tagName2);
assertEquals(tagName1, result.get(0).getTagName());
assertEquals(tagName2, result.get(1).getTagName());
assertEquals(tagName1, result.get(0).getTagName());
assertEquals(tagName2, result.get(1).getTagName());
// if (result.get(0).getTagName().equals(tagName1) && result.get(1).getTagName().equals(tagName2)) {
// DataSample[] datasamples1 = result.get(0).getSamples();
@ -198,51 +195,62 @@ public class DataServiceImplTest {
// fail();
// }
// datastoreService.DeleteDataStore(datastoreName);
// TODO review the generated test code and remove the default call to fail.
// datastoreService.DeleteDataStore(datastoreName);
// TODO review the generated test code and remove the default call to fail.
// fail("The test case is a prototype.");
}
}
/**
* Test of GetRawDataByTime method, of class DataServiceImpl.
*/
@Test
public void testGetRawDataByTime() throws Exception {
System.out.println("GetRawDataByTime");
TestHelper th = new TestHelper();
/**
* Test of GetRawDataByTime method, of class DataServiceImpl.
*/
@Test
public void testGetRawDataByTime() throws Exception {
TagProperty tagCreated = th.createTag("NewTag", "Float");
String tagName = tagCreated.getName();
System.out.println("GetRawDataByTime");
TestHelper th = new TestHelper();
List<String> tagNames = new ArrayList<String>();
tagNames.add(tagName);
DataService dataService = new DataServiceImpl() {
};
// TagProperty tagCreated = th.createTag("NewTag", "Float");
// String tagName = tagCreated.getName();
String tagName = "NewTag";
List<String> tagNames = new ArrayList<String>();
tagNames.add(tagName);
DataService dataService = new DataServiceImpl() {
};
DataSample dataSample = th.createDataSample("UTC", "72",QualityStatus.Uncertain);
dataService.CreateTagSample(tagName, new ArrayList<DataSample>(Arrays.asList(dataSample)));
Calendar cal1 = Calendar.getInstance();
cal1.setTime(dataSample.getTimeStamp());
cal1.add(Calendar.SECOND, -1);
Calendar cal2 = Calendar.getInstance();
cal2.setTime(dataSample.getTimeStamp());
cal2.add(Calendar.SECOND, 1);
Date cur1 = new Date();
Calendar cal1 = Calendar.getInstance();
cal1.setTime(cur1);
cal1.add(Calendar.YEAR, -1);
Date startTime = cal1.getTime();
Date endTime = cal2.getTime();
Calendar cal2 = Calendar.getInstance();
cal2.setTime(cur1);
cal2.add(Calendar.SECOND, 1);
DataServiceImpl instance = new DataServiceImpl();
List<TagSamples> result = instance.GetRawDataByTime(tagNames, startTime, endTime);
th.deleteTag(tagName);
for (TagSamples tagSamples : result) {
assertEquals(tagName, tagSamples.getTagName());
Date startTime = cal1.getTime();
Date endTime = cal2.getTime();
DataServiceImpl instance = new DataServiceImpl();
List<TagSamples> result = instance.GetRawDataByTime(tagNames, startTime, endTime);
// th.deleteTag(tagName);
for (TagSamples tagSamples : result) {
System.out.println("tagName:" + tagSamples.getTagName());
DataSample[] samples = tagSamples.getSamples();
System.out.println(samples.length);
for (DataSample sample : samples) {
System.out.println("tag_value:" + sample.getValue());
}
// assertEquals(tagName, tagSamples.getTagName());
// if (tagSamples.getTagName().equals(tagName)) {
// int length = tagSamples.getSamples().length;
// if (length == 0) {
@ -256,50 +264,50 @@ public class DataServiceImplTest {
// } else {
// fail();
// }
}
}
}
}
/**
* Test of GetRawDataByNumber method, of class DataServiceImpl.
*/
@Test
public void testGetRawDataByNumber() throws Exception {
System.out.println("GetRawDataByNumber");
TestHelper th = new TestHelper();
/**
* Test of GetRawDataByNumber method, of class DataServiceImpl.
*/
@Test
public void testGetRawDataByNumber() throws Exception {
TagProperty tagCreated = th.createTag("NewTag", "Float");
String tagName = tagCreated.getName();
List<String> tagNames = new ArrayList<String>();
tagNames.add(tagName);
DataService dataService = new DataServiceImpl() {
};
DataSample dataSample = th.createDataSample("UTC", "72",QualityStatus.Uncertain);
dataService.CreateTagSample(tagName, new ArrayList<DataSample>(Arrays.asList(dataSample)));
System.out.println("GetRawDataByNumber");
TestHelper th = new TestHelper();
Calendar cal1 = Calendar.getInstance();
cal1.setTime(dataSample.getTimeStamp());
int startTimeOffset = Integer
.parseInt("-1");
cal1.add(Calendar.SECOND, -1);
TagProperty tagCreated = th.createTag("NewTag", "Float");
String tagName = tagCreated.getName();
Date startTime = cal1.getTime();
int count = 2;
boolean backwardTimeOrder = false;
DataServiceImpl instance = new DataServiceImpl();
List<TagSamples> result = instance.GetRawDataByNumber(tagNames, startTime, count, backwardTimeOrder);
th.deleteTag(tagName);
for (TagSamples tagSamples : result) {
assertEquals(tagName, tagSamples.getTagName());
List<String> tagNames = new ArrayList<String>();
tagNames.add(tagName);
DataService dataService = new DataServiceImpl() {
};
DataSample dataSample = th.createDataSample("UTC", "72", QualityStatus.Uncertain);
dataService.CreateTagSample(tagName, new ArrayList<DataSample>(Arrays.asList(dataSample)));
Calendar cal1 = Calendar.getInstance();
cal1.setTime(dataSample.getTimeStamp());
int startTimeOffset = Integer
.parseInt("-1");
cal1.add(Calendar.SECOND, -1);
Date startTime = cal1.getTime();
int count = 2;
boolean backwardTimeOrder = false;
DataServiceImpl instance = new DataServiceImpl();
List<TagSamples> result = instance.GetRawDataByNumber(tagNames, startTime, count, backwardTimeOrder);
th.deleteTag(tagName);
for (TagSamples tagSamples : result) {
assertEquals(tagName, tagSamples.getTagName());
/*if (tagSamples.getTagName().equals(tagName)) {
DataSample[] datasample = new DataSample[1];
int length = tagSamples.getSamples().length;
@ -313,151 +321,175 @@ public class DataServiceImplTest {
} else {
fail();
}*/
}
}
// assertEquals(expResult, result);
// TODO review the generated test code and remove the default call to fail.
// fail("The test case is a prototype.");
}
// assertEquals(expResult, result);
// TODO review the generated test code and remove the default call to fail.
// fail("The test case is a prototype.");
}
/**
* Test of GetInterpolatedData method, of class DataServiceImpl.
*/
@Test
public void testGetInterpolatedData() throws Exception {
System.out.println("GetInterpolatedData");
TestHelper th = new TestHelper();
TagProperty tagCreated = th.createTag("TagForSample1", "Float");
String tagName1 = tagCreated.getName();
/**
* Test of GetInterpolatedData method, of class DataServiceImpl.
*/
@Test
public void testGetInterpolatedData() throws Exception {
System.out.println("GetInterpolatedData");
TestHelper th = new TestHelper();
tagCreated = th.createTag("TagForSample2", "Float");
String tagName2 = tagCreated.getName();
TagProperty tagCreated = th.createTag("TagForSample1", "Float");
String tagName1 = tagCreated.getName();
List<String> tagNames = new ArrayList<String>();
tagNames.add(tagName1);
tagNames.add(tagName2);
DataService dataService = new DataServiceImpl() {
};
DataSample dataSample1 =th.createDataSample("UTC", "10", QualityStatus.Good);
DataSample dataSample2 = th.createDataSample("UTC", "12", QualityStatus.Good);
tagCreated = th.createTag("TagForSample2", "Float");
String tagName2 = tagCreated.getName();
dataService.CreateTagSample(tagName1, new ArrayList<DataSample>(Arrays.asList(dataSample1)));
dataService.CreateTagSample(tagName2, new ArrayList<DataSample>(Arrays.asList(dataSample2)));
List<String> tagNames = new ArrayList<String>();
tagNames.add(tagName1);
tagNames.add(tagName2);
Calendar cal2 = Calendar.getInstance();
int startTimeOffset = Integer
.parseInt("10");
cal2.add(Calendar.SECOND, startTimeOffset);
Date startTime = cal2.getTime();
Calendar cal3 = Calendar.getInstance();
int endTimeOffset = Integer.parseInt("10");
cal3.add(Calendar.SECOND, endTimeOffset);
Date endTime = cal3.getTime();
DataService dataService = new DataServiceImpl() {
};
DataSample dataSample1 = th.createDataSample("UTC", "10", QualityStatus.Good);
DataSample dataSample2 = th.createDataSample("UTC", "12", QualityStatus.Good);
long intervalMs = Long.parseLong("1000");
DataServiceImpl instance = new DataServiceImpl();
List<TagSamples> result = instance.GetInterpolatedData(tagNames, startTime, endTime, intervalMs);
th.deleteTag(tagName1);
th.deleteTag(tagName2);
assertEquals(tagName1, result.get(0).getTagName());
assertEquals(tagName2, result.get(1).getTagName());
dataService.CreateTagSample(tagName1, new ArrayList<DataSample>(Arrays.asList(dataSample1)));
dataService.CreateTagSample(tagName2, new ArrayList<DataSample>(Arrays.asList(dataSample2)));
// assertEquals(expResult, result);
// TODO review the generated test code and remove the default call to fail.
// fail("The test case is a prototype.");
}
Calendar cal2 = Calendar.getInstance();
int startTimeOffset = Integer
.parseInt("10");
cal2.add(Calendar.SECOND, startTimeOffset);
Date startTime = cal2.getTime();
/**
* Test of CreateTagSample method, of class DataServiceImpl.
*/
@Test
public void testCreateTagSample() throws Exception {
System.out.println("CreateTagSample");
TestHelper testhelper = new TestHelper();
DataService dataService = new DataServiceImpl();
DataSample dataSample =testhelper.createDataSample("UTC", "10",QualityStatus.Bad);
// datasample created
// creating tag1
TagProperty _tagCreated = testhelper.createTag("NewTag", "Float");
// end of creating tag1
String tagName = _tagCreated.getName();
List<DataSample> dataSamples = null;
DataServiceImpl instance = new DataServiceImpl();
instance.CreateTagSample(tagName, new ArrayList<DataSample>(Arrays.asList(dataSample)));
Calendar cal3 = Calendar.getInstance();
int endTimeOffset = Integer.parseInt("10");
cal3.add(Calendar.SECOND, endTimeOffset);
Date endTime = cal3.getTime();
List<String> tagNames = new ArrayList<String>();
tagNames.add(tagName);
List<TagSamples> lis = instance.GetCurrentData(tagNames);
DataSample[] sample = lis.get(0).getSamples();
testhelper.deleteTag(tagName);
assertEquals(sample[0].getValue(), dataSample.getValue());
long intervalMs = Long.parseLong("1000");
DataServiceImpl instance = new DataServiceImpl();
// TODO review the generated test code and remove the default call to fail.
// fail("The test case is a prototype.");
}
List<TagSamples> result = instance.GetInterpolatedData(tagNames, startTime, endTime, intervalMs);
th.deleteTag(tagName1);
th.deleteTag(tagName2);
/**
* Test of CreateTagsSample method, of class DataServiceImpl.
*/
@Test
public void testCreateTagsSample() throws Exception {
System.out.println("CreateTagsSample");
TestHelper testhelper = new TestHelper();
// creating first tag
TagProperty tagCreated =testhelper.createTag("tag1", "Float");
String tagName1 = tagCreated.getName();
// creating 2nd tag
tagCreated =testhelper.createTag("tag2", "Float");
String tagName2 = tagCreated.getName();
// creating tagsample
DataService dataService = new DataServiceImpl();
DataSample dataSample = testhelper.createDataSample("UTC", "8", QualityStatus.Good);
// datasample created
assertEquals(tagName1, result.get(0).getTagName());
assertEquals(tagName2, result.get(1).getTagName());
DataSample[] datasamples = new DataSample[1];
datasamples[0] = dataSample;
// assertEquals(expResult, result);
// TODO review the generated test code and remove the default call to fail.
// fail("The test case is a prototype.");
}
TagWithData tagwithData = testhelper.createTagWithData(datasamples, NativeDataType.Float, tagName1);
TagWithData tagwithData2 = testhelper.createTagWithData(datasamples, NativeDataType.Float, tagName2);
/**
* Test of CreateTagSample method, of class DataServiceImpl.
* TODO: test of fetching the current value passes
*/
@Test
public void testCreateTagSample() throws Exception {
List<TagWithData> tagsDataSample = new ArrayList<TagWithData>();
tagsDataSample.add(tagwithData);
tagsDataSample.add(tagwithData2);
System.out.println("CreateTagSample");
TestHelper testhelper = new TestHelper();
DataServiceImpl instance = new DataServiceImpl();
instance.CreateTagsSample(tagsDataSample);
// creating tag1
// TagProperty _tagCreated = testhelper.createTag("NewTag", "Float");
List<String> tagNames = new ArrayList<String>();
tagNames.add(tagName1);
tagNames.add(tagName2);
// String tagName = _tagCreated.getName();
List<TagSamples> lis = instance.GetCurrentData(tagNames);
DataSample[] sample = lis.get(0).getSamples();
testhelper.deleteTag(tagName1);
testhelper.deleteTag(tagName2);
assertEquals(sample[0].getValue(), dataSample.getValue());
// end of creating tag1
DataSample[] sample2 = lis.get(1).getSamples();
assertEquals(sample2[0].getValue(), dataSample.getValue());
DataService dataService = new DataServiceImpl();
List<DataSample> dataSamples = null;
DataServiceImpl instance = new DataServiceImpl();
String tagName = "NewTag";
// TODO review the generated test code and remove the default call to fail.
// fail("The test case is a prototype.");
}
ArrayList<DataSample> list = new ArrayList<>();
for (int i = 0; i < 10; i++) {
DataSample dataSample = testhelper.createDataSample("UTC", "20" + i, QualityStatus.Bad);
Thread.sleep(1000);
list.add(dataSample);
}
dataService.CreateTagSample(tagName, list);
// datasample created
List<String> tagNames = new ArrayList<String>();
tagNames.add(tagName);
List<TagSamples> lis = instance.GetCurrentData(tagNames);
DataSample[] sample = lis.get(0).getSamples();
System.out.println("tag_value:" + sample[0].getValue());
System.out.println("tag_Quality:" + sample[0].getQuality());
/**
* CreateTagSample
* tag_value:10
* tag_Quality:Bad
*/
// testhelper.deleteTag(tagName);
// assertEquals(sample[0].getValue(), dataSample.getValue());
// TODO review the generated test code and remove the default call to fail.
// fail("The test case is a prototype.");
}
/**
* Test of CreateTagsSample method, of class DataServiceImpl.
*/
@Test
public void testCreateTagsSample() throws Exception {
System.out.println("CreateTagsSample");
TestHelper testhelper = new TestHelper();
// creating first tag
TagProperty tagCreated = testhelper.createTag("tag1", "Float");
String tagName1 = tagCreated.getName();
// creating 2nd tag
tagCreated = testhelper.createTag("tag2", "Float");
String tagName2 = tagCreated.getName();
// creating tagsample
DataService dataService = new DataServiceImpl();
DataSample dataSample = testhelper.createDataSample("UTC", "8", QualityStatus.Good);
// datasample created
DataSample[] datasamples = new DataSample[1];
datasamples[0] = dataSample;
TagWithData tagwithData = testhelper.createTagWithData(datasamples, NativeDataType.Float, tagName1);
TagWithData tagwithData2 = testhelper.createTagWithData(datasamples, NativeDataType.Float, tagName2);
List<TagWithData> tagsDataSample = new ArrayList<TagWithData>();
tagsDataSample.add(tagwithData);
tagsDataSample.add(tagwithData2);
DataServiceImpl instance = new DataServiceImpl();
instance.CreateTagsSample(tagsDataSample);
List<String> tagNames = new ArrayList<String>();
tagNames.add(tagName1);
tagNames.add(tagName2);
List<TagSamples> lis = instance.GetCurrentData(tagNames);
DataSample[] sample = lis.get(0).getSamples();
testhelper.deleteTag(tagName1);
testhelper.deleteTag(tagName2);
assertEquals(sample[0].getValue(), dataSample.getValue());
DataSample[] sample2 = lis.get(1).getSamples();
assertEquals(sample2[0].getValue(), dataSample.getValue());
// TODO review the generated test code and remove the default call to fail.
// fail("The test case is a prototype.");
}
}
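Condensing the tests above, a minimal write-then-read-back sketch against the Historian Java API might look like the following. It reuses only calls that appear in these tests (ConfigurationManager.Initialize, TestHelper.createTag / createDataSample / deleteTag, DataServiceImpl.CreateTagSample / GetCurrentData). The tag name RoundTripTag is hypothetical, and the import locations for DataServiceImpl, DataSample, QualityStatus and TagSamples are assumed to follow the same API/DataContracts packages seen in ArchiveAPITest.

import com.cqu.ge.TestHelper;
import com.ge.ip.hds.historian.API.ConfigurationManager;
import com.ge.ip.hds.historian.API.DataServiceImpl;
import com.ge.ip.hds.historian.DataContracts.DataSample;
import com.ge.ip.hds.historian.DataContracts.QualityStatus;
import com.ge.ip.hds.historian.DataContracts.TagSamples;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class HistorianRoundTrip {
    public static void main(String[] args) throws Exception {
        // Same initialization as the @Before method in the tests above.
        String cppBridgePath = "C:\\Program Files\\Proficy\\Proficy Historian\\x86\\Java API\\HistorianWebServiceCppBridge.dll";
        ConfigurationManager.getInstance().Initialize(cppBridgePath, "localhost", "", "", 10000, 10000, true, false);

        TestHelper th = new TestHelper();                          // helper class from this module
        String tagName = th.createTag("RoundTripTag", "Float").getName();   // hypothetical tag name

        DataServiceImpl dataService = new DataServiceImpl();
        DataSample sample = th.createDataSample("UTC", "42", QualityStatus.Good);
        dataService.CreateTagSample(tagName, new ArrayList<DataSample>(Arrays.asList(sample)));

        // Read the value back, as testGetCurrentData does.
        List<String> tagNames = new ArrayList<String>();
        tagNames.add(tagName);
        for (TagSamples ts : dataService.GetCurrentData(tagNames)) {
            System.out.println(ts.getTagName() + " -> " + ts.getSamples()[0].getValue());
        }

        th.deleteTag(tagName);                                     // clean up like the tests do
    }
}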

View File

@ -52,6 +52,7 @@ public class TestHelper {
CollectorService instance = new CollectorServiceImpl();
CollectorProperty collectorProperty = new CollectorProperty();
collectorProperty.setName(colName);
collectorProperty.setDefaultCollectionType(2);
collectorProperty = instance.AddCollector(collectorProperty);
return collectorProperty;

View File

@ -0,0 +1,14 @@
{
/*
How to obtain a token:
curl -u username:password https://<nodename>/uaa/oauth/token -d grant_type=client_credentials
username is admin (or hostname.admin)
grant_type=client_credentials is used here; grant_type=password can also be used
Example: curl -u admin:dingjiawen.123 http://localhost:9480/uaa/oauth/token -d grant_type=client_credentials | iconv -f utf-8 -t gbk
*/
"access_token": "eyJhbGciOiJSUzI1NiIsImprdSI6Imh0dHBzOi8vdGVzdC5nZS5jb20ubG9jYWxob3N0OjEwMDUyL3VhYS90b2tlbl9rZXlzIiwia2lkIjoia2V5LWlkLTEiLCJ0eXAiOiJKV1QifQ.eyJqdGkiOiIwZGEyNzk3YzM5ZGU0ZjMwOGIwYTRiODgzNTMyNWU1YyIsInN1YiI6ImFkbWluIiwiYXV0aG9yaXRpZXMiOlsiY2xpZW50cy5yZWFkIiwicGFzc3dvcmQud3JpdGUiLCJjbGllbnRzLnNlY3JldCIsImNsaWVudHMud3JpdGUiLCJ1YWEuYWRtaW4iLCJjbGllbnRzLmFkbWluIiwic2NpbS53cml0ZSIsInNjaW0ucmVhZCJdLCJzY29wZSI6WyJjbGllbnRzLnJlYWQiLCJwYXNzd29yZC53cml0ZSIsImNsaWVudHMuc2VjcmV0IiwiY2xpZW50cy53cml0ZSIsInVhYS5hZG1pbiIsImNsaWVudHMuYWRtaW4iLCJzY2ltLndyaXRlIiwic2NpbS5yZWFkIl0sImNsaWVudF9pZCI6ImFkbWluIiwiY2lkIjoiYWRtaW4iLCJhenAiOiJhZG1pbiIsInJldm9jYWJsZSI6dHJ1ZSwiZ3JhbnRfdHlwZSI6ImNsaWVudF9jcmVkZW50aWFscyIsInJldl9zaWciOiI4MWMyOTU5YyIsImlhdCI6MTY4MzE5MzA3OSwiZXhwIjoxNjgzMjM2Mjc5LCJpc3MiOiJodHRwczovL3Rlc3QuZ2UuY29tLmxvY2FsaG9zdDoxMDA1Mi91YWEvb2F1dGgvdG9rZW4iLCJ6aWQiOiJ1YWEiLCJhdWQiOlsic2NpbSIsInBhc3N3b3JkIiwiY2xpZW50cyIsInVhYSIsImFkbWluIl19.EFhqDhU6pQe18PKMfsLOZXtPRL2XWJqFoi2qcZtrfM6BcHnMoolO6vildyM-XtlsjPpJenxmAavbpAatn7twl48qxY_I8gmAaOnXvrPgkfRbvBNpNe3X8iYcFG58y_zFHbzgu_zxIbrNwdUTtM-l1Pmeo5sA-Vtm2Agz3bDbYGptbSEr_lOMkmxJYIz0p8tWuQsB-FHg1K9BHOd3nsY6zQN7Ci_q-ZHG_ExlxBJOn8DX3-I2mW0Ddw9OSEWPoLjViovMgavcheIwxIobb8U6SHAeWi-_frnxwTMdZukZNcOZxrZktJTaOhdsiR4OCmVtTOaYmJochoo2m5SvDSjpCA",
"token_type": "bearer",
"expires_in": 43199,
"scope": "clients.read password.write clients.secret clients.write uaa.admin clients.admin scim.write scim.read",
"jti": "0da2797c39de4f308b0a4b8835325e5c"
}
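The comment block explains how this token was obtained with curl. The same request issued from Java, using only the JDK's HttpURLConnection, might look like the sketch below; the endpoint and credentials are copied from the comment, and the response body is the JSON document shown above. Since token_type is "bearer", the access_token is then sent on subsequent REST calls as an "Authorization: Bearer <access_token>" header.

import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.net.HttpURLConnection;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import java.util.Base64;

public class UaaTokenRequest {
    public static void main(String[] args) throws Exception {
        // Mirrors: curl -u admin:<password> http://localhost:9480/uaa/oauth/token -d grant_type=client_credentials
        String tokenUrl = "http://localhost:9480/uaa/oauth/token";
        String basic = Base64.getEncoder()
                .encodeToString("admin:dingjiawen.123".getBytes(StandardCharsets.UTF_8));

        HttpURLConnection conn = (HttpURLConnection) new URL(tokenUrl).openConnection();
        conn.setRequestMethod("POST");
        conn.setRequestProperty("Authorization", "Basic " + basic);
        conn.setRequestProperty("Content-Type", "application/x-www-form-urlencoded");
        conn.setDoOutput(true);
        try (OutputStream os = conn.getOutputStream()) {
            os.write("grant_type=client_credentials".getBytes(StandardCharsets.UTF_8));
        }

        // Prints the token JSON (access_token, token_type, expires_in, scope, jti).
        try (BufferedReader in = new BufferedReader(
                new InputStreamReader(conn.getInputStream(), StandardCharsets.UTF_8))) {
            String line;
            while ((line = in.readLine()) != null) {
                System.out.println(line);
            }
        }
    }
}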

View File

@ -0,0 +1,34 @@
package com.cqu.ge.test;
import java.util.Calendar;
import java.util.Date;
/**
*@BelongsProject: GE_Migrating_data
*@BelongsPackage: com.cqu.ge.test
*@Author: markilue
*@CreateTime: 2023-05-04 16:07
*@Description: TODO
*@Version: 1.0
*/
public class test1 {
public static void main(String[] args) {
Date cur1 = new Date();
System.out.println(cur1);
Calendar cal1 = Calendar.getInstance();
cal1.setTime(cur1);
cal1.add(Calendar.YEAR, -1);
Calendar cal2 = Calendar.getInstance();
cal2.setTime(cur1);
cal2.add(Calendar.SECOND, 1);
Date startTime = cal1.getTime();
Date endTime = cal2.getTime();
System.out.println(startTime);
System.out.println(endTime);
}
}
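test1 builds a query window from one year before "now" to one second after it using java.util.Calendar. For comparison only, the same window built with java.time is sketched below, converted back to java.util.Date because the Historian calls in this module take Date; this is an illustrative alternative, not what the module itself uses.

import java.time.ZonedDateTime;
import java.util.Date;

public class Test1ModernEquivalent {
    public static void main(String[] args) {
        ZonedDateTime now = ZonedDateTime.now();
        // Same window as test1 above: one calendar year back to one second ahead of "now".
        Date startTime = Date.from(now.minusYears(1).toInstant());
        Date endTime = Date.from(now.plusSeconds(1).toInstant());
        System.out.println(startTime);
        System.out.println(endTime);
    }
}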

Binary file not shown.

Some files were not shown because too many files have changed in this diff.