This commit is contained in:
kevinding1125 2023-06-05 14:40:22 +08:00
commit 0c74438f55
20 changed files with 282884 additions and 1 deletion

File diff suppressed because it is too large

View File

@ -0,0 +1,27 @@
city_id city_name area
1 北京 华北
2 上海 华东
3 深圳 华南
4 广州 华南
5 武汉 华中
6 南京 华东
7 天津 华北
8 成都 西南
9 哈尔滨 东北
10 大连 东北
11 沈阳 东北
12 西安 西北
13 长沙 华中
14 重庆 西南
15 济南 华东
16 石家庄 华北
17 银川 西北
18 杭州 华东
19 保定 华北
20 福州 华南
21 贵阳 西南
22 青岛 华东
23 苏州 华东
24 郑州 华北
25 无锡 华东
26 厦门 华南

View File

@ -0,0 +1,101 @@
product_id product_name extend_info
1 商品_1 自营
2 商品_2 自营
3 商品_3 自营
4 商品_4 自营
5 商品_5 自营
6 商品_6 自营
7 商品_7 自营
8 商品_8 自营
9 商品_9 自营
10 商品_10 自营
11 商品_11 自营
12 商品_12 第三方
13 商品_13 第三方
14 商品_14 自营
15 商品_15 自营
16 商品_16 自营
17 商品_17 第三方
18 商品_18 自营
19 商品_19 第三方
20 商品_20 自营
21 商品_21 第三方
22 商品_22 第三方
23 商品_23 自营
24 商品_24 第三方
25 商品_25 第三方
26 商品_26 自营
27 商品_27 自营
28 商品_28 自营
29 商品_29 第三方
30 商品_30 第三方
31 商品_31 自营
32 商品_32 自营
33 商品_33 自营
34 商品_34 自营
35 商品_35 第三方
36 商品_36 第三方
37 商品_37 自营
38 商品_38 自营
39 商品_39 自营
40 商品_40 第三方
41 商品_41 第三方
42 商品_42 自营
43 商品_43 自营
44 商品_44 自营
45 商品_45 自营
46 商品_46 自营
47 商品_47 自营
48 商品_48 自营
49 商品_49 自营
50 商品_50 第三方
51 商品_51 第三方
52 商品_52 自营
53 商品_53 第三方
54 商品_54 自营
55 商品_55 自营
56 商品_56 自营
57 商品_57 自营
58 商品_58 第三方
59 商品_59 自营
60 商品_60 第三方
61 商品_61 第三方
62 商品_62 自营
63 商品_63 自营
64 商品_64 自营
65 商品_65 第三方
66 商品_66 自营
67 商品_67 自营
68 商品_68 自营
69 商品_69 自营
70 商品_70 自营
71 商品_71 自营
72 商品_72 自营
73 商品_73 自营
74 商品_74 自营
75 商品_75 自营
76 商品_76 第三方
77 商品_77 自营
78 商品_78 自营
79 商品_79 第三方
80 商品_80 第三方
81 商品_81 第三方
82 商品_82 自营
83 商品_83 自营
84 商品_84 自营
85 商品_85 第三方
86 商品_86 自营
87 商品_87 自营
88 商品_88 自营
89 商品_89 自营
90 商品_90 自营
91 商品_91 自营
92 商品_92 自营
93 商品_93 第三方
94 商品_94 第三方
95 商品_95 自营
96 商品_96 自营
97 商品_97 自营
98 商品_98 自营
99 商品_99 第三方
100 商品_100 第三方

File diff suppressed because it is too large

File diff suppressed because it is too large

View File

@ -0,0 +1,116 @@
package com.atguigu.spark.core.practice
import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}
/**
* Compute the Top 10 hot categories
*/
object example1 {
def main(args: Array[String]): Unit = {
val sparkConf = new SparkConf().setMaster("local[*]").setAppName("example1")
val sc: SparkContext = new SparkContext(sparkConf)
//2019-05-05,85,e2eef06e-beaa-4b49-acaf-38e057e1cd6e,31,2019-05-05 02:54:16,苹果,-1,-1,,,,,19
val sourceData: RDD[String] = sc.textFile("data/user_visit_action.txt")
//TODO 1. Compute click, order, and payment counts separately
//1) Click count
//Filter out records that are not click events
val clickFilterRDD = sourceData.filter(
data => {
val words = data.split("_")
words(6) != "-1"
}
)
val clickRDD: RDD[(String, Int)] = clickFilterRDD.map {
data => {
val line: Array[String] = data.split("_")
(line(6),1)
}
}
val clickNumRDD = clickRDD.reduceByKey(_ + _)
//2) Order count
//Filter out records that are not order events
val filterOrderRDD = sourceData.filter(
data => {
val words = data.split("_")
words(8) != "null"
}
)
//Use flatMap to split the grouped ids into individual (id, 1) pairs like [(1,1),(2,1),(3,1)]
val orderNumRDD = filterOrderRDD.flatMap(
data => {
val words: Array[String] = data.split("_")
val ids = words(8).split(",")
ids.map(id => (id, 1))
}
).reduceByKey(_+_)
//3) Payment count
val filterPayRDD = sourceData.filter(
data => {
val words = data.split("_")
words(10) != "null"
}
)
//Use flatMap to split the grouped ids into individual (id, 1) pairs like [(1,1),(2,1),(3,1)]
val payNumRDD = filterPayRDD.flatMap(
data => {
val words: Array[String] = data.split("_")
val ids = words(10).split(",")
ids.map(id => (id, 1))
}
).reduceByKey(_+_)
//TODO 2. Merge the three into (id, (clicks, orders, payments))
//Use cogroup to merge the three RDDs
val totalRDD: RDD[(String, (Iterable[Int], Iterable[Int], Iterable[Int]))] = clickNumRDD.cogroup(orderNumRDD, payNumRDD)
val combineRDD: Array[(String, (Int, Int, Int))] = totalRDD.mapValues{
case (iter1,iter2,iter3) =>{
var clickNum=0
val clickIter = iter1.iterator
if (clickIter.hasNext){
clickNum = clickIter.next()
}
var orderNum =0
val orderIter = iter2.iterator
if (orderIter.hasNext){
orderNum = orderIter.next()
}
var payNum =0
val payIter = iter3.iterator
if (payIter.hasNext){
payNum = payIter.next()
}
(clickNum,orderNum,payNum)
}
}.sortBy(_._2,false).take(10)
combineRDD.foreach(println(_))
}
}
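
A note on the merge step above: the mapValues block unwraps each cogrouped Iterable by hand. Because each input RDD comes out of reduceByKey, every Iterable holds at most one element, so headOption is a more compact equivalent. A sketch against the same totalRDD (not part of the commit):

val top10 = totalRDD.mapValues {
case (clicks, orders, pays) =>
(clicks.headOption.getOrElse(0), orders.headOption.getOrElse(0), pays.headOption.getOrElse(0))
}.sortBy(_._2, ascending = false).take(10)
top10.foreach(println)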

View File

@ -0,0 +1,111 @@
package com.atguigu.spark.core.practice
import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}
/**
* Compute the Top 10 hot categories
*/
object example1_other {
def main(args: Array[String]): Unit = {
/**
* Problems with the first approach:
* - the source RDD is recomputed for each metric (no reuse), hence the cache() below
* - cogroup introduces an inefficient shuffle
*/
val sparkConf = new SparkConf().setMaster("local[*]").setAppName("example1")
val sc: SparkContext = new SparkContext(sparkConf)
//2019-05-05,85,e2eef06e-beaa-4b49-acaf-38e057e1cd6e,31,2019-05-05 02:54:16,苹果,-1,-1,,,,,19
val sourceData: RDD[String] = sc.textFile("data/user_visit_action.txt")
sourceData.cache()
//TODO 1. Compute click, order, and payment counts separately
//1) Click count
//Filter out records that are not click events
val clickFilterRDD = sourceData.filter(
data => {
val words = data.split("_")
words(6) != "-1"
}
)
val clickRDD: RDD[(String, Int)] = clickFilterRDD.map {
data => {
val line: Array[String] = data.split("_")
(line(6),1)
}
}
val clickNumRDD = clickRDD.reduceByKey(_ + _)
//2) Order count
//Filter out records that are not order events
val filterOrderRDD = sourceData.filter(
data => {
val words = data.split("_")
words(8) != "null"
}
)
//Use flatMap to split the grouped ids into individual (id, 1) pairs like [(1,1),(2,1),(3,1)]
val orderNumRDD = filterOrderRDD.flatMap(
data => {
val words: Array[String] = data.split("_")
val ids = words(8).split(",")
ids.map(id => (id, 1))
}
).reduceByKey(_+_)
//3) Payment count
val filterPayRDD = sourceData.filter(
data => {
val words = data.split("_")
words(10) != "null"
}
)
//Use flatMap to split the grouped ids into individual (id, 1) pairs like [(1,1),(2,1),(3,1)]
val payNumRDD = filterPayRDD.flatMap(
data => {
val words: Array[String] = data.split("_")
val ids = words(10).split(",")
ids.map(id => (id, 1))
}
).reduceByKey(_+_)
//TODO 2. Merge the three into (id, (clicks, orders, payments))
//Instead of cogroup, tag each count with its slot in a 3-tuple and union the RDDs
val clickSumRDD = clickNumRDD.mapValues((_, 0, 0))
val orderSumRDD = orderNumRDD.mapValues((0, _, 0))
val paySumRDD = payNumRDD.mapValues((0, 0, _))
//The unioned RDD can contain the same id several times; merge those entries with reduceByKey
val sumRDD: RDD[(String, (Int, Int, Int))] = clickSumRDD.union(orderSumRDD).union(paySumRDD)
val result = sumRDD.reduceByKey {
case (tuple1, tuple2) => {
(tuple1._1 + tuple2._1, tuple1._2 + tuple2._2, tuple1._3 + tuple2._3)
}
}.sortBy(_._2, ascending = false).take(10)
result.foreach(println)
}
}
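
Why sortBy(_._2, ascending = false) ranks by clicks first: it relies on the implicit Ordering for (Int, Int, Int), which compares lexicographically, so orders and then payments only break ties. A quick check with illustrative values:

val sample = List(("a", (3, 1, 0)), ("b", (3, 2, 0)), ("c", (2, 9, 9)))
println(sample.sortBy(_._2)(Ordering[(Int, Int, Int)].reverse))
//List((b,(3,2,0)), (a,(3,1,0)), (c,(2,9,9)))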

View File

@ -0,0 +1,208 @@
package com.atguigu.spark.core.practice
import org.apache.spark.rdd.RDD
import org.apache.spark.util.AccumulatorV2
import org.apache.spark.{SparkConf, SparkContext}
import scala.collection.mutable
/**
* Top 10 hot categories implemented with an accumulator
*/
object example1_withAcc {
/**
* reduceByKey pre-aggregates before the shuffle, but a shuffle still happens.
* Doing the counting in an accumulator avoids the costly shuffle and speeds up the job.
*
* @param args
*/
def main(args: Array[String]): Unit = {
val sparkConf = new SparkConf().setMaster("local[*]").setAppName("top10")
val sc = new SparkContext(sparkConf)
val sourceData = sc.textFile("data/user_visit_action.txt")
sourceData.cache()
//TODO 1. Compute click, order, and payment counts separately
//1) Click count
//Filter out records that are not click events
val clickFilterRDD = sourceData.filter(
data => {
val words = data.split("_")
words(6) != "-1"
}
)
val clickRDD: RDD[(String, String)] = clickFilterRDD.map {
data => {
val line: Array[String] = data.split("_")
(line(6), "click")
}
}
//2) Order count
//Filter out records that are not order events
val filterOrderRDD = sourceData.filter(
data => {
val words = data.split("_")
words(8) != "null"
}
)
//Use flatMap to split the grouped ids into individual (id, "order") pairs
val orderNumRDD: RDD[(String, String)] = filterOrderRDD.flatMap(
data => {
val words: Array[String] = data.split("_")
val ids = words(8).split(",")
ids.map(id => (id, "order"))
}
)
//3) Payment count
val filterPayRDD = sourceData.filter(
data => {
val words = data.split("_")
words(10) != "null"
}
)
//Use flatMap to split the grouped ids into individual (id, "pay") pairs
val payNumRDD: RDD[(String, String)] = filterPayRDD.flatMap(
data => {
val words: Array[String] = data.split("_")
val ids = words(10).split(",")
ids.map(id => (id, "pay"))
}
)
val accumulator = new HotCategoryAccumulator
sc.register(accumulator, "hotCategoryAccumulator")
// clickRDD.union(orderNumRDD).union(payNumRDD).collect().foreach(println(_))
clickRDD.union(orderNumRDD).union(payNumRDD).foreach {
case (id, actionId) => {
accumulator.add((id, actionId))
}
}
val total: mutable.Map[String, HotCategory] = accumulator.value
total.toList.sortWith {
case (iter1, iter2) => {
val category1 = iter1._2
val category2 = iter2._2
if (category1.clickNum > category2.clickNum) {
true
} else if (category1.clickNum < category2.clickNum) {
false
} else {
if (category1.orderNum > category2.orderNum) {
true
} else if (category1.orderNum < category2.orderNum) {
false
} else {
if (category1.payNum >= category2.payNum) {
true
} else {
false
}
}
}
}
}.map{
case(cid,hotCategory) =>{
(cid,(hotCategory.clickNum,hotCategory.orderNum,hotCategory.payNum))
}
}.take(10).foreach(println(_))
// println(accumulator.value)
sc.stop()
}
/**
* Case class holding id, clickNum, orderNum, payNum
*
* @param cid category id
* @param clickNum click count
* @param orderNum order count
* @param payNum payment count
*/
case class HotCategory(cid: String, var clickNum: Int, var orderNum: Int, var payNum: Int)
/**
* Custom accumulator:
* 1. extend AccumulatorV2 with type parameters
* IN: (category id, action type)
* OUT: mutable.Map[String, HotCategory]
* 2. override the six required methods
*/
class HotCategoryAccumulator extends AccumulatorV2[(String, String), mutable.Map[String, HotCategory]] {
private var hcMap = mutable.Map[String, HotCategory]()
override def isZero: Boolean = {
hcMap.isEmpty
}
override def copy(): AccumulatorV2[(String, String), mutable.Map[String, HotCategory]] = {
new HotCategoryAccumulator
}
override def reset(): Unit = {
hcMap.clear()
}
override def add(v: (String, String)): Unit = {
val cid = v._1
val actionId = v._2
val category = hcMap.getOrElse(cid, HotCategory(cid, 0, 0, 0))
if (actionId == "click") {
category.clickNum += 1
} else if (actionId == "order") {
category.orderNum += 1
} else if (actionId == "pay") {
category.payNum += 1
}
hcMap.update(cid, category)
}
override def merge(other: AccumulatorV2[(String, String), mutable.Map[String, HotCategory]]): Unit = {
val map1 = this.hcMap
val otherMap: mutable.Map[String, HotCategory] = other.value
otherMap.foreach {
case (cid, category1) => {
val category = map1.getOrElse(cid, HotCategory(cid, 0, 0, 0))
category.clickNum += category1.clickNum
category.orderNum += category1.orderNum
category.payNum += category1.payNum
map1.update(cid, category)
}
}
}
override def value: mutable.Map[String, HotCategory] = hcMap
}
}
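
The nested if/else comparator above encodes exactly the lexicographic ordering of (clickNum, orderNum, payNum). Assuming the same total map, an equivalent ranking in a few lines:

val top10 = total.toList
.sortBy { case (_, hc) => (hc.clickNum, hc.orderNum, hc.payNum) }(Ordering[(Int, Int, Int)].reverse)
.take(10)
.map { case (cid, hc) => (cid, (hc.clickNum, hc.orderNum, hc.payNum)) }

Note also that the accumulator is read only after foreach, an action, so each record contributes exactly once; reading it after a mere transformation would risk under- or over-counting.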

View File

@ -0,0 +1,247 @@
package com.atguigu.spark.core.practice
import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.util.AccumulatorV2
import scala.collection.mutable
import scala.util.control.Breaks.{break, breakable}
object example2 {
val sparkConf = new SparkConf().setMaster("local[*]").setAppName("top10")
val sc = new SparkContext(sparkConf)
def main(args: Array[String]): Unit = {
val sourceData = sc.textFile("data/user_visit_action.txt")
val Top10ids = Top10Ids(sourceData = sourceData)
//TODO 1. First filter: keep only records that involve a Top 10 category
val filterRDD = sourceData.filter {
lines => {
val words = lines.split("_")
if (words(6) != "-1") {
Top10ids.contains(words(6))
} else if (words(8) != "null") {
val ids = words(8).split(",")
ContainsAnyOne(Top10ids, ids)
} else if (words(10) != "null") {
val ids = words(10).split(",")
ContainsAnyOne(Top10ids, ids)
} else {
false
}
}
}
// filterRDD.collect().foreach(println(_))
//TODO 2. Split multi-id fields into individual (categoryId, sessionId) pairs
val mapRDD: RDD[(String, String)] = filterRDD.flatMap {
line => {
val words = line.split("_")
if (words(6) != "-1") {
Array((words(6), words(2)))
} else if (words(8) != "null") {
val ids = words(8).split(",")
val tuples: Array[(String, String)] = ids.map((_, words(2)))
tuples
} else if (words(10) != "null") {
val ids = words(10).split(",")
ids.map((_, words(2)))
} else {
Array(("-1", words(2)))
}
}
}
//mapRDD.collect().foreach(println(_))
//TODO 3. Second filter: keep only pairs whose category is in the Top 10
val totalData = mapRDD.filter {
data => {
Top10ids.contains(data._1)
}
}
//TODO 4. Reshape and compute the Top 10 sessions for each category
totalData.map{
data =>{
(data._1+"/"+data._2,1)
}
}.reduceByKey(_+_)
.map{
case (data,sum) =>{
val strings = data.split("/")
(strings(0),(strings(1),sum))
}
}.groupByKey()
.mapValues {
data => {
data.toList.sortBy(_._2)(Ordering.Int.reverse).take(10)
}
}.collect().foreach(println(_))
//(20,List((22e78a14-c5eb-45fe-a67d-2ce538814d98,13), (4509c42c-3aa3-4d28-84c6-5ed27bbf2444,13), (329b966c-d61b-46ad-949a-7e37142d384a,10), (632972a4-f811-4000-b920-dc12ea803a41,10), (215bdee7-db27-458d-80f4-9088d2361a2e,10), (9bdc044f-8593-49fc-bbf0-14c28f901d42,9), (4869238b-21b0-4bf5-b455-6ac3251381ac,9), (8371a0f6-df4a-4e64-8f75-1c17db255cdf,9), (cde33446-095b-433c-927b-263ba7cd102a,9), (5e3545a0-1521-4ad6-91fe-e792c20c46da,9)))
//(19,List((fde62452-7c09-4733-9655-5bd3fb705813,14), (85157915-aa25-4a8d-8ca0-9da1ee67fa70,10), (d0398e80-ad1d-4c36-b608-6ce0de1c7c85,10), (199f8e1d-db1a-4174-b0c2-ef095aaef3ee,10), (4d93913f-a892-490d-aa58-3a74b9099e29,10), (a41bc6ea-b3e3-47ce-98af-48169da7c91b,10), (329b966c-d61b-46ad-949a-7e37142d384a,10), (d56a53ff-a23e-404b-9919-5b7ef3df2664,9), (66c96daa-0525-4e1b-ba55-d38a4b462b97,9), (5e3545a0-1521-4ad6-91fe-e792c20c46da,9)))
//(15,List((632972a4-f811-4000-b920-dc12ea803a41,13), (4509c42c-3aa3-4d28-84c6-5ed27bbf2444,11), (329b966c-d61b-46ad-949a-7e37142d384a,11),
}
def ContainsAnyOne(list1:List[String],list2:Array[String]): Boolean ={
var flag :Boolean =false
breakable{
for (item <- list2){
if(list1.contains(item)){
flag=true
break()
}
}
}
flag
}
def Top10Ids(sourceData:RDD[String]):List[(String)] ={
val accumulator = new HotCategoryAccumulator
sc.register(accumulator, "HotCategoryAccumulator")
sourceData.foreach(
line => {
val words = line.split("_")
if (words(6) != "-1") {
accumulator.add((words(6), "click"))
}
if (words(8) != "null") {
val ids = words(8).split(",")
ids.foreach{
id =>{
accumulator.add((id, "order"))
}
}
}
if (words(10) != "null") {
val ids = words(10).split(",")
ids.foreach{
id =>{
accumulator.add((id, "pay"))
}
}
}
}
)
val value: mutable.Map[String, HotCategory] = accumulator.value
val cids: List[(String)] = value.toList.sortWith {
case (iter1, iter2) => {
val category1 = iter1._2
val category2 = iter2._2
if (category1.clickNum > category2.clickNum) {
true
} else if (category1.clickNum < category2.clickNum) {
false
} else {
if (category1.orderNum > category2.orderNum) {
true
} else if (category1.orderNum < category2.orderNum) {
false
} else {
if (category1.payNum >= category2.payNum) {
true
} else {
false
}
}
}
}
}.map {
case (cid, hotCategory) => {
cid
}
}.take(10)
cids
}
/**
* Case class holding id, clickNum, orderNum, payNum
*
* @param cid category id
* @param clickNum click count
* @param orderNum order count
* @param payNum payment count
*/
case class HotCategory(cid: String, var clickNum: Int, var orderNum: Int, var payNum: Int)
/**
* Custom accumulator:
* 1. extend AccumulatorV2 with type parameters
* IN: (category id, action type)
* OUT: mutable.Map[String, HotCategory]
* 2. override the six required methods
*/
class HotCategoryAccumulator extends AccumulatorV2[(String, String), mutable.Map[String, HotCategory]] {
private var hcMap = mutable.Map[String, HotCategory]()
override def isZero: Boolean = {
hcMap.isEmpty
}
override def copy(): AccumulatorV2[(String, String), mutable.Map[String, HotCategory]] = {
new HotCategoryAccumulator
}
override def reset(): Unit = {
hcMap.clear()
}
override def add(v: (String, String)): Unit = {
val cid = v._1
val actionId = v._2
val category = hcMap.getOrElse(cid, HotCategory(cid, 0, 0, 0))
if (actionId == "click") {
category.clickNum += 1
} else if (actionId == "order") {
category.orderNum += 1
} else if (actionId == "pay") {
category.payNum += 1
}
hcMap.update(cid, category)
}
override def merge(other: AccumulatorV2[(String, String), mutable.Map[String, HotCategory]]): Unit = {
val map1 = this.hcMap
val otherMap: mutable.Map[String, HotCategory] = other.value
otherMap.foreach {
case (cid, category1) => {
val category = map1.getOrElse(cid, HotCategory(cid, 0, 0, 0))
category.clickNum += category1.clickNum
category.orderNum += category1.orderNum
category.payNum += category1.payNum
map1.update(cid, category)
}
}
}
override def value: mutable.Map[String, HotCategory] = hcMap
}
}
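
One possible refinement: the Top10ids list is captured by the filter and flatMap closures and shipped with every task. For a larger list, broadcasting it once per executor is the usual improvement; a sketch of the click branch only (the val names are illustrative):

val top10Bc = sc.broadcast(Top10Ids(sourceData))
val clickHits = sourceData.filter { line =>
val words = line.split("_")
words(6) != "-1" && top10Bc.value.contains(words(6))
}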

View File

@ -0,0 +1,158 @@
package com.atguigu.spark.core.practice
import org.apache.spark.util.AccumulatorV2
import org.apache.spark.{SparkConf, SparkContext}
import scala.collection.mutable
/**
* Compute the page single-hop conversion rate
*/
object example3 {
def main(args: Array[String]): Unit = {
val sparkConf = new SparkConf().setMaster("local[*]").setAppName("example3")
val sc = new SparkContext(sparkConf)
val sourceData = sc.textFile("data/user_visit_action.txt")
//TODO Map the data into ((user, sessionId), (time, pageId))
//TODO sessionId is included so that actions from different sessions are not chained together
val mapRDD = sourceData.map {
line => {
val words = line.split("_")
((words(1),words(2)), (words(4), words(3)))
}
}
// mapRDD.collect().foreach(println(_))
/*
((61,de45a822-fd78-42df-a80c-ef367c66b64f),(2019-07-27 19:43:41,18))
((61,de45a822-fd78-42df-a80c-ef367c66b64f),(2019-07-27 19:43:48,9))
((61,de45a822-fd78-42df-a80c-ef367c66b64f),(2019-07-27 19:43:54,2))
*/
//TODO Group by user and sessionId first
val sortedRDD = mapRDD.groupByKey()
.mapValues {
iter => {
iter.toList.sortBy(_._1).map {
case (time, pageId) => {
pageId
}
}
}
}
// sortedRDD.collect().foreach(println(_))
/*
((38,2c5c5546-b50f-49f7-8eb4-48a85c05e8b3),List(2, 46, 42, 16, 3, 42, 24, 27, 4, 37, 4, 34, 33, 37, 33))
((21,c2677a54-c29f-49ef-8eb3-9a3bc939c7ce),List(39, 29, 47, 21, 5))
*/
val indexRDD = sortedRDD.flatMap {
case ((userId, sessionId), list) => {
val list1 = list.tail //drop the head so zip pairs each page with its successor
// println("list:",list)
// println("list1:",list1)
list.zip(list1)
}
}
// indexRDD.collect().foreach(println(_))
/*
result of the per-session zip:
List((2,46), (46,42), (42,16), (16,3), (3,42), (42,24), (24,27), (27,4), (4,37), (37,4), (4,34), (34,33), (33,37), (37,33))
List((39,29), (29,47), (47,21), (21,5))
after flatMap:
(29,12)
(12,29)
(29,27)
(27,21)
(21,30)
(30,8)
*/
indexRDD.cache()
val accumulator = new MapAccumulator
sc.register(accumulator,"MapAccumulator")
//TODO Count hops into each page (the denominator)
indexRDD.foreach{
case tuple =>{
accumulator.add(tuple)
}
}
val map: mutable.Map[String, Int] = accumulator.value
//TODO Count hops for each (from, to) page pair (the numerator)
val result = indexRDD.groupBy(word => word).map {
case ((page1, page2), iter) => {
val in: Double = map.getOrElse(page1, 9999999).toDouble //large default guards against division by zero
val out: Double = iter.toList.length.toDouble
// println(s"in:$in out:$out")
(page1 + "-" + page2, s"${page1}-${page2}的单跳转换率为${out / in}")
}
}
result.collect().foreach(println(_))
/*
(6-18,single-hop conversion rate of 6-18: 0.01723625557206538)
(45-37,single-hop conversion rate of 45-37: 0.019375361480624638)
(35-49,single-hop conversion rate of 35-49: 0.01752370008618213)
*/
sc.stop()
}
class MapAccumulator extends AccumulatorV2[(String,String),mutable.Map[String,Int]]{
private var map =mutable.Map[String,Int]()
override def isZero: Boolean = {
map.isEmpty
}
override def copy(): AccumulatorV2[(String, String), mutable.Map[String, Int]] = {
new MapAccumulator
}
override def reset(): Unit = {
map.clear()
}
override def add(v: (String, String)): Unit = {
val key = v._2 //count hops into the destination page
val sum = map.getOrElse(key, 0)
map.update(key, sum + 1)
}
override def merge(other: AccumulatorV2[(String, String), mutable.Map[String, Int]]): Unit = {
val map1 = other.value
val maps = this.map
map1.foreach{
case (key,sum1) =>{
val sum = maps.getOrElse(key, 0)
maps.update(key,sum+sum1)
}
}
}
override def value: mutable.Map[String, Int] = map
}
}
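
For reference, pairing each page with its successor via tail-and-zip is the same as taking a sliding window of size 2. A one-line check with illustrative page ids:

println(List("1", "2", "3", "4").sliding(2).collect { case List(a, b) => (a, b) }.toList)
//List((1,2), (2,3), (3,4))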

View File

@ -0,0 +1,142 @@
package com.atguigu.spark.sql.practice
import org.apache.spark.SparkConf
import org.apache.spark.sql.{Encoder, Encoders, SparkSession, functions}
import org.apache.spark.sql.expressions.Aggregator
import scala.collection.mutable
import scala.collection.mutable.ListBuffer
/**
* Use a custom UDAF to compute, for each area, the Top 3 hot products
* together with each city's share of the clicks, e.g.:
* Area Product Clicks City remark
* 华北 商品A 100000 北京21.2%,天津13.2%,其他65.6%
* 华北 商品P 80200 北京63.0%,太原10%,其他27.0%
* 华北 商品M 40000 北京63.0%,太原10%,其他27.0%
* 东北 商品J 92000 大连28%,辽宁17.0%,其他55.0%
*
*/
object example1 {
def main(args: Array[String]): Unit = {
val sparkConf = new SparkConf().setMaster("local[*]").setAppName("example1")
val spark = SparkSession.builder().enableHiveSupport().config(sparkConf).getOrCreate()
spark.sql(
"""
|select a.*,b.product_name,c.city_name,c.area
|from user_visit_action a
|left join product_info b on a.click_product_id =b.product_id
|left join city_info c on a.city_id=c.city_id
|where click_product_id > -1
|""".stripMargin
).createOrReplaceTempView("t1")
spark.udf.register("combine", functions.udaf(new CombineAggregator()))
spark.sql(
"""
|select area,product_name,count(click_product_id) as clickCnt,combine(city_name) as city_remark
|from t1
|group by area,product_name
|""".stripMargin
).createOrReplaceTempView("t2")
//Rank products by click count within each area
spark.sql(
"""
|select *,rank() over(partition by area order by clickCnt desc) as rank
|from t2
|""".stripMargin).createOrReplaceTempView("t3")
//Keep the top three
spark.sql(
"""
|select *
|from t3 where rank <= 3
|""".stripMargin).show(false)
spark.close()
}
/**
* Case class used as the UDAF's intermediate buffer
*
* @param total total click count across all cities
* @param cityMap click count per city
*/
case class Buffer(var total: Long, var cityMap: mutable.Map[String, Long])
/**
* Custom UDAF implementing the per-group aggregation
*/
class CombineAggregator extends Aggregator[String, Buffer, String] {
override def zero: Buffer = {
Buffer(0, mutable.Map[String, Long]())
}
//Merge one input row into the partition-local buffer
override def reduce(b: Buffer, a: String): Buffer = {
val city_name = a
val total = b.total + 1
val city_sum = b.cityMap.getOrElse(city_name, 0L) + 1
b.total = total
b.cityMap.update(city_name, city_sum)
b
}
//Merge buffers across partitions
override def merge(b1: Buffer, b2: Buffer): Buffer = {
b1.total = b1.total + b2.total
b2.cityMap.foreach {
case (city_name, sum2) => {
val sum1 = b1.cityMap.getOrElse(city_name, 0L)
b1.cityMap.update(city_name, sum1 + sum2)
}
}
b1
}
//Produce the final output string
override def finish(reduction: Buffer): String = {
//List holding the formatted per-city shares
val resultList = new ListBuffer[String]
//Sort cities by clicks and keep the top two; with more than two cities the rest are lumped into 其他 (others)
val needed: List[(String, Long)] = reduction.cityMap.toList.sortWith {
(left, right) => {
left._2 > right._2
}
}.take(2)
val flag = reduction.cityMap.size > 2
var rsum = 0.0
needed.foreach {
case (city, sum) => {
val ratio = sum.toDouble * 100 / reduction.total
resultList.append(f"$city:$ratio%.1f%%")
rsum += ratio
}
}
if (flag) {
resultList.append(f"其他:${100 - rsum}%.1f%%")
}
resultList.mkString(",")
}
override def bufferEncoder: Encoder[Buffer] = Encoders.product
override def outputEncoder: Encoder[String] = Encoders.STRING
}
}
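
To sanity-check finish(), it can be fed a hand-built buffer; the numbers below are illustrative only:

val b = Buffer(1000L, mutable.Map("北京" -> 632L, "上海" -> 268L, "天津" -> 100L))
println(new CombineAggregator().finish(b))
//北京:63.2%,上海:26.8%,其他:10.0%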

View File

@ -0,0 +1,127 @@
--DWT layer: widens the DWS layer with metrics over multiple time spans
--
set hive.exec.dynamic.partition=true;
set hive.exec.dynamic.partition.mode=nonstrict;
--
DROP TABLE IF EXISTS dwt_user_topic;
CREATE EXTERNAL TABLE dwt_user_topic
(
`user_id` STRING COMMENT 'user id',
`login_date_first` STRING COMMENT 'first active date',
`login_date_last` STRING COMMENT 'last active date',
`login_date_1d_count` BIGINT COMMENT 'login count, last 1 day',
`login_last_1d_day_count` BIGINT COMMENT 'login days, last 1 day',
`login_last_7d_count` BIGINT COMMENT 'login count, last 7 days',
`login_last_7d_day_count` BIGINT COMMENT 'login days, last 7 days',
`login_last_30d_count` BIGINT COMMENT 'login count, last 30 days',
`login_last_30d_day_count` BIGINT COMMENT 'login days, last 30 days',
`login_count` BIGINT COMMENT 'total login count',
`login_day_count` BIGINT COMMENT 'total login days',
`order_date_first` STRING COMMENT 'first order time',
`order_date_last` STRING COMMENT 'last order time',
`order_last_1d_count` BIGINT COMMENT 'order count, last 1 day',
`order_activity_last_1d_count` BIGINT COMMENT 'orders joining promotions, last 1 day',
`order_activity_reduce_last_1d_amount` DECIMAL(16, 2) COMMENT 'order discount amount (promotions), last 1 day',
`order_coupon_last_1d_count` BIGINT COMMENT 'orders using coupons, last 1 day',
`order_coupon_reduce_last_1d_amount` DECIMAL(16, 2) COMMENT 'order discount amount (coupons), last 1 day',
`order_last_1d_original_amount` DECIMAL(16, 2) COMMENT 'original order amount, last 1 day',
`order_last_1d_final_amount` DECIMAL(16, 2) COMMENT 'final order amount, last 1 day',
`order_last_7d_count` BIGINT COMMENT 'order count, last 7 days',
`order_activity_last_7d_count` BIGINT COMMENT 'orders joining promotions, last 7 days',
`order_activity_reduce_last_7d_amount` DECIMAL(16, 2) COMMENT 'order discount amount (promotions), last 7 days',
`order_coupon_last_7d_count` BIGINT COMMENT 'orders using coupons, last 7 days',
`order_coupon_reduce_last_7d_amount` DECIMAL(16, 2) COMMENT 'order discount amount (coupons), last 7 days',
`order_last_7d_original_amount` DECIMAL(16, 2) COMMENT 'original order amount, last 7 days',
`order_last_7d_final_amount` DECIMAL(16, 2) COMMENT 'final order amount, last 7 days',
`order_last_30d_count` BIGINT COMMENT 'order count, last 30 days',
`order_activity_last_30d_count` BIGINT COMMENT 'orders joining promotions, last 30 days',
`order_activity_reduce_last_30d_amount` DECIMAL(16, 2) COMMENT 'order discount amount (promotions), last 30 days',
`order_coupon_last_30d_count` BIGINT COMMENT 'orders using coupons, last 30 days',
`order_coupon_reduce_last_30d_amount` DECIMAL(16, 2) COMMENT 'order discount amount (coupons), last 30 days',
`order_last_30d_original_amount` DECIMAL(16, 2) COMMENT 'original order amount, last 30 days',
`order_last_30d_final_amount` DECIMAL(16, 2) COMMENT 'final order amount, last 30 days',
`order_count` BIGINT COMMENT 'total order count',
`order_activity_count` BIGINT COMMENT 'total orders joining promotions',
`order_activity_reduce_amount` DECIMAL(16, 2) COMMENT 'total order discount amount (promotions)',
`order_coupon_count` BIGINT COMMENT 'total orders using coupons',
`order_coupon_reduce_amount` DECIMAL(16, 2) COMMENT 'total order discount amount (coupons)',
`order_original_amount` DECIMAL(16, 2) COMMENT 'total original order amount',
`order_final_amount` DECIMAL(16, 2) COMMENT 'total final order amount',
`payment_date_first` STRING COMMENT 'first payment time',
`payment_date_last` STRING COMMENT 'last payment time',
`payment_last_1d_count` BIGINT COMMENT 'payment count, last 1 day',
`payment_last_1d_amount` DECIMAL(16, 2) COMMENT 'payment amount, last 1 day',
`payment_last_7d_count` BIGINT COMMENT 'payment count, last 7 days',
`payment_last_7d_amount` DECIMAL(16, 2) COMMENT 'payment amount, last 7 days',
`payment_last_30d_count` BIGINT COMMENT 'payment count, last 30 days',
`payment_last_30d_amount` DECIMAL(16, 2) COMMENT 'payment amount, last 30 days',
`payment_count` BIGINT COMMENT 'total payment count',
`payment_amount` DECIMAL(16, 2) COMMENT 'total payment amount',
`refund_order_last_1d_count` BIGINT COMMENT 'order refund count, last 1 day',
`refund_order_last_1d_num` BIGINT COMMENT 'order refund items, last 1 day',
`refund_order_last_1d_amount` DECIMAL(16, 2) COMMENT 'order refund amount, last 1 day',
`refund_order_last_7d_count` BIGINT COMMENT 'order refund count, last 7 days',
`refund_order_last_7d_num` BIGINT COMMENT 'order refund items, last 7 days',
`refund_order_last_7d_amount` DECIMAL(16, 2) COMMENT 'order refund amount, last 7 days',
`refund_order_last_30d_count` BIGINT COMMENT 'order refund count, last 30 days',
`refund_order_last_30d_num` BIGINT COMMENT 'order refund items, last 30 days',
`refund_order_last_30d_amount` DECIMAL(16, 2) COMMENT 'order refund amount, last 30 days',
`refund_order_count` BIGINT COMMENT 'total order refund count',
`refund_order_num` BIGINT COMMENT 'total order refund items',
`refund_order_amount` DECIMAL(16, 2) COMMENT 'total order refund amount',
`refund_payment_last_1d_count` BIGINT COMMENT 'payment refund count, last 1 day',
`refund_payment_last_1d_num` BIGINT COMMENT 'payment refund items, last 1 day',
`refund_payment_last_1d_amount` DECIMAL(16, 2) COMMENT 'payment refund amount, last 1 day',
`refund_payment_last_7d_count` BIGINT COMMENT 'payment refund count, last 7 days',
`refund_payment_last_7d_num` BIGINT COMMENT 'payment refund items, last 7 days',
`refund_payment_last_7d_amount` DECIMAL(16, 2) COMMENT 'payment refund amount, last 7 days',
`refund_payment_last_30d_count` BIGINT COMMENT 'payment refund count, last 30 days',
`refund_payment_last_30d_num` BIGINT COMMENT 'payment refund items, last 30 days',
`refund_payment_last_30d_amount` DECIMAL(16, 2) COMMENT 'payment refund amount, last 30 days',
`refund_payment_count` BIGINT COMMENT 'total payment refund count',
`refund_payment_num` BIGINT COMMENT 'total payment refund items',
`refund_payment_amount` DECIMAL(16, 2) COMMENT 'total payment refund amount',
`cart_last_1d_count` BIGINT COMMENT 'add-to-cart count, last 1 day',
`cart_last_7d_count` BIGINT COMMENT 'add-to-cart count, last 7 days',
`cart_last_30d_count` BIGINT COMMENT 'add-to-cart count, last 30 days',
`cart_count` BIGINT COMMENT 'total add-to-cart count',
`favor_last_1d_count` BIGINT COMMENT 'favorite count, last 1 day',
`favor_last_7d_count` BIGINT COMMENT 'favorite count, last 7 days',
`favor_last_30d_count` BIGINT COMMENT 'favorite count, last 30 days',
`favor_count` BIGINT COMMENT 'total favorite count',
`coupon_last_1d_get_count` BIGINT COMMENT 'coupons claimed, last 1 day',
`coupon_last_1d_using_count` BIGINT COMMENT 'coupons used (order), last 1 day',
`coupon_last_1d_used_count` BIGINT COMMENT 'coupons used (payment), last 1 day',
`coupon_last_7d_get_count` BIGINT COMMENT 'coupons claimed, last 7 days',
`coupon_last_7d_using_count` BIGINT COMMENT 'coupons used (order), last 7 days',
`coupon_last_7d_used_count` BIGINT COMMENT 'coupons used (payment), last 7 days',
`coupon_last_30d_get_count` BIGINT COMMENT 'coupons claimed, last 30 days',
`coupon_last_30d_using_count` BIGINT COMMENT 'coupons used (order), last 30 days',
`coupon_last_30d_used_count` BIGINT COMMENT 'coupons used (payment), last 30 days',
`coupon_get_count` BIGINT COMMENT 'total coupons claimed',
`coupon_using_count` BIGINT COMMENT 'total coupons used (order)',
`coupon_used_count` BIGINT COMMENT 'total coupons used (payment)',
`appraise_last_1d_good_count` BIGINT COMMENT 'positive reviews, last 1 day',
`appraise_last_1d_mid_count` BIGINT COMMENT 'neutral reviews, last 1 day',
`appraise_last_1d_bad_count` BIGINT COMMENT 'negative reviews, last 1 day',
`appraise_last_1d_default_count` BIGINT COMMENT 'default reviews, last 1 day',
`appraise_last_7d_good_count` BIGINT COMMENT 'positive reviews, last 7 days',
`appraise_last_7d_mid_count` BIGINT COMMENT 'neutral reviews, last 7 days',
`appraise_last_7d_bad_count` BIGINT COMMENT 'negative reviews, last 7 days',
`appraise_last_7d_default_count` BIGINT COMMENT 'default reviews, last 7 days',
`appraise_last_30d_good_count` BIGINT COMMENT 'positive reviews, last 30 days',
`appraise_last_30d_mid_count` BIGINT COMMENT 'neutral reviews, last 30 days',
`appraise_last_30d_bad_count` BIGINT COMMENT 'negative reviews, last 30 days',
`appraise_last_30d_default_count` BIGINT COMMENT 'default reviews, last 30 days',
`appraise_good_count` BIGINT COMMENT 'total positive reviews',
`appraise_mid_count` BIGINT COMMENT 'total neutral reviews',
`appraise_bad_count` BIGINT COMMENT 'total negative reviews',
`appraise_default_count` BIGINT COMMENT 'total default reviews'
) COMMENT 'user topic wide table'
PARTITIONED BY (`dt` STRING)
STORED AS ORC
LOCATION '/warehouse/gmall/dwt/dwt_user_topic/'
TBLPROPERTIES ("orc.compress" = "snappy");
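
For context, with the dynamic-partition settings above each daily load lands in its dt partition, and downstream reads need no joins. A hedged usage sketch in Spark SQL (the partition value is an assumption for illustration):

spark.sql(
"""
|select user_id, login_last_7d_count, order_last_30d_final_amount
|from dwt_user_topic
|where dt = '2023-06-04'
|""".stripMargin).show(10, truncate = false)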

View File

@ -34,9 +34,51 @@
<artifactId>gson</artifactId>
<version>2.8.5</version>
</dependency>
<!-- <dependency>-->
<!-- <groupId>com.cqu</groupId>-->
<!-- <artifactId>ge</artifactId>-->
<!-- <version>1.0.1</version>-->
<!-- <scope>system</scope>-->
<!-- <systemPath>${basedir}/src/main/resources/lib/HistorianServiceAPI.jar</systemPath>-->
<!-- </dependency>-->
<!-- <dependency>-->
<!-- <groupId>com.cqu</groupId>-->
<!-- <artifactId>ge</artifactId>-->
<!-- <version>1.0.0</version>-->
<!-- </dependency>-->
<!-- https://mvnrepository.com/artifact/com.google.code.gson/gson -->
<!-- <dependency>-->
<!-- <groupId>com.google.code.gson</groupId>-->
<!-- <artifactId>gson</artifactId>-->
<!-- <version>2.8.5</version>-->
<!-- </dependency>-->
<!-- <dependency>-->
<!-- <groupId>junit</groupId>-->
<!-- <artifactId>junit</artifactId>-->
<!-- <version>4.12</version>-->
<!-- <scope>compile</scope>-->
<!-- </dependency>-->
</dependencies>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<version>3.1</version>
<configuration>
<source>1.8</source>
<target>1.8</target>
</configuration>
</plugin>
</plugins>
</build>
</project>

View File

@ -42,6 +42,20 @@ import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import com.ge.ip.hds.historian.API.ArchiveService;
import com.ge.ip.hds.historian.API.ArchiveServiceImpl;
import com.ge.ip.hds.historian.API.ConfigurationManager;
import com.ge.ip.hds.historian.DataContracts.Archive;
import com.ge.ip.hds.historian.DataContracts.ArchiveStatistics;
import com.ge.ip.hds.historian.DataContracts.HistorianOperationException;
import org.junit.*;
import java.util.List;
import static org.junit.Assert.assertTrue;
//import com.ge.ip.hds.historianjavaapitest.ReadClass;
public class ArchiveAPITest {
@ -146,8 +160,9 @@ public class ArchiveAPITest {
ArchiveServiceImpl instance = new ArchiveServiceImpl();
Archive result = instance.GetArchive(createdArchive.getName(), "User");
assertEquals(createdArchive.getName(), result.getName());
th.deleteArchive(createdArchive.getName());
@ -210,7 +225,9 @@ public class ArchiveAPITest {
ArchiveServiceImpl instance = new ArchiveServiceImpl();
Archive result = instance.AddArchive(archive);
assertEquals(archive.getName(), result.getName());
TestHelper th = new TestHelper();
th.deleteArchive(archive.getName());

View File

@ -0,0 +1,17 @@
package com.cqu.ge.test;
/**
* @BelongsProject: GE_Migrating_data
* @BelongsPackage: com.cqu.ge.test
* @Author: marklue
* @CreateTime: 2023/5/3 14:28
* @Description: TODO
* @Version: 1.0
*/
public class try1 {
public static void main(String[] args) {
//Scratch entry point; nothing to run yet.
}
}

Leecode/hs_err_pid12532.log Normal file
View File

@ -0,0 +1,264 @@
#
# A fatal error has been detected by the Java Runtime Environment:
#
# EXCEPTION_ACCESS_VIOLATION (0xc0000005) at pc=0x000000006abea148, pid=12532, tid=0x0000000000004e18
#
# JRE version: Java(TM) SE Runtime Environment (8.0_311-b11) (build 1.8.0_311-b11)
# Java VM: Java HotSpot(TM) 64-Bit Server VM (25.311-b11 mixed mode windows-amd64 compressed oops)
# Problematic frame:
# V [jvm.dll+0x19a148]
#
# Failed to write core dump. Minidumps are not enabled by default on client versions of Windows
#
# If you would like to submit a bug report, please visit:
# http://bugreport.java.com/bugreport/crash.jsp
#
--------------- T H R E A D ---------------
Current thread (0x000002d51ddc9000): JavaThread "JDWP Transport Listener: dt_socket" daemon [_thread_in_vm, id=19992, stack(0x000000ec02500000,0x000000ec02600000)]
siginfo: ExceptionCode=0xc0000005, reading address 0x000002d51d8e2018
Registers:
RAX=0x000002d51d7786d0, RBX=0x0000000000000003, RCX=0x000002d51d8e2008, RDX=0x000002d51d779200
RSP=0x000000ec025ff7b0, RBP=0x000000ec025ff829, RSI=0x00000000000000b6, RDI=0x000002d51d779268
R8 =0x000002d51d779aa8, R9 =0x00007ffedf820000, R10=0x000002d51d779211, R11=0x000002d51fea87d9
R12=0x000002d51fea87d8, R13=0x000000ec025ff8b0, R14=0x000000000000005b, R15=0x00000000000000b6
RIP=0x000000006abea148, EFLAGS=0x0000000000010202
Top of Stack: (sp=0x000000ec025ff7b0)
0x000000ec025ff7b0: 000002d51ddc9000 00000000000000b6
0x000000ec025ff7c0: 0000000000000003 000002d51ddc9000
0x000000ec025ff7d0: 000002d51d779268 000002d51d779268
0x000000ec025ff7e0: 000002d51d779268 000002d51d779268
0x000000ec025ff7f0: 000002d51ddc9000 000002d51d779268
0x000000ec025ff800: 000002d51ddc9000 000002d51d779268
0x000000ec025ff810: 000002d51ddc9000 0000005b00000058
0x000000ec025ff820: 000000b600000072 000000006ac50000
0x000000ec025ff830: 00000000000000b6 0000000000000000
0x000000ec025ff840: 0000000000000000 0000000000000072
0x000000ec025ff850: 000000ec025ff9c0 0000000000000000
0x000000ec025ff860: 0000000000000000 000000ec025ff9c8
0x000000ec025ff870: 000002d51ddc9000 000002d51d779268
0x000000ec025ff880: 0000000000000000 000000006abef64f
0x000000ec025ff890: 000000ec025ff8b0 000002d51fea87d8
0x000000ec025ff8a0: 000002d507c50a01 000002d51d779268
Instructions: (pc=0x000000006abea148)
0x000000006abea128: 10 84 d2 74 0b 41 8b 45 31 f7 d0 48 63 c8 eb 05
0x000000006abea138: 41 0f b7 4d 31 4c 8b 6d 67 48 c1 e1 05 49 03 c8
0x000000006abea148: 48 8b 49 10 44 8b 75 f3 0f b6 c1 66 c1 e0 08 66
0x000000006abea158: c1 e9 08 66 0b c1 66 41 89 44 24 01 84 d2 0f 84
Register to memory mapping:
RAX=0x000002d51d7786d0 is pointing into metadata
RBX=0x0000000000000003 is an unknown value
RCX=0x000002d51d8e2008 is an unknown value
RDX=0x000002d51d779200 is pointing into metadata
RSP=0x000000ec025ff7b0 is pointing into the stack for thread: 0x000002d51ddc9000
RBP=0x000000ec025ff829 is pointing into the stack for thread: 0x000002d51ddc9000
RSI=0x00000000000000b6 is an unknown value
RDI={method} {0x000002d51d779270} 'test' '()V' in 'com/markilue/leecode/listnode/MyLinkedList'
R8 =0x000002d51d779aa8 is pointing into metadata
R9 =0x00007ffedf820000 is an unknown value
R10=0x000002d51d779211 is pointing into metadata
R11=0x000002d51fea87d9 is an unknown value
R12=0x000002d51fea87d8 is an unknown value
R13=0x000000ec025ff8b0 is pointing into the stack for thread: 0x000002d51ddc9000
R14=0x000000000000005b is an unknown value
R15=0x00000000000000b6 is an unknown value
Stack: [0x000000ec02500000,0x000000ec02600000], sp=0x000000ec025ff7b0, free space=1021k
Native frames: (J=compiled Java code, j=interpreted, Vv=VM code, C=native code)
V [jvm.dll+0x19a148]
V [jvm.dll+0x19f64f]
V [jvm.dll+0x3408eb]
C [jdwp.dll+0x4296]
C [jdwp.dll+0xef91]
C [jdwp.dll+0x1f4f5]
C [jdwp.dll+0x1f45e]
V [jvm.dll+0x1ba3aa]
V [jvm.dll+0x23df22]
V [jvm.dll+0x29253c]
C [ucrtbase.dll+0x21bb2]
C [KERNEL32.DLL+0x17034]
C [ntdll.dll+0x52651]
--------------- P R O C E S S ---------------
Java Threads: ( => current thread )
0x000002d51fc40800 JavaThread "Service Thread" daemon [_thread_blocked, id=21716, stack(0x000000ec02c00000,0x000000ec02d00000)]
0x000002d51fba9000 JavaThread "C1 CompilerThread3" daemon [_thread_blocked, id=14316, stack(0x000000ec02b00000,0x000000ec02c00000)]
0x000002d51fb9e800 JavaThread "C2 CompilerThread2" daemon [_thread_blocked, id=19840, stack(0x000000ec02a00000,0x000000ec02b00000)]
0x000002d51fb9d800 JavaThread "C2 CompilerThread1" daemon [_thread_blocked, id=16824, stack(0x000000ec02900000,0x000000ec02a00000)]
0x000002d51fb9b000 JavaThread "C2 CompilerThread0" daemon [_thread_blocked, id=12300, stack(0x000000ec02800000,0x000000ec02900000)]
0x000002d51faf3800 JavaThread "JDWP Command Reader" daemon [_thread_in_native, id=2456, stack(0x000000ec02700000,0x000000ec02800000)]
0x000002d51faf0800 JavaThread "JDWP Event Helper Thread" daemon [_thread_blocked, id=9096, stack(0x000000ec02600000,0x000000ec02700000)]
=>0x000002d51ddc9000 JavaThread "JDWP Transport Listener: dt_socket" daemon [_thread_in_vm, id=19992, stack(0x000000ec02500000,0x000000ec02600000)]
0x000002d51ddbc000 JavaThread "Attach Listener" daemon [_thread_blocked, id=18836, stack(0x000000ec02400000,0x000000ec02500000)]
0x000002d51dd67800 JavaThread "Signal Dispatcher" daemon [_thread_blocked, id=21684, stack(0x000000ec02300000,0x000000ec02400000)]
0x000002d51dd39000 JavaThread "Finalizer" daemon [_thread_blocked, id=10800, stack(0x000000ec02200000,0x000000ec02300000)]
0x000002d51dd30800 JavaThread "Reference Handler" daemon [_thread_blocked, id=21884, stack(0x000000ec02100000,0x000000ec02200000)]
0x000002d507b9a800 JavaThread "main" [_thread_blocked, id=16780, stack(0x000000ec01700000,0x000000ec01800000)]
Other Threads:
0x000002d51dd06800 VMThread [stack: 0x000000ec02000000,0x000000ec02100000] [id=3664]
0x000002d51fc59000 WatcherThread [stack: 0x000000ec02d00000,0x000000ec02e00000] [id=15732]
VM state:not at safepoint (normal execution)
VM Mutex/Monitor currently owned by a thread: None
heap address: 0x0000000081c00000, size: 2020 MB, Compressed Oops mode: 32-bit
Narrow klass base: 0x0000000000000000, Narrow klass shift: 3
Compressed class space size: 1073741824 Address: 0x0000000100000000
Heap:
PSYoungGen total 38400K, used 10018K [0x00000000d5f00000, 0x00000000d8980000, 0x0000000100000000)
eden space 33280K, 30% used [0x00000000d5f00000,0x00000000d68c8bd8,0x00000000d7f80000)
from space 5120K, 0% used [0x00000000d8480000,0x00000000d8480000,0x00000000d8980000)
to space 5120K, 0% used [0x00000000d7f80000,0x00000000d7f80000,0x00000000d8480000)
ParOldGen total 87552K, used 0K [0x0000000081c00000, 0x0000000087180000, 0x00000000d5f00000)
object space 87552K, 0% used [0x0000000081c00000,0x0000000081c00000,0x0000000087180000)
Metaspace used 5032K, capacity 5348K, committed 5504K, reserved 1056768K
class space used 579K, capacity 595K, committed 640K, reserved 1048576K
Card table byte_map: [0x000002d518910000,0x000002d518d10000] byte_map_base: 0x000002d518502000
Marking Bits: (ParMarkBitMap*) 0x000000006b238030
Begin Bits: [0x000002d518fc0000, 0x000002d51af50000)
End Bits: [0x000002d51af50000, 0x000002d51cee0000)
Polling page: 0x000002d507cf0000
CodeCache: size=245760Kb used=1754Kb max_used=1771Kb free=244005Kb
bounds [0x000002d509550000, 0x000002d5097c0000, 0x000002d518550000]
total_blobs=523 nmethods=259 adapters=185
compilation: enabled
Compilation events (10 events):
Event: 0.989 Thread 0x000002d51fba9000 256 3 java.io.File::isInvalid (47 bytes)
Event: 0.989 Thread 0x000002d51fba9000 nmethod 256 0x000002d5096ffdd0 code [0x000002d5096fff40, 0x000002d509700390]
Event: 0.989 Thread 0x000002d51fb9e800 257 4 sun.misc.MetaIndex::mayContain (51 bytes)
Event: 0.991 Thread 0x000002d51fb9b000 nmethod 243 0x000002d509704390 code [0x000002d5097045c0, 0x000002d509705850]
Event: 0.994 Thread 0x000002d51fba9000 258 3 java.lang.Character::charCount (12 bytes)
Event: 0.994 Thread 0x000002d51fba9000 nmethod 258 0x000002d509704010 code [0x000002d509704160, 0x000002d5097042f8]
Event: 0.997 Thread 0x000002d51fb9e800 nmethod 257 0x000002d509706e10 code [0x000002d509706f60, 0x000002d509707498]
Event: 1.000 Thread 0x000002d51fba9000 259 1 java.nio.Buffer::limit (5 bytes)
Event: 1.000 Thread 0x000002d51fba9000 nmethod 259 0x000002d509703d50 code [0x000002d509703ea0, 0x000002d509703fb8]
Event: 1.017 Thread 0x000002d51fb9d800 nmethod 253 0x000002d50970a7d0 code [0x000002d50970aa80, 0x000002d50970c1a8]
GC Heap History (0 events):
No events
Deoptimization events (0 events):
No events
Classes redefined (6 events):
Event: 120.821 Thread 0x000002d51dd06800 redefined class name=com.markilue.leecode.listnode.MyLinkedList, count=1
Event: 120.822 Thread 0x000002d51dd06800 redefined class name=com.markilue.leecode.listnode.ListNode, count=1
Event: 164.527 Thread 0x000002d51dd06800 redefined class name=com.markilue.leecode.listnode.MyLinkedList, count=2
Event: 164.528 Thread 0x000002d51dd06800 redefined class name=com.markilue.leecode.listnode.ListNode, count=2
Event: 308.152 Thread 0x000002d51dd06800 redefined class name=com.markilue.leecode.listnode.MyLinkedList, count=3
Event: 308.152 Thread 0x000002d51dd06800 redefined class name=com.markilue.leecode.listnode.ListNode, count=3
Internal exceptions (7 events):
Event: 0.110 Thread 0x000002d507b9a800 Exception <a 'java/lang/NoSuchMethodError': Method sun.misc.Unsafe.defineClass(Ljava/lang/String;[BII)Ljava/lang/Class; name or signature does not match> (0x00000000d5f07cc0) thrown at [C:\jenkins\workspace\8-2-build-windows-amd64-cygwin\jdk8u311\1894\hot
Event: 0.110 Thread 0x000002d507b9a800 Exception <a 'java/lang/NoSuchMethodError': Method sun.misc.Unsafe.prefetchRead(Ljava/lang/Object;J)V name or signature does not match> (0x00000000d5f07fa8) thrown at [C:\jenkins\workspace\8-2-build-windows-amd64-cygwin\jdk8u311\1894\hotspot\src\share\vm\
Event: 0.817 Thread 0x000002d507b9a800 Exception <a 'java/io/FileNotFoundException'> (0x00000000d62ad468) thrown at [C:\jenkins\workspace\8-2-build-windows-amd64-cygwin\jdk8u311\1894\hotspot\src\share\vm\prims\jni.cpp, line 710]
Event: 0.848 Thread 0x000002d507b9a800 Exception <a 'java/security/PrivilegedActionException'> (0x00000000d638d038) thrown at [C:\jenkins\workspace\8-2-build-windows-amd64-cygwin\jdk8u311\1894\hotspot\src\share\vm\prims\jvm.cpp, line 1523]
Event: 0.848 Thread 0x000002d507b9a800 Exception <a 'java/security/PrivilegedActionException'> (0x00000000d638d430) thrown at [C:\jenkins\workspace\8-2-build-windows-amd64-cygwin\jdk8u311\1894\hotspot\src\share\vm\prims\jvm.cpp, line 1523]
Event: 0.849 Thread 0x000002d507b9a800 Exception <a 'java/security/PrivilegedActionException'> (0x00000000d638fb28) thrown at [C:\jenkins\workspace\8-2-build-windows-amd64-cygwin\jdk8u311\1894\hotspot\src\share\vm\prims\jvm.cpp, line 1523]
Event: 0.849 Thread 0x000002d507b9a800 Exception <a 'java/security/PrivilegedActionException'> (0x00000000d638ff20) thrown at [C:\jenkins\workspace\8-2-build-windows-amd64-cygwin\jdk8u311\1894\hotspot\src\share\vm\prims\jvm.cpp, line 1523]
Events (10 events):
Event: 1527.523 Executing VM operation: GetOrSetLocal
Event: 1527.523 Executing VM operation: GetOrSetLocal done
Event: 1559.202 Executing VM operation: ChangeBreakpoints
Event: 1559.202 Executing VM operation: ChangeBreakpoints done
Event: 1559.805 Executing VM operation: ChangeBreakpoints
Event: 1559.805 Executing VM operation: ChangeBreakpoints done
Event: 1573.008 Executing VM operation: ChangeBreakpoints
Event: 1573.008 Executing VM operation: ChangeBreakpoints done
Event: 1581.175 Executing VM operation: ChangeBreakpoints
Event: 1581.176 Executing VM operation: ChangeBreakpoints done
Dynamic libraries:
0x00007ff61b6b0000 - 0x00007ff61b6f7000 E:\Java\JDK8\bin\java.exe
0x00007ffee8f90000 - 0x00007ffee9188000 C:\WINDOWS\SYSTEM32\ntdll.dll
0x00007ffee7cf0000 - 0x00007ffee7dad000 C:\WINDOWS\System32\KERNEL32.DLL
0x00007ffee6a20000 - 0x00007ffee6cee000 C:\WINDOWS\System32\KERNELBASE.dll
0x00007ffee8050000 - 0x00007ffee80fe000 C:\WINDOWS\System32\ADVAPI32.dll
0x00007ffee7560000 - 0x00007ffee75fe000 C:\WINDOWS\System32\msvcrt.dll
0x00007ffee8dd0000 - 0x00007ffee8e6c000 C:\WINDOWS\System32\sechost.dll
0x00007ffee8990000 - 0x00007ffee8ab5000 C:\WINDOWS\System32\RPCRT4.dll
0x00007ffee7eb0000 - 0x00007ffee8050000 C:\WINDOWS\System32\USER32.dll
0x00007ffee6880000 - 0x00007ffee68a2000 C:\WINDOWS\System32\win32u.dll
0x00007ffee8170000 - 0x00007ffee819a000 C:\WINDOWS\System32\GDI32.dll
0x00007ffee6e40000 - 0x00007ffee6f4b000 C:\WINDOWS\System32\gdi32full.dll
0x00007ffee6f50000 - 0x00007ffee6fed000 C:\WINDOWS\System32\msvcp_win.dll
0x00007ffee6cf0000 - 0x00007ffee6df0000 C:\WINDOWS\System32\ucrtbase.dll
0x00007ffed59c0000 - 0x00007ffed5c5a000 C:\WINDOWS\WinSxS\amd64_microsoft.windows.common-controls_6595b64144ccf1df_6.0.19041.1110_none_60b5254171f9507e\COMCTL32.dll
0x00007ffee88f0000 - 0x00007ffee8920000 C:\WINDOWS\System32\IMM32.DLL
0x00007ffedf820000 - 0x00007ffedf835000 E:\Java\JDK8\jre\bin\vcruntime140.dll
0x00007ffebd920000 - 0x00007ffebd9bb000 E:\Java\JDK8\jre\bin\msvcp140.dll
0x000000006aa50000 - 0x000000006b2b0000 E:\Java\JDK8\jre\bin\server\jvm.dll
0x00007ffee7ce0000 - 0x00007ffee7ce8000 C:\WINDOWS\System32\PSAPI.DLL
0x00007ffed2a20000 - 0x00007ffed2a29000 C:\WINDOWS\SYSTEM32\WSOCK32.dll
0x00007ffed5e10000 - 0x00007ffed5e37000 C:\WINDOWS\SYSTEM32\WINMM.dll
0x00007ffedfb10000 - 0x00007ffedfb1a000 C:\WINDOWS\SYSTEM32\VERSION.dll
0x00007ffee8100000 - 0x00007ffee816b000 C:\WINDOWS\System32\WS2_32.dll
0x00007ffee4f90000 - 0x00007ffee4fa2000 C:\WINDOWS\SYSTEM32\kernel.appcore.dll
0x00007ffee0460000 - 0x00007ffee0470000 E:\Java\JDK8\jre\bin\verify.dll
0x00007ffedf7f0000 - 0x00007ffedf81b000 E:\Java\JDK8\jre\bin\java.dll
0x00007ffedb7d0000 - 0x00007ffedb806000 E:\Java\JDK8\jre\bin\jdwp.dll
0x00007ffee1c90000 - 0x00007ffee1c99000 E:\Java\JDK8\jre\bin\npt.dll
0x00007ffedf8f0000 - 0x00007ffedf920000 E:\Java\JDK8\jre\bin\instrument.dll
0x00007ffedef10000 - 0x00007ffedef28000 E:\Java\JDK8\jre\bin\zip.dll
0x00007ffee81a0000 - 0x00007ffee88e4000 C:\WINDOWS\System32\SHELL32.dll
0x00007ffee4790000 - 0x00007ffee4f24000 C:\WINDOWS\SYSTEM32\windows.storage.dll
0x00007ffee6ff0000 - 0x00007ffee7344000 C:\WINDOWS\System32\combase.dll
0x00007ffee6110000 - 0x00007ffee6140000 C:\WINDOWS\SYSTEM32\Wldp.dll
0x00007ffee7a70000 - 0x00007ffee7b1d000 C:\WINDOWS\System32\SHCORE.dll
0x00007ffee7e30000 - 0x00007ffee7e85000 C:\WINDOWS\System32\shlwapi.dll
0x00007ffee65f0000 - 0x00007ffee660f000 C:\WINDOWS\SYSTEM32\profapi.dll
0x00007ffedf850000 - 0x00007ffedf85a000 E:\Java\JDK8\jre\bin\dt_socket.dll
0x00007ffee5e70000 - 0x00007ffee5eda000 C:\WINDOWS\system32\mswsock.dll
0x00007ffee44a0000 - 0x00007ffee4684000 C:\WINDOWS\SYSTEM32\dbghelp.dll
0x00007ffee68e0000 - 0x00007ffee6962000 C:\WINDOWS\System32\bcryptPrimitives.dll
VM Arguments:
jvm_args: -agentlib:jdwp=transport=dt_socket,address=127.0.0.1:5541,suspend=y,server=n -ea -Didea.test.cyclic.buffer.size=1048576 -javaagent:C:\Users\marklue\AppData\Local\JetBrains\IntelliJIdea2021.1\captureAgent\debugger-agent.jar -Dfile.encoding=UTF-8
java_command: com.intellij.rt.junit.JUnitStarter -ideVersion5 -junit4 com.markilue.leecode.listnode.MyLinkedList,test
java_class_path (initial): D:\software\JetBrains\IntelliJ IDEA 2021.1\lib\idea_rt.jar;D:\software\JetBrains\IntelliJ IDEA 2021.1\plugins\junit\lib\junit5-rt.jar;D:\software\JetBrains\IntelliJ IDEA 2021.1\plugins\junit\lib\junit-rt.jar;E:\Java\JDK8\jre\lib\charsets.jar;E:\Java\JDK8\jre\lib\deploy.jar;E:\Java\JDK8\jre\lib\ext\access-bridge-64.jar;E:\Java\JDK8\jre\lib\ext\cldrdata.jar;E:\Java\JDK8\jre\lib\ext\dnsns.jar;E:\Java\JDK8\jre\lib\ext\jaccess.jar;E:\Java\JDK8\jre\lib\ext\jfxrt.jar;E:\Java\JDK8\jre\lib\ext\localedata.jar;E:\Java\JDK8\jre\lib\ext\nashorn.jar;E:\Java\JDK8\jre\lib\ext\sunec.jar;E:\Java\JDK8\jre\lib\ext\sunjce_provider.jar;E:\Java\JDK8\jre\lib\ext\sunmscapi.jar;E:\Java\JDK8\jre\lib\ext\sunpkcs11.jar;E:\Java\JDK8\jre\lib\ext\zipfs.jar;E:\Java\JDK8\jre\lib\javaws.jar;E:\Java\JDK8\jre\lib\jce.jar;E:\Java\JDK8\jre\lib\jfr.jar;E:\Java\JDK8\jre\lib\jfxswt.jar;E:\Java\JDK8\jre\lib\jsse.jar;E:\Java\JDK8\jre\lib\management-agent.jar;E:\Java\JDK8\jre\lib\plugin.jar;E:\Java\JDK8\jre\lib\resources.jar;E:\Java\JDK8\jre\lib\rt.jar;D:\example\self_example\Leecode\target\classes;E:\maven\apache-maven-3.5.4-bin\RepMaven\org\projectlombok\lombok\1.18.24\lombok-1.18.24.jar;E:\maven\apache-maven-3.5.4-bin\RepMaven\junit\junit\4.13.2\junit-4.13.2.jar;E:\maven\apache-maven-3.5.4-bin\RepMaven\org\hamcrest\hamcrest-core\1.3\hamcrest-core-1.3.jar;C:\Users\marklue\AppData\Local\JetBrains\IntelliJIdea2021.1\captureAgent\debugger-agent.jar
Launcher Type: SUN_STANDARD
Environment Variables:
JAVA_HOME=E:\Java\JDK8
PATH=C:\WINDOWS\system32;C:\WINDOWS;C:\WINDOWS\System32\Wbem;C:\WINDOWS\System32\WindowsPowerShell\v1.0\;C:\WINDOWS\System32\OpenSSH\;D:\software\RAR解压工具\Bandizip\;D:\software\nodejs\;E:\Java\JDK8\bin;E:\maven\apache-maven-3.5.4-bin\apache-maven-3.5.4\bin;E:\scala\scala-2.12.11\bin;D:\software\anaconda\pkgs\python-3.7.11-h6244533_0;D:\software\anaconda\Scripts;D:\software\Git\Git\cmd;D:\software\nodejs;C:\Users\marklue\AppData\Local\Microsoft\WindowsApps;C:\Users\marklue\AppData\Roaming\npm;D:\software\JetBrains\PyCharm 2020.1\bin;
USERNAME=marklue
OS=Windows_NT
PROCESSOR_IDENTIFIER=Intel64 Family 6 Model 142 Stepping 10, GenuineIntel
--------------- S Y S T E M ---------------
OS: Windows 10.0 , 64 bit Build 19041 (10.0.19041.1806)
CPU:total 8 (initial active 8) (4 cores per cpu, 2 threads per core) family 6 model 142 stepping 10, cmov, cx8, fxsr, mmx, sse, sse2, sse3, ssse3, sse4.1, sse4.2, popcnt, avx, avx2, aes, clmul, erms, 3dnowpref, lzcnt, ht, tsc, tscinvbit, bmi1, bmi2, adx
Memory: 4k page, physical 8272104k(2053676k free), swap 11902816k(1707060k free)
vm_info: Java HotSpot(TM) 64-Bit Server VM (25.311-b11) for windows-amd64 JRE (1.8.0_311-b11), built on Sep 27 2021 05:15:14 by "java_re" with MS VC++ 15.9 (VS2017)
time: Mon Sep 5 12:31:48 2022
timezone: 中国标准时间
elapsed time: 1581.277238 seconds (0d 0h 26m 21s)

View File

@ -0,0 +1,67 @@
package com.markilue.leecode.sort;
import org.junit.Test;
import java.util.Arrays;
/**
* @BelongsProject: Leecode
* @BelongsPackage: com.markilue.leecode.sort
* @Author: marklue
* @CreateTime: 2023/4/23 20:27
* @Description: TODO
* @Version: 1.0
*/
public class QuickSort1 {
@Test
public void test(){
int[] nums={8,5,7,6,9,2,4,3,1};
quickSort(nums);
System.out.println(Arrays.toString(nums));
}
public void quickSort(int[] nums) {
quickSort(nums,0,nums.length-1);
}
public void quickSort(int[] nums, int left, int right) {
if (left > right) return;
int partition = partition(nums, left, right);
quickSort(nums, left, partition-1);
quickSort(nums,partition+1,right);
}
public int partition(int[] nums, int left, int right) {
if (left > right) return left;
int start = left;
int end = right + 1;
//Hoare-style partition with nums[left] as the pivot
while (start < end) {
//advance start past elements smaller than the pivot
while (start < right && nums[++start] < nums[left]) {
}
//retreat end past elements larger than the pivot
while (end > left && nums[--end] > nums[left]) {
}
if (start >= end) {
break;
}
swap(nums, start, end);
}
//place the pivot in its final position
swap(nums, left, end);
return end;
}
private void swap(int[] nums, int start, int end) {
int temp = nums[start];
nums[start] = nums[end];
nums[end] = temp;
}
}

View File

@ -0,0 +1,31 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>com.makrilue.interview</groupId>
<artifactId>RedCampus</artifactId>
<version>1.0-SNAPSHOT</version>
<dependencies>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<version>4.13.2</version>
<scope>compile</scope>
</dependency>
</dependencies>
<properties>
<maven.compiler.source>8</maven.compiler.source>
<maven.compiler.target>8</maven.compiler.target>
</properties>
</project>

View File

@ -0,0 +1,64 @@
import java.util.Arrays;
import java.util.Scanner;
/**
* @BelongsProject: RedCampus
* @BelongsPackage: com.markilue.interview
* @Author: marklue
* @CreateTime: 2023/3/26 15:12
* @Description: TODO
* @Version: 1.0
*/
public class Test1 {
public static void main(String[] args) {
Scanner sc = new Scanner(System.in);
int n = sc.nextInt();
int k = sc.nextInt();
int[] nums = new int[n];
for (int i = 0; i < n; i++) {
nums[i] = sc.nextInt();
}
int[][][] dp = new int[n + 1][k+1][2];
for (int j = 0; j < k; j++) {
dp[1][j][0] = nums[0];
dp[1][j][1] = last(nums[0]);
}
for (int i = 2; i < dp.length; i++) {
for (int j = 0; j < k; j++) {
dp[i][j][0] = Math.min(dp[i - 1][j][0], dp[i - 1][j][1])+nums[i-1];
if (j > 0) {
dp[i][j][1] = Math.min(dp[i - 1][j][0] + nums[i - 1], dp[i - 1][j - 1][1] + last(nums[i - 1]));
} else {
dp[i][j][1] = dp[i - 1][j][0] + last(nums[i - 1]);
}
}
}
System.out.println(Arrays.deepToString(dp)); //print the DP table contents rather than the array reference
}
private static int last(int num) {
for (int i = num; i > 1; i--) {
if (num % i == 0 && isPrime(i)) {
return num / i;
}
}
return num;
}
public static boolean isPrime(int num) {
if (num == 1) return false;
for (int i = 2; (long) i * i <= num; i++) {
if (num % i == 0) return false;
}
return true;
}
}