diff --git a/Big_data_example/flume/pom.xml b/Big_data_example/flume/pom.xml
index c294c22..5318fb2 100644
--- a/Big_data_example/flume/pom.xml
+++ b/Big_data_example/flume/pom.xml
@@ -9,13 +9,40 @@
1.0-SNAPSHOT
+
+
+ net.alchim31.maven
+ scala-maven-plugin
+ 3.4.6
+
+
+
+
+ compile
+ testCompile
+
+
+
+
+
org.apache.maven.plugins
- maven-compiler-plugin
+ maven-assembly-plugin
+ 3.0.0
- 7
- 7
+
+ jar-with-dependencies
+
+
+
+ make-assembly
+ package
+
+ single
+
+
+
@@ -27,6 +54,13 @@
1.9.0
+
+
+ org.eclipse.paho
+ org.eclipse.paho.client.mqttv3
+ 1.2.2
+
+
diff --git a/Big_data_example/flume/src/main/java/com/atguigu/flume/test/MQTTSource.java b/Big_data_example/flume/src/main/java/com/atguigu/flume/test/MQTTSource.java
new file mode 100644
index 0000000..c36b0af
--- /dev/null
+++ b/Big_data_example/flume/src/main/java/com/atguigu/flume/test/MQTTSource.java
@@ -0,0 +1,258 @@
+package com.atguigu.flume.test;
+
+import org.apache.flume.*;
+import org.apache.flume.conf.Configurable;
+import org.apache.flume.event.EventBuilder;
+import org.apache.flume.event.SimpleEvent;
+import org.apache.flume.source.AbstractSource;
+import org.eclipse.paho.client.mqttv3.*;
+
+import java.nio.charset.StandardCharsets;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.UUID;
+import java.util.concurrent.TimeUnit;
+
+
+/**
+ * @Author:huhy
+ * @DATE:Created on 2018/1/25 14:33
+ * @Modified By:
+ * @Class Description: flume收集MQTT(Mosquitto)的数据。
+ * 方法就是flume自定义source,source中来订阅(subscribe)MQTT
+ */
+/**
+ * Flume EventDrivenSource that ingests data from an MQTT broker (e.g. Mosquitto).
+ * The source subscribes to an MQTT topic; every message that arrives is wrapped
+ * in a Flume Event and handed to the ChannelProcessor.
+ *
+ * Broker URL, topic ("thing"), username and password may be overridden in the
+ * agent configuration; the defaults preserve the previously hard-coded values,
+ * so existing deployments keep working unchanged.
+ */
+public class MQTTSource extends AbstractSource implements EventDrivenSource,
+        Configurable {
+
+    // Connection settings, populated in configure(); defaults keep old behavior.
+    private String brokerUrl;
+    private String thing;
+    private String username;
+    private String password;
+
+    // MQTT client wrapper; created in start(), closed in stop().
+    SimpleMqttClient client = null;
+
+    /**
+     * Reads optional settings from the agent configuration. Each property
+     * falls back to the value that used to be hard-coded in this class.
+     */
+    @Override
+    public void configure(Context context) {
+        brokerUrl = context.getString("brokerUrl", "tcp://119.91.214.52:1883");
+        thing = context.getString("thing", "37b0ce9f8e3643b48f23e5896d4d8c14");
+        username = context.getString("username", "mqtt");
+        password = context.getString("password", "mqtt");
+    }
+
+    /**
+     * Connects to the broker and subscribes. Must return promptly: Flume's
+     * lifecycle thread calls this, and Paho delivers messages on its own
+     * threads afterwards.
+     */
+    @Override
+    public void start() {
+        client = new SimpleMqttClient();
+        client.runClient();
+        super.start(); // record lifecycle state (was previously skipped)
+    }
+
+    /** Disconnects from the broker and shuts the source down. */
+    @Override
+    public void stop() {
+        if (client != null) {
+            System.out.println("生命周期结束,关闭mqtt");
+            client.closeConn();
+        }
+        super.stop();
+    }
+
+    /**
+     * Thin wrapper around the Paho MQTT client: connects, subscribes, and
+     * forwards every received message to the Flume channel via the callbacks.
+     */
+    public class SimpleMqttClient implements MqttCallback {
+
+        MqttClient myClient;
+        MqttConnectOptions connOpt;
+
+        // Connection parameters come from the outer source's configuration.
+        String BROKER_URL = brokerUrl;
+        // Topic identifier (also used as the MQTT client id).
+        String M2MIO_THING = thing;
+        String M2MIO_USERNAME = username;
+        String M2MIO_PASSWORD_MD5 = password;
+
+        // Role flags: this source only subscribes; the publish branch is demo code.
+        Boolean subscriber = true;
+        Boolean publisher = false;
+
+        /** Invoked by Paho when the broker connection is lost. */
+        @Override
+        public void connectionLost(Throwable t) {
+            System.out.println("Connection lost!");
+            // NOTE(review): no reconnect logic — consider
+            // MqttConnectOptions.setAutomaticReconnect(true) or manual retry here.
+        }
+
+        /** Disconnects from the broker if currently connected. */
+        public void closeConn() {
+            if (myClient != null && myClient.isConnected()) {
+                try {
+                    System.out.println("关闭mqtt");
+                    myClient.disconnect();
+                } catch (MqttException e) {
+                    e.printStackTrace();
+                }
+            }
+        }
+
+        /** Invoked when a message published by this client reaches the broker. */
+        @Override
+        public void deliveryComplete(IMqttDeliveryToken token) {
+            System.out.println("收到了一条数据");
+        }
+
+        /**
+         * Invoked for every message on a subscribed topic: wraps the payload in
+         * a Flume Event (with id/time headers) and forwards it to the channel.
+         */
+        @Override
+        public void messageArrived(String topic, MqttMessage message)
+                throws Exception {
+            System.out.println("getmessage...");
+
+            Map<String, String> headers = new HashMap<>();
+            headers.put("id", "123"); // unique packet identifier
+            headers.put("time", String.valueOf(System.currentTimeMillis())); // device-side timestamp
+            Event flumeEvent = EventBuilder.withBody(message.getPayload(), headers);
+            try {
+                getChannelProcessor().processEvent(flumeEvent);
+                System.out.println("消息到达-------->");
+            } catch (Exception e) {
+                e.printStackTrace();
+            }
+        }
+
+        /**
+         * Connects to the broker and subscribes (and optionally publishes demo
+         * messages). Unlike the previous version this method RETURNS once the
+         * subscription is established instead of sleeping forever, so the Flume
+         * lifecycle thread that calls start() is no longer hung.
+         */
+        public void runClient() {
+            String clientID = M2MIO_THING;
+            connOpt = new MqttConnectOptions();
+            connOpt.setCleanSession(true);
+            connOpt.setKeepAliveInterval(3000);
+            connOpt.setUserName(M2MIO_USERNAME);
+            connOpt.setPassword(M2MIO_PASSWORD_MD5.toCharArray());
+
+            try {
+                myClient = new MqttClient(BROKER_URL, clientID);
+                myClient.setCallback(this);
+                myClient.connect(connOpt);
+                System.out.println("连接成功");
+            } catch (MqttException e) {
+                // Fail this component instead of killing the whole agent JVM
+                // (the old code called System.exit(-1) here).
+                throw new FlumeException("Unable to connect to MQTT broker " + BROKER_URL, e);
+            }
+
+            System.out.println("Connected to " + BROKER_URL);
+
+            // Topic layout: /thing/<thing-id>/test_unchange_scada/data/post
+            String topic1 = "/thing/" + M2MIO_THING + "/test_unchange_scada/data/post";
+            MqttTopic topic = myClient.getTopic(topic1);
+
+            if (subscriber) {
+                try {
+                    System.out.println("开始订阅信息");
+                    int subQoS = 2;
+                    myClient.subscribe(topic1, subQoS);
+                } catch (Exception e) {
+                    e.printStackTrace();
+                }
+            }
+
+            // Demo publisher path (disabled by default via publisher=false).
+            if (publisher) {
+                System.out.println("开始发布消息");
+                for (int i = 1; i <= 10; i++) {
+                    String pubMsg = "{\"pubmsg\":" + i + "}";
+                    int pubQoS = 2;
+                    MqttMessage message = new MqttMessage(pubMsg.getBytes(StandardCharsets.UTF_8));
+                    message.setQos(pubQoS);
+                    message.setRetained(false);
+                    System.out.println("Publishing to topic \"" + topic
+                            + "\" qos " + pubQoS);
+                    try {
+                        MqttDeliveryToken token = topic.publish(message);
+                        token.waitForCompletion();
+                        Thread.sleep(2000);
+                    } catch (InterruptedException e) {
+                        Thread.currentThread().interrupt(); // preserve interrupt status
+                        return;
+                    } catch (Exception e) {
+                        e.printStackTrace();
+                    }
+                }
+            }
+            // Paho delivers messages on its own threads; nothing to wait for here.
+            // The old infinite Thread.sleep(5000) loop blocked Flume's start() forever.
+        }
+    }
+}
diff --git a/Big_data_example/flume/src/main/java/com/atguigu/flume/test/MQTTSource1.java b/Big_data_example/flume/src/main/java/com/atguigu/flume/test/MQTTSource1.java
new file mode 100644
index 0000000..910c1fe
--- /dev/null
+++ b/Big_data_example/flume/src/main/java/com/atguigu/flume/test/MQTTSource1.java
@@ -0,0 +1,310 @@
+package com.atguigu.flume.test;
+
+import org.apache.flume.*;
+import org.apache.flume.conf.Configurable;
+import org.apache.flume.event.EventBuilder;
+import org.apache.flume.event.SimpleEvent;
+import org.apache.flume.source.AbstractSource;
+import org.eclipse.paho.client.mqttv3.*;
+
+import java.nio.charset.StandardCharsets;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.UUID;
+import java.util.concurrent.TimeUnit;
+
+
+/**
+ * @Author:huhy
+ * @DATE:Created on 2018/1/25 14:33
+ * @Modified By:
+ * @Class Description: flume收集MQTT(Mosquitto)的数据。
+ * 方法就是flume自定义source,source中来订阅(subscribe)MQTT
+ */
+/**
+ * Flume source that is simultaneously an EventDrivenSource and a PollableSource:
+ * - the inner Paho client pushes each received MQTT message to the channel, and
+ * - process() additionally emits one prefixed random-UUID event per second
+ *   (demo/heartbeat data; presumably left in for testing — verify intent).
+ *
+ * NOTE(review): implementing both source flavours at once is unusual in Flume;
+ * a source is normally one or the other. Kept as-is for compatibility.
+ */
+public class MQTTSource1 extends AbstractSource implements EventDrivenSource,
+        Configurable, PollableSource {
+
+    // Prefix prepended to every generated UUID event body ("prefix" property).
+    private String prefix;
+
+    // MQTT client wrapper; created in start(), closed in stop().
+    SimpleMqttClient client = null;
+
+    /** Reads the optional "prefix" property (default "log--"). */
+    @Override
+    public void configure(Context context) {
+        prefix = context.getString("prefix", "log--");
+    }
+
+    /**
+     * Connects to the broker and subscribes. Returns promptly per the Flume
+     * lifecycle contract; Paho delivers messages on its own threads.
+     */
+    @Override
+    public void start() {
+        client = new SimpleMqttClient();
+        client.runClient();
+        super.start(); // record lifecycle state (was previously skipped)
+    }
+
+    /** Disconnects the MQTT client and shuts the source down. */
+    @Override
+    public void stop() {
+        if (client != null) {
+            System.out.println("生命周期结束");
+            client.closeConn();
+        }
+        super.stop();
+    }
+
+    /**
+     * PollableSource hook: once per second builds a prefixed random-UUID event
+     * and hands it to the channel.
+     *
+     * @return READY on success, BACKOFF on failure or interruption
+     */
+    @Override
+    public Status process() throws EventDeliveryException {
+        // Throttle generation to one event per second.
+        try {
+            TimeUnit.SECONDS.sleep(1);
+        } catch (InterruptedException e) {
+            Thread.currentThread().interrupt(); // preserve interrupt status
+            return Status.BACKOFF;
+        }
+
+        try {
+            Event e = getSomeData();
+            getChannelProcessor().processEvent(e);
+            return Status.READY;
+        } catch (Throwable t) {
+            // Delivery failed; ask the source runner to back off and retry.
+            return Status.BACKOFF;
+        }
+    }
+
+    /** Builds a SimpleEvent whose body is prefix + random UUID, header author=Ding. */
+    private Event getSomeData() {
+        String resultData = prefix + UUID.randomUUID().toString();
+        SimpleEvent event = new SimpleEvent();
+        event.setBody(resultData.getBytes(StandardCharsets.UTF_8));
+        event.getHeaders().put("author", "Ding");
+        return event;
+    }
+
+    /** Backoff growth step (ms) used by the PollableSource runner. */
+    @Override
+    public long getBackOffSleepIncrement() {
+        return 1;
+    }
+
+    /** Maximum backoff sleep (ms) used by the PollableSource runner. */
+    @Override
+    public long getMaxBackOffSleepInterval() {
+        return 10;
+    }
+
+    /**
+     * Thin wrapper around the Paho MQTT client: connects, subscribes, and
+     * forwards every received message to the Flume channel.
+     */
+    public class SimpleMqttClient implements MqttCallback {
+
+        MqttClient myClient;
+        MqttConnectOptions connOpt;
+
+        // Broker connection settings.
+        // NOTE(review): credentials are hard-coded — move them to agent config.
+        String BROKER_URL = "tcp://192.168.118.202:61613";
+        // Topic name (also used as the MQTT client id).
+        String M2MIO_THING = "huhy";
+        String M2MIO_USERNAME = "admin";
+        String M2MIO_PASSWORD_MD5 = "password";
+
+        // Role flags: this source only subscribes; the publish branch is demo code.
+        Boolean subscriber = true;
+        Boolean publisher = false;
+
+        /** Invoked by Paho when the broker connection is lost. */
+        @Override
+        public void connectionLost(Throwable t) {
+            System.out.println("Connection lost!");
+            // NOTE(review): no reconnect logic — consider automatic reconnect.
+        }
+
+        /** Disconnects from the broker if currently connected. */
+        public void closeConn() {
+            if (myClient != null && myClient.isConnected()) {
+                try {
+                    myClient.disconnect();
+                } catch (MqttException e) {
+                    e.printStackTrace();
+                }
+            }
+        }
+
+        /** Invoked when a message published by this client reaches the broker. */
+        @Override
+        public void deliveryComplete(IMqttDeliveryToken token) {
+            System.out.println("发送了一条数据");
+        }
+
+        /**
+         * Invoked for every message on a subscribed topic: wraps the payload
+         * in a Flume Event (id/time headers) and forwards it to the channel.
+         */
+        @Override
+        public void messageArrived(String topic, MqttMessage message)
+                throws Exception {
+            Map<String, String> headers = new HashMap<>();
+            headers.put("id", "123"); // unique packet identifier
+            headers.put("time", String.valueOf(System.currentTimeMillis())); // device-side timestamp
+            Event flumeEvent = EventBuilder.withBody(message.getPayload(), headers);
+            try {
+                getChannelProcessor().processEvent(flumeEvent);
+                System.out.println("消息到达-------->");
+            } catch (Exception e) {
+                e.printStackTrace();
+            }
+        }
+
+        /**
+         * Connects to the broker and subscribes, then RETURNS (the old version
+         * ended in an infinite sleep loop that hung the caller of start()).
+         */
+        public void runClient() {
+            String clientID = M2MIO_THING;
+            connOpt = new MqttConnectOptions();
+            connOpt.setCleanSession(true);
+            connOpt.setKeepAliveInterval(3000);
+            connOpt.setUserName(M2MIO_USERNAME);
+            connOpt.setPassword(M2MIO_PASSWORD_MD5.toCharArray());
+
+            try {
+                myClient = new MqttClient(BROKER_URL, clientID);
+                myClient.setCallback(this);
+                myClient.connect(connOpt);
+            } catch (MqttException e) {
+                // Fail this component instead of killing the whole agent JVM
+                // (the old code called System.exit(-1) here).
+                throw new FlumeException("Unable to connect to MQTT broker " + BROKER_URL, e);
+            }
+
+            System.out.println("Connected to " + BROKER_URL);
+
+            MqttTopic topic = myClient.getTopic(M2MIO_THING);
+
+            if (subscriber) {
+                try {
+                    int subQoS = 2;
+                    myClient.subscribe(M2MIO_THING, subQoS);
+                } catch (Exception e) {
+                    e.printStackTrace();
+                }
+            }
+
+            // Demo publisher path (disabled by default via publisher=false).
+            if (publisher) {
+                for (int i = 1; i <= 10; i++) {
+                    String pubMsg = "{\"pubmsg\":" + i + "}";
+                    int pubQoS = 2;
+                    MqttMessage message = new MqttMessage(pubMsg.getBytes(StandardCharsets.UTF_8));
+                    message.setQos(pubQoS);
+                    message.setRetained(false);
+                    System.out.println("Publishing to topic \"" + topic
+                            + "\" qos " + pubQoS);
+                    try {
+                        MqttDeliveryToken token = topic.publish(message);
+                        token.waitForCompletion();
+                        Thread.sleep(2000);
+                    } catch (InterruptedException e) {
+                        Thread.currentThread().interrupt(); // preserve interrupt status
+                        return;
+                    } catch (Exception e) {
+                        e.printStackTrace();
+                    }
+                }
+            }
+            // Paho delivers on its own threads; the old infinite sleep loop that
+            // blocked Flume's start() has been removed.
+        }
+    }
+}
diff --git a/Big_data_example/flume/src/main/java/com/atguigu/flume/test/MQTTSource2.java b/Big_data_example/flume/src/main/java/com/atguigu/flume/test/MQTTSource2.java
new file mode 100644
index 0000000..111d8a7
--- /dev/null
+++ b/Big_data_example/flume/src/main/java/com/atguigu/flume/test/MQTTSource2.java
@@ -0,0 +1,232 @@
+package com.atguigu.flume.test;
+
+
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.flume.Context;
+import org.apache.flume.Event;
+import org.apache.flume.EventDrivenSource;
+import org.apache.flume.conf.Configurable;
+import org.apache.flume.event.EventBuilder;
+import org.apache.flume.source.AbstractSource;
+import org.eclipse.paho.client.mqttv3.IMqttDeliveryToken;
+import org.eclipse.paho.client.mqttv3.MqttCallback;
+import org.eclipse.paho.client.mqttv3.MqttClient;
+import org.eclipse.paho.client.mqttv3.MqttConnectOptions;
+import org.eclipse.paho.client.mqttv3.MqttDeliveryToken;
+import org.eclipse.paho.client.mqttv3.MqttException;
+import org.eclipse.paho.client.mqttv3.MqttMessage;
+import org.eclipse.paho.client.mqttv3.MqttTopic;
+
+/**
+ * Flume EventDrivenSource that subscribes to an MQTT topic of the form
+ * domain/stuff/thing and forwards each message payload to the channel.
+ * No broker authentication is configured in this variant.
+ */
+public class MQTTSource2 extends AbstractSource implements EventDrivenSource,
+        Configurable {
+
+    // MQTT client wrapper; created in start(), closed in stop().
+    SimpleMqttClient client = null;
+
+    /** No configuration is read; connection settings are constants below. */
+    @Override
+    public void configure(Context arg0) {
+        // Intentionally empty — settings are hard-coded in SimpleMqttClient.
+        // NOTE(review): consider reading broker/topic from the Context instead.
+    }
+
+    /**
+     * Connects to the broker and subscribes. Returns promptly per the Flume
+     * lifecycle contract; Paho delivers messages on its own threads.
+     */
+    @Override
+    public void start() {
+        client = new SimpleMqttClient();
+        client.runClient();
+        super.start(); // record lifecycle state (was previously skipped)
+    }
+
+    /** Disconnects the MQTT client and shuts the source down. */
+    @Override
+    public void stop() {
+        if (client != null) {
+            client.closeConn();
+        }
+        super.stop();
+    }
+
+    /**
+     * Thin wrapper around the Paho MQTT client: connects, subscribes, and
+     * forwards every received message to the Flume channel.
+     */
+    public class SimpleMqttClient implements MqttCallback {
+
+        MqttClient myClient;
+        MqttConnectOptions connOpt;
+
+        // Broker and topic-path components (topic is domain/stuff/thing).
+        String BROKER_URL = "tcp://192.168.116.202:1883";
+        String M2MIO_DOMAIN = "192.168.116.202";
+        String M2MIO_STUFF = "yhx";
+        String M2MIO_THING = "yhx_flume";
+
+        // Role flags: this source only subscribes; the publish branch is demo code.
+        Boolean subscriber = true;
+        Boolean publisher = false;
+
+        /** Invoked by Paho when the broker connection is lost. */
+        @Override
+        public void connectionLost(Throwable t) {
+            System.out.println("Connection lost!");
+            // NOTE(review): no reconnect logic — consider automatic reconnect.
+        }
+
+        /** Disconnects from the broker if currently connected. */
+        public void closeConn() {
+            if (myClient != null && myClient.isConnected()) {
+                try {
+                    myClient.disconnect();
+                } catch (MqttException e) {
+                    e.printStackTrace();
+                }
+            }
+        }
+
+        /** Invoked when a message published by this client reaches the broker. */
+        @Override
+        public void deliveryComplete(IMqttDeliveryToken token) {
+            // No-op: this source does not track outbound deliveries.
+        }
+
+        /**
+         * Invoked for every message on a subscribed topic: wraps the payload
+         * in a Flume Event (no headers) and forwards it to the channel.
+         */
+        @Override
+        public void messageArrived(String topic, MqttMessage message)
+                throws Exception {
+            Map<String, String> headers = new HashMap<>();
+            Event flumeEvent = EventBuilder.withBody(message.getPayload(), headers);
+            try {
+                getChannelProcessor().processEvent(flumeEvent);
+            } catch (Exception e) {
+                e.printStackTrace();
+            }
+        }
+
+        /**
+         * Connects to the broker and subscribes, then RETURNS (the old version
+         * ended in an infinite sleep loop that hung the caller of start()).
+         */
+        public void runClient() {
+            String clientID = M2MIO_THING;
+            connOpt = new MqttConnectOptions();
+            connOpt.setCleanSession(true);
+            connOpt.setKeepAliveInterval(3000);
+
+            try {
+                myClient = new MqttClient(BROKER_URL, clientID);
+                myClient.setCallback(this);
+                myClient.connect(connOpt);
+            } catch (MqttException e) {
+                // Fail this component instead of killing the whole agent JVM
+                // (the old code called System.exit(-1) here).
+                throw new org.apache.flume.FlumeException(
+                        "Unable to connect to MQTT broker " + BROKER_URL, e);
+            }
+
+            System.out.println("Connected to " + BROKER_URL);
+
+            // Topics on m2m.io are in the form domain/stuff/thing.
+            String myTopic = M2MIO_DOMAIN + "/" + M2MIO_STUFF + "/"
+                    + M2MIO_THING;
+            System.out.println("myTopic:" + myTopic);
+            MqttTopic topic = myClient.getTopic(myTopic);
+
+            if (subscriber) {
+                try {
+                    int subQoS = 0;
+                    myClient.subscribe(myTopic, subQoS);
+                } catch (Exception e) {
+                    e.printStackTrace();
+                }
+            }
+
+            // Demo publisher path (disabled by default via publisher=false).
+            if (publisher) {
+                for (int i = 1; i <= 10; i++) {
+                    String pubMsg = "{\"pubmsg\":" + i + "}";
+                    int pubQoS = 0;
+                    MqttMessage message = new MqttMessage(pubMsg.getBytes());
+                    message.setQos(pubQoS);
+                    message.setRetained(false);
+                    System.out.println("Publishing to topic \"" + topic
+                            + "\" qos " + pubQoS);
+                    try {
+                        MqttDeliveryToken token = topic.publish(message);
+                        token.waitForCompletion();
+                        Thread.sleep(100);
+                    } catch (InterruptedException e) {
+                        Thread.currentThread().interrupt(); // preserve interrupt status
+                        return;
+                    } catch (Exception e) {
+                        e.printStackTrace();
+                    }
+                }
+            }
+            // Paho delivers on its own threads; the old infinite sleep loop that
+            // blocked Flume's start() has been removed.
+        }
+    }
+}
diff --git a/Big_data_example/log/agent.log b/Big_data_example/log/agent.log
index 64a4d54..5e8c2fe 100644
--- a/Big_data_example/log/agent.log
+++ b/Big_data_example/log/agent.log
@@ -687,3 +687,4357 @@ Caused by: java.lang.ClassNotFoundException: Class com.hadoop.compression.lzo.Lz
at org.apache.hadoop.io.compress.CompressionCodecFactory.getCodecClasses(CompressionCodecFactory.java:132)
... 115 more
2022-05-13 15:34:00,710 ERROR --- [ main] org.apache.hadoop.hdfs.KeyProviderCache (line: 87) : Could not find uri with key [dfs.encryption.key.provider.uri] to create a keyProvider !!
+2022-05-20 12:03:52,360 ERROR --- [ main] org.apache.spark.ml.util.Instrumentation (line: 73) : java.lang.UnsupportedOperationException: empty collection
+ at org.apache.spark.rdd.RDD.$anonfun$treeReduce$6(RDD.scala:1128)
+ at scala.Option.getOrElse(Option.scala:189)
+ at org.apache.spark.rdd.RDD.$anonfun$treeReduce$1(RDD.scala:1128)
+ at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
+ at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:112)
+ at org.apache.spark.rdd.RDD.withScope(RDD.scala:388)
+ at org.apache.spark.rdd.RDD.treeReduce(RDD.scala:1105)
+ at org.apache.spark.ml.feature.VectorIndexer.fit(VectorIndexer.scala:152)
+ at org.apache.spark.ml.feature.VectorIndexer.fit(VectorIndexer.scala:119)
+ at org.apache.spark.ml.Pipeline.$anonfun$fit$5(Pipeline.scala:151)
+ at org.apache.spark.ml.MLEvents.withFitEvent(events.scala:132)
+ at org.apache.spark.ml.MLEvents.withFitEvent$(events.scala:125)
+ at org.apache.spark.ml.util.Instrumentation.withFitEvent(Instrumentation.scala:42)
+ at org.apache.spark.ml.Pipeline.$anonfun$fit$4(Pipeline.scala:151)
+ at scala.collection.Iterator.foreach(Iterator.scala:943)
+ at scala.collection.Iterator.foreach$(Iterator.scala:943)
+ at scala.collection.AbstractIterator.foreach(Iterator.scala:1431)
+ at scala.collection.IterableViewLike$Transformed.foreach(IterableViewLike.scala:47)
+ at scala.collection.IterableViewLike$Transformed.foreach$(IterableViewLike.scala:47)
+ at scala.collection.SeqViewLike$AbstractTransformed.foreach(SeqViewLike.scala:40)
+ at org.apache.spark.ml.Pipeline.$anonfun$fit$2(Pipeline.scala:147)
+ at org.apache.spark.ml.MLEvents.withFitEvent(events.scala:132)
+ at org.apache.spark.ml.MLEvents.withFitEvent$(events.scala:125)
+ at org.apache.spark.ml.util.Instrumentation.withFitEvent(Instrumentation.scala:42)
+ at org.apache.spark.ml.Pipeline.$anonfun$fit$1(Pipeline.scala:133)
+ at org.apache.spark.ml.util.Instrumentation$.$anonfun$instrumented$1(Instrumentation.scala:191)
+ at scala.util.Try$.apply(Try.scala:213)
+ at org.apache.spark.ml.util.Instrumentation$.instrumented(Instrumentation.scala:191)
+ at org.apache.spark.ml.Pipeline.fit(Pipeline.scala:133)
+ at com.atguigu.userprofile.ml.pipline.MyPipeLine.train(MyPipeLine.scala:147)
+ at com.atguigu.userprofile.ml.train.StudGenderTrain$.main(StudGenderTrain.scala:54)
+ at com.atguigu.userprofile.ml.train.StudGenderTrain.main(StudGenderTrain.scala)
+
+2022-05-20 12:05:05,000 ERROR --- [ main] org.apache.spark.ml.util.Instrumentation (line: 73) : java.lang.UnsupportedOperationException: empty collection
+ at org.apache.spark.rdd.RDD.$anonfun$treeReduce$6(RDD.scala:1128)
+ at scala.Option.getOrElse(Option.scala:189)
+ at org.apache.spark.rdd.RDD.$anonfun$treeReduce$1(RDD.scala:1128)
+ at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
+ at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:112)
+ at org.apache.spark.rdd.RDD.withScope(RDD.scala:388)
+ at org.apache.spark.rdd.RDD.treeReduce(RDD.scala:1105)
+ at org.apache.spark.ml.feature.VectorIndexer.fit(VectorIndexer.scala:152)
+ at org.apache.spark.ml.feature.VectorIndexer.fit(VectorIndexer.scala:119)
+ at org.apache.spark.ml.Pipeline.$anonfun$fit$5(Pipeline.scala:151)
+ at org.apache.spark.ml.MLEvents.withFitEvent(events.scala:132)
+ at org.apache.spark.ml.MLEvents.withFitEvent$(events.scala:125)
+ at org.apache.spark.ml.util.Instrumentation.withFitEvent(Instrumentation.scala:42)
+ at org.apache.spark.ml.Pipeline.$anonfun$fit$4(Pipeline.scala:151)
+ at scala.collection.Iterator.foreach(Iterator.scala:943)
+ at scala.collection.Iterator.foreach$(Iterator.scala:943)
+ at scala.collection.AbstractIterator.foreach(Iterator.scala:1431)
+ at scala.collection.IterableViewLike$Transformed.foreach(IterableViewLike.scala:47)
+ at scala.collection.IterableViewLike$Transformed.foreach$(IterableViewLike.scala:47)
+ at scala.collection.SeqViewLike$AbstractTransformed.foreach(SeqViewLike.scala:40)
+ at org.apache.spark.ml.Pipeline.$anonfun$fit$2(Pipeline.scala:147)
+ at org.apache.spark.ml.MLEvents.withFitEvent(events.scala:132)
+ at org.apache.spark.ml.MLEvents.withFitEvent$(events.scala:125)
+ at org.apache.spark.ml.util.Instrumentation.withFitEvent(Instrumentation.scala:42)
+ at org.apache.spark.ml.Pipeline.$anonfun$fit$1(Pipeline.scala:133)
+ at org.apache.spark.ml.util.Instrumentation$.$anonfun$instrumented$1(Instrumentation.scala:191)
+ at scala.util.Try$.apply(Try.scala:213)
+ at org.apache.spark.ml.util.Instrumentation$.instrumented(Instrumentation.scala:191)
+ at org.apache.spark.ml.Pipeline.fit(Pipeline.scala:133)
+ at com.atguigu.userprofile.ml.pipline.MyPipeLine.train(MyPipeLine.scala:147)
+ at com.atguigu.userprofile.ml.train.StudGenderTrain$.main(StudGenderTrain.scala:55)
+ at com.atguigu.userprofile.ml.train.StudGenderTrain.main(StudGenderTrain.scala)
+
+2022-05-24 13:32:29,008 ERROR --- [ Executor task launch worker for task 7425] org.apache.spark.executor.Executor (line: 94) : Exception in task 64.0 in stage 122.0 (TID 7425)
+org.apache.spark.SparkException: Failed to execute user defined function(VectorIndexerModel$$Lambda$4552/338152470: (struct,values:array>) => struct,values:array>)
+ at org.apache.spark.sql.catalyst.expressions.GeneratedClass$GeneratedIteratorForCodegenStage9.processNext(Unknown Source)
+ at org.apache.spark.sql.execution.BufferedRowIterator.hasNext(BufferedRowIterator.java:43)
+ at org.apache.spark.sql.execution.WholeStageCodegenExec$$anon$1.hasNext(WholeStageCodegenExec.scala:729)
+ at org.apache.spark.sql.execution.SparkPlan.$anonfun$getByteArrayRdd$1(SparkPlan.scala:340)
+ at org.apache.spark.rdd.RDD.$anonfun$mapPartitionsInternal$2(RDD.scala:872)
+ at org.apache.spark.rdd.RDD.$anonfun$mapPartitionsInternal$2$adapted(RDD.scala:872)
+ at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52)
+ at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:349)
+ at org.apache.spark.rdd.RDD.iterator(RDD.scala:313)
+ at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:90)
+ at org.apache.spark.scheduler.Task.run(Task.scala:127)
+ at org.apache.spark.executor.Executor$TaskRunner.$anonfun$run$3(Executor.scala:444)
+ at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1377)
+ at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:447)
+ at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
+ at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
+ at java.lang.Thread.run(Thread.java:748)
+Caused by: org.apache.spark.SparkException: VectorIndexer encountered invalid value 2.0 on feature index 2. To handle or skip invalid value, try setting VectorIndexer.handleInvalid.
+ at org.apache.spark.ml.feature.VectorIndexerModel.$anonfun$transformFunc$3(VectorIndexer.scala:370)
+ at org.apache.spark.ml.feature.VectorIndexerModel.$anonfun$transformFunc$3$adapted(VectorIndexer.scala:361)
+ at scala.collection.immutable.Map$Map3.foreach(Map.scala:267)
+ at org.apache.spark.ml.feature.VectorIndexerModel.$anonfun$transformFunc$1(VectorIndexer.scala:361)
+ at org.apache.spark.ml.feature.VectorIndexerModel.$anonfun$transform$1(VectorIndexer.scala:429)
+ ... 17 more
+2022-05-24 13:32:29,254 ERROR --- [ task-result-getter-2] org.apache.spark.scheduler.TaskSetManager (line: 73) : Task 64 in stage 122.0 failed 1 times; aborting job
+2022-05-24 13:56:06,664 ERROR --- [ Executor task launch worker for task 8914] org.apache.spark.executor.Executor (line: 94) : Exception in task 53.0 in stage 144.0 (TID 8914)
+org.apache.spark.SparkException: Failed to execute user defined function(VectorIndexerModel$$Lambda$4629/826045717: (struct,values:array>) => struct,values:array>)
+ at org.apache.spark.sql.catalyst.expressions.GeneratedClass$GeneratedIteratorForCodegenStage9.processNext(Unknown Source)
+ at org.apache.spark.sql.execution.BufferedRowIterator.hasNext(BufferedRowIterator.java:43)
+ at org.apache.spark.sql.execution.WholeStageCodegenExec$$anon$1.hasNext(WholeStageCodegenExec.scala:729)
+ at org.apache.spark.sql.execution.SparkPlan.$anonfun$getByteArrayRdd$1(SparkPlan.scala:340)
+ at org.apache.spark.rdd.RDD.$anonfun$mapPartitionsInternal$2(RDD.scala:872)
+ at org.apache.spark.rdd.RDD.$anonfun$mapPartitionsInternal$2$adapted(RDD.scala:872)
+ at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52)
+ at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:349)
+ at org.apache.spark.rdd.RDD.iterator(RDD.scala:313)
+ at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:90)
+ at org.apache.spark.scheduler.Task.run(Task.scala:127)
+ at org.apache.spark.executor.Executor$TaskRunner.$anonfun$run$3(Executor.scala:444)
+ at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1377)
+ at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:447)
+ at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
+ at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
+ at java.lang.Thread.run(Thread.java:748)
+Caused by: org.apache.spark.SparkException: VectorIndexer encountered invalid value 14.0 on feature index 2. To handle or skip invalid value, try setting VectorIndexer.handleInvalid.
+ at org.apache.spark.ml.feature.VectorIndexerModel.$anonfun$transformFunc$3(VectorIndexer.scala:370)
+ at org.apache.spark.ml.feature.VectorIndexerModel.$anonfun$transformFunc$3$adapted(VectorIndexer.scala:361)
+ at scala.collection.immutable.Map$Map3.foreach(Map.scala:267)
+ at org.apache.spark.ml.feature.VectorIndexerModel.$anonfun$transformFunc$1(VectorIndexer.scala:361)
+ at org.apache.spark.ml.feature.VectorIndexerModel.$anonfun$transform$1(VectorIndexer.scala:429)
+ ... 17 more
+2022-05-24 13:56:06,945 ERROR --- [ task-result-getter-1] org.apache.spark.scheduler.TaskSetManager (line: 73) : Task 53 in stage 144.0 failed 1 times; aborting job
+2022-05-24 19:06:24,812 ERROR --- [ main] DataNucleus.Datastore.Schema (line: 125) : Failed initialising database.
+Unable to open a test connection to the given database. JDBC url = jdbc:mysql://Ding202:3306/metastore?createDatabaseIfNotExist=true&characterEncoding=utf-8&useSSL=false, username = root. Terminating connection pool (set lazyInit to true if you expect to start your database after your app). Original Exception: ------
+com.mysql.jdbc.exceptions.jdbc4.CommunicationsException: Communications link failure
+
+The last packet sent successfully to the server was 0 milliseconds ago. The driver has not received any packets from the server.
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
+ at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
+ at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
+ at com.mysql.jdbc.Util.handleNewInstance(Util.java:425)
+ at com.mysql.jdbc.SQLError.createCommunicationsException(SQLError.java:990)
+ at com.mysql.jdbc.MysqlIO.(MysqlIO.java:342)
+ at com.mysql.jdbc.ConnectionImpl.coreConnect(ConnectionImpl.java:2197)
+ at com.mysql.jdbc.ConnectionImpl.connectOneTryOnly(ConnectionImpl.java:2230)
+ at com.mysql.jdbc.ConnectionImpl.createNewIO(ConnectionImpl.java:2025)
+ at com.mysql.jdbc.ConnectionImpl.(ConnectionImpl.java:778)
+ at com.mysql.jdbc.JDBC4Connection.(JDBC4Connection.java:47)
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
+ at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
+ at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
+ at com.mysql.jdbc.Util.handleNewInstance(Util.java:425)
+ at com.mysql.jdbc.ConnectionImpl.getInstance(ConnectionImpl.java:386)
+ at com.mysql.jdbc.NonRegisteringDriver.connect(NonRegisteringDriver.java:330)
+ at java.sql.DriverManager.getConnection(DriverManager.java:664)
+ at java.sql.DriverManager.getConnection(DriverManager.java:208)
+ at com.jolbox.bonecp.BoneCP.obtainRawInternalConnection(BoneCP.java:361)
+ at com.jolbox.bonecp.BoneCP.(BoneCP.java:416)
+ at com.jolbox.bonecp.BoneCPDataSource.getConnection(BoneCPDataSource.java:120)
+ at org.datanucleus.store.rdbms.ConnectionFactoryImpl$ManagedConnectionImpl.getConnection(ConnectionFactoryImpl.java:483)
+ at org.datanucleus.store.rdbms.RDBMSStoreManager.(RDBMSStoreManager.java:297)
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
+ at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
+ at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
+ at org.datanucleus.plugin.NonManagedPluginRegistry.createExecutableExtension(NonManagedPluginRegistry.java:606)
+ at org.datanucleus.plugin.PluginManager.createExecutableExtension(PluginManager.java:301)
+ at org.datanucleus.NucleusContextHelper.createStoreManagerForProperties(NucleusContextHelper.java:133)
+ at org.datanucleus.PersistenceNucleusContextImpl.initialise(PersistenceNucleusContextImpl.java:422)
+ at org.datanucleus.api.jdo.JDOPersistenceManagerFactory.freezeConfiguration(JDOPersistenceManagerFactory.java:817)
+ at org.datanucleus.api.jdo.JDOPersistenceManagerFactory.createPersistenceManagerFactory(JDOPersistenceManagerFactory.java:334)
+ at org.datanucleus.api.jdo.JDOPersistenceManagerFactory.getPersistenceManagerFactory(JDOPersistenceManagerFactory.java:213)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at javax.jdo.JDOHelper$16.run(JDOHelper.java:1965)
+ at java.security.AccessController.doPrivileged(Native Method)
+ at javax.jdo.JDOHelper.invoke(JDOHelper.java:1960)
+ at javax.jdo.JDOHelper.invokeGetPersistenceManagerFactoryOnImplementation(JDOHelper.java:1166)
+ at javax.jdo.JDOHelper.getPersistenceManagerFactory(JDOHelper.java:808)
+ at javax.jdo.JDOHelper.getPersistenceManagerFactory(JDOHelper.java:701)
+ at org.apache.hadoop.hive.metastore.ObjectStore.getPMF(ObjectStore.java:521)
+ at org.apache.hadoop.hive.metastore.ObjectStore.getPersistenceManager(ObjectStore.java:550)
+ at org.apache.hadoop.hive.metastore.ObjectStore.initializeHelper(ObjectStore.java:405)
+ at org.apache.hadoop.hive.metastore.ObjectStore.initialize(ObjectStore.java:342)
+ at org.apache.hadoop.hive.metastore.ObjectStore.setConf(ObjectStore.java:303)
+ at org.apache.hadoop.util.ReflectionUtils.setConf(ReflectionUtils.java:76)
+ at org.apache.hadoop.util.ReflectionUtils.newInstance(ReflectionUtils.java:136)
+ at org.apache.hadoop.hive.metastore.RawStoreProxy.(RawStoreProxy.java:58)
+ at org.apache.hadoop.hive.metastore.RawStoreProxy.getProxy(RawStoreProxy.java:67)
+ at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.newRawStoreForConf(HiveMetaStore.java:628)
+ at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.getMSForConf(HiveMetaStore.java:594)
+ at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.getMS(HiveMetaStore.java:588)
+ at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.createDefaultDB(HiveMetaStore.java:655)
+ at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.init(HiveMetaStore.java:431)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.hadoop.hive.metastore.RetryingHMSHandler.invokeInternal(RetryingHMSHandler.java:148)
+ at org.apache.hadoop.hive.metastore.RetryingHMSHandler.invoke(RetryingHMSHandler.java:107)
+ at org.apache.hadoop.hive.metastore.RetryingHMSHandler.(RetryingHMSHandler.java:79)
+ at org.apache.hadoop.hive.metastore.RetryingHMSHandler.getProxy(RetryingHMSHandler.java:92)
+ at org.apache.hadoop.hive.metastore.HiveMetaStore.newRetryingHMSHandler(HiveMetaStore.java:6902)
+ at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.(HiveMetaStoreClient.java:164)
+ at org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.(SessionHiveMetaStoreClient.java:70)
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
+ at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
+ at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
+ at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1707)
+ at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.(RetryingMetaStoreClient.java:83)
+ at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:133)
+ at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:104)
+ at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:3600)
+ at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3652)
+ at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3632)
+ at org.apache.hadoop.hive.ql.metadata.Hive.getAllFunctions(Hive.java:3894)
+ at org.apache.hadoop.hive.ql.metadata.Hive.reloadFunctions(Hive.java:248)
+ at org.apache.hadoop.hive.ql.metadata.Hive.registerAllFunctionsOnce(Hive.java:231)
+ at org.apache.hadoop.hive.ql.metadata.Hive.(Hive.java:388)
+ at org.apache.hadoop.hive.ql.metadata.Hive.create(Hive.java:332)
+ at org.apache.hadoop.hive.ql.metadata.Hive.getInternal(Hive.java:312)
+ at org.apache.hadoop.hive.ql.metadata.Hive.get(Hive.java:288)
+ at org.apache.spark.sql.hive.client.HiveClientImpl.client(HiveClientImpl.scala:260)
+ at org.apache.spark.sql.hive.client.HiveClientImpl.$anonfun$withHiveState$1(HiveClientImpl.scala:286)
+ at org.apache.spark.sql.hive.client.HiveClientImpl.liftedTree1$1(HiveClientImpl.scala:227)
+ at org.apache.spark.sql.hive.client.HiveClientImpl.retryLocked(HiveClientImpl.scala:226)
+ at org.apache.spark.sql.hive.client.HiveClientImpl.withHiveState(HiveClientImpl.scala:276)
+ at org.apache.spark.sql.hive.client.HiveClientImpl.databaseExists(HiveClientImpl.scala:389)
+ at org.apache.spark.sql.hive.HiveExternalCatalog.$anonfun$databaseExists$1(HiveExternalCatalog.scala:221)
+ at scala.runtime.java8.JFunction0$mcZ$sp.apply(JFunction0$mcZ$sp.java:23)
+ at org.apache.spark.sql.hive.HiveExternalCatalog.withClient(HiveExternalCatalog.scala:99)
+ at org.apache.spark.sql.hive.HiveExternalCatalog.databaseExists(HiveExternalCatalog.scala:221)
+ at org.apache.spark.sql.internal.SharedState.externalCatalog$lzycompute(SharedState.scala:137)
+ at org.apache.spark.sql.internal.SharedState.externalCatalog(SharedState.scala:127)
+ at org.apache.spark.sql.internal.SharedState.globalTempViewManager$lzycompute(SharedState.scala:157)
+ at org.apache.spark.sql.internal.SharedState.globalTempViewManager(SharedState.scala:155)
+ at org.apache.spark.sql.hive.HiveSessionStateBuilder.$anonfun$catalog$2(HiveSessionStateBuilder.scala:59)
+ at org.apache.spark.sql.catalyst.catalog.SessionCatalog.globalTempViewManager$lzycompute(SessionCatalog.scala:93)
+ at org.apache.spark.sql.catalyst.catalog.SessionCatalog.globalTempViewManager(SessionCatalog.scala:93)
+ at org.apache.spark.sql.catalyst.catalog.SessionCatalog.setCurrentDatabase(SessionCatalog.scala:260)
+ at org.apache.spark.sql.connector.catalog.CatalogManager.setCurrentNamespace(CatalogManager.scala:113)
+ at org.apache.spark.sql.execution.datasources.v2.SetCatalogAndNamespaceExec.$anonfun$run$2(SetCatalogAndNamespaceExec.scala:36)
+ at org.apache.spark.sql.execution.datasources.v2.SetCatalogAndNamespaceExec.$anonfun$run$2$adapted(SetCatalogAndNamespaceExec.scala:36)
+ at scala.Option.map(Option.scala:230)
+ at org.apache.spark.sql.execution.datasources.v2.SetCatalogAndNamespaceExec.run(SetCatalogAndNamespaceExec.scala:36)
+ at org.apache.spark.sql.execution.datasources.v2.V2CommandExec.result$lzycompute(V2CommandExec.scala:39)
+ at org.apache.spark.sql.execution.datasources.v2.V2CommandExec.result(V2CommandExec.scala:39)
+ at org.apache.spark.sql.execution.datasources.v2.V2CommandExec.executeCollect(V2CommandExec.scala:45)
+ at org.apache.spark.sql.Dataset.$anonfun$logicalPlan$1(Dataset.scala:229)
+ at org.apache.spark.sql.Dataset.$anonfun$withAction$1(Dataset.scala:3616)
+ at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$5(SQLExecution.scala:100)
+ at org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:160)
+ at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$1(SQLExecution.scala:87)
+ at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:763)
+ at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:64)
+ at org.apache.spark.sql.Dataset.withAction(Dataset.scala:3614)
+ at org.apache.spark.sql.Dataset.(Dataset.scala:229)
+ at org.apache.spark.sql.Dataset$.$anonfun$ofRows$2(Dataset.scala:100)
+ at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:763)
+ at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:97)
+ at org.apache.spark.sql.SparkSession.$anonfun$sql$1(SparkSession.scala:606)
+ at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:763)
+ at org.apache.spark.sql.SparkSession.sql(SparkSession.scala:601)
+ at com.atguigu.userprofile.ml.app.BusiGenderApp$.main(BusiGenderApp.scala:69)
+ at com.atguigu.userprofile.ml.app.BusiGenderApp.main(BusiGenderApp.scala)
+Caused by: java.net.ConnectException: Connection timed out: connect
+ at java.net.DualStackPlainSocketImpl.connect0(Native Method)
+ at java.net.DualStackPlainSocketImpl.socketConnect(DualStackPlainSocketImpl.java:75)
+ at java.net.AbstractPlainSocketImpl.doConnect(AbstractPlainSocketImpl.java:476)
+ at java.net.AbstractPlainSocketImpl.connectToAddress(AbstractPlainSocketImpl.java:218)
+ at java.net.AbstractPlainSocketImpl.connect(AbstractPlainSocketImpl.java:200)
+ at java.net.PlainSocketImpl.connect(PlainSocketImpl.java:162)
+ at java.net.SocksSocketImpl.connect(SocksSocketImpl.java:394)
+ at java.net.Socket.connect(Socket.java:606)
+ at com.mysql.jdbc.StandardSocketFactory.connect(StandardSocketFactory.java:211)
+ at com.mysql.jdbc.MysqlIO.(MysqlIO.java:301)
+ ... 126 more
+------
+
+org.datanucleus.exceptions.NucleusDataStoreException: Unable to open a test connection to the given database. JDBC url = jdbc:mysql://Ding202:3306/metastore?createDatabaseIfNotExist=true&characterEncoding=utf-8&useSSL=false, username = root. Terminating connection pool (set lazyInit to true if you expect to start your database after your app). Original Exception: ------
+com.mysql.jdbc.exceptions.jdbc4.CommunicationsException: Communications link failure
+
+The last packet sent successfully to the server was 0 milliseconds ago. The driver has not received any packets from the server.
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
+ at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
+ at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
+ at com.mysql.jdbc.Util.handleNewInstance(Util.java:425)
+ at com.mysql.jdbc.SQLError.createCommunicationsException(SQLError.java:990)
+ at com.mysql.jdbc.MysqlIO.(MysqlIO.java:342)
+ at com.mysql.jdbc.ConnectionImpl.coreConnect(ConnectionImpl.java:2197)
+ at com.mysql.jdbc.ConnectionImpl.connectOneTryOnly(ConnectionImpl.java:2230)
+ at com.mysql.jdbc.ConnectionImpl.createNewIO(ConnectionImpl.java:2025)
+ at com.mysql.jdbc.ConnectionImpl.(ConnectionImpl.java:778)
+ at com.mysql.jdbc.JDBC4Connection.(JDBC4Connection.java:47)
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
+ at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
+ at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
+ at com.mysql.jdbc.Util.handleNewInstance(Util.java:425)
+ at com.mysql.jdbc.ConnectionImpl.getInstance(ConnectionImpl.java:386)
+ at com.mysql.jdbc.NonRegisteringDriver.connect(NonRegisteringDriver.java:330)
+ at java.sql.DriverManager.getConnection(DriverManager.java:664)
+ at java.sql.DriverManager.getConnection(DriverManager.java:208)
+ at com.jolbox.bonecp.BoneCP.obtainRawInternalConnection(BoneCP.java:361)
+ at com.jolbox.bonecp.BoneCP.(BoneCP.java:416)
+ at com.jolbox.bonecp.BoneCPDataSource.getConnection(BoneCPDataSource.java:120)
+ at org.datanucleus.store.rdbms.ConnectionFactoryImpl$ManagedConnectionImpl.getConnection(ConnectionFactoryImpl.java:483)
+ at org.datanucleus.store.rdbms.RDBMSStoreManager.(RDBMSStoreManager.java:297)
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
+ at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
+ at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
+ at org.datanucleus.plugin.NonManagedPluginRegistry.createExecutableExtension(NonManagedPluginRegistry.java:606)
+ at org.datanucleus.plugin.PluginManager.createExecutableExtension(PluginManager.java:301)
+ at org.datanucleus.NucleusContextHelper.createStoreManagerForProperties(NucleusContextHelper.java:133)
+ at org.datanucleus.PersistenceNucleusContextImpl.initialise(PersistenceNucleusContextImpl.java:422)
+ at org.datanucleus.api.jdo.JDOPersistenceManagerFactory.freezeConfiguration(JDOPersistenceManagerFactory.java:817)
+ at org.datanucleus.api.jdo.JDOPersistenceManagerFactory.createPersistenceManagerFactory(JDOPersistenceManagerFactory.java:334)
+ at org.datanucleus.api.jdo.JDOPersistenceManagerFactory.getPersistenceManagerFactory(JDOPersistenceManagerFactory.java:213)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at javax.jdo.JDOHelper$16.run(JDOHelper.java:1965)
+ at java.security.AccessController.doPrivileged(Native Method)
+ at javax.jdo.JDOHelper.invoke(JDOHelper.java:1960)
+ at javax.jdo.JDOHelper.invokeGetPersistenceManagerFactoryOnImplementation(JDOHelper.java:1166)
+ at javax.jdo.JDOHelper.getPersistenceManagerFactory(JDOHelper.java:808)
+ at javax.jdo.JDOHelper.getPersistenceManagerFactory(JDOHelper.java:701)
+ at org.apache.hadoop.hive.metastore.ObjectStore.getPMF(ObjectStore.java:521)
+ at org.apache.hadoop.hive.metastore.ObjectStore.getPersistenceManager(ObjectStore.java:550)
+ at org.apache.hadoop.hive.metastore.ObjectStore.initializeHelper(ObjectStore.java:405)
+ at org.apache.hadoop.hive.metastore.ObjectStore.initialize(ObjectStore.java:342)
+ at org.apache.hadoop.hive.metastore.ObjectStore.setConf(ObjectStore.java:303)
+ at org.apache.hadoop.util.ReflectionUtils.setConf(ReflectionUtils.java:76)
+ at org.apache.hadoop.util.ReflectionUtils.newInstance(ReflectionUtils.java:136)
+ at org.apache.hadoop.hive.metastore.RawStoreProxy.(RawStoreProxy.java:58)
+ at org.apache.hadoop.hive.metastore.RawStoreProxy.getProxy(RawStoreProxy.java:67)
+ at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.newRawStoreForConf(HiveMetaStore.java:628)
+ at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.getMSForConf(HiveMetaStore.java:594)
+ at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.getMS(HiveMetaStore.java:588)
+ at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.createDefaultDB(HiveMetaStore.java:655)
+ at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.init(HiveMetaStore.java:431)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.hadoop.hive.metastore.RetryingHMSHandler.invokeInternal(RetryingHMSHandler.java:148)
+ at org.apache.hadoop.hive.metastore.RetryingHMSHandler.invoke(RetryingHMSHandler.java:107)
+ at org.apache.hadoop.hive.metastore.RetryingHMSHandler.(RetryingHMSHandler.java:79)
+ at org.apache.hadoop.hive.metastore.RetryingHMSHandler.getProxy(RetryingHMSHandler.java:92)
+ at org.apache.hadoop.hive.metastore.HiveMetaStore.newRetryingHMSHandler(HiveMetaStore.java:6902)
+ at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.(HiveMetaStoreClient.java:164)
+ at org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.(SessionHiveMetaStoreClient.java:70)
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
+ at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
+ at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
+ at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1707)
+ at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.(RetryingMetaStoreClient.java:83)
+ at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:133)
+ at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:104)
+ at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:3600)
+ at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3652)
+ at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3632)
+ at org.apache.hadoop.hive.ql.metadata.Hive.getAllFunctions(Hive.java:3894)
+ at org.apache.hadoop.hive.ql.metadata.Hive.reloadFunctions(Hive.java:248)
+ at org.apache.hadoop.hive.ql.metadata.Hive.registerAllFunctionsOnce(Hive.java:231)
+ at org.apache.hadoop.hive.ql.metadata.Hive.(Hive.java:388)
+ at org.apache.hadoop.hive.ql.metadata.Hive.create(Hive.java:332)
+ at org.apache.hadoop.hive.ql.metadata.Hive.getInternal(Hive.java:312)
+ at org.apache.hadoop.hive.ql.metadata.Hive.get(Hive.java:288)
+ at org.apache.spark.sql.hive.client.HiveClientImpl.client(HiveClientImpl.scala:260)
+ at org.apache.spark.sql.hive.client.HiveClientImpl.$anonfun$withHiveState$1(HiveClientImpl.scala:286)
+ at org.apache.spark.sql.hive.client.HiveClientImpl.liftedTree1$1(HiveClientImpl.scala:227)
+ at org.apache.spark.sql.hive.client.HiveClientImpl.retryLocked(HiveClientImpl.scala:226)
+ at org.apache.spark.sql.hive.client.HiveClientImpl.withHiveState(HiveClientImpl.scala:276)
+ at org.apache.spark.sql.hive.client.HiveClientImpl.databaseExists(HiveClientImpl.scala:389)
+ at org.apache.spark.sql.hive.HiveExternalCatalog.$anonfun$databaseExists$1(HiveExternalCatalog.scala:221)
+ at scala.runtime.java8.JFunction0$mcZ$sp.apply(JFunction0$mcZ$sp.java:23)
+ at org.apache.spark.sql.hive.HiveExternalCatalog.withClient(HiveExternalCatalog.scala:99)
+ at org.apache.spark.sql.hive.HiveExternalCatalog.databaseExists(HiveExternalCatalog.scala:221)
+ at org.apache.spark.sql.internal.SharedState.externalCatalog$lzycompute(SharedState.scala:137)
+ at org.apache.spark.sql.internal.SharedState.externalCatalog(SharedState.scala:127)
+ at org.apache.spark.sql.internal.SharedState.globalTempViewManager$lzycompute(SharedState.scala:157)
+ at org.apache.spark.sql.internal.SharedState.globalTempViewManager(SharedState.scala:155)
+ at org.apache.spark.sql.hive.HiveSessionStateBuilder.$anonfun$catalog$2(HiveSessionStateBuilder.scala:59)
+ at org.apache.spark.sql.catalyst.catalog.SessionCatalog.globalTempViewManager$lzycompute(SessionCatalog.scala:93)
+ at org.apache.spark.sql.catalyst.catalog.SessionCatalog.globalTempViewManager(SessionCatalog.scala:93)
+ at org.apache.spark.sql.catalyst.catalog.SessionCatalog.setCurrentDatabase(SessionCatalog.scala:260)
+ at org.apache.spark.sql.connector.catalog.CatalogManager.setCurrentNamespace(CatalogManager.scala:113)
+ at org.apache.spark.sql.execution.datasources.v2.SetCatalogAndNamespaceExec.$anonfun$run$2(SetCatalogAndNamespaceExec.scala:36)
+ at org.apache.spark.sql.execution.datasources.v2.SetCatalogAndNamespaceExec.$anonfun$run$2$adapted(SetCatalogAndNamespaceExec.scala:36)
+ at scala.Option.map(Option.scala:230)
+ at org.apache.spark.sql.execution.datasources.v2.SetCatalogAndNamespaceExec.run(SetCatalogAndNamespaceExec.scala:36)
+ at org.apache.spark.sql.execution.datasources.v2.V2CommandExec.result$lzycompute(V2CommandExec.scala:39)
+ at org.apache.spark.sql.execution.datasources.v2.V2CommandExec.result(V2CommandExec.scala:39)
+ at org.apache.spark.sql.execution.datasources.v2.V2CommandExec.executeCollect(V2CommandExec.scala:45)
+ at org.apache.spark.sql.Dataset.$anonfun$logicalPlan$1(Dataset.scala:229)
+ at org.apache.spark.sql.Dataset.$anonfun$withAction$1(Dataset.scala:3616)
+ at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$5(SQLExecution.scala:100)
+ at org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:160)
+ at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$1(SQLExecution.scala:87)
+ at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:763)
+ at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:64)
+ at org.apache.spark.sql.Dataset.withAction(Dataset.scala:3614)
+ at org.apache.spark.sql.Dataset.(Dataset.scala:229)
+ at org.apache.spark.sql.Dataset$.$anonfun$ofRows$2(Dataset.scala:100)
+ at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:763)
+ at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:97)
+ at org.apache.spark.sql.SparkSession.$anonfun$sql$1(SparkSession.scala:606)
+ at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:763)
+ at org.apache.spark.sql.SparkSession.sql(SparkSession.scala:601)
+ at com.atguigu.userprofile.ml.app.BusiGenderApp$.main(BusiGenderApp.scala:69)
+ at com.atguigu.userprofile.ml.app.BusiGenderApp.main(BusiGenderApp.scala)
+Caused by: java.net.ConnectException: Connection timed out: connect
+ at java.net.DualStackPlainSocketImpl.connect0(Native Method)
+ at java.net.DualStackPlainSocketImpl.socketConnect(DualStackPlainSocketImpl.java:75)
+ at java.net.AbstractPlainSocketImpl.doConnect(AbstractPlainSocketImpl.java:476)
+ at java.net.AbstractPlainSocketImpl.connectToAddress(AbstractPlainSocketImpl.java:218)
+ at java.net.AbstractPlainSocketImpl.connect(AbstractPlainSocketImpl.java:200)
+ at java.net.PlainSocketImpl.connect(PlainSocketImpl.java:162)
+ at java.net.SocksSocketImpl.connect(SocksSocketImpl.java:394)
+ at java.net.Socket.connect(Socket.java:606)
+ at com.mysql.jdbc.StandardSocketFactory.connect(StandardSocketFactory.java:211)
+ at com.mysql.jdbc.MysqlIO.(MysqlIO.java:301)
+ ... 126 more
+------
+
+ at org.datanucleus.store.rdbms.ConnectionFactoryImpl$ManagedConnectionImpl.getConnection(ConnectionFactoryImpl.java:498)
+ at org.datanucleus.store.rdbms.RDBMSStoreManager.(RDBMSStoreManager.java:297)
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
+ at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
+ at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
+ at org.datanucleus.plugin.NonManagedPluginRegistry.createExecutableExtension(NonManagedPluginRegistry.java:606)
+ at org.datanucleus.plugin.PluginManager.createExecutableExtension(PluginManager.java:301)
+ at org.datanucleus.NucleusContextHelper.createStoreManagerForProperties(NucleusContextHelper.java:133)
+ at org.datanucleus.PersistenceNucleusContextImpl.initialise(PersistenceNucleusContextImpl.java:422)
+ at org.datanucleus.api.jdo.JDOPersistenceManagerFactory.freezeConfiguration(JDOPersistenceManagerFactory.java:817)
+ at org.datanucleus.api.jdo.JDOPersistenceManagerFactory.createPersistenceManagerFactory(JDOPersistenceManagerFactory.java:334)
+ at org.datanucleus.api.jdo.JDOPersistenceManagerFactory.getPersistenceManagerFactory(JDOPersistenceManagerFactory.java:213)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at javax.jdo.JDOHelper$16.run(JDOHelper.java:1965)
+ at java.security.AccessController.doPrivileged(Native Method)
+ at javax.jdo.JDOHelper.invoke(JDOHelper.java:1960)
+ at javax.jdo.JDOHelper.invokeGetPersistenceManagerFactoryOnImplementation(JDOHelper.java:1166)
+ at javax.jdo.JDOHelper.getPersistenceManagerFactory(JDOHelper.java:808)
+ at javax.jdo.JDOHelper.getPersistenceManagerFactory(JDOHelper.java:701)
+ at org.apache.hadoop.hive.metastore.ObjectStore.getPMF(ObjectStore.java:521)
+ at org.apache.hadoop.hive.metastore.ObjectStore.getPersistenceManager(ObjectStore.java:550)
+ at org.apache.hadoop.hive.metastore.ObjectStore.initializeHelper(ObjectStore.java:405)
+ at org.apache.hadoop.hive.metastore.ObjectStore.initialize(ObjectStore.java:342)
+ at org.apache.hadoop.hive.metastore.ObjectStore.setConf(ObjectStore.java:303)
+ at org.apache.hadoop.util.ReflectionUtils.setConf(ReflectionUtils.java:76)
+ at org.apache.hadoop.util.ReflectionUtils.newInstance(ReflectionUtils.java:136)
+ at org.apache.hadoop.hive.metastore.RawStoreProxy.(RawStoreProxy.java:58)
+ at org.apache.hadoop.hive.metastore.RawStoreProxy.getProxy(RawStoreProxy.java:67)
+ at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.newRawStoreForConf(HiveMetaStore.java:628)
+ at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.getMSForConf(HiveMetaStore.java:594)
+ at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.getMS(HiveMetaStore.java:588)
+ at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.createDefaultDB(HiveMetaStore.java:655)
+ at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.init(HiveMetaStore.java:431)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.hadoop.hive.metastore.RetryingHMSHandler.invokeInternal(RetryingHMSHandler.java:148)
+ at org.apache.hadoop.hive.metastore.RetryingHMSHandler.invoke(RetryingHMSHandler.java:107)
+ at org.apache.hadoop.hive.metastore.RetryingHMSHandler.(RetryingHMSHandler.java:79)
+ at org.apache.hadoop.hive.metastore.RetryingHMSHandler.getProxy(RetryingHMSHandler.java:92)
+ at org.apache.hadoop.hive.metastore.HiveMetaStore.newRetryingHMSHandler(HiveMetaStore.java:6902)
+ at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.(HiveMetaStoreClient.java:164)
+ at org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.(SessionHiveMetaStoreClient.java:70)
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
+ at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
+ at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
+ at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1707)
+ at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.(RetryingMetaStoreClient.java:83)
+ at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:133)
+ at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:104)
+ at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:3600)
+ at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3652)
+ at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3632)
+ at org.apache.hadoop.hive.ql.metadata.Hive.getAllFunctions(Hive.java:3894)
+ at org.apache.hadoop.hive.ql.metadata.Hive.reloadFunctions(Hive.java:248)
+ at org.apache.hadoop.hive.ql.metadata.Hive.registerAllFunctionsOnce(Hive.java:231)
+ at org.apache.hadoop.hive.ql.metadata.Hive.(Hive.java:388)
+ at org.apache.hadoop.hive.ql.metadata.Hive.create(Hive.java:332)
+ at org.apache.hadoop.hive.ql.metadata.Hive.getInternal(Hive.java:312)
+ at org.apache.hadoop.hive.ql.metadata.Hive.get(Hive.java:288)
+ at org.apache.spark.sql.hive.client.HiveClientImpl.client(HiveClientImpl.scala:260)
+ at org.apache.spark.sql.hive.client.HiveClientImpl.$anonfun$withHiveState$1(HiveClientImpl.scala:286)
+ at org.apache.spark.sql.hive.client.HiveClientImpl.liftedTree1$1(HiveClientImpl.scala:227)
+ at org.apache.spark.sql.hive.client.HiveClientImpl.retryLocked(HiveClientImpl.scala:226)
+ at org.apache.spark.sql.hive.client.HiveClientImpl.withHiveState(HiveClientImpl.scala:276)
+ at org.apache.spark.sql.hive.client.HiveClientImpl.databaseExists(HiveClientImpl.scala:389)
+ at org.apache.spark.sql.hive.HiveExternalCatalog.$anonfun$databaseExists$1(HiveExternalCatalog.scala:221)
+ at scala.runtime.java8.JFunction0$mcZ$sp.apply(JFunction0$mcZ$sp.java:23)
+ at org.apache.spark.sql.hive.HiveExternalCatalog.withClient(HiveExternalCatalog.scala:99)
+ at org.apache.spark.sql.hive.HiveExternalCatalog.databaseExists(HiveExternalCatalog.scala:221)
+ at org.apache.spark.sql.internal.SharedState.externalCatalog$lzycompute(SharedState.scala:137)
+ at org.apache.spark.sql.internal.SharedState.externalCatalog(SharedState.scala:127)
+ at org.apache.spark.sql.internal.SharedState.globalTempViewManager$lzycompute(SharedState.scala:157)
+ at org.apache.spark.sql.internal.SharedState.globalTempViewManager(SharedState.scala:155)
+ at org.apache.spark.sql.hive.HiveSessionStateBuilder.$anonfun$catalog$2(HiveSessionStateBuilder.scala:59)
+ at org.apache.spark.sql.catalyst.catalog.SessionCatalog.globalTempViewManager$lzycompute(SessionCatalog.scala:93)
+ at org.apache.spark.sql.catalyst.catalog.SessionCatalog.globalTempViewManager(SessionCatalog.scala:93)
+ at org.apache.spark.sql.catalyst.catalog.SessionCatalog.setCurrentDatabase(SessionCatalog.scala:260)
+ at org.apache.spark.sql.connector.catalog.CatalogManager.setCurrentNamespace(CatalogManager.scala:113)
+ at org.apache.spark.sql.execution.datasources.v2.SetCatalogAndNamespaceExec.$anonfun$run$2(SetCatalogAndNamespaceExec.scala:36)
+ at org.apache.spark.sql.execution.datasources.v2.SetCatalogAndNamespaceExec.$anonfun$run$2$adapted(SetCatalogAndNamespaceExec.scala:36)
+ at scala.Option.map(Option.scala:230)
+ at org.apache.spark.sql.execution.datasources.v2.SetCatalogAndNamespaceExec.run(SetCatalogAndNamespaceExec.scala:36)
+ at org.apache.spark.sql.execution.datasources.v2.V2CommandExec.result$lzycompute(V2CommandExec.scala:39)
+ at org.apache.spark.sql.execution.datasources.v2.V2CommandExec.result(V2CommandExec.scala:39)
+ at org.apache.spark.sql.execution.datasources.v2.V2CommandExec.executeCollect(V2CommandExec.scala:45)
+ at org.apache.spark.sql.Dataset.$anonfun$logicalPlan$1(Dataset.scala:229)
+ at org.apache.spark.sql.Dataset.$anonfun$withAction$1(Dataset.scala:3616)
+ at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$5(SQLExecution.scala:100)
+ at org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:160)
+ at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$1(SQLExecution.scala:87)
+ at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:763)
+ at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:64)
+ at org.apache.spark.sql.Dataset.withAction(Dataset.scala:3614)
+ at org.apache.spark.sql.Dataset.&lt;init&gt;(Dataset.scala:229)
+ at org.apache.spark.sql.Dataset$.$anonfun$ofRows$2(Dataset.scala:100)
+ at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:763)
+ at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:97)
+ at org.apache.spark.sql.SparkSession.$anonfun$sql$1(SparkSession.scala:606)
+ at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:763)
+ at org.apache.spark.sql.SparkSession.sql(SparkSession.scala:601)
+ at com.atguigu.userprofile.ml.app.BusiGenderApp$.main(BusiGenderApp.scala:69)
+ at com.atguigu.userprofile.ml.app.BusiGenderApp.main(BusiGenderApp.scala)
+Caused by: java.sql.SQLException: Unable to open a test connection to the given database. JDBC url = jdbc:mysql://Ding202:3306/metastore?createDatabaseIfNotExist=true&characterEncoding=utf-8&useSSL=false, username = root. Terminating connection pool (set lazyInit to true if you expect to start your database after your app). Original Exception: ------
+com.mysql.jdbc.exceptions.jdbc4.CommunicationsException: Communications link failure
+
+The last packet sent successfully to the server was 0 milliseconds ago. The driver has not received any packets from the server.
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
+ at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
+ at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
+ at com.mysql.jdbc.Util.handleNewInstance(Util.java:425)
+ at com.mysql.jdbc.SQLError.createCommunicationsException(SQLError.java:990)
+ at com.mysql.jdbc.MysqlIO.&lt;init&gt;(MysqlIO.java:342)
+ at com.mysql.jdbc.ConnectionImpl.coreConnect(ConnectionImpl.java:2197)
+ at com.mysql.jdbc.ConnectionImpl.connectOneTryOnly(ConnectionImpl.java:2230)
+ at com.mysql.jdbc.ConnectionImpl.createNewIO(ConnectionImpl.java:2025)
+ at com.mysql.jdbc.ConnectionImpl.&lt;init&gt;(ConnectionImpl.java:778)
+ at com.mysql.jdbc.JDBC4Connection.&lt;init&gt;(JDBC4Connection.java:47)
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
+ at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
+ at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
+ at com.mysql.jdbc.Util.handleNewInstance(Util.java:425)
+ at com.mysql.jdbc.ConnectionImpl.getInstance(ConnectionImpl.java:386)
+ at com.mysql.jdbc.NonRegisteringDriver.connect(NonRegisteringDriver.java:330)
+ at java.sql.DriverManager.getConnection(DriverManager.java:664)
+ at java.sql.DriverManager.getConnection(DriverManager.java:208)
+ at com.jolbox.bonecp.BoneCP.obtainRawInternalConnection(BoneCP.java:361)
+ at com.jolbox.bonecp.BoneCP.&lt;init&gt;(BoneCP.java:416)
+ at com.jolbox.bonecp.BoneCPDataSource.getConnection(BoneCPDataSource.java:120)
+ at org.datanucleus.store.rdbms.ConnectionFactoryImpl$ManagedConnectionImpl.getConnection(ConnectionFactoryImpl.java:483)
+ at org.datanucleus.store.rdbms.RDBMSStoreManager.&lt;init&gt;(RDBMSStoreManager.java:297)
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
+ at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
+ at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
+ at org.datanucleus.plugin.NonManagedPluginRegistry.createExecutableExtension(NonManagedPluginRegistry.java:606)
+ at org.datanucleus.plugin.PluginManager.createExecutableExtension(PluginManager.java:301)
+ at org.datanucleus.NucleusContextHelper.createStoreManagerForProperties(NucleusContextHelper.java:133)
+ at org.datanucleus.PersistenceNucleusContextImpl.initialise(PersistenceNucleusContextImpl.java:422)
+ at org.datanucleus.api.jdo.JDOPersistenceManagerFactory.freezeConfiguration(JDOPersistenceManagerFactory.java:817)
+ at org.datanucleus.api.jdo.JDOPersistenceManagerFactory.createPersistenceManagerFactory(JDOPersistenceManagerFactory.java:334)
+ at org.datanucleus.api.jdo.JDOPersistenceManagerFactory.getPersistenceManagerFactory(JDOPersistenceManagerFactory.java:213)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at javax.jdo.JDOHelper$16.run(JDOHelper.java:1965)
+ at java.security.AccessController.doPrivileged(Native Method)
+ at javax.jdo.JDOHelper.invoke(JDOHelper.java:1960)
+ at javax.jdo.JDOHelper.invokeGetPersistenceManagerFactoryOnImplementation(JDOHelper.java:1166)
+ at javax.jdo.JDOHelper.getPersistenceManagerFactory(JDOHelper.java:808)
+ at javax.jdo.JDOHelper.getPersistenceManagerFactory(JDOHelper.java:701)
+ at org.apache.hadoop.hive.metastore.ObjectStore.getPMF(ObjectStore.java:521)
+ at org.apache.hadoop.hive.metastore.ObjectStore.getPersistenceManager(ObjectStore.java:550)
+ at org.apache.hadoop.hive.metastore.ObjectStore.initializeHelper(ObjectStore.java:405)
+ at org.apache.hadoop.hive.metastore.ObjectStore.initialize(ObjectStore.java:342)
+ at org.apache.hadoop.hive.metastore.ObjectStore.setConf(ObjectStore.java:303)
+ at org.apache.hadoop.util.ReflectionUtils.setConf(ReflectionUtils.java:76)
+ at org.apache.hadoop.util.ReflectionUtils.newInstance(ReflectionUtils.java:136)
+ at org.apache.hadoop.hive.metastore.RawStoreProxy.&lt;init&gt;(RawStoreProxy.java:58)
+ at org.apache.hadoop.hive.metastore.RawStoreProxy.getProxy(RawStoreProxy.java:67)
+ at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.newRawStoreForConf(HiveMetaStore.java:628)
+ at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.getMSForConf(HiveMetaStore.java:594)
+ at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.getMS(HiveMetaStore.java:588)
+ at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.createDefaultDB(HiveMetaStore.java:655)
+ at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.init(HiveMetaStore.java:431)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.hadoop.hive.metastore.RetryingHMSHandler.invokeInternal(RetryingHMSHandler.java:148)
+ at org.apache.hadoop.hive.metastore.RetryingHMSHandler.invoke(RetryingHMSHandler.java:107)
+ at org.apache.hadoop.hive.metastore.RetryingHMSHandler.&lt;init&gt;(RetryingHMSHandler.java:79)
+ at org.apache.hadoop.hive.metastore.RetryingHMSHandler.getProxy(RetryingHMSHandler.java:92)
+ at org.apache.hadoop.hive.metastore.HiveMetaStore.newRetryingHMSHandler(HiveMetaStore.java:6902)
+ at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.&lt;init&gt;(HiveMetaStoreClient.java:164)
+ at org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.&lt;init&gt;(SessionHiveMetaStoreClient.java:70)
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
+ at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
+ at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
+ at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1707)
+ at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.&lt;init&gt;(RetryingMetaStoreClient.java:83)
+ at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:133)
+ at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:104)
+ at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:3600)
+ at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3652)
+ at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3632)
+ at org.apache.hadoop.hive.ql.metadata.Hive.getAllFunctions(Hive.java:3894)
+ at org.apache.hadoop.hive.ql.metadata.Hive.reloadFunctions(Hive.java:248)
+ at org.apache.hadoop.hive.ql.metadata.Hive.registerAllFunctionsOnce(Hive.java:231)
+ at org.apache.hadoop.hive.ql.metadata.Hive.&lt;init&gt;(Hive.java:388)
+ at org.apache.hadoop.hive.ql.metadata.Hive.create(Hive.java:332)
+ at org.apache.hadoop.hive.ql.metadata.Hive.getInternal(Hive.java:312)
+ at org.apache.hadoop.hive.ql.metadata.Hive.get(Hive.java:288)
+ at org.apache.spark.sql.hive.client.HiveClientImpl.client(HiveClientImpl.scala:260)
+ at org.apache.spark.sql.hive.client.HiveClientImpl.$anonfun$withHiveState$1(HiveClientImpl.scala:286)
+ at org.apache.spark.sql.hive.client.HiveClientImpl.liftedTree1$1(HiveClientImpl.scala:227)
+ at org.apache.spark.sql.hive.client.HiveClientImpl.retryLocked(HiveClientImpl.scala:226)
+ at org.apache.spark.sql.hive.client.HiveClientImpl.withHiveState(HiveClientImpl.scala:276)
+ at org.apache.spark.sql.hive.client.HiveClientImpl.databaseExists(HiveClientImpl.scala:389)
+ at org.apache.spark.sql.hive.HiveExternalCatalog.$anonfun$databaseExists$1(HiveExternalCatalog.scala:221)
+ at scala.runtime.java8.JFunction0$mcZ$sp.apply(JFunction0$mcZ$sp.java:23)
+ at org.apache.spark.sql.hive.HiveExternalCatalog.withClient(HiveExternalCatalog.scala:99)
+ at org.apache.spark.sql.hive.HiveExternalCatalog.databaseExists(HiveExternalCatalog.scala:221)
+ at org.apache.spark.sql.internal.SharedState.externalCatalog$lzycompute(SharedState.scala:137)
+ at org.apache.spark.sql.internal.SharedState.externalCatalog(SharedState.scala:127)
+ at org.apache.spark.sql.internal.SharedState.globalTempViewManager$lzycompute(SharedState.scala:157)
+ at org.apache.spark.sql.internal.SharedState.globalTempViewManager(SharedState.scala:155)
+ at org.apache.spark.sql.hive.HiveSessionStateBuilder.$anonfun$catalog$2(HiveSessionStateBuilder.scala:59)
+ at org.apache.spark.sql.catalyst.catalog.SessionCatalog.globalTempViewManager$lzycompute(SessionCatalog.scala:93)
+ at org.apache.spark.sql.catalyst.catalog.SessionCatalog.globalTempViewManager(SessionCatalog.scala:93)
+ at org.apache.spark.sql.catalyst.catalog.SessionCatalog.setCurrentDatabase(SessionCatalog.scala:260)
+ at org.apache.spark.sql.connector.catalog.CatalogManager.setCurrentNamespace(CatalogManager.scala:113)
+ at org.apache.spark.sql.execution.datasources.v2.SetCatalogAndNamespaceExec.$anonfun$run$2(SetCatalogAndNamespaceExec.scala:36)
+ at org.apache.spark.sql.execution.datasources.v2.SetCatalogAndNamespaceExec.$anonfun$run$2$adapted(SetCatalogAndNamespaceExec.scala:36)
+ at scala.Option.map(Option.scala:230)
+ at org.apache.spark.sql.execution.datasources.v2.SetCatalogAndNamespaceExec.run(SetCatalogAndNamespaceExec.scala:36)
+ at org.apache.spark.sql.execution.datasources.v2.V2CommandExec.result$lzycompute(V2CommandExec.scala:39)
+ at org.apache.spark.sql.execution.datasources.v2.V2CommandExec.result(V2CommandExec.scala:39)
+ at org.apache.spark.sql.execution.datasources.v2.V2CommandExec.executeCollect(V2CommandExec.scala:45)
+ at org.apache.spark.sql.Dataset.$anonfun$logicalPlan$1(Dataset.scala:229)
+ at org.apache.spark.sql.Dataset.$anonfun$withAction$1(Dataset.scala:3616)
+ at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$5(SQLExecution.scala:100)
+ at org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:160)
+ at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$1(SQLExecution.scala:87)
+ at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:763)
+ at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:64)
+ at org.apache.spark.sql.Dataset.withAction(Dataset.scala:3614)
+ at org.apache.spark.sql.Dataset.&lt;init&gt;(Dataset.scala:229)
+ at org.apache.spark.sql.Dataset$.$anonfun$ofRows$2(Dataset.scala:100)
+ at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:763)
+ at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:97)
+ at org.apache.spark.sql.SparkSession.$anonfun$sql$1(SparkSession.scala:606)
+ at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:763)
+ at org.apache.spark.sql.SparkSession.sql(SparkSession.scala:601)
+ at com.atguigu.userprofile.ml.app.BusiGenderApp$.main(BusiGenderApp.scala:69)
+ at com.atguigu.userprofile.ml.app.BusiGenderApp.main(BusiGenderApp.scala)
+Caused by: java.net.ConnectException: Connection timed out: connect
+ at java.net.DualStackPlainSocketImpl.connect0(Native Method)
+ at java.net.DualStackPlainSocketImpl.socketConnect(DualStackPlainSocketImpl.java:75)
+ at java.net.AbstractPlainSocketImpl.doConnect(AbstractPlainSocketImpl.java:476)
+ at java.net.AbstractPlainSocketImpl.connectToAddress(AbstractPlainSocketImpl.java:218)
+ at java.net.AbstractPlainSocketImpl.connect(AbstractPlainSocketImpl.java:200)
+ at java.net.PlainSocketImpl.connect(PlainSocketImpl.java:162)
+ at java.net.SocksSocketImpl.connect(SocksSocketImpl.java:394)
+ at java.net.Socket.connect(Socket.java:606)
+ at com.mysql.jdbc.StandardSocketFactory.connect(StandardSocketFactory.java:211)
+ at com.mysql.jdbc.MysqlIO.&lt;init&gt;(MysqlIO.java:301)
+ ... 126 more
+------
+
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
+ at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
+ at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
+ at com.jolbox.bonecp.PoolUtil.generateSQLException(PoolUtil.java:192)
+ at com.jolbox.bonecp.BoneCP.&lt;init&gt;(BoneCP.java:422)
+ at com.jolbox.bonecp.BoneCPDataSource.getConnection(BoneCPDataSource.java:120)
+ at org.datanucleus.store.rdbms.ConnectionFactoryImpl$ManagedConnectionImpl.getConnection(ConnectionFactoryImpl.java:483)
+ ... 108 more
+Caused by: com.mysql.jdbc.exceptions.jdbc4.CommunicationsException: Communications link failure
+
+The last packet sent successfully to the server was 0 milliseconds ago. The driver has not received any packets from the server.
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
+ at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
+ at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
+ at com.mysql.jdbc.Util.handleNewInstance(Util.java:425)
+ at com.mysql.jdbc.SQLError.createCommunicationsException(SQLError.java:990)
+ at com.mysql.jdbc.MysqlIO.&lt;init&gt;(MysqlIO.java:342)
+ at com.mysql.jdbc.ConnectionImpl.coreConnect(ConnectionImpl.java:2197)
+ at com.mysql.jdbc.ConnectionImpl.connectOneTryOnly(ConnectionImpl.java:2230)
+ at com.mysql.jdbc.ConnectionImpl.createNewIO(ConnectionImpl.java:2025)
+ at com.mysql.jdbc.ConnectionImpl.&lt;init&gt;(ConnectionImpl.java:778)
+ at com.mysql.jdbc.JDBC4Connection.&lt;init&gt;(JDBC4Connection.java:47)
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
+ at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
+ at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
+ at com.mysql.jdbc.Util.handleNewInstance(Util.java:425)
+ at com.mysql.jdbc.ConnectionImpl.getInstance(ConnectionImpl.java:386)
+ at com.mysql.jdbc.NonRegisteringDriver.connect(NonRegisteringDriver.java:330)
+ at java.sql.DriverManager.getConnection(DriverManager.java:664)
+ at java.sql.DriverManager.getConnection(DriverManager.java:208)
+ at com.jolbox.bonecp.BoneCP.obtainRawInternalConnection(BoneCP.java:361)
+ at com.jolbox.bonecp.BoneCP.&lt;init&gt;(BoneCP.java:416)
+ ... 110 more
+Caused by: java.net.ConnectException: Connection timed out: connect
+ at java.net.DualStackPlainSocketImpl.connect0(Native Method)
+ at java.net.DualStackPlainSocketImpl.socketConnect(DualStackPlainSocketImpl.java:75)
+ at java.net.AbstractPlainSocketImpl.doConnect(AbstractPlainSocketImpl.java:476)
+ at java.net.AbstractPlainSocketImpl.connectToAddress(AbstractPlainSocketImpl.java:218)
+ at java.net.AbstractPlainSocketImpl.connect(AbstractPlainSocketImpl.java:200)
+ at java.net.PlainSocketImpl.connect(PlainSocketImpl.java:162)
+ at java.net.SocksSocketImpl.connect(SocksSocketImpl.java:394)
+ at java.net.Socket.connect(Socket.java:606)
+ at com.mysql.jdbc.StandardSocketFactory.connect(StandardSocketFactory.java:211)
+ at com.mysql.jdbc.MysqlIO.&lt;init&gt;(MysqlIO.java:301)
+ ... 126 more
+Nested Throwables StackTrace:
+java.sql.SQLException: Unable to open a test connection to the given database. JDBC url = jdbc:mysql://Ding202:3306/metastore?createDatabaseIfNotExist=true&characterEncoding=utf-8&useSSL=false, username = root. Terminating connection pool (set lazyInit to true if you expect to start your database after your app). Original Exception: ------
+com.mysql.jdbc.exceptions.jdbc4.CommunicationsException: Communications link failure
+
+The last packet sent successfully to the server was 0 milliseconds ago. The driver has not received any packets from the server.
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
+ at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
+ at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
+ at com.mysql.jdbc.Util.handleNewInstance(Util.java:425)
+ at com.mysql.jdbc.SQLError.createCommunicationsException(SQLError.java:990)
+ at com.mysql.jdbc.MysqlIO.&lt;init&gt;(MysqlIO.java:342)
+ at com.mysql.jdbc.ConnectionImpl.coreConnect(ConnectionImpl.java:2197)
+ at com.mysql.jdbc.ConnectionImpl.connectOneTryOnly(ConnectionImpl.java:2230)
+ at com.mysql.jdbc.ConnectionImpl.createNewIO(ConnectionImpl.java:2025)
+ at com.mysql.jdbc.ConnectionImpl.&lt;init&gt;(ConnectionImpl.java:778)
+ at com.mysql.jdbc.JDBC4Connection.&lt;init&gt;(JDBC4Connection.java:47)
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
+ at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
+ at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
+ at com.mysql.jdbc.Util.handleNewInstance(Util.java:425)
+ at com.mysql.jdbc.ConnectionImpl.getInstance(ConnectionImpl.java:386)
+ at com.mysql.jdbc.NonRegisteringDriver.connect(NonRegisteringDriver.java:330)
+ at java.sql.DriverManager.getConnection(DriverManager.java:664)
+ at java.sql.DriverManager.getConnection(DriverManager.java:208)
+ at com.jolbox.bonecp.BoneCP.obtainRawInternalConnection(BoneCP.java:361)
+ at com.jolbox.bonecp.BoneCP.&lt;init&gt;(BoneCP.java:416)
+ at com.jolbox.bonecp.BoneCPDataSource.getConnection(BoneCPDataSource.java:120)
+ at org.datanucleus.store.rdbms.ConnectionFactoryImpl$ManagedConnectionImpl.getConnection(ConnectionFactoryImpl.java:483)
+ at org.datanucleus.store.rdbms.RDBMSStoreManager.&lt;init&gt;(RDBMSStoreManager.java:297)
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
+ at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
+ at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
+ at org.datanucleus.plugin.NonManagedPluginRegistry.createExecutableExtension(NonManagedPluginRegistry.java:606)
+ at org.datanucleus.plugin.PluginManager.createExecutableExtension(PluginManager.java:301)
+ at org.datanucleus.NucleusContextHelper.createStoreManagerForProperties(NucleusContextHelper.java:133)
+ at org.datanucleus.PersistenceNucleusContextImpl.initialise(PersistenceNucleusContextImpl.java:422)
+ at org.datanucleus.api.jdo.JDOPersistenceManagerFactory.freezeConfiguration(JDOPersistenceManagerFactory.java:817)
+ at org.datanucleus.api.jdo.JDOPersistenceManagerFactory.createPersistenceManagerFactory(JDOPersistenceManagerFactory.java:334)
+ at org.datanucleus.api.jdo.JDOPersistenceManagerFactory.getPersistenceManagerFactory(JDOPersistenceManagerFactory.java:213)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at javax.jdo.JDOHelper$16.run(JDOHelper.java:1965)
+ at java.security.AccessController.doPrivileged(Native Method)
+ at javax.jdo.JDOHelper.invoke(JDOHelper.java:1960)
+ at javax.jdo.JDOHelper.invokeGetPersistenceManagerFactoryOnImplementation(JDOHelper.java:1166)
+ at javax.jdo.JDOHelper.getPersistenceManagerFactory(JDOHelper.java:808)
+ at javax.jdo.JDOHelper.getPersistenceManagerFactory(JDOHelper.java:701)
+ at org.apache.hadoop.hive.metastore.ObjectStore.getPMF(ObjectStore.java:521)
+ at org.apache.hadoop.hive.metastore.ObjectStore.getPersistenceManager(ObjectStore.java:550)
+ at org.apache.hadoop.hive.metastore.ObjectStore.initializeHelper(ObjectStore.java:405)
+ at org.apache.hadoop.hive.metastore.ObjectStore.initialize(ObjectStore.java:342)
+ at org.apache.hadoop.hive.metastore.ObjectStore.setConf(ObjectStore.java:303)
+ at org.apache.hadoop.util.ReflectionUtils.setConf(ReflectionUtils.java:76)
+ at org.apache.hadoop.util.ReflectionUtils.newInstance(ReflectionUtils.java:136)
+ at org.apache.hadoop.hive.metastore.RawStoreProxy.&lt;init&gt;(RawStoreProxy.java:58)
+ at org.apache.hadoop.hive.metastore.RawStoreProxy.getProxy(RawStoreProxy.java:67)
+ at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.newRawStoreForConf(HiveMetaStore.java:628)
+ at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.getMSForConf(HiveMetaStore.java:594)
+ at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.getMS(HiveMetaStore.java:588)
+ at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.createDefaultDB(HiveMetaStore.java:655)
+ at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.init(HiveMetaStore.java:431)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.hadoop.hive.metastore.RetryingHMSHandler.invokeInternal(RetryingHMSHandler.java:148)
+ at org.apache.hadoop.hive.metastore.RetryingHMSHandler.invoke(RetryingHMSHandler.java:107)
+ at org.apache.hadoop.hive.metastore.RetryingHMSHandler.&lt;init&gt;(RetryingHMSHandler.java:79)
+ at org.apache.hadoop.hive.metastore.RetryingHMSHandler.getProxy(RetryingHMSHandler.java:92)
+ at org.apache.hadoop.hive.metastore.HiveMetaStore.newRetryingHMSHandler(HiveMetaStore.java:6902)
+ at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.&lt;init&gt;(HiveMetaStoreClient.java:164)
+ at org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.&lt;init&gt;(SessionHiveMetaStoreClient.java:70)
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
+ at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
+ at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
+ at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1707)
+ at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.&lt;init&gt;(RetryingMetaStoreClient.java:83)
+ at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:133)
+ at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:104)
+ at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:3600)
+ at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3652)
+ at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3632)
+ at org.apache.hadoop.hive.ql.metadata.Hive.getAllFunctions(Hive.java:3894)
+ at org.apache.hadoop.hive.ql.metadata.Hive.reloadFunctions(Hive.java:248)
+ at org.apache.hadoop.hive.ql.metadata.Hive.registerAllFunctionsOnce(Hive.java:231)
+ at org.apache.hadoop.hive.ql.metadata.Hive.&lt;init&gt;(Hive.java:388)
+ at org.apache.hadoop.hive.ql.metadata.Hive.create(Hive.java:332)
+ at org.apache.hadoop.hive.ql.metadata.Hive.getInternal(Hive.java:312)
+ at org.apache.hadoop.hive.ql.metadata.Hive.get(Hive.java:288)
+ at org.apache.spark.sql.hive.client.HiveClientImpl.client(HiveClientImpl.scala:260)
+ at org.apache.spark.sql.hive.client.HiveClientImpl.$anonfun$withHiveState$1(HiveClientImpl.scala:286)
+ at org.apache.spark.sql.hive.client.HiveClientImpl.liftedTree1$1(HiveClientImpl.scala:227)
+ at org.apache.spark.sql.hive.client.HiveClientImpl.retryLocked(HiveClientImpl.scala:226)
+ at org.apache.spark.sql.hive.client.HiveClientImpl.withHiveState(HiveClientImpl.scala:276)
+ at org.apache.spark.sql.hive.client.HiveClientImpl.databaseExists(HiveClientImpl.scala:389)
+ at org.apache.spark.sql.hive.HiveExternalCatalog.$anonfun$databaseExists$1(HiveExternalCatalog.scala:221)
+ at scala.runtime.java8.JFunction0$mcZ$sp.apply(JFunction0$mcZ$sp.java:23)
+ at org.apache.spark.sql.hive.HiveExternalCatalog.withClient(HiveExternalCatalog.scala:99)
+ at org.apache.spark.sql.hive.HiveExternalCatalog.databaseExists(HiveExternalCatalog.scala:221)
+ at org.apache.spark.sql.internal.SharedState.externalCatalog$lzycompute(SharedState.scala:137)
+ at org.apache.spark.sql.internal.SharedState.externalCatalog(SharedState.scala:127)
+ at org.apache.spark.sql.internal.SharedState.globalTempViewManager$lzycompute(SharedState.scala:157)
+ at org.apache.spark.sql.internal.SharedState.globalTempViewManager(SharedState.scala:155)
+ at org.apache.spark.sql.hive.HiveSessionStateBuilder.$anonfun$catalog$2(HiveSessionStateBuilder.scala:59)
+ at org.apache.spark.sql.catalyst.catalog.SessionCatalog.globalTempViewManager$lzycompute(SessionCatalog.scala:93)
+ at org.apache.spark.sql.catalyst.catalog.SessionCatalog.globalTempViewManager(SessionCatalog.scala:93)
+ at org.apache.spark.sql.catalyst.catalog.SessionCatalog.setCurrentDatabase(SessionCatalog.scala:260)
+ at org.apache.spark.sql.connector.catalog.CatalogManager.setCurrentNamespace(CatalogManager.scala:113)
+ at org.apache.spark.sql.execution.datasources.v2.SetCatalogAndNamespaceExec.$anonfun$run$2(SetCatalogAndNamespaceExec.scala:36)
+ at org.apache.spark.sql.execution.datasources.v2.SetCatalogAndNamespaceExec.$anonfun$run$2$adapted(SetCatalogAndNamespaceExec.scala:36)
+ at scala.Option.map(Option.scala:230)
+ at org.apache.spark.sql.execution.datasources.v2.SetCatalogAndNamespaceExec.run(SetCatalogAndNamespaceExec.scala:36)
+ at org.apache.spark.sql.execution.datasources.v2.V2CommandExec.result$lzycompute(V2CommandExec.scala:39)
+ at org.apache.spark.sql.execution.datasources.v2.V2CommandExec.result(V2CommandExec.scala:39)
+ at org.apache.spark.sql.execution.datasources.v2.V2CommandExec.executeCollect(V2CommandExec.scala:45)
+ at org.apache.spark.sql.Dataset.$anonfun$logicalPlan$1(Dataset.scala:229)
+ at org.apache.spark.sql.Dataset.$anonfun$withAction$1(Dataset.scala:3616)
+ at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$5(SQLExecution.scala:100)
+ at org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:160)
+ at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$1(SQLExecution.scala:87)
+ at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:763)
+ at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:64)
+ at org.apache.spark.sql.Dataset.withAction(Dataset.scala:3614)
+ at org.apache.spark.sql.Dataset.&lt;init&gt;(Dataset.scala:229)
+ at org.apache.spark.sql.Dataset$.$anonfun$ofRows$2(Dataset.scala:100)
+ at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:763)
+ at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:97)
+ at org.apache.spark.sql.SparkSession.$anonfun$sql$1(SparkSession.scala:606)
+ at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:763)
+ at org.apache.spark.sql.SparkSession.sql(SparkSession.scala:601)
+ at com.atguigu.userprofile.ml.app.BusiGenderApp$.main(BusiGenderApp.scala:69)
+ at com.atguigu.userprofile.ml.app.BusiGenderApp.main(BusiGenderApp.scala)
+Caused by: java.net.ConnectException: Connection timed out: connect
+ at java.net.DualStackPlainSocketImpl.connect0(Native Method)
+ at java.net.DualStackPlainSocketImpl.socketConnect(DualStackPlainSocketImpl.java:75)
+ at java.net.AbstractPlainSocketImpl.doConnect(AbstractPlainSocketImpl.java:476)
+ at java.net.AbstractPlainSocketImpl.connectToAddress(AbstractPlainSocketImpl.java:218)
+ at java.net.AbstractPlainSocketImpl.connect(AbstractPlainSocketImpl.java:200)
+ at java.net.PlainSocketImpl.connect(PlainSocketImpl.java:162)
+ at java.net.SocksSocketImpl.connect(SocksSocketImpl.java:394)
+ at java.net.Socket.connect(Socket.java:606)
+ at com.mysql.jdbc.StandardSocketFactory.connect(StandardSocketFactory.java:211)
+ at com.mysql.jdbc.MysqlIO.&lt;init&gt;(MysqlIO.java:301)
+ ... 126 more
+------
+
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
+ at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
+ at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
+ at com.jolbox.bonecp.PoolUtil.generateSQLException(PoolUtil.java:192)
+ at com.jolbox.bonecp.BoneCP.&lt;init&gt;(BoneCP.java:422)
+ at com.jolbox.bonecp.BoneCPDataSource.getConnection(BoneCPDataSource.java:120)
+ at org.datanucleus.store.rdbms.ConnectionFactoryImpl$ManagedConnectionImpl.getConnection(ConnectionFactoryImpl.java:483)
+ at org.datanucleus.store.rdbms.RDBMSStoreManager.&lt;init&gt;(RDBMSStoreManager.java:297)
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
+ at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
+ at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
+ at org.datanucleus.plugin.NonManagedPluginRegistry.createExecutableExtension(NonManagedPluginRegistry.java:606)
+ at org.datanucleus.plugin.PluginManager.createExecutableExtension(PluginManager.java:301)
+ at org.datanucleus.NucleusContextHelper.createStoreManagerForProperties(NucleusContextHelper.java:133)
+ at org.datanucleus.PersistenceNucleusContextImpl.initialise(PersistenceNucleusContextImpl.java:422)
+ at org.datanucleus.api.jdo.JDOPersistenceManagerFactory.freezeConfiguration(JDOPersistenceManagerFactory.java:817)
+ at org.datanucleus.api.jdo.JDOPersistenceManagerFactory.createPersistenceManagerFactory(JDOPersistenceManagerFactory.java:334)
+ at org.datanucleus.api.jdo.JDOPersistenceManagerFactory.getPersistenceManagerFactory(JDOPersistenceManagerFactory.java:213)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at javax.jdo.JDOHelper$16.run(JDOHelper.java:1965)
+ at java.security.AccessController.doPrivileged(Native Method)
+ at javax.jdo.JDOHelper.invoke(JDOHelper.java:1960)
+ at javax.jdo.JDOHelper.invokeGetPersistenceManagerFactoryOnImplementation(JDOHelper.java:1166)
+ at javax.jdo.JDOHelper.getPersistenceManagerFactory(JDOHelper.java:808)
+ at javax.jdo.JDOHelper.getPersistenceManagerFactory(JDOHelper.java:701)
+ at org.apache.hadoop.hive.metastore.ObjectStore.getPMF(ObjectStore.java:521)
+ at org.apache.hadoop.hive.metastore.ObjectStore.getPersistenceManager(ObjectStore.java:550)
+ at org.apache.hadoop.hive.metastore.ObjectStore.initializeHelper(ObjectStore.java:405)
+ at org.apache.hadoop.hive.metastore.ObjectStore.initialize(ObjectStore.java:342)
+ at org.apache.hadoop.hive.metastore.ObjectStore.setConf(ObjectStore.java:303)
+ at org.apache.hadoop.util.ReflectionUtils.setConf(ReflectionUtils.java:76)
+ at org.apache.hadoop.util.ReflectionUtils.newInstance(ReflectionUtils.java:136)
+ at org.apache.hadoop.hive.metastore.RawStoreProxy.(RawStoreProxy.java:58)
+ at org.apache.hadoop.hive.metastore.RawStoreProxy.getProxy(RawStoreProxy.java:67)
+ at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.newRawStoreForConf(HiveMetaStore.java:628)
+ at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.getMSForConf(HiveMetaStore.java:594)
+ at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.getMS(HiveMetaStore.java:588)
+ at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.createDefaultDB(HiveMetaStore.java:655)
+ at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.init(HiveMetaStore.java:431)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.hadoop.hive.metastore.RetryingHMSHandler.invokeInternal(RetryingHMSHandler.java:148)
+ at org.apache.hadoop.hive.metastore.RetryingHMSHandler.invoke(RetryingHMSHandler.java:107)
+ at org.apache.hadoop.hive.metastore.RetryingHMSHandler.(RetryingHMSHandler.java:79)
+ at org.apache.hadoop.hive.metastore.RetryingHMSHandler.getProxy(RetryingHMSHandler.java:92)
+ at org.apache.hadoop.hive.metastore.HiveMetaStore.newRetryingHMSHandler(HiveMetaStore.java:6902)
+ at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.(HiveMetaStoreClient.java:164)
+ at org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.(SessionHiveMetaStoreClient.java:70)
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
+ at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
+ at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
+ at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1707)
+ at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.(RetryingMetaStoreClient.java:83)
+ at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:133)
+ at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:104)
+ at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:3600)
+ at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3652)
+ at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3632)
+ at org.apache.hadoop.hive.ql.metadata.Hive.getAllFunctions(Hive.java:3894)
+ at org.apache.hadoop.hive.ql.metadata.Hive.reloadFunctions(Hive.java:248)
+ at org.apache.hadoop.hive.ql.metadata.Hive.registerAllFunctionsOnce(Hive.java:231)
+ at org.apache.hadoop.hive.ql.metadata.Hive.(Hive.java:388)
+ at org.apache.hadoop.hive.ql.metadata.Hive.create(Hive.java:332)
+ at org.apache.hadoop.hive.ql.metadata.Hive.getInternal(Hive.java:312)
+ at org.apache.hadoop.hive.ql.metadata.Hive.get(Hive.java:288)
+ at org.apache.spark.sql.hive.client.HiveClientImpl.client(HiveClientImpl.scala:260)
+ at org.apache.spark.sql.hive.client.HiveClientImpl.$anonfun$withHiveState$1(HiveClientImpl.scala:286)
+ at org.apache.spark.sql.hive.client.HiveClientImpl.liftedTree1$1(HiveClientImpl.scala:227)
+ at org.apache.spark.sql.hive.client.HiveClientImpl.retryLocked(HiveClientImpl.scala:226)
+ at org.apache.spark.sql.hive.client.HiveClientImpl.withHiveState(HiveClientImpl.scala:276)
+ at org.apache.spark.sql.hive.client.HiveClientImpl.databaseExists(HiveClientImpl.scala:389)
+ at org.apache.spark.sql.hive.HiveExternalCatalog.$anonfun$databaseExists$1(HiveExternalCatalog.scala:221)
+ at scala.runtime.java8.JFunction0$mcZ$sp.apply(JFunction0$mcZ$sp.java:23)
+ at org.apache.spark.sql.hive.HiveExternalCatalog.withClient(HiveExternalCatalog.scala:99)
+ at org.apache.spark.sql.hive.HiveExternalCatalog.databaseExists(HiveExternalCatalog.scala:221)
+ at org.apache.spark.sql.internal.SharedState.externalCatalog$lzycompute(SharedState.scala:137)
+ at org.apache.spark.sql.internal.SharedState.externalCatalog(SharedState.scala:127)
+ at org.apache.spark.sql.internal.SharedState.globalTempViewManager$lzycompute(SharedState.scala:157)
+ at org.apache.spark.sql.internal.SharedState.globalTempViewManager(SharedState.scala:155)
+ at org.apache.spark.sql.hive.HiveSessionStateBuilder.$anonfun$catalog$2(HiveSessionStateBuilder.scala:59)
+ at org.apache.spark.sql.catalyst.catalog.SessionCatalog.globalTempViewManager$lzycompute(SessionCatalog.scala:93)
+ at org.apache.spark.sql.catalyst.catalog.SessionCatalog.globalTempViewManager(SessionCatalog.scala:93)
+ at org.apache.spark.sql.catalyst.catalog.SessionCatalog.setCurrentDatabase(SessionCatalog.scala:260)
+ at org.apache.spark.sql.connector.catalog.CatalogManager.setCurrentNamespace(CatalogManager.scala:113)
+ at org.apache.spark.sql.execution.datasources.v2.SetCatalogAndNamespaceExec.$anonfun$run$2(SetCatalogAndNamespaceExec.scala:36)
+ at org.apache.spark.sql.execution.datasources.v2.SetCatalogAndNamespaceExec.$anonfun$run$2$adapted(SetCatalogAndNamespaceExec.scala:36)
+ at scala.Option.map(Option.scala:230)
+ at org.apache.spark.sql.execution.datasources.v2.SetCatalogAndNamespaceExec.run(SetCatalogAndNamespaceExec.scala:36)
+ at org.apache.spark.sql.execution.datasources.v2.V2CommandExec.result$lzycompute(V2CommandExec.scala:39)
+ at org.apache.spark.sql.execution.datasources.v2.V2CommandExec.result(V2CommandExec.scala:39)
+ at org.apache.spark.sql.execution.datasources.v2.V2CommandExec.executeCollect(V2CommandExec.scala:45)
+ at org.apache.spark.sql.Dataset.$anonfun$logicalPlan$1(Dataset.scala:229)
+ at org.apache.spark.sql.Dataset.$anonfun$withAction$1(Dataset.scala:3616)
+ at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$5(SQLExecution.scala:100)
+ at org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:160)
+ at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$1(SQLExecution.scala:87)
+ at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:763)
+ at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:64)
+ at org.apache.spark.sql.Dataset.withAction(Dataset.scala:3614)
+ at org.apache.spark.sql.Dataset.(Dataset.scala:229)
+ at org.apache.spark.sql.Dataset$.$anonfun$ofRows$2(Dataset.scala:100)
+ at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:763)
+ at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:97)
+ at org.apache.spark.sql.SparkSession.$anonfun$sql$1(SparkSession.scala:606)
+ at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:763)
+ at org.apache.spark.sql.SparkSession.sql(SparkSession.scala:601)
+ at com.atguigu.userprofile.ml.app.BusiGenderApp$.main(BusiGenderApp.scala:69)
+ at com.atguigu.userprofile.ml.app.BusiGenderApp.main(BusiGenderApp.scala)
+Caused by: com.mysql.jdbc.exceptions.jdbc4.CommunicationsException: Communications link failure
+
+The last packet sent successfully to the server was 0 milliseconds ago. The driver has not received any packets from the server.
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
+ at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
+ at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
+ at com.mysql.jdbc.Util.handleNewInstance(Util.java:425)
+ at com.mysql.jdbc.SQLError.createCommunicationsException(SQLError.java:990)
+ at com.mysql.jdbc.MysqlIO.(MysqlIO.java:342)
+ at com.mysql.jdbc.ConnectionImpl.coreConnect(ConnectionImpl.java:2197)
+ at com.mysql.jdbc.ConnectionImpl.connectOneTryOnly(ConnectionImpl.java:2230)
+ at com.mysql.jdbc.ConnectionImpl.createNewIO(ConnectionImpl.java:2025)
+ at com.mysql.jdbc.ConnectionImpl.(ConnectionImpl.java:778)
+ at com.mysql.jdbc.JDBC4Connection.(JDBC4Connection.java:47)
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
+ at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
+ at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
+ at com.mysql.jdbc.Util.handleNewInstance(Util.java:425)
+ at com.mysql.jdbc.ConnectionImpl.getInstance(ConnectionImpl.java:386)
+ at com.mysql.jdbc.NonRegisteringDriver.connect(NonRegisteringDriver.java:330)
+ at java.sql.DriverManager.getConnection(DriverManager.java:664)
+ at java.sql.DriverManager.getConnection(DriverManager.java:208)
+ at com.jolbox.bonecp.BoneCP.obtainRawInternalConnection(BoneCP.java:361)
+ at com.jolbox.bonecp.BoneCP.(BoneCP.java:416)
+ ... 110 more
+Caused by: java.net.ConnectException: Connection timed out: connect
+ at java.net.DualStackPlainSocketImpl.connect0(Native Method)
+ at java.net.DualStackPlainSocketImpl.socketConnect(DualStackPlainSocketImpl.java:75)
+ at java.net.AbstractPlainSocketImpl.doConnect(AbstractPlainSocketImpl.java:476)
+ at java.net.AbstractPlainSocketImpl.connectToAddress(AbstractPlainSocketImpl.java:218)
+ at java.net.AbstractPlainSocketImpl.connect(AbstractPlainSocketImpl.java:200)
+ at java.net.PlainSocketImpl.connect(PlainSocketImpl.java:162)
+ at java.net.SocksSocketImpl.connect(SocksSocketImpl.java:394)
+ at java.net.Socket.connect(Socket.java:606)
+ at com.mysql.jdbc.StandardSocketFactory.connect(StandardSocketFactory.java:211)
+ at com.mysql.jdbc.MysqlIO.(MysqlIO.java:301)
+ ... 126 more
+2022-05-24 19:06:24,842 ERROR --- [ main] DataNucleus.Datastore (line: 125) : Exception thrown creating StoreManager. See the nested exception
+Unable to open a test connection to the given database. JDBC url = jdbc:mysql://Ding202:3306/metastore?createDatabaseIfNotExist=true&characterEncoding=utf-8&useSSL=false, username = root. Terminating connection pool (set lazyInit to true if you expect to start your database after your app). Original Exception: ------
+com.mysql.jdbc.exceptions.jdbc4.CommunicationsException: Communications link failure
+
+The last packet sent successfully to the server was 0 milliseconds ago. The driver has not received any packets from the server.
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
+ at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
+ at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
+ at com.mysql.jdbc.Util.handleNewInstance(Util.java:425)
+ at com.mysql.jdbc.SQLError.createCommunicationsException(SQLError.java:990)
+ at com.mysql.jdbc.MysqlIO.(MysqlIO.java:342)
+ at com.mysql.jdbc.ConnectionImpl.coreConnect(ConnectionImpl.java:2197)
+ at com.mysql.jdbc.ConnectionImpl.connectOneTryOnly(ConnectionImpl.java:2230)
+ at com.mysql.jdbc.ConnectionImpl.createNewIO(ConnectionImpl.java:2025)
+ at com.mysql.jdbc.ConnectionImpl.(ConnectionImpl.java:778)
+ at com.mysql.jdbc.JDBC4Connection.(JDBC4Connection.java:47)
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
+ at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
+ at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
+ at com.mysql.jdbc.Util.handleNewInstance(Util.java:425)
+ at com.mysql.jdbc.ConnectionImpl.getInstance(ConnectionImpl.java:386)
+ at com.mysql.jdbc.NonRegisteringDriver.connect(NonRegisteringDriver.java:330)
+ at java.sql.DriverManager.getConnection(DriverManager.java:664)
+ at java.sql.DriverManager.getConnection(DriverManager.java:208)
+ at com.jolbox.bonecp.BoneCP.obtainRawInternalConnection(BoneCP.java:361)
+ at com.jolbox.bonecp.BoneCP.(BoneCP.java:416)
+ at com.jolbox.bonecp.BoneCPDataSource.getConnection(BoneCPDataSource.java:120)
+ at org.datanucleus.store.rdbms.ConnectionFactoryImpl$ManagedConnectionImpl.getConnection(ConnectionFactoryImpl.java:483)
+ at org.datanucleus.store.rdbms.RDBMSStoreManager.(RDBMSStoreManager.java:297)
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
+ at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
+ at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
+ at org.datanucleus.plugin.NonManagedPluginRegistry.createExecutableExtension(NonManagedPluginRegistry.java:606)
+ at org.datanucleus.plugin.PluginManager.createExecutableExtension(PluginManager.java:301)
+ at org.datanucleus.NucleusContextHelper.createStoreManagerForProperties(NucleusContextHelper.java:133)
+ at org.datanucleus.PersistenceNucleusContextImpl.initialise(PersistenceNucleusContextImpl.java:422)
+ at org.datanucleus.api.jdo.JDOPersistenceManagerFactory.freezeConfiguration(JDOPersistenceManagerFactory.java:817)
+ at org.datanucleus.api.jdo.JDOPersistenceManagerFactory.createPersistenceManagerFactory(JDOPersistenceManagerFactory.java:334)
+ at org.datanucleus.api.jdo.JDOPersistenceManagerFactory.getPersistenceManagerFactory(JDOPersistenceManagerFactory.java:213)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at javax.jdo.JDOHelper$16.run(JDOHelper.java:1965)
+ at java.security.AccessController.doPrivileged(Native Method)
+ at javax.jdo.JDOHelper.invoke(JDOHelper.java:1960)
+ at javax.jdo.JDOHelper.invokeGetPersistenceManagerFactoryOnImplementation(JDOHelper.java:1166)
+ at javax.jdo.JDOHelper.getPersistenceManagerFactory(JDOHelper.java:808)
+ at javax.jdo.JDOHelper.getPersistenceManagerFactory(JDOHelper.java:701)
+ at org.apache.hadoop.hive.metastore.ObjectStore.getPMF(ObjectStore.java:521)
+ at org.apache.hadoop.hive.metastore.ObjectStore.getPersistenceManager(ObjectStore.java:550)
+ at org.apache.hadoop.hive.metastore.ObjectStore.initializeHelper(ObjectStore.java:405)
+ at org.apache.hadoop.hive.metastore.ObjectStore.initialize(ObjectStore.java:342)
+ at org.apache.hadoop.hive.metastore.ObjectStore.setConf(ObjectStore.java:303)
+ at org.apache.hadoop.util.ReflectionUtils.setConf(ReflectionUtils.java:76)
+ at org.apache.hadoop.util.ReflectionUtils.newInstance(ReflectionUtils.java:136)
+ at org.apache.hadoop.hive.metastore.RawStoreProxy.(RawStoreProxy.java:58)
+ at org.apache.hadoop.hive.metastore.RawStoreProxy.getProxy(RawStoreProxy.java:67)
+ at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.newRawStoreForConf(HiveMetaStore.java:628)
+ at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.getMSForConf(HiveMetaStore.java:594)
+ at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.getMS(HiveMetaStore.java:588)
+ at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.createDefaultDB(HiveMetaStore.java:655)
+ at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.init(HiveMetaStore.java:431)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.hadoop.hive.metastore.RetryingHMSHandler.invokeInternal(RetryingHMSHandler.java:148)
+ at org.apache.hadoop.hive.metastore.RetryingHMSHandler.invoke(RetryingHMSHandler.java:107)
+ at org.apache.hadoop.hive.metastore.RetryingHMSHandler.(RetryingHMSHandler.java:79)
+ at org.apache.hadoop.hive.metastore.RetryingHMSHandler.getProxy(RetryingHMSHandler.java:92)
+ at org.apache.hadoop.hive.metastore.HiveMetaStore.newRetryingHMSHandler(HiveMetaStore.java:6902)
+ at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.(HiveMetaStoreClient.java:164)
+ at org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.(SessionHiveMetaStoreClient.java:70)
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
+ at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
+ at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
+ at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1707)
+ at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.(RetryingMetaStoreClient.java:83)
+ at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:133)
+ at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:104)
+ at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:3600)
+ at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3652)
+ at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3632)
+ at org.apache.hadoop.hive.ql.metadata.Hive.getAllFunctions(Hive.java:3894)
+ at org.apache.hadoop.hive.ql.metadata.Hive.reloadFunctions(Hive.java:248)
+ at org.apache.hadoop.hive.ql.metadata.Hive.registerAllFunctionsOnce(Hive.java:231)
+ at org.apache.hadoop.hive.ql.metadata.Hive.(Hive.java:388)
+ at org.apache.hadoop.hive.ql.metadata.Hive.create(Hive.java:332)
+ at org.apache.hadoop.hive.ql.metadata.Hive.getInternal(Hive.java:312)
+ at org.apache.hadoop.hive.ql.metadata.Hive.get(Hive.java:288)
+ at org.apache.spark.sql.hive.client.HiveClientImpl.client(HiveClientImpl.scala:260)
+ at org.apache.spark.sql.hive.client.HiveClientImpl.$anonfun$withHiveState$1(HiveClientImpl.scala:286)
+ at org.apache.spark.sql.hive.client.HiveClientImpl.liftedTree1$1(HiveClientImpl.scala:227)
+ at org.apache.spark.sql.hive.client.HiveClientImpl.retryLocked(HiveClientImpl.scala:226)
+ at org.apache.spark.sql.hive.client.HiveClientImpl.withHiveState(HiveClientImpl.scala:276)
+ at org.apache.spark.sql.hive.client.HiveClientImpl.databaseExists(HiveClientImpl.scala:389)
+ at org.apache.spark.sql.hive.HiveExternalCatalog.$anonfun$databaseExists$1(HiveExternalCatalog.scala:221)
+ at scala.runtime.java8.JFunction0$mcZ$sp.apply(JFunction0$mcZ$sp.java:23)
+ at org.apache.spark.sql.hive.HiveExternalCatalog.withClient(HiveExternalCatalog.scala:99)
+ at org.apache.spark.sql.hive.HiveExternalCatalog.databaseExists(HiveExternalCatalog.scala:221)
+ at org.apache.spark.sql.internal.SharedState.externalCatalog$lzycompute(SharedState.scala:137)
+ at org.apache.spark.sql.internal.SharedState.externalCatalog(SharedState.scala:127)
+ at org.apache.spark.sql.internal.SharedState.globalTempViewManager$lzycompute(SharedState.scala:157)
+ at org.apache.spark.sql.internal.SharedState.globalTempViewManager(SharedState.scala:155)
+ at org.apache.spark.sql.hive.HiveSessionStateBuilder.$anonfun$catalog$2(HiveSessionStateBuilder.scala:59)
+ at org.apache.spark.sql.catalyst.catalog.SessionCatalog.globalTempViewManager$lzycompute(SessionCatalog.scala:93)
+ at org.apache.spark.sql.catalyst.catalog.SessionCatalog.globalTempViewManager(SessionCatalog.scala:93)
+ at org.apache.spark.sql.catalyst.catalog.SessionCatalog.setCurrentDatabase(SessionCatalog.scala:260)
+ at org.apache.spark.sql.connector.catalog.CatalogManager.setCurrentNamespace(CatalogManager.scala:113)
+ at org.apache.spark.sql.execution.datasources.v2.SetCatalogAndNamespaceExec.$anonfun$run$2(SetCatalogAndNamespaceExec.scala:36)
+ at org.apache.spark.sql.execution.datasources.v2.SetCatalogAndNamespaceExec.$anonfun$run$2$adapted(SetCatalogAndNamespaceExec.scala:36)
+ at scala.Option.map(Option.scala:230)
+ at org.apache.spark.sql.execution.datasources.v2.SetCatalogAndNamespaceExec.run(SetCatalogAndNamespaceExec.scala:36)
+ at org.apache.spark.sql.execution.datasources.v2.V2CommandExec.result$lzycompute(V2CommandExec.scala:39)
+ at org.apache.spark.sql.execution.datasources.v2.V2CommandExec.result(V2CommandExec.scala:39)
+ at org.apache.spark.sql.execution.datasources.v2.V2CommandExec.executeCollect(V2CommandExec.scala:45)
+ at org.apache.spark.sql.Dataset.$anonfun$logicalPlan$1(Dataset.scala:229)
+ at org.apache.spark.sql.Dataset.$anonfun$withAction$1(Dataset.scala:3616)
+ at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$5(SQLExecution.scala:100)
+ at org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:160)
+ at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$1(SQLExecution.scala:87)
+ at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:763)
+ at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:64)
+ at org.apache.spark.sql.Dataset.withAction(Dataset.scala:3614)
+ at org.apache.spark.sql.Dataset.(Dataset.scala:229)
+ at org.apache.spark.sql.Dataset$.$anonfun$ofRows$2(Dataset.scala:100)
+ at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:763)
+ at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:97)
+ at org.apache.spark.sql.SparkSession.$anonfun$sql$1(SparkSession.scala:606)
+ at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:763)
+ at org.apache.spark.sql.SparkSession.sql(SparkSession.scala:601)
+ at com.atguigu.userprofile.ml.app.BusiGenderApp$.main(BusiGenderApp.scala:69)
+ at com.atguigu.userprofile.ml.app.BusiGenderApp.main(BusiGenderApp.scala)
+Caused by: java.net.ConnectException: Connection timed out: connect
+ at java.net.DualStackPlainSocketImpl.connect0(Native Method)
+ at java.net.DualStackPlainSocketImpl.socketConnect(DualStackPlainSocketImpl.java:75)
+ at java.net.AbstractPlainSocketImpl.doConnect(AbstractPlainSocketImpl.java:476)
+ at java.net.AbstractPlainSocketImpl.connectToAddress(AbstractPlainSocketImpl.java:218)
+ at java.net.AbstractPlainSocketImpl.connect(AbstractPlainSocketImpl.java:200)
+ at java.net.PlainSocketImpl.connect(PlainSocketImpl.java:162)
+ at java.net.SocksSocketImpl.connect(SocksSocketImpl.java:394)
+ at java.net.Socket.connect(Socket.java:606)
+ at com.mysql.jdbc.StandardSocketFactory.connect(StandardSocketFactory.java:211)
+ at com.mysql.jdbc.MysqlIO.(MysqlIO.java:301)
+ ... 126 more
+------
+
+org.datanucleus.exceptions.NucleusDataStoreException: Unable to open a test connection to the given database. JDBC url = jdbc:mysql://Ding202:3306/metastore?createDatabaseIfNotExist=true&characterEncoding=utf-8&useSSL=false, username = root. Terminating connection pool (set lazyInit to true if you expect to start your database after your app). Original Exception: ------
+com.mysql.jdbc.exceptions.jdbc4.CommunicationsException: Communications link failure
+
+The last packet sent successfully to the server was 0 milliseconds ago. The driver has not received any packets from the server.
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
+ at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
+ at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
+ at com.mysql.jdbc.Util.handleNewInstance(Util.java:425)
+ at com.mysql.jdbc.SQLError.createCommunicationsException(SQLError.java:990)
+ at com.mysql.jdbc.MysqlIO.(MysqlIO.java:342)
+ at com.mysql.jdbc.ConnectionImpl.coreConnect(ConnectionImpl.java:2197)
+ at com.mysql.jdbc.ConnectionImpl.connectOneTryOnly(ConnectionImpl.java:2230)
+ at com.mysql.jdbc.ConnectionImpl.createNewIO(ConnectionImpl.java:2025)
+ at com.mysql.jdbc.ConnectionImpl.(ConnectionImpl.java:778)
+ at com.mysql.jdbc.JDBC4Connection.(JDBC4Connection.java:47)
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
+ at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
+ at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
+ at com.mysql.jdbc.Util.handleNewInstance(Util.java:425)
+ at com.mysql.jdbc.ConnectionImpl.getInstance(ConnectionImpl.java:386)
+ at com.mysql.jdbc.NonRegisteringDriver.connect(NonRegisteringDriver.java:330)
+ at java.sql.DriverManager.getConnection(DriverManager.java:664)
+ at java.sql.DriverManager.getConnection(DriverManager.java:208)
+ at com.jolbox.bonecp.BoneCP.obtainRawInternalConnection(BoneCP.java:361)
+ at com.jolbox.bonecp.BoneCP.(BoneCP.java:416)
+ at com.jolbox.bonecp.BoneCPDataSource.getConnection(BoneCPDataSource.java:120)
+ at org.datanucleus.store.rdbms.ConnectionFactoryImpl$ManagedConnectionImpl.getConnection(ConnectionFactoryImpl.java:483)
+ at org.datanucleus.store.rdbms.RDBMSStoreManager.(RDBMSStoreManager.java:297)
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
+ at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
+ at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
+ at org.datanucleus.plugin.NonManagedPluginRegistry.createExecutableExtension(NonManagedPluginRegistry.java:606)
+ at org.datanucleus.plugin.PluginManager.createExecutableExtension(PluginManager.java:301)
+ at org.datanucleus.NucleusContextHelper.createStoreManagerForProperties(NucleusContextHelper.java:133)
+ at org.datanucleus.PersistenceNucleusContextImpl.initialise(PersistenceNucleusContextImpl.java:422)
+ at org.datanucleus.api.jdo.JDOPersistenceManagerFactory.freezeConfiguration(JDOPersistenceManagerFactory.java:817)
+ at org.datanucleus.api.jdo.JDOPersistenceManagerFactory.createPersistenceManagerFactory(JDOPersistenceManagerFactory.java:334)
+ at org.datanucleus.api.jdo.JDOPersistenceManagerFactory.getPersistenceManagerFactory(JDOPersistenceManagerFactory.java:213)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at javax.jdo.JDOHelper$16.run(JDOHelper.java:1965)
+ at java.security.AccessController.doPrivileged(Native Method)
+ at javax.jdo.JDOHelper.invoke(JDOHelper.java:1960)
+ at javax.jdo.JDOHelper.invokeGetPersistenceManagerFactoryOnImplementation(JDOHelper.java:1166)
+ at javax.jdo.JDOHelper.getPersistenceManagerFactory(JDOHelper.java:808)
+ at javax.jdo.JDOHelper.getPersistenceManagerFactory(JDOHelper.java:701)
+ at org.apache.hadoop.hive.metastore.ObjectStore.getPMF(ObjectStore.java:521)
+ at org.apache.hadoop.hive.metastore.ObjectStore.getPersistenceManager(ObjectStore.java:550)
+ at org.apache.hadoop.hive.metastore.ObjectStore.initializeHelper(ObjectStore.java:405)
+ at org.apache.hadoop.hive.metastore.ObjectStore.initialize(ObjectStore.java:342)
+ at org.apache.hadoop.hive.metastore.ObjectStore.setConf(ObjectStore.java:303)
+ at org.apache.hadoop.util.ReflectionUtils.setConf(ReflectionUtils.java:76)
+ at org.apache.hadoop.util.ReflectionUtils.newInstance(ReflectionUtils.java:136)
+ at org.apache.hadoop.hive.metastore.RawStoreProxy.(RawStoreProxy.java:58)
+ at org.apache.hadoop.hive.metastore.RawStoreProxy.getProxy(RawStoreProxy.java:67)
+ at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.newRawStoreForConf(HiveMetaStore.java:628)
+ at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.getMSForConf(HiveMetaStore.java:594)
+ at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.getMS(HiveMetaStore.java:588)
+ at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.createDefaultDB(HiveMetaStore.java:655)
+ at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.init(HiveMetaStore.java:431)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.hadoop.hive.metastore.RetryingHMSHandler.invokeInternal(RetryingHMSHandler.java:148)
+ at org.apache.hadoop.hive.metastore.RetryingHMSHandler.invoke(RetryingHMSHandler.java:107)
+ at org.apache.hadoop.hive.metastore.RetryingHMSHandler.(RetryingHMSHandler.java:79)
+ at org.apache.hadoop.hive.metastore.RetryingHMSHandler.getProxy(RetryingHMSHandler.java:92)
+ at org.apache.hadoop.hive.metastore.HiveMetaStore.newRetryingHMSHandler(HiveMetaStore.java:6902)
+ at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.(HiveMetaStoreClient.java:164)
+ at org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.(SessionHiveMetaStoreClient.java:70)
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
+ at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
+ at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
+ at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1707)
+ at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.(RetryingMetaStoreClient.java:83)
+ at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:133)
+ at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:104)
+ at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:3600)
+ at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3652)
+ at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3632)
+ at org.apache.hadoop.hive.ql.metadata.Hive.getAllFunctions(Hive.java:3894)
+ at org.apache.hadoop.hive.ql.metadata.Hive.reloadFunctions(Hive.java:248)
+ at org.apache.hadoop.hive.ql.metadata.Hive.registerAllFunctionsOnce(Hive.java:231)
+ at org.apache.hadoop.hive.ql.metadata.Hive.(Hive.java:388)
+ at org.apache.hadoop.hive.ql.metadata.Hive.create(Hive.java:332)
+ at org.apache.hadoop.hive.ql.metadata.Hive.getInternal(Hive.java:312)
+ at org.apache.hadoop.hive.ql.metadata.Hive.get(Hive.java:288)
+ at org.apache.spark.sql.hive.client.HiveClientImpl.client(HiveClientImpl.scala:260)
+ at org.apache.spark.sql.hive.client.HiveClientImpl.$anonfun$withHiveState$1(HiveClientImpl.scala:286)
+ at org.apache.spark.sql.hive.client.HiveClientImpl.liftedTree1$1(HiveClientImpl.scala:227)
+ at org.apache.spark.sql.hive.client.HiveClientImpl.retryLocked(HiveClientImpl.scala:226)
+ at org.apache.spark.sql.hive.client.HiveClientImpl.withHiveState(HiveClientImpl.scala:276)
+ at org.apache.spark.sql.hive.client.HiveClientImpl.databaseExists(HiveClientImpl.scala:389)
+ at org.apache.spark.sql.hive.HiveExternalCatalog.$anonfun$databaseExists$1(HiveExternalCatalog.scala:221)
+ at scala.runtime.java8.JFunction0$mcZ$sp.apply(JFunction0$mcZ$sp.java:23)
+ at org.apache.spark.sql.hive.HiveExternalCatalog.withClient(HiveExternalCatalog.scala:99)
+ at org.apache.spark.sql.hive.HiveExternalCatalog.databaseExists(HiveExternalCatalog.scala:221)
+ at org.apache.spark.sql.internal.SharedState.externalCatalog$lzycompute(SharedState.scala:137)
+ at org.apache.spark.sql.internal.SharedState.externalCatalog(SharedState.scala:127)
+ at org.apache.spark.sql.internal.SharedState.globalTempViewManager$lzycompute(SharedState.scala:157)
+ at org.apache.spark.sql.internal.SharedState.globalTempViewManager(SharedState.scala:155)
+ at org.apache.spark.sql.hive.HiveSessionStateBuilder.$anonfun$catalog$2(HiveSessionStateBuilder.scala:59)
+ at org.apache.spark.sql.catalyst.catalog.SessionCatalog.globalTempViewManager$lzycompute(SessionCatalog.scala:93)
+ at org.apache.spark.sql.catalyst.catalog.SessionCatalog.globalTempViewManager(SessionCatalog.scala:93)
+ at org.apache.spark.sql.catalyst.catalog.SessionCatalog.setCurrentDatabase(SessionCatalog.scala:260)
+ at org.apache.spark.sql.connector.catalog.CatalogManager.setCurrentNamespace(CatalogManager.scala:113)
+ at org.apache.spark.sql.execution.datasources.v2.SetCatalogAndNamespaceExec.$anonfun$run$2(SetCatalogAndNamespaceExec.scala:36)
+ at org.apache.spark.sql.execution.datasources.v2.SetCatalogAndNamespaceExec.$anonfun$run$2$adapted(SetCatalogAndNamespaceExec.scala:36)
+ at scala.Option.map(Option.scala:230)
+ at org.apache.spark.sql.execution.datasources.v2.SetCatalogAndNamespaceExec.run(SetCatalogAndNamespaceExec.scala:36)
+ at org.apache.spark.sql.execution.datasources.v2.V2CommandExec.result$lzycompute(V2CommandExec.scala:39)
+ at org.apache.spark.sql.execution.datasources.v2.V2CommandExec.result(V2CommandExec.scala:39)
+ at org.apache.spark.sql.execution.datasources.v2.V2CommandExec.executeCollect(V2CommandExec.scala:45)
+ at org.apache.spark.sql.Dataset.$anonfun$logicalPlan$1(Dataset.scala:229)
+ at org.apache.spark.sql.Dataset.$anonfun$withAction$1(Dataset.scala:3616)
+ at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$5(SQLExecution.scala:100)
+ at org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:160)
+ at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$1(SQLExecution.scala:87)
+ at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:763)
+ at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:64)
+ at org.apache.spark.sql.Dataset.withAction(Dataset.scala:3614)
+ at org.apache.spark.sql.Dataset.(Dataset.scala:229)
+ at org.apache.spark.sql.Dataset$.$anonfun$ofRows$2(Dataset.scala:100)
+ at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:763)
+ at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:97)
+ at org.apache.spark.sql.SparkSession.$anonfun$sql$1(SparkSession.scala:606)
+ at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:763)
+ at org.apache.spark.sql.SparkSession.sql(SparkSession.scala:601)
+ at com.atguigu.userprofile.ml.app.BusiGenderApp$.main(BusiGenderApp.scala:69)
+ at com.atguigu.userprofile.ml.app.BusiGenderApp.main(BusiGenderApp.scala)
+Caused by: java.net.ConnectException: Connection timed out: connect
+ at java.net.DualStackPlainSocketImpl.connect0(Native Method)
+ at java.net.DualStackPlainSocketImpl.socketConnect(DualStackPlainSocketImpl.java:75)
+ at java.net.AbstractPlainSocketImpl.doConnect(AbstractPlainSocketImpl.java:476)
+ at java.net.AbstractPlainSocketImpl.connectToAddress(AbstractPlainSocketImpl.java:218)
+ at java.net.AbstractPlainSocketImpl.connect(AbstractPlainSocketImpl.java:200)
+ at java.net.PlainSocketImpl.connect(PlainSocketImpl.java:162)
+ at java.net.SocksSocketImpl.connect(SocksSocketImpl.java:394)
+ at java.net.Socket.connect(Socket.java:606)
+ at com.mysql.jdbc.StandardSocketFactory.connect(StandardSocketFactory.java:211)
+ at com.mysql.jdbc.MysqlIO.(MysqlIO.java:301)
+ ... 126 more
+------
+
+ at org.datanucleus.store.rdbms.ConnectionFactoryImpl$ManagedConnectionImpl.getConnection(ConnectionFactoryImpl.java:498)
+ at org.datanucleus.store.rdbms.RDBMSStoreManager.(RDBMSStoreManager.java:297)
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
+ at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
+ at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
+ at org.datanucleus.plugin.NonManagedPluginRegistry.createExecutableExtension(NonManagedPluginRegistry.java:606)
+ at org.datanucleus.plugin.PluginManager.createExecutableExtension(PluginManager.java:301)
+ at org.datanucleus.NucleusContextHelper.createStoreManagerForProperties(NucleusContextHelper.java:133)
+ at org.datanucleus.PersistenceNucleusContextImpl.initialise(PersistenceNucleusContextImpl.java:422)
+ at org.datanucleus.api.jdo.JDOPersistenceManagerFactory.freezeConfiguration(JDOPersistenceManagerFactory.java:817)
+ at org.datanucleus.api.jdo.JDOPersistenceManagerFactory.createPersistenceManagerFactory(JDOPersistenceManagerFactory.java:334)
+ at org.datanucleus.api.jdo.JDOPersistenceManagerFactory.getPersistenceManagerFactory(JDOPersistenceManagerFactory.java:213)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at javax.jdo.JDOHelper$16.run(JDOHelper.java:1965)
+ at java.security.AccessController.doPrivileged(Native Method)
+ at javax.jdo.JDOHelper.invoke(JDOHelper.java:1960)
+ at javax.jdo.JDOHelper.invokeGetPersistenceManagerFactoryOnImplementation(JDOHelper.java:1166)
+ at javax.jdo.JDOHelper.getPersistenceManagerFactory(JDOHelper.java:808)
+ at javax.jdo.JDOHelper.getPersistenceManagerFactory(JDOHelper.java:701)
+ at org.apache.hadoop.hive.metastore.ObjectStore.getPMF(ObjectStore.java:521)
+ at org.apache.hadoop.hive.metastore.ObjectStore.getPersistenceManager(ObjectStore.java:550)
+ at org.apache.hadoop.hive.metastore.ObjectStore.initializeHelper(ObjectStore.java:405)
+ at org.apache.hadoop.hive.metastore.ObjectStore.initialize(ObjectStore.java:342)
+ at org.apache.hadoop.hive.metastore.ObjectStore.setConf(ObjectStore.java:303)
+ at org.apache.hadoop.util.ReflectionUtils.setConf(ReflectionUtils.java:76)
+ at org.apache.hadoop.util.ReflectionUtils.newInstance(ReflectionUtils.java:136)
+ at org.apache.hadoop.hive.metastore.RawStoreProxy.(RawStoreProxy.java:58)
+ at org.apache.hadoop.hive.metastore.RawStoreProxy.getProxy(RawStoreProxy.java:67)
+ at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.newRawStoreForConf(HiveMetaStore.java:628)
+ at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.getMSForConf(HiveMetaStore.java:594)
+ at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.getMS(HiveMetaStore.java:588)
+ at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.createDefaultDB(HiveMetaStore.java:655)
+ at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.init(HiveMetaStore.java:431)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.hadoop.hive.metastore.RetryingHMSHandler.invokeInternal(RetryingHMSHandler.java:148)
+ at org.apache.hadoop.hive.metastore.RetryingHMSHandler.invoke(RetryingHMSHandler.java:107)
+ at org.apache.hadoop.hive.metastore.RetryingHMSHandler.(RetryingHMSHandler.java:79)
+ at org.apache.hadoop.hive.metastore.RetryingHMSHandler.getProxy(RetryingHMSHandler.java:92)
+ at org.apache.hadoop.hive.metastore.HiveMetaStore.newRetryingHMSHandler(HiveMetaStore.java:6902)
+ at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.(HiveMetaStoreClient.java:164)
+ at org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.(SessionHiveMetaStoreClient.java:70)
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
+ at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
+ at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
+ at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1707)
+ at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.(RetryingMetaStoreClient.java:83)
+ at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:133)
+ at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:104)
+ at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:3600)
+ at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3652)
+ at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3632)
+ at org.apache.hadoop.hive.ql.metadata.Hive.getAllFunctions(Hive.java:3894)
+ at org.apache.hadoop.hive.ql.metadata.Hive.reloadFunctions(Hive.java:248)
+ at org.apache.hadoop.hive.ql.metadata.Hive.registerAllFunctionsOnce(Hive.java:231)
+ at org.apache.hadoop.hive.ql.metadata.Hive.(Hive.java:388)
+ at org.apache.hadoop.hive.ql.metadata.Hive.create(Hive.java:332)
+ at org.apache.hadoop.hive.ql.metadata.Hive.getInternal(Hive.java:312)
+ at org.apache.hadoop.hive.ql.metadata.Hive.get(Hive.java:288)
+ at org.apache.spark.sql.hive.client.HiveClientImpl.client(HiveClientImpl.scala:260)
+ at org.apache.spark.sql.hive.client.HiveClientImpl.$anonfun$withHiveState$1(HiveClientImpl.scala:286)
+ at org.apache.spark.sql.hive.client.HiveClientImpl.liftedTree1$1(HiveClientImpl.scala:227)
+ at org.apache.spark.sql.hive.client.HiveClientImpl.retryLocked(HiveClientImpl.scala:226)
+ at org.apache.spark.sql.hive.client.HiveClientImpl.withHiveState(HiveClientImpl.scala:276)
+ at org.apache.spark.sql.hive.client.HiveClientImpl.databaseExists(HiveClientImpl.scala:389)
+ at org.apache.spark.sql.hive.HiveExternalCatalog.$anonfun$databaseExists$1(HiveExternalCatalog.scala:221)
+ at scala.runtime.java8.JFunction0$mcZ$sp.apply(JFunction0$mcZ$sp.java:23)
+ at org.apache.spark.sql.hive.HiveExternalCatalog.withClient(HiveExternalCatalog.scala:99)
+ at org.apache.spark.sql.hive.HiveExternalCatalog.databaseExists(HiveExternalCatalog.scala:221)
+ at org.apache.spark.sql.internal.SharedState.externalCatalog$lzycompute(SharedState.scala:137)
+ at org.apache.spark.sql.internal.SharedState.externalCatalog(SharedState.scala:127)
+ at org.apache.spark.sql.internal.SharedState.globalTempViewManager$lzycompute(SharedState.scala:157)
+ at org.apache.spark.sql.internal.SharedState.globalTempViewManager(SharedState.scala:155)
+ at org.apache.spark.sql.hive.HiveSessionStateBuilder.$anonfun$catalog$2(HiveSessionStateBuilder.scala:59)
+ at org.apache.spark.sql.catalyst.catalog.SessionCatalog.globalTempViewManager$lzycompute(SessionCatalog.scala:93)
+ at org.apache.spark.sql.catalyst.catalog.SessionCatalog.globalTempViewManager(SessionCatalog.scala:93)
+ at org.apache.spark.sql.catalyst.catalog.SessionCatalog.setCurrentDatabase(SessionCatalog.scala:260)
+ at org.apache.spark.sql.connector.catalog.CatalogManager.setCurrentNamespace(CatalogManager.scala:113)
+ at org.apache.spark.sql.execution.datasources.v2.SetCatalogAndNamespaceExec.$anonfun$run$2(SetCatalogAndNamespaceExec.scala:36)
+ at org.apache.spark.sql.execution.datasources.v2.SetCatalogAndNamespaceExec.$anonfun$run$2$adapted(SetCatalogAndNamespaceExec.scala:36)
+ at scala.Option.map(Option.scala:230)
+ at org.apache.spark.sql.execution.datasources.v2.SetCatalogAndNamespaceExec.run(SetCatalogAndNamespaceExec.scala:36)
+ at org.apache.spark.sql.execution.datasources.v2.V2CommandExec.result$lzycompute(V2CommandExec.scala:39)
+ at org.apache.spark.sql.execution.datasources.v2.V2CommandExec.result(V2CommandExec.scala:39)
+ at org.apache.spark.sql.execution.datasources.v2.V2CommandExec.executeCollect(V2CommandExec.scala:45)
+ at org.apache.spark.sql.Dataset.$anonfun$logicalPlan$1(Dataset.scala:229)
+ at org.apache.spark.sql.Dataset.$anonfun$withAction$1(Dataset.scala:3616)
+ at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$5(SQLExecution.scala:100)
+ at org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:160)
+ at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$1(SQLExecution.scala:87)
+ at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:763)
+ at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:64)
+ at org.apache.spark.sql.Dataset.withAction(Dataset.scala:3614)
+ at org.apache.spark.sql.Dataset.(Dataset.scala:229)
+ at org.apache.spark.sql.Dataset$.$anonfun$ofRows$2(Dataset.scala:100)
+ at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:763)
+ at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:97)
+ at org.apache.spark.sql.SparkSession.$anonfun$sql$1(SparkSession.scala:606)
+ at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:763)
+ at org.apache.spark.sql.SparkSession.sql(SparkSession.scala:601)
+ at com.atguigu.userprofile.ml.app.BusiGenderApp$.main(BusiGenderApp.scala:69)
+ at com.atguigu.userprofile.ml.app.BusiGenderApp.main(BusiGenderApp.scala)
+Caused by: java.sql.SQLException: Unable to open a test connection to the given database. JDBC url = jdbc:mysql://Ding202:3306/metastore?createDatabaseIfNotExist=true&characterEncoding=utf-8&useSSL=false, username = root. Terminating connection pool (set lazyInit to true if you expect to start your database after your app). Original Exception: ------
+com.mysql.jdbc.exceptions.jdbc4.CommunicationsException: Communications link failure
+
+The last packet sent successfully to the server was 0 milliseconds ago. The driver has not received any packets from the server.
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
+ at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
+ at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
+ at com.mysql.jdbc.Util.handleNewInstance(Util.java:425)
+ at com.mysql.jdbc.SQLError.createCommunicationsException(SQLError.java:990)
+ at com.mysql.jdbc.MysqlIO.(MysqlIO.java:342)
+ at com.mysql.jdbc.ConnectionImpl.coreConnect(ConnectionImpl.java:2197)
+ at com.mysql.jdbc.ConnectionImpl.connectOneTryOnly(ConnectionImpl.java:2230)
+ at com.mysql.jdbc.ConnectionImpl.createNewIO(ConnectionImpl.java:2025)
+ at com.mysql.jdbc.ConnectionImpl.(ConnectionImpl.java:778)
+ at com.mysql.jdbc.JDBC4Connection.(JDBC4Connection.java:47)
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
+ at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
+ at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
+ at com.mysql.jdbc.Util.handleNewInstance(Util.java:425)
+ at com.mysql.jdbc.ConnectionImpl.getInstance(ConnectionImpl.java:386)
+ at com.mysql.jdbc.NonRegisteringDriver.connect(NonRegisteringDriver.java:330)
+ at java.sql.DriverManager.getConnection(DriverManager.java:664)
+ at java.sql.DriverManager.getConnection(DriverManager.java:208)
+ at com.jolbox.bonecp.BoneCP.obtainRawInternalConnection(BoneCP.java:361)
+ at com.jolbox.bonecp.BoneCP.(BoneCP.java:416)
+ at com.jolbox.bonecp.BoneCPDataSource.getConnection(BoneCPDataSource.java:120)
+ at org.datanucleus.store.rdbms.ConnectionFactoryImpl$ManagedConnectionImpl.getConnection(ConnectionFactoryImpl.java:483)
+ at org.datanucleus.store.rdbms.RDBMSStoreManager.(RDBMSStoreManager.java:297)
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
+ at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
+ at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
+ at org.datanucleus.plugin.NonManagedPluginRegistry.createExecutableExtension(NonManagedPluginRegistry.java:606)
+ at org.datanucleus.plugin.PluginManager.createExecutableExtension(PluginManager.java:301)
+ at org.datanucleus.NucleusContextHelper.createStoreManagerForProperties(NucleusContextHelper.java:133)
+ at org.datanucleus.PersistenceNucleusContextImpl.initialise(PersistenceNucleusContextImpl.java:422)
+ at org.datanucleus.api.jdo.JDOPersistenceManagerFactory.freezeConfiguration(JDOPersistenceManagerFactory.java:817)
+ at org.datanucleus.api.jdo.JDOPersistenceManagerFactory.createPersistenceManagerFactory(JDOPersistenceManagerFactory.java:334)
+ at org.datanucleus.api.jdo.JDOPersistenceManagerFactory.getPersistenceManagerFactory(JDOPersistenceManagerFactory.java:213)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at javax.jdo.JDOHelper$16.run(JDOHelper.java:1965)
+ at java.security.AccessController.doPrivileged(Native Method)
+ at javax.jdo.JDOHelper.invoke(JDOHelper.java:1960)
+ at javax.jdo.JDOHelper.invokeGetPersistenceManagerFactoryOnImplementation(JDOHelper.java:1166)
+ at javax.jdo.JDOHelper.getPersistenceManagerFactory(JDOHelper.java:808)
+ at javax.jdo.JDOHelper.getPersistenceManagerFactory(JDOHelper.java:701)
+ at org.apache.hadoop.hive.metastore.ObjectStore.getPMF(ObjectStore.java:521)
+ at org.apache.hadoop.hive.metastore.ObjectStore.getPersistenceManager(ObjectStore.java:550)
+ at org.apache.hadoop.hive.metastore.ObjectStore.initializeHelper(ObjectStore.java:405)
+ at org.apache.hadoop.hive.metastore.ObjectStore.initialize(ObjectStore.java:342)
+ at org.apache.hadoop.hive.metastore.ObjectStore.setConf(ObjectStore.java:303)
+ at org.apache.hadoop.util.ReflectionUtils.setConf(ReflectionUtils.java:76)
+ at org.apache.hadoop.util.ReflectionUtils.newInstance(ReflectionUtils.java:136)
+ at org.apache.hadoop.hive.metastore.RawStoreProxy.(RawStoreProxy.java:58)
+ at org.apache.hadoop.hive.metastore.RawStoreProxy.getProxy(RawStoreProxy.java:67)
+ at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.newRawStoreForConf(HiveMetaStore.java:628)
+ at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.getMSForConf(HiveMetaStore.java:594)
+ at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.getMS(HiveMetaStore.java:588)
+ at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.createDefaultDB(HiveMetaStore.java:655)
+ at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.init(HiveMetaStore.java:431)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.hadoop.hive.metastore.RetryingHMSHandler.invokeInternal(RetryingHMSHandler.java:148)
+ at org.apache.hadoop.hive.metastore.RetryingHMSHandler.invoke(RetryingHMSHandler.java:107)
+ at org.apache.hadoop.hive.metastore.RetryingHMSHandler.(RetryingHMSHandler.java:79)
+ at org.apache.hadoop.hive.metastore.RetryingHMSHandler.getProxy(RetryingHMSHandler.java:92)
+ at org.apache.hadoop.hive.metastore.HiveMetaStore.newRetryingHMSHandler(HiveMetaStore.java:6902)
+ at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.(HiveMetaStoreClient.java:164)
+ at org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.(SessionHiveMetaStoreClient.java:70)
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
+ at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
+ at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
+ at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1707)
+ at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.(RetryingMetaStoreClient.java:83)
+ at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:133)
+ at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:104)
+ at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:3600)
+ at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3652)
+ at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3632)
+ at org.apache.hadoop.hive.ql.metadata.Hive.getAllFunctions(Hive.java:3894)
+ at org.apache.hadoop.hive.ql.metadata.Hive.reloadFunctions(Hive.java:248)
+ at org.apache.hadoop.hive.ql.metadata.Hive.registerAllFunctionsOnce(Hive.java:231)
+ at org.apache.hadoop.hive.ql.metadata.Hive.(Hive.java:388)
+ at org.apache.hadoop.hive.ql.metadata.Hive.create(Hive.java:332)
+ at org.apache.hadoop.hive.ql.metadata.Hive.getInternal(Hive.java:312)
+ at org.apache.hadoop.hive.ql.metadata.Hive.get(Hive.java:288)
+ at org.apache.spark.sql.hive.client.HiveClientImpl.client(HiveClientImpl.scala:260)
+ at org.apache.spark.sql.hive.client.HiveClientImpl.$anonfun$withHiveState$1(HiveClientImpl.scala:286)
+ at org.apache.spark.sql.hive.client.HiveClientImpl.liftedTree1$1(HiveClientImpl.scala:227)
+ at org.apache.spark.sql.hive.client.HiveClientImpl.retryLocked(HiveClientImpl.scala:226)
+ at org.apache.spark.sql.hive.client.HiveClientImpl.withHiveState(HiveClientImpl.scala:276)
+ at org.apache.spark.sql.hive.client.HiveClientImpl.databaseExists(HiveClientImpl.scala:389)
+ at org.apache.spark.sql.hive.HiveExternalCatalog.$anonfun$databaseExists$1(HiveExternalCatalog.scala:221)
+ at scala.runtime.java8.JFunction0$mcZ$sp.apply(JFunction0$mcZ$sp.java:23)
+ at org.apache.spark.sql.hive.HiveExternalCatalog.withClient(HiveExternalCatalog.scala:99)
+ at org.apache.spark.sql.hive.HiveExternalCatalog.databaseExists(HiveExternalCatalog.scala:221)
+ at org.apache.spark.sql.internal.SharedState.externalCatalog$lzycompute(SharedState.scala:137)
+ at org.apache.spark.sql.internal.SharedState.externalCatalog(SharedState.scala:127)
+ at org.apache.spark.sql.internal.SharedState.globalTempViewManager$lzycompute(SharedState.scala:157)
+ at org.apache.spark.sql.internal.SharedState.globalTempViewManager(SharedState.scala:155)
+ at org.apache.spark.sql.hive.HiveSessionStateBuilder.$anonfun$catalog$2(HiveSessionStateBuilder.scala:59)
+ at org.apache.spark.sql.catalyst.catalog.SessionCatalog.globalTempViewManager$lzycompute(SessionCatalog.scala:93)
+ at org.apache.spark.sql.catalyst.catalog.SessionCatalog.globalTempViewManager(SessionCatalog.scala:93)
+ at org.apache.spark.sql.catalyst.catalog.SessionCatalog.setCurrentDatabase(SessionCatalog.scala:260)
+ at org.apache.spark.sql.connector.catalog.CatalogManager.setCurrentNamespace(CatalogManager.scala:113)
+ at org.apache.spark.sql.execution.datasources.v2.SetCatalogAndNamespaceExec.$anonfun$run$2(SetCatalogAndNamespaceExec.scala:36)
+ at org.apache.spark.sql.execution.datasources.v2.SetCatalogAndNamespaceExec.$anonfun$run$2$adapted(SetCatalogAndNamespaceExec.scala:36)
+ at scala.Option.map(Option.scala:230)
+ at org.apache.spark.sql.execution.datasources.v2.SetCatalogAndNamespaceExec.run(SetCatalogAndNamespaceExec.scala:36)
+ at org.apache.spark.sql.execution.datasources.v2.V2CommandExec.result$lzycompute(V2CommandExec.scala:39)
+ at org.apache.spark.sql.execution.datasources.v2.V2CommandExec.result(V2CommandExec.scala:39)
+ at org.apache.spark.sql.execution.datasources.v2.V2CommandExec.executeCollect(V2CommandExec.scala:45)
+ at org.apache.spark.sql.Dataset.$anonfun$logicalPlan$1(Dataset.scala:229)
+ at org.apache.spark.sql.Dataset.$anonfun$withAction$1(Dataset.scala:3616)
+ at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$5(SQLExecution.scala:100)
+ at org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:160)
+ at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$1(SQLExecution.scala:87)
+ at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:763)
+ at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:64)
+ at org.apache.spark.sql.Dataset.withAction(Dataset.scala:3614)
+ at org.apache.spark.sql.Dataset.(Dataset.scala:229)
+ at org.apache.spark.sql.Dataset$.$anonfun$ofRows$2(Dataset.scala:100)
+ at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:763)
+ at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:97)
+ at org.apache.spark.sql.SparkSession.$anonfun$sql$1(SparkSession.scala:606)
+ at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:763)
+ at org.apache.spark.sql.SparkSession.sql(SparkSession.scala:601)
+ at com.atguigu.userprofile.ml.app.BusiGenderApp$.main(BusiGenderApp.scala:69)
+ at com.atguigu.userprofile.ml.app.BusiGenderApp.main(BusiGenderApp.scala)
+Caused by: java.net.ConnectException: Connection timed out: connect
+ at java.net.DualStackPlainSocketImpl.connect0(Native Method)
+ at java.net.DualStackPlainSocketImpl.socketConnect(DualStackPlainSocketImpl.java:75)
+ at java.net.AbstractPlainSocketImpl.doConnect(AbstractPlainSocketImpl.java:476)
+ at java.net.AbstractPlainSocketImpl.connectToAddress(AbstractPlainSocketImpl.java:218)
+ at java.net.AbstractPlainSocketImpl.connect(AbstractPlainSocketImpl.java:200)
+ at java.net.PlainSocketImpl.connect(PlainSocketImpl.java:162)
+ at java.net.SocksSocketImpl.connect(SocksSocketImpl.java:394)
+ at java.net.Socket.connect(Socket.java:606)
+ at com.mysql.jdbc.StandardSocketFactory.connect(StandardSocketFactory.java:211)
+ at com.mysql.jdbc.MysqlIO.(MysqlIO.java:301)
+ ... 126 more
+------
+
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
+ at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
+ at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
+ at com.jolbox.bonecp.PoolUtil.generateSQLException(PoolUtil.java:192)
+ at com.jolbox.bonecp.BoneCP.(BoneCP.java:422)
+ at com.jolbox.bonecp.BoneCPDataSource.getConnection(BoneCPDataSource.java:120)
+ at org.datanucleus.store.rdbms.ConnectionFactoryImpl$ManagedConnectionImpl.getConnection(ConnectionFactoryImpl.java:483)
+ ... 108 more
+Caused by: com.mysql.jdbc.exceptions.jdbc4.CommunicationsException: Communications link failure
+
+The last packet sent successfully to the server was 0 milliseconds ago. The driver has not received any packets from the server.
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
+ at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
+ at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
+ at com.mysql.jdbc.Util.handleNewInstance(Util.java:425)
+ at com.mysql.jdbc.SQLError.createCommunicationsException(SQLError.java:990)
+ at com.mysql.jdbc.MysqlIO.(MysqlIO.java:342)
+ at com.mysql.jdbc.ConnectionImpl.coreConnect(ConnectionImpl.java:2197)
+ at com.mysql.jdbc.ConnectionImpl.connectOneTryOnly(ConnectionImpl.java:2230)
+ at com.mysql.jdbc.ConnectionImpl.createNewIO(ConnectionImpl.java:2025)
+ at com.mysql.jdbc.ConnectionImpl.(ConnectionImpl.java:778)
+ at com.mysql.jdbc.JDBC4Connection.(JDBC4Connection.java:47)
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
+ at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
+ at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
+ at com.mysql.jdbc.Util.handleNewInstance(Util.java:425)
+ at com.mysql.jdbc.ConnectionImpl.getInstance(ConnectionImpl.java:386)
+ at com.mysql.jdbc.NonRegisteringDriver.connect(NonRegisteringDriver.java:330)
+ at java.sql.DriverManager.getConnection(DriverManager.java:664)
+ at java.sql.DriverManager.getConnection(DriverManager.java:208)
+ at com.jolbox.bonecp.BoneCP.obtainRawInternalConnection(BoneCP.java:361)
+ at com.jolbox.bonecp.BoneCP.(BoneCP.java:416)
+ ... 110 more
+Caused by: java.net.ConnectException: Connection timed out: connect
+ at java.net.DualStackPlainSocketImpl.connect0(Native Method)
+ at java.net.DualStackPlainSocketImpl.socketConnect(DualStackPlainSocketImpl.java:75)
+ at java.net.AbstractPlainSocketImpl.doConnect(AbstractPlainSocketImpl.java:476)
+ at java.net.AbstractPlainSocketImpl.connectToAddress(AbstractPlainSocketImpl.java:218)
+ at java.net.AbstractPlainSocketImpl.connect(AbstractPlainSocketImpl.java:200)
+ at java.net.PlainSocketImpl.connect(PlainSocketImpl.java:162)
+ at java.net.SocksSocketImpl.connect(SocksSocketImpl.java:394)
+ at java.net.Socket.connect(Socket.java:606)
+ at com.mysql.jdbc.StandardSocketFactory.connect(StandardSocketFactory.java:211)
+ at com.mysql.jdbc.MysqlIO.(MysqlIO.java:301)
+ ... 126 more
+Nested Throwables StackTrace:
+java.sql.SQLException: Unable to open a test connection to the given database. JDBC url = jdbc:mysql://Ding202:3306/metastore?createDatabaseIfNotExist=true&characterEncoding=utf-8&useSSL=false, username = root. Terminating connection pool (set lazyInit to true if you expect to start your database after your app). Original Exception: ------
+com.mysql.jdbc.exceptions.jdbc4.CommunicationsException: Communications link failure
+
+The last packet sent successfully to the server was 0 milliseconds ago. The driver has not received any packets from the server.
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
+ at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
+ at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
+ at com.mysql.jdbc.Util.handleNewInstance(Util.java:425)
+ at com.mysql.jdbc.SQLError.createCommunicationsException(SQLError.java:990)
+ at com.mysql.jdbc.MysqlIO.(MysqlIO.java:342)
+ at com.mysql.jdbc.ConnectionImpl.coreConnect(ConnectionImpl.java:2197)
+ at com.mysql.jdbc.ConnectionImpl.connectOneTryOnly(ConnectionImpl.java:2230)
+ at com.mysql.jdbc.ConnectionImpl.createNewIO(ConnectionImpl.java:2025)
+ at com.mysql.jdbc.ConnectionImpl.(ConnectionImpl.java:778)
+ at com.mysql.jdbc.JDBC4Connection.(JDBC4Connection.java:47)
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
+ at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
+ at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
+ at com.mysql.jdbc.Util.handleNewInstance(Util.java:425)
+ at com.mysql.jdbc.ConnectionImpl.getInstance(ConnectionImpl.java:386)
+ at com.mysql.jdbc.NonRegisteringDriver.connect(NonRegisteringDriver.java:330)
+ at java.sql.DriverManager.getConnection(DriverManager.java:664)
+ at java.sql.DriverManager.getConnection(DriverManager.java:208)
+ at com.jolbox.bonecp.BoneCP.obtainRawInternalConnection(BoneCP.java:361)
+ at com.jolbox.bonecp.BoneCP.(BoneCP.java:416)
+ at com.jolbox.bonecp.BoneCPDataSource.getConnection(BoneCPDataSource.java:120)
+ at org.datanucleus.store.rdbms.ConnectionFactoryImpl$ManagedConnectionImpl.getConnection(ConnectionFactoryImpl.java:483)
+ at org.datanucleus.store.rdbms.RDBMSStoreManager.(RDBMSStoreManager.java:297)
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
+ at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
+ at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
+ at org.datanucleus.plugin.NonManagedPluginRegistry.createExecutableExtension(NonManagedPluginRegistry.java:606)
+ at org.datanucleus.plugin.PluginManager.createExecutableExtension(PluginManager.java:301)
+ at org.datanucleus.NucleusContextHelper.createStoreManagerForProperties(NucleusContextHelper.java:133)
+ at org.datanucleus.PersistenceNucleusContextImpl.initialise(PersistenceNucleusContextImpl.java:422)
+ at org.datanucleus.api.jdo.JDOPersistenceManagerFactory.freezeConfiguration(JDOPersistenceManagerFactory.java:817)
+ at org.datanucleus.api.jdo.JDOPersistenceManagerFactory.createPersistenceManagerFactory(JDOPersistenceManagerFactory.java:334)
+ at org.datanucleus.api.jdo.JDOPersistenceManagerFactory.getPersistenceManagerFactory(JDOPersistenceManagerFactory.java:213)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at javax.jdo.JDOHelper$16.run(JDOHelper.java:1965)
+ at java.security.AccessController.doPrivileged(Native Method)
+ at javax.jdo.JDOHelper.invoke(JDOHelper.java:1960)
+ at javax.jdo.JDOHelper.invokeGetPersistenceManagerFactoryOnImplementation(JDOHelper.java:1166)
+ at javax.jdo.JDOHelper.getPersistenceManagerFactory(JDOHelper.java:808)
+ at javax.jdo.JDOHelper.getPersistenceManagerFactory(JDOHelper.java:701)
+ at org.apache.hadoop.hive.metastore.ObjectStore.getPMF(ObjectStore.java:521)
+ at org.apache.hadoop.hive.metastore.ObjectStore.getPersistenceManager(ObjectStore.java:550)
+ at org.apache.hadoop.hive.metastore.ObjectStore.initializeHelper(ObjectStore.java:405)
+ at org.apache.hadoop.hive.metastore.ObjectStore.initialize(ObjectStore.java:342)
+ at org.apache.hadoop.hive.metastore.ObjectStore.setConf(ObjectStore.java:303)
+ at org.apache.hadoop.util.ReflectionUtils.setConf(ReflectionUtils.java:76)
+ at org.apache.hadoop.util.ReflectionUtils.newInstance(ReflectionUtils.java:136)
+ at org.apache.hadoop.hive.metastore.RawStoreProxy.(RawStoreProxy.java:58)
+ at org.apache.hadoop.hive.metastore.RawStoreProxy.getProxy(RawStoreProxy.java:67)
+ at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.newRawStoreForConf(HiveMetaStore.java:628)
+ at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.getMSForConf(HiveMetaStore.java:594)
+ at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.getMS(HiveMetaStore.java:588)
+ at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.createDefaultDB(HiveMetaStore.java:655)
+ at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.init(HiveMetaStore.java:431)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.hadoop.hive.metastore.RetryingHMSHandler.invokeInternal(RetryingHMSHandler.java:148)
+ at org.apache.hadoop.hive.metastore.RetryingHMSHandler.invoke(RetryingHMSHandler.java:107)
+ at org.apache.hadoop.hive.metastore.RetryingHMSHandler.(RetryingHMSHandler.java:79)
+ at org.apache.hadoop.hive.metastore.RetryingHMSHandler.getProxy(RetryingHMSHandler.java:92)
+ at org.apache.hadoop.hive.metastore.HiveMetaStore.newRetryingHMSHandler(HiveMetaStore.java:6902)
+ at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.(HiveMetaStoreClient.java:164)
+ at org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.(SessionHiveMetaStoreClient.java:70)
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
+ at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
+ at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
+ at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1707)
+ at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.(RetryingMetaStoreClient.java:83)
+ at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:133)
+ at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:104)
+ at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:3600)
+ at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3652)
+ at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3632)
+ at org.apache.hadoop.hive.ql.metadata.Hive.getAllFunctions(Hive.java:3894)
+ at org.apache.hadoop.hive.ql.metadata.Hive.reloadFunctions(Hive.java:248)
+ at org.apache.hadoop.hive.ql.metadata.Hive.registerAllFunctionsOnce(Hive.java:231)
+ at org.apache.hadoop.hive.ql.metadata.Hive.(Hive.java:388)
+ at org.apache.hadoop.hive.ql.metadata.Hive.create(Hive.java:332)
+ at org.apache.hadoop.hive.ql.metadata.Hive.getInternal(Hive.java:312)
+ at org.apache.hadoop.hive.ql.metadata.Hive.get(Hive.java:288)
+ at org.apache.spark.sql.hive.client.HiveClientImpl.client(HiveClientImpl.scala:260)
+ at org.apache.spark.sql.hive.client.HiveClientImpl.$anonfun$withHiveState$1(HiveClientImpl.scala:286)
+ at org.apache.spark.sql.hive.client.HiveClientImpl.liftedTree1$1(HiveClientImpl.scala:227)
+ at org.apache.spark.sql.hive.client.HiveClientImpl.retryLocked(HiveClientImpl.scala:226)
+ at org.apache.spark.sql.hive.client.HiveClientImpl.withHiveState(HiveClientImpl.scala:276)
+ at org.apache.spark.sql.hive.client.HiveClientImpl.databaseExists(HiveClientImpl.scala:389)
+ at org.apache.spark.sql.hive.HiveExternalCatalog.$anonfun$databaseExists$1(HiveExternalCatalog.scala:221)
+ at scala.runtime.java8.JFunction0$mcZ$sp.apply(JFunction0$mcZ$sp.java:23)
+ at org.apache.spark.sql.hive.HiveExternalCatalog.withClient(HiveExternalCatalog.scala:99)
+ at org.apache.spark.sql.hive.HiveExternalCatalog.databaseExists(HiveExternalCatalog.scala:221)
+ at org.apache.spark.sql.internal.SharedState.externalCatalog$lzycompute(SharedState.scala:137)
+ at org.apache.spark.sql.internal.SharedState.externalCatalog(SharedState.scala:127)
+ at org.apache.spark.sql.internal.SharedState.globalTempViewManager$lzycompute(SharedState.scala:157)
+ at org.apache.spark.sql.internal.SharedState.globalTempViewManager(SharedState.scala:155)
+ at org.apache.spark.sql.hive.HiveSessionStateBuilder.$anonfun$catalog$2(HiveSessionStateBuilder.scala:59)
+ at org.apache.spark.sql.catalyst.catalog.SessionCatalog.globalTempViewManager$lzycompute(SessionCatalog.scala:93)
+ at org.apache.spark.sql.catalyst.catalog.SessionCatalog.globalTempViewManager(SessionCatalog.scala:93)
+ at org.apache.spark.sql.catalyst.catalog.SessionCatalog.setCurrentDatabase(SessionCatalog.scala:260)
+ at org.apache.spark.sql.connector.catalog.CatalogManager.setCurrentNamespace(CatalogManager.scala:113)
+ at org.apache.spark.sql.execution.datasources.v2.SetCatalogAndNamespaceExec.$anonfun$run$2(SetCatalogAndNamespaceExec.scala:36)
+ at org.apache.spark.sql.execution.datasources.v2.SetCatalogAndNamespaceExec.$anonfun$run$2$adapted(SetCatalogAndNamespaceExec.scala:36)
+ at scala.Option.map(Option.scala:230)
+ at org.apache.spark.sql.execution.datasources.v2.SetCatalogAndNamespaceExec.run(SetCatalogAndNamespaceExec.scala:36)
+ at org.apache.spark.sql.execution.datasources.v2.V2CommandExec.result$lzycompute(V2CommandExec.scala:39)
+ at org.apache.spark.sql.execution.datasources.v2.V2CommandExec.result(V2CommandExec.scala:39)
+ at org.apache.spark.sql.execution.datasources.v2.V2CommandExec.executeCollect(V2CommandExec.scala:45)
+ at org.apache.spark.sql.Dataset.$anonfun$logicalPlan$1(Dataset.scala:229)
+ at org.apache.spark.sql.Dataset.$anonfun$withAction$1(Dataset.scala:3616)
+ at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$5(SQLExecution.scala:100)
+ at org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:160)
+ at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$1(SQLExecution.scala:87)
+ at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:763)
+ at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:64)
+ at org.apache.spark.sql.Dataset.withAction(Dataset.scala:3614)
+ at org.apache.spark.sql.Dataset.(Dataset.scala:229)
+ at org.apache.spark.sql.Dataset$.$anonfun$ofRows$2(Dataset.scala:100)
+ at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:763)
+ at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:97)
+ at org.apache.spark.sql.SparkSession.$anonfun$sql$1(SparkSession.scala:606)
+ at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:763)
+ at org.apache.spark.sql.SparkSession.sql(SparkSession.scala:601)
+ at com.atguigu.userprofile.ml.app.BusiGenderApp$.main(BusiGenderApp.scala:69)
+ at com.atguigu.userprofile.ml.app.BusiGenderApp.main(BusiGenderApp.scala)
+Caused by: java.net.ConnectException: Connection timed out: connect
+ at java.net.DualStackPlainSocketImpl.connect0(Native Method)
+ at java.net.DualStackPlainSocketImpl.socketConnect(DualStackPlainSocketImpl.java:75)
+ at java.net.AbstractPlainSocketImpl.doConnect(AbstractPlainSocketImpl.java:476)
+ at java.net.AbstractPlainSocketImpl.connectToAddress(AbstractPlainSocketImpl.java:218)
+ at java.net.AbstractPlainSocketImpl.connect(AbstractPlainSocketImpl.java:200)
+ at java.net.PlainSocketImpl.connect(PlainSocketImpl.java:162)
+ at java.net.SocksSocketImpl.connect(SocksSocketImpl.java:394)
+ at java.net.Socket.connect(Socket.java:606)
+ at com.mysql.jdbc.StandardSocketFactory.connect(StandardSocketFactory.java:211)
+ at com.mysql.jdbc.MysqlIO.(MysqlIO.java:301)
+ ... 126 more
+------
+
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
+ at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
+ at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
+ at com.jolbox.bonecp.PoolUtil.generateSQLException(PoolUtil.java:192)
+ at com.jolbox.bonecp.BoneCP.(BoneCP.java:422)
+ at com.jolbox.bonecp.BoneCPDataSource.getConnection(BoneCPDataSource.java:120)
+ at org.datanucleus.store.rdbms.ConnectionFactoryImpl$ManagedConnectionImpl.getConnection(ConnectionFactoryImpl.java:483)
+ at org.datanucleus.store.rdbms.RDBMSStoreManager.(RDBMSStoreManager.java:297)
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
+ at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
+ at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
+ at org.datanucleus.plugin.NonManagedPluginRegistry.createExecutableExtension(NonManagedPluginRegistry.java:606)
+ at org.datanucleus.plugin.PluginManager.createExecutableExtension(PluginManager.java:301)
+ at org.datanucleus.NucleusContextHelper.createStoreManagerForProperties(NucleusContextHelper.java:133)
+ at org.datanucleus.PersistenceNucleusContextImpl.initialise(PersistenceNucleusContextImpl.java:422)
+ at org.datanucleus.api.jdo.JDOPersistenceManagerFactory.freezeConfiguration(JDOPersistenceManagerFactory.java:817)
+ at org.datanucleus.api.jdo.JDOPersistenceManagerFactory.createPersistenceManagerFactory(JDOPersistenceManagerFactory.java:334)
+ at org.datanucleus.api.jdo.JDOPersistenceManagerFactory.getPersistenceManagerFactory(JDOPersistenceManagerFactory.java:213)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at javax.jdo.JDOHelper$16.run(JDOHelper.java:1965)
+ at java.security.AccessController.doPrivileged(Native Method)
+ at javax.jdo.JDOHelper.invoke(JDOHelper.java:1960)
+ at javax.jdo.JDOHelper.invokeGetPersistenceManagerFactoryOnImplementation(JDOHelper.java:1166)
+ at javax.jdo.JDOHelper.getPersistenceManagerFactory(JDOHelper.java:808)
+ at javax.jdo.JDOHelper.getPersistenceManagerFactory(JDOHelper.java:701)
+ at org.apache.hadoop.hive.metastore.ObjectStore.getPMF(ObjectStore.java:521)
+ at org.apache.hadoop.hive.metastore.ObjectStore.getPersistenceManager(ObjectStore.java:550)
+ at org.apache.hadoop.hive.metastore.ObjectStore.initializeHelper(ObjectStore.java:405)
+ at org.apache.hadoop.hive.metastore.ObjectStore.initialize(ObjectStore.java:342)
+ at org.apache.hadoop.hive.metastore.ObjectStore.setConf(ObjectStore.java:303)
+ at org.apache.hadoop.util.ReflectionUtils.setConf(ReflectionUtils.java:76)
+ at org.apache.hadoop.util.ReflectionUtils.newInstance(ReflectionUtils.java:136)
+ at org.apache.hadoop.hive.metastore.RawStoreProxy.(RawStoreProxy.java:58)
+ at org.apache.hadoop.hive.metastore.RawStoreProxy.getProxy(RawStoreProxy.java:67)
+ at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.newRawStoreForConf(HiveMetaStore.java:628)
+ at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.getMSForConf(HiveMetaStore.java:594)
+ at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.getMS(HiveMetaStore.java:588)
+ at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.createDefaultDB(HiveMetaStore.java:655)
+ at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.init(HiveMetaStore.java:431)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.hadoop.hive.metastore.RetryingHMSHandler.invokeInternal(RetryingHMSHandler.java:148)
+ at org.apache.hadoop.hive.metastore.RetryingHMSHandler.invoke(RetryingHMSHandler.java:107)
+ at org.apache.hadoop.hive.metastore.RetryingHMSHandler.(RetryingHMSHandler.java:79)
+ at org.apache.hadoop.hive.metastore.RetryingHMSHandler.getProxy(RetryingHMSHandler.java:92)
+ at org.apache.hadoop.hive.metastore.HiveMetaStore.newRetryingHMSHandler(HiveMetaStore.java:6902)
+ at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.(HiveMetaStoreClient.java:164)
+ at org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.(SessionHiveMetaStoreClient.java:70)
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
+ at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
+ at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
+ at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1707)
+ at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.(RetryingMetaStoreClient.java:83)
+ at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:133)
+ at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:104)
+ at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:3600)
+ at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3652)
+ at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3632)
+ at org.apache.hadoop.hive.ql.metadata.Hive.getAllFunctions(Hive.java:3894)
+ at org.apache.hadoop.hive.ql.metadata.Hive.reloadFunctions(Hive.java:248)
+ at org.apache.hadoop.hive.ql.metadata.Hive.registerAllFunctionsOnce(Hive.java:231)
+ at org.apache.hadoop.hive.ql.metadata.Hive.(Hive.java:388)
+ at org.apache.hadoop.hive.ql.metadata.Hive.create(Hive.java:332)
+ at org.apache.hadoop.hive.ql.metadata.Hive.getInternal(Hive.java:312)
+ at org.apache.hadoop.hive.ql.metadata.Hive.get(Hive.java:288)
+ at org.apache.spark.sql.hive.client.HiveClientImpl.client(HiveClientImpl.scala:260)
+ at org.apache.spark.sql.hive.client.HiveClientImpl.$anonfun$withHiveState$1(HiveClientImpl.scala:286)
+ at org.apache.spark.sql.hive.client.HiveClientImpl.liftedTree1$1(HiveClientImpl.scala:227)
+ at org.apache.spark.sql.hive.client.HiveClientImpl.retryLocked(HiveClientImpl.scala:226)
+ at org.apache.spark.sql.hive.client.HiveClientImpl.withHiveState(HiveClientImpl.scala:276)
+ at org.apache.spark.sql.hive.client.HiveClientImpl.databaseExists(HiveClientImpl.scala:389)
+ at org.apache.spark.sql.hive.HiveExternalCatalog.$anonfun$databaseExists$1(HiveExternalCatalog.scala:221)
+ at scala.runtime.java8.JFunction0$mcZ$sp.apply(JFunction0$mcZ$sp.java:23)
+ at org.apache.spark.sql.hive.HiveExternalCatalog.withClient(HiveExternalCatalog.scala:99)
+ at org.apache.spark.sql.hive.HiveExternalCatalog.databaseExists(HiveExternalCatalog.scala:221)
+ at org.apache.spark.sql.internal.SharedState.externalCatalog$lzycompute(SharedState.scala:137)
+ at org.apache.spark.sql.internal.SharedState.externalCatalog(SharedState.scala:127)
+ at org.apache.spark.sql.internal.SharedState.globalTempViewManager$lzycompute(SharedState.scala:157)
+ at org.apache.spark.sql.internal.SharedState.globalTempViewManager(SharedState.scala:155)
+ at org.apache.spark.sql.hive.HiveSessionStateBuilder.$anonfun$catalog$2(HiveSessionStateBuilder.scala:59)
+ at org.apache.spark.sql.catalyst.catalog.SessionCatalog.globalTempViewManager$lzycompute(SessionCatalog.scala:93)
+ at org.apache.spark.sql.catalyst.catalog.SessionCatalog.globalTempViewManager(SessionCatalog.scala:93)
+ at org.apache.spark.sql.catalyst.catalog.SessionCatalog.setCurrentDatabase(SessionCatalog.scala:260)
+ at org.apache.spark.sql.connector.catalog.CatalogManager.setCurrentNamespace(CatalogManager.scala:113)
+ at org.apache.spark.sql.execution.datasources.v2.SetCatalogAndNamespaceExec.$anonfun$run$2(SetCatalogAndNamespaceExec.scala:36)
+ at org.apache.spark.sql.execution.datasources.v2.SetCatalogAndNamespaceExec.$anonfun$run$2$adapted(SetCatalogAndNamespaceExec.scala:36)
+ at scala.Option.map(Option.scala:230)
+ at org.apache.spark.sql.execution.datasources.v2.SetCatalogAndNamespaceExec.run(SetCatalogAndNamespaceExec.scala:36)
+ at org.apache.spark.sql.execution.datasources.v2.V2CommandExec.result$lzycompute(V2CommandExec.scala:39)
+ at org.apache.spark.sql.execution.datasources.v2.V2CommandExec.result(V2CommandExec.scala:39)
+ at org.apache.spark.sql.execution.datasources.v2.V2CommandExec.executeCollect(V2CommandExec.scala:45)
+ at org.apache.spark.sql.Dataset.$anonfun$logicalPlan$1(Dataset.scala:229)
+ at org.apache.spark.sql.Dataset.$anonfun$withAction$1(Dataset.scala:3616)
+ at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$5(SQLExecution.scala:100)
+ at org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:160)
+ at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$1(SQLExecution.scala:87)
+ at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:763)
+ at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:64)
+ at org.apache.spark.sql.Dataset.withAction(Dataset.scala:3614)
+ at org.apache.spark.sql.Dataset.(Dataset.scala:229)
+ at org.apache.spark.sql.Dataset$.$anonfun$ofRows$2(Dataset.scala:100)
+ at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:763)
+ at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:97)
+ at org.apache.spark.sql.SparkSession.$anonfun$sql$1(SparkSession.scala:606)
+ at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:763)
+ at org.apache.spark.sql.SparkSession.sql(SparkSession.scala:601)
+ at com.atguigu.userprofile.ml.app.BusiGenderApp$.main(BusiGenderApp.scala:69)
+ at com.atguigu.userprofile.ml.app.BusiGenderApp.main(BusiGenderApp.scala)
+Caused by: com.mysql.jdbc.exceptions.jdbc4.CommunicationsException: Communications link failure
+
+The last packet sent successfully to the server was 0 milliseconds ago. The driver has not received any packets from the server.
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
+ at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
+ at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
+ at com.mysql.jdbc.Util.handleNewInstance(Util.java:425)
+ at com.mysql.jdbc.SQLError.createCommunicationsException(SQLError.java:990)
+ at com.mysql.jdbc.MysqlIO.(MysqlIO.java:342)
+ at com.mysql.jdbc.ConnectionImpl.coreConnect(ConnectionImpl.java:2197)
+ at com.mysql.jdbc.ConnectionImpl.connectOneTryOnly(ConnectionImpl.java:2230)
+ at com.mysql.jdbc.ConnectionImpl.createNewIO(ConnectionImpl.java:2025)
+ at com.mysql.jdbc.ConnectionImpl.(ConnectionImpl.java:778)
+ at com.mysql.jdbc.JDBC4Connection.(JDBC4Connection.java:47)
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
+ at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
+ at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
+ at com.mysql.jdbc.Util.handleNewInstance(Util.java:425)
+ at com.mysql.jdbc.ConnectionImpl.getInstance(ConnectionImpl.java:386)
+ at com.mysql.jdbc.NonRegisteringDriver.connect(NonRegisteringDriver.java:330)
+ at java.sql.DriverManager.getConnection(DriverManager.java:664)
+ at java.sql.DriverManager.getConnection(DriverManager.java:208)
+ at com.jolbox.bonecp.BoneCP.obtainRawInternalConnection(BoneCP.java:361)
+ at com.jolbox.bonecp.BoneCP.(BoneCP.java:416)
+ ... 110 more
+Caused by: java.net.ConnectException: Connection timed out: connect
+ at java.net.DualStackPlainSocketImpl.connect0(Native Method)
+ at java.net.DualStackPlainSocketImpl.socketConnect(DualStackPlainSocketImpl.java:75)
+ at java.net.AbstractPlainSocketImpl.doConnect(AbstractPlainSocketImpl.java:476)
+ at java.net.AbstractPlainSocketImpl.connectToAddress(AbstractPlainSocketImpl.java:218)
+ at java.net.AbstractPlainSocketImpl.connect(AbstractPlainSocketImpl.java:200)
+ at java.net.PlainSocketImpl.connect(PlainSocketImpl.java:162)
+ at java.net.SocksSocketImpl.connect(SocksSocketImpl.java:394)
+ at java.net.Socket.connect(Socket.java:606)
+ at com.mysql.jdbc.StandardSocketFactory.connect(StandardSocketFactory.java:211)
+ at com.mysql.jdbc.MysqlIO.(MysqlIO.java:301)
+ ... 126 more
+2022-05-24 19:07:07,524 ERROR --- [ main] DataNucleus.Datastore.Schema (line: 125) : Failed initialising database.
+Unable to open a test connection to the given database. JDBC url = jdbc:mysql://Ding202:3306/metastore?createDatabaseIfNotExist=true&characterEncoding=utf-8&useSSL=false, username = root. Terminating connection pool (set lazyInit to true if you expect to start your database after your app). Original Exception: ------
+com.mysql.jdbc.exceptions.jdbc4.CommunicationsException: Communications link failure
+
+The last packet sent successfully to the server was 0 milliseconds ago. The driver has not received any packets from the server.
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
+ at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
+ at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
+ at com.mysql.jdbc.Util.handleNewInstance(Util.java:425)
+ at com.mysql.jdbc.SQLError.createCommunicationsException(SQLError.java:990)
+ at com.mysql.jdbc.MysqlIO.(MysqlIO.java:342)
+ at com.mysql.jdbc.ConnectionImpl.coreConnect(ConnectionImpl.java:2197)
+ at com.mysql.jdbc.ConnectionImpl.connectOneTryOnly(ConnectionImpl.java:2230)
+ at com.mysql.jdbc.ConnectionImpl.createNewIO(ConnectionImpl.java:2025)
+ at com.mysql.jdbc.ConnectionImpl.(ConnectionImpl.java:778)
+ at com.mysql.jdbc.JDBC4Connection.(JDBC4Connection.java:47)
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
+ at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
+ at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
+ at com.mysql.jdbc.Util.handleNewInstance(Util.java:425)
+ at com.mysql.jdbc.ConnectionImpl.getInstance(ConnectionImpl.java:386)
+ at com.mysql.jdbc.NonRegisteringDriver.connect(NonRegisteringDriver.java:330)
+ at java.sql.DriverManager.getConnection(DriverManager.java:664)
+ at java.sql.DriverManager.getConnection(DriverManager.java:208)
+ at com.jolbox.bonecp.BoneCP.obtainRawInternalConnection(BoneCP.java:361)
+ at com.jolbox.bonecp.BoneCP.(BoneCP.java:416)
+ at com.jolbox.bonecp.BoneCPDataSource.getConnection(BoneCPDataSource.java:120)
+ at org.datanucleus.store.rdbms.ConnectionFactoryImpl$ManagedConnectionImpl.getConnection(ConnectionFactoryImpl.java:483)
+ at org.datanucleus.store.rdbms.RDBMSStoreManager.(RDBMSStoreManager.java:297)
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
+ at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
+ at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
+ at org.datanucleus.plugin.NonManagedPluginRegistry.createExecutableExtension(NonManagedPluginRegistry.java:606)
+ at org.datanucleus.plugin.PluginManager.createExecutableExtension(PluginManager.java:301)
+ at org.datanucleus.NucleusContextHelper.createStoreManagerForProperties(NucleusContextHelper.java:133)
+ at org.datanucleus.PersistenceNucleusContextImpl.initialise(PersistenceNucleusContextImpl.java:422)
+ at org.datanucleus.api.jdo.JDOPersistenceManagerFactory.freezeConfiguration(JDOPersistenceManagerFactory.java:817)
+ at org.datanucleus.api.jdo.JDOPersistenceManagerFactory.createPersistenceManagerFactory(JDOPersistenceManagerFactory.java:334)
+ at org.datanucleus.api.jdo.JDOPersistenceManagerFactory.getPersistenceManagerFactory(JDOPersistenceManagerFactory.java:213)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at javax.jdo.JDOHelper$16.run(JDOHelper.java:1965)
+ at java.security.AccessController.doPrivileged(Native Method)
+ at javax.jdo.JDOHelper.invoke(JDOHelper.java:1960)
+ at javax.jdo.JDOHelper.invokeGetPersistenceManagerFactoryOnImplementation(JDOHelper.java:1166)
+ at javax.jdo.JDOHelper.getPersistenceManagerFactory(JDOHelper.java:808)
+ at javax.jdo.JDOHelper.getPersistenceManagerFactory(JDOHelper.java:701)
+ at org.apache.hadoop.hive.metastore.ObjectStore.getPMF(ObjectStore.java:521)
+ at org.apache.hadoop.hive.metastore.ObjectStore.getPersistenceManager(ObjectStore.java:550)
+ at org.apache.hadoop.hive.metastore.ObjectStore.initializeHelper(ObjectStore.java:405)
+ at org.apache.hadoop.hive.metastore.ObjectStore.initialize(ObjectStore.java:342)
+ at org.apache.hadoop.hive.metastore.ObjectStore.setConf(ObjectStore.java:303)
+ at org.apache.hadoop.util.ReflectionUtils.setConf(ReflectionUtils.java:76)
+ at org.apache.hadoop.util.ReflectionUtils.newInstance(ReflectionUtils.java:136)
+ at org.apache.hadoop.hive.metastore.RawStoreProxy.(RawStoreProxy.java:58)
+ at org.apache.hadoop.hive.metastore.RawStoreProxy.getProxy(RawStoreProxy.java:67)
+ at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.newRawStoreForConf(HiveMetaStore.java:628)
+ at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.getMSForConf(HiveMetaStore.java:594)
+ at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.getMS(HiveMetaStore.java:588)
+ at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.createDefaultDB(HiveMetaStore.java:659)
+ at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.init(HiveMetaStore.java:431)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.hadoop.hive.metastore.RetryingHMSHandler.invokeInternal(RetryingHMSHandler.java:148)
+ at org.apache.hadoop.hive.metastore.RetryingHMSHandler.invoke(RetryingHMSHandler.java:107)
+ at org.apache.hadoop.hive.metastore.RetryingHMSHandler.(RetryingHMSHandler.java:79)
+ at org.apache.hadoop.hive.metastore.RetryingHMSHandler.getProxy(RetryingHMSHandler.java:92)
+ at org.apache.hadoop.hive.metastore.HiveMetaStore.newRetryingHMSHandler(HiveMetaStore.java:6902)
+ at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.(HiveMetaStoreClient.java:164)
+ at org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.(SessionHiveMetaStoreClient.java:70)
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
+ at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
+ at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
+ at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1707)
+ at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.(RetryingMetaStoreClient.java:83)
+ at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:133)
+ at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:104)
+ at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:3600)
+ at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3652)
+ at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3632)
+ at org.apache.hadoop.hive.ql.metadata.Hive.getAllFunctions(Hive.java:3894)
+ at org.apache.hadoop.hive.ql.metadata.Hive.reloadFunctions(Hive.java:248)
+ at org.apache.hadoop.hive.ql.metadata.Hive.registerAllFunctionsOnce(Hive.java:231)
+ at org.apache.hadoop.hive.ql.metadata.Hive.(Hive.java:388)
+ at org.apache.hadoop.hive.ql.metadata.Hive.create(Hive.java:332)
+ at org.apache.hadoop.hive.ql.metadata.Hive.getInternal(Hive.java:312)
+ at org.apache.hadoop.hive.ql.metadata.Hive.get(Hive.java:288)
+ at org.apache.spark.sql.hive.client.HiveClientImpl.client(HiveClientImpl.scala:260)
+ at org.apache.spark.sql.hive.client.HiveClientImpl.$anonfun$withHiveState$1(HiveClientImpl.scala:286)
+ at org.apache.spark.sql.hive.client.HiveClientImpl.liftedTree1$1(HiveClientImpl.scala:227)
+ at org.apache.spark.sql.hive.client.HiveClientImpl.retryLocked(HiveClientImpl.scala:226)
+ at org.apache.spark.sql.hive.client.HiveClientImpl.withHiveState(HiveClientImpl.scala:276)
+ at org.apache.spark.sql.hive.client.HiveClientImpl.databaseExists(HiveClientImpl.scala:389)
+ at org.apache.spark.sql.hive.HiveExternalCatalog.$anonfun$databaseExists$1(HiveExternalCatalog.scala:221)
+ at scala.runtime.java8.JFunction0$mcZ$sp.apply(JFunction0$mcZ$sp.java:23)
+ at org.apache.spark.sql.hive.HiveExternalCatalog.withClient(HiveExternalCatalog.scala:99)
+ at org.apache.spark.sql.hive.HiveExternalCatalog.databaseExists(HiveExternalCatalog.scala:221)
+ at org.apache.spark.sql.internal.SharedState.externalCatalog$lzycompute(SharedState.scala:137)
+ at org.apache.spark.sql.internal.SharedState.externalCatalog(SharedState.scala:127)
+ at org.apache.spark.sql.internal.SharedState.globalTempViewManager$lzycompute(SharedState.scala:157)
+ at org.apache.spark.sql.internal.SharedState.globalTempViewManager(SharedState.scala:155)
+ at org.apache.spark.sql.hive.HiveSessionStateBuilder.$anonfun$catalog$2(HiveSessionStateBuilder.scala:59)
+ at org.apache.spark.sql.catalyst.catalog.SessionCatalog.globalTempViewManager$lzycompute(SessionCatalog.scala:93)
+ at org.apache.spark.sql.catalyst.catalog.SessionCatalog.globalTempViewManager(SessionCatalog.scala:93)
+ at org.apache.spark.sql.catalyst.catalog.SessionCatalog.setCurrentDatabase(SessionCatalog.scala:260)
+ at org.apache.spark.sql.connector.catalog.CatalogManager.setCurrentNamespace(CatalogManager.scala:113)
+ at org.apache.spark.sql.execution.datasources.v2.SetCatalogAndNamespaceExec.$anonfun$run$2(SetCatalogAndNamespaceExec.scala:36)
+ at org.apache.spark.sql.execution.datasources.v2.SetCatalogAndNamespaceExec.$anonfun$run$2$adapted(SetCatalogAndNamespaceExec.scala:36)
+ at scala.Option.map(Option.scala:230)
+ at org.apache.spark.sql.execution.datasources.v2.SetCatalogAndNamespaceExec.run(SetCatalogAndNamespaceExec.scala:36)
+ at org.apache.spark.sql.execution.datasources.v2.V2CommandExec.result$lzycompute(V2CommandExec.scala:39)
+ at org.apache.spark.sql.execution.datasources.v2.V2CommandExec.result(V2CommandExec.scala:39)
+ at org.apache.spark.sql.execution.datasources.v2.V2CommandExec.executeCollect(V2CommandExec.scala:45)
+ at org.apache.spark.sql.Dataset.$anonfun$logicalPlan$1(Dataset.scala:229)
+ at org.apache.spark.sql.Dataset.$anonfun$withAction$1(Dataset.scala:3616)
+ at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$5(SQLExecution.scala:100)
+ at org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:160)
+ at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$1(SQLExecution.scala:87)
+ at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:763)
+ at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:64)
+ at org.apache.spark.sql.Dataset.withAction(Dataset.scala:3614)
+ at org.apache.spark.sql.Dataset.(Dataset.scala:229)
+ at org.apache.spark.sql.Dataset$.$anonfun$ofRows$2(Dataset.scala:100)
+ at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:763)
+ at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:97)
+ at org.apache.spark.sql.SparkSession.$anonfun$sql$1(SparkSession.scala:606)
+ at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:763)
+ at org.apache.spark.sql.SparkSession.sql(SparkSession.scala:601)
+ at com.atguigu.userprofile.ml.app.BusiGenderApp$.main(BusiGenderApp.scala:69)
+ at com.atguigu.userprofile.ml.app.BusiGenderApp.main(BusiGenderApp.scala)
+Caused by: java.net.ConnectException: Connection timed out: connect
+ at java.net.DualStackPlainSocketImpl.connect0(Native Method)
+ at java.net.DualStackPlainSocketImpl.socketConnect(DualStackPlainSocketImpl.java:75)
+ at java.net.AbstractPlainSocketImpl.doConnect(AbstractPlainSocketImpl.java:476)
+ at java.net.AbstractPlainSocketImpl.connectToAddress(AbstractPlainSocketImpl.java:218)
+ at java.net.AbstractPlainSocketImpl.connect(AbstractPlainSocketImpl.java:200)
+ at java.net.PlainSocketImpl.connect(PlainSocketImpl.java:162)
+ at java.net.SocksSocketImpl.connect(SocksSocketImpl.java:394)
+ at java.net.Socket.connect(Socket.java:606)
+ at com.mysql.jdbc.StandardSocketFactory.connect(StandardSocketFactory.java:211)
+ at com.mysql.jdbc.MysqlIO.(MysqlIO.java:301)
+ ... 126 more
+------
+
+org.datanucleus.exceptions.NucleusDataStoreException: Unable to open a test connection to the given database. JDBC url = jdbc:mysql://Ding202:3306/metastore?createDatabaseIfNotExist=true&characterEncoding=utf-8&useSSL=false, username = root. Terminating connection pool (set lazyInit to true if you expect to start your database after your app). Original Exception: ------
+com.mysql.jdbc.exceptions.jdbc4.CommunicationsException: Communications link failure
+
+The last packet sent successfully to the server was 0 milliseconds ago. The driver has not received any packets from the server.
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
+ at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
+ at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
+ at com.mysql.jdbc.Util.handleNewInstance(Util.java:425)
+ at com.mysql.jdbc.SQLError.createCommunicationsException(SQLError.java:990)
+ at com.mysql.jdbc.MysqlIO.(MysqlIO.java:342)
+ at com.mysql.jdbc.ConnectionImpl.coreConnect(ConnectionImpl.java:2197)
+ at com.mysql.jdbc.ConnectionImpl.connectOneTryOnly(ConnectionImpl.java:2230)
+ at com.mysql.jdbc.ConnectionImpl.createNewIO(ConnectionImpl.java:2025)
+ at com.mysql.jdbc.ConnectionImpl.(ConnectionImpl.java:778)
+ at com.mysql.jdbc.JDBC4Connection.(JDBC4Connection.java:47)
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
+ at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
+ at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
+ at com.mysql.jdbc.Util.handleNewInstance(Util.java:425)
+ at com.mysql.jdbc.ConnectionImpl.getInstance(ConnectionImpl.java:386)
+ at com.mysql.jdbc.NonRegisteringDriver.connect(NonRegisteringDriver.java:330)
+ at java.sql.DriverManager.getConnection(DriverManager.java:664)
+ at java.sql.DriverManager.getConnection(DriverManager.java:208)
+ at com.jolbox.bonecp.BoneCP.obtainRawInternalConnection(BoneCP.java:361)
+ at com.jolbox.bonecp.BoneCP.(BoneCP.java:416)
+ at com.jolbox.bonecp.BoneCPDataSource.getConnection(BoneCPDataSource.java:120)
+ at org.datanucleus.store.rdbms.ConnectionFactoryImpl$ManagedConnectionImpl.getConnection(ConnectionFactoryImpl.java:483)
+ at org.datanucleus.store.rdbms.RDBMSStoreManager.(RDBMSStoreManager.java:297)
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
+ at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
+ at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
+ at org.datanucleus.plugin.NonManagedPluginRegistry.createExecutableExtension(NonManagedPluginRegistry.java:606)
+ at org.datanucleus.plugin.PluginManager.createExecutableExtension(PluginManager.java:301)
+ at org.datanucleus.NucleusContextHelper.createStoreManagerForProperties(NucleusContextHelper.java:133)
+ at org.datanucleus.PersistenceNucleusContextImpl.initialise(PersistenceNucleusContextImpl.java:422)
+ at org.datanucleus.api.jdo.JDOPersistenceManagerFactory.freezeConfiguration(JDOPersistenceManagerFactory.java:817)
+ at org.datanucleus.api.jdo.JDOPersistenceManagerFactory.createPersistenceManagerFactory(JDOPersistenceManagerFactory.java:334)
+ at org.datanucleus.api.jdo.JDOPersistenceManagerFactory.getPersistenceManagerFactory(JDOPersistenceManagerFactory.java:213)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at javax.jdo.JDOHelper$16.run(JDOHelper.java:1965)
+ at java.security.AccessController.doPrivileged(Native Method)
+ at javax.jdo.JDOHelper.invoke(JDOHelper.java:1960)
+ at javax.jdo.JDOHelper.invokeGetPersistenceManagerFactoryOnImplementation(JDOHelper.java:1166)
+ at javax.jdo.JDOHelper.getPersistenceManagerFactory(JDOHelper.java:808)
+ at javax.jdo.JDOHelper.getPersistenceManagerFactory(JDOHelper.java:701)
+ at org.apache.hadoop.hive.metastore.ObjectStore.getPMF(ObjectStore.java:521)
+ at org.apache.hadoop.hive.metastore.ObjectStore.getPersistenceManager(ObjectStore.java:550)
+ at org.apache.hadoop.hive.metastore.ObjectStore.initializeHelper(ObjectStore.java:405)
+ at org.apache.hadoop.hive.metastore.ObjectStore.initialize(ObjectStore.java:342)
+ at org.apache.hadoop.hive.metastore.ObjectStore.setConf(ObjectStore.java:303)
+ at org.apache.hadoop.util.ReflectionUtils.setConf(ReflectionUtils.java:76)
+ at org.apache.hadoop.util.ReflectionUtils.newInstance(ReflectionUtils.java:136)
+ at org.apache.hadoop.hive.metastore.RawStoreProxy.(RawStoreProxy.java:58)
+ at org.apache.hadoop.hive.metastore.RawStoreProxy.getProxy(RawStoreProxy.java:67)
+ at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.newRawStoreForConf(HiveMetaStore.java:628)
+ at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.getMSForConf(HiveMetaStore.java:594)
+ at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.getMS(HiveMetaStore.java:588)
+ at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.createDefaultDB(HiveMetaStore.java:659)
+ at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.init(HiveMetaStore.java:431)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.hadoop.hive.metastore.RetryingHMSHandler.invokeInternal(RetryingHMSHandler.java:148)
+ at org.apache.hadoop.hive.metastore.RetryingHMSHandler.invoke(RetryingHMSHandler.java:107)
+ at org.apache.hadoop.hive.metastore.RetryingHMSHandler.(RetryingHMSHandler.java:79)
+ at org.apache.hadoop.hive.metastore.RetryingHMSHandler.getProxy(RetryingHMSHandler.java:92)
+ at org.apache.hadoop.hive.metastore.HiveMetaStore.newRetryingHMSHandler(HiveMetaStore.java:6902)
+ at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.(HiveMetaStoreClient.java:164)
+ at org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.(SessionHiveMetaStoreClient.java:70)
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
+ at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
+ at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
+ at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1707)
+ at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.(RetryingMetaStoreClient.java:83)
+ at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:133)
+ at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:104)
+ at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:3600)
+ at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3652)
+ at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3632)
+ at org.apache.hadoop.hive.ql.metadata.Hive.getAllFunctions(Hive.java:3894)
+ at org.apache.hadoop.hive.ql.metadata.Hive.reloadFunctions(Hive.java:248)
+ at org.apache.hadoop.hive.ql.metadata.Hive.registerAllFunctionsOnce(Hive.java:231)
+ at org.apache.hadoop.hive.ql.metadata.Hive.(Hive.java:388)
+ at org.apache.hadoop.hive.ql.metadata.Hive.create(Hive.java:332)
+ at org.apache.hadoop.hive.ql.metadata.Hive.getInternal(Hive.java:312)
+ at org.apache.hadoop.hive.ql.metadata.Hive.get(Hive.java:288)
+ at org.apache.spark.sql.hive.client.HiveClientImpl.client(HiveClientImpl.scala:260)
+ at org.apache.spark.sql.hive.client.HiveClientImpl.$anonfun$withHiveState$1(HiveClientImpl.scala:286)
+ at org.apache.spark.sql.hive.client.HiveClientImpl.liftedTree1$1(HiveClientImpl.scala:227)
+ at org.apache.spark.sql.hive.client.HiveClientImpl.retryLocked(HiveClientImpl.scala:226)
+ at org.apache.spark.sql.hive.client.HiveClientImpl.withHiveState(HiveClientImpl.scala:276)
+ at org.apache.spark.sql.hive.client.HiveClientImpl.databaseExists(HiveClientImpl.scala:389)
+ at org.apache.spark.sql.hive.HiveExternalCatalog.$anonfun$databaseExists$1(HiveExternalCatalog.scala:221)
+ at scala.runtime.java8.JFunction0$mcZ$sp.apply(JFunction0$mcZ$sp.java:23)
+ at org.apache.spark.sql.hive.HiveExternalCatalog.withClient(HiveExternalCatalog.scala:99)
+ at org.apache.spark.sql.hive.HiveExternalCatalog.databaseExists(HiveExternalCatalog.scala:221)
+ at org.apache.spark.sql.internal.SharedState.externalCatalog$lzycompute(SharedState.scala:137)
+ at org.apache.spark.sql.internal.SharedState.externalCatalog(SharedState.scala:127)
+ at org.apache.spark.sql.internal.SharedState.globalTempViewManager$lzycompute(SharedState.scala:157)
+ at org.apache.spark.sql.internal.SharedState.globalTempViewManager(SharedState.scala:155)
+ at org.apache.spark.sql.hive.HiveSessionStateBuilder.$anonfun$catalog$2(HiveSessionStateBuilder.scala:59)
+ at org.apache.spark.sql.catalyst.catalog.SessionCatalog.globalTempViewManager$lzycompute(SessionCatalog.scala:93)
+ at org.apache.spark.sql.catalyst.catalog.SessionCatalog.globalTempViewManager(SessionCatalog.scala:93)
+ at org.apache.spark.sql.catalyst.catalog.SessionCatalog.setCurrentDatabase(SessionCatalog.scala:260)
+ at org.apache.spark.sql.connector.catalog.CatalogManager.setCurrentNamespace(CatalogManager.scala:113)
+ at org.apache.spark.sql.execution.datasources.v2.SetCatalogAndNamespaceExec.$anonfun$run$2(SetCatalogAndNamespaceExec.scala:36)
+ at org.apache.spark.sql.execution.datasources.v2.SetCatalogAndNamespaceExec.$anonfun$run$2$adapted(SetCatalogAndNamespaceExec.scala:36)
+ at scala.Option.map(Option.scala:230)
+ at org.apache.spark.sql.execution.datasources.v2.SetCatalogAndNamespaceExec.run(SetCatalogAndNamespaceExec.scala:36)
+ at org.apache.spark.sql.execution.datasources.v2.V2CommandExec.result$lzycompute(V2CommandExec.scala:39)
+ at org.apache.spark.sql.execution.datasources.v2.V2CommandExec.result(V2CommandExec.scala:39)
+ at org.apache.spark.sql.execution.datasources.v2.V2CommandExec.executeCollect(V2CommandExec.scala:45)
+ at org.apache.spark.sql.Dataset.$anonfun$logicalPlan$1(Dataset.scala:229)
+ at org.apache.spark.sql.Dataset.$anonfun$withAction$1(Dataset.scala:3616)
+ at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$5(SQLExecution.scala:100)
+ at org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:160)
+ at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$1(SQLExecution.scala:87)
+ at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:763)
+ at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:64)
+ at org.apache.spark.sql.Dataset.withAction(Dataset.scala:3614)
+ at org.apache.spark.sql.Dataset.(Dataset.scala:229)
+ at org.apache.spark.sql.Dataset$.$anonfun$ofRows$2(Dataset.scala:100)
+ at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:763)
+ at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:97)
+ at org.apache.spark.sql.SparkSession.$anonfun$sql$1(SparkSession.scala:606)
+ at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:763)
+ at org.apache.spark.sql.SparkSession.sql(SparkSession.scala:601)
+ at com.atguigu.userprofile.ml.app.BusiGenderApp$.main(BusiGenderApp.scala:69)
+ at com.atguigu.userprofile.ml.app.BusiGenderApp.main(BusiGenderApp.scala)
+Caused by: java.net.ConnectException: Connection timed out: connect
+ at java.net.DualStackPlainSocketImpl.connect0(Native Method)
+ at java.net.DualStackPlainSocketImpl.socketConnect(DualStackPlainSocketImpl.java:75)
+ at java.net.AbstractPlainSocketImpl.doConnect(AbstractPlainSocketImpl.java:476)
+ at java.net.AbstractPlainSocketImpl.connectToAddress(AbstractPlainSocketImpl.java:218)
+ at java.net.AbstractPlainSocketImpl.connect(AbstractPlainSocketImpl.java:200)
+ at java.net.PlainSocketImpl.connect(PlainSocketImpl.java:162)
+ at java.net.SocksSocketImpl.connect(SocksSocketImpl.java:394)
+ at java.net.Socket.connect(Socket.java:606)
+ at com.mysql.jdbc.StandardSocketFactory.connect(StandardSocketFactory.java:211)
+ at com.mysql.jdbc.MysqlIO.(MysqlIO.java:301)
+ ... 126 more
+------
+
+ at org.datanucleus.store.rdbms.ConnectionFactoryImpl$ManagedConnectionImpl.getConnection(ConnectionFactoryImpl.java:498)
+ at org.datanucleus.store.rdbms.RDBMSStoreManager.(RDBMSStoreManager.java:297)
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
+ at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
+ at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
+ at org.datanucleus.plugin.NonManagedPluginRegistry.createExecutableExtension(NonManagedPluginRegistry.java:606)
+ at org.datanucleus.plugin.PluginManager.createExecutableExtension(PluginManager.java:301)
+ at org.datanucleus.NucleusContextHelper.createStoreManagerForProperties(NucleusContextHelper.java:133)
+ at org.datanucleus.PersistenceNucleusContextImpl.initialise(PersistenceNucleusContextImpl.java:422)
+ at org.datanucleus.api.jdo.JDOPersistenceManagerFactory.freezeConfiguration(JDOPersistenceManagerFactory.java:817)
+ at org.datanucleus.api.jdo.JDOPersistenceManagerFactory.createPersistenceManagerFactory(JDOPersistenceManagerFactory.java:334)
+ at org.datanucleus.api.jdo.JDOPersistenceManagerFactory.getPersistenceManagerFactory(JDOPersistenceManagerFactory.java:213)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at javax.jdo.JDOHelper$16.run(JDOHelper.java:1965)
+ at java.security.AccessController.doPrivileged(Native Method)
+ at javax.jdo.JDOHelper.invoke(JDOHelper.java:1960)
+ at javax.jdo.JDOHelper.invokeGetPersistenceManagerFactoryOnImplementation(JDOHelper.java:1166)
+ at javax.jdo.JDOHelper.getPersistenceManagerFactory(JDOHelper.java:808)
+ at javax.jdo.JDOHelper.getPersistenceManagerFactory(JDOHelper.java:701)
+ at org.apache.hadoop.hive.metastore.ObjectStore.getPMF(ObjectStore.java:521)
+ at org.apache.hadoop.hive.metastore.ObjectStore.getPersistenceManager(ObjectStore.java:550)
+ at org.apache.hadoop.hive.metastore.ObjectStore.initializeHelper(ObjectStore.java:405)
+ at org.apache.hadoop.hive.metastore.ObjectStore.initialize(ObjectStore.java:342)
+ at org.apache.hadoop.hive.metastore.ObjectStore.setConf(ObjectStore.java:303)
+ at org.apache.hadoop.util.ReflectionUtils.setConf(ReflectionUtils.java:76)
+ at org.apache.hadoop.util.ReflectionUtils.newInstance(ReflectionUtils.java:136)
+ at org.apache.hadoop.hive.metastore.RawStoreProxy.(RawStoreProxy.java:58)
+ at org.apache.hadoop.hive.metastore.RawStoreProxy.getProxy(RawStoreProxy.java:67)
+ at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.newRawStoreForConf(HiveMetaStore.java:628)
+ at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.getMSForConf(HiveMetaStore.java:594)
+ at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.getMS(HiveMetaStore.java:588)
+ at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.createDefaultDB(HiveMetaStore.java:659)
+ at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.init(HiveMetaStore.java:431)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.hadoop.hive.metastore.RetryingHMSHandler.invokeInternal(RetryingHMSHandler.java:148)
+ at org.apache.hadoop.hive.metastore.RetryingHMSHandler.invoke(RetryingHMSHandler.java:107)
+ at org.apache.hadoop.hive.metastore.RetryingHMSHandler.(RetryingHMSHandler.java:79)
+ at org.apache.hadoop.hive.metastore.RetryingHMSHandler.getProxy(RetryingHMSHandler.java:92)
+ at org.apache.hadoop.hive.metastore.HiveMetaStore.newRetryingHMSHandler(HiveMetaStore.java:6902)
+ at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.(HiveMetaStoreClient.java:164)
+ at org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.(SessionHiveMetaStoreClient.java:70)
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
+ at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
+ at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
+ at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1707)
+ at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.(RetryingMetaStoreClient.java:83)
+ at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:133)
+ at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:104)
+ at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:3600)
+ at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3652)
+ at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3632)
+ at org.apache.hadoop.hive.ql.metadata.Hive.getAllFunctions(Hive.java:3894)
+ at org.apache.hadoop.hive.ql.metadata.Hive.reloadFunctions(Hive.java:248)
+ at org.apache.hadoop.hive.ql.metadata.Hive.registerAllFunctionsOnce(Hive.java:231)
+ at org.apache.hadoop.hive.ql.metadata.Hive.(Hive.java:388)
+ at org.apache.hadoop.hive.ql.metadata.Hive.create(Hive.java:332)
+ at org.apache.hadoop.hive.ql.metadata.Hive.getInternal(Hive.java:312)
+ at org.apache.hadoop.hive.ql.metadata.Hive.get(Hive.java:288)
+ at org.apache.spark.sql.hive.client.HiveClientImpl.client(HiveClientImpl.scala:260)
+ at org.apache.spark.sql.hive.client.HiveClientImpl.$anonfun$withHiveState$1(HiveClientImpl.scala:286)
+ at org.apache.spark.sql.hive.client.HiveClientImpl.liftedTree1$1(HiveClientImpl.scala:227)
+ at org.apache.spark.sql.hive.client.HiveClientImpl.retryLocked(HiveClientImpl.scala:226)
+ at org.apache.spark.sql.hive.client.HiveClientImpl.withHiveState(HiveClientImpl.scala:276)
+ at org.apache.spark.sql.hive.client.HiveClientImpl.databaseExists(HiveClientImpl.scala:389)
+ at org.apache.spark.sql.hive.HiveExternalCatalog.$anonfun$databaseExists$1(HiveExternalCatalog.scala:221)
+ at scala.runtime.java8.JFunction0$mcZ$sp.apply(JFunction0$mcZ$sp.java:23)
+ at org.apache.spark.sql.hive.HiveExternalCatalog.withClient(HiveExternalCatalog.scala:99)
+ at org.apache.spark.sql.hive.HiveExternalCatalog.databaseExists(HiveExternalCatalog.scala:221)
+ at org.apache.spark.sql.internal.SharedState.externalCatalog$lzycompute(SharedState.scala:137)
+ at org.apache.spark.sql.internal.SharedState.externalCatalog(SharedState.scala:127)
+ at org.apache.spark.sql.internal.SharedState.globalTempViewManager$lzycompute(SharedState.scala:157)
+ at org.apache.spark.sql.internal.SharedState.globalTempViewManager(SharedState.scala:155)
+ at org.apache.spark.sql.hive.HiveSessionStateBuilder.$anonfun$catalog$2(HiveSessionStateBuilder.scala:59)
+ at org.apache.spark.sql.catalyst.catalog.SessionCatalog.globalTempViewManager$lzycompute(SessionCatalog.scala:93)
+ at org.apache.spark.sql.catalyst.catalog.SessionCatalog.globalTempViewManager(SessionCatalog.scala:93)
+ at org.apache.spark.sql.catalyst.catalog.SessionCatalog.setCurrentDatabase(SessionCatalog.scala:260)
+ at org.apache.spark.sql.connector.catalog.CatalogManager.setCurrentNamespace(CatalogManager.scala:113)
+ at org.apache.spark.sql.execution.datasources.v2.SetCatalogAndNamespaceExec.$anonfun$run$2(SetCatalogAndNamespaceExec.scala:36)
+ at org.apache.spark.sql.execution.datasources.v2.SetCatalogAndNamespaceExec.$anonfun$run$2$adapted(SetCatalogAndNamespaceExec.scala:36)
+ at scala.Option.map(Option.scala:230)
+ at org.apache.spark.sql.execution.datasources.v2.SetCatalogAndNamespaceExec.run(SetCatalogAndNamespaceExec.scala:36)
+ at org.apache.spark.sql.execution.datasources.v2.V2CommandExec.result$lzycompute(V2CommandExec.scala:39)
+ at org.apache.spark.sql.execution.datasources.v2.V2CommandExec.result(V2CommandExec.scala:39)
+ at org.apache.spark.sql.execution.datasources.v2.V2CommandExec.executeCollect(V2CommandExec.scala:45)
+ at org.apache.spark.sql.Dataset.$anonfun$logicalPlan$1(Dataset.scala:229)
+ at org.apache.spark.sql.Dataset.$anonfun$withAction$1(Dataset.scala:3616)
+ at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$5(SQLExecution.scala:100)
+ at org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:160)
+ at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$1(SQLExecution.scala:87)
+ at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:763)
+ at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:64)
+ at org.apache.spark.sql.Dataset.withAction(Dataset.scala:3614)
+ at org.apache.spark.sql.Dataset.(Dataset.scala:229)
+ at org.apache.spark.sql.Dataset$.$anonfun$ofRows$2(Dataset.scala:100)
+ at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:763)
+ at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:97)
+ at org.apache.spark.sql.SparkSession.$anonfun$sql$1(SparkSession.scala:606)
+ at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:763)
+ at org.apache.spark.sql.SparkSession.sql(SparkSession.scala:601)
+ at com.atguigu.userprofile.ml.app.BusiGenderApp$.main(BusiGenderApp.scala:69)
+ at com.atguigu.userprofile.ml.app.BusiGenderApp.main(BusiGenderApp.scala)
+Caused by: java.sql.SQLException: Unable to open a test connection to the given database. JDBC url = jdbc:mysql://Ding202:3306/metastore?createDatabaseIfNotExist=true&characterEncoding=utf-8&useSSL=false, username = root. Terminating connection pool (set lazyInit to true if you expect to start your database after your app). Original Exception: ------
+com.mysql.jdbc.exceptions.jdbc4.CommunicationsException: Communications link failure
+
+The last packet sent successfully to the server was 0 milliseconds ago. The driver has not received any packets from the server.
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
+ at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
+ at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
+ at com.mysql.jdbc.Util.handleNewInstance(Util.java:425)
+ at com.mysql.jdbc.SQLError.createCommunicationsException(SQLError.java:990)
+ at com.mysql.jdbc.MysqlIO.(MysqlIO.java:342)
+ at com.mysql.jdbc.ConnectionImpl.coreConnect(ConnectionImpl.java:2197)
+ at com.mysql.jdbc.ConnectionImpl.connectOneTryOnly(ConnectionImpl.java:2230)
+ at com.mysql.jdbc.ConnectionImpl.createNewIO(ConnectionImpl.java:2025)
+ at com.mysql.jdbc.ConnectionImpl.(ConnectionImpl.java:778)
+ at com.mysql.jdbc.JDBC4Connection.(JDBC4Connection.java:47)
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
+ at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
+ at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
+ at com.mysql.jdbc.Util.handleNewInstance(Util.java:425)
+ at com.mysql.jdbc.ConnectionImpl.getInstance(ConnectionImpl.java:386)
+ at com.mysql.jdbc.NonRegisteringDriver.connect(NonRegisteringDriver.java:330)
+ at java.sql.DriverManager.getConnection(DriverManager.java:664)
+ at java.sql.DriverManager.getConnection(DriverManager.java:208)
+ at com.jolbox.bonecp.BoneCP.obtainRawInternalConnection(BoneCP.java:361)
+ at com.jolbox.bonecp.BoneCP.(BoneCP.java:416)
+ at com.jolbox.bonecp.BoneCPDataSource.getConnection(BoneCPDataSource.java:120)
+ at org.datanucleus.store.rdbms.ConnectionFactoryImpl$ManagedConnectionImpl.getConnection(ConnectionFactoryImpl.java:483)
+ at org.datanucleus.store.rdbms.RDBMSStoreManager.(RDBMSStoreManager.java:297)
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
+ at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
+ at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
+ at org.datanucleus.plugin.NonManagedPluginRegistry.createExecutableExtension(NonManagedPluginRegistry.java:606)
+ at org.datanucleus.plugin.PluginManager.createExecutableExtension(PluginManager.java:301)
+ at org.datanucleus.NucleusContextHelper.createStoreManagerForProperties(NucleusContextHelper.java:133)
+ at org.datanucleus.PersistenceNucleusContextImpl.initialise(PersistenceNucleusContextImpl.java:422)
+ at org.datanucleus.api.jdo.JDOPersistenceManagerFactory.freezeConfiguration(JDOPersistenceManagerFactory.java:817)
+ at org.datanucleus.api.jdo.JDOPersistenceManagerFactory.createPersistenceManagerFactory(JDOPersistenceManagerFactory.java:334)
+ at org.datanucleus.api.jdo.JDOPersistenceManagerFactory.getPersistenceManagerFactory(JDOPersistenceManagerFactory.java:213)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at javax.jdo.JDOHelper$16.run(JDOHelper.java:1965)
+ at java.security.AccessController.doPrivileged(Native Method)
+ at javax.jdo.JDOHelper.invoke(JDOHelper.java:1960)
+ at javax.jdo.JDOHelper.invokeGetPersistenceManagerFactoryOnImplementation(JDOHelper.java:1166)
+ at javax.jdo.JDOHelper.getPersistenceManagerFactory(JDOHelper.java:808)
+ at javax.jdo.JDOHelper.getPersistenceManagerFactory(JDOHelper.java:701)
+ at org.apache.hadoop.hive.metastore.ObjectStore.getPMF(ObjectStore.java:521)
+ at org.apache.hadoop.hive.metastore.ObjectStore.getPersistenceManager(ObjectStore.java:550)
+ at org.apache.hadoop.hive.metastore.ObjectStore.initializeHelper(ObjectStore.java:405)
+ at org.apache.hadoop.hive.metastore.ObjectStore.initialize(ObjectStore.java:342)
+ at org.apache.hadoop.hive.metastore.ObjectStore.setConf(ObjectStore.java:303)
+ at org.apache.hadoop.util.ReflectionUtils.setConf(ReflectionUtils.java:76)
+ at org.apache.hadoop.util.ReflectionUtils.newInstance(ReflectionUtils.java:136)
+ at org.apache.hadoop.hive.metastore.RawStoreProxy.(RawStoreProxy.java:58)
+ at org.apache.hadoop.hive.metastore.RawStoreProxy.getProxy(RawStoreProxy.java:67)
+ at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.newRawStoreForConf(HiveMetaStore.java:628)
+ at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.getMSForConf(HiveMetaStore.java:594)
+ at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.getMS(HiveMetaStore.java:588)
+ at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.createDefaultDB(HiveMetaStore.java:659)
+ at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.init(HiveMetaStore.java:431)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.hadoop.hive.metastore.RetryingHMSHandler.invokeInternal(RetryingHMSHandler.java:148)
+ at org.apache.hadoop.hive.metastore.RetryingHMSHandler.invoke(RetryingHMSHandler.java:107)
+ at org.apache.hadoop.hive.metastore.RetryingHMSHandler.(RetryingHMSHandler.java:79)
+ at org.apache.hadoop.hive.metastore.RetryingHMSHandler.getProxy(RetryingHMSHandler.java:92)
+ at org.apache.hadoop.hive.metastore.HiveMetaStore.newRetryingHMSHandler(HiveMetaStore.java:6902)
+ at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.(HiveMetaStoreClient.java:164)
+ at org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.(SessionHiveMetaStoreClient.java:70)
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
+ at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
+ at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
+ at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1707)
+ at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.(RetryingMetaStoreClient.java:83)
+ at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:133)
+ at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:104)
+ at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:3600)
+ at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3652)
+ at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3632)
+ at org.apache.hadoop.hive.ql.metadata.Hive.getAllFunctions(Hive.java:3894)
+ at org.apache.hadoop.hive.ql.metadata.Hive.reloadFunctions(Hive.java:248)
+ at org.apache.hadoop.hive.ql.metadata.Hive.registerAllFunctionsOnce(Hive.java:231)
+ at org.apache.hadoop.hive.ql.metadata.Hive.(Hive.java:388)
+ at org.apache.hadoop.hive.ql.metadata.Hive.create(Hive.java:332)
+ at org.apache.hadoop.hive.ql.metadata.Hive.getInternal(Hive.java:312)
+ at org.apache.hadoop.hive.ql.metadata.Hive.get(Hive.java:288)
+ at org.apache.spark.sql.hive.client.HiveClientImpl.client(HiveClientImpl.scala:260)
+ at org.apache.spark.sql.hive.client.HiveClientImpl.$anonfun$withHiveState$1(HiveClientImpl.scala:286)
+ at org.apache.spark.sql.hive.client.HiveClientImpl.liftedTree1$1(HiveClientImpl.scala:227)
+ at org.apache.spark.sql.hive.client.HiveClientImpl.retryLocked(HiveClientImpl.scala:226)
+ at org.apache.spark.sql.hive.client.HiveClientImpl.withHiveState(HiveClientImpl.scala:276)
+ at org.apache.spark.sql.hive.client.HiveClientImpl.databaseExists(HiveClientImpl.scala:389)
+ at org.apache.spark.sql.hive.HiveExternalCatalog.$anonfun$databaseExists$1(HiveExternalCatalog.scala:221)
+ at scala.runtime.java8.JFunction0$mcZ$sp.apply(JFunction0$mcZ$sp.java:23)
+ at org.apache.spark.sql.hive.HiveExternalCatalog.withClient(HiveExternalCatalog.scala:99)
+ at org.apache.spark.sql.hive.HiveExternalCatalog.databaseExists(HiveExternalCatalog.scala:221)
+ at org.apache.spark.sql.internal.SharedState.externalCatalog$lzycompute(SharedState.scala:137)
+ at org.apache.spark.sql.internal.SharedState.externalCatalog(SharedState.scala:127)
+ at org.apache.spark.sql.internal.SharedState.globalTempViewManager$lzycompute(SharedState.scala:157)
+ at org.apache.spark.sql.internal.SharedState.globalTempViewManager(SharedState.scala:155)
+ at org.apache.spark.sql.hive.HiveSessionStateBuilder.$anonfun$catalog$2(HiveSessionStateBuilder.scala:59)
+ at org.apache.spark.sql.catalyst.catalog.SessionCatalog.globalTempViewManager$lzycompute(SessionCatalog.scala:93)
+ at org.apache.spark.sql.catalyst.catalog.SessionCatalog.globalTempViewManager(SessionCatalog.scala:93)
+ at org.apache.spark.sql.catalyst.catalog.SessionCatalog.setCurrentDatabase(SessionCatalog.scala:260)
+ at org.apache.spark.sql.connector.catalog.CatalogManager.setCurrentNamespace(CatalogManager.scala:113)
+ at org.apache.spark.sql.execution.datasources.v2.SetCatalogAndNamespaceExec.$anonfun$run$2(SetCatalogAndNamespaceExec.scala:36)
+ at org.apache.spark.sql.execution.datasources.v2.SetCatalogAndNamespaceExec.$anonfun$run$2$adapted(SetCatalogAndNamespaceExec.scala:36)
+ at scala.Option.map(Option.scala:230)
+ at org.apache.spark.sql.execution.datasources.v2.SetCatalogAndNamespaceExec.run(SetCatalogAndNamespaceExec.scala:36)
+ at org.apache.spark.sql.execution.datasources.v2.V2CommandExec.result$lzycompute(V2CommandExec.scala:39)
+ at org.apache.spark.sql.execution.datasources.v2.V2CommandExec.result(V2CommandExec.scala:39)
+ at org.apache.spark.sql.execution.datasources.v2.V2CommandExec.executeCollect(V2CommandExec.scala:45)
+ at org.apache.spark.sql.Dataset.$anonfun$logicalPlan$1(Dataset.scala:229)
+ at org.apache.spark.sql.Dataset.$anonfun$withAction$1(Dataset.scala:3616)
+ at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$5(SQLExecution.scala:100)
+ at org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:160)
+ at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$1(SQLExecution.scala:87)
+ at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:763)
+ at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:64)
+ at org.apache.spark.sql.Dataset.withAction(Dataset.scala:3614)
+ at org.apache.spark.sql.Dataset.(Dataset.scala:229)
+ at org.apache.spark.sql.Dataset$.$anonfun$ofRows$2(Dataset.scala:100)
+ at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:763)
+ at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:97)
+ at org.apache.spark.sql.SparkSession.$anonfun$sql$1(SparkSession.scala:606)
+ at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:763)
+ at org.apache.spark.sql.SparkSession.sql(SparkSession.scala:601)
+ at com.atguigu.userprofile.ml.app.BusiGenderApp$.main(BusiGenderApp.scala:69)
+ at com.atguigu.userprofile.ml.app.BusiGenderApp.main(BusiGenderApp.scala)
+Caused by: java.net.ConnectException: Connection timed out: connect
+ at java.net.DualStackPlainSocketImpl.connect0(Native Method)
+ at java.net.DualStackPlainSocketImpl.socketConnect(DualStackPlainSocketImpl.java:75)
+ at java.net.AbstractPlainSocketImpl.doConnect(AbstractPlainSocketImpl.java:476)
+ at java.net.AbstractPlainSocketImpl.connectToAddress(AbstractPlainSocketImpl.java:218)
+ at java.net.AbstractPlainSocketImpl.connect(AbstractPlainSocketImpl.java:200)
+ at java.net.PlainSocketImpl.connect(PlainSocketImpl.java:162)
+ at java.net.SocksSocketImpl.connect(SocksSocketImpl.java:394)
+ at java.net.Socket.connect(Socket.java:606)
+ at com.mysql.jdbc.StandardSocketFactory.connect(StandardSocketFactory.java:211)
+ at com.mysql.jdbc.MysqlIO.(MysqlIO.java:301)
+ ... 126 more
+------
+
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
+ at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
+ at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
+ at com.jolbox.bonecp.PoolUtil.generateSQLException(PoolUtil.java:192)
+ at com.jolbox.bonecp.BoneCP.(BoneCP.java:422)
+ at com.jolbox.bonecp.BoneCPDataSource.getConnection(BoneCPDataSource.java:120)
+ at org.datanucleus.store.rdbms.ConnectionFactoryImpl$ManagedConnectionImpl.getConnection(ConnectionFactoryImpl.java:483)
+ ... 108 more
+Caused by: com.mysql.jdbc.exceptions.jdbc4.CommunicationsException: Communications link failure
+
+The last packet sent successfully to the server was 0 milliseconds ago. The driver has not received any packets from the server.
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
+ at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
+ at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
+ at com.mysql.jdbc.Util.handleNewInstance(Util.java:425)
+ at com.mysql.jdbc.SQLError.createCommunicationsException(SQLError.java:990)
+ at com.mysql.jdbc.MysqlIO.(MysqlIO.java:342)
+ at com.mysql.jdbc.ConnectionImpl.coreConnect(ConnectionImpl.java:2197)
+ at com.mysql.jdbc.ConnectionImpl.connectOneTryOnly(ConnectionImpl.java:2230)
+ at com.mysql.jdbc.ConnectionImpl.createNewIO(ConnectionImpl.java:2025)
+ at com.mysql.jdbc.ConnectionImpl.(ConnectionImpl.java:778)
+ at com.mysql.jdbc.JDBC4Connection.(JDBC4Connection.java:47)
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
+ at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
+ at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
+ at com.mysql.jdbc.Util.handleNewInstance(Util.java:425)
+ at com.mysql.jdbc.ConnectionImpl.getInstance(ConnectionImpl.java:386)
+ at com.mysql.jdbc.NonRegisteringDriver.connect(NonRegisteringDriver.java:330)
+ at java.sql.DriverManager.getConnection(DriverManager.java:664)
+ at java.sql.DriverManager.getConnection(DriverManager.java:208)
+ at com.jolbox.bonecp.BoneCP.obtainRawInternalConnection(BoneCP.java:361)
+ at com.jolbox.bonecp.BoneCP.(BoneCP.java:416)
+ ... 110 more
+Caused by: java.net.ConnectException: Connection timed out: connect
+ at java.net.DualStackPlainSocketImpl.connect0(Native Method)
+ at java.net.DualStackPlainSocketImpl.socketConnect(DualStackPlainSocketImpl.java:75)
+ at java.net.AbstractPlainSocketImpl.doConnect(AbstractPlainSocketImpl.java:476)
+ at java.net.AbstractPlainSocketImpl.connectToAddress(AbstractPlainSocketImpl.java:218)
+ at java.net.AbstractPlainSocketImpl.connect(AbstractPlainSocketImpl.java:200)
+ at java.net.PlainSocketImpl.connect(PlainSocketImpl.java:162)
+ at java.net.SocksSocketImpl.connect(SocksSocketImpl.java:394)
+ at java.net.Socket.connect(Socket.java:606)
+ at com.mysql.jdbc.StandardSocketFactory.connect(StandardSocketFactory.java:211)
+ at com.mysql.jdbc.MysqlIO.(MysqlIO.java:301)
+ ... 126 more
+Nested Throwables StackTrace:
+java.sql.SQLException: Unable to open a test connection to the given database. JDBC url = jdbc:mysql://Ding202:3306/metastore?createDatabaseIfNotExist=true&characterEncoding=utf-8&useSSL=false, username = root. Terminating connection pool (set lazyInit to true if you expect to start your database after your app). Original Exception: ------
+com.mysql.jdbc.exceptions.jdbc4.CommunicationsException: Communications link failure
+
+The last packet sent successfully to the server was 0 milliseconds ago. The driver has not received any packets from the server.
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
+ at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
+ at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
+ at com.mysql.jdbc.Util.handleNewInstance(Util.java:425)
+ at com.mysql.jdbc.SQLError.createCommunicationsException(SQLError.java:990)
+ at com.mysql.jdbc.MysqlIO.(MysqlIO.java:342)
+ at com.mysql.jdbc.ConnectionImpl.coreConnect(ConnectionImpl.java:2197)
+ at com.mysql.jdbc.ConnectionImpl.connectOneTryOnly(ConnectionImpl.java:2230)
+ at com.mysql.jdbc.ConnectionImpl.createNewIO(ConnectionImpl.java:2025)
+ at com.mysql.jdbc.ConnectionImpl.(ConnectionImpl.java:778)
+ at com.mysql.jdbc.JDBC4Connection.(JDBC4Connection.java:47)
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
+ at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
+ at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
+ at com.mysql.jdbc.Util.handleNewInstance(Util.java:425)
+ at com.mysql.jdbc.ConnectionImpl.getInstance(ConnectionImpl.java:386)
+ at com.mysql.jdbc.NonRegisteringDriver.connect(NonRegisteringDriver.java:330)
+ at java.sql.DriverManager.getConnection(DriverManager.java:664)
+ at java.sql.DriverManager.getConnection(DriverManager.java:208)
+ at com.jolbox.bonecp.BoneCP.obtainRawInternalConnection(BoneCP.java:361)
+ at com.jolbox.bonecp.BoneCP.(BoneCP.java:416)
+ at com.jolbox.bonecp.BoneCPDataSource.getConnection(BoneCPDataSource.java:120)
+ at org.datanucleus.store.rdbms.ConnectionFactoryImpl$ManagedConnectionImpl.getConnection(ConnectionFactoryImpl.java:483)
+ at org.datanucleus.store.rdbms.RDBMSStoreManager.(RDBMSStoreManager.java:297)
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
+ at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
+ at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
+ at org.datanucleus.plugin.NonManagedPluginRegistry.createExecutableExtension(NonManagedPluginRegistry.java:606)
+ at org.datanucleus.plugin.PluginManager.createExecutableExtension(PluginManager.java:301)
+ at org.datanucleus.NucleusContextHelper.createStoreManagerForProperties(NucleusContextHelper.java:133)
+ at org.datanucleus.PersistenceNucleusContextImpl.initialise(PersistenceNucleusContextImpl.java:422)
+ at org.datanucleus.api.jdo.JDOPersistenceManagerFactory.freezeConfiguration(JDOPersistenceManagerFactory.java:817)
+ at org.datanucleus.api.jdo.JDOPersistenceManagerFactory.createPersistenceManagerFactory(JDOPersistenceManagerFactory.java:334)
+ at org.datanucleus.api.jdo.JDOPersistenceManagerFactory.getPersistenceManagerFactory(JDOPersistenceManagerFactory.java:213)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at javax.jdo.JDOHelper$16.run(JDOHelper.java:1965)
+ at java.security.AccessController.doPrivileged(Native Method)
+ at javax.jdo.JDOHelper.invoke(JDOHelper.java:1960)
+ at javax.jdo.JDOHelper.invokeGetPersistenceManagerFactoryOnImplementation(JDOHelper.java:1166)
+ at javax.jdo.JDOHelper.getPersistenceManagerFactory(JDOHelper.java:808)
+ at javax.jdo.JDOHelper.getPersistenceManagerFactory(JDOHelper.java:701)
+ at org.apache.hadoop.hive.metastore.ObjectStore.getPMF(ObjectStore.java:521)
+ at org.apache.hadoop.hive.metastore.ObjectStore.getPersistenceManager(ObjectStore.java:550)
+ at org.apache.hadoop.hive.metastore.ObjectStore.initializeHelper(ObjectStore.java:405)
+ at org.apache.hadoop.hive.metastore.ObjectStore.initialize(ObjectStore.java:342)
+ at org.apache.hadoop.hive.metastore.ObjectStore.setConf(ObjectStore.java:303)
+ at org.apache.hadoop.util.ReflectionUtils.setConf(ReflectionUtils.java:76)
+ at org.apache.hadoop.util.ReflectionUtils.newInstance(ReflectionUtils.java:136)
+ at org.apache.hadoop.hive.metastore.RawStoreProxy.(RawStoreProxy.java:58)
+ at org.apache.hadoop.hive.metastore.RawStoreProxy.getProxy(RawStoreProxy.java:67)
+ at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.newRawStoreForConf(HiveMetaStore.java:628)
+ at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.getMSForConf(HiveMetaStore.java:594)
+ at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.getMS(HiveMetaStore.java:588)
+ at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.createDefaultDB(HiveMetaStore.java:659)
+ at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.init(HiveMetaStore.java:431)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.hadoop.hive.metastore.RetryingHMSHandler.invokeInternal(RetryingHMSHandler.java:148)
+ at org.apache.hadoop.hive.metastore.RetryingHMSHandler.invoke(RetryingHMSHandler.java:107)
+ at org.apache.hadoop.hive.metastore.RetryingHMSHandler.(RetryingHMSHandler.java:79)
+ at org.apache.hadoop.hive.metastore.RetryingHMSHandler.getProxy(RetryingHMSHandler.java:92)
+ at org.apache.hadoop.hive.metastore.HiveMetaStore.newRetryingHMSHandler(HiveMetaStore.java:6902)
+ at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.(HiveMetaStoreClient.java:164)
+ at org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.(SessionHiveMetaStoreClient.java:70)
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
+ at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
+ at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
+ at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1707)
+ at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.(RetryingMetaStoreClient.java:83)
+ at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:133)
+ at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:104)
+ at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:3600)
+ at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3652)
+ at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3632)
+ at org.apache.hadoop.hive.ql.metadata.Hive.getAllFunctions(Hive.java:3894)
+ at org.apache.hadoop.hive.ql.metadata.Hive.reloadFunctions(Hive.java:248)
+ at org.apache.hadoop.hive.ql.metadata.Hive.registerAllFunctionsOnce(Hive.java:231)
+ at org.apache.hadoop.hive.ql.metadata.Hive.(Hive.java:388)
+ at org.apache.hadoop.hive.ql.metadata.Hive.create(Hive.java:332)
+ at org.apache.hadoop.hive.ql.metadata.Hive.getInternal(Hive.java:312)
+ at org.apache.hadoop.hive.ql.metadata.Hive.get(Hive.java:288)
+ at org.apache.spark.sql.hive.client.HiveClientImpl.client(HiveClientImpl.scala:260)
+ at org.apache.spark.sql.hive.client.HiveClientImpl.$anonfun$withHiveState$1(HiveClientImpl.scala:286)
+ at org.apache.spark.sql.hive.client.HiveClientImpl.liftedTree1$1(HiveClientImpl.scala:227)
+ at org.apache.spark.sql.hive.client.HiveClientImpl.retryLocked(HiveClientImpl.scala:226)
+ at org.apache.spark.sql.hive.client.HiveClientImpl.withHiveState(HiveClientImpl.scala:276)
+ at org.apache.spark.sql.hive.client.HiveClientImpl.databaseExists(HiveClientImpl.scala:389)
+ at org.apache.spark.sql.hive.HiveExternalCatalog.$anonfun$databaseExists$1(HiveExternalCatalog.scala:221)
+ at scala.runtime.java8.JFunction0$mcZ$sp.apply(JFunction0$mcZ$sp.java:23)
+ at org.apache.spark.sql.hive.HiveExternalCatalog.withClient(HiveExternalCatalog.scala:99)
+ at org.apache.spark.sql.hive.HiveExternalCatalog.databaseExists(HiveExternalCatalog.scala:221)
+ at org.apache.spark.sql.internal.SharedState.externalCatalog$lzycompute(SharedState.scala:137)
+ at org.apache.spark.sql.internal.SharedState.externalCatalog(SharedState.scala:127)
+ at org.apache.spark.sql.internal.SharedState.globalTempViewManager$lzycompute(SharedState.scala:157)
+ at org.apache.spark.sql.internal.SharedState.globalTempViewManager(SharedState.scala:155)
+ at org.apache.spark.sql.hive.HiveSessionStateBuilder.$anonfun$catalog$2(HiveSessionStateBuilder.scala:59)
+ at org.apache.spark.sql.catalyst.catalog.SessionCatalog.globalTempViewManager$lzycompute(SessionCatalog.scala:93)
+ at org.apache.spark.sql.catalyst.catalog.SessionCatalog.globalTempViewManager(SessionCatalog.scala:93)
+ at org.apache.spark.sql.catalyst.catalog.SessionCatalog.setCurrentDatabase(SessionCatalog.scala:260)
+ at org.apache.spark.sql.connector.catalog.CatalogManager.setCurrentNamespace(CatalogManager.scala:113)
+ at org.apache.spark.sql.execution.datasources.v2.SetCatalogAndNamespaceExec.$anonfun$run$2(SetCatalogAndNamespaceExec.scala:36)
+ at org.apache.spark.sql.execution.datasources.v2.SetCatalogAndNamespaceExec.$anonfun$run$2$adapted(SetCatalogAndNamespaceExec.scala:36)
+ at scala.Option.map(Option.scala:230)
+ at org.apache.spark.sql.execution.datasources.v2.SetCatalogAndNamespaceExec.run(SetCatalogAndNamespaceExec.scala:36)
+ at org.apache.spark.sql.execution.datasources.v2.V2CommandExec.result$lzycompute(V2CommandExec.scala:39)
+ at org.apache.spark.sql.execution.datasources.v2.V2CommandExec.result(V2CommandExec.scala:39)
+ at org.apache.spark.sql.execution.datasources.v2.V2CommandExec.executeCollect(V2CommandExec.scala:45)
+ at org.apache.spark.sql.Dataset.$anonfun$logicalPlan$1(Dataset.scala:229)
+ at org.apache.spark.sql.Dataset.$anonfun$withAction$1(Dataset.scala:3616)
+ at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$5(SQLExecution.scala:100)
+ at org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:160)
+ at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$1(SQLExecution.scala:87)
+ at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:763)
+ at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:64)
+ at org.apache.spark.sql.Dataset.withAction(Dataset.scala:3614)
+ at org.apache.spark.sql.Dataset.(Dataset.scala:229)
+ at org.apache.spark.sql.Dataset$.$anonfun$ofRows$2(Dataset.scala:100)
+ at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:763)
+ at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:97)
+ at org.apache.spark.sql.SparkSession.$anonfun$sql$1(SparkSession.scala:606)
+ at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:763)
+ at org.apache.spark.sql.SparkSession.sql(SparkSession.scala:601)
+ at com.atguigu.userprofile.ml.app.BusiGenderApp$.main(BusiGenderApp.scala:69)
+ at com.atguigu.userprofile.ml.app.BusiGenderApp.main(BusiGenderApp.scala)
+Caused by: java.net.ConnectException: Connection timed out: connect
+ at java.net.DualStackPlainSocketImpl.connect0(Native Method)
+ at java.net.DualStackPlainSocketImpl.socketConnect(DualStackPlainSocketImpl.java:75)
+ at java.net.AbstractPlainSocketImpl.doConnect(AbstractPlainSocketImpl.java:476)
+ at java.net.AbstractPlainSocketImpl.connectToAddress(AbstractPlainSocketImpl.java:218)
+ at java.net.AbstractPlainSocketImpl.connect(AbstractPlainSocketImpl.java:200)
+ at java.net.PlainSocketImpl.connect(PlainSocketImpl.java:162)
+ at java.net.SocksSocketImpl.connect(SocksSocketImpl.java:394)
+ at java.net.Socket.connect(Socket.java:606)
+ at com.mysql.jdbc.StandardSocketFactory.connect(StandardSocketFactory.java:211)
+ at com.mysql.jdbc.MysqlIO.(MysqlIO.java:301)
+ ... 126 more
+------
+
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
+ at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
+ at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
+ at com.jolbox.bonecp.PoolUtil.generateSQLException(PoolUtil.java:192)
+ at com.jolbox.bonecp.BoneCP.(BoneCP.java:422)
+ at com.jolbox.bonecp.BoneCPDataSource.getConnection(BoneCPDataSource.java:120)
+ at org.datanucleus.store.rdbms.ConnectionFactoryImpl$ManagedConnectionImpl.getConnection(ConnectionFactoryImpl.java:483)
+ at org.datanucleus.store.rdbms.RDBMSStoreManager.(RDBMSStoreManager.java:297)
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
+ at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
+ at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
+ at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
+ at org.datanucleus.plugin.NonManagedPluginRegistry.createExecutableExtension(NonManagedPluginRegistry.java:606)
+ at org.datanucleus.plugin.PluginManager.createExecutableExtension(PluginManager.java:301)
+ at org.datanucleus.NucleusContextHelper.createStoreManagerForProperties(NucleusContextHelper.java:133)
+ at org.datanucleus.PersistenceNucleusContextImpl.initialise(PersistenceNucleusContextImpl.java:422)
+ at org.datanucleus.api.jdo.JDOPersistenceManagerFactory.freezeConfiguration(JDOPersistenceManagerFactory.java:817)
+ at org.datanucleus.api.jdo.JDOPersistenceManagerFactory.createPersistenceManagerFactory(JDOPersistenceManagerFactory.java:334)
+ at org.datanucleus.api.jdo.JDOPersistenceManagerFactory.getPersistenceManagerFactory(JDOPersistenceManagerFactory.java:213)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at javax.jdo.JDOHelper$16.run(JDOHelper.java:1965)
+ at java.security.AccessController.doPrivileged(Native Method)
+ at javax.jdo.JDOHelper.invoke(JDOHelper.java:1960)
+ at javax.jdo.JDOHelper.invokeGetPersistenceManagerFactoryOnImplementation(JDOHelper.java:1166)
+ at javax.jdo.JDOHelper.getPersistenceManagerFactory(JDOHelper.java:808)
+ at javax.jdo.JDOHelper.getPersistenceManagerFactory(JDOHelper.java:701)
+ at org.apache.hadoop.hive.metastore.ObjectStore.getPMF(ObjectStore.java:521)
+ at org.apache.hadoop.hive.metastore.ObjectStore.getPersistenceManager(ObjectStore.java:550)
+ at org.apache.hadoop.hive.metastore.ObjectStore.initializeHelper(ObjectStore.java:405)
+ at org.apache.hadoop.hive.metastore.ObjectStore.initialize(ObjectStore.java:342)
+ at org.apache.hadoop.hive.metastore.ObjectStore.setConf(ObjectStore.java:303)
+ at org.apache.hadoop.util.ReflectionUtils.setConf(ReflectionUtils.java:76)
+ at org.apache.hadoop.util.ReflectionUtils.newInstance(ReflectionUtils.java:136)
+ at org.apache.hadoop.hive.metastore.RawStoreProxy.(RawStoreProxy.java:58)
+ at org.apache.hadoop.hive.metastore.RawStoreProxy.getProxy(RawStoreProxy.java:67)
+ at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.newRawStoreForConf(HiveMetaStore.java:628)
+ at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.getMSForConf(HiveMetaStore.java:594)
+ at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.getMS(HiveMetaStore.java:588)
+ at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.createDefaultDB(HiveMetaStore.java:659)
+ at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.init(HiveMetaStore.java:431)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:498)
+ at org.apache.hadoop.hive.metastore.RetryingHMSHandler.invokeInternal(RetryingHMSHandler.java:148)
+ at org.apache.hadoop.hive.metastore.RetryingHMSHandler.invoke(RetryingHMSHandler.java:107)
+ at org.apache.hadoop.hive.metastore.RetryingHMSHandler.