- 将
${hive.home}/conf/hive-site.xml,
${hadoop.home}/etc/hadoop/core-site.xml,
${hadoop.home}/etc/hadoop/hdfs-site.xml
三个文件拷贝到
${spark.home}/conf/
SUCCEED: 1 | |
SUCCEED: 2 | |
SUCCEED: 3 | |
SUCCEED: 4 | |
FAILED: 5 | |
FAILED: 6 | |
SUCCEED: 7 | |
FAILED: 8 | |
FAILED: 9 | |
SUCCEED: 10 |
Blaze Spark | |
1 3.172 4.779 | |
2 8.51 5.634 | |
3 4.251 3.028 | |
4 30.899 19.321 | |
5 7.27 | |
6 7.6 | |
7 5.281 3.535 | |
8 5.579 | |
9 6.432 |
Yijie-MBP18 ॐ ~/pulsar-project/flink-connector-test:(12h37m|git@master) | |
6578 ± jar tvf target/flink-connector-test-1.1-SNAPSHOT.jar| grep GlobalEventExecutor | |
1202 Tue Aug 27 09:08:50 CST 2019 org/apache/pulsar/shade/io/netty/util/concurrent/GlobalEventExecutor$2.class | |
7835 Tue Aug 27 09:08:50 CST 2019 org/apache/pulsar/shade/io/netty/util/concurrent/GlobalEventExecutor.class | |
2546 Tue Aug 27 09:08:50 CST 2019 org/apache/pulsar/shade/io/netty/util/concurrent/GlobalEventExecutor$TaskRunner.class | |
797 Tue Aug 27 09:08:50 CST 2019 org/apache/pulsar/shade/io/netty/util/concurrent/GlobalEventExecutor$1.class | |
Yijie-MBP18 ॐ ~/pulsar-project/flink-connector-test:(12h43m|git@master) | |
6591 ± jar tvf target/flink-connector-test-1.1-SNAPSHOT.jar| grep CommandPing | |
8956 Tue Aug 27 09:08:52 CST 2019 org/apache/pulsar/common/api/proto/PulsarApi$CommandPing.class | |
337 Tue Aug 27 09:08:52 CST 2019 org/apache/pulsar/common/api/proto/PulsarApi$CommandPingOrBuilder.class |
${hive.home}/conf/hive-site.xml,
${hadoop.home}/etc/hadoop/core-site.xml,
${hadoop.home}/etc/hadoop/hdfs-site.xml
三个文件拷贝到
${spark.home}/conf/
Failed to create schema for persistent://public/default/topic-27: Too many open files | |
java.lang.RuntimeException: Failed to create schema for persistent://public/default/topic-27: Too many open files | |
at org.apache.spark.sql.pulsar.PulsarTest$$anonfun$createPulsarSchema$2.apply(PulsarTest.scala:282) | |
at org.apache.spark.sql.pulsar.PulsarTest$$anonfun$createPulsarSchema$2.apply(PulsarTest.scala:276) | |
at org.apache.spark.util.Utils$.tryWithResource(Utils.scala:2543) | |
at org.apache.spark.sql.pulsar.PulsarTest$class.createPulsarSchema(PulsarTest.scala:276) | |
at org.apache.spark.sql.pulsar.PulsarSinkSuite.createPulsarSchema(PulsarSinkSuite.scala:33) | |
at org.apache.spark.sql.pulsar.PulsarSinkSuite$$anonfun$6.check$2(PulsarSinkSuite.scala:265) | |
at org.apache.spark.sql.pulsar.PulsarSinkSuite$$anonfun$6.apply$mcV$sp(PulsarSinkSuite.scala:308) | |
at org.apache.spark.sql.pulsar.PulsarSinkSuite$$anonfun$6.apply(PulsarSinkSuite.scala:259) |
case (RECORD, BinaryType) => (updater, ordinal, value) => | |
val x = value | |
updater.set(ordinal, x) | |
case (RECORD, DateType) => (updater, ordinal, value) => | |
val millis = value match { | |
case ud: java.util.Date => | |
ud.getTime |
/** | |
* Split date (expressed in days since 1.1.1970) into four fields: | |
* year, month (Jan is Month 1), dayInMonth, daysToMonthEnd (0 if it's last day of month). | |
*/ | |
def splitDate(date: Int): (Int, Int, Int, Int) = { | |
var (year, dayInYear) = getYearAndDayInYear(date) | |
val isLeap = isLeapYear(year) | |
if (isLeap && dayInYear == 60) { | |
(year, 2, 29, 0) | |
} else { |
[INFO] ------------------------------------------------------------------------ | |
[INFO] Building Spark Project Hive 1.5.0-SNAPSHOT | |
[INFO] ------------------------------------------------------------------------ | |
[INFO] | |
[INFO] --- maven-enforcer-plugin:1.4:enforce (enforce-versions) @ spark-hive_2.10 --- | |
[INFO] | |
[INFO] --- scala-maven-plugin:3.2.0:add-source (eclipse-add-source) @ spark-hive_2.10 --- | |
[INFO] Add Source directory: /Users/yijie/spark/sql/hive/src/main/scala | |
[INFO] Add Test Source directory: /Users/yijie/spark/sql/hive/src/test/scala | |
[INFO] |