This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
// NOTE(review): truncated gist excerpt — usage example for the custom "hive-with-jdbc"
// Spark data source (registered by HiveRelationProvider below). The terminating
// .load() call is cut off by the page extraction, and the trailing " | |" on each
// line is an HTML-table artifact of the scrape, not part of the original code.
// Options shown: target table, partition filter, HiveServer2 JDBC endpoint/credentials,
// metastore MySQL endpoint/credentials, and a defaultFs override (empty here —
// presumably falls back to the cluster default FS; TODO confirm against HiveRelation).
Dataset<Row> hiveDf = spark.read().format("hive-with-jdbc") | |
.option("dbTable", "mc.crawl_youtube") | |
.option("conditionClause", "where year = '2020' and month = '02' and day = '19'") | |
.option("hiveJdbcUrl", "jdbc:hive2://mc-d01.mykidong.io:10000") | |
.option("hiveJdbcUser", "xxxx") | |
.option("hiveJdbcPassword", "xxxx") | |
.option("hiveMetastoreUrl", "jdbc:mysql://mc-d01.mykidong.io:3306/hive") | |
.option("hiveMetastoreUser", "xxxx") | |
.option("hiveMetastorePassword", "xxxx") | |
.option("defaultFs", "") |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
// NOTE(review): truncated method fragment — the body after the option reads is cut off
// by the page extraction, so only the connection-parameter setup is visible.
// Reads the data source options (table, HiveServer2 JDBC and metastore MySQL
// credentials) out of the parametersAsJava map using the JdbcHiveOptions constant keys.
// Presumably the missing remainder resolves the Hive table's columns into a Spark
// StructType stored in the schema field — TODO confirm against the full source.
private void buildSchema() | |
{ | |
String dbTable = parametersAsJava.get(JdbcHiveOptions.dbTable); | |
String hiveJdbcUrl = parametersAsJava.get(JdbcHiveOptions.hiveJdbcUrl); | |
String hiveJdbcUser = parametersAsJava.get(JdbcHiveOptions.hiveJdbcUser); | |
String hiveJdbcPassword = parametersAsJava.get(JdbcHiveOptions.hiveJdbcPassword); | |
String hiveMetastoreUrl = parametersAsJava.get(JdbcHiveOptions.hiveMetastoreUrl); | |
String hiveMetastoreUser = parametersAsJava.get(JdbcHiveOptions.hiveMetastoreUser); | |
String hiveMetastorePassword = parametersAsJava.get(JdbcHiveOptions.hiveMetastorePassword); |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
// NOTE(review): truncated gist excerpt — usage example for the "jdbc-hive" variant of
// the data source (registered by JdbcHiveRelationProvider below). Like the snippet
// above, the terminating .load() is cut off and " | |" is scrape residue.
// Differs from the "hive-with-jdbc" example only in the format name and the final
// option: "fetchsize" (JDBC result-set fetch size, here "10") instead of "defaultFs".
Dataset<Row> jdbcHiveDf = spark.read().format("jdbc-hive") | |
.option("dbTable", "mc.crawl_youtube") | |
.option("conditionClause", "where year = '2020' and month = '02' and day = '19'") | |
.option("hiveJdbcUrl", "jdbc:hive2://mc-d01.mykidong.io:10000") | |
.option("hiveJdbcUser", "xxxx") | |
.option("hiveJdbcPassword", "xxxx") | |
.option("hiveMetastoreUrl", "jdbc:mysql://mc-d01.mykidong.io:3306/hive") | |
.option("hiveMetastoreUser", "xxxx") | |
.option("hiveMetastorePassword", "xxxx") | |
.option("fetchsize", "10") |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
-- NOTE(review): truncated SQL fragment — cut off inside the inner FROM, so the joined
-- metastore tables' join conditions (and any outer WHERE) are not visible.
-- Visible intent: query the Hive metastore's backing MySQL tables (DBS, TBLS,
-- COLUMNS_V2) to produce, per column, the qualified "db.table" name, the column
-- name, and the Hive type string — presumably the raw material buildSchema() uses
-- to construct the Spark schema; TODO confirm against the full source.
SELECT | |
a.DB_TABLE AS DB_TABLE, | |
a.COLUMN_NAME AS COLUMN_NAME, | |
a.COLUMN_TYPE AS COLUMN_TYPE | |
FROM | |
(SELECT | |
CONCAT(DBS.NAME, '.', TBLS.TBL_NAME) AS DB_TABLE, | |
COLUMNS_V2.COLUMN_NAME AS COLUMN_NAME, | |
COLUMNS_V2.TYPE_NAME AS COLUMN_TYPE | |
FROM |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
// NOTE(review): truncated class fragment — constructor body and the rest of the class
// (schema()/buildScan() overrides required by BaseRelation/TableScan) are cut off.
// Spark SQL relation backing the "hive-with-jdbc" source: BaseRelation + TableScan
// means Spark asks it for a schema and a full-table RDD scan. Fields visible:
// the SQLContext, the resolved StructType schema, the options map converted from
// Scala to a java.util.Map, and a cached Dataset<Row> (df).
public class HiveRelation extends BaseRelation implements Serializable, TableScan { | |
private SQLContext sqlContext; | |
private StructType schema; | |
private java.util.Map<String, String> parametersAsJava; | |
private Dataset<Row> df; | |
public HiveRelation(SQLContext sqlContext, Map<String, String> parameters) | |
{ |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
// NOTE(review): truncated file fragment — only the package, imports, and class
// declaration are visible; createRelation() and shortName() bodies are cut off.
// Entry point for the custom source: RelationProvider lets Spark construct a
// HiveRelation from options, and DataSourceRegister gives it a short format name —
// presumably "hive-with-jdbc", matching the usage snippet above; TODO confirm.
// Note the scala.collection.immutable.Map import: createRelation receives a Scala map.
package mykidong.connector.hive; | |
import org.apache.spark.sql.SQLContext; | |
import org.apache.spark.sql.sources.BaseRelation; | |
import org.apache.spark.sql.sources.DataSourceRegister; | |
import org.apache.spark.sql.sources.RelationProvider; | |
import scala.collection.immutable.Map; | |
public class HiveRelationProvider implements RelationProvider, DataSourceRegister { |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
// NOTE(review): truncated class fragment — constructor body and the BaseRelation/
// TableScan overrides are cut off by the extraction.
// Relation for the "jdbc-hive" source variant. Unlike HiveRelation it holds no
// cached Dataset; instead it keeps a map of column name -> HiveMetaResolver
// HiveMetadata, presumably the metastore-derived type info used to build the
// schema and decode JDBC rows — TODO confirm against the full source.
public class JdbcHiveRelation extends BaseRelation implements Serializable, TableScan { | |
private SQLContext sqlContext; | |
private StructType schema; | |
private java.util.Map<String, String> parametersAsJava; | |
private java.util.Map<String, HiveMetaResolver.HiveMetadata> hiveMetadataMap; | |
public JdbcHiveRelation(SQLContext sqlContext, Map<String, String> parameters) | |
{ |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
// NOTE(review): truncated file fragment — class body (createRelation/shortName) cut off.
// Provider for the "jdbc-hive" variant, structurally parallel to HiveRelationProvider
// but in a different package (mykidong.datasources.jdbc.hive). Presumably its
// shortName() returns "jdbc-hive", matching the second usage snippet — TODO confirm.
package mykidong.datasources.jdbc.hive; | |
import org.apache.spark.sql.SQLContext; | |
import org.apache.spark.sql.sources.BaseRelation; | |
import org.apache.spark.sql.sources.DataSourceRegister; | |
import org.apache.spark.sql.sources.RelationProvider; | |
import scala.collection.immutable.Map; | |
public class JdbcHiveRelationProvider implements RelationProvider, DataSourceRegister { |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
// NOTE(review): truncated class fragment — everything after the field declarations
// (constructor, the metadata-resolution logic, the nested HiveMetadata type referenced
// by JdbcHiveRelation) is cut off by the extraction.
// Holds the same connection parameters the data-source options carry: target table,
// HiveServer2 JDBC endpoint/credentials, and metastore MySQL endpoint/credentials —
// presumably it runs the metastore SQL above to resolve column names/types; TODO confirm.
public class HiveMetaResolver { | |
private String dbTable; | |
private String hiveJdbcUrl; | |
private String hiveJdbcUser; | |
private String hiveJdbcPassword; | |
private String hiveMetastoreUrl; | |
private String hiveMetastoreUser; | |
private String hiveMetastorePassword; |
Newer | Older (pagination links from the original gist page, fused together by the text extraction)