我使用下面的代碼,嘗試通過 Spark 訪問遠程服務器上的 Hive2 表,結果報錯:
import org.apache.spark.SparkContext, org.apache.spark.SparkConf, org.apache.spark.sql._
import com.typesafe.config._
import java.io._
import org.apache.hadoop.fs._
import org.apache.spark.sql.hive.HiveContext
import org.apache.spark.sql.Row
import org.apache.spark.sql.SparkSession
/** Connects a local Spark driver to a remote MySQL-backed Hive metastore
  * and prints the contents of two sample tables.
  *
  * NOTE(review): the reported errors ("connection to metastore refused",
  * "hive-metastore.service not found") indicate that NO thrift metastore
  * service is running on the remote host. With the `javax.jdo.option.*`
  * properties below, Spark's embedded metastore client talks to the MySQL
  * metastore DB directly, so `hive.metastore.uris` must stay UNSET and the
  * MySQL JDBC driver jar must be on the driver classpath — confirm both.
  */
object stack {
  def main(args: Array[String]): Unit = {
    // Single source of truth for the warehouse path. The original code mixed
    // "/usr/hive/warehouse" and "/user/hive/warehouse"; the HDFS default
    // location is /user/hive/warehouse.
    val warehouseLocation = "/user/hive/warehouse"

    // All properties are passed once through the builder. The previous
    // System.setProperty(...) calls duplicated these settings and are gone.
    val spark = SparkSession.builder()
      .master("local")
      .appName("spark remote")
      // --- Metastore database: direct JDBC, no thrift service required ---
      .config("javax.jdo.option.ConnectionURL",
        "jdbc:mysql://sparkserver:3306/metastore?createDatabaseIfNotExist=true")
      .config("javax.jdo.option.ConnectionDriverName", "com.mysql.jdbc.Driver")
      .config("javax.jdo.option.ConnectionUserName", "hiveroot")
      .config("javax.jdo.option.ConnectionPassword", "hivepassword")
      // Enable ONLY if a thrift metastore service is actually running remotely:
      // .config("hive.metastore.uris", "thrift://sparkserver:9083")
      // --- Warehouse / scratch locations ---
      .config("spark.sql.warehouse.dir", warehouseLocation)
      .config("hive.metastore.warehouse.dir", warehouseLocation)
      // ${user.name} is substituted by Hive's own variable expansion,
      // not by Scala — intentional, do not interpolate here.
      .config("hive.exec.scratchdir", "/tmp/hive/${user.name}")
      .config("hive.querylog.location", "/tmp/hivequerylogs/${user.name}")
      // --- HiveServer2 / session behaviour ---
      .config("hive.support.concurrency", "false")
      .config("hive.server2.enable.doAs", "true")
      .config("hive.server2.authentication", "PAM")
      .config("hive.server2.custom.authentication.class",
        "org.apache.hive.service.auth.PamAuthenticationProvider")
      .config("hive.server2.authentication.pam.services", "sshd,sudo")
      .config("hive.stats.dbclass", "jdbc:mysql")
      .config("hive.stats.jdbcdriver", "com.mysql.jdbc.Driver")
      .config("hive.session.history.enabled", "true")
      .config("hive.metastore.schema.verification", "false")
      .config("hive.optimize.sort.dynamic.partition", "false")
      .config("hive.optimize.insert.dest.volume", "false")
      // --- DataNucleus schema handling ---
      .config("datanucleus.fixedDatastore", "true")
      .config("datanucleus.autoCreateSchema", "false")
      .config("datanucleus.schema.autoCreateAll", "true")
      .config("datanucleus.schema.validateConstraints", "true")
      .config("datanucleus.schema.validateColumns", "true")
      .config("datanucleus.schema.validateTables", "true")
      // --- HDFS ---
      // fs.defaultFS replaces the deprecated fs.default.name.
      .config("fs.defaultFS", "hdfs://sparkserver:54310")
      .config("dfs.namenode.name.dir", "/usr/local/hadoop_tmp/hdfs/namenode")
      // Was "dfs.datanode.name.dir", which is not a real HDFS property;
      // the datanode storage property is dfs.datanode.data.dir.
      .config("dfs.datanode.data.dir", "/usr/local/hadoop_tmp/hdfs/datanode")
      .enableHiveSupport()
      .getOrCreate()

    import spark.implicits._
    import spark.sql

    // Dump both sample tables; collect() pulls everything to the driver,
    // which is fine only for small tables.
    sql("select * from sample.source").collect.foreach(println)
    sql("select * from sample.destination").collect.foreach(println)
  }
}
通過遠程 Hive 服務器連接 Metastore 時,連接請求被拒絕。
錯誤:無法啓動 hive-metastore.service:未找到 hive-metastore.service 單元。
謝謝!
Raktotpal,我有兩個不同的 Hive 安裝:一個在 localhost 上,設置了 hive.metastore.uris,我從本地主機訪問 Hive 時工作正常(我的 Spark 也運行在本地主機上);但現在我嘗試連接的是一台已安裝 Hive 的遠程機器,其 metastore 地址是 thrift://sparkserver:9083 –
Vickyster
是的,我明白了。那就使用 hive-site.xml,把我答案中提到的所有屬性都放進去——這樣程序就能連接到該 Hive 服務器。Hive Metastore 服務在該遠程 Hive 服務器上根本沒有運行,所以不需要單獨連接 Metastore 服務。 –
但我不想使用 hive-site.xml 文件,而是想在 config 中設置所有屬性。我試一下再告訴你。 – Vickyster