Support custom HBase configuration when using Spark DataFrames.
litao-buptsse committed Jul 19, 2016
1 parent 19b349e commit 2d311da
Showing 1 changed file with 11 additions and 3 deletions.
phoenix-spark/src/main/scala/org/apache/phoenix/spark/DefaultSource.scala
@@ -17,8 +17,9 @@
  */
 package org.apache.phoenix.spark
 
-import org.apache.spark.sql.{SaveMode, DataFrame, SQLContext}
-import org.apache.spark.sql.sources.{CreatableRelationProvider, BaseRelation, RelationProvider}
+import org.apache.hadoop.conf.Configuration
+import org.apache.spark.sql.{DataFrame, SQLContext, SaveMode}
+import org.apache.spark.sql.sources.{BaseRelation, CreatableRelationProvider, RelationProvider}
 import org.apache.phoenix.spark._
 
 class DefaultSource extends RelationProvider with CreatableRelationProvider {
@@ -43,8 +44,15 @@ class DefaultSource extends RelationProvider with CreatableRelationProvider {
 
     verifyParameters(parameters)
 
+    val conf = new Configuration
+    parameters.foreach { kv =>
+      if (kv._1.startsWith("hbase.")) {
+        conf.set(kv._1, kv._2)
+      }
+    }
+
     // Save the DataFrame to Phoenix
-    data.saveToPhoenix(parameters("table"), zkUrl = parameters.get("zkUrl"))
+    data.saveToPhoenix(parameters("table"), conf, parameters.get("zkUrl"))
 
     // Return a relation of the saved data
     createRelation(sqlContext, parameters)
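
For reference, a minimal sketch of how a caller can exercise this change through the phoenix-spark DataFrame API (format "org.apache.phoenix.spark" with "table" and "zkUrl" options); the table name, ZooKeeper quorum, and the particular hbase.* key below are illustrative, not part of the commit:

    import org.apache.spark.sql.SaveMode

    // df is an existing DataFrame whose schema matches the Phoenix table.
    // After this change, any option whose key starts with "hbase." is
    // copied into the Hadoop Configuration passed to saveToPhoenix.
    df.write
      .format("org.apache.phoenix.spark")
      .mode(SaveMode.Overwrite)                    // DefaultSource only supports Overwrite
      .option("table", "OUTPUT_TABLE")             // illustrative target table
      .option("zkUrl", "zkhost:2181")              // illustrative ZooKeeper quorum
      .option("hbase.client.retries.number", "3")  // illustrative hbase.* override
      .save()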