SparkからHBaseへアクセスしてRDDを作成しようとしていますが、importの段階でエラーになります。
HBaseContextを使用したいため、com.cloudera.spark.hbase.HBaseContextクラスが含まれるjarファイルの入手元と、そのjarファイル名を教えて頂けないでしょうか。

環境:
CentOS release 6.2 (Final)
spark-1.5.0-bin-hadoop2.6.tgz
hadoop-2.6.0(cdh5.4.9)

scala> import org.apache.hadoop.hbase.client.{HBaseAdmin, Result}
import org.apache.hadoop.hbase.client.{HBaseAdmin, Result}

scala> import org.apache.hadoop.hbase.{ HBaseConfiguration,HTableDescriptor }
import org.apache.hadoop.hbase.{HBaseConfiguration, HTableDescriptor}

scala> import org.apache.hadoop.hbase.mapreduce.TableInputFormat
import org.apache.hadoop.hbase.mapreduce.TableInputFormat

scala>  import org.apache.hadoop.hbase.io.ImmutableBytesWritable
import org.apache.hadoop.hbase.io.ImmutableBytesWritable

scala> import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.Path

scala>  import org.apache.spark._
import org.apache.spark._

scala>

scala> val conf = HBaseConfiguration.create()
conf: org.apache.hadoop.conf.Configuration = Configuration: core-default.xml, core-site.xml, mapred-default.xml, mapred-site.xml, yarn-default.xml, yarn-site.xml, hdfs-default.xml, hdfs-site.xml, hbase-default.xml, hbase-site.xml

scala> conf.set( "hbase.zookeeper.quorum", "spark-hbase-master" )

scala> conf.set( "hbase.zookeeper.property.clientPort", "2181" )


scala> conf.addResource(new Path("/etc/hbase/conf/core-site.xml"))


scala> conf.addResource(new Path("/etc/hbase/conf/hbase-site.xml"))


scala> val tableName = "japan"

tableName: String = japan


scala> val hbaseContext = new HBaseContext(sc, conf)
<console>:31: error: not found: type HBaseContext
   val hbaseContext = new HBaseContext(sc, conf)
                          ^

scala> import com.cloudera.spark.hbase.HBaseContext
<console>:27: error: object cloudera is not a member of package com
   import com.cloudera.spark.hbase.HBaseContext
              ^