Commit ae47489 (1 parent: 92e017f)

Commit message: update and fix conflicts

File tree: 8 files changed (+677, -207 lines)


pom.xml

Lines changed: 24 additions & 4 deletions
@@ -127,7 +127,6 @@
     <hbase.version>0.94.6</hbase.version>
     <flume.version>1.4.0</flume.version>
     <zookeeper.version>3.4.5</zookeeper.version>
-    <hive.version>0.12.0-protobuf-2.5</hive.version>
     <parquet.version>1.4.3</parquet.version>
     <jblas.version>1.2.3</jblas.version>
     <jetty.version>8.1.14.v20131031</jetty.version>
@@ -441,7 +440,7 @@
       <dependency>
         <groupId>org.apache.derby</groupId>
         <artifactId>derby</artifactId>
-        <version>10.4.2.0</version>
+        <version>${derby.version}</version>
       </dependency>
       <dependency>
         <groupId>com.codahale.metrics</groupId>
@@ -1272,7 +1271,18 @@
         </dependency>
       </dependencies>
     </profile>
-
+    <profile>
+      <id>hive-default</id>
+      <activation>
+        <property>
+          <name>!hive.version</name>
+        </property>
+      </activation>
+      <properties>
+        <hive.version>0.12.0-protobuf-2.5</hive.version>
+        <derby.version>10.4.2.0</derby.version>
+      </properties>
+    </profile>
     <profile>
       <id>hive</id>
       <activation>
@@ -1282,6 +1292,16 @@
         <module>sql/hive-thriftserver</module>
       </modules>
     </profile>
-
+    <profile>
+      <id>hive-versions</id>
+      <activation>
+        <property>
+          <name>hive.version</name>
+        </property>
+      </activation>
+      <properties>
+        <derby.version>10.10.1.1</derby.version>
+      </properties>
+    </profile>
   </profiles>
 </project>
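
Note on how the new profiles activate: in Maven, <name>!hive.version</name> matches only when the hive.version property is undefined, while <name>hive.version</name> matches whenever it is supplied on the command line. So a build without -Dhive.version gets the default Hive 0.12.0 fork plus Derby 10.4.2.0, and passing -Dhive.version switches the Derby dependency to 10.10.1.1, presumably to match the Derby pulled in by newer Hive releases. Assuming the standard Maven CLI, the two paths would be exercised roughly as:

    mvn -Phive -DskipTests package                        # hive-default: Hive 0.12.0-protobuf-2.5, Derby 10.4.2.0
    mvn -Phive -Dhive.version=0.13.1 -DskipTests package  # hive-versions: caller-chosen Hive, Derby 10.10.1.1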

sql/hive-thriftserver/pom.xml

Lines changed: 135 additions & 15 deletions
@@ -41,27 +41,147 @@
     <artifactId>spark-hive_${scala.binary.version}</artifactId>
     <version>${project.version}</version>
   </dependency>
-  <dependency>
-    <groupId>org.spark-project.hive</groupId>
-    <artifactId>hive-cli</artifactId>
-    <version>${hive.version}</version>
-  </dependency>
-  <dependency>
-    <groupId>org.spark-project.hive</groupId>
-    <artifactId>hive-jdbc</artifactId>
-    <version>${hive.version}</version>
-  </dependency>
-  <dependency>
-    <groupId>org.spark-project.hive</groupId>
-    <artifactId>hive-beeline</artifactId>
-    <version>${hive.version}</version>
-  </dependency>
   <dependency>
     <groupId>org.scalatest</groupId>
     <artifactId>scalatest_${scala.binary.version}</artifactId>
     <scope>test</scope>
   </dependency>
 </dependencies>
+
+<profiles>
+  <profile>
+    <id>hive-default</id>
+    <activation>
+      <property>
+        <name>!hive.version</name>
+      </property>
+    </activation>
+    <dependencies>
+      <dependency>
+        <groupId>org.spark-project.hive</groupId>
+        <artifactId>hive-cli</artifactId>
+        <version>${hive.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>org.spark-project.hive</groupId>
+        <artifactId>hive-jdbc</artifactId>
+        <version>${hive.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>org.spark-project.hive</groupId>
+        <artifactId>hive-beeline</artifactId>
+        <version>${hive.version}</version>
+      </dependency>
+    </dependencies>
+    <build>
+      <plugins>
+        <plugin>
+          <groupId>org.codehaus.mojo</groupId>
+          <artifactId>build-helper-maven-plugin</artifactId>
+          <executions>
+            <execution>
+              <id>add-default-sources</id>
+              <phase>generate-sources</phase>
+              <goals>
+                <goal>add-source</goal>
+              </goals>
+              <configuration>
+                <sources>
+                  <source>v${hive.version}/src/main/scala</source>
+                </sources>
+              </configuration>
+            </execution>
+          </executions>
+        </plugin>
+      </plugins>
+    </build>
+  </profile>
+  <profile>
+    <id>hive</id>
+    <build>
+      <plugins>
+        <plugin>
+          <groupId>org.codehaus.mojo</groupId>
+          <artifactId>build-helper-maven-plugin</artifactId>
+          <executions>
+            <execution>
+              <id>add-scala-test-sources</id>
+              <phase>generate-test-sources</phase>
+              <goals>
+                <goal>add-test-source</goal>
+              </goals>
+              <configuration>
+                <sources>
+                  <source>src/test/scala</source>
+                </sources>
+              </configuration>
+            </execution>
+          </executions>
+        </plugin>
+      </plugins>
+    </build>
+  </profile>
+  <profile>
+    <id>hive-versions</id>
+    <activation>
+      <property>
+        <name>hive.version</name>
+      </property>
+    </activation>
+    <dependencies>
+      <dependency>
+        <groupId>org.spark-project.hive</groupId>
+        <artifactId>hive-cli</artifactId>
+        <version>${hive.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>org.spark-project.hive</groupId>
+        <artifactId>hive-jdbc</artifactId>
+        <version>${hive.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>org.spark-project.hive</groupId>
+        <artifactId>hive-beeline</artifactId>
+        <version>${hive.version}</version>
+      </dependency>
+    </dependencies>
+    <build>
+      <plugins>
+        <plugin>
+          <groupId>org.codehaus.mojo</groupId>
+          <artifactId>build-helper-maven-plugin</artifactId>
+          <executions>
+            <execution>
+              <id>add-v13-sources</id>
+              <phase>generate-sources</phase>
+              <goals>
+                <goal>add-source</goal>
+              </goals>
+              <configuration>
+                <sources>
+                  <source>v${hive.version}/src/main/scala</source>
+                </sources>
+              </configuration>
+            </execution>
+            <execution>
+              <id>add-scala-test-sources</id>
+              <phase>generate-test-sources</phase>
+              <goals>
+                <goal>add-test-source</goal>
+              </goals>
+              <configuration>
+                <sources>
+                  <source>src/test/scala</source>
+                </sources>
+              </configuration>
+            </execution>
+          </executions>
+        </plugin>
+      </plugins>
+    </build>
+  </profile>
+</profiles>
+
 <build>
   <outputDirectory>target/scala-${scala.binary.version}/classes</outputDirectory>
   <testOutputDirectory>target/scala-${scala.binary.version}/test-classes</testOutputDirectory>
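
The interesting piece here is the build-helper-maven-plugin wiring: whichever Hive profile is active adds v${hive.version}/src/main/scala as an extra source root, so version-specific code (the HiveShim referenced in the Scala changes below) lives in a directory named after the Hive version and only the matching variant gets compiled. As a minimal sketch of what such a per-version source might contain (the body is an assumption for illustration, not the actual shim in this commit):

    package org.apache.spark.sql.hive.thriftserver

    import org.apache.hadoop.hive.conf.HiveConf
    import org.apache.hadoop.hive.ql.processors.{CommandProcessor, CommandProcessorFactory}

    // Would live under v0.12.0-protobuf-2.5/src/main/scala
    // (the directory name tracks ${hive.version}).
    private[hive] object HiveShim {
      // Hive 0.12's CommandProcessorFactory.get expects a single command token,
      // so the shim adapts the Array-based call sites to it.
      def getCommandProcessor(cmd: Array[String], conf: HiveConf): CommandProcessor =
        CommandProcessorFactory.get(cmd(0), conf)
    }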
sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/AbstractSparkSQLDriver.scala

Lines changed: 4 additions & 14 deletions
@@ -29,11 +29,11 @@ import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse
 import org.apache.spark.Logging
 import org.apache.spark.sql.hive.{HiveContext, HiveMetastoreTypes}

-private[hive] class SparkSQLDriver(val context: HiveContext = SparkSQLEnv.hiveContext)
-  extends Driver with Logging {
+private[hive] abstract class AbstractSparkSQLDriver(
+    val context: HiveContext = SparkSQLEnv.hiveContext) extends Driver with Logging {

-  private var tableSchema: Schema = _
-  private var hiveResponse: Seq[String] = _
+  private[hive] var tableSchema: Schema = _
+  private[hive] var hiveResponse: Seq[String] = _

   override def init(): Unit = {
   }
@@ -74,16 +74,6 @@ private[hive] class SparkSQLDriver(val context: HiveContext = SparkSQLEnv.hiveCo

   override def getSchema: Schema = tableSchema

-  override def getResults(res: JArrayList[String]): Boolean = {
-    if (hiveResponse == null) {
-      false
-    } else {
-      res.addAll(hiveResponse)
-      hiveResponse = null
-      true
-    }
-  }
-
   override def destroy() {
     super.destroy()
     hiveResponse = null
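
getResults is the one Driver method whose signature differs across Hive versions (Hive 0.13 changed the element type of the result list), so it moves out of the shared base class into per-version concrete drivers, and tableSchema/hiveResponse widen from private to private[hive] so those subclasses can reach them. A sketch of what the 0.12-side concrete driver could look like, assuming it lives in the versioned source tree:

    package org.apache.spark.sql.hive.thriftserver

    import java.util.{ArrayList => JArrayList}

    import scala.collection.JavaConversions._

    import org.apache.spark.sql.hive.HiveContext

    // Hypothetical per-version subclass; the removed getResults body moves here.
    private[hive] class SparkSQLDriver(context: HiveContext = SparkSQLEnv.hiveContext)
      extends AbstractSparkSQLDriver(context) {

      override def getResults(res: JArrayList[String]): Boolean = {
        if (hiveResponse == null) {
          false
        } else {
          res.addAll(hiveResponse)  // legal now that hiveResponse is private[hive]
          hiveResponse = null
          true
        }
      }
    }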

sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLCLIDriver.scala

Lines changed: 3 additions & 2 deletions
@@ -38,6 +38,7 @@ import org.apache.hadoop.hive.shims.ShimLoader
 import org.apache.thrift.transport.TSocket

 import org.apache.spark.Logging
+import org.apache.spark.sql.hive.thriftserver.HiveShim

 private[hive] object SparkSQLCLIDriver {
   private var prompt = "spark-sql"
@@ -116,7 +117,7 @@ private[hive] object SparkSQLCLIDriver {
       }
     }

-    if (!sessionState.isRemoteMode && !ShimLoader.getHadoopShims.usesJobShell()) {
+    if (!sessionState.isRemoteMode) {
       // Hadoop-20 and above - we need to augment classpath using hiveconf
       // components.
       // See also: code in ExecDriver.java
@@ -258,7 +259,7 @@ private[hive] class SparkSQLCLIDriver extends CliDriver with Logging {
     } else {
       var ret = 0
       val hconf = conf.asInstanceOf[HiveConf]
-      val proc: CommandProcessor = CommandProcessorFactory.get(tokens(0), hconf)
+      val proc: CommandProcessor = HiveShim.getCommandProcessor(Array(tokens(0)), hconf)

       if (proc != null) {
         if (proc.isInstanceOf[Driver] || proc.isInstanceOf[SetProcessor]) {
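
Routing through HiveShim.getCommandProcessor keeps this file compilable against either Hive: CommandProcessorFactory.get takes a single String in Hive 0.12 but gained a String[] overload in Hive 0.13, so each versioned source tree adapts the same Array-based call accordingly. The 0.13-side counterpart of the shim sketched above might look like this (again an assumption about the shim's contents):

    package org.apache.spark.sql.hive.thriftserver

    import org.apache.hadoop.hive.conf.HiveConf
    import org.apache.hadoop.hive.ql.processors.{CommandProcessor, CommandProcessorFactory}

    // Would live under the 0.13-era v${hive.version}/src/main/scala tree;
    // Hive 0.13's factory accepts the token array directly.
    private[hive] object HiveShim {
      def getCommandProcessor(cmd: Array[String], conf: HiveConf): CommandProcessor =
        CommandProcessorFactory.get(cmd, conf)
    }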

sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLCLIService.scala

Lines changed: 11 additions & 8 deletions
@@ -24,6 +24,7 @@ import java.util.{List => JList}
 import javax.security.auth.login.LoginException

 import org.apache.commons.logging.Log
+import org.apache.hadoop.security.UserGroupInformation
 import org.apache.hadoop.hive.conf.HiveConf
 import org.apache.hadoop.hive.shims.ShimLoader
 import org.apache.hive.service.Service.STATE
@@ -44,15 +45,17 @@ private[hive] class SparkSQLCLIService(hiveContext: HiveContext)
     val sparkSqlSessionManager = new SparkSQLSessionManager(hiveContext)
     setSuperField(this, "sessionManager", sparkSqlSessionManager)
     addService(sparkSqlSessionManager)
+    var sparkServiceUGI: UserGroupInformation = null

-    try {
-      HiveAuthFactory.loginFromKeytab(hiveConf)
-      val serverUserName = ShimLoader.getHadoopShims
-        .getShortUserName(ShimLoader.getHadoopShims.getUGIForConf(hiveConf))
-      setSuperField(this, "serverUserName", serverUserName)
-    } catch {
-      case e @ (_: IOException | _: LoginException) =>
-        throw new ServiceException("Unable to login to kerberos with given principal/keytab", e)
+    if (ShimLoader.getHadoopShims().isSecurityEnabled()) {
+      try {
+        HiveAuthFactory.loginFromKeytab(hiveConf)
+        sparkServiceUGI = ShimLoader.getHadoopShims.getUGIForConf(hiveConf)
+        HiveShim.setServerUserName(sparkServiceUGI, this)
+      } catch {
+        case e @ (_: IOException | _: LoginException) =>
+          throw new ServiceException("Unable to login to kerberos with given principal/keytab", e)
+      }
     }

     initCompositeService(hiveConf)
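
Two behavioral changes land here: the Kerberos login now only happens when Hadoop security is actually enabled, and recording the server user moves behind HiveShim.setServerUserName because the field the underlying CLIService keeps differs across Hive versions (a user-name String in 0.12 versus the service UGI in later versions). A 0.12-side sketch of that shim method, under the same assumptions as the earlier sketches (in practice the per-version methods would sit together in one HiveShim object):

    package org.apache.spark.sql.hive.thriftserver

    import org.apache.hadoop.security.UserGroupInformation

    import org.apache.spark.sql.hive.thriftserver.ReflectionUtils._  // assumed home of setSuperField

    private[hive] object HiveShim {
      // Hive 0.12's CLIService stores a String serverUserName, so write the
      // short name into the superclass field reflectively, as the old inline
      // code did before this change.
      def setServerUserName(ugi: UserGroupInformation, service: SparkSQLCLIService): Unit =
        setSuperField(service, "serverUserName", ugi.getShortUserName())
    }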
