@@ -29,7 +29,7 @@ import org.apache.commons.lang3.StringUtils
 import org.apache.commons.logging.LogFactory
 import org.apache.hadoop.conf.Configuration
 import org.apache.hadoop.hive.cli.{CliDriver, CliSessionState, OptionsProcessor}
-import org.apache.hadoop.hive.common.{HiveInterruptCallback, HiveInterruptUtils}
+import org.apache.hadoop.hive.common.HiveInterruptUtils
 import org.apache.hadoop.hive.conf.HiveConf
 import org.apache.hadoop.hive.ql.Driver
 import org.apache.hadoop.hive.ql.exec.Utilities
@@ -65,16 +65,14 @@ private[hive] object SparkSQLCLIDriver extends Logging {
   * a command is being processed by the current thread.
   */
  def installSignalHandler() {
-    HiveInterruptUtils.add(new HiveInterruptCallback {
-      override def interrupt() {
-        // Handle remote execution mode
-        if (SparkSQLEnv.sparkContext != null) {
-          SparkSQLEnv.sparkContext.cancelAllJobs()
-        } else {
-          if (transport != null) {
-            // Force closing of TCP connection upon session termination
-            transport.getSocket.close()
-          }
+    HiveInterruptUtils.add(() => {
+      // Handle remote execution mode
+      if (SparkSQLEnv.sparkContext != null) {
+        SparkSQLEnv.sparkContext.cancelAllJobs()
+      } else {
+        if (transport != null) {
+          // Force closing of TCP connection upon session termination
+          transport.getSocket.close()
+        }
       }
     }
    })
@@ -208,7 +206,7 @@ private[hive] object SparkSQLCLIDriver extends Logging {
     reader.setBellEnabled(false)
     reader.setExpandEvents(false)
     // reader.setDebug(new PrintWriter(new FileWriter("writer.debug", true)))
-    CliDriver.getCommandCompleter.foreach((e) => reader.addCompleter(e))
+    CliDriver.getCommandCompleter.foreach(reader.addCompleter)
212210
     val historyDirectory = System.getProperty("user.home")
214212
0 commit comments