Commit 983ecb8

fix lint error
1 parent 8df8804 commit 983ecb8

3 files changed (+9, -4 lines)

src/main/scala/org/apache/spark/sql/pulsar/JSONParser.scala

Lines changed: 2 additions & 1 deletion
@@ -375,7 +375,8 @@ class JacksonRecordParser(schema: DataType, val options: JSONOptions) extends Lo
             |""".stripMargin + e.getMessage
         val wrappedCharException = new CharConversionException(msg)
         wrappedCharException.initCause(e)
-        throw BadRecordException(() => recordLiteral(record), () => Array.empty[InternalRow], wrappedCharException)
+        throw BadRecordException(() => recordLiteral(record),
+          () => Array.empty[InternalRow], wrappedCharException)
       }
     }
   }
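
The only change to this file is mechanical: the single over-long throw statement is split across two lines, presumably to satisfy the linter's maximum-line-length rule (the limit itself is not part of this diff). Read together with the unchanged context lines of the hunk, the tail of the error-handling block now reads as follows (a sketch assembled from the hunk above, not new code):

    // Unchanged context from the hunk, shown with the re-wrapped throw: the
    // malformed record is wrapped in Spark's BadRecordException so the caller
    // can route it according to the configured parse mode.
    val wrappedCharException = new CharConversionException(msg)
    wrappedCharException.initCause(e)
    throw BadRecordException(() => recordLiteral(record),
      () => Array.empty[InternalRow], wrappedCharException)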

src/main/scala/org/apache/spark/sql/pulsar/PulsarProvider.scala

Lines changed: 3 additions & 1 deletion
@@ -15,11 +15,13 @@ package org.apache.spark.sql.pulsar
 
 import java.{util => ju}
 import java.util.{Locale, UUID}
+
 import org.apache.pulsar.client.api.MessageId
 import org.apache.pulsar.common.naming.TopicName
+
 import org.apache.spark.SparkEnv
 import org.apache.spark.internal.Logging
-import org.apache.spark.sql.{AnalysisException, DataFrame, SQLContext, SaveMode, SparkSession}
+import org.apache.spark.sql.{AnalysisException, DataFrame, SaveMode, SparkSession, SQLContext}
 import org.apache.spark.sql.catalyst.json.JSONOptionsInRead
 import org.apache.spark.sql.catalyst.types.DataTypeUtils
 import org.apache.spark.sql.catalyst.util.CaseInsensitiveMap
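
Two lint fixes are visible in this hunk: blank lines now separate the java, third-party, and org.apache.spark import groups, and the selectors of the org.apache.spark.sql import are re-ordered. The new order (SaveMode, SparkSession, SQLContext) is what a case-insensitive alphabetical sort produces, whereas the old order (SQLContext, SaveMode, SparkSession) is plain ASCII order. The standalone snippet below (illustrative only, not part of the commit) shows the difference:

    object ImportSelectorOrderDemo extends App {
      val selectors = Seq("AnalysisException", "DataFrame", "SQLContext", "SaveMode", "SparkSession")

      // Case-sensitive ordering: 'Q' sorts before 'a', giving the old selector order.
      println(selectors.sorted.mkString(", "))
      // AnalysisException, DataFrame, SQLContext, SaveMode, SparkSession

      // Case-insensitive ordering, matching the selector order after this commit.
      println(selectors.sortBy(_.toLowerCase).mkString(", "))
      // AnalysisException, DataFrame, SaveMode, SparkSession, SQLContext
    }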

src/main/scala/org/apache/spark/sql/pulsar/PulsarSinks.scala

Lines changed: 4 additions & 2 deletions
@@ -15,11 +15,13 @@ package org.apache.spark.sql.pulsar
 
 import java.{util => ju}
 import java.util.concurrent.TimeUnit
+
 import scala.util.control.NonFatal
+
 import org.apache.pulsar.client.api.{Producer, PulsarClientException, Schema}
-import org.apache.spark.SparkEnv
+
 import org.apache.spark.internal.Logging
-import org.apache.spark.sql.{AnalysisException, DataFrame, SQLContext, SparkSession}
+import org.apache.spark.sql.{AnalysisException, DataFrame, SparkSession, SQLContext}
 import org.apache.spark.sql.catalyst.expressions
 import org.apache.spark.sql.catalyst.expressions.{Attribute, Literal}
 import org.apache.spark.sql.catalyst.types.DataTypeUtils
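
The same import grouping is applied here; per the hunk above, the org.apache.spark.SparkEnv import at old line 20 is dropped and the sql import's selectors are re-ordered as in PulsarProvider.scala. Reconstructed from this hunk alone (lines outside it are not shown), the import header reads as follows after the commit:

    // PulsarSinks.scala import header as implied by the hunk above; the file
    // continues with further imports that are outside this diff.
    import java.{util => ju}
    import java.util.concurrent.TimeUnit

    import scala.util.control.NonFatal

    import org.apache.pulsar.client.api.{Producer, PulsarClientException, Schema}

    import org.apache.spark.internal.Logging
    import org.apache.spark.sql.{AnalysisException, DataFrame, SparkSession, SQLContext}
    import org.apache.spark.sql.catalyst.expressions
    import org.apache.spark.sql.catalyst.expressions.{Attribute, Literal}
    import org.apache.spark.sql.catalyst.types.DataTypeUtils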
