Skip to content

Commit 57cabf7

Browse files
committed
[AURON #1867][BUILD] Add scalastyle-maven-plugin for code style enforcement
1 parent b36354e commit 57cabf7

File tree

9 files changed

+906
-13
lines changed

9 files changed

+906
-13
lines changed

dev/scalastyle-config.xml

Lines changed: 856 additions & 0 deletions
Large diffs are not rendered by default.

pom.xml

Lines changed: 27 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -60,6 +60,7 @@
6060
<maven.version>3.9.12</maven.version>
6161
<maven.plugin.scala.version>4.9.2</maven.plugin.scala.version>
6262
<maven.plugin.scalatest.version>2.2.0</maven.plugin.scalatest.version>
63+
<maven.plugin.scalastyle.version>1.0.0</maven.plugin.scalastyle.version>
6364
<maven.plugin.scalatest.exclude.tags />
6465
<maven.plugin.scalatest.include.tags />
6566
<maven.plugin.scalatest.debug.enabled>false</maven.plugin.scalatest.debug.enabled>
@@ -466,6 +467,32 @@
466467
</executions>
467468
</plugin>
468469

470+
<plugin>
471+
<groupId>org.scalastyle</groupId>
472+
<artifactId>scalastyle-maven-plugin</artifactId>
473+
<version>${maven.plugin.scalastyle.version}</version>
474+
<configuration>
475+
<verbose>false</verbose>
476+
<failOnViolation>true</failOnViolation>
477+
<includeTestSourceDirectory>false</includeTestSourceDirectory>
478+
<failOnWarning>false</failOnWarning>
479+
<sourceDirectory>${basedir}/src/main/scala</sourceDirectory>
480+
<testSourceDirectory>${basedir}/src/test/scala</testSourceDirectory>
481+
<configLocation>${maven.multiModuleProjectDirectory}/dev/scalastyle-config.xml</configLocation>
482+
<outputFile>${basedir}/target/scalastyle-output.xml</outputFile>
483+
<inputEncoding>${project.build.sourceEncoding}</inputEncoding>
484+
<outputEncoding>${project.reporting.outputEncoding}</outputEncoding>
485+
</configuration>
486+
<executions>
487+
<execution>
488+
<goals>
489+
<goal>check</goal>
490+
</goals>
491+
<phase>validate</phase>
492+
</execution>
493+
</executions>
494+
</plugin>
495+
469496
<plugin>
470497
<groupId>org.codehaus.mojo</groupId>
471498
<artifactId>flatten-maven-plugin</artifactId>

spark-extension-shims-spark/src/main/scala/org/apache/spark/sql/execution/auron/plan/NativeParquetInsertIntoHiveTableExec.scala

Lines changed: 6 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -104,7 +104,8 @@ case class NativeParquetInsertIntoHiveTableExec(
104104
}
105105

106106
@sparkver("3.2 / 3.3")
107-
override def basicWriteJobStatsTracker(hadoopConf: org.apache.hadoop.conf.Configuration) = {
107+
override def basicWriteJobStatsTracker(hadoopConf: org.apache.hadoop.conf.Configuration)
108+
: org.apache.spark.sql.execution.datasources.BasicWriteJobStatsTracker = {
108109
import org.apache.spark.sql.catalyst.InternalRow
109110
import org.apache.spark.sql.execution.datasources.BasicWriteJobStatsTracker
110111
import org.apache.spark.sql.execution.datasources.BasicWriteTaskStatsTracker
@@ -138,7 +139,8 @@ case class NativeParquetInsertIntoHiveTableExec(
138139
}
139140

140141
@sparkver("3.1")
141-
override def basicWriteJobStatsTracker(hadoopConf: org.apache.hadoop.conf.Configuration) = {
142+
override def basicWriteJobStatsTracker(hadoopConf: org.apache.hadoop.conf.Configuration)
143+
: org.apache.spark.sql.execution.datasources.BasicWriteJobStatsTracker = {
142144
import org.apache.spark.sql.catalyst.InternalRow
143145
import org.apache.spark.sql.execution.datasources.BasicWriteJobStatsTracker
144146
import org.apache.spark.sql.execution.datasources.BasicWriteTaskStats
@@ -187,7 +189,8 @@ case class NativeParquetInsertIntoHiveTableExec(
187189
}
188190

189191
@sparkver("3.0")
190-
override def basicWriteJobStatsTracker(hadoopConf: org.apache.hadoop.conf.Configuration) = {
192+
override def basicWriteJobStatsTracker(hadoopConf: org.apache.hadoop.conf.Configuration)
193+
: org.apache.spark.sql.execution.datasources.BasicWriteJobStatsTracker = {
191194
import org.apache.spark.sql.catalyst.InternalRow
192195
import org.apache.spark.sql.execution.datasources.BasicWriteJobStatsTracker
193196
import org.apache.spark.sql.execution.datasources.BasicWriteTaskStats

spark-extension-shims-spark/src/main/scala/org/apache/spark/sql/execution/auron/plan/NativeShuffleExchangeExec.scala

Lines changed: 4 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -17,7 +17,6 @@
1717
package org.apache.spark.sql.execution.auron.plan
1818

1919
import scala.collection.mutable
20-
import scala.concurrent.ExecutionContext.Implicits.global
2120
import scala.concurrent.Future
2221

2322
import org.apache.spark._
@@ -78,6 +77,9 @@ case class NativeShuffleExchangeExec(
7877

7978
// 'mapOutputStatisticsFuture' is only needed when enable AQE.
8079
@transient override lazy val mapOutputStatisticsFuture: Future[MapOutputStatistics] = {
80+
// scalastyle:off executioncontextglobal
81+
import scala.concurrent.ExecutionContext.Implicits.global
82+
// scalastyle:on executioncontextglobal
8183
if (inputRDD.getNumPartitions == 0) {
8284
Future.successful(null)
8385
} else {
@@ -173,7 +175,7 @@ case class NativeShuffleExchangeExec(
173175
outputPartitioning != SinglePartition
174176

175177
@sparkver("3.1 / 3.2 / 3.3 / 3.4 / 3.5")
176-
override def shuffleOrigin = {
178+
override def shuffleOrigin: org.apache.spark.sql.execution.exchange.ShuffleOrigin = {
177179
import org.apache.spark.sql.execution.exchange.ShuffleOrigin;
178180
_shuffleOrigin.get.asInstanceOf[ShuffleOrigin]
179181
}

spark-extension-shims-spark/src/main/scala/org/apache/spark/sql/execution/joins/auron/plan/NativeBroadcastJoinExec.scala

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -62,7 +62,8 @@ case class NativeBroadcastJoinExec(
6262
}
6363

6464
@sparkver("3.1 / 3.2 / 3.3 / 3.4 / 3.5")
65-
override def requiredChildDistribution = {
65+
override def requiredChildDistribution
66+
: List[org.apache.spark.sql.catalyst.plans.physical.Distribution] = {
6667
import org.apache.spark.sql.catalyst.plans.physical.BroadcastDistribution
6768
import org.apache.spark.sql.catalyst.plans.physical.UnspecifiedDistribution
6869
import org.apache.spark.sql.execution.joins.HashedRelationBroadcastMode
@@ -83,7 +84,7 @@ case class NativeBroadcastJoinExec(
8384
override def supportCodegen: Boolean = false
8485

8586
@sparkver("3.1 / 3.2 / 3.3 / 3.4 / 3.5")
86-
override def inputRDDs() = {
87+
override def inputRDDs(): Nothing = {
8788
throw new NotImplementedError("NativeBroadcastJoin does not support codegen")
8889
}
8990

spark-extension/src/main/scala/org/apache/spark/sql/execution/auron/columnar/AuronArrowColumnVector.scala

Lines changed: 5 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -190,7 +190,7 @@ object AuronArrowColumnVector {
190190

191191
private class NullAccessor(vector: NullVector)
192192
extends AuronArrowColumnVector.ArrowVectorAccessor(vector) {
193-
override def isNullAt(rowId: Int) = true
193+
override def isNullAt(rowId: Int): Boolean = true
194194
}
195195

196196
private class BooleanAccessor(vector: BitVector)
@@ -215,7 +215,7 @@ object AuronArrowColumnVector {
215215

216216
private class UInt4Accessor(vector: UInt4Vector)
217217
extends AuronArrowColumnVector.ArrowVectorAccessor(vector) {
218-
final override def getInt(rowId: Int) = vector.get(rowId)
218+
final override def getInt(rowId: Int): Int = vector.get(rowId)
219219
}
220220

221221
private class UInt8Accessor(vector: UInt8Vector)
@@ -260,14 +260,15 @@ object AuronArrowColumnVector {
260260
extends AuronArrowColumnVector.ArrowVectorAccessor(vector) {
261261
final private val stringResult = new NullableVarCharHolder
262262

263-
final override def getUTF8String(rowId: Int) = {
263+
final override def getUTF8String(rowId: Int): UTF8String = {
264264
vector.get(rowId, stringResult)
265265
if (stringResult.isSet == 0) null
266-
else
266+
else {
267267
UTF8String.fromAddress(
268268
null,
269269
stringResult.buffer.memoryAddress + stringResult.start,
270270
stringResult.end - stringResult.start)
271+
}
271272
}
272273
}
273274

spark-extension/src/main/scala/org/apache/spark/sql/execution/auron/columnar/AuronColumnarMap.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -33,5 +33,5 @@ class AuronColumnarMap(
3333

3434
override def valueArray: ArrayData = new AuronColumnarArray(values, offset, length)
3535

36-
override def copy = new ArrayBasedMapData(keyArray.copy, valueArray.copy)
36+
override def copy: ArrayBasedMapData = new ArrayBasedMapData(keyArray.copy, valueArray.copy)
3737
}

spark-extension/src/main/scala/org/apache/spark/sql/execution/auron/shuffle/AuronBlockStoreShuffleReaderBase.scala

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -90,8 +90,10 @@ object AuronBlockStoreShuffleReaderBase extends Logging {
9090
}
9191

9292
private def unwrapInputStream(in: InputStream): InputStream = {
93+
// scalastyle:off classforname
9394
val bufferReleasingInputStreamCls =
9495
Class.forName("org.apache.spark.storage.BufferReleasingInputStream")
96+
// scalastyle:on classforname
9597
if (in.getClass != bufferReleasingInputStreamCls) {
9698
return in
9799
}

thirdparty/auron-uniffle/src/main/scala/org/apache/spark/sql/execution/auron/shuffle/uniffle/AuronUniffleShuffleReader.scala

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -250,8 +250,9 @@ class AuronUniffleShuffleReader[K, C](
250250
var readBytes = 0
251251
while (readBytes < len) {
252252
while (byteBuffer == null || !byteBuffer.hasRemaining()) {
253-
if (!this.toNextBuffer)
253+
if (!this.toNextBuffer) {
254254
return if (readBytes > 0) readBytes else -1
255+
}
255256
}
256257
val bytesToRead = Math.min(byteBuffer.remaining(), len - readBytes)
257258
byteBuffer.get(arrayBytes, off + readBytes, bytesToRead)

0 commit comments

Comments (0)