Fix compiler warnings
RustedBones committed Jul 24, 2023
1 parent 7825218 commit 9f5d377
Showing 62 changed files with 170 additions and 148 deletions.
build.sbt (2 changes: 1 addition, 1 deletion)
@@ -152,6 +152,7 @@ ThisBuild / tpolecatDevModeOptions ~= { opts =>
ScalacOptions.privateWarnDeadCode,
ScalacOptions.privateWarnValueDiscard,
ScalacOptions.warnDeadCode,
ScalacOptions.warnNonUnitStatement,
ScalacOptions.warnValueDiscard
)

@@ -163,7 +164,6 @@ ThisBuild / tpolecatDevModeOptions ~= { opts =>
Scalac.privateBackendParallelism,
Scalac.privateWarnMacrosOption,
Scalac.release8,
Scalac.targetOption,
Scalac.warnConfOption,
Scalac.warnMacrosOption
)
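The added `warnNonUnitStatement` option corresponds to scalac's `-Wnonunit-statement` (available in recent 2.13 releases), which reports non-Unit expressions whose results are silently discarded in statement position; many of the source changes below address it and the related unused/erasure lints. A minimal sketch of the kind of code the new flag reports (illustrative only, not from the repository):

  def bumpAll(xs: List[Int]): Unit = {
    xs.map(_ + 1) // flagged: the resulting List[Int] is discarded
    ()            // either use the result or discard it explicitly, e.g. `val _ = xs.map(_ + 1)`
  }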
project/ScalacOptions.scala (3 changes: 0 additions, 3 deletions)
@@ -44,9 +44,6 @@ object Scalac {

val release8 = ScalacOptions.release("8")

// JVM
val targetOption = ScalacOptions.other("-target:jvm-1.8")

// Warn
val privateWarnMacrosOption = ScalacOptions.privateWarnOption(
"macros:after",
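The dropped `targetOption` carried `-target:jvm-1.8`, which newer 2.13 compilers report as deprecated; the `release8` option kept above (`ScalacOptions.release("8")`) already covers the JDK 8 API and, on recent compilers, the bytecode target as well. Roughly, as raw settings (a sketch, not part of the build):

  // before: scalacOptions ++= Seq("-release", "8", "-target:jvm-1.8")  // second flag is deprecated noise
  // after:  scalacOptions ++= Seq("-release", "8")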
@@ -350,7 +350,7 @@ object AvroIO {
}

object AvroTyped {
private[scio] def writeTransform[T <: HasAvroAnnotation: TypeTag: Coder]()
private[scio] def writeTransform[T <: HasAvroAnnotation: TypeTag]()
: BAvroIO.TypedWrite[T, Void, GenericRecord] = {
val avroT = AvroType[T]
BAvroIO
@@ -35,6 +35,6 @@ package object types {
* @doc("user age") age: Int)
* }}}
*/
@nowarn("msg=parameter value value in class doc is never used")
@nowarn("msg=parameter value in class doc is never used")
class doc(value: String) extends StaticAnnotation
}
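The `@nowarn` filter has to track the compiler's wording exactly: the `msg=` pattern is matched against the emitted warning text, and a stale pattern stops suppressing anything (which `-Wunused:nowarn` then reports in turn). A small illustrative sketch, assuming Scala 2.13's `scala.annotation.nowarn` and a hypothetical annotation class `label`:

  import scala.annotation.{nowarn, StaticAnnotation}

  // Only warnings whose text matches the regex are suppressed.
  @nowarn("msg=parameter value in class label is never used")
  class label(value: String) extends StaticAnnotation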
@@ -563,7 +563,7 @@ class ScioContext private[scio] (

if (_counters.nonEmpty) {
val counters = _counters.toArray
this.parallelize(Seq(0)).withName("Initialize counters").map { _ =>
this.parallelize(Seq(0)).withName("Initialize counters").tap { _ =>
counters.foreach(_.inc(0))
}
}
scio-core/src/main/scala/com/spotify/scio/coders/BeamCoders.scala (31 changes: 21 additions, 10 deletions)
@@ -51,10 +51,13 @@ private[scio] object BeamCoders {
Some(unwrap(options, coder))
.collect { case c: StructuredCoder[_] => c }
.map(_.getComponents.asScala.toList)
.collect { case (c1: BCoder[K]) :: (c2: BCoder[V]) :: Nil =>
val k = Coder.beam(unwrap(options, c1))
val v = Coder.beam(unwrap(options, c2))
k -> v
.collect {
case (c1: BCoder[K @unchecked]) ::
(c2: BCoder[V @unchecked]) ::
Nil =>
val k = Coder.beam(unwrap(options, c1))
val v = Coder.beam(unwrap(options, c2))
k -> v
}
.getOrElse {
throw new IllegalArgumentException(
@@ -69,11 +72,15 @@
Some(unwrap(options, coder))
.collect { case c: StructuredCoder[_] => c }
.map(_.getComponents.asScala.toList)
.collect { case (c1: BCoder[A]) :: (c2: BCoder[B]) :: (c3: BCoder[C]) :: Nil =>
val a = Coder.beam(unwrap(options, c1))
val b = Coder.beam(unwrap(options, c2))
val c = Coder.beam(unwrap(options, c3))
(a, b, c)
.collect {
case (c1: BCoder[A @unchecked]) ::
(c2: BCoder[B @unchecked]) ::
(c3: BCoder[C @unchecked]) ::
Nil =>
val a = Coder.beam(unwrap(options, c1))
val b = Coder.beam(unwrap(options, c2))
val c = Coder.beam(unwrap(options, c3))
(a, b, c)
}
.getOrElse {
throw new IllegalArgumentException(
@@ -91,7 +98,11 @@
.collect { case c: StructuredCoder[_] => c }
.map(_.getComponents.asScala.toList)
.collect {
case (c1: BCoder[A]) :: (c2: BCoder[B]) :: (c3: BCoder[C]) :: (c4: BCoder[D]) :: Nil =>
case (c1: BCoder[A @unchecked]) ::
(c2: BCoder[B @unchecked]) ::
(c3: BCoder[C @unchecked]) ::
(c4: BCoder[D @unchecked]) ::
Nil =>
val a = Coder.beam(unwrap(options, c1))
val b = Coder.beam(unwrap(options, c2))
val c = Coder.beam(unwrap(options, c3))
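The added `@unchecked` annotations address erasure warnings: a pattern like `c1: BCoder[K]` cannot actually check its type argument at runtime, so the compiler flags it unless the argument is marked `@unchecked`. A self-contained sketch of the same pattern (illustrative, not from the repository):

  def firstStringList(xs: List[Any]): Option[List[String]] =
    xs.collectFirst {
      // without @unchecked: "non-variable type argument String ... is unchecked since it is eliminated by erasure"
      case ss: List[String @unchecked] => ss
    }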
@@ -71,7 +71,7 @@ trait JavaCoders extends JavaBeanCoders {
Coder.transform(c)(bc => Coder.beam(bcoders.ListCoder.of(bc)))

implicit def jArrayListCoder[T](implicit c: Coder[T]): Coder[java.util.ArrayList[T]] =
Coder.xmap(jlistCoder[T])(new java.util.ArrayList(_), identity)
Coder.xmap(jListCoder[T])(new java.util.ArrayList(_), identity)

implicit def jMapCoder[K, V](implicit ck: Coder[K], cv: Coder[V]): Coder[java.util.Map[K, V]] =
Coder.transform(ck)(bk => Coder.transform(cv)(bv => Coder.beam(bcoders.MapCoder.of(bk, bv))))
@@ -24,6 +24,7 @@ import com.spotify.scio.schemas.{ArrayType, MapType, RawRecord, Schema, Type}
import org.apache.beam.sdk.schemas.JavaBeanSchema
import org.apache.beam.sdk.schemas.Schema.{FieldType, LogicalType}

import scala.annotation.nowarn
import scala.reflect.ClassTag

trait JavaInstances {
@@ -66,6 +67,9 @@ trait JavaInstances {
): Schema[java.util.Map[K, V]] =
MapType(ks, vs, identity, identity)

@nowarn(
"msg=evidence parameter evidence.* of type com.spotify.scio.IsJavaBean\\[.\\] in .* is never used"
)
implicit def javaBeanSchema[T: IsJavaBean: ClassTag]: RawRecord[T] =
RawRecord[T](new JavaBeanSchema())

@@ -36,7 +36,7 @@ sealed private[scio] trait JobInputSource[T] {
val asIterable: Try[Iterable[T]]
}

final private[scio] case class TestStreamInputSource[T: Coder](
final private[scio] case class TestStreamInputSource[T](
stream: TestStream[T]
) extends JobInputSource[T] {
override val asIterable: Try[Iterable[T]] = Failure(
@@ -284,7 +284,7 @@ private[scio] object Functions {
override def partitionFor(elem: T, numPartitions: Int): Int = g(elem)
}

abstract private class ReduceFn[T: Coder] extends CombineFn[T, JList[T], T] {
abstract private class ReduceFn[T] extends CombineFn[T, JList[T], T] {
override def createAccumulator(): JList[T] = new JArrayList[T]()

override def addInput(accumulator: JList[T], input: T): JList[T] = {
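`ReduceFn` (like `TestStreamInputSource` above and several helpers below) drops a context bound whose implicit evidence was never referenced; under the stricter flag set such evidence parameters are reported as unused. A generic sketch of the fix, using `Ordering` in place of Scio's `Coder` (illustrative):

  // class Box[T: Ordering](val value: T)  // flagged: evidence parameter of type Ordering[T] is never used
  class Box[T](val value: T)               // dropping the unused bound removes the warning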
@@ -133,7 +133,7 @@ abstract private[scio] class RandomValueSampler[K, V, R](val fractions: Map[K, D
protected var seed: Long = -1

// TODO: is it necessary to setSeed for each instance like Spark does?
@nowarn("msg=parameter value c in method startBundle is never used")
@nowarn("msg=parameter c in method startBundle is never used")
@StartBundle
def startBundle(c: DoFn[(K, V), (K, V)]#StartBundleContext): Unit =
rngs = fractions.iterator.map { case (k, v) =>
@@ -28,8 +28,8 @@ import com.spotify.scio.coders.{BeamCoders, Coder}
*/
class PairHashSCollectionFunctions[K, V](val self: SCollection[(K, V)]) {

implicit private[this] val (keyCoder, valueCoder): (Coder[K], Coder[V]) =
(self.keyCoder, self.valueCoder)
implicit private val keyCoder: Coder[K] = self.keyCoder
implicit private val valueCoder: Coder[V] = self.valueCoder

/**
* Perform an inner join by replicating `rhs` to all workers. The right side should be tiny and
@@ -57,7 +57,9 @@ class PairSCollectionFunctions[K, V](val self: SCollection[(K, V)]) {

private[this] val context: ScioContext = self.context

implicit val (keyCoder, valueCoder): (Coder[K], Coder[V]) = BeamCoders.getTupleCoders(self)
private val kvCoder = BeamCoders.getTupleCoders(self)
implicit val keyCoder: Coder[K] = kvCoder._1
implicit val valueCoder: Coder[V] = kvCoder._2

private[scio] def toKV: SCollection[KV[K, V]] =
self.map(kv => KV.of(kv._1, kv._2))
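Here, as in the Sparkey and large-hash variants further down, a single implicit pattern definition that destructured a coder tuple is split into plainly typed implicit vals; the tuple-destructuring form was being flagged under the warning settings this commit enables, and the split also gives each coder an explicit type. A generic sketch of the refactoring, using `Ordering` instead of Scio's `Coder` (illustrative):

  // before: implicit private val (ordI, ordS): (Ordering[Int], Ordering[String]) = (Ordering.Int, Ordering.String)
  implicit private val ordI: Ordering[Int]    = Ordering.Int
  implicit private val ordS: Ordering[String] = Ordering.String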
@@ -43,7 +43,7 @@ object RunPreReleaseIT {
val runId = args("runId")

try {
val jobs = List(parquet(runId), avro(runId), smb(runId), bigquery(runId))
val jobs = List(parquet(runId), avro(runId), smb(runId), bigquery())
Await.result(Future.sequence(jobs), 1.hour)
} catch {
case t: Throwable =>
@@ -139,7 +139,7 @@
}
}

private def bigquery(runId: String): Future[Unit] = {
private def bigquery(): Future[Unit] = {
import com.spotify.scio.examples.extra.{TypedBigQueryTornadoes, TypedStorageBigQueryTornadoes}
log.info("Starting BigQuery tests... ")
val jobs = List(
@@ -24,6 +24,8 @@ package com.spotify.scio.examples.extra
import com.spotify.scio._
import com.spotify.scio.avro._
import com.spotify.scio.examples.common.ExampleData
import com.spotify.scio.examples.extra.MagnolifyAvroExample.wordCountType
import org.apache.avro.generic.GenericRecord

object MagnolifyAvroExample {
// limit import scope to avoid polluting namespace
@@ -43,11 +45,14 @@ object MagnolifyAvroExample {
// --input=gs://apache-beam-samples/shakespeare/kinglear.txt
// --output=gs://[BUCKET]/[PATH]/wordcount-avro"`
object MagnolifyAvroWriteExample {

implicit val genericCoder: Coder[GenericRecord] =
avroGenericRecordCoder(wordCountType.schema)

def main(cmdlineArgs: Array[String]): Unit = {
import MagnolifyAvroExample._

val (sc, args) = ContextAndArgs(cmdlineArgs)
implicit def genericCoder = avroGenericRecordCoder(wordCountType.schema)
sc.textFile(args.getOrElse("input", ExampleData.KING_LEAR))
.flatMap(_.split("[^a-zA-Z']+").filter(_.nonEmpty))
.countByValue
@@ -43,7 +43,7 @@ object MetricsExample {
val gauge: Gauge = ScioMetrics.gauge[MetricsExample.type]("gauge")

def main(cmdlineArgs: Array[String]): Unit = {
val (sc, args) = ContextAndArgs(cmdlineArgs)
val (sc, _) = ContextAndArgs(cmdlineArgs)

// Create and initialize counters from ScioContext
sc.initCounter("ctxcount")
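The same `args`-to-underscore change recurs in StatefulExample and TapsExample below: a pattern variable that is never read trips the unused-pattern-variable lint, and `_` discards it explicitly. Sketch:

  val (sc, _) = ContextAndArgs(cmdlineArgs) // keep the ScioContext, explicitly discard the parsed Args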
@@ -29,10 +29,13 @@ import org.apache.beam.sdk.transforms.DoFn
import org.apache.beam.sdk.transforms.DoFn.{Element, OutputReceiver, ProcessElement, StateId}
import org.apache.beam.sdk.values.KV

import scala.annotation.nowarn

object StatefulExample {
// States are persisted on a per-key-and-window basis
type DoFnT = DoFn[KV[String, Int], KV[String, (Int, Int)]]

@nowarn("msg=private val count in class StatefulDoFn is never used")
class StatefulDoFn extends DoFnT {
// Declare mutable state
@StateId("count") private val count = StateSpecs.value[JInt]()
@@ -52,7 +55,7 @@
}

def main(cmdlineArgs: Array[String]): Unit = {
val (sc, args) = ContextAndArgs(cmdlineArgs)
val (sc, _) = ContextAndArgs(cmdlineArgs)

val input = for {
k <- Seq("a", "b")
@@ -38,7 +38,7 @@ object TapsExample {
t2 <- taps.textFile("macbeth.txt")
} yield {
// execution logic when both taps are available
val (sc, args) = ContextAndArgs(cmdlineArgs)
val (sc, _) = ContextAndArgs(cmdlineArgs)
val out = (t1.open(sc) ++ t2.open(sc))
.flatMap(_.split("[^a-zA-Z']+").filter(_.nonEmpty))
.countByValue
@@ -23,14 +23,13 @@ import com.spotify.scio.testing._

class CloudSqlExampleTest extends PipelineSpec {
"CloudSqlExample" should "work" in {
val args =
Array(
"--cloudSqlUsername=john",
"--cloudSqlPassword=secret",
"--cloudSqlDb=mydb",
"--cloudSqlInstanceConnectionName=project-id:zone:db-instance-name"
)
val (opts, _) = ScioContext.parseArguments[CloudSqlOptions](args)
val args = Seq(
"--cloudSqlUsername=john",
"--cloudSqlPassword=secret",
"--cloudSqlDb=mydb",
"--cloudSqlInstanceConnectionName=project-id:zone:db-instance-name"
)
val (opts, _) = ScioContext.parseArguments[CloudSqlOptions](args.toArray)
val connOpts = CloudSqlExample.getConnectionOptions(opts)
val query = "SELECT * FROM word_count"
val statement = "INSERT INTO result_word_count values(?, ?)"
@@ -36,6 +36,7 @@ class MagnolifyAvroExampleTest extends PipelineSpec {
val textOut: Seq[String] = wordCount.map(kv => kv._1 + ": " + kv._2)

"MagnolifyAvroWriteExample" should "work" in {
import MagnolifyAvroWriteExample.genericCoder
JobTest[com.spotify.scio.examples.extra.MagnolifyAvroWriteExample.type]
.args("--input=in.txt", "--output=wc.avro")
.input(TextIO("in.txt"), textIn)
@@ -44,6 +45,7 @@
}

"MagnolifyAvroReadExample" should "work" in {
import MagnolifyAvroWriteExample.genericCoder
JobTest[com.spotify.scio.examples.extra.MagnolifyAvroReadExample.type]
.args("--input=wc.avro", "--output=out.txt")
.input(AvroIO[GenericRecord]("wc.avro"), records)
@@ -42,7 +42,7 @@ trait SCollectionSyntax {
filenamePolicySupplier: FilenamePolicySupplier =
CsvIO.WriteParam.DefaultFilenamePolicySupplier,
prefix: String = CsvIO.WriteParam.DefaultPrefix
)(implicit coder: Coder[T], enc: HeaderEncoder[T]): ClosedTap[Nothing] =
)(implicit enc: HeaderEncoder[T]): ClosedTap[Nothing] =
self.write(CsvIO.Write[T](path))(
CsvIO.WriteParam(
compression = compression,
@@ -25,9 +25,15 @@ import org.apache.beam.sdk.extensions.sorter.ExternalSorter.Options.SorterType
import org.apache.beam.sdk.extensions.sorter.{BufferedExternalSorter, SortValues}
import org.apache.beam.sdk.values.KV

import scala.annotation.nowarn
import scala.collection.AbstractIterator
import scala.jdk.CollectionConverters._

@nowarn(
"msg=evidence parameter evidence.* " +
"of type com.spotify.scio.extra.sorter.SortingKey\\[K2\\] " +
"in class SorterOps is never used"
)
final class SorterOps[K1, K2: SortingKey, V](self: SCollection[(K1, Iterable[(K2, V)])]) {

/**
@@ -36,8 +36,8 @@ import com.spotify.sparkey.CompressionType
*/
class PairLargeHashSCollectionFunctions[K, V](private val self: SCollection[(K, V)]) {

implicit private[this] val (keyCoder, valueCoder): (Coder[K], Coder[V]) =
(self.keyCoder, self.valueCoder)
implicit private val keyCoder: Coder[K] = self.keyCoder
implicit private val valueCoder: Coder[V] = self.valueCoder

/**
* Perform an inner join by replicating `rhs` to all workers. The right side should be <<10x
@@ -21,7 +21,7 @@ import java.lang.Math.floorMod
import java.util.UUID
import com.spotify.scio.ScioContext
import com.spotify.scio.annotations.experimental
import com.spotify.scio.coders.{BeamCoders, Coder, CoderMaterializer}
import com.spotify.scio.coders.{Coder, CoderMaterializer}
import com.spotify.scio.extra.sparkey.instances._
import com.spotify.scio.util.Cache
import com.spotify.scio.values.{SCollection, SideInput}
@@ -187,7 +187,7 @@ package object sparkey extends SparkeyReaderInstances {
compressionType: CompressionType,
compressionBlockSize: Int,
elements: Iterable[(K, V)]
)(implicit w: SparkeyWritable[K, V], koder: Coder[K], voder: Coder[V]): SparkeyUri = {
)(implicit w: SparkeyWritable[K, V]): SparkeyUri = {
val writer = new SparkeyWriter(uri, compressionType, compressionBlockSize, maxMemoryUsage)
val it = elements.iterator
while (it.hasNext) {
@@ -210,7 +210,8 @@

import SparkeyPairSCollection._

implicit val (keyCoder, valueCoder): (Coder[K], Coder[V]) = BeamCoders.getTupleCoders(self)
implicit val keyCoder: Coder[K] = self.keyCoder
implicit val valueCoder: Coder[V] = self.valueCoder

/**
* Write the key-value pairs of this SCollection as a Sparkey file to a specific location.
@@ -203,7 +203,7 @@ class CsvIOTest extends ScioIOSpec with TapSpec with BeforeAndAfterEach {
)
}

private def writeAsCsvAndReadLines[T: HeaderEncoder: Coder](dir: File)(
private def writeAsCsvAndReadLines(dir: File)(
transform: (ScioContext, String) => ClosedTap[Nothing]
): List[String] = {
val sc = ScioContext()
@@ -213,7 +213,7 @@ class CsvIOTest extends ScioIOSpec with TapSpec with BeforeAndAfterEach {
FileUtils.readLines(file, StandardCharsets.UTF_8).asScala.toList
}

private def getFirstCsvFileFrom[T: HeaderEncoder: Coder](dir: File) =
private def getFirstCsvFileFrom(dir: File) =
dir
.listFiles(new FilenameFilter {
override def accept(dir: File, name: String): Boolean = name.endsWith("csv")
@@ -66,12 +66,12 @@ class StorageIT extends AnyFlatSpec with Matchers {
val expected = (0 until 10).map { i =>
Optional(
Some(true),
Some(i),
Some(i),
Some(i.toLong),
Some(i.toDouble),
Some(BigDecimal(i)),
Some(s"s$i"),
Some(ByteString.copyFromUtf8(s"s$i")),
Some(t.plus(Duration.millis(i))),
Some(t.plus(Duration.millis(i.toLong))),
Some(dt.toLocalDate.plusDays(i)),
Some(dt.toLocalTime.plusMillis(i)),
Some(dt.toLocalDateTime.plusMillis(i))
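The `.toLong` calls replace implicit Int-to-Long widenings, which the numeric-widening lint reports; in the `Duration.millis` case the explicit conversion also matches the `long` the Joda API expects. A short illustrative sketch:

  val i: Int = 7
  // val n: Long = i        // flagged: implicit numeric widening from Int to Long
  val n: Long = i.toLong    // explicit conversion, no warning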