Fix deprecation warns (#2966)
regadas committed May 13, 2020
1 parent 4638fde commit 4ca965e
Showing 159 changed files with 198 additions and 178 deletions.
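
This commit addresses the collection-conversion deprecation warnings that appear when compiling against Scala 2.13: `scala.collection.JavaConverters` is deprecated there in favor of `scala.jdk.CollectionConverters`. Because the replacement package only ships with the 2.13 standard library, each module first gains a dependency on scala-collection-compat, which provides the same package on earlier Scala versions. A minimal sketch of the per-module build change (assuming `scalaCollectionCompatVersion` is declared alongside the other version vals in build.sbt; its value is not shown in this diff):

// Sketch only: scalaCollectionCompatVersion is assumed to be defined elsewhere in build.sbt.
libraryDependencies ++= Seq(
  "org.scala-lang.modules" %% "scala-collection-compat" % scalaCollectionCompatVersion
)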
build.sbt (22 changes: 21 additions & 1 deletion)
@@ -531,6 +531,7 @@ lazy val `scio-sql`: Project = Project(
.settings(
description := "Scio - SQL extension",
libraryDependencies ++= Seq(
"org.scala-lang.modules" %% "scala-collection-compat" % scalaCollectionCompatVersion,
"org.apache.beam" % "beam-sdks-java-core" % beamVersion,
"org.apache.beam" % "beam-sdks-java-extensions-sql" % beamVersion,
"org.apache.commons" % "commons-lang3" % commonsLang3Version,
@@ -551,6 +552,7 @@ lazy val `scio-test`: Project = project
.settings(
description := "Scio helpers for ScalaTest",
libraryDependencies ++= Seq(
"org.scala-lang.modules" %% "scala-collection-compat" % scalaCollectionCompatVersion,
"org.apache.beam" % "beam-runners-direct-java" % beamVersion,
"org.apache.beam" % "beam-runners-google-cloud-dataflow-java" % beamVersion % "test,it",
"org.apache.beam" % "beam-sdks-java-core" % beamVersion % "test",
@@ -616,6 +618,7 @@ lazy val `scio-avro`: Project = project
.settings(
description := "Scio add-on for working with Avro",
libraryDependencies ++= Seq(
"org.scala-lang.modules" %% "scala-collection-compat" % scalaCollectionCompatVersion,
"me.lyh" %% "protobuf-generic" % protobufGenericVersion,
"org.apache.beam" % "beam-vendor-guava-26_0-jre" % beamVendorVersion,
"org.apache.beam" % "beam-sdks-java-core" % beamVersion,
@@ -646,6 +649,7 @@ lazy val `scio-bigquery`: Project = project
.settings(
description := "Scio add-on for Google BigQuery",
libraryDependencies ++= Seq(
"org.scala-lang.modules" %% "scala-collection-compat" % scalaCollectionCompatVersion,
"org.apache.beam" % "beam-vendor-guava-26_0-jre" % beamVendorVersion,
"com.twitter" %% "chill" % chillVersion,
"com.google.protobuf" % "protobuf-java" % protobufVersion,
@@ -691,6 +695,7 @@ lazy val `scio-bigtable`: Project = project
.settings(
description := "Scio add-on for Google Cloud Bigtable",
libraryDependencies ++= Seq(
"org.scala-lang.modules" %% "scala-collection-compat" % scalaCollectionCompatVersion,
"org.apache.beam" % "beam-sdks-java-core" % beamVersion,
"joda-time" % "joda-time" % jodaTimeVersion,
"com.google.protobuf" % "protobuf-java" % protobufVersion,
@@ -723,6 +728,7 @@ lazy val `scio-cassandra3`: Project = project
.settings(
description := "Scio add-on for Apache Cassandra 3.x",
libraryDependencies ++= Seq(
"org.scala-lang.modules" %% "scala-collection-compat" % scalaCollectionCompatVersion,
"com.google.protobuf" % "protobuf-java" % protobufVersion,
"com.google.guava" % "guava" % guavaVersion,
"com.twitter" %% "chill" % chillVersion,
@@ -751,6 +757,7 @@ lazy val `scio-elasticsearch5`: Project = project
.settings(
description := "Scio add-on for writing to Elasticsearch",
libraryDependencies ++= Seq(
"org.scala-lang.modules" %% "scala-collection-compat" % scalaCollectionCompatVersion,
"org.apache.beam" % "beam-vendor-guava-26_0-jre" % beamVendorVersion,
"org.apache.beam" % "beam-sdks-java-core" % beamVersion,
"joda-time" % "joda-time" % jodaTimeVersion,
@@ -770,6 +777,7 @@ lazy val `scio-elasticsearch6`: Project = project
.settings(
description := "Scio add-on for writing to Elasticsearch",
libraryDependencies ++= Seq(
"org.scala-lang.modules" %% "scala-collection-compat" % scalaCollectionCompatVersion,
"org.apache.beam" % "beam-vendor-guava-26_0-jre" % beamVendorVersion,
"org.apache.beam" % "beam-sdks-java-core" % beamVersion,
"joda-time" % "joda-time" % jodaTimeVersion,
@@ -790,6 +798,7 @@ lazy val `scio-elasticsearch7`: Project = project
.settings(
description := "Scio add-on for writing to Elasticsearch",
libraryDependencies ++= Seq(
"org.scala-lang.modules" %% "scala-collection-compat" % scalaCollectionCompatVersion,
"org.apache.beam" % "beam-vendor-guava-26_0-jre" % beamVendorVersion,
"org.apache.beam" % "beam-sdks-java-core" % beamVersion,
"joda-time" % "joda-time" % jodaTimeVersion,
@@ -814,6 +823,7 @@ lazy val `scio-extra`: Project = project
.settings(
description := "Scio extra utilities",
libraryDependencies ++= Seq(
"org.scala-lang.modules" %% "scala-collection-compat" % scalaCollectionCompatVersion,
"org.apache.beam" % "beam-sdks-java-core" % beamVersion,
"org.apache.beam" % "beam-sdks-java-extensions-sorter" % beamVersion,
"com.google.apis" % "google-api-services-bigquery" % googleApiServicesBigQuery,
@@ -885,6 +895,7 @@ lazy val `scio-parquet`: Project = project
javacOptions ++= Seq("-s", (sourceManaged.value / "main").toString),
description := "Scio add-on for Parquet",
libraryDependencies ++= Seq(
"org.scala-lang.modules" %% "scala-collection-compat" % scalaCollectionCompatVersion,
"me.lyh" %% "parquet-avro" % parquetExtraVersion,
"me.lyh" % "parquet-tensorflow" % parquetExtraVersion,
"com.google.cloud.bigdataoss" % "gcs-connector" % s"hadoop2-$bigdataossVersion",
@@ -917,6 +928,7 @@ lazy val `scio-spanner`: Project = project
.settings(
description := "Scio add-on for Google Cloud Spanner",
libraryDependencies ++= Seq(
"org.scala-lang.modules" %% "scala-collection-compat" % scalaCollectionCompatVersion,
"com.google.cloud" % "google-cloud-core" % "1.92.2",
"org.apache.beam" % "beam-sdks-java-core" % beamVersion,
"com.google.cloud" % "google-cloud-spanner" % googleCloudSpannerVersion excludeAll (
@@ -944,6 +956,7 @@ lazy val `scio-tensorflow`: Project = project
Compile / managedSourceDirectories := (Compile / managedSourceDirectories).value
.filterNot(_.getPath.endsWith("/src_managed/main")),
libraryDependencies ++= Seq(
"org.scala-lang.modules" %% "scala-collection-compat" % scalaCollectionCompatVersion,
"org.apache.beam" % "beam-sdks-java-core" % beamVersion,
"org.tensorflow" % "tensorflow" % tensorFlowVersion,
"org.tensorflow" % "proto" % tensorFlowVersion,
@@ -975,7 +988,10 @@ lazy val `scio-schemas`: Project = project
.settings(protobufSettings)
.settings(
description := "Avro/Proto schemas for testing",
libraryDependencies += "org.apache.avro" % "avro" % avroVersion,
libraryDependencies ++= Seq(
"org.scala-lang.modules" %% "scala-collection-compat" % scalaCollectionCompatVersion,
"org.apache.avro" % "avro" % avroVersion
),
Compile / sourceDirectories := (Compile / sourceDirectories).value
.filterNot(_.getPath.endsWith("/src_managed/main")),
Compile / managedSourceDirectories := (Compile / managedSourceDirectories).value
@@ -994,6 +1010,7 @@ lazy val `scio-examples`: Project = project
.settings(macroSettings)
.settings(
libraryDependencies ++= Seq(
"org.scala-lang.modules" %% "scala-collection-compat" % scalaCollectionCompatVersion,
"org.apache.beam" % "beam-sdks-java-core" % beamVersion,
"org.apache.avro" % "avro" % avroVersion,
"com.google.cloud.datastore" % "datastore-v1-proto-client" % datastoreV1ProtoClientVersion,
@@ -1066,6 +1083,7 @@ lazy val `scio-repl`: Project = project
.settings(macroSettings)
.settings(
libraryDependencies ++= Seq(
"org.scala-lang.modules" %% "scala-collection-compat" % scalaCollectionCompatVersion,
"org.apache.beam" % "beam-runners-direct-java" % beamVersion,
"org.apache.beam" % "beam-sdks-java-core" % beamVersion,
"org.apache.avro" % "avro" % avroVersion,
@@ -1104,6 +1122,7 @@ lazy val `scio-jmh`: Project = project
classDirectory in Jmh := (classDirectory in Test).value,
dependencyClasspath in Jmh := (dependencyClasspath in Test).value,
libraryDependencies ++= directRunnerDependencies ++ Seq(
"org.scala-lang.modules" %% "scala-collection-compat" % scalaCollectionCompatVersion,
"junit" % "junit" % junitVersion % "test",
"org.hamcrest" % "hamcrest-core" % hamcrestVersion % "test",
"org.hamcrest" % "hamcrest-library" % hamcrestVersion % "test",
@@ -1124,6 +1143,7 @@ lazy val `scio-smb`: Project = project
.settings(
description := "Sort Merge Bucket source/sink implementations for Apache Beam",
libraryDependencies ++= Seq(
"org.scala-lang.modules" %% "scala-collection-compat" % scalaCollectionCompatVersion,
"org.apache.beam" % "beam-sdks-java-core" % beamVersion,
"org.apache.beam" % "beam-sdks-java-core" % beamVersion % "it,test" classifier "tests",
"org.apache.beam" % "beam-sdks-java-extensions-sorter" % beamVersion,
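The source changes that follow are mechanical: each file swaps the deprecated `scala.collection.JavaConverters._` import for `scala.jdk.CollectionConverters._`; the `.asScala` / `.asJava` call sites themselves do not change. An illustrative sketch of the pattern (the values below are examples, not taken from the diff):

// Replaces the 2.13-deprecated scala.collection.JavaConverters._
import scala.jdk.CollectionConverters._

val javaList: java.util.List[String] = java.util.Arrays.asList("a", "b", "c")
val asScalaSeq: Seq[String] = javaList.asScala.toSeq            // existing .asScala usage keeps working
val backToJava: java.util.List[String] = asScalaSeq.asJava      // as does .asJava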
@@ -32,7 +32,7 @@ import org.apache.beam.sdk.transforms.DoFn.ProcessElement
import org.apache.beam.sdk.transforms.{DoFn, SerializableFunction}
import org.apache.beam.sdk.{io => beam}

import scala.collection.JavaConverters._
import scala.jdk.CollectionConverters._
import scala.reflect.runtime.universe._
import scala.reflect.ClassTag

@@ -118,7 +118,7 @@ private[types] object ConverterProvider {

val tn = TermName("r")
q"""(r: ${p(c, ApacheAvro)}.generic.GenericRecord) => {
import _root_.scala.collection.JavaConverters._
import _root_.scala.jdk.CollectionConverters._
${constructor(tpe, tn)}
}
"""
@@ -204,7 +204,7 @@ private[types] object ConverterProvider {

val tn = TermName("r")
q"""(r: $tpe) => {
import _root_.scala.collection.JavaConverters._
import _root_.scala.jdk.CollectionConverters._
${constructor(tpe, tn)}
}
"""
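In the macro-based converters (here and in the scio-bigquery ConverterProvider further down), the import is emitted inside a quasiquote and is fully qualified with `_root_` so the generated code compiles regardless of what names are in scope at the expansion site. A simplified sketch of that shape (illustrative only, not the actual ConverterProvider implementation):

import scala.reflect.macros.blackbox

object ConverterSketch {
  // Emits a tree whose body imports the converters with a _root_ prefix,
  // mirroring the pattern shown in the diff above.
  def converterImpl(c: blackbox.Context): c.Tree = {
    import c.universe._
    q"""(r: _root_.org.apache.avro.generic.GenericRecord) => {
          import _root_.scala.jdk.CollectionConverters._
          r.getSchema.getFields.asScala.map(_.name).toList
        }"""
  }
}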
@@ -26,7 +26,7 @@ import com.spotify.scio.avro.types.MacroUtil._
import org.apache.avro.{JsonProperties, Schema}
import org.apache.avro.Schema.Field

import scala.collection.JavaConverters._
import scala.jdk.CollectionConverters._
import scala.reflect.runtime.universe
import scala.reflect.runtime.universe._

@@ -20,7 +20,7 @@ package com.spotify.scio.avro.types
import org.apache.avro.Schema
import org.apache.avro.Schema.Type._

import scala.collection.JavaConverters._
import scala.jdk.CollectionConverters._

/** Utility for Avro schemas. */
object SchemaUtil {
@@ -38,7 +38,7 @@ import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.hash.Hashing
import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.io.Files
import org.slf4j.LoggerFactory

import scala.collection.JavaConverters._
import scala.jdk.CollectionConverters._
import scala.reflect.macros._
import scala.util.Try

@@ -23,7 +23,7 @@ import org.apache.avro.Schema
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers

import scala.collection.JavaConverters._
import scala.jdk.CollectionConverters._

class SchemaUtilTest extends AnyFlatSpec with Matchers {
"toPrettyString()" should "support primitive types" in {
@@ -28,7 +28,7 @@ import org.apache.beam.sdk.io.gcp.bigquery.BigQueryHelpers
import org.scalatest.matchers.should.Matchers
import org.scalatest.flatspec.AnyFlatSpec

import scala.collection.JavaConverters._
import scala.jdk.CollectionConverters._
import scala.util.Success

// scio-test/it:runMain PopulateTestData to re-populate data for integration tests
@@ -28,7 +28,7 @@ import org.apache.beam.sdk.testing.PAssert
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers

import scala.collection.JavaConverters._
import scala.jdk.CollectionConverters._

object BeamSchemaIT {
final case class Shakespeare(word: String, word_count: Long, corpus: String, corpus_date: Long)
@@ -23,7 +23,7 @@ import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers

import scala.annotation.StaticAnnotation
import scala.collection.JavaConverters._
import scala.jdk.CollectionConverters._
import scala.reflect.runtime.universe._
import com.spotify.scio.bigquery.Query

@@ -29,7 +29,7 @@ import org.joda.time.{DateTimeZone, Duration, Instant}
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers

import scala.collection.JavaConverters._
import scala.jdk.CollectionConverters._

// scio-test/it:runMain PopulateTestData to re-populate data for integration tests
class StorageIT extends AnyFlatSpec with Matchers {
@@ -41,7 +41,7 @@ import org.apache.beam.sdk.io.gcp.{bigquery => beam}
import org.apache.beam.sdk.io.{Compression, TextIO}
import org.apache.beam.sdk.transforms.SerializableFunction

import scala.collection.JavaConverters._
import scala.jdk.CollectionConverters._
import scala.reflect.ClassTag
import scala.reflect.runtime.universe._

@@ -22,7 +22,7 @@ import com.google.cloud.bigquery.storage.v1beta1.ReadOptions.TableReadOptions
import org.apache.avro.Schema
import org.apache.avro.Schema.Type

import scala.collection.JavaConverters._
import scala.jdk.CollectionConverters._

/** Utility for BigQuery Storage API. */
object StorageUtil {
@@ -40,7 +40,7 @@ import org.apache.beam.sdk.io.gcp.bigquery.BigQueryHelpers
import org.apache.beam.sdk.io.gcp.bigquery.BigQueryIO.Write.{CreateDisposition, WriteDisposition}
import org.apache.beam.sdk.io.gcp.{bigquery => beam}

import scala.collection.JavaConverters._
import scala.jdk.CollectionConverters._
import scala.reflect.runtime.universe.TypeTag
import scala.util.Try

@@ -23,7 +23,7 @@ import com.spotify.scio.bigquery.BigQueryUtil
import org.apache.beam.sdk.io.gcp.{bigquery => bq}
import org.slf4j.LoggerFactory

import scala.collection.JavaConverters._
import scala.jdk.CollectionConverters._

private[client] object ExtractOps {
private val Logger = LoggerFactory.getLogger(this.getClass)
@@ -26,7 +26,7 @@ import org.joda.time.Period
import org.joda.time.format.PeriodFormatterBuilder
import org.slf4j.LoggerFactory

import scala.collection.JavaConverters._
import scala.jdk.CollectionConverters._

private[client] object JobOps {
private val Logger = LoggerFactory.getLogger(this.getClass)
@@ -24,7 +24,7 @@ import org.apache.beam.sdk.io.gcp.bigquery.BigQueryIO.Write.{CreateDisposition,
import org.apache.beam.sdk.io.gcp.{bigquery => bq}
import org.slf4j.LoggerFactory

import scala.collection.JavaConverters._
import scala.jdk.CollectionConverters._
import scala.util.Try

private[client] object LoadOps {
@@ -27,7 +27,7 @@ import org.apache.beam.sdk.io.gcp.bigquery.BigQueryIO.Write.{CreateDisposition,
import org.apache.beam.sdk.io.gcp.{bigquery => bq}
import org.slf4j.LoggerFactory

import scala.collection.JavaConverters._
import scala.jdk.CollectionConverters._
import scala.collection.mutable.{Map => MMap}
import scala.util.control.NonFatal
import scala.util.{Failure, Success, Try}
@@ -37,7 +37,7 @@ import org.joda.time.Instant
import org.joda.time.format.DateTimeFormat
import org.slf4j.LoggerFactory

import scala.collection.JavaConverters._
import scala.jdk.CollectionConverters._
import scala.collection.mutable.ArrayBuffer
import scala.util.Random
import scala.util.control.NonFatal
@@ -21,7 +21,7 @@ package com.spotify.scio.bigquery.syntax
import com.spotify.scio.bigquery.{Date, DateTime, TableRow, Time, Timestamp}
import org.joda.time.{Instant, LocalDate, LocalDateTime, LocalTime}

import scala.collection.JavaConverters._
import scala.jdk.CollectionConverters._

import scala.util.Try

@@ -25,7 +25,7 @@ import com.spotify.scio.coders.Coder
import com.spotify.scio.io.{FileStorage, Tap, Taps}
import com.spotify.scio.values.SCollection

import scala.collection.JavaConverters._
import scala.jdk.CollectionConverters._
import scala.concurrent.Future
import scala.reflect.ClassTag
import scala.reflect.runtime.universe._
@@ -153,7 +153,7 @@ private[types] object ConverterProvider {

val tn = TermName("r")
q"""(r: _root_.org.apache.avro.generic.GenericRecord) => {
import _root_.scala.collection.JavaConverters._
import _root_.scala.jdk.CollectionConverters._
${constructor(tpe, tn)}
}
"""
@@ -247,7 +247,7 @@ private[types] object ConverterProvider {

val tn = TermName("r")
q"""(r: $tpe) => {
import _root_.scala.collection.JavaConverters._
import _root_.scala.jdk.CollectionConverters._
${constructor(tpe, tn)}
}
"""
@@ -353,7 +353,7 @@ private[types] object ConverterProvider {

val tn = TermName("r")
q"""(r: _root_.java.util.Map[String, AnyRef]) => {
import _root_.scala.collection.JavaConverters._
import _root_.scala.jdk.CollectionConverters._
${constructor(tpe, tn)}
}
"""
@@ -451,7 +451,7 @@ private[types] object ConverterProvider {

val tn = TermName("r")
q"""(r: $tpe) => {
import _root_.scala.collection.JavaConverters._
import _root_.scala.jdk.CollectionConverters._
${constructor(tpe, tn)}
}
"""
@@ -25,7 +25,7 @@ import org.apache.avro.Schema
import org.apache.beam.sdk.io.gcp.bigquery.BigQueryUtils
import org.joda.time.{Instant, LocalDate, LocalDateTime, LocalTime}

import scala.collection.JavaConverters._
import scala.jdk.CollectionConverters._
import scala.reflect.runtime.universe._
import com.spotify.scio.util.Cache

@@ -21,7 +21,7 @@ import java.util.{List => JList}

import com.google.api.services.bigquery.model.{TableFieldSchema, TableSchema}

import scala.collection.JavaConverters._
import scala.jdk.CollectionConverters._

/** Utility for BigQuery schemas. */
object SchemaUtil {
@@ -37,7 +37,7 @@ import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.hash.Hashing
import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.io.Files
import org.slf4j.LoggerFactory

import scala.collection.JavaConverters._
import scala.jdk.CollectionConverters._
import scala.collection.mutable.{Buffer => MBuffer, Map => MMap}
import scala.reflect.macros._

