Explicitly replace "import tensorflow" with "tensorflow.compat.v1"
PiperOrigin-RevId: 286204718
sun51 authored and lamblin committed Jan 22, 2020
1 parent 5522c59 commit f707943
Showing 10 changed files with 10 additions and 10 deletions.
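This change switches every module in the repository from the top-level "tensorflow" namespace to the TF1 compatibility alias, so the existing graph-mode (v1-style) code keeps working when TensorFlow 2.x is installed. The sketch below illustrates the pattern under that assumption; it is not part of the diff, and the tf.disable_v2_behavior() call is shown only as a typical companion step, not something this commit adds.

import tensorflow.compat.v1 as tf

# Assumption for illustration (not in this commit): projects that rely on
# graph mode usually disable v2 behavior once at startup.
tf.disable_v2_behavior()

# v1-style symbols stay available through the alias, e.g. placeholders and Sessions.
graph = tf.Graph()
with graph.as_default():
  x = tf.placeholder(tf.float32, shape=[None, 3], name="x")
  y = tf.reduce_sum(x, axis=1)

with tf.Session(graph=graph) as sess:
  print(sess.run(y, feed_dict={x: [[1.0, 2.0, 3.0]]}))  # -> [6.]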
2 changes: 1 addition & 1 deletion meta_dataset/data/decoder.py
@@ -16,7 +16,7 @@
 # Lint as: python2, python3
 """Module responsible for decoding image/feature examples."""
 import gin.tf
-import tensorflow as tf
+import tensorflow.compat.v1 as tf
 
 
 @gin.configurable
2 changes: 1 addition & 1 deletion meta_dataset/data/decoder_test.py
@@ -23,7 +23,7 @@
 from meta_dataset.data import decoder
 from meta_dataset.dataset_conversion import dataset_to_records
 import numpy as np
-import tensorflow as tf
+import tensorflow.compat.v1 as tf
 
 
 class DecoderTest(tf.test.TestCase):
2 changes: 1 addition & 1 deletion meta_dataset/data/imagenet_specification_test.py
@@ -24,7 +24,7 @@
 from meta_dataset.data import learning_spec
 import numpy as np
 from six.moves import range
-import tensorflow as tf
+import tensorflow.compat.v1 as tf
 
 DESIRED_TOY_NUM_VALID_CLASSES = 2
 DESIRED_TOY_NUM_TEST_CLASSES = 1
2 changes: 1 addition & 1 deletion meta_dataset/data/pipeline.py
@@ -39,7 +39,7 @@
 from meta_dataset.data import reader
 from meta_dataset.data import sampling
 from six.moves import zip
-import tensorflow as tf
+import tensorflow.compat.v1 as tf
 
 
 def filter_dummy_examples(example_strings, class_ids):
2 changes: 1 addition & 1 deletion meta_dataset/data/pipeline_test.py
@@ -26,7 +26,7 @@
 from meta_dataset.data.dataset_spec import DatasetSpecification
 from meta_dataset.dataset_conversion import dataset_to_records
 import numpy as np
-import tensorflow as tf
+import tensorflow.compat.v1 as tf
 
 
 class PipelineTest(tf.test.TestCase):
2 changes: 1 addition & 1 deletion meta_dataset/data/reader.py
@@ -35,7 +35,7 @@
 from meta_dataset import data
 import numpy as np
 from six.moves import range
-import tensorflow as tf
+import tensorflow.compat.v1 as tf
 
 # DUMMY_CLASS_ID will be used as the target of examples used for padding only.
 DUMMY_CLASS_ID = -1
2 changes: 1 addition & 1 deletion meta_dataset/data/reader_test.py
@@ -31,7 +31,7 @@
 import numpy as np
 from six.moves import range
 from six.moves import zip
-import tensorflow as tf
+import tensorflow.compat.v1 as tf
 
 # DatasetSpecification to use in tests
 DATASET_SPEC = DatasetSpecification(
2 changes: 1 addition & 1 deletion meta_dataset/data/sampling_test.py
@@ -28,7 +28,7 @@
 import numpy as np
 from six.moves import range
 from six.moves import zip
-import tensorflow as tf
+import tensorflow.compat.v1 as tf
 
 # DatasetSpecification to use in tests
 DATASET_SPEC = DatasetSpecification(
2 changes: 1 addition & 1 deletion meta_dataset/learner.py
@@ -27,7 +27,7 @@
 import six
 from six.moves import range
 from six.moves import zip
-import tensorflow as tf
+import tensorflow.compat.v1 as tf
 
 MAX_WAY = 50 # The maximum number of classes we will see in any batch.
 
2 changes: 1 addition & 1 deletion meta_dataset/trainer_test.py
@@ -25,7 +25,7 @@
 from meta_dataset import trainer
 from meta_dataset.data import config
 from meta_dataset.data import providers
-import tensorflow as tf
+import tensorflow.compat.v1 as tf
 
 
 class TrainerTest(tf.test.TestCase):
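Several of the test modules touched here (e.g. DecoderTest, PipelineTest, TrainerTest) subclass tf.test.TestCase, which is also exposed through the compat.v1 alias, so they need no changes beyond the import line. A hypothetical sketch of such a test under the new import, not taken from the repository, might look like this:

import tensorflow.compat.v1 as tf


class CompatImportTest(tf.test.TestCase):
  # Hypothetical test, not from meta_dataset: builds a small v1-style graph
  # and runs it in an explicit Session to show the alias behaves like TF1.

  def test_v1_graph_still_runs(self):
    graph = tf.Graph()
    with graph.as_default():
      total = tf.reduce_sum(tf.constant([1.0, 2.0, 3.0]))
    with tf.Session(graph=graph) as sess:
      self.assertAllClose(6.0, sess.run(total))


if __name__ == '__main__':
  tf.test.main()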
