TFE: Add compatibility errors and doc strings to queues, input pipelines and Supervisor

PiperOrigin-RevId: 173712330
Shanqing Cai 2017-10-27 13:39:08 -07:00 committed by TensorFlower Gardener
parent b31b08bb0f
commit 0bc432a443
3 changed files with 94 additions and 12 deletions

View File

@ -149,14 +149,15 @@ def input_producer(input_tensor,
RuntimeError: If called with eager execution enabled.
@compatibility(eager)
- Queue-using input pipelines are not supported when eager execution is enabled.
- Please use tf.data to ingest data into your model instead.
+ Input pipelines based on Queues are not supported when eager execution is
+ enabled. Please use the `tf.data` API to ingest data under eager execution.
@end_compatibility
"""
if context.in_eager_mode():
raise RuntimeError(
"Queue-using input pipelines are not supported when eager execution is"
" enabled. Please use tf.data to ingest data into your model instead.")
"Input pipelines based on Queues are not supported when eager execution"
" is enabled. Please use tf.data to ingest data into your model"
" instead.")
with ops.name_scope(name, "input_producer", [input_tensor]):
input_tensor = ops.convert_to_tensor(input_tensor, name="input_tensor")
element_shape = input_tensor.shape[1:].merge_with(element_shape)
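The updated docstring and error message above both point at `tf.data`; as a rough, non-authoritative sketch of what that migration looks like for an `input_producer`-style pipeline, the snippet below builds a small dataset, shuffles it, and repeats it for a fixed number of epochs. The tensor contents and the names `samples` and `num_epochs` are made up for illustration, and the exact way eager execution is enabled (and whether a dataset can be iterated directly) depends on the TensorFlow version.

import tensorflow as tf

# Illustrative stand-in for the data a queue-based input_producer would cycle
# through; not part of this commit.
samples = tf.constant([[1.0, 2.0], [3.0, 4.0], [5.0, 6.0]])
num_epochs = 2

dataset = tf.data.Dataset.from_tensor_slices(samples)
dataset = dataset.shuffle(buffer_size=3).repeat(num_epochs)

# With eager execution enabled, the dataset is consumed in ordinary Python
# (older eager previews required tf.contrib.eager.Iterator instead).
for element in dataset:
  print(element)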
@ -222,6 +223,11 @@ def string_input_producer(string_tensor,
Raises:
ValueError: If the string_tensor is a null Python list. At runtime,
will fail with an assertion if string_tensor becomes a null tensor.
@compatibility(eager)
Input pipelines based on Queues are not supported when eager execution is
enabled. Please use the `tf.data` API to ingest data under eager execution.
@end_compatibility
"""
not_null_err = "string_input_producer requires a non-null input tensor"
if not isinstance(string_tensor, ops.Tensor) and not string_tensor:
@ -271,6 +277,11 @@ def range_input_producer(limit, num_epochs=None, shuffle=True, seed=None,
Returns:
A Queue with the output integers. A `QueueRunner` for the Queue
is added to the current `Graph`'s `QUEUE_RUNNER` collection.
@compatibility(eager)
Input pipelines based on Queues are not supported when eager execution is
enabled. Please use the `tf.data` API to ingest data under eager execution.
@end_compatibility
"""
with ops.name_scope(name, "input_producer", [limit]) as name:
range_tensor = math_ops.range(limit)
@ -308,6 +319,11 @@ def slice_input_producer(tensor_list, num_epochs=None, shuffle=True, seed=None,
Raises:
ValueError: if `slice_input_producer` produces nothing from `tensor_list`.
@compatibility(eager)
Input pipelines based on Queues are not supported when eager execution is
enabled. Please use the `tf.data` API to ingest data under eager execution.
@end_compatibility
"""
with ops.name_scope(name, "input_producer", tensor_list):
tensor_list = ops.convert_n_to_tensor_or_indexed_slices(tensor_list)
@ -698,8 +714,9 @@ def _batch(tensors, batch_size, keep_input, num_threads=1, capacity=32,
"""Helper function for `batch` and `maybe_batch`."""
if context.in_eager_mode():
raise ValueError(
"Queue-using input pipelines are not supported when eager execution is"
" enabled. Please use tf.data to ingest data into your model instead.")
"Input pipelines based on Queues are not supported when eager execution"
" is enabled. Please use tf.data to ingest data into your model"
" instead.")
tensor_list = _as_tensor_list(tensors)
with ops.name_scope(name, "batch", list(tensor_list) + [keep_input]) as name:
tensor_list = _validate(tensor_list)
@ -735,8 +752,9 @@ def _batch_join(tensors_list, batch_size, keep_input, capacity=32,
"""Helper function for `batch_join` and `maybe_batch_join`."""
if context.in_eager_mode():
raise ValueError(
"Queue-using input pipelines are not supported when eager execution is"
" enabled. Please use tf.data to ingest data into your model instead.")
"Input pipelines based on Queues are not supported when eager execution"
" is enabled. Please use tf.data to ingest data into your model"
" instead.")
tensor_list_list = _as_tensor_list_list(tensors_list)
with ops.name_scope(name, "batch_join",
_flatten(tensor_list_list) + [keep_input]) as name:
@ -769,8 +787,9 @@ def _shuffle_batch(tensors, batch_size, capacity, min_after_dequeue,
"""Helper function for `shuffle_batch` and `maybe_shuffle_batch`."""
if context.in_eager_mode():
raise ValueError(
"Queue-using input pipelines are not supported when eager execution is"
" enabled. Please use tf.data to ingest data into your model instead.")
"Input pipelines based on Queues are not supported when eager execution"
" is enabled. Please use tf.data to ingest data into your model"
" instead.")
tensor_list = _as_tensor_list(tensors)
with ops.name_scope(name, "shuffle_batch",
list(tensor_list) + [keep_input]) as name:
@ -813,8 +832,9 @@ def _shuffle_batch_join(tensors_list, batch_size, capacity,
"""Helper function for `shuffle_batch_join` and `maybe_shuffle_batch_join`."""
if context.in_eager_mode():
raise ValueError(
"Queue-using input pipelines are not supported when eager execution is"
" enabled. Please use tf.data to ingest data into your model instead.")
"Input pipelines based on Queues are not supported when eager execution"
" is enabled. Please use tf.data to ingest data into your model"
" instead.")
tensor_list_list = _as_tensor_list_list(tensors_list)
with ops.name_scope(name, "shuffle_batch_join",
_flatten(tensor_list_list) + [keep_input]) as name:
@ -923,6 +943,11 @@ def batch(tensors, batch_size, num_threads=1, capacity=32,
Raises:
ValueError: If the `shapes` are not specified, and cannot be
inferred from the elements of `tensors`.
@compatibility(eager)
Input pipelines based on Queues are not supported when eager execution is
enabled. Please use the `tf.data` API to ingest data under eager execution.
@end_compatibility
"""
return _batch(
tensors,
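For `batch` itself, the analogous `tf.data` operation is `Dataset.batch`; the hedged sketch below batches a dictionary of feature tensors, roughly the way `tf.train.batch` batches a dict of `tensors`. The feature names "x" and "y" and the batch size are assumptions made only for this example, and the semantics are not a drop-in match for a queue fed by enqueue threads.

import tensorflow as tf

features = {"x": tf.range(10), "y": tf.range(10, 20)}  # hypothetical features
dataset = tf.data.Dataset.from_tensor_slices(features)
dataset = dataset.batch(4)  # roughly what tf.train.batch(features, batch_size=4) produced

# Direct iteration works under eager execution; the final batch may be smaller.
for batch in dataset:
  print(batch["x"].shape, batch["y"].shape)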
@ -1076,6 +1101,11 @@ def batch_join(tensors_list, batch_size, capacity=32, enqueue_many=False,
Raises:
ValueError: If the `shapes` are not specified, and cannot be
inferred from the elements of `tensor_list_list`.
@compatibility(eager)
Input pipelines based on Queues are not supported when eager execution is
enabled. Please use the `tf.data` API to ingest data under eager execution.
@end_compatibility
"""
return _batch_join(
tensors_list,
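`batch_join` exists to merge several reader pipelines into one batched stream; under `tf.data`, a comparable effect comes from `Dataset.interleave` over the per-source datasets followed by `batch`. The sketch below is a hedged approximation (the mixing semantics are not identical to a queue fed by N enqueue threads), and the file names are placeholders.

import tensorflow as tf

filenames = ["shard-0.txt", "shard-1.txt", "shard-2.txt"]  # placeholder paths
dataset = tf.data.Dataset.from_tensor_slices(filenames)
# Read several sources in interleaved fashion, akin to batch_join's N readers.
lines = dataset.interleave(tf.data.TextLineDataset, cycle_length=3)
batched = lines.batch(16)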
@ -1220,6 +1250,11 @@ def shuffle_batch(tensors, batch_size, capacity, min_after_dequeue,
Raises:
ValueError: If the `shapes` are not specified, and cannot be
inferred from the elements of `tensors`.
@compatibility(eager)
Input pipelines based on Queues are not supported when eager execution is
enabled. Please use the `tf.data` API to ingest data under eager execution.
@end_compatibility
"""
return _shuffle_batch(
tensors,
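For `shuffle_batch`, the closest `tf.data` formulation chains `shuffle` and `batch`; in this hedged sketch the shuffle `buffer_size` plays approximately the role `min_after_dequeue` plays for the `RandomShuffleQueue` (a bigger buffer gives better mixing at the cost of memory and startup latency). The sizes below are arbitrary example values.

import tensorflow as tf

examples = tf.data.Dataset.from_tensor_slices(tf.range(100))
shuffled = examples.shuffle(buffer_size=30, seed=42)  # ~ min_after_dequeue
batched = shuffled.batch(8)

for batch in batched:  # plain Python loop under eager execution
  print(batch.numpy())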
@ -1274,6 +1309,11 @@ def maybe_shuffle_batch(tensors, batch_size, capacity, min_after_dequeue,
Raises:
ValueError: If the `shapes` are not specified, and cannot be
inferred from the elements of `tensors`.
@compatibility(eager)
Input pipelines based on Queues are not supported when eager execution is
enabled. Please use the `tf.data` API to ingest data under eager execution.
@end_compatibility
"""
return _shuffle_batch(
tensors,
@ -1363,6 +1403,11 @@ def shuffle_batch_join(tensors_list, batch_size, capacity,
Raises:
ValueError: If the `shapes` are not specified, and cannot be
inferred from the elements of `tensors_list`.
@compatibility(eager)
Input pipelines based on Queues are not supported when eager execution is
enabled. Please use the `tf.data` API to ingest data under eager execution.
@end_compatibility
"""
return _shuffle_batch_join(
tensors_list,
@ -1417,6 +1462,11 @@ def maybe_shuffle_batch_join(tensors_list, batch_size, capacity,
Raises:
ValueError: If the `shapes` are not specified, and cannot be
inferred from the elements of `tensors_list`.
@compatibility(eager)
Input pipelines based on Queues are not supported when eager execution is
enabled. Please use the `tf.data` API to ingest data under eager execution.
@end_compatibility
"""
return _shuffle_batch_join(
tensors_list,

View File

@ -23,6 +23,7 @@ import weakref
from tensorflow.core.protobuf import queue_runner_pb2
from tensorflow.python.client import session
from tensorflow.python.eager import context
from tensorflow.python.framework import errors
from tensorflow.python.framework import ops
from tensorflow.python.platform import tf_logging as logging
@ -414,7 +415,18 @@ def start_queue_runners(sess=None, coord=None, daemon=True, start=True,
Returns:
A list of threads.
Raises:
RuntimeError: If called with eager execution enabled.
ValueError: If called without a default `tf.Session` registered.
@compatibility(eager)
Not compatible with eager execution. To ingest data under eager execution,
use the `tf.data` API instead.
@end_compatibility
"""
if context.in_eager_mode():
raise RuntimeError("Queues are not compatible with eager execution.")
if sess is None:
sess = ops.get_default_session()
if not sess:
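Since `start_queue_runners` now raises under eager execution, the coordinator-and-threads pattern it serves has no eager counterpart; the comparison below is a hedged sketch of the replacement, with the graph-mode version kept only as comments for contrast. The dataset contents are placeholders.

# Graph mode, what start_queue_runners is for:
#   coord = tf.train.Coordinator()
#   threads = tf.train.start_queue_runners(sess=sess, coord=coord)
#   ...run training steps against sess...
#   coord.request_stop()
#   coord.join(threads)
#
# Eager execution: no queues, no runner threads, no coordinator.
import tensorflow as tf

dataset = tf.data.Dataset.from_tensor_slices(tf.range(6)).batch(2)
for step, batch in enumerate(dataset):
  # Each pass of the loop pulls the next batch directly; there is nothing
  # to start or join in the background.
  print("step", step, batch.numpy())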

View File

@ -23,6 +23,7 @@ import time
from tensorflow.core.framework.summary_pb2 import Summary
from tensorflow.core.util.event_pb2 import SessionLog
from tensorflow.python.eager import context
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import meta_graph
from tensorflow.python.framework import ops
@ -288,7 +289,16 @@ class Supervisor(object):
Returns:
A `Supervisor`.
Raises:
RuntimeError: If called with eager execution enabled.
@compatibility(eager)
`Supervisor`s are not supported when eager execution is enabled.
@end_compatibility
"""
if context.in_eager_mode():
raise RuntimeError("Supervisors are compatible with eager execution.")
# Set default values of arguments.
if graph is None:
graph = ops.get_default_graph()
@ -735,7 +745,17 @@ class Supervisor(object):
Returns:
The list of threads started for the `QueueRunners`.
Raises:
RuntimeError: If called with eager execution enabled.
@compatibility(eager)
Queues are not compatible with eager execution. To ingest data when eager
execution is enabled, use the `tf.data` API.
@end_compatibility
"""
if context.in_eager_mode():
raise RuntimeError("Queues are not compatible with eager execution.")
if queue_runners is None:
queue_runners = self._graph.get_collection(ops.GraphKeys.QUEUE_RUNNERS)
threads = []
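Stepping back, the change applied across all three files is the same early guard: detect eager mode and raise an explicit error before any queue or session machinery is built. A condensed, illustrative version of that pattern follows; note that `tensorflow.python.eager.context` is an internal module (the diff can use it, while user code would normally just see the resulting `RuntimeError`), and the function name below is a stand-in.

from tensorflow.python.eager import context

def _queue_based_entry_point():
  """Stand-in for the guarded queue/Supervisor entry points."""
  if context.in_eager_mode():
    # Fail fast with an actionable message instead of building queues that a
    # session will never drive under eager execution.
    raise RuntimeError(
        "Queues are not compatible with eager execution. Please use the "
        "tf.data API to ingest data under eager execution.")
  # ... graph-mode queue construction continues here ...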