Extract the graphs dashboard to a plugin
This completes the great plugin migration!
The graphs plugin is somewhat different from the plugins considered so
far. First, it exposes two kinds of data: graph data and run metadata.
We elect to put both sources of data under the domain of the graphs
plugin for now, because it's not clear that the run metadata would be
useful for anything else. Second, the graph data really has no use for
"tags": a run either has an associated graph or it does not. Thus, we
expose an endpoint /data/plugin/graphs/runs that is different in format
from the /tags routes exposed by other plugins (it returns just a list
instead of a run-to-tag mapping).
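To make the difference concrete, here is a hedged illustration of the response
shapes (run and tag names are hypothetical; the actual routes are documented in
the HTTP API changes below):

    /data/plugin/scalars/tags              -> {"train": ["loss"], "eval": ["loss"]}
    /data/plugin/graphs/runs               -> ["train"]
    /data/plugin/graphs/run_metadata_tags  -> {"train": ["step_0001"], "eval": []}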
This change removes a bunch of tests from application_test.py. The tests
cover the compression behavior of the graph endpoint, but the graph
endpoint doesn't have any special logic in the way of compression. Thus,
the tests are, apparently, testing that werkzeug (or whatever is
relevant here) provides good compression defaults. This isn't
necessarily a bad idea, but it shouldn't be coupled to the graph tests.
To get test data that includes run metadata, you can run this script:
https://raw.githubusercontent.com/tensorflow/tensorflow/326942394e69074d50d5889218a24c9371eff259/tensorflow/examples/tutorials/mnist/mnist_with_summaries.py
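Alternatively, here is a minimal sketch (hypothetical logdir path; it mirrors the
test added in this change) that writes a graph plus one RunMetadata event using
the TF 1.x summary writer:

    import tensorflow as tf

    # Build a tiny graph so the run has a graph definition to display.
    a = tf.constant(1.0, name='a')
    b = tf.constant(2.0, name='b')
    total = tf.add(a, b, name='total')

    sess = tf.Session()
    # Passing sess.graph records the graph; the logdir below is made up.
    writer = tf.summary.FileWriter('/tmp/graphs_demo/run1', sess.graph)

    # Trace one session.run() call and record its metadata under a tag.
    run_options = tf.RunOptions(trace_level=tf.RunOptions.FULL_TRACE)
    run_metadata = tf.RunMetadata()
    sess.run(total, options=run_options, run_metadata=run_metadata)
    writer.add_run_metadata(run_metadata, 'test run')
    writer.close()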
PiperOrigin-RevId: 157884714
parent 8939b85620
commit a4caeb2ea4

@@ -381,6 +381,7 @@ filegroup(
        "//tensorflow/tensorboard/plugins:all_files",
        "//tensorflow/tensorboard/plugins/audio:all_files",
        "//tensorflow/tensorboard/plugins/distributions:all_files",
        "//tensorflow/tensorboard/plugins/graphs:all_files",
        "//tensorflow/tensorboard/plugins/histograms:all_files",
        "//tensorflow/tensorboard/plugins/images:all_files",
        "//tensorflow/tensorboard/plugins/projector:all_files",

@@ -231,6 +231,7 @@ add_python_module("tensorflow/tensorboard/backend/event_processing")
add_python_module("tensorflow/tensorboard/plugins")
add_python_module("tensorflow/tensorboard/plugins/audio")
add_python_module("tensorflow/tensorboard/plugins/distributions")
add_python_module("tensorflow/tensorboard/plugins/graphs")
add_python_module("tensorflow/tensorboard/plugins/histograms")
add_python_module("tensorflow/tensorboard/plugins/images")
add_python_module("tensorflow/tensorboard/plugins/projector")

@@ -15,6 +15,7 @@ py_binary(
        "//tensorflow/tensorboard/backend/event_processing:event_file_inspector",
        "//tensorflow/tensorboard/plugins/audio:audio_plugin",
        "//tensorflow/tensorboard/plugins/distributions:distributions_plugin",
        "//tensorflow/tensorboard/plugins/graphs:graphs_plugin",
        "//tensorflow/tensorboard/plugins/histograms:histograms_plugin",
        "//tensorflow/tensorboard/plugins/images:images_plugin",
        "//tensorflow/tensorboard/plugins/projector:projector_plugin",

@@ -63,7 +63,6 @@ py_library(
    srcs_version = "PY2AND3",
    deps = [
        ":http_util",
        ":process_graph",
        "//tensorflow:tensorflow_py",
        "//tensorflow/tensorboard/backend/event_processing:event_accumulator",
        "//tensorflow/tensorboard/backend/event_processing:event_multiplexer",

@@ -33,7 +33,6 @@ import tensorflow as tf
from werkzeug import wrappers

from tensorflow.tensorboard.backend import http_util
from tensorflow.tensorboard.backend import process_graph
from tensorflow.tensorboard.backend.event_processing import event_accumulator
from tensorflow.tensorboard.backend.event_processing import event_multiplexer

@@ -57,8 +56,10 @@ DEFAULT_SIZE_GUIDANCE = {
_MIGRATED_DATA_KEYS = frozenset((
    'audio',
    'distributions',
    'graph',
    'histograms',
    'images',
    'run_metadata',
    'scalars',
))

@@ -67,8 +68,6 @@ LOGDIR_ROUTE = '/logdir'
RUNS_ROUTE = '/runs'
PLUGIN_PREFIX = '/plugin'
PLUGINS_LISTING_ROUTE = '/plugins_listing'
GRAPH_ROUTE = '/' + event_accumulator.GRAPH
RUN_METADATA_ROUTE = '/' + event_accumulator.RUN_METADATA
TAB_ROUTES = ['', '/events', '/images', '/audio', '/graphs', '/histograms']

# Slashes in a plugin name could throw the router for a loop. An empty

@@ -146,16 +145,12 @@ class TensorBoardWSGIApp(object):
      reload_multiplexer(self._multiplexer, path_to_run)

    self.data_applications = {
        DATA_PREFIX + GRAPH_ROUTE:
            self._serve_graph,
        DATA_PREFIX + LOGDIR_ROUTE:
            self._serve_logdir,
        # TODO(chizeng): Delete this RPC once we have skylark rules that obviate
        # the need for the frontend to determine which plugins are active.
        DATA_PREFIX + PLUGINS_LISTING_ROUTE:
            self._serve_plugins_listing,
        DATA_PREFIX + RUN_METADATA_ROUTE:
            self._serve_run_metadata,
        DATA_PREFIX + RUNS_ROUTE:
            self._serve_runs,
    }

@@ -212,57 +207,6 @@ class TensorBoardWSGIApp(object):
    return http_util.Respond(
        request, {'logdir': self._logdir}, 'application/json')

  @wrappers.Request.application
  def _serve_graph(self, request):
    """Given a single run, return the graph definition in json format."""
    run = request.args.get('run', None)
    if run is None:
      return http_util.Respond(
          request, 'query parameter "run" is required', 'text/plain', 400)

    try:
      graph = self._multiplexer.Graph(run)
    except ValueError:
      return http_util.Respond(
          request, '404 Not Found', 'text/plain; charset=UTF-8', code=404)

    limit_attr_size = request.args.get('limit_attr_size', None)
    if limit_attr_size is not None:
      try:
        limit_attr_size = int(limit_attr_size)
      except ValueError:
        return http_util.Respond(
            request, 'query parameter `limit_attr_size` must be integer',
            'text/plain', 400)

    large_attrs_key = request.args.get('large_attrs_key', None)
    try:
      process_graph.prepare_graph_for_ui(graph, limit_attr_size,
                                         large_attrs_key)
    except ValueError as e:
      return http_util.Respond(request, e.message, 'text/plain', 400)

    return http_util.Respond(request, str(graph), 'text/x-protobuf')  # pbtxt

  @wrappers.Request.application
  def _serve_run_metadata(self, request):
    """Given a tag and a TensorFlow run, return the session.run() metadata."""
    tag = request.args.get('tag', None)
    run = request.args.get('run', None)
    if tag is None:
      return http_util.Respond(
          request, 'query parameter "tag" is required', 'text/plain', 400)
    if run is None:
      return http_util.Respond(
          request, 'query parameter "run" is required', 'text/plain', 400)
    try:
      run_metadata = self._multiplexer.RunMetadata(run, tag)
    except ValueError:
      return http_util.Respond(
          request, '404 Not Found', 'text/plain; charset=UTF-8', code=404)
    return http_util.Respond(
        request, str(run_metadata), 'text/x-protobuf')  # pbtxt

  @wrappers.Request.application
  def _serve_plugins_listing(self, request):
    """Serves an object mapping plugin name to whether it is enabled.

@@ -35,7 +35,6 @@ from six.moves import http_client
import tensorflow as tf

from werkzeug import serving
from google.protobuf import text_format

from tensorflow.core.protobuf import meta_graph_pb2
from tensorflow.tensorboard import tensorboard

@@ -168,9 +167,7 @@ class TensorboardServerTest(tf.test.TestCase):
        {
            'run1': {
                # if only_use_meta_graph, the graph is from the metagraph
                'graph': True,
                'meta_graph': self._only_use_meta_graph,
                'run_metadata': ['test run'],
                'tensors': [],
            }
        })

@@ -191,8 +188,7 @@ class TensorboardServerTest(tf.test.TestCase):

  def testDataPaths_disableAllCaching(self):
    """Test the format of the /data/runs endpoint."""
    for path in ('/data/runs', '/data/logdir',
                 '/data/run_metadata?run=run1&tag=test%20run'):
    for path in ('/data/runs', '/data/logdir'):
      connection = http_client.HTTPConnection('localhost',
                                              self._server.server_address[1])
      connection.request('GET', path)

@@ -202,69 +198,11 @@ class TensorboardServerTest(tf.test.TestCase):
      response.read()
      connection.close()

  def testGraph(self):
    """Test retrieving the graph definition."""
    response = self._get('/data/graph?run=run1&limit_attr_size=1024'
                         '&large_attrs_key=_very_large_attrs')
    self.assertEqual(response.status, 200)
    graph_pbtxt = response.read()
    # Parse the graph from pbtxt into a graph message.
    graph = tf.GraphDef()
    graph = text_format.Parse(graph_pbtxt, graph)
    self.assertEqual(len(graph.node), 2)
    self.assertEqual(graph.node[0].name, 'a')
    self.assertEqual(graph.node[1].name, 'b')
    # Make sure the second node has an attribute that was filtered out because
    # it was too large and was added to the "too large" attributes list.
    self.assertEqual(list(graph.node[1].attr.keys()), ['_very_large_attrs'])
    self.assertEqual(graph.node[1].attr['_very_large_attrs'].list.s,
                     [b'very_large_attr'])

  def testAcceptGzip_compressesResponse(self):
    response = self._get('/data/graph?run=run1&limit_attr_size=1024'
                         '&large_attrs_key=_very_large_attrs',
                         {'Accept-Encoding': 'gzip'})
    self.assertEqual(response.status, 200)
    self.assertEqual(response.getheader('Content-Encoding'), 'gzip')
    pbtxt = gzip.GzipFile('', 'rb', 9, BytesIO(response.read())).read()
    graph = text_format.Parse(pbtxt, tf.GraphDef())
    self.assertEqual(len(graph.node), 2)

  def testAcceptAnyEncoding_compressesResponse(self):
    response = self._get('/data/graph?run=run1&limit_attr_size=1024'
                         '&large_attrs_key=_very_large_attrs',
                         {'Accept-Encoding': '*'})
    self.assertEqual(response.status, 200)
    self.assertEqual(response.getheader('Content-Encoding'), 'gzip')
    pbtxt = gzip.GzipFile('', 'rb', 9, BytesIO(response.read())).read()
    graph = text_format.Parse(pbtxt, tf.GraphDef())
    self.assertEqual(len(graph.node), 2)

  def testAcceptDoodleEncoding_doesNotCompressResponse(self):
    response = self._get('/data/graph?run=run1&limit_attr_size=1024'
                         '&large_attrs_key=_very_large_attrs',
                         {'Accept-Encoding': 'doodle'})
    self.assertEqual(response.status, 200)
    self.assertIsNone(response.getheader('Content-Encoding'))
    graph = text_format.Parse(response.read(), tf.GraphDef())
    self.assertEqual(len(graph.node), 2)

  def testRunMetadata(self):
    """Test retrieving the run metadata information."""
    response = self._get('/data/run_metadata?run=run1&tag=test%20run')
    self.assertEqual(response.status, 200)
    run_metadata_pbtxt = response.read()
    # Parse from pbtxt into a message.
    run_metadata = tf.RunMetadata()
    text_format.Parse(run_metadata_pbtxt, run_metadata)
    self.assertEqual(len(run_metadata.step_stats.dev_stats), 1)
    self.assertEqual(run_metadata.step_stats.dev_stats[0].device, 'test device')

  def _GenerateTestData(self):
    """Generates the test data directory.

    The test data has a single run named run1 which contains:
     - a graph definition
     - a graph definition and metagraph definition

    Returns:
      temp_dir: The directory the test data is generated under.

@@ -290,12 +228,6 @@ class TensorboardServerTest(tf.test.TestCase):
    else:
      writer.add_graph(graph_def)

    # Add a simple run metadata event.
    run_metadata = tf.RunMetadata()
    device_stats = run_metadata.step_stats.dev_stats.add()
    device_stats.device = 'test device'
    writer.add_run_metadata(run_metadata, 'test run')

    writer.flush()
    writer.close()

@@ -16,7 +16,7 @@ limitations under the License.
import {compareTagNames} from '../vz-sorting/sorting';
import {RequestManager} from './requestManager';
import {Router} from './router';
import {demoify} from './urlPathHelpers';
import {demoify, queryEncoder} from './urlPathHelpers';

export interface RunEnumeration {
  histograms: string[];

@@ -199,16 +199,16 @@ export class Backend {
   * Return a promise showing list of runs that contain graphs.
   */
  public graphRuns(): Promise<string[]> {
    return this.runs().then((x) => {
      return _.keys(x).filter((k) => x[k].graph);
    });
    return this.requestManager.request(
        this.router.pluginRoute('graphs', '/runs'));
  }

  /**
   * Return a promise showing the Run-to-Tag mapping for run_metadata objects.
   */
  public runMetadataRuns(): Promise<RunToTag> {
    return this.runs().then((x) => _.mapValues(x, 'run_metadata'));
  public runMetadataTags(): Promise<RunToTag> {
    return this.requestManager.request(
        this.router.pluginRoute('graphs', '/run_metadata_tags'));
  }

@@ -233,11 +233,25 @@ export class Backend {
  }

  /**
   * Return a promise of a graph string from the backend.
   * Return a URL to fetch a graph (cf. method 'graph').
   */
  public graph(tag: string, limitAttrSize?: number, largeAttrKeys?: string):
  public graphUrl(run: string, limitAttrSize?: number, largeAttrsKey?: string):
      string {
    const demoMode = this.router.isDemoMode();
    const base = this.router.pluginRoute('graphs', '/graph');
    const optional = (p) => (p != null && !demoMode || undefined) && p;
    const parameters = {
      'run': run,
      'limit_attr_size': optional(limitAttrSize),
      'large_attrs_key': optional(largeAttrsKey),
    };
    const extension = demoMode ? '.pbtxt' : '';
    return base + queryEncoder(parameters) + extension;
  }

  public graph(run: string, limitAttrSize?: number, largeAttrsKey?: string):
      Promise<string> {
    const url = this.router.graph(tag, limitAttrSize, largeAttrKeys);
    const url = this.graphUrl(run, limitAttrSize, largeAttrsKey);
    return this.requestManager.request(url);
  }

@@ -288,7 +302,7 @@ export class Backend {
      Promise<Array<HistogramSeriesDatum>> {
    let p: Promise<TupleData<HistogramTuple>[]>;
    const url =
        (this.router.pluginRunTagRoute('histograms', '/histograms')(tag, run));
        this.router.pluginRunTagRoute('histograms', '/histograms')(tag, run);
    p = this.requestManager.request(url);
    return p.then(map(detupler(createHistogram))).then(function(histos) {
      // Get the minimum and maximum values across all histograms so that the

@@ -326,11 +340,18 @@ export class Backend {
    return p.then(map(this.createAudio.bind(this)));
  }

  /**
   * Returns the url for the RunMetadata for the given run/tag.
   */
  public runMetadataUrl(tag: string, run: string): string {
    return this.router.pluginRunTagRoute('graphs', '/run_metadata')(tag, run);
  }

  /**
   * Returns a promise to load the string RunMetadata for given run/tag.
   */
  public runMetadata(tag: string, run: string): Promise<string> {
    const url = this.router.runMetadata(tag, run);
    const url = this.runMetadataUrl(tag, run);
    return this.requestManager.request(url);
  }

@@ -21,10 +21,6 @@ export interface Router {
  logdir: () => string;
  runs: () => string;
  isDemoMode: () => boolean;
  graph:
      (run: string, limit_attr_size?: number,
       large_attrs_key?: string) => string;
  runMetadata: RunTagUrlFn;
  textRuns: () => string;
  text: RunTagUrlFn;
  healthPills: () => string;

@@ -54,26 +50,6 @@ export function router(dataDir = 'data', demoMode = false): Router {
      return url;
    };
  }
  function graphUrl(
      run: string, limit_attr_size?: number, large_attrs_key?: string) {
    let query_params = [['run', clean(run)]];
    if (limit_attr_size != null && !demoMode) {
      query_params.push(['limit_attr_size', String(limit_attr_size)]);
    }
    if (large_attrs_key != null && !demoMode) {
      query_params.push(['large_attrs_key', large_attrs_key]);
    }
    let query = query_params
                    .map(param => {
                      return param[0] + '=' + encodeURIComponent(param[1]);
                    })
                    .join('&');
    var url = dataDir + '/graph' + clean('?' + query);
    if (demoMode) {
      url += '.pbtxt';
    }
    return url;
  }
  function pluginRoute(pluginName: string, route: string): string {
    return `${dataDir}/plugin/${pluginName}${route}`;
  }

@@ -86,8 +62,6 @@ export function router(dataDir = 'data', demoMode = false): Router {
    logdir: () => dataDir + '/logdir',
    runs: () => dataDir + '/runs' + (demoMode ? '.json' : ''),
    isDemoMode: () => demoMode,
    graph: graphUrl,
    runMetadata: standardRoute('run_metadata', '.pbtxt'),
    healthPills: () => dataDir + '/plugin/debugger/health_pills',
    textRuns: () => dataDir + '/plugin/text/runs' + (demoMode ? '.json' : ''),
    text: standardRoute('plugin/text/text'),

@@ -139,7 +139,7 @@ describe('backend tests', () => {
      chai.assert.deepEqual(x, audio);
      next();
    });
    backend.runMetadataRuns().then((x) => {
    backend.runMetadataTags().then((x) => {
      chai.assert.deepEqual(x, runMetadata);
      next();
    });

@@ -189,20 +189,20 @@ Polymer({
    }
    // Set this to true so we only initialize once.
    this._initialized = true;
    Promise.all([backend.graphRuns(), backend.runMetadataRuns()])
    Promise.all([backend.graphRuns(), backend.runMetadataTags()])
        .then(function(result) {
          var runsWithGraph = result[0].sort(compareTagNames);
          var runToMetadata = result[1];
          var datasets = _.map(runsWithGraph, function(runName) {
            return {
              name: runName,
              path: backend.router.graph(
              path: backend.graphUrl(
                  runName, tf.graph.LIMIT_ATTR_SIZE, tf.graph.LARGE_ATTRS_KEY),
              runMetadata: runToMetadata[runName] ? _.map(
                  runToMetadata[runName].sort(compareTagNames), function(tag) {
                    return {
                      tag: tag,
                      path: backend.router.runMetadata(tag, runName)
                      path: backend.runMetadataUrl(tag, runName)
                    };
                  }, this) : []
            };

@@ -55,13 +55,9 @@ all of the data available from the TensorBoard server. Here is an example:

    {
      "train_run": {
        "graph": true,
        "firstEventTimestamp": 123456.789
        "run_metadata": ["forward prop", "inference"]
      },
      "eval": {
        "graph": false,
        "run_metadata": []
      }
    }

@@ -81,6 +77,8 @@ and will not appear in the output from this route:
- `scalars`
- `compressedHistograms`, moved to `distributions`
- `histograms`
- `graph`, as `/data/plugin/graphs/runs`
- `run_metadata`, as `/data/plugin/graphs/run_metadata_tags`

## `/data/plugin/scalars/tags`

@@ -296,11 +294,19 @@ tags present in the corresponding run. Here is an example:
Note that runs without any audio tags are included as keys with value the empty
array.

## `/data/graph?run=foo&limit_attr_size=1024&large_attrs_key=key`
## `/data/plugin/graphs/runs`

Returns the graph definition for the given run in gzipped pbtxt format. The
graph is composed of a list of nodes, where each node is a specific TensorFlow
operation which takes as inputs other nodes (operations).
Returns a list of runs that have associated graphs.

For example:

    ["train"]

## `/data/plugin/graphs/graph?run=foo&limit_attr_size=1024&large_attrs_key=key`

Returns the graph definition for the given run in pbtxt format. The
graph is composed of a list of nodes, where each node is a specific
TensorFlow operation which takes as inputs other nodes (operations).

The query parameters `limit_attr_size` and `large_attrs_key` are optional.

@@ -313,7 +319,10 @@ attributes that are too large. The value of this key (list of strings)
should be used by the client in order to determine which attributes
have been filtered. Must be specified if `limit_attr_size` is specified.

For the query `/graph?run=foo&limit_attr_size=1024&large_attrs_key=_too_large`,
For the query

    /data/plugin/graphs/graph?run=foo&limit_attr_size=1024&large_attrs_key=_too_large,

here is an example pbtxt response of a graph with 3 nodes, where the second
node had two large attributes "a" and "b" that were filtered out (size > 1024):

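As a hedged illustration of consuming the new route (host, port, and run name
are hypothetical; the URL shape follows the documentation above), a minimal
Python sketch that fetches and parses the pbtxt graph:

    import tensorflow as tf
    from six.moves import urllib
    from google.protobuf import text_format

    # Fetch the graph for run "foo" from a locally running TensorBoard.
    url = ('http://localhost:6006/data/plugin/graphs/graph'
           '?run=foo&limit_attr_size=1024&large_attrs_key=_too_large')
    graph_pbtxt = urllib.request.urlopen(url).read().decode('utf-8')

    # Parse the pbtxt body into a GraphDef and list the node names.
    graph = text_format.Parse(graph_pbtxt, tf.GraphDef())
    print([node.name for node in graph.node])
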
tensorflow/tensorboard/plugins/graphs/BUILD (new file, 51 lines)

@@ -0,0 +1,51 @@
# Description:
# TensorBoard plugin for graphs

package(default_visibility = ["//tensorflow:internal"])

licenses(["notice"])  # Apache 2.0

exports_files(["LICENSE"])

load("//tensorflow:tensorflow.bzl", "py_test")

## Graphs Plugin ##
py_library(
    name = "graphs_plugin",
    srcs = ["graphs_plugin.py"],
    srcs_version = "PY2AND3",
    visibility = [
        "//tensorflow:internal",
    ],
    deps = [
        "//tensorflow:tensorflow_py",
        "//tensorflow/tensorboard/backend:http_util",
        "//tensorflow/tensorboard/backend:process_graph",
        "//tensorflow/tensorboard/backend/event_processing:event_accumulator",
        "//tensorflow/tensorboard/plugins:base_plugin",
        "@org_pocoo_werkzeug//:werkzeug",
        "@six_archive//:six",
    ],
)

py_test(
    name = "graphs_plugin_test",
    size = "small",
    srcs = ["graphs_plugin_test.py"],
    main = "graphs_plugin_test.py",
    srcs_version = "PY2AND3",
    deps = [
        ":graphs_plugin",
        "//tensorflow:tensorflow_py",
        "//tensorflow/tensorboard/backend:application",
        "//tensorflow/tensorboard/backend/event_processing:event_multiplexer",
        "@org_pocoo_werkzeug//:werkzeug",
        "@six_archive//:six",
    ],
)

filegroup(
    name = "all_files",
    srcs = glob(["**"]),
    visibility = ["//tensorflow:__pkg__"],
)

tensorflow/tensorboard/plugins/graphs/graphs_plugin.py (new file, 140 lines)

@@ -0,0 +1,140 @@
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""The TensorBoard Graphs plugin."""

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

from werkzeug import wrappers

from tensorflow.tensorboard.backend import http_util
from tensorflow.tensorboard.backend import process_graph
from tensorflow.tensorboard.backend.event_processing import event_accumulator
from tensorflow.tensorboard.plugins import base_plugin

_PLUGIN_PREFIX_ROUTE = 'graphs'


class GraphsPlugin(base_plugin.TBPlugin):
  """Graphs Plugin for TensorBoard."""

  plugin_name = _PLUGIN_PREFIX_ROUTE

  def get_plugin_apps(self, multiplexer, unused_logdir):
    self._multiplexer = multiplexer
    return {
        '/graph': self.graph_route,
        '/runs': self.runs_route,
        '/run_metadata': self.run_metadata_route,
        '/run_metadata_tags': self.run_metadata_tags_route,
    }

  def is_active(self):
    """The graphs plugin is active iff any run has a graph."""
    return bool(self.index_impl())

  def index_impl(self):
    """Returns a list of all runs that have a graph."""
    return [run_name
            for (run_name, run_data) in self._multiplexer.Runs().items()
            if run_data.get(event_accumulator.GRAPH)]

  def run_metadata_index_impl(self):
    """Returns a run-to-tag mapping for metadata."""
    return {
        run_name: run_data[event_accumulator.RUN_METADATA]
        for (run_name, run_data) in self._multiplexer.Runs().items()
        if event_accumulator.RUN_METADATA in run_data
    }

  def graph_impl(self, run, limit_attr_size=None, large_attrs_key=None):
    """Result of the form `(body, mime_type)`, or `None` if no graph exists."""
    try:
      graph = self._multiplexer.Graph(run)
    except ValueError:
      return None
    # This next line might raise a ValueError if the limit parameters
    # are invalid (size is negative, size present but key absent, etc.).
    process_graph.prepare_graph_for_ui(graph, limit_attr_size, large_attrs_key)
    return (str(graph), 'text/x-protobuf')  # pbtxt

  def run_metadata_impl(self, run, tag):
    """Result of the form `(body, mime_type)`, or `None` if no data exists."""
    try:
      run_metadata = self._multiplexer.RunMetadata(run, tag)
    except ValueError:
      return None
    return (str(run_metadata), 'text/x-protobuf')  # pbtxt

  @wrappers.Request.application
  def runs_route(self, request):
    index = self.index_impl()
    return http_util.Respond(request, index, 'application/json')

  @wrappers.Request.application
  def run_metadata_tags_route(self, request):
    index = self.run_metadata_index_impl()
    return http_util.Respond(request, index, 'application/json')

  @wrappers.Request.application
  def graph_route(self, request):
    """Given a single run, return the graph definition in protobuf format."""
    run = request.args.get('run')
    if run is None:
      return http_util.Respond(
          request, 'query parameter "run" is required', 'text/plain', 400)

    limit_attr_size = request.args.get('limit_attr_size', None)
    if limit_attr_size is not None:
      try:
        limit_attr_size = int(limit_attr_size)
      except ValueError:
        return http_util.Respond(
            request, 'query parameter `limit_attr_size` must be an integer',
            'text/plain', 400)

    large_attrs_key = request.args.get('large_attrs_key', None)

    try:
      result = self.graph_impl(run, limit_attr_size, large_attrs_key)
    except ValueError as e:
      return http_util.Respond(request, e.message, 'text/plain', code=400)
    else:
      if result is not None:
        (body, mime_type) = result  # pylint: disable=unpacking-non-sequence
        return http_util.Respond(request, body, mime_type)
      else:
        return http_util.Respond(request, '404 Not Found', 'text/plain',
                                 code=404)

  @wrappers.Request.application
  def run_metadata_route(self, request):
    """Given a tag and a run, return the session.run() metadata."""
    tag = request.args.get('tag')
    run = request.args.get('run')
    if tag is None:
      return http_util.Respond(
          request, 'query parameter "tag" is required', 'text/plain', 400)
    if run is None:
      return http_util.Respond(
          request, 'query parameter "run" is required', 'text/plain', 400)
    result = self.run_metadata_impl(run, tag)
    if result is not None:
      (body, mime_type) = result  # pylint: disable=unpacking-non-sequence
      return http_util.Respond(request, body, mime_type)
    else:
      return http_util.Respond(request, '404 Not Found', 'text/plain',
                               code=404)

tensorflow/tensorboard/plugins/graphs/graphs_plugin_test.py (new file, 142 lines)

@@ -0,0 +1,142 @@
# -*- coding: utf-8 -*-
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Integration tests for the Graphs Plugin."""

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import math
import os.path

import tensorflow as tf

from google.protobuf import text_format
from tensorflow.tensorboard.backend.event_processing import event_multiplexer
from tensorflow.tensorboard.plugins.graphs import graphs_plugin


class GraphsPluginTest(tf.test.TestCase):

  _RUN_WITH_GRAPH = '_RUN_WITH_GRAPH'
  _RUN_WITHOUT_GRAPH = '_RUN_WITHOUT_GRAPH'

  _METADATA_TAG = 'secret-stats'
  _MESSAGE_PREFIX_LENGTH_LOWER_BOUND = 1024

  def generate_run(self, run_name, include_graph):
    """Create a run with a text summary, metadata, and optionally a graph."""
    tf.reset_default_graph()
    k1 = tf.constant(math.pi, name='k1')
    k2 = tf.constant(math.e, name='k2')
    result = (k1 ** k2) - k1
    expected = tf.constant(20.0, name='expected')
    error = tf.abs(result - expected, name='error')
    message_prefix_value = 'error ' * 1000
    true_length = len(message_prefix_value)
    assert true_length > self._MESSAGE_PREFIX_LENGTH_LOWER_BOUND, true_length
    message_prefix = tf.constant(message_prefix_value, name='message_prefix')
    error_message = tf.string_join([message_prefix,
                                    tf.as_string(error, name='error_string')],
                                   name='error_message')
    summary_message = tf.summary.text('summary_message', error_message)

    sess = tf.Session()
    writer = tf.summary.FileWriter(os.path.join(self.logdir, run_name))
    if include_graph:
      writer.add_graph(sess.graph)
    options = tf.RunOptions(trace_level=tf.RunOptions.FULL_TRACE)
    run_metadata = tf.RunMetadata()
    s = sess.run(summary_message, options=options, run_metadata=run_metadata)
    writer.add_summary(s)
    writer.add_run_metadata(run_metadata, self._METADATA_TAG)
    writer.close()

  def set_up_with_runs(self, with_graph=True, without_graph=True):
    self.logdir = self.get_temp_dir()
    if with_graph:
      self.generate_run(self._RUN_WITH_GRAPH, include_graph=True)
    if without_graph:
      self.generate_run(self._RUN_WITHOUT_GRAPH, include_graph=False)
    multiplexer = event_multiplexer.EventMultiplexer()
    multiplexer.AddRunsFromDirectory(self.logdir)
    multiplexer.Reload()
    self.plugin = graphs_plugin.GraphsPlugin()
    self.plugin.get_plugin_apps(multiplexer, None)

  def test_index(self):
    self.set_up_with_runs()
    self.assertItemsEqual([self._RUN_WITH_GRAPH], self.plugin.index_impl())

  def test_run_metadata_index(self):
    self.set_up_with_runs()
    self.assertDictEqual({
        self._RUN_WITH_GRAPH: [self._METADATA_TAG],
        self._RUN_WITHOUT_GRAPH: [self._METADATA_TAG],
    }, self.plugin.run_metadata_index_impl())

  def _get_graph(self, *args, **kwargs):
    """Set up runs, then fetch and return the graph as a proto."""
    self.set_up_with_runs()
    (graph_pbtxt, mime_type) = self.plugin.graph_impl(
        self._RUN_WITH_GRAPH, *args, **kwargs)
    self.assertEqual(mime_type, 'text/x-protobuf')
    return text_format.Parse(graph_pbtxt, tf.GraphDef())

  def test_graph_simple(self):
    graph = self._get_graph()
    node_names = set(node.name for node in graph.node)
    self.assertEqual({'k1', 'k2', 'pow', 'sub', 'expected', 'sub_1', 'error',
                      'message_prefix', 'error_string', 'error_message',
                      'summary_message'},
                     node_names)

  def test_graph_large_attrs(self):
    key = 'o---;;-;'
    graph = self._get_graph(
        limit_attr_size=self._MESSAGE_PREFIX_LENGTH_LOWER_BOUND,
        large_attrs_key=key)
    large_attrs = {
        node.name: list(node.attr[key].list.s)
        for node in graph.node
        if key in node.attr
    }
    self.assertEqual({'message_prefix': [b'value']},
                     large_attrs)

  def test_run_metadata(self):
    self.set_up_with_runs()
    (metadata_pbtxt, mime_type) = self.plugin.run_metadata_impl(
        self._RUN_WITH_GRAPH, self._METADATA_TAG)
    self.assertEqual(mime_type, 'text/x-protobuf')
    text_format.Parse(metadata_pbtxt, tf.RunMetadata())
    # If it parses, we're happy.

  def test_is_active_with_graph(self):
    self.set_up_with_runs(with_graph=True, without_graph=False)
    self.assertTrue(self.plugin.is_active())

  def test_is_active_without_graph(self):
    self.set_up_with_runs(with_graph=False, without_graph=True)
    self.assertFalse(self.plugin.is_active())

  def test_is_active_with_both(self):
    self.set_up_with_runs(with_graph=True, without_graph=True)
    self.assertTrue(self.plugin.is_active())


if __name__ == '__main__':
  tf.test.main()

@@ -34,6 +34,7 @@ from tensorflow.tensorboard.backend import application
from tensorflow.tensorboard.backend.event_processing import event_file_inspector as efi
from tensorflow.tensorboard.plugins.audio import audio_plugin
from tensorflow.tensorboard.plugins.distributions import distributions_plugin
from tensorflow.tensorboard.plugins.graphs import graphs_plugin
from tensorflow.tensorboard.plugins.histograms import histograms_plugin
from tensorflow.tensorboard.plugins.images import images_plugin
from tensorflow.tensorboard.plugins.projector import projector_plugin

@@ -208,6 +209,7 @@ def main(unused_argv=None):
      scalars_plugin.ScalarsPlugin(),
      images_plugin.ImagesPlugin(),
      audio_plugin.AudioPlugin(),
      graphs_plugin.GraphsPlugin(),
      distributions_plugin.DistributionsPlugin(),
      histograms_plugin.HistogramsPlugin(),
      projector_plugin.ProjectorPlugin(),