Source code for lingvo.core.inference_graph_pb2

# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler.  DO NOT EDIT!
# source: lingvo/core/inference_graph.proto

import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
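# _b is the identity on Python 2 and a latin-1 str->bytes encoder on Python 3;
# it is used for the serialized descriptor blobs and options below.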
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)

_sym_db = _symbol_database.Default()


from google.protobuf import any_pb2 as google_dot_protobuf_dot_any__pb2
from tensorflow.core.framework import graph_pb2 as tensorflow_dot_core_dot_framework_dot_graph__pb2
from tensorflow.core.framework import types_pb2 as tensorflow_dot_core_dot_framework_dot_types__pb2
from tensorflow.core.protobuf import meta_graph_pb2 as tensorflow_dot_core_dot_protobuf_dot_meta__graph__pb2
from tensorflow.core.protobuf import saver_pb2 as tensorflow_dot_core_dot_protobuf_dot_saver__pb2


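# File-level descriptor for lingvo/core/inference_graph.proto.  serialized_pb
# is the wire-format FileDescriptorProto; the Descriptor objects that follow
# mirror its message and field layout.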
DESCRIPTOR = _descriptor.FileDescriptor(
  name='lingvo/core/inference_graph.proto',
  package='tensorflow.lingvo',
  syntax='proto2',
  serialized_options=None,
  serialized_pb=_b('\n!lingvo/core/inference_graph.proto\x12\x11tensorflow.lingvo\x1a\x19google/protobuf/any.proto\x1a%tensorflow/core/framework/graph.proto\x1a%tensorflow/core/framework/types.proto\x1a)tensorflow/core/protobuf/meta_graph.proto\x1a$tensorflow/core/protobuf/saver.proto\"\xde\x0c\n\x0eInferenceGraph\x12\'\n\tgraph_def\x18\x01 \x01(\x0b\x32\x14.tensorflow.GraphDef\x12\'\n\tsaver_def\x18\x02 \x01(\x0b\x32\x14.tensorflow.SaverDef\x12\x43\n\tsubgraphs\x18\x05 \x03(\x0b\x32\x30.tensorflow.lingvo.InferenceGraph.SubgraphsEntry\x12\x17\n\x0fhyperparameters\x18\x07 \x01(\t\x12\x30\n\x0e\x61sset_file_def\x18\n \x03(\x0b\x32\x18.tensorflow.AssetFileDef\x12L\n\x0e\x63ollection_def\x18\x0b \x03(\x0b\x32\x34.tensorflow.lingvo.InferenceGraph.CollectionDefEntry\x1a\xa0\x05\n\x08Subgraph\x12<\n\x04meta\x18\x01 \x01(\x0b\x32..tensorflow.lingvo.InferenceGraph.SubgraphMeta\x12\x44\n\x05\x66\x65\x65\x64s\x18\x02 \x03(\x0b\x32\x35.tensorflow.lingvo.InferenceGraph.Subgraph.FeedsEntry\x12H\n\x07\x66\x65tches\x18\x03 \x03(\x0b\x32\x37.tensorflow.lingvo.InferenceGraph.Subgraph.FetchesEntry\x12M\n\nfeeds_meta\x18\x04 \x03(\x0b\x32\x39.tensorflow.lingvo.InferenceGraph.Subgraph.FeedsMetaEntry\x12Q\n\x0c\x66\x65tches_meta\x18\x05 \x03(\x0b\x32;.tensorflow.lingvo.InferenceGraph.Subgraph.FetchesMetaEntry\x1a,\n\nFeedsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x1a.\n\x0c\x46\x65tchesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x1a\x61\n\x0e\x46\x65\x65\x64sMetaEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12>\n\x05value\x18\x02 \x01(\x0b\x32/.tensorflow.lingvo.InferenceGraph.FeedFetchMeta:\x02\x38\x01\x1a\x63\n\x10\x46\x65tchesMetaEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12>\n\x05value\x18\x02 \x01(\x0b\x32/.tensorflow.lingvo.InferenceGraph.FeedFetchMeta:\x02\x38\x01\x1a\\\n\x0eSubgraphsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x39\n\x05value\x18\x02 \x01(\x0b\x32*.tensorflow.lingvo.InferenceGraph.Subgraph:\x02\x38\x01\x1a\x38\n\x0cSubgraphMeta\x12(\n\nextensions\x18\x01 \x03(\x0b\x32\x14.google.protobuf.Any\x1a\xfd\x02\n\rFeedFetchMeta\x12(\n\nextensions\x18\x01 \x03(\x0b\x32\x14.google.protobuf.Any\x12\'\n\tdata_type\x18\x02 \x01(\x0e\x32\x14.tensorflow.DataType\x12\x11\n\x05shape\x18\x03 \x03(\x05\x42\x02\x10\x01\x12\x0e\n\x06layout\x18\x04 \x01(\t\x12\x1c\n\x14\x64ispatch_stride_axis\x18\r \x01(\x05\x12\x1a\n\x12quantized_num_bits\x18\x08 \x01(\x05\x12\x1d\n\x13quantized_min_const\x18\t \x01(\x01H\x00\x12\x1e\n\x14quantized_min_tensor\x18\n \x01(\tH\x00\x12\x1d\n\x13quantized_max_const\x18\x0b \x01(\x01H\x01\x12\x1e\n\x14quantized_max_tensor\x18\x0c \x01(\tH\x01\x42\x15\n\x13quantized_min_oneofB\x15\n\x13quantized_max_oneofJ\x04\x08\x05\x10\x06J\x04\x08\x06\x10\x07J\x04\x08\x07\x10\x08\x1aO\n\x12\x43ollectionDefEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12(\n\x05value\x18\x02 \x01(\x0b\x32\x19.tensorflow.CollectionDef:\x02\x38\x01J\x04\x08\x03\x10\x04J\x04\x08\x04\x10\x05J\x04\x08\x06\x10\x07')
  ,
  dependencies=[google_dot_protobuf_dot_any__pb2.DESCRIPTOR,tensorflow_dot_core_dot_framework_dot_graph__pb2.DESCRIPTOR,tensorflow_dot_core_dot_framework_dot_types__pb2.DESCRIPTOR,tensorflow_dot_core_dot_protobuf_dot_meta__graph__pb2.DESCRIPTOR,tensorflow_dot_core_dot_protobuf_dot_saver__pb2.DESCRIPTOR,])




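# Synthetic map-entry descriptors for Subgraph's map fields: feeds/fetches map
# feed/fetch names to tensor names (string -> string), while
# feeds_meta/fetches_meta map the same names to FeedFetchMeta messages.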
_INFERENCEGRAPH_SUBGRAPH_FEEDSENTRY = _descriptor.Descriptor(
  name='FeedsEntry',
  full_name='tensorflow.lingvo.InferenceGraph.Subgraph.FeedsEntry',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='key', full_name='tensorflow.lingvo.InferenceGraph.Subgraph.FeedsEntry.key', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='value', full_name='tensorflow.lingvo.InferenceGraph.Subgraph.FeedsEntry.value', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=_b('8\001'),
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=946,
  serialized_end=990,
)

_INFERENCEGRAPH_SUBGRAPH_FETCHESENTRY = _descriptor.Descriptor(
  name='FetchesEntry',
  full_name='tensorflow.lingvo.InferenceGraph.Subgraph.FetchesEntry',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='key', full_name='tensorflow.lingvo.InferenceGraph.Subgraph.FetchesEntry.key', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='value', full_name='tensorflow.lingvo.InferenceGraph.Subgraph.FetchesEntry.value', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=_b('8\001'),
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=992,
  serialized_end=1038,
)

_INFERENCEGRAPH_SUBGRAPH_FEEDSMETAENTRY = _descriptor.Descriptor(
  name='FeedsMetaEntry',
  full_name='tensorflow.lingvo.InferenceGraph.Subgraph.FeedsMetaEntry',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='key', full_name='tensorflow.lingvo.InferenceGraph.Subgraph.FeedsMetaEntry.key', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='value', full_name='tensorflow.lingvo.InferenceGraph.Subgraph.FeedsMetaEntry.value', index=1,
      number=2, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=_b('8\001'),
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=1040,
  serialized_end=1137,
)

_INFERENCEGRAPH_SUBGRAPH_FETCHESMETAENTRY = _descriptor.Descriptor(
  name='FetchesMetaEntry',
  full_name='tensorflow.lingvo.InferenceGraph.Subgraph.FetchesMetaEntry',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='key', full_name='tensorflow.lingvo.InferenceGraph.Subgraph.FetchesMetaEntry.key', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='value', full_name='tensorflow.lingvo.InferenceGraph.Subgraph.FetchesMetaEntry.value', index=1,
      number=2, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=_b('8\001'),
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=1139,
  serialized_end=1238,
)

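# A named inference subgraph: optional SubgraphMeta plus the feed/fetch maps
# and their per-entry FeedFetchMeta metadata.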
_INFERENCEGRAPH_SUBGRAPH = _descriptor.Descriptor(
  name='Subgraph',
  full_name='tensorflow.lingvo.InferenceGraph.Subgraph',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='meta', full_name='tensorflow.lingvo.InferenceGraph.Subgraph.meta', index=0,
      number=1, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='feeds', full_name='tensorflow.lingvo.InferenceGraph.Subgraph.feeds', index=1,
      number=2, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='fetches', full_name='tensorflow.lingvo.InferenceGraph.Subgraph.fetches', index=2,
      number=3, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='feeds_meta', full_name='tensorflow.lingvo.InferenceGraph.Subgraph.feeds_meta', index=3,
      number=4, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='fetches_meta', full_name='tensorflow.lingvo.InferenceGraph.Subgraph.fetches_meta', index=4,
      number=5, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[_INFERENCEGRAPH_SUBGRAPH_FEEDSENTRY, _INFERENCEGRAPH_SUBGRAPH_FETCHESENTRY, _INFERENCEGRAPH_SUBGRAPH_FEEDSMETAENTRY, _INFERENCEGRAPH_SUBGRAPH_FETCHESMETAENTRY, ],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=566,
  serialized_end=1238,
)

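# Synthetic map-entry descriptor for InferenceGraph.subgraphs
# (string -> Subgraph).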
_INFERENCEGRAPH_SUBGRAPHSENTRY = _descriptor.Descriptor(
  name='SubgraphsEntry',
  full_name='tensorflow.lingvo.InferenceGraph.SubgraphsEntry',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='key', full_name='tensorflow.lingvo.InferenceGraph.SubgraphsEntry.key', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='value', full_name='tensorflow.lingvo.InferenceGraph.SubgraphsEntry.value', index=1,
      number=2, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=_b('8\001'),
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=1240,
  serialized_end=1332,
)

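# Opaque per-subgraph metadata carried as google.protobuf.Any extensions.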
_INFERENCEGRAPH_SUBGRAPHMETA = _descriptor.Descriptor(
  name='SubgraphMeta',
  full_name='tensorflow.lingvo.InferenceGraph.SubgraphMeta',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='extensions', full_name='tensorflow.lingvo.InferenceGraph.SubgraphMeta.extensions', index=0,
      number=1, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=1334,
  serialized_end=1390,
)

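# Per-feed/fetch metadata: Any extensions, tensor dtype and (packed) shape,
# layout, dispatch stride axis, and quantization parameters whose min/max are
# each either a constant or a tensor name, selected via a oneof.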
_INFERENCEGRAPH_FEEDFETCHMETA = _descriptor.Descriptor(
  name='FeedFetchMeta',
  full_name='tensorflow.lingvo.InferenceGraph.FeedFetchMeta',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='extensions', full_name='tensorflow.lingvo.InferenceGraph.FeedFetchMeta.extensions', index=0,
      number=1, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='data_type', full_name='tensorflow.lingvo.InferenceGraph.FeedFetchMeta.data_type', index=1,
      number=2, type=14, cpp_type=8, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='shape', full_name='tensorflow.lingvo.InferenceGraph.FeedFetchMeta.shape', index=2,
      number=3, type=5, cpp_type=1, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=_b('\020\001'), file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='layout', full_name='tensorflow.lingvo.InferenceGraph.FeedFetchMeta.layout', index=3,
      number=4, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='dispatch_stride_axis', full_name='tensorflow.lingvo.InferenceGraph.FeedFetchMeta.dispatch_stride_axis', index=4,
      number=13, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='quantized_num_bits', full_name='tensorflow.lingvo.InferenceGraph.FeedFetchMeta.quantized_num_bits', index=5,
      number=8, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='quantized_min_const', full_name='tensorflow.lingvo.InferenceGraph.FeedFetchMeta.quantized_min_const', index=6,
      number=9, type=1, cpp_type=5, label=1,
      has_default_value=False, default_value=float(0),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='quantized_min_tensor', full_name='tensorflow.lingvo.InferenceGraph.FeedFetchMeta.quantized_min_tensor', index=7,
      number=10, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='quantized_max_const', full_name='tensorflow.lingvo.InferenceGraph.FeedFetchMeta.quantized_max_const', index=8,
      number=11, type=1, cpp_type=5, label=1,
      has_default_value=False, default_value=float(0),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='quantized_max_tensor', full_name='tensorflow.lingvo.InferenceGraph.FeedFetchMeta.quantized_max_tensor', index=9,
      number=12, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
    _descriptor.OneofDescriptor(
      name='quantized_min_oneof', full_name='tensorflow.lingvo.InferenceGraph.FeedFetchMeta.quantized_min_oneof',
      index=0, containing_type=None, fields=[]),
    _descriptor.OneofDescriptor(
      name='quantized_max_oneof', full_name='tensorflow.lingvo.InferenceGraph.FeedFetchMeta.quantized_max_oneof',
      index=1, containing_type=None, fields=[]),
  ],
  serialized_start=1393,
  serialized_end=1774,
)

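# Synthetic map-entry descriptor for InferenceGraph.collection_def
# (string -> tensorflow.CollectionDef).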
_INFERENCEGRAPH_COLLECTIONDEFENTRY = _descriptor.Descriptor(
  name='CollectionDefEntry',
  full_name='tensorflow.lingvo.InferenceGraph.CollectionDefEntry',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='key', full_name='tensorflow.lingvo.InferenceGraph.CollectionDefEntry.key', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='value', full_name='tensorflow.lingvo.InferenceGraph.CollectionDefEntry.value', index=1,
      number=2, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=_b('8\001'),
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=1776,
  serialized_end=1855,
)

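# Top-level InferenceGraph message: the GraphDef and SaverDef, the map of
# named subgraphs, serialized hyperparameters, asset file definitions, and
# collection defs.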
_INFERENCEGRAPH = _descriptor.Descriptor(
  name='InferenceGraph',
  full_name='tensorflow.lingvo.InferenceGraph',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='graph_def', full_name='tensorflow.lingvo.InferenceGraph.graph_def', index=0,
      number=1, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='saver_def', full_name='tensorflow.lingvo.InferenceGraph.saver_def', index=1,
      number=2, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='subgraphs', full_name='tensorflow.lingvo.InferenceGraph.subgraphs', index=2,
      number=5, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='hyperparameters', full_name='tensorflow.lingvo.InferenceGraph.hyperparameters', index=3,
      number=7, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='asset_file_def', full_name='tensorflow.lingvo.InferenceGraph.asset_file_def', index=4,
      number=10, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='collection_def', full_name='tensorflow.lingvo.InferenceGraph.collection_def', index=5,
      number=11, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[_INFERENCEGRAPH_SUBGRAPH, _INFERENCEGRAPH_SUBGRAPHSENTRY, _INFERENCEGRAPH_SUBGRAPHMETA, _INFERENCEGRAPH_FEEDFETCHMETA, _INFERENCEGRAPH_COLLECTIONDEFENTRY, ],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=243,
  serialized_end=1873,
)

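# Resolve cross-references that were left as None while the descriptors above
# were being constructed: field message/enum types, containing types, and
# oneof membership.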
_INFERENCEGRAPH_SUBGRAPH_FEEDSENTRY.containing_type = _INFERENCEGRAPH_SUBGRAPH
_INFERENCEGRAPH_SUBGRAPH_FETCHESENTRY.containing_type = _INFERENCEGRAPH_SUBGRAPH
_INFERENCEGRAPH_SUBGRAPH_FEEDSMETAENTRY.fields_by_name['value'].message_type = _INFERENCEGRAPH_FEEDFETCHMETA
_INFERENCEGRAPH_SUBGRAPH_FEEDSMETAENTRY.containing_type = _INFERENCEGRAPH_SUBGRAPH
_INFERENCEGRAPH_SUBGRAPH_FETCHESMETAENTRY.fields_by_name['value'].message_type = _INFERENCEGRAPH_FEEDFETCHMETA
_INFERENCEGRAPH_SUBGRAPH_FETCHESMETAENTRY.containing_type = _INFERENCEGRAPH_SUBGRAPH
_INFERENCEGRAPH_SUBGRAPH.fields_by_name['meta'].message_type = _INFERENCEGRAPH_SUBGRAPHMETA
_INFERENCEGRAPH_SUBGRAPH.fields_by_name['feeds'].message_type = _INFERENCEGRAPH_SUBGRAPH_FEEDSENTRY
_INFERENCEGRAPH_SUBGRAPH.fields_by_name['fetches'].message_type = _INFERENCEGRAPH_SUBGRAPH_FETCHESENTRY
_INFERENCEGRAPH_SUBGRAPH.fields_by_name['feeds_meta'].message_type = _INFERENCEGRAPH_SUBGRAPH_FEEDSMETAENTRY
_INFERENCEGRAPH_SUBGRAPH.fields_by_name['fetches_meta'].message_type = _INFERENCEGRAPH_SUBGRAPH_FETCHESMETAENTRY
_INFERENCEGRAPH_SUBGRAPH.containing_type = _INFERENCEGRAPH
_INFERENCEGRAPH_SUBGRAPHSENTRY.fields_by_name['value'].message_type = _INFERENCEGRAPH_SUBGRAPH
_INFERENCEGRAPH_SUBGRAPHSENTRY.containing_type = _INFERENCEGRAPH
_INFERENCEGRAPH_SUBGRAPHMETA.fields_by_name['extensions'].message_type = google_dot_protobuf_dot_any__pb2._ANY
_INFERENCEGRAPH_SUBGRAPHMETA.containing_type = _INFERENCEGRAPH
_INFERENCEGRAPH_FEEDFETCHMETA.fields_by_name['extensions'].message_type = google_dot_protobuf_dot_any__pb2._ANY
_INFERENCEGRAPH_FEEDFETCHMETA.fields_by_name['data_type'].enum_type = tensorflow_dot_core_dot_framework_dot_types__pb2._DATATYPE
_INFERENCEGRAPH_FEEDFETCHMETA.containing_type = _INFERENCEGRAPH
_INFERENCEGRAPH_FEEDFETCHMETA.oneofs_by_name['quantized_min_oneof'].fields.append(
  _INFERENCEGRAPH_FEEDFETCHMETA.fields_by_name['quantized_min_const'])
_INFERENCEGRAPH_FEEDFETCHMETA.fields_by_name['quantized_min_const'].containing_oneof = _INFERENCEGRAPH_FEEDFETCHMETA.oneofs_by_name['quantized_min_oneof']
_INFERENCEGRAPH_FEEDFETCHMETA.oneofs_by_name['quantized_min_oneof'].fields.append(
  _INFERENCEGRAPH_FEEDFETCHMETA.fields_by_name['quantized_min_tensor'])
_INFERENCEGRAPH_FEEDFETCHMETA.fields_by_name['quantized_min_tensor'].containing_oneof = _INFERENCEGRAPH_FEEDFETCHMETA.oneofs_by_name['quantized_min_oneof']
_INFERENCEGRAPH_FEEDFETCHMETA.oneofs_by_name['quantized_max_oneof'].fields.append(
  _INFERENCEGRAPH_FEEDFETCHMETA.fields_by_name['quantized_max_const'])
_INFERENCEGRAPH_FEEDFETCHMETA.fields_by_name['quantized_max_const'].containing_oneof = _INFERENCEGRAPH_FEEDFETCHMETA.oneofs_by_name['quantized_max_oneof']
_INFERENCEGRAPH_FEEDFETCHMETA.oneofs_by_name['quantized_max_oneof'].fields.append(
  _INFERENCEGRAPH_FEEDFETCHMETA.fields_by_name['quantized_max_tensor'])
_INFERENCEGRAPH_FEEDFETCHMETA.fields_by_name['quantized_max_tensor'].containing_oneof = _INFERENCEGRAPH_FEEDFETCHMETA.oneofs_by_name['quantized_max_oneof']
_INFERENCEGRAPH_COLLECTIONDEFENTRY.fields_by_name['value'].message_type = tensorflow_dot_core_dot_protobuf_dot_meta__graph__pb2._COLLECTIONDEF
_INFERENCEGRAPH_COLLECTIONDEFENTRY.containing_type = _INFERENCEGRAPH
_INFERENCEGRAPH.fields_by_name['graph_def'].message_type = tensorflow_dot_core_dot_framework_dot_graph__pb2._GRAPHDEF
_INFERENCEGRAPH.fields_by_name['saver_def'].message_type = tensorflow_dot_core_dot_protobuf_dot_saver__pb2._SAVERDEF
_INFERENCEGRAPH.fields_by_name['subgraphs'].message_type = _INFERENCEGRAPH_SUBGRAPHSENTRY
_INFERENCEGRAPH.fields_by_name['asset_file_def'].message_type = tensorflow_dot_core_dot_protobuf_dot_meta__graph__pb2._ASSETFILEDEF
_INFERENCEGRAPH.fields_by_name['collection_def'].message_type = _INFERENCEGRAPH_COLLECTIONDEFENTRY
DESCRIPTOR.message_types_by_name['InferenceGraph'] = _INFERENCEGRAPH
_sym_db.RegisterFileDescriptor(DESCRIPTOR)

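# Build the concrete message classes from their descriptors via the reflection
# metaclass and register them with the default symbol database so they can be
# looked up by full name.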
InferenceGraph = _reflection.GeneratedProtocolMessageType('InferenceGraph', (_message.Message,), {

  'Subgraph' : _reflection.GeneratedProtocolMessageType('Subgraph', (_message.Message,), {

    'FeedsEntry' : _reflection.GeneratedProtocolMessageType('FeedsEntry', (_message.Message,), {
      'DESCRIPTOR' : _INFERENCEGRAPH_SUBGRAPH_FEEDSENTRY,
      '__module__' : 'lingvo.core.inference_graph_pb2'
      # @@protoc_insertion_point(class_scope:tensorflow.lingvo.InferenceGraph.Subgraph.FeedsEntry)
      })
    ,

    'FetchesEntry' : _reflection.GeneratedProtocolMessageType('FetchesEntry', (_message.Message,), {
      'DESCRIPTOR' : _INFERENCEGRAPH_SUBGRAPH_FETCHESENTRY,
      '__module__' : 'lingvo.core.inference_graph_pb2'
      # @@protoc_insertion_point(class_scope:tensorflow.lingvo.InferenceGraph.Subgraph.FetchesEntry)
      })
    ,

    'FeedsMetaEntry' : _reflection.GeneratedProtocolMessageType('FeedsMetaEntry', (_message.Message,), {
      'DESCRIPTOR' : _INFERENCEGRAPH_SUBGRAPH_FEEDSMETAENTRY,
      '__module__' : 'lingvo.core.inference_graph_pb2'
      # @@protoc_insertion_point(class_scope:tensorflow.lingvo.InferenceGraph.Subgraph.FeedsMetaEntry)
      })
    ,

    'FetchesMetaEntry' : _reflection.GeneratedProtocolMessageType('FetchesMetaEntry', (_message.Message,), {
      'DESCRIPTOR' : _INFERENCEGRAPH_SUBGRAPH_FETCHESMETAENTRY,
      '__module__' : 'lingvo.core.inference_graph_pb2'
      # @@protoc_insertion_point(class_scope:tensorflow.lingvo.InferenceGraph.Subgraph.FetchesMetaEntry)
      })
    ,
    'DESCRIPTOR' : _INFERENCEGRAPH_SUBGRAPH,
    '__module__' : 'lingvo.core.inference_graph_pb2'
    # @@protoc_insertion_point(class_scope:tensorflow.lingvo.InferenceGraph.Subgraph)
    })
  ,

  'SubgraphsEntry' : _reflection.GeneratedProtocolMessageType('SubgraphsEntry', (_message.Message,), {
    'DESCRIPTOR' : _INFERENCEGRAPH_SUBGRAPHSENTRY,
    '__module__' : 'lingvo.core.inference_graph_pb2'
    # @@protoc_insertion_point(class_scope:tensorflow.lingvo.InferenceGraph.SubgraphsEntry)
    })
  ,

  'SubgraphMeta' : _reflection.GeneratedProtocolMessageType('SubgraphMeta', (_message.Message,), {
    'DESCRIPTOR' : _INFERENCEGRAPH_SUBGRAPHMETA,
    '__module__' : 'lingvo.core.inference_graph_pb2'
    # @@protoc_insertion_point(class_scope:tensorflow.lingvo.InferenceGraph.SubgraphMeta)
    })
  ,

  'FeedFetchMeta' : _reflection.GeneratedProtocolMessageType('FeedFetchMeta', (_message.Message,), {
    'DESCRIPTOR' : _INFERENCEGRAPH_FEEDFETCHMETA,
    '__module__' : 'lingvo.core.inference_graph_pb2'
    # @@protoc_insertion_point(class_scope:tensorflow.lingvo.InferenceGraph.FeedFetchMeta)
    })
  ,

  'CollectionDefEntry' : _reflection.GeneratedProtocolMessageType('CollectionDefEntry', (_message.Message,), {
    'DESCRIPTOR' : _INFERENCEGRAPH_COLLECTIONDEFENTRY,
    '__module__' : 'lingvo.core.inference_graph_pb2'
    # @@protoc_insertion_point(class_scope:tensorflow.lingvo.InferenceGraph.CollectionDefEntry)
    })
  ,
  'DESCRIPTOR' : _INFERENCEGRAPH,
  '__module__' : 'lingvo.core.inference_graph_pb2'
  # @@protoc_insertion_point(class_scope:tensorflow.lingvo.InferenceGraph)
  })
_sym_db.RegisterMessage(InferenceGraph)
_sym_db.RegisterMessage(InferenceGraph.Subgraph)
_sym_db.RegisterMessage(InferenceGraph.Subgraph.FeedsEntry)
_sym_db.RegisterMessage(InferenceGraph.Subgraph.FetchesEntry)
_sym_db.RegisterMessage(InferenceGraph.Subgraph.FeedsMetaEntry)
_sym_db.RegisterMessage(InferenceGraph.Subgraph.FetchesMetaEntry)
_sym_db.RegisterMessage(InferenceGraph.SubgraphsEntry)
_sym_db.RegisterMessage(InferenceGraph.SubgraphMeta)
_sym_db.RegisterMessage(InferenceGraph.FeedFetchMeta)
_sym_db.RegisterMessage(InferenceGraph.CollectionDefEntry)


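# Reset cached options so that the map-entry options and the packed option on
# FeedFetchMeta.shape are parsed lazily from serialized_options on first
# access.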
_INFERENCEGRAPH_SUBGRAPH_FEEDSENTRY._options = None
_INFERENCEGRAPH_SUBGRAPH_FETCHESENTRY._options = None
_INFERENCEGRAPH_SUBGRAPH_FEEDSMETAENTRY._options = None
_INFERENCEGRAPH_SUBGRAPH_FETCHESMETAENTRY._options = None
_INFERENCEGRAPH_SUBGRAPHSENTRY._options = None
_INFERENCEGRAPH_FEEDFETCHMETA.fields_by_name['shape']._options = None
_INFERENCEGRAPH_COLLECTIONDEFENTRY._options = None
# @@protoc_insertion_point(module_scope)
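
# ---------------------------------------------------------------------------
# Usage sketch (not part of the protoc output): builds a small InferenceGraph,
# round-trips it through serialization, and reads a feed back out.  The
# subgraph name 'default', the tensor names, and the dtype/shape values are
# illustrative assumptions, not values required by the proto.
if __name__ == '__main__':
  graph = InferenceGraph()
  subgraph = graph.subgraphs['default']            # map<string, Subgraph>; created on access
  subgraph.feeds['input_ids'] = 'input_ids:0'      # map<string, string>
  subgraph.fetches['logits'] = 'softmax/logits:0'

  meta = subgraph.fetches_meta['logits']           # map<string, FeedFetchMeta>
  meta.data_type = tensorflow_dot_core_dot_framework_dot_types__pb2.DT_FLOAT
  meta.shape.extend([1, 128])                      # packed repeated int32

  payload = graph.SerializeToString()
  restored = InferenceGraph.FromString(payload)
  assert restored.subgraphs['default'].feeds['input_ids'] == 'input_ids:0'
  assert list(restored.subgraphs['default'].fetches_meta['logits'].shape) == [1, 128]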