Models: attention_ocr: How to export a SavedModel for TensorFlow Serving?

Created on 19 Sep 2018 · 3 comments · Source: tensorflow/models

I successfully trained Attention OCR (https://github.com/tensorflow/models/tree/master/research/attention_ocr) on my custom dataset.

Now, for the inference part, I want to export a SavedModel for TensorFlow Serving but don't know how. The instructions in the docs are pretty vague:

The inference part was not released yet, but it is pretty straightforward to implement one in Python or C++.

Can you provide a demo export script, just like the Object Detection API's export_inference_graph.py (https://github.com/tensorflow/models/blob/master/research/object_detection/export_inference_graph.py)?

All 3 comments

Thank you for your post. We noticed you have not filled out the following fields in the issue template. Could you update them if they are relevant in your case, or leave them as N/A? Thanks.
What is the top-level directory of the model you are using
Have I written custom code
OS Platform and Distribution
TensorFlow installed from
TensorFlow version
Bazel version
CUDA/cuDNN version
GPU model and memory
Exact command to reproduce

I managed to solve this problem myself :joy:

# export.py
import tensorflow as tf
from tensorflow.python.platform import flags

import common_flags
import data_provider
import datasets

FLAGS = flags.FLAGS
common_flags.define()

flags.DEFINE_string('saved_dir', './saved/1',
                    'Directory (including the version number) to write the SavedModel to.')


def get_dataset_image_shape(dataset_name):
    """Returns the (height, width, channels) configured for the dataset."""
    ds_module = getattr(datasets, dataset_name)
    height, width, channel = ds_module.DEFAULT_CONFIG['image_shape']
    return height, width, channel


def create_model(dataset_name):
    """Builds the inference graph and returns the input placeholder and endpoints."""
    height, width, channel = get_dataset_image_shape(dataset_name)
    dataset = common_flags.create_dataset(split_name=FLAGS.split_name)

    model = common_flags.create_model(
        num_char_classes=dataset.num_char_classes,
        seq_length=dataset.max_sequence_length,
        num_views=dataset.num_of_views,
        null_code=dataset.null_code,
        charset=dataset.charset)

    # The serving input is a single raw uint8 image; preprocessing is baked
    # into the graph so clients can send unnormalized pixels.
    raw_images = tf.placeholder(tf.uint8, shape=[1, height, width, channel])
    images = tf.map_fn(data_provider.preprocess_image, raw_images,
                       dtype=tf.float32)
    endpoints = model.create_base(images, labels_one_hot=None)
    return raw_images, endpoints


def main(_):
    images_placeholder, endpoints = create_model(FLAGS.dataset_name)

    with tf.Session() as sess:
        # Restore the trained weights from the training checkpoint.
        tf.train.Saver().restore(sess, FLAGS.checkpoint)
        inputs = {'input': tf.saved_model.utils.build_tensor_info(images_placeholder)}
        # The ReduceJoin op concatenates the predicted characters into strings.
        out_classes = sess.graph.get_tensor_by_name('AttentionOcr_v1/ReduceJoin:0')
        outputs = {'output': tf.saved_model.utils.build_tensor_info(out_classes)}

        signature = tf.saved_model.signature_def_utils.build_signature_def(
            inputs=inputs,
            outputs=outputs,
            method_name=tf.saved_model.signature_constants.PREDICT_METHOD_NAME)

        # The charset lookup table must be initialized when the model is loaded.
        legacy_init_op = tf.group(tf.tables_initializer(), name='legacy_init_op')

        # Write out the SavedModel.
        builder = tf.saved_model.builder.SavedModelBuilder(FLAGS.saved_dir)
        builder.add_meta_graph_and_variables(
            sess, [tf.saved_model.tag_constants.SERVING],
            signature_def_map={
                tf.saved_model.signature_constants.
                DEFAULT_SERVING_SIGNATURE_DEF_KEY:
                    signature
            },
            legacy_init_op=legacy_init_op)
        builder.save()

    print('Model is saved to', FLAGS.saved_dir)


if __name__ == '__main__':
    tf.app.run()

You run it like this:

python export.py --checkpoint /tmp/attention_ocr/train/model.ckpt-123456 --saved_dir ./saved/attention_ocr/1
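To serve the export, you can then point tensorflow_model_server at the parent directory. This is a rough sketch, assuming the tensorflow_model_server binary is installed; --model_base_path must be an absolute path, and Serving will pick up version 1 automatically:

tensorflow_model_server --rest_api_port=8501 --model_name=attention_ocr --model_base_path="$(pwd)/saved/attention_ocr"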

And there you have it: a versioned SavedModel at ./saved/attention_ocr/1, ready for TensorFlow Serving :)
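If you want to query the served model afterwards, here is a minimal client sketch against the Serving REST API. It assumes the server is running with the name attention_ocr on REST port 8501, and it uses a hypothetical test.png plus the 150x600x3 FSNS image shape as an example; swap in your own file and your dataset's image_shape.

# client.py - minimal sketch of a REST client for the exported model.
# Assumes TensorFlow Serving was started with --rest_api_port=8501 and
# --model_name=attention_ocr; adjust the image size to your dataset config.
import json

import numpy as np
import requests
from PIL import Image

SERVER_URL = 'http://localhost:8501/v1/models/attention_ocr:predict'

# Load a test image and resize it to the shape the placeholder expects
# (150x600x3 here, which is the FSNS default; change for a custom dataset).
image = Image.open('test.png').convert('RGB').resize((600, 150))
pixels = np.asarray(image, dtype=np.uint8)

# The exported signature takes a single raw uint8 image under the 'input' key;
# wrapping pixels.tolist() in a list adds the batch dimension of 1.
request = {'inputs': {'input': [pixels.tolist()]}}
response = requests.post(SERVER_URL, data=json.dumps(request))
response.raise_for_status()

# The 'output' tensor holds the predicted text (string outputs may come back
# base64-encoded depending on the Serving version).
print(response.json())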

I'll close this issue for now.

Hello all, just follow the video below and export your own model within 10 seconds:

https://youtu.be/w0Ebsbz7HYA
