You must feed a value for placeholder tensor 'input_example_tensor' with dtype string and shape [1]

1.4k views Asked by At

I am developing a TensorFlow Serving client/server application using the chatbot-retrieval project.

My code has two parts, namely serving part and client part.

Below is the code snippet for the serving parts.

def get_features(context, utterance):
  """Assemble the feature dict consumed by the estimator.

  Both length features are fixed at 50 tokens, matching the
  FixedLenFeature(shape=[50]) specs used when parsing the examples.
  """
  fixed_len = 50  # sequence length shared by context and utterance
  return {
    "context": context,
    "context_len": tf.constant(fixed_len, shape=[1,1], dtype=tf.int64),
    "utterance": utterance,
    "utterance_len": tf.constant(fixed_len, shape=[1,1], dtype=tf.int64),
  }


def my_input_fn(estimator, input_example_tensor ):
      """Deprecated-style serving input_fn (use_deprecated_input_fn=True).

      Parses serialized tf.Example protos from `input_example_tensor`
      into fixed-length (50) int64 'context'/'utterance' tensors and
      builds the feature dict via get_features. The `estimator`
      argument is unused in this body.

      NOTE(review): per the resolution later in this file, this
      deprecated input_fn form is what leaves the
      'input_example_tensor' placeholder unfed at serving time; the
      working variant uses use_deprecated_input_fn=False.
      """
      feature_configs = {
              'context':tf.FixedLenFeature(shape=[50], dtype=tf.int64),
              'utterance':tf.FixedLenFeature(shape=[50], dtype=tf.int64)
              }
      tf_example = tf.parse_example(input_example_tensor, feature_configs)
      context = tf.identity(tf_example['context'], name='context')
      utterance = tf.identity(tf_example['utterance'], name='utterance')
      features = get_features(context, utterance)
      return features

def my_signature_fn(input_example_tensor, features, predictions):
  """Build the default (regression) and named graph signatures.

  Re-parses the serialized examples so the named 'inputs' signature can
  expose the individual 'context'/'utterance' tensors.

  NOTE(review): this duplicates the parsing already done in
  my_input_fn — confirm the exported graph is intended to contain a
  second parse_example node.
  """
  feature_configs = {
          'context':tf.FixedLenFeature(shape=[50], dtype=tf.int64),
          'utterance':tf.FixedLenFeature(shape=[50], dtype=tf.int64)
          }

  tf_example = tf.parse_example(input_example_tensor, feature_configs)
  # Fixed copy-paste defect: the context tensor was named
  # 'tf_context_utterance'; use 'tf_context' to mirror 'tf_utterance'.
  tf_context = tf.identity(tf_example['context'], name='tf_context')
  tf_utterance = tf.identity(tf_example['utterance'], name='tf_utterance')

  # Default signature: regression over the raw serialized-example input.
  default_graph_signature = exporter.regression_signature(
              input_tensor=input_example_tensor,
              output_tensor=tf.identity(predictions)
              )

  # Named signatures: parsed inputs in, model scores out.
  named_graph_signatures = {
              'inputs':exporter.generic_signature(
                  {
                      'context':tf_context,
                      'utterance':tf_utterance
                  }
               ),
              'outputs':exporter.generic_signature(
                  {
                      'scores':predictions
                  }
               )
              }

  return default_graph_signature, named_graph_signatures

def main():
      """Train the estimator, then export it via the deprecated input_fn path."""
      ##preliminary codes here##

      estimator.fit(input_fn=input_fn_train, steps=100, monitors=[eval_monitor])

      # NOTE(review): use_deprecated_input_fn=True is what produces the
      # unfed 'input_example_tensor' placeholder error described below;
      # the working version later in this file switches it to False.
      estimator.export(
              export_dir = FLAGS.export_dir,
              input_fn = my_input_fn,
              use_deprecated_input_fn = True,
              signature_fn = my_signature_fn,
              exports_to_keep = 1
              )

Below is the code snippet for the client part (excerpted from a larger inference routine).

def tokenizer_fn(iterator):
    """Lazily split each string in *iterator* on single spaces.

    Yields one token list per input string. Note split(" ") keeps
    empty tokens for consecutive spaces, unlike split().
    """
    for text in iterator:
        yield text.split(" ")

    vp = tf.contrib.learn.preprocessing.VocabularyProcessor.restore(FLAGS.vocab_processor_file)

input_context = "biz banka kart farkli bir banka atmsinde para"
input_utterance = "farkli banka kart biz banka atmsinde para"

context_feature = np.array(list(vp.transform([input_context])))
utterance_feature = np.array(list(vp.transform([input_utterance])))

context_tensor = tf.contrib.util.make_tensor_proto(context_feature, shape=[1, context_feature.size])
utterance_tensor = tf.contrib.util.make_tensor_proto(context_feature, shape=[1, context_feature.size])

request.inputs['context'].CopyFrom(context_tensor)
request.inputs['utterance'].CopyFrom(utterance_tensor)

result_counter.throttle()
result_future = stub.Predict.future(request, 5.0)  # 5 seconds
result_future.add_done_callback(
_create_rpc_callback(label[0], result_counter))
   return result_counter.get_error_rate()

Both the serving and client parts build with no error. After running the serving application and then the client application, I get the following strange error propagated to the client application when the RPC call completes.

Below is the error I get when rpc call completes

AbortionError(code=StatusCode.INVALID_ARGUMENT, details="You must feed a value for placeholder tensor 'input_example_tensor' with dtype string and shape [1]
         [[Node: input_example_tensor = Placeholder[_output_shapes=[[1]], dtype=DT_STRING, shape=[1], _device="/job:localhost/replica:0/task:0/cpu:0"]()]]")

The error is strange since there seems to be no way to feed the placeholder from the client application.

How can I provide data for the placeholder 'input_example_tensor' if I am accessing the model through tensorflow serving?

ANSWER: (I posted my answer here since I couldn't post it as an answer due to lack of StackOverflow badges. Anyone who is volunteer to submit it as his/her answer to the question is more than welcome. I will approve it as the answer.)

I could resolve the problem by using the option use_deprecated_input_fn = False in estimator.export function and change the input signatures accordingly.

Below is the final code which is running with no problem.

def get_features(input_example_tensor, context, utterance):
  """Assemble the estimator's feature dict for the non-deprecated export.

  The raw serialized-example placeholder is included under the
  "my_input_example_tensor" key so estimator.export(...,
  input_feature_key="my_input_example_tensor") can locate it.
  """
  fixed_len = 50  # sequence length shared by context and utterance
  features = {}
  features["my_input_example_tensor"] = input_example_tensor
  features["context"] = context
  features["context_len"] = tf.constant(fixed_len, shape=[1,1], dtype=tf.int64)
  features["utterance"] = utterance
  features["utterance_len"] = tf.constant(fixed_len, shape=[1,1], dtype=tf.int64)
  return features

def my_input_fn():
  """Serving input_fn for use_deprecated_input_fn=False.

  Creates the string placeholder itself, parses serialized tf.Example
  protos into fixed-length (50) int64 features, and returns
  (features, None) — there are no labels at serving time.
  """
  serialized = tf.placeholder(tf.string, name='tf_example_placeholder')

  spec = {
          'context':tf.FixedLenFeature(shape=[50], dtype=tf.int64),
          'utterance':tf.FixedLenFeature(shape=[50], dtype=tf.int64)
          }
  parsed = tf.parse_example(serialized, spec)
  context = tf.identity(parsed['context'], name='context')
  utterance = tf.identity(parsed['utterance'], name='utterance')

  return get_features(serialized, context, utterance), None

def my_signature_fn(input_example_tensor, features, predictions):
  """Build the default (regression) and named graph signatures.

  Unlike the deprecated variant, this reuses the already-parsed
  tensors from `features` instead of re-parsing the examples.
  """
  regression = exporter.regression_signature(
      input_tensor=input_example_tensor,
      output_tensor=predictions
      )

  inputs_signature = exporter.generic_signature({
      'context': features['context'],
      'utterance': features['utterance']
      })
  outputs_signature = exporter.generic_signature({
      'scores': predictions
      })

  return regression, {'inputs': inputs_signature, 'outputs': outputs_signature}

def main():
  """Train the estimator, then export it via the non-deprecated input_fn path."""
  ##preliminary codes here##

  estimator.fit(input_fn=input_fn_train, steps=100, monitors=[eval_monitor])

  # HACK(review): sets a private attribute so export() has target-
  # signature information — presumably needed because fit()'s targets
  # are not visible here; confirm against tf.contrib.learn internals,
  # as this may break across TF versions.
  estimator._targets_info = tf.contrib.learn.estimators.tensor_signature.TensorSignature(tf.constant(0, shape=[1,1]))

  # use_deprecated_input_fn=False plus input_feature_key is the fix
  # for the unfed 'input_example_tensor' placeholder error.
  estimator.export(
          export_dir = FLAGS.export_dir,
          input_fn = my_input_fn,
          input_feature_key ="my_input_example_tensor",
          use_deprecated_input_fn = False,
          signature_fn = my_signature_fn,
          exports_to_keep = 1
          )
1

There is 1 answer

0
dga On BEST ANSWER

OP self-solved but couldn't self-answer, so here's their answer:

Problem was fixed by using the option use_deprecated_input_fn = False in estimator.export function and changing the input signatures accordingly:

def my_signature_fn(input_example_tensor, features, predictions):   
  """Build the default (regression) and named graph signatures.

  Uses the already-parsed tensors from `features` — no second
  parse_example pass, unlike the original deprecated variant.
  """
  # Default signature: regression over the raw serialized-example input.
  default_graph_signature = exporter.regression_signature(
      input_tensor=input_example_tensor,
      output_tensor=predictions
      )

  # Named signatures: parsed inputs in, model scores out.
  named_graph_signatures = {
      'inputs':exporter.generic_signature(
          {
          'context':features['context'],
          'utterance':features['utterance']
          }
       ),
      'outputs':exporter.generic_signature(
          {
          'scores':predictions
          }
       )
      }

  return default_graph_signature, named_graph_signatures

def main():
  """Train the estimator, then export it via the non-deprecated input_fn path."""
  ##preliminary codes here##

  estimator.fit(input_fn=input_fn_train, steps=100, monitors=[eval_monitor])

  # HACK(review): sets a private attribute so export() has target-
  # signature information — presumably needed because fit()'s targets
  # are not visible here; confirm against tf.contrib.learn internals.
  estimator._targets_info = tf.contrib.learn.estimators.tensor_signature.TensorSignature(tf.constant(0, shape=[1,1]))

  # use_deprecated_input_fn=False plus input_feature_key is the fix
  # for the unfed 'input_example_tensor' placeholder error.
  estimator.export(
      export_dir = FLAGS.export_dir,
      input_fn = my_input_fn,
      input_feature_key ="my_input_example_tensor",
      use_deprecated_input_fn = False,
      signature_fn = my_signature_fn,
      exports_to_keep = 1
      )