TensorFlow Serving: output the top prediction result as b64



I have a Keras model that I converted to a TensorFlow Serving model. I can successfully convert my pretrained Keras model to take b64 input, preprocess that input, and feed it to the model. My problem is that I don't know how to take the prediction data I get back (which is huge) and export only the top result. I'm doing image segmentation, so my output prediction has shape (?, 473, 473, 3), and I'd like to get the top result and return it in b64-encoded format. What I currently have just returns the entire prediction:

import os
import shutil

import tensorflow as tf
from keras import backend as K  # assuming standalone Keras; use the tf.keras backend if applicable
from tensorflow.python.framework import graph_util
from tensorflow.python.saved_model import builder as saved_model_builder
from tensorflow.python.saved_model import signature_constants, tag_constants

# `model`, `preprocess_image`, HEIGHT, WIDTH and export_path are defined elsewhere.

# Freeze the Keras graph into a GraphDef with the weights as constants.
sess = K.get_session()
g = sess.graph
g_def = graph_util.convert_variables_to_constants(sess,
                                                  g.as_graph_def(),
                                                  [model.output.name.replace(':0', '')])

# Preprocessing graph: decode the b64-encoded image bytes, resize to the
# model's input size, and apply the model-specific preprocessing.
with tf.Graph().as_default() as g_input:
    input_b64 = tf.placeholder(shape=(1,),
                               dtype=tf.string,
                               name='b64')
    tf.logging.info('input b64 {}'.format(input_b64))
    image = tf.image.decode_image(input_b64[0])
    image_f = tf.image.convert_image_dtype(image, dtype=tf.float16)
    input_image = tf.expand_dims(image_f, 0)
    image_r = tf.image.resize_bilinear(input_image, [HEIGHT, WIDTH], align_corners=False)
    input_data = preprocess_image(image_r)
    output = tf.identity(input_data, name='input_image')


# Convert to GraphDef
g_input_def = g_input.as_graph_def()

# Stitch the two graphs together: b64 string -> preprocessing -> frozen model.
with tf.Graph().as_default() as g_combined:
    x = tf.placeholder(tf.string, name="b64")
    im, = tf.import_graph_def(g_input_def,
                              input_map={'b64:0': x},
                              return_elements=["input_image:0"])
    pred, = tf.import_graph_def(g_def,
                                input_map={model.input.name: im},
                                return_elements=[model.output.name])
    with tf.Session() as session:
        inputs = {"image_bytes": tf.saved_model.utils.build_tensor_info(x)}
        outputs = {"output_bytes":tf.saved_model.utils.build_tensor_info(pred)}
        signature =tf.saved_model.signature_def_utils.build_signature_def(
                inputs=inputs,
                outputs=outputs,
                method_name=tf.saved_model.signature_constants.PREDICT_METHOD_NAME
            )

        """Convert the Keras HDF5 model into TensorFlow SavedModel."""
        if os.path.exists(export_path):
            shutil.rmtree(export_path)
        legacy_init_op = tf.group(tf.tables_initializer(), name='legacy_init_op')
        builder = saved_model_builder.SavedModelBuilder(export_path)
        builder.add_meta_graph_and_variables(
            sess=session,
            tags=[tag_constants.SERVING],
            signature_def_map={ signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY: signature },
        )
        builder.save()
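
For reference, requests to a signature like this follow the standard TensorFlow Serving / Cloud ML Engine JSON convention of wrapping binary string inputs in {"b64": ...}. The sketch below is only illustrative (the file name and the way the request is actually sent are assumptions); the point is that the response then carries the whole (?, 473, 473, 3) float tensor as nested JSON lists, which is what makes it so large:

import base64
import json

# Build a request body in the standard TF Serving / Cloud ML Engine JSON
# format; 'input.png' is just a placeholder file name.
with open('input.png', 'rb') as f:
    image_b64 = base64.b64encode(f.read()).decode('utf-8')

payload = json.dumps({
    'instances': [{'image_bytes': {'b64': image_b64}}]
})
# POSTing `payload` to the model's :predict endpoint returns the raw
# (?, 473, 473, 3) float prediction as nested JSON lists.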

I took a lot of this work from https://medium.com/google-cloud/serverless-transfer-learning-with-cloud-ml-engine-and-keras-335435f31e15 as a reference. Thanks!

Posting my own solution in case someone else runs into this problem. Basically, you just do the inverse of the input function:

# Same imports as above; `utils.class_image_to_image_tensor` is a helper
# defined elsewhere that maps class indices to an RGB image.
def postprocess_image(img, in_shape):
    # Top result per pixel: argmax over the class dimension.
    class_image = tf.argmax(img, axis=2)
    colored_class_image = utils.class_image_to_image_tensor(class_image, [HEIGHT, WIDTH])
    image_expand = tf.expand_dims(colored_class_image, 0)
    # Resize the mask back to the original image shape.
    image_r = tf.image.resize_bilinear(image_expand, in_shape, align_corners=False)
    casted_data = tf.bitcast(tf.cast(image_r[0], tf.int8), tf.uint8)
    # Encode as PNG so it can be returned as (b64) bytes.
    out_image = tf.image.encode_png(casted_data)
    return out_image

# Freeze the Keras graph, as before.
sess = K.get_session()
g = sess.graph
g_def = graph_util.convert_variables_to_constants(sess,
                                                  g.as_graph_def(),
                                                  [model.output.name.replace(':0', '')])

# Preprocessing graph: decode the b64 image bytes, resize, and preprocess.
with tf.Graph().as_default() as g_input:
    input_b64 = tf.placeholder(shape=(1,),
                               dtype=tf.string,
                               name='b64')
    tf.logging.info('input b64 {}'.format(input_b64))
    image = tf.image.decode_image(input_b64[0])
    image_f = tf.image.convert_image_dtype(image, dtype=tf.uint8)
    input_image = tf.expand_dims(image_f, 0)
    image_r = tf.image.resize_bilinear(input_image, [HEIGHT, WIDTH], align_corners=False)
    input_data = preprocess_image(image_r[0])
    output = tf.identity(input_data, name='input_image')

# Postprocessing graph. The placeholder name has to match model.output.name
# ('activation_58/div' for this model) so it can be wired up via input_map below.
with tf.Graph().as_default() as g_output:
    first = tf.placeholder(shape=[1, 473, 473, 150],
                           dtype=tf.float32,
                           name='activation_58/div')
    i_shape = tf.placeholder(dtype=tf.int32, shape=[2], name='in_shape')

    post_image = postprocess_image(first[0], i_shape)
    output_data = tf.identity(post_image, name='out')

g_input_def = g_input.as_graph_def()
g_output_def = g_output.as_graph_def()

# Stitch everything together: b64 string -> preprocessing -> frozen model ->
# postprocessing (argmax + PNG encode).
with tf.Graph().as_default() as g_combined:
    x = tf.placeholder(tf.string, name="b64")
    in_shape = tf.placeholder(tf.int32, shape=[1, 2], name="original_shape")
    im, = tf.import_graph_def(g_input_def,
                              input_map={'b64:0': x},
                              return_elements=["input_image:0"])
    pred, = tf.import_graph_def(g_def,
                                input_map={model.input.name: im},
                                return_elements=[model.output.name])

    y, = tf.import_graph_def(g_output_def,
                             input_map={model.output.name: pred,
                                        'in_shape:0': in_shape[0]},
                             return_elements=["out:0"])
    with tf.Session() as session:
        inputs = {"image_bytes": tf.saved_model.utils.build_tensor_info(x),
                "original_shape":tf.saved_model.utils.build_tensor_info(in_shape)}
        outputs = {"output_bytes":tf.saved_model.utils.build_tensor_info(y)}
        signature =tf.saved_model.signature_def_utils.build_signature_def(
                inputs=inputs,
                outputs=outputs,
                method_name=tf.saved_model.signature_constants.PREDICT_METHOD_NAME
            )

        """Convert the Keras HDF5 model into TensorFlow SavedModel."""
        if os.path.exists(export_path):
            shutil.rmtree(export_path)
        legacy_init_op = tf.group(tf.tables_initializer(), name='legacy_init_op')
        builder = saved_model_builder.SavedModelBuilder(export_path)
        builder.add_meta_graph_and_variables(
            sess=session,
            tags=[tag_constants.SERVING],
            signature_def_map={ signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY: signature },
        )
        builder.save()
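
For completeness, here is a rough sketch of calling the exported model and turning the response back into an image. The file names and the request/response plumbing are assumptions; the {"b64": ...} wrapping and the "_bytes" alias handling follow the documented TensorFlow Serving / Cloud ML Engine convention for binary tensors, and the shape of original_shape may need adjusting to match the [1, 2] placeholder.

import base64
import json

# Build the request: the image goes in as b64 bytes, plus the original
# (height, width) so the mask can be resized back. 'input.png' and the values
# in original_shape are placeholders.
with open('input.png', 'rb') as f:
    payload = json.dumps({
        'instances': [{
            'image_bytes': {'b64': base64.b64encode(f.read()).decode('utf-8')},
            'original_shape': [1024, 768],
        }]
    })

# After POSTing `payload` to the :predict endpoint, the encoded PNG comes back
# base64-encoded, since the output alias ends in "_bytes".
def save_prediction(response_json, out_path='prediction.png'):
    pred = response_json['predictions'][0]
    # Cloud ML Engine keys the value by the output alias; plain TF Serving may
    # return the {'b64': ...} object directly when there is only one output.
    png_b64 = pred['output_bytes']['b64'] if 'output_bytes' in pred else pred['b64']
    with open(out_path, 'wb') as out:
        out.write(base64.b64decode(png_b64))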
