def run_inference():
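    """Restore the model from a checkpoint and run the inference hooks.

    Assumes `graph`, `hooks`, `loaded_checkpoint_path`, `model_dir_input`,
    and `decoded_string` are module-level names set up elsewhere in this file.
    """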
    with graph.as_default():
        saver = tf.train.Saver()
        checkpoint_path = (loaded_checkpoint_path or
                           tf.train.latest_checkpoint(model_dir_input))

        def session_init_op(_scaffold, sess):
            saver.restore(sess, checkpoint_path)
            tf.logging.info("Restored model from %s", checkpoint_path)

        scaffold = tf.train.Scaffold(init_fn=session_init_op)
        session_creator = tf.train.ChiefSessionCreator(scaffold=scaffold)
        with tf.train.MonitoredSession(
                session_creator=session_creator, hooks=hooks) as sess:
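            # The fetch list below is empty on purpose: the decoding work is
            # done by the session hooks, which are expected to populate the
            # module-level `decoded_string` as a side effect.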
            sess.run([])
        # print(" ****** decoded string ", decoded_string)
        return decoded_string


@app.route("/examplesdata")
def examplesdata():
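    """Run the model on the bundled test dataset and return a Vega-Lite spec.

    Loads the example data, forward-normalizes it for the model, decodes a
    spec, backward-normalizes the field names, and returns the result as JSON.
    """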
    source_data = data_utils.load_test_dataset()
    f_names = data_utils.generate_field_types(source_data)
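    # Forward-normalize the source data into `destination_file` (assumed to
    # be the model's input path, defined at module level).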
    data_utils.forward_norm(source_data, destination_file, f_names)

    print('source data: ', source_data)
    decoded_string = run_inference()

    # Post-processing: backward-normalize the first decoded candidate using
    # the generated field names.
    decoded_string_post = data_utils.backward_norm(decoded_string[0], f_names)
    print('f_names: ', f_names)
    print('decoded string post: ', decoded_string_post)

    try:
        # Parse the generated Vega-Lite spec and attach the example data inline.
        vega_spec = json.loads(decoded_string_post)
        vega_spec["data"] = {"values": source_data}
        response_payload = {"vegaspec": vega_spec, "status": True}
        print('response: ', response_payload)
    except json.JSONDecodeError:
        response_payload = {
            "status": False,
            "reason": "Model did not produce a valid vegalite JSON",
            "vegaspec": decoded_string
        }
    return jsonify(response_payload)
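

# A quick way to exercise this endpoint once the Flask app is running
# (hypothetical host/port; adjust to your `app.run(...)` configuration):
#   curl http://localhost:5000/examplesdata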