Created November 21, 2022 21:17
-
-
Save geoffreyangus/22c7a98d34d236af163909651820b852 to your computer and use it in GitHub Desktop.
Method 2: Convert `InferenceModule` Into a Single TorchScript Module
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
"""Convert a Ludwig ``InferenceModule`` into a single TorchScript module.

Loads a saved TorchScript experiment, compiles the whole inference
pipeline into one module with ``torch.jit.script``, and smoke-tests it
on a two-row sample of the dataset.
"""
import json
from pprint import pprint

import pandas as pd
import torch
from ludwig.models.inference import InferenceModule
from ludwig.utils.inference_utils import to_inference_module_input_from_dataframe

# NOTE(review): EXPERIMENT_DIRECTORY and DATA_DIRECTORY are assumed to be
# defined earlier in the notebook/session this snippet came from -- they are
# not set anywhere in this file. Confirm before running standalone.

# Load the saved TorchScript artifacts back into a Python-side InferenceModule.
inference_module = InferenceModule.from_directory(
    f"{EXPERIMENT_DIRECTORY}/torchscript")

# Compile the InferenceModule into one TorchScript module so the whole
# pipeline can be deployed as a single artifact.
single_module = torch.jit.script(inference_module)

# The saved model config is needed to map raw dataframe columns to the
# module's expected input dict.
with open(
        f"{EXPERIMENT_DIRECTORY}/torchscript/model_hyperparameters.json") as f:
    config = json.load(f)

input_df = pd.read_parquet(f"{DATA_DIRECTORY}/twitter_bots.parquet")
# A tiny 2-row sample is enough to verify the scripted module end-to-end.
input_sample_df = input_df.head(2)
input_sample_dict = to_inference_module_input_from_dataframe(
    input_sample_df, config)

# Run the compiled module and show the post-processed predictions.
postproc_output = single_module(input_sample_dict)
pprint(postproc_output)
Sign up for free to join this conversation on GitHub.
Already have an account? Sign in to comment.