# Import the libraries
import json                 # Serialise each prediction record as a JSON line
import uuid                 # Give every log file a unique name
import time                 # Import the time module for time-related functions
from pathlib import Path    # Work with filesystem paths

import joblib               # Load the persisted scikit-learn model
import pandas as pd         # Build a dataframe from the submitted features
import gradio as gr         # Build the web UI and the prediction API
from huggingface_hub import CommitScheduler  # Periodically commit logs to a dataset repo

from gradio_client import Client                       # Gradio client for interacting with Gradio interfaces
from sklearn.datasets import fetch_openml              # fetch_openml to fetch datasets
from sklearn.model_selection import train_test_split   # train_test_split for splitting datasets
from tqdm import tqdm                                  # tqdm for progress bars

# Run the training script placed in the same directory as app.py
# The training script will train and persist a linear regression
# model with the filename 'model.joblib'

# Load the freshly trained model from disk
model = joblib.load("model.joblib")

# Prepare the logging functionality
log_file = Path("logs/") / f"data_{uuid.uuid4()}.json"
log_folder = log_file.parent

scheduler = CommitScheduler(
    repo_id="-----------",  # provide a name "insurance-charge-mlops-logs" for the repo_id
    repo_type="dataset",
    folder_path=log_folder,
    path_in_repo="data",
    every=2,                # push pending logs roughly every 2 minutes
)

# Define the predict function which will take the features, convert them to a
# dataframe and make a prediction using the saved model.
# The function runs when 'Submit' is clicked or when an API request is made.
def predict_charge(age, bmi, children, sex, smoker, region):
    # The persisted pipeline is assumed to handle categorical encoding itself
    sample = pd.DataFrame([{
        'age': age, 'bmi': bmi, 'children': children,
        'sex': sex, 'smoker': smoker, 'region': region,
    }])
    prediction = model.predict(sample)

    # While the prediction is made, log both the inputs and outputs to a log file.
    # While writing to the log file, ensure that the commit scheduler is locked
    # to avoid parallel access.
    with scheduler.lock:
        with log_file.open("a") as f:
            f.write(json.dumps(
                {
                    'age': age,
                    'bmi': bmi,
                    'children': children,
                    'sex': sex,
                    'smoker': smoker,
                    'region': region,
                    'prediction': prediction[0]
                }
            ))
            f.write("\n")

    return prediction[0]

# Set up UI components for input and output
# (example components; adjust ranges and choices to match the training data)
inputs = [
    gr.Number(label="Age"),
    gr.Number(label="BMI"),
    gr.Number(label="Children", precision=0),
    gr.Dropdown(["male", "female"], label="Sex"),
    gr.Dropdown(["yes", "no"], label="Smoker"),
    gr.Dropdown(["northeast", "northwest", "southeast", "southwest"], label="Region"),
]
output = gr.Number(label="Predicted charge")

# Create the gradio interface, make title "HealthyLife Insurance Charge Prediction"
demo = gr.Interface(
    fn=predict_charge,
    inputs=inputs,
    outputs=output,
    title="HealthyLife Insurance Charge Prediction",
)

# Launch with a queue so concurrent requests are load balanced
demo.queue()
demo.launch(share=False)
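
# --------------------------------------------------------------------------
# Reference sketch of the training script described above (train.py).
# It trains a linear regression pipeline and persists it as 'model.joblib'.
# This is only an illustration: the OpenML dataset name "insurance" and the
# column names below are assumptions, not values taken from this file.
# --------------------------------------------------------------------------
# import joblib
# from sklearn.compose import ColumnTransformer
# from sklearn.linear_model import LinearRegression
# from sklearn.pipeline import Pipeline
# from sklearn.preprocessing import OneHotEncoder
#
# data = fetch_openml(name="insurance", as_frame=True).frame   # hypothetical dataset name
# X = data[['age', 'bmi', 'children', 'sex', 'smoker', 'region']]
# y = data['charges']
# X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=42)
#
# pipeline = Pipeline([
#     ('encode', ColumnTransformer(
#         [('cat', OneHotEncoder(handle_unknown='ignore'), ['sex', 'smoker', 'region'])],
#         remainder='passthrough',
#     )),
#     ('model', LinearRegression()),
# ])
# pipeline.fit(X_train, y_train)
# print("R^2 on held-out data:", pipeline.score(X_test, y_test))
# joblib.dump(pipeline, 'model.joblib')             # filename expected by app.py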
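
# --------------------------------------------------------------------------
# Optional: querying the deployed app programmatically (minimal sketch).
# The Client, time and tqdm imports above suggest the interface is also called
# from a separate script to generate traffic for the logs. The Space id below
# is a placeholder, not a value from this file; run this from its own script
# rather than inside app.py.
# --------------------------------------------------------------------------
# client = Client("<username>/insurance-charge")    # placeholder Space id
# for _ in tqdm(range(10)):                         # send a handful of sample requests
#     result = client.predict(
#         35, 27.5, 2, "male", "no", "southeast",   # age, bmi, children, sex, smoker, region
#         api_name="/predict",                      # default endpoint for a gr.Interface
#     )
#     time.sleep(1)                                 # brief pause between calls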