How to load a registered Azure ML model in an Azure Function App?


I am migrating my model inference from an Azure Container Instance deployment to an Azure Function App. The code below shows how I was loading the model in score.py:

import json
import pandas as pd
from joblib import load
import os
import pathlib
from azureml.core.model import Model

def init():
    global model


def run(raw_data):

    try:
        # parse the features from the json doc
        dat = json.loads(raw_data)

        # deserialize the model file back into a sklearn model
        model_name = "{0}_{1}_{2}_sfsw".format(
            dat["MachineName"], dat["HeadNumber"], dat["ProductEDP"])
        model_path = Model.get_model_path(model_name=model_name)
        model = load(model_path)

How would I load the model in an Azure Function App? Would it be done in the same way? Is def init(): global model still required? The following is my code for the Azure Function Python app:

import pandas as pd
import logging
import azure.functions as func
import joblib
from azureml.core.model import Model



def main(req: func.HttpRequest):
    logging.info("Python HTTP trigger function processed a request.")

    try:
        # Check if request contains JSON
        if not req.get_json():
            return func.HttpResponse(
                "JSON data expected in request.",
                status_code=400
            )
        dat = req.get_json()

    except ValueError:
        return func.HttpResponse(
            "Invalid JSON received.",
            status_code=400
        )

    # Load the model
    try:
        # deserialize the model file back into a sklearn model
        model_name = "{0}_{1}_{2}_sfsw".format(
            dat["MachineName"], dat["HeadNumber"], dat["ProductEDP"])
        model_path = Model.get_model_path(model_name=model_name)
        model = joblib.load(model_path)
    except FileNotFoundError:
        return func.HttpResponse(
            f"Model '{model_name}' not found.",
            status_code=404
        )

1 Answer

SiddheshDesai (accepted answer)

I agree with @Muhammad Pathan: you are not required to define a global model variable in this Function. You can update your code like below.

I tweaked your code to load my model and perform inference:

import logging
import azure.functions as func
from azureml.core import Workspace, Model
import joblib
import pandas as pd

# Load your Azure ML Workspace from a configuration file
ws = Workspace.from_config('./config.json')

def load_model(model_name):
    try:
        # Load the registered model from Azure ML
        model = Model(workspace=ws, name=model_name)
        model_path = model.download(exist_ok=True)  # Download the model file locally
        loaded_model = joblib.load(model_path)
        return loaded_model
    except Exception:
        # Return None if the model cannot be found or loaded
        return None

def main(req: func.HttpRequest):
    logging.info("Python HTTP trigger function processed a request.")

    try:
        # Check if request contains JSON
        if not req.get_json():
            return func.HttpResponse(
                "JSON data expected in the request.",
                status_code=400
            )
        dat = req.get_json()
    except ValueError:
        return func.HttpResponse(
            "Invalid JSON received.",
            status_code=400
        )

    model_name = "sample_model3"  # Replace with the model name in your workspace

    # Load the model
    model = load_model(model_name)
    if model is None:
        return func.HttpResponse(
            f"Model '{model_name}' not found.",
            status_code=404
        )

    # Perform inference using the loaded model
    try:
        result = model.predict(pd.DataFrame([dat]))
        return func.HttpResponse(f"Inference result: {result}")
    except Exception as e:
        return func.HttpResponse(f"An error occurred during inference: {str(e)}", status_code=500)

Output:

[screenshot of the function output]

Postman request with a JSON body (the request body matches my model's input features):

{
    "sepal length (cm)": 5.1,
    "sepal width (cm)": 3.5,
    "petal length (cm)": 1.4,
    "petal width (cm)": 0.2
}
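
For reference, the same request can also be sent from Python instead of Postman. A minimal sketch, assuming the Function is running locally on the default Core Tools port and is exposed as HttpTrigger1 (both of those are assumptions; adjust the URL and add a function key for a deployed app):

import requests

# Hypothetical local URL; replace with your own function URL
url = "http://localhost:7071/api/HttpTrigger1"

payload = {
    "sepal length (cm)": 5.1,
    "sepal width (cm)": 3.5,
    "petal length (cm)": 1.4,
    "petal width (cm)": 0.2
}

response = requests.post(url, json=payload)
print(response.status_code, response.text)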

Postman output:

[screenshot of the Postman response]

As an alternative, in order to load your Azure ML models you can make use of the HttpTrigger code below.

This code loads the models and downloads them into a folder called temp. The folder is created by the Function code itself, in the same directory as the Function code, as shown below.

Make sure you have logged in to the Azure account that contains your ML workspace and models with the az login command in your VS Code terminal; otherwise the Function code below will not be able to authenticate.
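
If you want to make that dependency on az login explicit, azureml-core also lets you pass an authentication object to Workspace.from_config. A sketch (not part of the original answer): AzureCliAuthentication reuses the az login session locally, while a deployed Function would typically use ServicePrincipalAuthentication or a managed identity instead:

from azureml.core import Workspace
from azureml.core.authentication import AzureCliAuthentication

# Reuse the credentials from `az login` in the local terminal
cli_auth = AzureCliAuthentication()

# config.json is the workspace config file downloaded from the Azure ML portal
ws = Workspace.from_config('./config.json', auth=cli_auth)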

My HttpTrigger code:

import os
import azure.functions as func
from azureml.core import Workspace, Model

# Function to load and download all registered models in the workspace
def load_and_download_models():
    try:
        # Load your Azure ML Workspace from its config file
        ws = Workspace.from_config('./config.json')
        # Alternatively, construct the workspace explicitly. Replace with your own values:
        # ws = Workspace(subscription_id="xxxxxxxxaxxxx",
        #                resource_group="xxxx",
        #                workspace_name="xxx")

        # Folder (next to the Function code) where the models are downloaded
        downloaded_models_path = "temp"
        os.makedirs(downloaded_models_path, exist_ok=True)

        loaded_and_downloaded_models = []

        # Iterate through the model names registered in the workspace
        for model_name in ws.models:
            # Load the model using its name
            model = Model(ws, model_name)

            # Download the model to the specified directory
            model_path = model.download(target_dir=downloaded_models_path, exist_ok=True)

            loaded_and_downloaded_models.append(model_name)

        return loaded_and_downloaded_models

    except Exception as e:
        return str(e)

# Azure Function entry point with HTTP trigger
def main(req: func.HttpRequest) -> func.HttpResponse:
    try:
        loaded_and_downloaded_models = load_and_download_models()
        if isinstance(loaded_and_downloaded_models, list):
            return func.HttpResponse(f"Models loaded and downloaded successfully: {', '.join(loaded_and_downloaded_models)}", status_code=200)
        else:
            return func.HttpResponse(f"An error occurred: {loaded_and_downloaded_models}", status_code=500)
    except Exception as e:
        return func.HttpResponse(f"An error occurred: {str(e)}", status_code=500)

Output:

[screenshots of the Function output]
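
If you also want to run inference on one of the models downloaded this way, a possible extension (a sketch, assuming the registered model is a scikit-learn model serialized with joblib, as in the first example):

import joblib
import pandas as pd

def predict_from_downloaded(model_path, payload):
    # model_path: the path returned by model.download(...) above
    # payload: the dict parsed from the request JSON, as in the earlier handler
    loaded_model = joblib.load(model_path)
    return loaded_model.predict(pd.DataFrame([payload]))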

Reference:

Python Azure function timer trigger log info not showing when deployed - Stack Overflow