# VisualSemSeg/model/modelLoading.py
# Author: Nunzio — "simpler model loading" (commit 3f820ff)
import torch
from model.BiSeNet.build_bisenet import BiSeNet
from model.BiSeNetV2.model import BiSeNetV2
# Model loading function
def loadModel(model:str = 'bisenet', device: str = 'cpu', weights:str='weight_Base.pth') -> BiSeNet | BiSeNetV2:
"""
Load the BiSeNet or BiSeNetV2 model and move it to the specified device.
This function supports loading different versions of the model based on the provided `model` argument.
The model weights are loaded from the specified `weights` file.
The model is set to evaluation mode after loading.
Args:
model (str): The type of model to load. Options are 'bisenet', 'bisenet_base', 'bisenet_best', 'bisenetv2', 'bisenetv2_base', 'bisenetv2_best'.
Default is 'bisenet'.
device (str): Device to load the model onto ('cpu' or 'cuda'). Default is 'cpu'.
weights (str): weights file to be loaded. Default is 'weight_Base.pth'.
Returns:
model (BiSeNet | BiSeNetV2): The loaded BiSeNet or BiSeNetV2 model.
"""
match model.lower() if isinstance(model, str) else model:
case 'bisenet' | 'bisenet_base' | 'bisenet_best':
model = BiSeNet(num_classes=19, context_path='resnet18').to(device)
modelStateDict = torch.load(f'./weights/BiSeNet/{weights}', map_location=device)
model.load_state_dict(modelStateDict['model_state_dict'] if 'model_state_dict' in modelStateDict else modelStateDict)
case 'bisenetv2' | 'bisenetv2_base' | 'bisenetv2_best':
model = BiSeNetV2(n_classes=19).to(device)
modelStateDict = torch.load(f'./weights/BiSeNetV2/{weights}', map_location=device)
model.load_state_dict(modelStateDict['model_state_dict'] if 'model_state_dict' in modelStateDict else modelStateDict)
case _: raise NotImplementedError(f"Model {model} is not implemented. Please choose 'bisenet' or 'bisenetv2'.")
model.eval()
return model