Problems with running a custom-trained model on PySDK

Hello, for a couple of days I’ve been working towards running my model on the Hailo AI HAT (26 TOPS).

I’ve run into a couple of problems. At first I tried to implement my own post-processing, but after learning about DeGirum I decided it would be the simpler of the two options. My model is a YOLOv8-seg model, custom trained to recognize cracks on different surfaces.

I’ve followed guide #4 on implementing segmentation models, but it is still not working correctly.

My file structure: yolov8n_seg.json

{
    "ConfigVersion": 6,
    "Checksum": "82e77890ff6ad1ab5b398b3f581f4ea767c7a397522d2f2eea49d1b2e097d2d6",
    "DEVICE": [
        {
            "DeviceType": "HAILO8",
            "RuntimeAgent": "HAILORT",
            "SupportedDeviceTypes": "HAILORT/HAILO8"
        }
    ],
    "PRE_PROCESS": [
        {
            "InputType": "Image",
            "InputN": 1,
            "InputH": 640,
            "InputW": 640,
            "InputC": 3,
            "InputPadMethod": "letterbox",
            "InputResizeMethod": "bilinear",                
            "InputQuantEn": true
        }
    ],
    "MODEL_PARAMETERS": [
        {
            "ModelPath": "yolov8n_seg.hef"
        }
    ],
    "POST_PROCESS": [
        {
            "OutputPostprocessType": "SegmentationYoloV8",
            "LabelsPath": "yolov8n_seg_labels.json",
            "OutputNumClasses": 1,
            "OutputConfThreshold": 0.3,
            "SigmoidOnCLS": true
        }
    ]
} 

labels_yolov8n_seg.json

{
    "0": "crack"    
}

and lastly my compiled yolov8n_seg.hef file

My Jupyter notebook file to run the whole thing:

import degirum as dg, degirum_tools

inference_host_address = "@local"
zoo_url = 'degirum/hailo'
token=''
device_type=['HAILORT/HAILO8L']

# choose a model to run inference on by uncommenting one of the following lines
model_name = "yolov8n_seg"
# model_name = "yolov8n_relu6_coco_pose--640x640_quant_hailort_hailo8l_1"
# model_name = "yolov8n_relu6_coco_seg--640x640_quant_hailort_hailo8l_1"
# model_name = "yolov8s_silu_imagenet--224x224_quant_hailort_hailo8l_1"

# choose image source
image_source = "../assets/zdjecie.jpg"

# load AI model
model = dg.load_model(
    model_name=model_name,
    inference_host_address=inference_host_address,
    zoo_url=zoo_url,
    token=token,
    device_type=device_type
)

# perform AI model inference on given image source
print(f" Running inference using '{model_name}' on image source '{image_source}'")
inference_result = model(image_source)

# print('Inference Results \n', inference_result)  # numeric results
print(inference_result)
print("Press 'x' or 'q' to stop.")

# show results of inference
with degirum_tools.Display("AI Camera") as output_display:
    output_display.show_image(inference_result.image_overlay)

This is the output that I get when trying to run it:

---------------------------------------------------------------------------
DegirumException                          Traceback (most recent call last)
Cell In[2], line 18
     15 image_source = "../assets/zdjecie.jpg"
     17 # load AI model
---> 18 model = dg.load_model(
     19     model_name=model_name,
     20     inference_host_address=inference_host_address,
     21     zoo_url=zoo_url,
     22     token=token,
     23     device_type=device_type
     24 )
     26 # perform AI model inference on given image source
     27 print(f" Running inference using '{model_name}' on image source '{image_source}'")

File ~/Desktop/yolo_post_process/hailo_examples/degirum_env/lib/python3.11/site-packages/degirum/__init__.py:244, in load_model(model_name, inference_host_address, zoo_url, token, **kwargs)
    220 """
    221 Load a model from the model zoo for the inference.
    222 
   (...)    241     An instance of [degirum.model.Model][] model handling object to be used for AI inferences.
    242 """
    243 zoo = ZooManager(inference_host_address, zoo_url, token)
--> 244 return zoo.load_model(model_name, **kwargs)

File ~/Desktop/yolo_post_process/hailo_examples/degirum_env/lib/python3.11/site-packages/degirum/log.py:92, in log_wrap.<locals>.sync_wrap(*args, **kwargs)
     90 try:
     91     logger.log(log_level, f"/ {f.__qualname__}")
---> 92     return f(*args, **kwargs)
     93 finally:
     94     t2 = time.time_ns()

File ~/Desktop/yolo_post_process/hailo_examples/degirum_env/lib/python3.11/site-packages/degirum/zoo_manager.py:324, in ZooManager.load_model(self, model_name, **kwargs)
    262 @log_wrap
    263 def load_model(self, model_name: str, **kwargs) -> Model:
    264     """Create and return the model handling object for given model name.
    265 
    266     Args:
   (...)    322 
    323     """
--> 324     model = self._zoo.load_model(model_name)
    325     for key, value in kwargs.items():
    326         if hasattr(Model, key):

File ~/Desktop/yolo_post_process/hailo_examples/degirum_env/lib/python3.11/site-packages/degirum/log.py:92, in log_wrap.<locals>.sync_wrap(*args, **kwargs)
     90 try:
     91     logger.log(log_level, f"/ {f.__qualname__}")
---> 92     return f(*args, **kwargs)
     93 finally:
     94     t2 = time.time_ns()

File ~/Desktop/yolo_post_process/hailo_examples/degirum_env/lib/python3.11/site-packages/degirum/_zoo_accessor.py:580, in _LocalHWCloudZooAccessor.load_model(self, model)
    569 @log_wrap
    570 def load_model(self, model: str):
    571     """Create model object for given model name.
    572 
    573     Args:
   (...)    577         model object corresponding to given model name
    578     """
--> 580     assets = self._get_model_assets(model, True)
    581     model_params: ModelParams = copy.deepcopy(assets.model_params)
    582     model_params.CloudModelName = self.ext_model_name(model)

File ~/Desktop/yolo_post_process/hailo_examples/degirum_env/lib/python3.11/site-packages/degirum/_zoo_accessor.py:531, in _CloudZooAccessorBase._get_model_assets(self, model, get_labels)
    528 assets = self._assets.get(model)
    530 if assets is None or assets.model_params is None:
--> 531     model_info = self._cloud_server_request(
    532         f"/zoo/v1/public/models{self._zoo_url}/{model}/info"
    533     )
    535     assets = self._Assets(
    536         ModelParams(json.dumps(model_info["model_params"])),
    537         Model._harmonize_label_dictionary(model_info["model_labels"]),
    538     )
    539     self._assets[model] = assets

File ~/Desktop/yolo_post_process/hailo_examples/degirum_env/lib/python3.11/site-packages/degirum/log.py:92, in log_wrap.<locals>.sync_wrap(*args, **kwargs)
     90 try:
     91     logger.log(log_level, f"/ {f.__qualname__}")
---> 92     return f(*args, **kwargs)
     93 finally:
     94     t2 = time.time_ns()

File ~/Desktop/yolo_post_process/hailo_examples/degirum_env/lib/python3.11/site-packages/degirum/_zoo_accessor.py:460, in _CloudZooAccessorBase._cloud_server_request(self, api_url, is_octet_stream)
    449 """Perform request to cloud server
    450 
    451 Args:
   (...)    456     response parsed JSON (when is_octet_stream is False) or binary content otherwise
    457 """
    459 logger.info(f"sending a request to {self.url}{api_url}")
--> 460 return cloud_server_request(
    461     base_url=self.url,
    462     api_url=api_url,
    463     token=self._token,
    464     timeout_s=self._timeout,
    465     is_octet_stream=is_octet_stream,
    466 )

File ~/Desktop/yolo_post_process/hailo_examples/degirum_env/lib/python3.11/site-packages/degirum/_misc.py:120, in cloud_server_request(base_url, api_url, token, method, data, params, headers, timeout_s, is_octet_stream, no_returns)
    118     except json.JSONDecodeError:
    119         pass
--> 120     raise DegirumException(details) from None
    122 if no_returns:
    123     return None

DegirumException: could not get model by url. (cloud server response: 400 Client Error: Bad Request for url: https://hub.degirum.com/zoo/v1/public/models/degirum/hailo/yolov8n_seg/info)

Looking forward to your response,

thanks in advance.

Hi @Aleksoooxd,

The issue is likely an incorrect zoo_url. You have zoo_url = 'degirum/hailo' assigned, but that is the AI Hub cloud model zoo, which is why the SDK tries to fetch yolov8n_seg from hub.degirum.com and gets the 400 error shown in your traceback. Instead, set the zoo_url to the local directory with your model files.
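For reference, here is a minimal sketch of what the loading code could look like with a local model zoo. The ./models/yolov8n_seg path and directory layout below are assumptions; adjust them to wherever your .json, .hef, and labels files actually live:

import degirum as dg

# Assumed local layout (adjust to your setup):
#   ./models/yolov8n_seg/yolov8n_seg.json
#   ./models/yolov8n_seg/yolov8n_seg.hef
#   ./models/yolov8n_seg/labels_yolov8n_seg.json   <- must match "LabelsPath" in the JSON (see below)
model = dg.load_model(
    model_name="yolov8n_seg",             # base name of your model JSON file
    inference_host_address="@local",      # run inference on the local Hailo device
    zoo_url="./models/yolov8n_seg",       # local directory containing your model files
    token="",                             # no cloud token needed for a local zoo
    device_type=["HAILORT/HAILO8"],       # matches "SupportedDeviceTypes" in your model JSON
)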

Additionally, in the model JSON file the labels path does not match the actual file name. The JSON references

"LabelsPath": "yolov8n_seg_labels.json",

but the labels file next to your HEF is named labels_yolov8n_seg.json. Either rename the file or update LabelsPath so the two match.
