Is there a way to change the overlay_font_color in hailo models?
pySDK, degirum-tools, Pi5 with Hailo 8
model.overlay_color
property of Model class in PySDK sets the color for all AI annotations: bbox rectangles and text captions. We do not have separate control over text captions color vs. bbox rectangles color.
You can assign RGB tuple to model.overlay_color
so all your bboxes and labels will have this color:
model.overlay_color = (255,0,0) # use red color
Or you can assign a list of RGB tuples to have per-class colors:
model.overlay_color = [(255,0,0), (0,255,0), (0,0,255)]
If the number of classes exceeds the list size, colors will be reused in a cyclic manner.
Thank you for the reply, I use the overlay_color
but now I realized that, e.g., with the combined face-and-gender model
the text in the overlay is black, while with the face_and_age compound model
the text is white, which on a yellow overlay_color, for example, is
nearly unreadable. So I searched for an attribute such as overlay_font_color,
but as far as I can tell there is no way to set the font color.
Or is it set at a specific position in the overlay_color tuple,
or via the overlay_color
of the second model?
@core-stuff, please advise: what models are you using, and how do you combine them? A little code snippet will help.
As for text color, it is deduced automatically from the bbox frame color the following way: if the luminance of the bbox frame color is greater than 180, then text color is black, otherwise it is white.
Thank you for giving me this information.
That explains the text-color phenomenon — thank you, now I know where the different text colors come from. On the other hand, it means there is no option to set the text color manually, but knowing that, it's OK.
@vladk I load the models dynamically via MQTT
from Homeassistant
, where I also display the overlaid stream, using a model_loader.py,
and the configuration of the models is defined in a models.yaml,
which works just fine so far.
part. of models.yaml
:
# Model configuration consumed by ModelLoader (yaml.safe_load(...)["models"]).
# Each entry is either a single model or a compound model built from
# exactly two components (detector + classifier).
models:
  face_and_age:
    type: compound
    strategy: crop_classify
    crop_extent: 30.0          # crop margin in percent around each detection
    components:
      # Component 1 (detector): finds faces; its bboxes are drawn on the overlay.
      - path: "yolov8n_relu6_face--640x640_quant_hailort_hailo8_1"
        result_key: "face"
        overlay_show_labels: True
        overlay_show_probabilities: False
        overlay_font_scale: 1.2
        overlay_line_width: 2
        overlay_color: [150, 255, 200]   # single RGB color for all classes
        overlay_alpha: 0.9
      # Component 2 (classifier): runs on the cropped faces, provides the age label.
      - path: "yolov8n_relu6_age--256x256_quant_hailort_hailo8_1"
        result_key: "age"
        overlay_show_labels: True
        overlay_show_probabilities: True
model_loader.py
import yaml
import degirum as dg
import degirum_tools
class ModelLoader:
    """Loads single and compound DeGirum PySDK models from a YAML config file.

    The YAML file must contain a top-level ``models:`` mapping. Each entry is
    either a single model (``type: single``, the default) or a compound model
    (``type: compound``) built from exactly two components (detector first,
    classifier second).
    """

    def __init__(
        self,
        config_path="<path>/models.yaml",
        zoo_path="<path>/degirum-zoo",
        inference_host="@local",
        token=""
    ):
        """Read and store the model configuration.

        Args:
            config_path: Path to the models.yaml configuration file.
            zoo_path: Local DeGirum model zoo directory.
            inference_host: PySDK inference host address ("@local" = on-device).
            token: Cloud token; empty string for local inference.

        Raises:
            OSError: if the config file cannot be opened.
            KeyError: if the file has no top-level "models" section.
        """
        self.zoo_path = zoo_path
        self.inference_host = inference_host
        self.token = token
        with open(config_path, "r") as f:
            self.config = yaml.safe_load(f)["models"]

    def load_model(self, model_name):
        """Load the named model.

        Returns:
            dict mapping result keys to loaded model objects. Compound models
            additionally contain a "compound" entry.

        Raises:
            ValueError: if the name is unknown or the type is unsupported.
        """
        if model_name not in self.config:
            raise ValueError(f"model '{model_name}' not found in config.")
        model_cfg = self.config[model_name]
        model_type = model_cfg.get("type", "single")
        if model_type == "single":
            return self._load_single(model_cfg)
        elif model_type == "compound":
            return self._load_compound(model_cfg)
        else:
            raise ValueError(f"Unknown model type: {model_type}")

    def configure_model_overlay(self, model, config):
        """Apply overlay_* settings from `config` onto `model` (best effort).

        Unknown/unsettable properties are reported but never raise, so a bad
        overlay entry in the YAML cannot prevent the model from loading.
        """
        # Pass-through settings: the config key and the model attribute
        # share the same name, so a single list suffices.
        for attr in (
            "overlay_show_labels",
            "overlay_show_probabilities",
            "overlay_font_scale",
            "overlay_line_width",
            "overlay_alpha",
            "overlay_blur",
        ):
            if attr in config:
                setattr(model, attr, config[attr])
        # NOTE(review): PySDK derives the label text color automatically from
        # the bbox color's luminance (>180 -> black text, else white); an
        # overlay_font_color property does not exist, so this is kept only
        # as a best-effort hook for possible future SDK support.
        if "overlay_font_color" in config:
            try:
                raw_color = config["overlay_font_color"]
                overlay_font_color = [tuple(raw_color)]
                model.overlay_font_color = overlay_font_color
                print("Overlay-font-color set successfully:", overlay_font_color)
            except Exception as e:
                print(f"Error setting overlay_font_color: {e}")
        if "overlay_color" in config:
            try:
                raw_color = config["overlay_color"]
                if isinstance(raw_color[0], (list, tuple)):  # multiple per-class colors
                    overlay_color = [tuple(c) for c in raw_color]
                else:  # single color
                    overlay_color = [tuple(raw_color)]
                model.overlay_color = overlay_color
                print("Overlay-color set successfully:", overlay_color)
            except Exception as e:
                print(f"Error setting overlay_color: {e}")

    def _load_and_configure(self, cfg):
        """Load one model from the zoo and apply its overlay settings."""
        model = dg.load_model(
            model_name=cfg["path"],
            inference_host_address=self.inference_host,
            zoo_url=self.zoo_path,
            token=self.token
        )
        self.configure_model_overlay(model, cfg)
        return model

    # load single model
    def _load_single(self, model_cfg):
        model = self._load_and_configure(model_cfg)
        result_key = model_cfg.get("result_key", "default")
        return {result_key: model}

    # load compound model
    def _load_compound(self, model_cfg):
        components = model_cfg.get("components", [])
        if len(components) != 2:
            raise ValueError("Compound models require exactly 2 components.")
        strategy = model_cfg.get("strategy", "crop_classify")
        # Validate the strategy before loading any models, so an unsupported
        # strategy fails fast instead of after two expensive loads.
        if strategy not in ("crop_classify", "chain", "parallel"):
            raise NotImplementedError(f"Strategy '{strategy}' is not supported.")
        crop_extent = model_cfg.get("crop_extent", 30.0)
        # Load components: detector first, classifier second.
        det_cfg, cls_cfg = components
        det_model = self._load_and_configure(det_cfg)
        cls_model = self._load_and_configure(cls_cfg)
        result = {
            det_cfg["result_key"]: det_model,
            cls_cfg["result_key"]: cls_model,
        }
        # Strategy: cropping and classifying — degirum_tools wires the two
        # models together (detector crops feed the classifier).
        if strategy == "crop_classify":
            result["compound"] = degirum_tools.CroppingAndClassifyingCompoundModel(
                det_model, cls_model, crop_extent
            )
        # Strategies "chain" and "parallel": both models are returned as-is
        # and the caller takes care of the interaction.
        return result
I am already working on it, so this might be not the final version.