From f052eb35cceedbcef1726ba225e7a2a22e473686 Mon Sep 17 00:00:00 2001
From: Tai An
Date: Mon, 20 Apr 2026 21:13:41 -0700
Subject: [PATCH] fix(exporters/tasks): support local timm model paths

Previously, `get_model_from_task` always prepended `hf_hub:` when loading
timm models, forcing them to be fetched from HF Hub even when a local
directory was provided. This caused a `Repository Not Found` error for
locally fine-tuned timm models.

Fix: check if `model_name_or_path` is an existing local directory. If so,
pass it directly to `create_model`; otherwise use the `hf_hub:` prefix for
Hub models.

Fixes #2423
---
 optimum/exporters/tasks.py | 7 ++++++-
 1 file changed, 6 insertions(+), 1 deletion(-)

diff --git a/optimum/exporters/tasks.py b/optimum/exporters/tasks.py
index b841d52a6b..3cffdc22cf 100644
--- a/optimum/exporters/tasks.py
+++ b/optimum/exporters/tasks.py
@@ -1174,7 +1174,12 @@ def get_model_from_task(
             )
 
         if library_name == "timm":
-            model = model_class(f"hf_hub:{model_name_or_path}", pretrained=True, exportable=True)
+            import os
+
+            if os.path.isdir(model_name_or_path):
+                model = model_class(model_name_or_path, pretrained=True, exportable=True)
+            else:
+                model = model_class(f"hf_hub:{model_name_or_path}", pretrained=True, exportable=True)
             model = model.to(torch_dtype).to(device)
         elif library_name == "sentence_transformers":
             token = model_kwargs.pop("token", None)