Skip to content

Commit 579af0f

Browse files
committed
ONNX issue fixed
1 parent 46af9a7 commit 579af0f

4 files changed

Lines changed: 72 additions & 11 deletions

File tree

multimind/__init__.py

Lines changed: 11 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -243,9 +243,7 @@ def __init__(self, *args, **kwargs):
243243
BaseModelConverter,
244244
HuggingFaceConverter,
245245
OllamaConverter,
246-
ONNXConverter,
247246
TensorFlowConverter,
248-
ONNXRuntimeConverter,
249247
SafetensorsConverter,
250248
GGMLConverter,
251249
OptimizationConverter,
@@ -257,6 +255,17 @@ def __init__(self, *args, **kwargs):
257255
ModelConversionManager
258256
)
259257

258+
# Try to import ONNX-related converters, but handle gracefully if not available
259+
try:
260+
from .model_conversion import ONNXConverter
261+
except ImportError:
262+
ONNXConverter = None
263+
264+
try:
265+
from .model_conversion import ONNXRuntimeConverter
266+
except ImportError:
267+
ONNXRuntimeConverter = None
268+
260269
# Context window components
261270
from .context_window import (
262271
ContextManager,

multimind/model_conversion/__init__.py

Lines changed: 29 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -10,10 +10,25 @@
1010
# Core converters
1111
from .huggingface import HuggingFaceConverter
1212
from .ollama import OllamaConverter
13-
from .onnx import ONNXConverter
13+
14+
# Try to import ONNXConverter, but handle gracefully if not available
15+
try:
16+
from .onnx import ONNXConverter
17+
ONNX_CONVERTER_AVAILABLE = True
18+
except ImportError:
19+
ONNX_CONVERTER_AVAILABLE = False
20+
ONNXConverter = None
1421

1522
# Format converters
16-
from .formats import TensorFlowConverter, ONNXRuntimeConverter, SafetensorsConverter, GGMLConverter
23+
from .formats import TensorFlowConverter, SafetensorsConverter, GGMLConverter
24+
25+
# Try to import ONNXRuntimeConverter, but handle gracefully if not available
26+
try:
27+
from .formats import ONNXRuntimeConverter
28+
ONNX_RUNTIME_CONVERTER_AVAILABLE = True
29+
except ImportError:
30+
ONNX_RUNTIME_CONVERTER_AVAILABLE = False
31+
ONNXRuntimeConverter = None
1732

1833
# Optimization converters
1934
from .optimization import OptimizationConverter, AdvancedOptimization
@@ -34,11 +49,15 @@
3449
# Core converters
3550
'HuggingFaceConverter',
3651
'OllamaConverter',
37-
'ONNXConverter',
38-
52+
]
53+
54+
# Conditionally add ONNX-related exports
55+
if ONNX_CONVERTER_AVAILABLE:
56+
__all__.append('ONNXConverter')
57+
58+
__all__.extend([
3959
# Format converters
4060
'TensorFlowConverter',
41-
'ONNXRuntimeConverter',
4261
'SafetensorsConverter',
4362
'GGMLConverter',
4463

@@ -58,4 +77,8 @@
5877

5978
# Manager
6079
'ModelConversionManager',
61-
]
80+
])
81+
82+
# Conditionally add ONNXRuntimeConverter
83+
if ONNX_RUNTIME_CONVERTER_AVAILABLE:
84+
__all__.append('ONNXRuntimeConverter')

multimind/model_conversion/formats.py

Lines changed: 18 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,8 +1,6 @@
11
from typing import Dict, Any, Optional
22
from pathlib import Path
33
import torch
4-
import onnx
5-
import onnxruntime
64
from .base import BaseModelConverter
75

86
# Try to import tensorflow, but handle gracefully if not available
@@ -13,6 +11,16 @@
1311
TENSORFLOW_AVAILABLE = False
1412
tf = None
1513

14+
# Try to import onnx and onnxruntime, but handle gracefully if not available
15+
try:
16+
import onnx
17+
import onnxruntime
18+
ONNX_AVAILABLE = True
19+
except ImportError:
20+
ONNX_AVAILABLE = False
21+
onnx = None
22+
onnxruntime = None
23+
1624
class TensorFlowConverter(BaseModelConverter):
1725
"""Converter for TensorFlow models."""
1826

@@ -85,6 +93,9 @@ def convert(self,
8593
output_path: str,
8694
config: Optional[Dict[str, Any]] = None) -> str:
8795
"""Convert ONNX model to optimized ONNX Runtime format."""
96+
if not ONNX_AVAILABLE:
97+
raise ImportError("ONNX is not available. Please install onnx and onnxruntime to use this converter.")
98+
8899
config = config or {}
89100

90101
# Load ONNX model
@@ -104,6 +115,8 @@ def _optimize_model(self, model: onnx.ModelProto, config: Dict[str, Any]) -> onn
104115

105116
def validate(self, model_path: str) -> bool:
106117
"""Validate ONNX model."""
118+
if not ONNX_AVAILABLE:
119+
return False
107120
try:
108121
onnx.load(model_path)
109122
return True
@@ -112,6 +125,9 @@ def validate(self, model_path: str) -> bool:
112125

113126
def get_metadata(self, model_path: str) -> Dict[str, Any]:
114127
"""Get ONNX model metadata."""
128+
if not ONNX_AVAILABLE:
129+
return {"format": "onnx", "error": "ONNX not available"}
130+
115131
model = onnx.load(model_path)
116132
return {
117133
"format": "onnx",

multimind/model_conversion/onnx.py

Lines changed: 14 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,10 +1,17 @@
11
import os
2-
import onnx
32
import torch
43
from typing import Dict, Any, Optional
54
from transformers import AutoModelForCausalLM, AutoTokenizer
65
from .base import BaseModelConverter
76

7+
# Try to import onnx, but handle gracefully if not available
8+
try:
9+
import onnx
10+
ONNX_AVAILABLE = True
11+
except ImportError:
12+
ONNX_AVAILABLE = False
13+
onnx = None
14+
815
class ONNXConverter(BaseModelConverter):
916
"""Converter for ONNX models."""
1017

@@ -33,6 +40,9 @@ def convert(self,
3340
Returns:
3441
str: Path to the converted model
3542
"""
43+
if not ONNX_AVAILABLE:
44+
raise ImportError("ONNX is not available. Please install onnx to use this converter.")
45+
3646
if not self.validate(model_path):
3747
raise ValueError(f"Invalid model path: {model_path}")
3848

@@ -118,6 +128,9 @@ def get_metadata(self, model_path: str) -> Dict[str, Any]:
118128
Returns:
119129
Dict[str, Any]: Model metadata
120130
"""
131+
if not ONNX_AVAILABLE:
132+
return {"error": "ONNX is not available"}
133+
121134
model = AutoModelForCausalLM.from_pretrained(model_path)
122135
tokenizer = AutoTokenizer.from_pretrained(model_path)
123136

0 commit comments

Comments (0)