Skip to content

Instantly share code, notes, and snippets.

@nagadomi
Created March 5, 2026 01:10
Show Gist options
  • Save nagadomi/e1b8023cb9fa57e278d4bc10fc0beb60 to your computer and use it in GitHub Desktop.
import torch
import coremltools as ct
import os
# --- Model Definitions (Primary Models) ---
# Each entry maps a display name to the torch.hub loading recipe:
#   model_name: (entry_point, arg_name, encoder_name)
# where `entry_point` is the hubconf function, `arg_name` is the keyword
# that selects the variant, and `encoder_name` is its value.
V2_MODELS = {
    "Any_S": ("DepthAnything", "encoder", "vits"),
    "Any_B": ("DepthAnything", "encoder", "vitb"),
    "Any_L": ("DepthAnything", "encoder", "vitl"),
    "Any_V2_S": ("DepthAnything", "encoder", "v2_vits"),
    "Any_V2_B": ("DepthAnything", "encoder", "v2_vitb"),
    "Any_V2_L": ("DepthAnything", "encoder", "v2_vitl"),
}

# Depth-Anything V3 uses a different hub entry point and selector keyword.
V3_MODELS = {
    "Any_V3_Mono": ("load_model", "model_name", "da3mono-large"),
}
# --- Conversion Logic ---
class WrappedV3(torch.nn.Module):
    """Adapter around a Depth-Anything V3 model.

    The wrapped model expects a 5D ``(B, 1, C, H, W)`` input and returns a
    dict; this adapter accepts the usual 4D ``(B, C, H, W)`` batch and
    returns only the ``"depth"`` tensor, which keeps tracing/export simple.
    """

    def __init__(self, m):
        super().__init__()
        self.m = m  # underlying V3 model (callable returning a dict)

    def forward(self, x):
        # Insert the singleton frame axis the V3 model expects.
        stacked = x.unsqueeze(1)
        outputs = self.m(stacked)
        return outputs["depth"]
def convert_fixed(
    local_repo,
    model_name,
    entry_point,
    arg_name,
    encoder,
    output_dir,
    is_v3=False,
    size=518,
):
    """Convert one model to a CoreML ``.mlpackage`` with a fixed input size.

    Args:
        local_repo: Path to a local torch.hub repository checkout.
        model_name: Display name; lowercased to build the output file name.
        entry_point: hubconf entry-point function name in ``local_repo``.
        arg_name: Keyword argument that selects the model variant.
        encoder: Value passed for ``arg_name`` (e.g. "vits", "da3mono-large").
        output_dir: Destination directory (created if missing).
        is_v3: Wrap the model with ``WrappedV3`` (5D input, dict output).
        size: Square input resolution; 518 is a multiple of the ViT patch size.

    Errors are reported to stdout and swallowed so a batch run continues.
    """
    print(f"\n[Fixed] Model: {model_name} | Size: {size}x{size} | Repo: {local_repo}")
    if not os.path.exists(local_repo):
        print(f"Error: {local_repo} not found.")
        return
    try:
        load_args = {
            arg_name: encoder,
            "source": "local",
            "verbose": False,
            "trust_repo": True,
        }
        model = torch.hub.load(local_repo, entry_point, **load_args)
        if is_v3:
            model = WrappedV3(model)
        model.eval()
        # Robustness: ensure the destination exists even when called directly.
        os.makedirs(output_dir, exist_ok=True)
        example_input = torch.rand(1, 3, size, size)
        print(f"Tracing {model_name}...")
        # no_grad: export needs no autograd graph; keeps tracing lean.
        # check_trace=False was set by the original author — presumably the
        # model output is not bit-exact across runs; TODO confirm.
        with torch.no_grad():
            traced_model = torch.jit.trace(model, example_input, check_trace=False)
        print("Converting to CoreML (Fixed)...")
        mlmodel = ct.convert(
            traced_model,
            inputs=[ct.TensorType(name="input", shape=example_input.shape)],
            minimum_deployment_target=ct.target.iOS17,
        )
        output_name = f"{model_name.lower()}_fixed_{size}.mlpackage"
        mlmodel.save(os.path.join(output_dir, output_name))
        print(f"Success: {output_name} created.")
    except Exception as e:
        # Best-effort batch conversion: report and move on to the next model.
        print(f"Failed to convert {model_name} (Fixed): {e}")
def convert_dynamic(
    local_repo, model_name, entry_point, arg_name, encoder, output_dir, is_v3=False
):
    """Convert one model to CoreML with dynamic H/W constrained to 14x multiples.

    Experimental path (see the module's "Future task" note): exports via
    ``torch.export`` with symbolic dims ``s_h``/``s_w`` scaled by 14 (the ViT
    patch size), then converts with ``ct.RangeDim`` bounds 224..1036.

    Args mirror :func:`convert_fixed`. Errors are reported to stdout and
    swallowed so a batch run continues.
    """
    print(f"\n[Dynamic] Model: {model_name} | Multiple of 14 | Repo: {local_repo}")
    if not os.path.exists(local_repo):
        print(f"Error: {local_repo} not found.")
        return
    try:
        load_args = {
            arg_name: encoder,
            "source": "local",
            "verbose": False,
            "trust_repo": True,
        }
        model = torch.hub.load(local_repo, entry_point, **load_args)
        if is_v3:
            model = WrappedV3(model)
        model.eval()
        # Robustness: ensure the destination exists even when called directly.
        os.makedirs(output_dir, exist_ok=True)
        # Dynamic dims as multiples of 14: 16*14=224 up to 74*14=1036.
        s_h = torch.export.Dim("s_h", min=16, max=74)
        s_w = torch.export.Dim("s_w", min=16, max=74)
        h_dim = s_h * 14
        w_dim = s_w * 14
        # NOTE(review): the key "x" must match the model's forward() argument
        # name; verified for WrappedV3, assumed for the V2 entry points.
        dynamic_shapes = {"x": {2: h_dim, 3: w_dim}}
        example_input = torch.rand(1, 3, 518, 518)
        print(f"Exporting {model_name} with 14x constraints...")
        exported_program = torch.export.export(
            model, (example_input,), dynamic_shapes=dynamic_shapes
        )
        exported_program = exported_program.run_decompositions()
        print("Converting to CoreML (Dynamic)...")
        # CoreML ranges expressed in pixels (must stay consistent with the
        # 14x symbolic bounds above).
        h_range = ct.RangeDim(lower_bound=224, upper_bound=1036, default=518)
        w_range = ct.RangeDim(lower_bound=224, upper_bound=1036, default=518)
        input_shape = ct.Shape(shape=(1, 3, h_range, w_range))
        mlmodel = ct.convert(
            exported_program,
            inputs=[ct.TensorType(name="input", shape=input_shape)],
            minimum_deployment_target=ct.target.iOS17,
        )
        output_name = f"{model_name.lower()}_dynamic.mlpackage"
        mlmodel.save(os.path.join(output_dir, output_name))
        print(f"Success: {output_name} created.")
    except Exception as e:
        # Best-effort batch conversion: report and move on to the next model.
        print(f"Failed to convert {model_name} (Dynamic): {e}")
if __name__ == "__main__":
    # git clone https://github.com/nagadomi/Depth-Anything_iw3.git ../Depth-Anything_iw3
    v2_repo = os.path.abspath("../Depth-Anything_iw3")
    # git clone https://github.com/nagadomi/Depth-Anything-3_iw3.git ../Depth-Anything-3_iw3
    v3_repo = os.path.abspath("../Depth-Anything-3_iw3")
    output_dir = "conv_output"
    # exist_ok avoids the race between a separate exists() check and makedirs().
    os.makedirs(output_dir, exist_ok=True)
    print("Starting Batch Conversion...")
    # 1. Batch convert at fixed size (currently working).
    for name, (entry, arg, enc) in V2_MODELS.items():
        convert_fixed(
            v2_repo, name, entry, arg, enc, output_dir=output_dir, is_v3=False
        )
    for name, (entry, arg, enc) in V3_MODELS.items():
        convert_fixed(v3_repo, name, entry, arg, enc, output_dir=output_dir, is_v3=True)
    # 2. Batch convert dynamic size (future task — disabled until the
    #    14x-multiple export path in convert_dynamic() is verified):
    # for name, (entry, arg, enc) in V2_MODELS.items():
    #     convert_dynamic(v2_repo, name, entry, arg, enc, output_dir=output_dir, is_v3=False)
    # for name, (entry, arg, enc) in V3_MODELS.items():
    #     convert_dynamic(v3_repo, name, entry, arg, enc, output_dir=output_dir, is_v3=True)
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment