@bjacob
Created November 18, 2025 15:24
---------------------------- live log sessionstart -----------------------------
INFO conftest:conftest.py:39 Pytest benchmark test session is starting
============================= test session starts ==============================
platform linux -- Python 3.12.3, pytest-8.0.0, pluggy-1.6.0
rootdir: /home/ossci/iree-test-suites/sharktank_models
configfile: pytest.ini
plugins: anyio-4.11.0, xdist-3.5.0, timeout-2.4.0, subtests-0.15.0, metadata-3.1.1, cov-7.0.0, asyncio-0.23.8, html-4.1.1, retry-1.7.0, reportlog-1.0.0, check-2.6.0
timeout: 600.0s
timeout method: signal
timeout func_only: False
asyncio: mode=Mode.STRICT
collected 8 items / 6 deselected / 2 selected
iree-test-suites/sharktank_models/benchmarks/model_benchmark_run.py::sd3 :: clip_cpu
-------------------------------- live log call ---------------------------------
INFO ireers_tools.fixtures:fixtures.py:107 **************************************************************
INFO ireers_tools.fixtures:fixtures.py:108 Exec: ['iree-benchmark-module', '--device=local-task', '--function=None', '--parameters=model=/home/ossci/iree-test-suites/sharktank_models/artifacts/sd3_clip/real_weights.irpa', '--benchmark_format=json', "--input={'source': 'https://sharkpublic.blob.core.windows.net/sharkpublic/sai/sd3-prompt-encoder/inference_input.0.bin', 'value': '1x77x2xi64'}", "--input={'source': 'https://sharkpublic.blob.core.windows.net/sharkpublic/sai/sd3-prompt-encoder/inference_input.1.bin', 'value': '1x77x2xi64'}", "--input={'source': 'https://sharkpublic.blob.core.windows.net/sharkpublic/sai/sd3-prompt-encoder/inference_input.2.bin', 'value': '1x77x2xi64'}", "--input={'source': 'https://sharkpublic.blob.core.windows.net/sharkpublic/sai/sd3-prompt-encoder/inference_input.3.bin', 'value': '1x77x2xi64'}", "--input={'source': 'https://sharkpublic.blob.core.windows.net/sharkpublic/sai/sd3-prompt-encoder/inference_input.4.bin', 'value': '1x77x2xi64'}", "--input={'source': 'https://sharkpublic.blob.core.windows.net/sharkpublic/sai/sd3-prompt-encoder/inference_input.5.bin', 'value': '1x77x2xi64'}", '--module=/home/ossci/iree-test-suites/sharktank_models/sd3_clip_vmfbs/model.cpu.vmfb']
FAILED [ 50%]
iree-test-suites/sharktank_models/benchmarks/model_benchmark_run.py::sdxl :: clip_cpu SKIPPED [100%]
---------------------------- live log sessionfinish ----------------------------
INFO conftest:conftest.py:99 Pytest benchmark test session has finished
=================================== FAILURES ===================================
___________________________ usecase: sd3 :: clip_cpu ___________________________
cls = <class '_pytest.runner.CallInfo'>
func = <function call_runtest_hook.<locals>.<lambda> at 0x7ff34b908900>
when = 'call'
reraise = (<class '_pytest.outcomes.Exit'>, <class 'KeyboardInterrupt'>)
@classmethod
def from_call(
cls,
func: Callable[[], TResult],
when: Literal["collect", "setup", "call", "teardown"],
reraise: Optional[
Union[Type[BaseException], Tuple[Type[BaseException], ...]]
] = None,
) -> "CallInfo[TResult]":
"""Call func, wrapping the result in a CallInfo.
:param func:
The function to call. Called without arguments.
:param when:
The phase in which the function is called.
:param reraise:
Exception or exceptions that shall propagate if raised by the
function, instead of being wrapped in the CallInfo.
"""
excinfo = None
start = timing.time()
precise_start = timing.perf_counter()
try:
> result: Optional[TResult] = func()
shark-ai/.venv/lib/python3.12/site-packages/_pytest/runner.py:345:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
> lambda: ihook(item=item, **kwds), when=when, reraise=reraise
)
shark-ai/.venv/lib/python3.12/site-packages/_pytest/runner.py:266:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = <HookCaller 'pytest_runtest_call'>
kwargs = {'item': <ModelBenchmarkRunItem sd3 :: clip_cpu>}, firstresult = False
def __call__(self, **kwargs: object) -> Any:
"""Call the hook.
Only accepts keyword arguments, which should match the hook
specification.
Returns the result(s) of calling all registered plugins, see
:ref:`calling`.
"""
assert not self.is_historic(), (
"Cannot directly call a historic hook - use call_historic instead."
)
self._verify_all_args_are_provided(kwargs)
firstresult = self.spec.opts.get("firstresult", False) if self.spec else False
# Copy because plugins may register other plugins during iteration (#438).
> return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
shark-ai/.venv/lib/python3.12/site-packages/pluggy/_hooks.py:512:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = <_pytest.config.PytestPluginManager object at 0x7ff34dedf110>
hook_name = 'pytest_runtest_call'
methods = [<HookImpl plugin_name='runner', plugin=<module '_pytest.runner' from '/home/ossci/shark-ai/.venv/lib/python3.12/site-...aisableexception' from '/home/ossci/shark-ai/.venv/lib/python3.12/site-packages/_pytest/unraisableexception.py'>>, ...]
kwargs = {'item': <ModelBenchmarkRunItem sd3 :: clip_cpu>}, firstresult = False
def _hookexec(
self,
hook_name: str,
methods: Sequence[HookImpl],
kwargs: Mapping[str, object],
firstresult: bool,
) -> object | list[object]:
# called from all hookcaller instances.
# enable_tracing will set its own wrapping function at self._inner_hookexec
> return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
shark-ai/.venv/lib/python3.12/site-packages/pluggy/_manager.py:120:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
hook_name = 'pytest_runtest_call'
hook_impls = [<HookImpl plugin_name='runner', plugin=<module '_pytest.runner' from '/home/ossci/shark-ai/.venv/lib/python3.12/site-...aisableexception' from '/home/ossci/shark-ai/.venv/lib/python3.12/site-packages/_pytest/unraisableexception.py'>>, ...]
caller_kwargs = {'item': <ModelBenchmarkRunItem sd3 :: clip_cpu>}
firstresult = False
def _multicall(
hook_name: str,
hook_impls: Sequence[HookImpl],
caller_kwargs: Mapping[str, object],
firstresult: bool,
) -> object | list[object]:
"""Execute a call into multiple python functions/methods and return the
result(s).
``caller_kwargs`` comes from HookCaller.__call__().
"""
__tracebackhide__ = True
results: list[object] = []
exception = None
try: # run impl and wrapper setup functions in a loop
teardowns: list[Teardown] = []
try:
for hook_impl in reversed(hook_impls):
try:
args = [caller_kwargs[argname] for argname in hook_impl.argnames]
except KeyError as e:
# coverage bug - this is tested
for argname in hook_impl.argnames: # pragma: no cover
if argname not in caller_kwargs:
raise HookCallError(
f"hook call must provide argument {argname!r}"
) from e
if hook_impl.hookwrapper:
function_gen = run_old_style_hookwrapper(hook_impl, hook_name, args)
next(function_gen) # first yield
teardowns.append(function_gen)
elif hook_impl.wrapper:
try:
# If this cast is not valid, a type error is raised below,
# which is the desired response.
res = hook_impl.function(*args)
function_gen = cast(Generator[None, object, object], res)
next(function_gen) # first yield
teardowns.append(function_gen)
except StopIteration:
_raise_wrapfail(function_gen, "did not yield")
else:
res = hook_impl.function(*args)
if res is not None:
results.append(res)
if firstresult: # halt further impl calls
break
except BaseException as exc:
exception = exc
finally:
if firstresult: # first result hooks return a single value
result = results[0] if results else None
else:
result = results
# run all wrapper post-yield blocks
for teardown in reversed(teardowns):
try:
if exception is not None:
try:
teardown.throw(exception)
except RuntimeError as re:
# StopIteration from generator causes RuntimeError
# even for coroutine usage - see #544
if (
isinstance(exception, StopIteration)
and re.__cause__ is exception
):
teardown.close()
continue
else:
raise
else:
teardown.send(result)
# Following is unreachable for a well behaved hook wrapper.
# Try to force finalizers otherwise postponed till GC action.
# Note: close() may raise if generator handles GeneratorExit.
teardown.close()
except StopIteration as si:
result = si.value
exception = None
continue
except BaseException as e:
exception = e
continue
_raise_wrapfail(teardown, "has second yield")
if exception is not None:
> raise exception
shark-ai/.venv/lib/python3.12/site-packages/pluggy/_callers.py:167:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
hook_name = 'pytest_runtest_call'
hook_impls = [<HookImpl plugin_name='runner', plugin=<module '_pytest.runner' from '/home/ossci/shark-ai/.venv/lib/python3.12/site-...aisableexception' from '/home/ossci/shark-ai/.venv/lib/python3.12/site-packages/_pytest/unraisableexception.py'>>, ...]
caller_kwargs = {'item': <ModelBenchmarkRunItem sd3 :: clip_cpu>}
firstresult = False
def _multicall(
hook_name: str,
hook_impls: Sequence[HookImpl],
caller_kwargs: Mapping[str, object],
firstresult: bool,
) -> object | list[object]:
"""Execute a call into multiple python functions/methods and return the
result(s).
``caller_kwargs`` comes from HookCaller.__call__().
"""
__tracebackhide__ = True
results: list[object] = []
exception = None
try: # run impl and wrapper setup functions in a loop
teardowns: list[Teardown] = []
try:
for hook_impl in reversed(hook_impls):
try:
args = [caller_kwargs[argname] for argname in hook_impl.argnames]
except KeyError as e:
# coverage bug - this is tested
for argname in hook_impl.argnames: # pragma: no cover
if argname not in caller_kwargs:
raise HookCallError(
f"hook call must provide argument {argname!r}"
) from e
if hook_impl.hookwrapper:
function_gen = run_old_style_hookwrapper(hook_impl, hook_name, args)
next(function_gen) # first yield
teardowns.append(function_gen)
elif hook_impl.wrapper:
try:
# If this cast is not valid, a type error is raised below,
# which is the desired response.
res = hook_impl.function(*args)
function_gen = cast(Generator[None, object, object], res)
next(function_gen) # first yield
teardowns.append(function_gen)
except StopIteration:
_raise_wrapfail(function_gen, "did not yield")
else:
res = hook_impl.function(*args)
if res is not None:
results.append(res)
if firstresult: # halt further impl calls
break
except BaseException as exc:
exception = exc
finally:
if firstresult: # first result hooks return a single value
result = results[0] if results else None
else:
result = results
# run all wrapper post-yield blocks
for teardown in reversed(teardowns):
try:
if exception is not None:
try:
> teardown.throw(exception)
shark-ai/.venv/lib/python3.12/site-packages/pluggy/_callers.py:139:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
@pytest.hookimpl(wrapper=True, tryfirst=True)
def pytest_runtest_call() -> Generator[None, None, None]:
> yield from thread_exception_runtest_hook()
shark-ai/.venv/lib/python3.12/site-packages/_pytest/threadexception.py:87:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
def thread_exception_runtest_hook() -> Generator[None, None, None]:
with catch_threading_exception() as cm:
try:
> yield
shark-ai/.venv/lib/python3.12/site-packages/_pytest/threadexception.py:63:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
hook_name = 'pytest_runtest_call'
hook_impls = [<HookImpl plugin_name='runner', plugin=<module '_pytest.runner' from '/home/ossci/shark-ai/.venv/lib/python3.12/site-...aisableexception' from '/home/ossci/shark-ai/.venv/lib/python3.12/site-packages/_pytest/unraisableexception.py'>>, ...]
caller_kwargs = {'item': <ModelBenchmarkRunItem sd3 :: clip_cpu>}
firstresult = False
def _multicall(
hook_name: str,
hook_impls: Sequence[HookImpl],
caller_kwargs: Mapping[str, object],
firstresult: bool,
) -> object | list[object]:
"""Execute a call into multiple python functions/methods and return the
result(s).
``caller_kwargs`` comes from HookCaller.__call__().
"""
__tracebackhide__ = True
results: list[object] = []
exception = None
try: # run impl and wrapper setup functions in a loop
teardowns: list[Teardown] = []
try:
for hook_impl in reversed(hook_impls):
try:
args = [caller_kwargs[argname] for argname in hook_impl.argnames]
except KeyError as e:
# coverage bug - this is tested
for argname in hook_impl.argnames: # pragma: no cover
if argname not in caller_kwargs:
raise HookCallError(
f"hook call must provide argument {argname!r}"
) from e
if hook_impl.hookwrapper:
function_gen = run_old_style_hookwrapper(hook_impl, hook_name, args)
next(function_gen) # first yield
teardowns.append(function_gen)
elif hook_impl.wrapper:
try:
# If this cast is not valid, a type error is raised below,
# which is the desired response.
res = hook_impl.function(*args)
function_gen = cast(Generator[None, object, object], res)
next(function_gen) # first yield
teardowns.append(function_gen)
except StopIteration:
_raise_wrapfail(function_gen, "did not yield")
else:
res = hook_impl.function(*args)
if res is not None:
results.append(res)
if firstresult: # halt further impl calls
break
except BaseException as exc:
exception = exc
finally:
if firstresult: # first result hooks return a single value
result = results[0] if results else None
else:
result = results
# run all wrapper post-yield blocks
for teardown in reversed(teardowns):
try:
if exception is not None:
try:
> teardown.throw(exception)
shark-ai/.venv/lib/python3.12/site-packages/pluggy/_callers.py:139:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
@pytest.hookimpl(wrapper=True, tryfirst=True)
def pytest_runtest_call() -> Generator[None, None, None]:
> yield from unraisable_exception_runtest_hook()
shark-ai/.venv/lib/python3.12/site-packages/_pytest/unraisableexception.py:90:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
def unraisable_exception_runtest_hook() -> Generator[None, None, None]:
with catch_unraisable_exception() as cm:
try:
> yield
shark-ai/.venv/lib/python3.12/site-packages/_pytest/unraisableexception.py:65:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
hook_name = 'pytest_runtest_call'
hook_impls = [<HookImpl plugin_name='runner', plugin=<module '_pytest.runner' from '/home/ossci/shark-ai/.venv/lib/python3.12/site-...aisableexception' from '/home/ossci/shark-ai/.venv/lib/python3.12/site-packages/_pytest/unraisableexception.py'>>, ...]
caller_kwargs = {'item': <ModelBenchmarkRunItem sd3 :: clip_cpu>}
firstresult = False
def _multicall(
hook_name: str,
hook_impls: Sequence[HookImpl],
caller_kwargs: Mapping[str, object],
firstresult: bool,
) -> object | list[object]:
"""Execute a call into multiple python functions/methods and return the
result(s).
``caller_kwargs`` comes from HookCaller.__call__().
"""
__tracebackhide__ = True
results: list[object] = []
exception = None
try: # run impl and wrapper setup functions in a loop
teardowns: list[Teardown] = []
try:
for hook_impl in reversed(hook_impls):
try:
args = [caller_kwargs[argname] for argname in hook_impl.argnames]
except KeyError as e:
# coverage bug - this is tested
for argname in hook_impl.argnames: # pragma: no cover
if argname not in caller_kwargs:
raise HookCallError(
f"hook call must provide argument {argname!r}"
) from e
if hook_impl.hookwrapper:
function_gen = run_old_style_hookwrapper(hook_impl, hook_name, args)
next(function_gen) # first yield
teardowns.append(function_gen)
elif hook_impl.wrapper:
try:
# If this cast is not valid, a type error is raised below,
# which is the desired response.
res = hook_impl.function(*args)
function_gen = cast(Generator[None, object, object], res)
next(function_gen) # first yield
teardowns.append(function_gen)
except StopIteration:
_raise_wrapfail(function_gen, "did not yield")
else:
res = hook_impl.function(*args)
if res is not None:
results.append(res)
if firstresult: # halt further impl calls
break
except BaseException as exc:
exception = exc
finally:
if firstresult: # first result hooks return a single value
result = results[0] if results else None
else:
result = results
# run all wrapper post-yield blocks
for teardown in reversed(teardowns):
try:
if exception is not None:
try:
> teardown.throw(exception)
shark-ai/.venv/lib/python3.12/site-packages/pluggy/_callers.py:139:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = <_pytest.logging.LoggingPlugin object at 0x7ff34c8dbcb0>
item = <ModelBenchmarkRunItem sd3 :: clip_cpu>
@hookimpl(wrapper=True)
def pytest_runtest_call(self, item: nodes.Item) -> Generator[None, None, None]:
self.log_cli_handler.set_when("call")
> yield from self._runtest_for(item, "call")
shark-ai/.venv/lib/python3.12/site-packages/_pytest/logging.py:839:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = <_pytest.logging.LoggingPlugin object at 0x7ff34c8dbcb0>
item = <ModelBenchmarkRunItem sd3 :: clip_cpu>, when = 'call'
def _runtest_for(self, item: nodes.Item, when: str) -> Generator[None, None, None]:
"""Implement the internals of the pytest_runtest_xxx() hooks."""
with catching_logs(
self.caplog_handler,
level=self.log_level,
) as caplog_handler, catching_logs(
self.report_handler,
level=self.log_level,
) as report_handler:
caplog_handler.reset()
report_handler.reset()
item.stash[caplog_records_key][when] = caplog_handler.records
item.stash[caplog_handler_key] = caplog_handler
try:
> yield
shark-ai/.venv/lib/python3.12/site-packages/_pytest/logging.py:822:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
hook_name = 'pytest_runtest_call'
hook_impls = [<HookImpl plugin_name='runner', plugin=<module '_pytest.runner' from '/home/ossci/shark-ai/.venv/lib/python3.12/site-...aisableexception' from '/home/ossci/shark-ai/.venv/lib/python3.12/site-packages/_pytest/unraisableexception.py'>>, ...]
caller_kwargs = {'item': <ModelBenchmarkRunItem sd3 :: clip_cpu>}
firstresult = False
def _multicall(
hook_name: str,
hook_impls: Sequence[HookImpl],
caller_kwargs: Mapping[str, object],
firstresult: bool,
) -> object | list[object]:
"""Execute a call into multiple python functions/methods and return the
result(s).
``caller_kwargs`` comes from HookCaller.__call__().
"""
__tracebackhide__ = True
results: list[object] = []
exception = None
try: # run impl and wrapper setup functions in a loop
teardowns: list[Teardown] = []
try:
for hook_impl in reversed(hook_impls):
try:
args = [caller_kwargs[argname] for argname in hook_impl.argnames]
except KeyError as e:
# coverage bug - this is tested
for argname in hook_impl.argnames: # pragma: no cover
if argname not in caller_kwargs:
raise HookCallError(
f"hook call must provide argument {argname!r}"
) from e
if hook_impl.hookwrapper:
function_gen = run_old_style_hookwrapper(hook_impl, hook_name, args)
next(function_gen) # first yield
teardowns.append(function_gen)
elif hook_impl.wrapper:
try:
# If this cast is not valid, a type error is raised below,
# which is the desired response.
res = hook_impl.function(*args)
function_gen = cast(Generator[None, object, object], res)
next(function_gen) # first yield
teardowns.append(function_gen)
except StopIteration:
_raise_wrapfail(function_gen, "did not yield")
else:
res = hook_impl.function(*args)
if res is not None:
results.append(res)
if firstresult: # halt further impl calls
break
except BaseException as exc:
exception = exc
finally:
if firstresult: # first result hooks return a single value
result = results[0] if results else None
else:
result = results
# run all wrapper post-yield blocks
for teardown in reversed(teardowns):
try:
if exception is not None:
try:
> teardown.throw(exception)
shark-ai/.venv/lib/python3.12/site-packages/pluggy/_callers.py:139:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = <CaptureManager _method='fd' _global_capturing=<MultiCapture out=<FDCapture 1 oldfd=5 _state='suspended' tmpfile=<_io....xtIOWrapper name='/dev/null' mode='r' encoding='utf-8'>> _state='suspended' _in_suspended=False> _capture_fixture=None>
item = <ModelBenchmarkRunItem sd3 :: clip_cpu>
@hookimpl(wrapper=True)
def pytest_runtest_call(self, item: Item) -> Generator[None, None, None]:
with self.item_capture("call", item):
> return (yield)
shark-ai/.venv/lib/python3.12/site-packages/_pytest/capture.py:882:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
hook_name = 'pytest_runtest_call'
hook_impls = [<HookImpl plugin_name='runner', plugin=<module '_pytest.runner' from '/home/ossci/shark-ai/.venv/lib/python3.12/site-...aisableexception' from '/home/ossci/shark-ai/.venv/lib/python3.12/site-packages/_pytest/unraisableexception.py'>>, ...]
caller_kwargs = {'item': <ModelBenchmarkRunItem sd3 :: clip_cpu>}
firstresult = False
def _multicall(
hook_name: str,
hook_impls: Sequence[HookImpl],
caller_kwargs: Mapping[str, object],
firstresult: bool,
) -> object | list[object]:
"""Execute a call into multiple python functions/methods and return the
result(s).
``caller_kwargs`` comes from HookCaller.__call__().
"""
__tracebackhide__ = True
results: list[object] = []
exception = None
try: # run impl and wrapper setup functions in a loop
teardowns: list[Teardown] = []
try:
for hook_impl in reversed(hook_impls):
try:
args = [caller_kwargs[argname] for argname in hook_impl.argnames]
except KeyError as e:
# coverage bug - this is tested
for argname in hook_impl.argnames: # pragma: no cover
if argname not in caller_kwargs:
raise HookCallError(
f"hook call must provide argument {argname!r}"
) from e
if hook_impl.hookwrapper:
function_gen = run_old_style_hookwrapper(hook_impl, hook_name, args)
next(function_gen) # first yield
teardowns.append(function_gen)
elif hook_impl.wrapper:
try:
# If this cast is not valid, a type error is raised below,
# which is the desired response.
res = hook_impl.function(*args)
function_gen = cast(Generator[None, object, object], res)
next(function_gen) # first yield
teardowns.append(function_gen)
except StopIteration:
_raise_wrapfail(function_gen, "did not yield")
else:
res = hook_impl.function(*args)
if res is not None:
results.append(res)
if firstresult: # halt further impl calls
break
except BaseException as exc:
exception = exc
finally:
if firstresult: # first result hooks return a single value
result = results[0] if results else None
else:
result = results
# run all wrapper post-yield blocks
for teardown in reversed(teardowns):
try:
if exception is not None:
try:
> teardown.throw(exception)
shark-ai/.venv/lib/python3.12/site-packages/pluggy/_callers.py:139:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
hook_impl = <HookImpl plugin_name='timeout', plugin=<module 'pytest_timeout' from '/home/ossci/shark-ai/.venv/lib/python3.12/site-packages/pytest_timeout.py'>>
hook_name = 'pytest_runtest_call'
args = [<ModelBenchmarkRunItem sd3 :: clip_cpu>]
def run_old_style_hookwrapper(
hook_impl: HookImpl, hook_name: str, args: Sequence[object]
) -> Teardown:
"""
backward compatibility wrapper to run a old style hookwrapper as a wrapper
"""
teardown: Teardown = cast(Teardown, hook_impl.function(*args))
try:
next(teardown)
except StopIteration:
_raise_wrapfail(teardown, "did not yield")
try:
res = yield
result = Result(res, None)
except BaseException as exc:
result = Result(None, exc)
try:
teardown.send(result)
except StopIteration:
pass
except BaseException as e:
_warn_teardown_exception(hook_name, hook_impl, e)
raise
else:
_raise_wrapfail(teardown, "has second yield")
finally:
teardown.close()
> return result.get_result()
shark-ai/.venv/lib/python3.12/site-packages/pluggy/_callers.py:53:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = <pluggy._result.Result object at 0x7ff34b912a40>
def get_result(self) -> ResultType:
"""Get the result(s) for this hook call.
If the hook was marked as a ``firstresult`` only a single value
will be returned, otherwise a list of results.
"""
__tracebackhide__ = True
exc = self._exception
tb = self._traceback
if exc is None:
return cast(ResultType, self._result)
else:
> raise exc.with_traceback(tb)
shark-ai/.venv/lib/python3.12/site-packages/pluggy/_result.py:103:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
hook_impl = <HookImpl plugin_name='timeout', plugin=<module 'pytest_timeout' from '/home/ossci/shark-ai/.venv/lib/python3.12/site-packages/pytest_timeout.py'>>
hook_name = 'pytest_runtest_call'
args = [<ModelBenchmarkRunItem sd3 :: clip_cpu>]
def run_old_style_hookwrapper(
hook_impl: HookImpl, hook_name: str, args: Sequence[object]
) -> Teardown:
"""
backward compatibility wrapper to run a old style hookwrapper as a wrapper
"""
teardown: Teardown = cast(Teardown, hook_impl.function(*args))
try:
next(teardown)
except StopIteration:
_raise_wrapfail(teardown, "did not yield")
try:
> res = yield
shark-ai/.venv/lib/python3.12/site-packages/pluggy/_callers.py:38:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
hook_name = 'pytest_runtest_call'
hook_impls = [<HookImpl plugin_name='runner', plugin=<module '_pytest.runner' from '/home/ossci/shark-ai/.venv/lib/python3.12/site-...aisableexception' from '/home/ossci/shark-ai/.venv/lib/python3.12/site-packages/_pytest/unraisableexception.py'>>, ...]
caller_kwargs = {'item': <ModelBenchmarkRunItem sd3 :: clip_cpu>}
firstresult = False
def _multicall(
hook_name: str,
hook_impls: Sequence[HookImpl],
caller_kwargs: Mapping[str, object],
firstresult: bool,
) -> object | list[object]:
"""Execute a call into multiple python functions/methods and return the
result(s).
``caller_kwargs`` comes from HookCaller.__call__().
"""
__tracebackhide__ = True
results: list[object] = []
exception = None
try: # run impl and wrapper setup functions in a loop
teardowns: list[Teardown] = []
try:
for hook_impl in reversed(hook_impls):
try:
args = [caller_kwargs[argname] for argname in hook_impl.argnames]
except KeyError as e:
# coverage bug - this is tested
for argname in hook_impl.argnames: # pragma: no cover
if argname not in caller_kwargs:
raise HookCallError(
f"hook call must provide argument {argname!r}"
) from e
if hook_impl.hookwrapper:
function_gen = run_old_style_hookwrapper(hook_impl, hook_name, args)
next(function_gen) # first yield
teardowns.append(function_gen)
elif hook_impl.wrapper:
try:
# If this cast is not valid, a type error is raised below,
# which is the desired response.
res = hook_impl.function(*args)
function_gen = cast(Generator[None, object, object], res)
next(function_gen) # first yield
teardowns.append(function_gen)
except StopIteration:
_raise_wrapfail(function_gen, "did not yield")
else:
res = hook_impl.function(*args)
if res is not None:
results.append(res)
if firstresult: # halt further impl calls
break
except BaseException as exc:
exception = exc
finally:
if firstresult: # first result hooks return a single value
result = results[0] if results else None
else:
result = results
# run all wrapper post-yield blocks
for teardown in reversed(teardowns):
try:
if exception is not None:
try:
> teardown.throw(exception)
shark-ai/.venv/lib/python3.12/site-packages/pluggy/_callers.py:139:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
item = <ModelBenchmarkRunItem sd3 :: clip_cpu>
@hookimpl(wrapper=True)
def pytest_runtest_call(item: Item) -> Generator[None, None, None]:
xfailed = item.stash.get(xfailed_key, None)
if xfailed is None:
item.stash[xfailed_key] = xfailed = evaluate_xfail_marks(item)
if xfailed and not item.config.option.runxfail and not xfailed.run:
xfail("[NOTRUN] " + xfailed.reason)
try:
> return (yield)
shark-ai/.venv/lib/python3.12/site-packages/_pytest/skipping.py:257:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
hook_name = 'pytest_runtest_call'
hook_impls = [<HookImpl plugin_name='runner', plugin=<module '_pytest.runner' from '/home/ossci/shark-ai/.venv/lib/python3.12/site-...aisableexception' from '/home/ossci/shark-ai/.venv/lib/python3.12/site-packages/_pytest/unraisableexception.py'>>, ...]
caller_kwargs = {'item': <ModelBenchmarkRunItem sd3 :: clip_cpu>}
firstresult = False
def _multicall(
hook_name: str,
hook_impls: Sequence[HookImpl],
caller_kwargs: Mapping[str, object],
firstresult: bool,
) -> object | list[object]:
"""Execute a call into multiple python functions/methods and return the
result(s).
``caller_kwargs`` comes from HookCaller.__call__().
"""
__tracebackhide__ = True
results: list[object] = []
exception = None
try: # run impl and wrapper setup functions in a loop
teardowns: list[Teardown] = []
try:
for hook_impl in reversed(hook_impls):
try:
args = [caller_kwargs[argname] for argname in hook_impl.argnames]
except KeyError as e:
# coverage bug - this is tested
for argname in hook_impl.argnames: # pragma: no cover
if argname not in caller_kwargs:
raise HookCallError(
f"hook call must provide argument {argname!r}"
) from e
if hook_impl.hookwrapper:
function_gen = run_old_style_hookwrapper(hook_impl, hook_name, args)
next(function_gen) # first yield
teardowns.append(function_gen)
elif hook_impl.wrapper:
try:
# If this cast is not valid, a type error is raised below,
# which is the desired response.
res = hook_impl.function(*args)
function_gen = cast(Generator[None, object, object], res)
next(function_gen) # first yield
teardowns.append(function_gen)
except StopIteration:
_raise_wrapfail(function_gen, "did not yield")
else:
> res = hook_impl.function(*args)
shark-ai/.venv/lib/python3.12/site-packages/pluggy/_callers.py:121:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
item = <ModelBenchmarkRunItem sd3 :: clip_cpu>
def pytest_runtest_call(item: Item) -> None:
_update_current_test_var(item, "call")
try:
del sys.last_type
del sys.last_value
del sys.last_traceback
except AttributeError:
pass
try:
item.runtest()
except Exception as e:
# Store trace info to allow postmortem debugging
sys.last_type = type(e)
sys.last_value = e
assert e.__traceback__ is not None
# Skip *this* frame
sys.last_traceback = e.__traceback__.tb_next
> raise e
shark-ai/.venv/lib/python3.12/site-packages/_pytest/runner.py:181:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
item = <ModelBenchmarkRunItem sd3 :: clip_cpu>
def pytest_runtest_call(item: Item) -> None:
_update_current_test_var(item, "call")
try:
del sys.last_type
del sys.last_value
del sys.last_traceback
except AttributeError:
pass
try:
> item.runtest()
shark-ai/.venv/lib/python3.12/site-packages/_pytest/runner.py:173:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = <ModelBenchmarkRunItem sd3 :: clip_cpu>
def runtest(self):
# if a rocm chip is designated to be ignored in JSON file, skip test
if chip in self.specific_chip_to_ignore:
pytest.skip(
f"Ignoring benchmark test for {self.model_name} {self.submodel_name} for chip {chip}"
)
# if compilation is required, run this step
if self.compilation_required:
compiled_vmfb_path = compile_iree_method(
self.mlir_file_path, self.compile_flags, self.compiled_file_name
)
if not compiled_vmfb_path:
pytest.fail(
f"Failed to compile for {self.model_name} {self.submodel_name} during benchmark test. Skipping..."
)
directory_compile = f"{vmfb_dir}/{self.model_name}_{self.submodel_name}_vmfbs"
artifact_directory = f"{artifacts_dir}/{self.model_name}_{self.submodel_name}"
vmfb_file_path = f"{directory_compile}/model.{self.file_suffix}.vmfb"
exec_args = [
f"--parameters=model={artifact_directory}/{self.real_weights_file_name}"
]
# If there are modules for an e2e pipeline test, reset exec_args and directory_compile variables to custom variables
if self.modules:
all_modules_found, exec_args = e2e_iree_benchmark_module_args(
self.modules, self.file_suffix
)
if not all_modules_found:
pytest.skip(
f"Modules needed for {self.model_name} :: {self.submodel_name} not found, unable to run benchmark tests. Skipping..."
)
vmfb_file_path = f"{vmfb_dir}/{self.compiled_file_name}.vmfb"
exec_args += (
[
"--benchmark_format=json",
]
+ get_input_list(self.inputs)
+ self.benchmark_flags
)
if not Path(vmfb_file_path).is_file():
pytest.skip(
f"Vmfb file for {self.model_name} :: {self.submodel_name} was not found. Unable to run benchmark tests, skipping..."
)
# run iree benchmark command
ret_value, output = iree_benchmark_module(
vmfb=Path(vmfb_file_path),
device=self.device,
function=self.function_run,
args=exec_args,
)
# parse the output and retrieve the benchmark mean time
> benchmark_mean_time = job_summary_process(ret_value, output, self.model_name)
iree-test-suites/sharktank_models/benchmarks/model_benchmark_run.py:203:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
ret_value = 5
output = b'c/runtime/src/iree/vm/bytecode/module.c:317: NOT_FOUND; no function `None` exported by module\n'
model_name = 'sd3'
def job_summary_process(ret_value, output, model_name):
if ret_value == 1:
# Output should have already been logged earlier.
pytest.fail(f"Running {model_name} benchmark failed. Exiting.")
> output_json = json.loads(output)
iree-test-suites/sharktank_models/benchmarks/model_benchmark_run.py:46:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
s = 'c/runtime/src/iree/vm/bytecode/module.c:317: NOT_FOUND; no function `None` exported by module\n'
cls = None, object_hook = None, parse_float = None, parse_int = None
parse_constant = None, object_pairs_hook = None, kw = {}
def loads(s, *, cls=None, object_hook=None, parse_float=None,
parse_int=None, parse_constant=None, object_pairs_hook=None, **kw):
"""Deserialize ``s`` (a ``str``, ``bytes`` or ``bytearray`` instance
containing a JSON document) to a Python object.
``object_hook`` is an optional function that will be called with the
result of any object literal decode (a ``dict``). The return value of
``object_hook`` will be used instead of the ``dict``. This feature
can be used to implement custom decoders (e.g. JSON-RPC class hinting).
``object_pairs_hook`` is an optional function that will be called with the
result of any object literal decoded with an ordered list of pairs. The
return value of ``object_pairs_hook`` will be used instead of the ``dict``.
This feature can be used to implement custom decoders. If ``object_hook``
is also defined, the ``object_pairs_hook`` takes priority.
``parse_float``, if specified, will be called with the string
of every JSON float to be decoded. By default this is equivalent to
float(num_str). This can be used to use another datatype or parser
for JSON floats (e.g. decimal.Decimal).
``parse_int``, if specified, will be called with the string
of every JSON int to be decoded. By default this is equivalent to
int(num_str). This can be used to use another datatype or parser
for JSON integers (e.g. float).
``parse_constant``, if specified, will be called with one of the
following strings: -Infinity, Infinity, NaN.
This can be used to raise an exception if invalid JSON numbers
are encountered.
To use a custom ``JSONDecoder`` subclass, specify it with the ``cls``
kwarg; otherwise ``JSONDecoder`` is used.
"""
if isinstance(s, str):
if s.startswith('\ufeff'):
raise JSONDecodeError("Unexpected UTF-8 BOM (decode using utf-8-sig)",
s, 0)
else:
if not isinstance(s, (bytes, bytearray)):
raise TypeError(f'the JSON object must be str, bytes or bytearray, '
f'not {s.__class__.__name__}')
s = s.decode(detect_encoding(s), 'surrogatepass')
if (cls is None and object_hook is None and
parse_int is None and parse_float is None and
parse_constant is None and object_pairs_hook is None and not kw):
> return _default_decoder.decode(s)
/usr/lib/python3.12/json/__init__.py:346:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = <json.decoder.JSONDecoder object at 0x7ff34e221010>
s = 'c/runtime/src/iree/vm/bytecode/module.c:317: NOT_FOUND; no function `None` exported by module\n'
_w = <built-in method match of re.Pattern object at 0x7ff34e218ad0>
def decode(self, s, _w=WHITESPACE.match):
"""Return the Python representation of ``s`` (a ``str`` instance
containing a JSON document).
"""
> obj, end = self.raw_decode(s, idx=_w(s, 0).end())
/usr/lib/python3.12/json/decoder.py:337:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = <json.decoder.JSONDecoder object at 0x7ff34e221010>
s = 'c/runtime/src/iree/vm/bytecode/module.c:317: NOT_FOUND; no function `None` exported by module\n'
idx = 0
def raw_decode(self, s, idx=0):
"""Decode a JSON document from ``s`` (a ``str`` beginning with
a JSON document) and return a 2-tuple of the Python
representation and the index in ``s`` where the document ended.
This can be used to decode a JSON document from a string that may
have extraneous data at the end.
"""
try:
obj, end = self.scan_once(s, idx)
except StopIteration as err:
> raise JSONDecodeError("Expecting value", s, err.value) from None
E json.decoder.JSONDecodeError: Expecting value: line 1 column 1 (char 0)
/usr/lib/python3.12/json/decoder.py:355: JSONDecodeError
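The bottom frames above show the actual failure mechanism: iree-benchmark-module exited with code 5 and printed a plain-text error (NOT_FOUND; no function `None` exported by module) instead of the JSON payload expected with --benchmark_format=json, and job_summary_process only treats ret_value == 1 as a failure, so the error text fell through to json.loads. A minimal standalone repro of that final step (illustrative only, not part of the test suite):

import json

# Captured tool output from the failed run (see the locals above).
output = b"c/runtime/src/iree/vm/bytecode/module.c:317: NOT_FOUND; no function `None` exported by module\n"

try:
    json.loads(output)  # json.loads accepts bytes, but this content is not JSON
except json.JSONDecodeError as exc:
    # Reproduces the error reported in the log: "Expecting value: line 1 column 1 (char 0)"
    print(f"json.decoder.JSONDecodeError: {exc}")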
----------------------------- Captured stderr call -----------------------------
INFO:ireers_tools.fixtures:**************************************************************
INFO:ireers_tools.fixtures:Exec: ['iree-benchmark-module', '--device=local-task', '--function=None', '--parameters=model=/home/ossci/iree-test-suites/sharktank_models/artifacts/sd3_clip/real_weights.irpa', '--benchmark_format=json', "--input={'source': 'https://sharkpublic.blob.core.windows.net/sharkpublic/sai/sd3-prompt-encoder/inference_input.0.bin', 'value': '1x77x2xi64'}", "--input={'source': 'https://sharkpublic.blob.core.windows.net/sharkpublic/sai/sd3-prompt-encoder/inference_input.1.bin', 'value': '1x77x2xi64'}", "--input={'source': 'https://sharkpublic.blob.core.windows.net/sharkpublic/sai/sd3-prompt-encoder/inference_input.2.bin', 'value': '1x77x2xi64'}", "--input={'source': 'https://sharkpublic.blob.core.windows.net/sharkpublic/sai/sd3-prompt-encoder/inference_input.3.bin', 'value': '1x77x2xi64'}", "--input={'source': 'https://sharkpublic.blob.core.windows.net/sharkpublic/sai/sd3-prompt-encoder/inference_input.4.bin', 'value': '1x77x2xi64'}", "--input={'source': 'https://sharkpublic.blob.core.windows.net/sharkpublic/sai/sd3-prompt-encoder/inference_input.5.bin', 'value': '1x77x2xi64'}", '--module=/home/ossci/iree-test-suites/sharktank_models/sd3_clip_vmfbs/model.cpu.vmfb']
------------------------------ Captured log call -------------------------------
INFO:conftest:Pytest benchmark test session has finished
INFO ireers_tools.fixtures:fixtures.py:107 **************************************************************
INFO ireers_tools.fixtures:fixtures.py:108 Exec: ['iree-benchmark-module', '--device=local-task', '--function=None', '--parameters=model=/home/ossci/iree-test-suites/sharktank_models/artifacts/sd3_clip/real_weights.irpa', '--benchmark_format=json', "--input={'source': 'https://sharkpublic.blob.core.windows.net/sharkpublic/sai/sd3-prompt-encoder/inference_input.0.bin', 'value': '1x77x2xi64'}", "--input={'source': 'https://sharkpublic.blob.core.windows.net/sharkpublic/sai/sd3-prompt-encoder/inference_input.1.bin', 'value': '1x77x2xi64'}", "--input={'source': 'https://sharkpublic.blob.core.windows.net/sharkpublic/sai/sd3-prompt-encoder/inference_input.2.bin', 'value': '1x77x2xi64'}", "--input={'source': 'https://sharkpublic.blob.core.windows.net/sharkpublic/sai/sd3-prompt-encoder/inference_input.3.bin', 'value': '1x77x2xi64'}", "--input={'source': 'https://sharkpublic.blob.core.windows.net/sharkpublic/sai/sd3-prompt-encoder/inference_input.4.bin', 'value': '1x77x2xi64'}", "--input={'source': 'https://sharkpublic.blob.core.windows.net/sharkpublic/sai/sd3-prompt-encoder/inference_input.5.bin', 'value': '1x77x2xi64'}", '--module=/home/ossci/iree-test-suites/sharktank_models/sd3_clip_vmfbs/model.cpu.vmfb']
============================== slowest durations ===============================
4.01s call benchmarks/model_benchmark_run.py::sd3 :: clip_cpu
(5 durations < 0.005s hidden. Use -vv to show these durations.)
=========================== short test summary info ============================
FAILED iree-test-suites/sharktank_models/benchmarks/model_benchmark_run.py::sd3 :: clip_cpu
================== 1 failed, 1 skipped, 6 deselected in 4.08s ==================
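Read together, the log points at two distinct problems, both inferred only from the output above rather than from the test-suite sources: the benchmark was launched with --function=None, i.e. the function name for the sd3 :: clip_cpu case was apparently never resolved, which makes the VM module lookup fail with NOT_FOUND and produces non-JSON output; and job_summary_process only treats exit code 1 as a failure, so exit code 5 slips past the check and surfaces as a confusing JSONDecodeError rather than a clear benchmark failure. A hedged sketch of a more defensive variant of the result handling (hypothetical code, not the suite's actual implementation; the original mean-time extraction would follow the successful parse):

import json

import pytest


def job_summary_process(ret_value, output, model_name):
    # Hypothetical defensive variant: fail on any non-zero exit code, not just 1,
    # and report the raw tool output if it cannot be parsed as JSON.
    if ret_value != 0:
        pytest.fail(
            f"Running {model_name} benchmark failed (exit code {ret_value}): {output!r}"
        )
    try:
        return json.loads(output)
    except json.JSONDecodeError:
        pytest.fail(f"Benchmark output for {model_name} is not valid JSON: {output!r}")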