step pattern parse error #21

@EugeneJao

Description

I use Ollama to run my model qwen3-vl:8b, but when I run the example, an exception shows up in the Ollama log. I searched for the key words in the project and found the template defined in output_schemas.py, line 251: the step_id pattern uses the \d regex escape, which the grammar that Ollama builds from the schema cannot parse. After I modified the regex, it works!

git commit id: 1a067a1

wrong code

    "step_id": {
        "type": "string",
        "pattern": "^Step \\d+$"
    },

correct code

    "step_id": {
        "type": "string",
        "pattern": "^Step \\d+$"
    },
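
The essential point of the fix is to keep the bare \d escape out of any "pattern" that Ollama turns into a grammar. The sketch below is a minimal, hypothetical verification against a local Ollama server (ollama Python client >= 0.4, qwen3-vl:8b pulled); the [0-9] character class is one way to avoid the unsupported escape and is an assumption, not necessarily the exact pattern used in commit 1a067a1.

# Minimal sketch, assuming a local Ollama server with qwen3-vl:8b available and the
# ollama Python client installed. The commented-out pattern reproduces the
# "unknown escape at \d" grammar error; the character-class pattern is accepted.
import ollama

schema = {
    "type": "object",
    "properties": {
        "step_id": {
            "type": "string",
            # "pattern": "^Step \\d+$",   # \d is copied into the grammar and fails to parse
            "pattern": "^Step [0-9]+$",   # explicit character class, no unsupported escape
        },
    },
    "required": ["step_id"],
}

response = ollama.chat(
    model="qwen3-vl:8b",
    messages=[{"role": "user", "content": "Give the id of the first step."}],
    format=schema,  # structured output constrained by the schema-derived grammar
)
print(response.message.content)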

ollama log
parse: error parsing grammar: unknown escape at \d"+) """ space
step-by-step-plan ::= "[" space (step-by-step-plan-item ("," space step-by-step-plan-item))? "]" space
step-by-step-plan-item ::= "{" space step-by-step-plan-item-step-id-kv "," space step-by-step-plan-item-description-kv "," space step-by-step-plan-item-important-search-info-kv "}" space
step-by-step-plan-item-important-search-info-kv ::= ""important_search_info"" space ":" space string
natural-language-plan-kv ::= ""natural_language_plan"" space ":" space string
char ::= [^"\\\x7F\x00-\x1F] | [\] (["\bfnrt] | "u" [0-9a-fA-F]{4})
string ::= """ char
""" space
root ::= "{" space iteration-info-kv "," space search-summary-kv "," space selected-skills-kv "," space step-by-step-plan-kv ( "," space ( natural-language-plan-kv ) )? "}" space
selected-skills-kv ::= ""selected_skills"" space ":" space selected-skills
iteration-info-current-iteration-kv ::= ""current_iteration"" space ":" space iteration-info-current-iteration
selected-skills ::= "[" space (string ("," space string)*)? "]" space
iteration-info-total-iterations ::= ([1-9] [0-9]{0,15}) space
iteration-info-kv ::= ""iteration_info"" space ":" space iteration-info
space ::= | " " | "\n"{1,2} [ \t]{0,20}
search-summary-kv ::= ""search_summary"" space ":" space string
iteration-info ::= "{" space iteration-info-current-iteration-kv "," space iteration-info-total-iterations-kv "}" space
iteration-info-total-iterations-kv ::= ""total_iterations"" space ":" space iteration-info-total-iterations
iteration-info-current-iteration ::= ([1-9] [0-9]{0,15}) space
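
The failing rule is the one derived from the step_id pattern: the converter copies the \d escape into a quoted grammar literal ("\d"+), and the grammar parser rejects that escape. As a defensive check, a small hypothetical helper (not part of TuriX-CUA) can scan a schema for such shorthand escapes before it is handed to Ollama:

import re

# Shorthand regex escapes that the generated grammar rejected here; the list is
# illustrative, not exhaustive.
RISKY_ESCAPE = re.compile(r"\\[dDwWsS]")

def find_risky_patterns(node, path="$"):
    """Recursively collect JSON-schema 'pattern' values that use shorthand escapes like \\d."""
    hits = []
    if isinstance(node, dict):
        pattern = node.get("pattern")
        if isinstance(pattern, str) and RISKY_ESCAPE.search(pattern):
            hits.append(f"{path}.pattern = {pattern!r}")
        for key, value in node.items():
            hits += find_risky_patterns(value, f"{path}.{key}")
    elif isinstance(node, list):
        for i, item in enumerate(node):
            hits += find_risky_patterns(item, f"{path}[{i}]")
    return hits

# The original step_id definition is flagged; a character-class version would not be.
print(find_risky_patterns({"step_id": {"type": "string", "pattern": "^Step \\d+$"}}))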

main.py log
2026-03-13 17:02:20,301 - src.agent.service - ERROR - Error running agent
Traceback (most recent call last):
File "/Users/codemonkey/Project/TuriX-CUA/src/agent/service.py", line 1048, in run
await self.edit()
File "/Users/codemonkey/Project/TuriX-CUA/src/agent/service.py", line 1077, in edit
result = await self.planner.edit_task()
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/Users/codemonkey/Project/TuriX-CUA/src/agent/planner_service.py", line 527, in edit_task
response = await self.planner_llm.ainvoke(messages)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/Users/codemonkey/Project/TuriX-CUA/turix_env/lib/python3.14/site-packages/langchain_core/runnables/base.py", line 5443, in ainvoke
return await self.bound.ainvoke(
^^^^^^^^^^^^^^^^^^^^^^^^^
...<3 lines>...
)
^
File "/Users/codemonkey/Project/TuriX-CUA/turix_env/lib/python3.14/site-packages/langchain_core/language_models/chat_models.py", line 394, in ainvoke
llm_result = await self.agenerate_prompt(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^
...<8 lines>...
)
^
File "/Users/codemonkey/Project/TuriX-CUA/turix_env/lib/python3.14/site-packages/langchain_core/language_models/chat_models.py", line 968, in agenerate_prompt
return await self.agenerate(
^^^^^^^^^^^^^^^^^^^^^
prompt_messages, stop=stop, callbacks=callbacks, **kwargs
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
)
^
File "/Users/codemonkey/Project/TuriX-CUA/turix_env/lib/python3.14/site-packages/langchain_core/language_models/chat_models.py", line 926, in agenerate
raise exceptions[0]
File "/Users/codemonkey/Project/TuriX-CUA/turix_env/lib/python3.14/site-packages/langchain_core/language_models/chat_models.py", line 1094, in _agenerate_with_cache
result = await self._agenerate(
^^^^^^^^^^^^^^^^^^^^^^
messages, stop=stop, run_manager=run_manager, **kwargs
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
)
^
File "/Users/codemonkey/Project/TuriX-CUA/turix_env/lib/python3.14/site-packages/langchain_ollama/chat_models.py", line 788, in _agenerate
final_chunk = await self._achat_stream_with_aggregation(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
messages, stop, run_manager, verbose=self.verbose, **kwargs
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
)
^
File "/Users/codemonkey/Project/TuriX-CUA/turix_env/lib/python3.14/site-packages/langchain_ollama/chat_models.py", line 645, in _achat_stream_with_aggregation
async for stream_resp in self._acreate_chat_stream(messages, stop, **kwargs):
...<27 lines>...
)
File "/Users/codemonkey/Project/TuriX-CUA/turix_env/lib/python3.14/site-packages/langchain_ollama/chat_models.py", line 575, in _acreate_chat_stream
async for part in await self._async_client.chat(**chat_params):
yield part
File "/Users/codemonkey/Project/TuriX-CUA/turix_env/lib/python3.14/site-packages/ollama/_client.py", line 762, in inner
raise ResponseError(err)
ollama._types.ResponseError: failed to load model vocabulary required for format (status code: -1)
Traceback (most recent call last):
File "/Users/codemonkey/Project/TuriX-CUA/examples/main.py", line 438, in
main(args.config)
~~~~^^^^^^^^^^^^^
File "/Users/codemonkey/Project/TuriX-CUA/examples/main.py", line 429, in main
asyncio.run(runner())
~~~~~~~~~~~^^^^^^^^^^
File "/opt/homebrew/Cellar/python@3.14/3.14.3_1/Frameworks/Python.framework/Versions/3.14/lib/python3.14/asyncio/runners.py", line 204, in run
return runner.run(main)
~~~~~~~~~~^^^^^^
File "/opt/homebrew/Cellar/python@3.14/3.14.3_1/Frameworks/Python.framework/Versions/3.14/lib/python3.14/asyncio/runners.py", line 127, in run
return self._loop.run_until_complete(task)
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^
File "/opt/homebrew/Cellar/python@3.14/3.14.3_1/Frameworks/Python.framework/Versions/3.14/lib/python3.14/asyncio/base_events.py", line 719, in run_until_complete
return future.result()
~~~~~~~~~~~~~^^
File "/Users/codemonkey/Project/TuriX-CUA/examples/main.py", line 422, in runner
await agent_task
File "/Users/codemonkey/Project/TuriX-CUA/src/agent/service.py", line 1048, in run
await self.edit()
File "/Users/codemonkey/Project/TuriX-CUA/src/agent/service.py", line 1077, in edit
result = await self.planner.edit_task()
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/Users/codemonkey/Project/TuriX-CUA/src/agent/planner_service.py", line 527, in edit_task
response = await self.planner_llm.ainvoke(messages)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/Users/codemonkey/Project/TuriX-CUA/turix_env/lib/python3.14/site-packages/langchain_core/runnables/base.py", line 5443, in ainvoke
return await self.bound.ainvoke(
^^^^^^^^^^^^^^^^^^^^^^^^^
...<3 lines>...
)
^
File "/Users/codemonkey/Project/TuriX-CUA/turix_env/lib/python3.14/site-packages/langchain_core/language_models/chat_models.py", line 394, in ainvoke
llm_result = await self.agenerate_prompt(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^
...<8 lines>...
)
^
File "/Users/codemonkey/Project/TuriX-CUA/turix_env/lib/python3.14/site-packages/langchain_core/language_models/chat_models.py", line 968, in agenerate_prompt
return await self.agenerate(
^^^^^^^^^^^^^^^^^^^^^
prompt_messages, stop=stop, callbacks=callbacks, **kwargs
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
)
^
File "/Users/codemonkey/Project/TuriX-CUA/turix_env/lib/python3.14/site-packages/langchain_core/language_models/chat_models.py", line 926, in agenerate
raise exceptions[0]
File "/Users/codemonkey/Project/TuriX-CUA/turix_env/lib/python3.14/site-packages/langchain_core/language_models/chat_models.py", line 1094, in _agenerate_with_cache
result = await self._agenerate(
^^^^^^^^^^^^^^^^^^^^^^
messages, stop=stop, run_manager=run_manager, **kwargs
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
)
^
File "/Users/codemonkey/Project/TuriX-CUA/turix_env/lib/python3.14/site-packages/langchain_ollama/chat_models.py", line 788, in _agenerate
final_chunk = await self._achat_stream_with_aggregation(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
messages, stop, run_manager, verbose=self.verbose, **kwargs
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
)
^
File "/Users/codemonkey/Project/TuriX-CUA/turix_env/lib/python3.14/site-packages/langchain_ollama/chat_models.py", line 645, in _achat_stream_with_aggregation
async for stream_resp in self._acreate_chat_stream(messages, stop, **kwargs):
...<27 lines>...
)
File "/Users/codemonkey/Project/TuriX-CUA/turix_env/lib/python3.14/site-packages/langchain_ollama/chat_models.py", line 575, in _acreate_chat_stream
async for part in await self._async_client.chat(**chat_params):
yield part
File "/Users/codemonkey/Project/TuriX-CUA/turix_env/lib/python3.14/site-packages/ollama/_client.py", line 762, in inner
raise ResponseError(err)
ollama._types.ResponseError: failed to load model vocabulary required for format (status code: -1)
