vllm.entrypoints.openai.tool_parsers

Modules:

abstract_tool_parser
deepseekv31_tool_parser
deepseekv3_tool_parser
ernie45_tool_parser
glm4_moe_tool_parser
granite_20b_fc_tool_parser
granite_tool_parser
hermes_tool_parser
hunyuan_a13b_tool_parser
internlm2_tool_parser
jamba_tool_parser
kimi_k2_tool_parser
llama4_pythonic_tool_parser
llama_tool_parser
longcat_tool_parser
minimax_m2_tool_parser
minimax_tool_parser
mistral_tool_parser
olmo3_tool_parser
openai_tool_parser
phi4mini_tool_parser
pythonic_tool_parser
qwen3coder_tool_parser
qwen3xml_tool_parser
seed_oss_tool_parser
step3_tool_parser
utils
xlam_tool_parser

_TOOL_PARSERS_TO_REGISTER module-attribute

_TOOL_PARSERS_TO_REGISTER = {
    "deepseek_v3": (
        "deepseekv3_tool_parser",
        "DeepSeekV3ToolParser",
    ),
    "deepseek_v31": (
        "deepseekv31_tool_parser",
        "DeepSeekV31ToolParser",
    ),
    "ernie45": ("ernie45_tool_parser", "Ernie45ToolParser"),
    "glm45": (
        "glm4_moe_tool_parser",
        "Glm4MoeModelToolParser",
    ),
    "granite-20b-fc": (
        "granite_20b_fc_tool_parser",
        "Granite20bFCToolParser",
    ),
    "granite": ("granite_tool_parser", "GraniteToolParser"),
    "hermes": (
        "hermes_tool_parser",
        "Hermes2ProToolParser",
    ),
    "hunyuan_a13b": (
        "hunyuan_a13b_tool_parser",
        "HunyuanA13BToolParser",
    ),
    "internlm": (
        "internlm2_tool_parser",
        "Internlm2ToolParser",
    ),
    "jamba": ("jamba_tool_parser", "JambaToolParser"),
    "kimi_k2": ("kimi_k2_tool_parser", "KimiK2ToolParser"),
    "llama3_json": (
        "llama_tool_parser",
        "Llama3JsonToolParser",
    ),
    "llama4_json": (
        "llama_tool_parser",
        "Llama4JsonToolParser",
    ),
    "llama4_pythonic": (
        "llama4_pythonic_tool_parser",
        "Llama4PythonicToolParser",
    ),
    "longcat": (
        "longcat_tool_parser",
        "LongcatFlashToolParser",
    ),
    "minimax_m2": (
        "minimax_m2_tool_parser",
        "MinimaxM2ToolParser",
    ),
    "minimax": ("minimax_tool_parser", "MinimaxToolParser"),
    "mistral": ("mistral_tool_parser", "MistralToolParser"),
    "olmo3": (
        "olmo3_tool_parser",
        "Olmo3PythonicToolParser",
    ),
    "openai": ("openai_tool_parser", "OpenAIToolParser"),
    "phi4_mini_json": (
        "phi4mini_tool_parser",
        "Phi4MiniJsonToolParser",
    ),
    "pythonic": (
        "pythonic_tool_parser",
        "PythonicToolParser",
    ),
    "qwen3_coder": (
        "qwen3coder_tool_parser",
        "Qwen3CoderToolParser",
    ),
    "qwen3_xml": (
        "qwen3xml_tool_parser",
        "Qwen3XmlToolParser",
    ),
    "seed_oss": (
        "seed_oss_tool_parser",
        "SeedOsSToolParser",
    ),
    "step3": ("step3_tool_parser", "Step3ToolParser"),
    "xlam": ("xlam_tool_parser", "xLAMToolParser"),
}
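
Each key in this mapping is a tool parser name accepted by ToolParserManager.get_tool_parser; the value pairs the file name under vllm.entrypoints.openai.tool_parsers with the class it exports. A minimal sketch of how one entry resolves (the "hermes" lookup is only an illustration):

# Sketch: how a registry entry resolves to an importable module path and class.
file_name, class_name = _TOOL_PARSERS_TO_REGISTER["hermes"]
module_path = f"vllm.entrypoints.openai.tool_parsers.{file_name}"
# module_path == "vllm.entrypoints.openai.tool_parsers.hermes_tool_parser"
# class_name  == "Hermes2ProToolParser"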

__all__ module-attribute

__all__ = ['ToolParser', 'ToolParserManager']

ToolParser

Abstract ToolParser class that should not be used directly; the properties and methods it provides are meant to be used by derived classes.

Source code in vllm/entrypoints/openai/tool_parsers/abstract_tool_parser.py
class ToolParser:
    """
    Abstract ToolParser class that should not be used directly. Provided
    properties and methods should be used in
    derived classes.
    """

    def __init__(self, tokenizer: AnyTokenizer):
        self.prev_tool_call_arr: list[dict] = []
        # the index of the tool call that is currently being parsed
        self.current_tool_id: int = -1
        self.current_tool_name_sent: bool = False
        self.streamed_args_for_tool: list[str] = []

        self.model_tokenizer = tokenizer

    @cached_property
    def vocab(self) -> dict[str, int]:
        # NOTE: Only PreTrainedTokenizerFast is guaranteed to have .vocab
        # whereas all tokenizers have .get_vocab()
        return self.model_tokenizer.get_vocab()

    def adjust_request(self, request: ChatCompletionRequest) -> ChatCompletionRequest:
        """
        Static method that used to adjust the request parameters.
        """
        if not request.tools:
            return request
        json_schema_from_tool = get_json_schema_from_tools(
            tool_choice=request.tool_choice, tools=request.tools
        )
        # Set structured output params for tool calling
        if json_schema_from_tool is not None:
            if request.structured_outputs is None:
                request.structured_outputs = StructuredOutputsParams()
            # tool_choice: "Forced Function" or "required" will override
            # structured output json settings to make tool calling work correctly
            request.structured_outputs.json = json_schema_from_tool
        return request

    def extract_tool_calls(
        self, model_output: str, request: ChatCompletionRequest
    ) -> ExtractedToolCallInformation:
        """
        Static method that should be implemented for extracting tool calls from
        a complete model-generated string.
        Used for non-streaming responses where we have the entire model response
        available before sending to the client.
        Static because it's stateless.
        """
        raise NotImplementedError(
            "AbstractToolParser.extract_tool_calls has not been implemented!"
        )

    def extract_tool_calls_streaming(
        self,
        previous_text: str,
        current_text: str,
        delta_text: str,
        previous_token_ids: Sequence[int],
        current_token_ids: Sequence[int],
        delta_token_ids: Sequence[int],
        request: ChatCompletionRequest,
    ) -> DeltaMessage | None:
        """
        Instance method that should be implemented for extracting tool calls
        from an incomplete response; for use when handling tool calls and
        streaming. Has to be an instance method because  it requires state -
        the current tokens/diffs, but also the information about what has
        previously been parsed and extracted (see constructor)
        """
        raise NotImplementedError(
            "AbstractToolParser.extract_tool_calls_streaming has not been implemented!"
        )
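
A minimal sketch of a derived parser registered under a hypothetical name, assuming ChatCompletionRequest, ExtractedToolCallInformation, ToolCall, and FunctionCall come from vllm.entrypoints.openai.protocol; the parsing logic itself is purely illustrative:

import json

from vllm.entrypoints.openai.protocol import (
    ChatCompletionRequest,
    ExtractedToolCallInformation,
    FunctionCall,
    ToolCall,
)
from vllm.entrypoints.openai.tool_parsers import ToolParser, ToolParserManager


@ToolParserManager.register_module("my_json_parser")  # hypothetical parser name
class MyJsonToolParser(ToolParser):
    def extract_tool_calls(
        self, model_output: str, request: ChatCompletionRequest
    ) -> ExtractedToolCallInformation:
        # Illustrative only: treat the whole output as one JSON tool call.
        # A real parser would also override extract_tool_calls_streaming.
        try:
            call = json.loads(model_output)
            return ExtractedToolCallInformation(
                tools_called=True,
                tool_calls=[
                    ToolCall(
                        function=FunctionCall(
                            name=call["name"],
                            arguments=json.dumps(call.get("arguments", {})),
                        )
                    )
                ],
                content=None,
            )
        except (json.JSONDecodeError, TypeError, KeyError):
            # Not a tool call: return the text unchanged as ordinary content.
            return ExtractedToolCallInformation(
                tools_called=False, tool_calls=[], content=model_output
            )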

current_tool_id instance-attribute

current_tool_id: int = -1

current_tool_name_sent instance-attribute

current_tool_name_sent: bool = False

model_tokenizer instance-attribute

model_tokenizer = tokenizer

prev_tool_call_arr instance-attribute

prev_tool_call_arr: list[dict] = []

streamed_args_for_tool instance-attribute

streamed_args_for_tool: list[str] = []

vocab cached property

vocab: dict[str, int]
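
Concrete parsers typically use this mapping once, at construction time, to resolve the token ids of their tool-call markers. A short sketch, where parser is an already constructed ToolParser and the "<tool_call>" marker is only an example of such a token:

# Sketch: resolve a special-token id from the tokenizer vocabulary.
tool_call_start_id = parser.vocab.get("<tool_call>")
if tool_call_start_id is None:
    ...  # the served model's tokenizer does not define this token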

__init__

__init__(tokenizer: AnyTokenizer)
Source code in vllm/entrypoints/openai/tool_parsers/abstract_tool_parser.py
def __init__(self, tokenizer: AnyTokenizer):
    self.prev_tool_call_arr: list[dict] = []
    # the index of the tool call that is currently being parsed
    self.current_tool_id: int = -1
    self.current_tool_name_sent: bool = False
    self.streamed_args_for_tool: list[str] = []

    self.model_tokenizer = tokenizer

adjust_request

adjust_request(
    request: ChatCompletionRequest,
) -> ChatCompletionRequest

Adjusts the request parameters: when the request declares tools, a JSON schema is derived from them (depending on tool_choice) and applied as the request's structured-output constraint.

Source code in vllm/entrypoints/openai/tool_parsers/abstract_tool_parser.py
def adjust_request(self, request: ChatCompletionRequest) -> ChatCompletionRequest:
    """
    Static method that used to adjust the request parameters.
    """
    if not request.tools:
        return request
    json_schema_from_tool = get_json_schema_from_tools(
        tool_choice=request.tool_choice, tools=request.tools
    )
    # Set structured output params for tool calling
    if json_schema_from_tool is not None:
        if request.structured_outputs is None:
            request.structured_outputs = StructuredOutputsParams()
        # tool_choice: "Forced Function" or "required" will override
        # structured output json settings to make tool calling work correctly
        request.structured_outputs.json = json_schema_from_tool
    return request
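
A sketch of how this hook is used before generation. The request fields and the parser instance are illustrative; adjust_request mutates and returns the same request object:

request = ChatCompletionRequest(
    model="some-model",
    messages=[{"role": "user", "content": "What is the weather in Berlin?"}],
    tools=[{
        "type": "function",
        "function": {
            "name": "get_weather",
            "parameters": {
                "type": "object",
                "properties": {"city": {"type": "string"}},
            },
        },
    }],
    tool_choice="required",
)
request = parser.adjust_request(request)
# With tool_choice="required", request.structured_outputs.json now carries the
# JSON schema derived from the declared tools, constraining generation.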

extract_tool_calls

extract_tool_calls(
    model_output: str, request: ChatCompletionRequest
) -> ExtractedToolCallInformation

Extracts tool calls from a complete model-generated string. Used for non-streaming responses, where the entire model response is available before anything is sent to the client. Must be implemented by derived classes.

Source code in vllm/entrypoints/openai/tool_parsers/abstract_tool_parser.py
def extract_tool_calls(
    self, model_output: str, request: ChatCompletionRequest
) -> ExtractedToolCallInformation:
    """
    Static method that should be implemented for extracting tool calls from
    a complete model-generated string.
    Used for non-streaming responses where we have the entire model response
    available before sending to the client.
    Static because it's stateless.
    """
    raise NotImplementedError(
        "AbstractToolParser.extract_tool_calls has not been implemented!"
    )
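
A sketch of the non-streaming path, assuming parser is a concrete ToolParser instance and model_output is the full decoded completion; the result fields (tools_called, tool_calls, content) are those of ExtractedToolCallInformation:

result = parser.extract_tool_calls(model_output, request)
if result.tools_called:
    for call in result.tool_calls:
        # Each entry carries the function name and its JSON-encoded arguments.
        print(call.function.name, call.function.arguments)
else:
    # No tool call was detected; the text is returned as ordinary content.
    print(result.content)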

extract_tool_calls_streaming

extract_tool_calls_streaming(
    previous_text: str,
    current_text: str,
    delta_text: str,
    previous_token_ids: Sequence[int],
    current_token_ids: Sequence[int],
    delta_token_ids: Sequence[int],
    request: ChatCompletionRequest,
) -> DeltaMessage | None

Extracts tool calls from an incomplete response; used when handling tool calls during streaming. Must be implemented by derived classes, and has to be an instance method because it requires state: the current tokens/diffs as well as what has previously been parsed and extracted (see the constructor).

Source code in vllm/entrypoints/openai/tool_parsers/abstract_tool_parser.py
def extract_tool_calls_streaming(
    self,
    previous_text: str,
    current_text: str,
    delta_text: str,
    previous_token_ids: Sequence[int],
    current_token_ids: Sequence[int],
    delta_token_ids: Sequence[int],
    request: ChatCompletionRequest,
) -> DeltaMessage | None:
    """
    Instance method that should be implemented for extracting tool calls
    from an incomplete response; for use when handling tool calls and
    streaming. Has to be an instance method because  it requires state -
    the current tokens/diffs, but also the information about what has
    previously been parsed and extracted (see constructor)
    """
    raise NotImplementedError(
        "AbstractToolParser.extract_tool_calls_streaming has not been implemented!"
    )
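
A sketch of the streaming path: the caller accumulates the running text and token ids itself and hands the parser both the totals and the latest delta. The token stream below is hypothetical; in vLLM this loop lives in the chat-completion serving code:

previous_text, previous_ids = "", []
for delta_text, delta_ids in token_stream:  # hypothetical (text, token-id) chunks
    current_text = previous_text + delta_text
    current_ids = previous_ids + list(delta_ids)
    delta = parser.extract_tool_calls_streaming(
        previous_text=previous_text,
        current_text=current_text,
        delta_text=delta_text,
        previous_token_ids=previous_ids,
        current_token_ids=current_ids,
        delta_token_ids=delta_ids,
        request=request,
    )
    if delta is not None:
        ...  # forward the DeltaMessage (tool-call fragment or content) to the client
    previous_text, previous_ids = current_text, current_ids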

ToolParserManager

Central registry for ToolParser implementations.

Supports two modes:
  • Eager (immediate) registration via register_module
  • Lazy registration via register_lazy_module
Source code in vllm/entrypoints/openai/tool_parsers/abstract_tool_parser.py
class ToolParserManager:
    """
    Central registry for ToolParser implementations.

    Supports two modes:
      - Eager (immediate) registration via `register_module`
      - Lazy registration via `register_lazy_module`
    """

    tool_parsers: dict[str, type[ToolParser]] = {}
    lazy_parsers: dict[str, tuple[str, str]] = {}  # name -> (module_path, class_name)

    @classmethod
    def get_tool_parser(cls, name: str) -> type[ToolParser]:
        """
        Retrieve a registered or lazily registered ToolParser class.

        If the parser is lazily registered,
        it will be imported and cached on first access.
        Raises KeyError if not found.
        """
        if name in cls.tool_parsers:
            return cls.tool_parsers[name]

        if name in cls.lazy_parsers:
            return cls._load_lazy_parser(name)

        raise KeyError(f"Tool parser '{name}' not found.")

    @classmethod
    def _load_lazy_parser(cls, name: str) -> type[ToolParser]:
        """Import and register a lazily loaded parser."""
        module_path, class_name = cls.lazy_parsers[name]
        try:
            mod = importlib.import_module(module_path)
            parser_cls = getattr(mod, class_name)
            if not issubclass(parser_cls, ToolParser):
                raise TypeError(
                    f"{class_name} in {module_path} is not a ToolParser subclass."
                )
            cls.tool_parsers[name] = parser_cls  # cache
            return parser_cls
        except Exception as e:
            logger.exception(
                "Failed to import lazy tool parser '%s' from %s: %s",
                name,
                module_path,
                e,
            )
            raise

    @classmethod
    def _register_module(
        cls,
        module: type[ToolParser],
        module_name: str | list[str] | None = None,
        force: bool = True,
    ) -> None:
        """Register a ToolParser class immediately."""
        if not issubclass(module, ToolParser):
            raise TypeError(
                f"module must be subclass of ToolParser, but got {type(module)}"
            )

        if module_name is None:
            module_name = module.__name__

        if isinstance(module_name, str):
            module_names = [module_name]
        elif is_list_of(module_name, str):
            module_names = module_name
        else:
            raise TypeError("module_name must be str, list[str], or None.")

        for name in module_names:
            if not force and name in cls.tool_parsers:
                existed = cls.tool_parsers[name]
                raise KeyError(f"{name} is already registered at {existed.__module__}")
            cls.tool_parsers[name] = module

    @classmethod
    def register_lazy_module(cls, name: str, module_path: str, class_name: str) -> None:
        """
        Register a lazy module mapping.

        Example:
            ToolParserManager.register_lazy_module(
                name="kimi_k2",
                module_path="vllm.entrypoints.openai.tool_parsers.kimi_k2_parser",
                class_name="KimiK2ToolParser",
            )
        """
        cls.lazy_parsers[name] = (module_path, class_name)

    @classmethod
    def register_module(
        cls,
        name: str | list[str] | None = None,
        force: bool = True,
        module: type[ToolParser] | None = None,
    ) -> type[ToolParser] | Callable[[type[ToolParser]], type[ToolParser]]:
        """
        Register module immediately or lazily (as a decorator).

        Usage:
            @ToolParserManager.register_module("kimi_k2")
            class KimiK2ToolParser(ToolParser):
                ...

        Or:
            ToolParserManager.register_module(module=SomeToolParser)
        """
        if not isinstance(force, bool):
            raise TypeError(f"force must be a boolean, but got {type(force)}")

        # Immediate registration
        if module is not None:
            cls._register_module(module=module, module_name=name, force=force)
            return module

        # Decorator usage
        def _decorator(obj: type[ToolParser]) -> type[ToolParser]:
            module_path = obj.__module__
            class_name = obj.__name__

            if isinstance(name, str):
                names = [name]
            elif is_list_of(name, str):
                names = name
            else:
                names = [class_name]

            for n in names:
                # Lazy mapping only: do not import now
                cls.lazy_parsers[n] = (module_path, class_name)

            return obj

        return _decorator

    @classmethod
    def list_registered(cls) -> list[str]:
        """Return names of all eagerly and lazily registered tool parsers."""
        return sorted(set(cls.tool_parsers.keys()) | set(cls.lazy_parsers.keys()))

    @classmethod
    def import_tool_parser(cls, plugin_path: str) -> None:
        """Import a user-defined parser file from arbitrary path."""

        module_name = os.path.splitext(os.path.basename(plugin_path))[0]
        try:
            import_from_path(module_name, plugin_path)
        except Exception:
            logger.exception(
                "Failed to load module '%s' from %s.", module_name, plugin_path
            )

lazy_parsers class-attribute instance-attribute

lazy_parsers: dict[str, tuple[str, str]] = {}

tool_parsers class-attribute instance-attribute

tool_parsers: dict[str, type[ToolParser]] = {}

_load_lazy_parser classmethod

_load_lazy_parser(name: str) -> type[ToolParser]

Import and register a lazily loaded parser.

Source code in vllm/entrypoints/openai/tool_parsers/abstract_tool_parser.py
@classmethod
def _load_lazy_parser(cls, name: str) -> type[ToolParser]:
    """Import and register a lazily loaded parser."""
    module_path, class_name = cls.lazy_parsers[name]
    try:
        mod = importlib.import_module(module_path)
        parser_cls = getattr(mod, class_name)
        if not issubclass(parser_cls, ToolParser):
            raise TypeError(
                f"{class_name} in {module_path} is not a ToolParser subclass."
            )
        cls.tool_parsers[name] = parser_cls  # cache
        return parser_cls
    except Exception as e:
        logger.exception(
            "Failed to import lazy tool parser '%s' from %s: %s",
            name,
            module_path,
            e,
        )
        raise

_register_module classmethod

_register_module(
    module: type[ToolParser],
    module_name: str | list[str] | None = None,
    force: bool = True,
) -> None

Register a ToolParser class immediately.

Source code in vllm/entrypoints/openai/tool_parsers/abstract_tool_parser.py
@classmethod
def _register_module(
    cls,
    module: type[ToolParser],
    module_name: str | list[str] | None = None,
    force: bool = True,
) -> None:
    """Register a ToolParser class immediately."""
    if not issubclass(module, ToolParser):
        raise TypeError(
            f"module must be subclass of ToolParser, but got {type(module)}"
        )

    if module_name is None:
        module_name = module.__name__

    if isinstance(module_name, str):
        module_names = [module_name]
    elif is_list_of(module_name, str):
        module_names = module_name
    else:
        raise TypeError("module_name must be str, list[str], or None.")

    for name in module_names:
        if not force and name in cls.tool_parsers:
            existed = cls.tool_parsers[name]
            raise KeyError(f"{name} is already registered at {existed.__module__}")
        cls.tool_parsers[name] = module

get_tool_parser classmethod

get_tool_parser(name: str) -> type[ToolParser]

Retrieve a registered or lazily registered ToolParser class.

If the parser is lazily registered, it will be imported and cached on first access. Raises KeyError if not found.

Source code in vllm/entrypoints/openai/tool_parsers/abstract_tool_parser.py
@classmethod
def get_tool_parser(cls, name: str) -> type[ToolParser]:
    """
    Retrieve a registered or lazily registered ToolParser class.

    If the parser is lazily registered,
    it will be imported and cached on first access.
    Raises KeyError if not found.
    """
    if name in cls.tool_parsers:
        return cls.tool_parsers[name]

    if name in cls.lazy_parsers:
        return cls._load_lazy_parser(name)

    raise KeyError(f"Tool parser '{name}' not found.")
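
Typical lookup and instantiation, assuming tokenizer is the AnyTokenizer of the served model:

try:
    parser_cls = ToolParserManager.get_tool_parser("mistral")  # may trigger a lazy import
except KeyError:
    ...  # unknown parser name
else:
    parser = parser_cls(tokenizer)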

import_tool_parser classmethod

import_tool_parser(plugin_path: str) -> None

Import a user-defined parser file from an arbitrary path.

Source code in vllm/entrypoints/openai/tool_parsers/abstract_tool_parser.py
@classmethod
def import_tool_parser(cls, plugin_path: str) -> None:
    """Import a user-defined parser file from arbitrary path."""

    module_name = os.path.splitext(os.path.basename(plugin_path))[0]
    try:
        import_from_path(module_name, plugin_path)
    except Exception:
        logger.exception(
            "Failed to load module '%s' from %s.", module_name, plugin_path
        )
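
A sketch of the plugin flow: a user-defined parser file registers itself on import, and import_tool_parser loads that file from an arbitrary path. The path, parser name, and class below are hypothetical:

# /opt/plugins/my_tool_parser.py  (hypothetical plugin file)
#
#     from vllm.entrypoints.openai.tool_parsers import ToolParser, ToolParserManager
#
#     @ToolParserManager.register_module("my_parser")
#     class MyToolParser(ToolParser):
#         ...

ToolParserManager.import_tool_parser("/opt/plugins/my_tool_parser.py")
parser_cls = ToolParserManager.get_tool_parser("my_parser")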

list_registered classmethod

list_registered() -> list[str]

Return names of all eagerly and lazily registered tool parsers.

Source code in vllm/entrypoints/openai/tool_parsers/abstract_tool_parser.py
@classmethod
def list_registered(cls) -> list[str]:
    """Return names of all eagerly and lazily registered tool parsers."""
    return sorted(set(cls.tool_parsers.keys()) | set(cls.lazy_parsers.keys()))

register_lazy_module classmethod

register_lazy_module(
    name: str, module_path: str, class_name: str
) -> None

Register a lazy module mapping.

Example

ToolParserManager.register_lazy_module(
    name="kimi_k2",
    module_path="vllm.entrypoints.openai.tool_parsers.kimi_k2_parser",
    class_name="KimiK2ToolParser",
)

Source code in vllm/entrypoints/openai/tool_parsers/abstract_tool_parser.py
@classmethod
def register_lazy_module(cls, name: str, module_path: str, class_name: str) -> None:
    """
    Register a lazy module mapping.

    Example:
        ToolParserManager.register_lazy_module(
            name="kimi_k2",
            module_path="vllm.entrypoints.openai.tool_parsers.kimi_k2_parser",
            class_name="KimiK2ToolParser",
        )
    """
    cls.lazy_parsers[name] = (module_path, class_name)

register_module classmethod

register_module(
    name: str | list[str] | None = None,
    force: bool = True,
    module: type[ToolParser] | None = None,
) -> (
    type[ToolParser]
    | Callable[[type[ToolParser]], type[ToolParser]]
)

Register a module immediately (by passing module=) or lazily (when used as a decorator).

Usage

@ToolParserManager.register_module("kimi_k2")
class KimiK2ToolParser(ToolParser):
    ...

Or

ToolParserManager.register_module(module=SomeToolParser)

Source code in vllm/entrypoints/openai/tool_parsers/abstract_tool_parser.py
@classmethod
def register_module(
    cls,
    name: str | list[str] | None = None,
    force: bool = True,
    module: type[ToolParser] | None = None,
) -> type[ToolParser] | Callable[[type[ToolParser]], type[ToolParser]]:
    """
    Register module immediately or lazily (as a decorator).

    Usage:
        @ToolParserManager.register_module("kimi_k2")
        class KimiK2ToolParser(ToolParser):
            ...

    Or:
        ToolParserManager.register_module(module=SomeToolParser)
    """
    if not isinstance(force, bool):
        raise TypeError(f"force must be a boolean, but got {type(force)}")

    # Immediate registration
    if module is not None:
        cls._register_module(module=module, module_name=name, force=force)
        return module

    # Decorator usage
    def _decorator(obj: type[ToolParser]) -> type[ToolParser]:
        module_path = obj.__module__
        class_name = obj.__name__

        if isinstance(name, str):
            names = [name]
        elif is_list_of(name, str):
            names = name
        else:
            names = [class_name]

        for n in names:
            # Lazy mapping only: do not import now
            cls.lazy_parsers[n] = (module_path, class_name)

        return obj

    return _decorator

register_lazy_tool_parsers

register_lazy_tool_parsers()
Source code in vllm/entrypoints/openai/tool_parsers/__init__.py
def register_lazy_tool_parsers():
    for name, (file_name, class_name) in _TOOL_PARSERS_TO_REGISTER.items():
        module_path = f"vllm.entrypoints.openai.tool_parsers.{file_name}"
        ToolParserManager.register_lazy_module(name, module_path, class_name)
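
Assuming this runs when the tool_parsers package is imported, every key in _TOOL_PARSERS_TO_REGISTER becomes resolvable by name; a quick check:

register_lazy_tool_parsers()  # safe to call again: it only refreshes the lazy mappings
print(ToolParserManager.list_registered())
# Includes 'deepseek_v3', 'hermes', 'mistral', ..., 'xlam', one entry per key above.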