Think·SPC · same-tx async

LLM Inference

On-chain reasoning

Call GLM-4.7-FP8 (64K context) inside a TEE. Result returns in the same transaction.

0x0000000000000000000000000000000000000802 · Explorer
click 'auto' (capability = 1)
Reasoning model with <think> CoT. Use ≥4096 max_tokens, ≥60 ttl.
2 message(s)
≥4096
≥60
to: 0x0000000000000000000000000000000000000802 · chainId 1979 · SPC
Output
Click Validate to encode the 30-field LLM request.
// SPDX-License-Identifier: MIT
pragma solidity ^0.8.24;

// PrecompileConsumer is provided by Ritual.
import {PrecompileConsumer} from "./utils/PrecompileConsumer.sol";

contract LLMExample is PrecompileConsumer {
    /// @dev LLM inference precompile address (0x…0802 on-chain).
    address constant LLM = address(0x802);

    /// @notice Emitted with the raw precompile result.
    /// @param hasError True when the precompile reports a failure; in that
    ///        case `completion` must not be trusted and `errorMessage` applies.
    event LLMResult(bool hasError, bytes completion, string errorMessage);

    /// @notice Sends a chat-completion request to the LLM precompile and
    ///         receives the answer inside the same transaction.
    /// @param executor The executor address forwarded as the first field of
    ///        the 30-field request payload.
    function ask(address executor) external {
        // JSON-encoded chat messages (system + user) — runtime data, kept verbatim.
        string memory messages = "[{\"role\":\"system\",\"content\":\"You are a concise on-chain assistant.\"},{\"role\":\"user\",\"content\":\"Explain a Ritual precompile in one sentence.\"}]";

        // BUG FIX: the original passed an inline `["", "", ""]` literal, which
        // does not type-check in Solidity (string literals cannot form an
        // inline array without an explicit element type). Declare the
        // fixed-size array instead; its elements default to "".
        string[3] memory stopSequences;

        // 30-field LLM request — simplified for clarity.
        bytes memory encoded = abi.encode(
            executor,
            new bytes[](0),                    // encryptedSecrets
            uint256(300),                      // ttl (docs above recommend >= 60)
            new bytes[](0),                    // secretSignatures
            bytes(""),                         // userPublicKey
            messages,
            "zai-org/GLM-4.7-FP8",             // model (64K context, reasoning)
            int256(70),
            "", false, int256(-1), "", "",
            uint256(1), true, int256(0), "medium",
            bytes(""), int256(-1), "auto", "",
            false,
            int256(4096),                      // max_tokens (docs recommend >= 4096)
            bytes(""), bytes(""), int256(-1), int256(1000), "",
            false,
            stopSequences
        );

        bytes memory output = _executePrecompile(LLM, encoded);

        // ALWAYS check hasError before reading completion.
        // BUG FIX: the original decoded the trailing field as a nested tuple
        // type `(string, string, string)`, which is not valid Solidity syntax
        // inside abi.decode. A fixed-size array `string[3]` has the identical
        // ABI encoding to a tuple of three strings, so it decodes the same
        // payload. NOTE(review): exact precompile output schema assumed from
        // this snippet — confirm against the precompile reference.
        (bool hasError, bytes memory completion, , string memory err, ) =
            abi.decode(output, (bool, bytes, bytes, string, string[3]));

        emit LLMResult(hasError, completion, err);
    }
}