环境要求

php:>=8.1

安装扩展

composer require neuron-core/neuron-ai

特点

  • 1.几乎支持所有主流大模型、聚合平台(如阿里云百炼、硅基流动等)
  • 2.支持流式输出
  • 3.支持文本、图片、音频、Embeddings、文件、视频消息
  • 4.支持MCPTools调用
  • 5.不依赖web框架
  • 6.支持结构化输出
  • 7.支持RAG
  • 8.支持Tools
  • 9.支持历史记录
  • 10.支持模型参数,例如:temperature、max_tokens等

缺点

  • 1.中文文档较少

创建Agent

在instructions方法中指定系统指令,在tools方法中配置tools

<?php
/**
 * OpenAI-compatible LLM agent definition.
 */
declare(strict_types=1);
namespace app\agent;

use GuzzleHttp\Handler\StreamHandler;
use GuzzleHttp\HandlerStack;
use NeuronAI\Agent\Agent;
use NeuronAI\Agent\SystemPrompt;
use NeuronAI\Providers\AIProviderInterface;
use NeuronAI\Providers\HttpClientOptions;
use NeuronAI\Providers\OpenAILike;

class OpenAIAgent extends Agent
{
    /**
     * Build the AI provider used by this agent.
     *
     * Fails fast with a clear message when the required environment variables
     * are missing: getenv() returns false in that case, which would otherwise
     * surface as an opaque TypeError under strict_types.
     *
     * @throws \RuntimeException when AI_API_URL, AI_API_KEY or AI_MODEL is not set
     */
    protected function provider(): AIProviderInterface
    {
        // Swoole compatibility: force Guzzle's StreamHandler to avoid
        // CurlMultiHandler crashes and to keep streaming output working.
        $handler = new StreamHandler();
        $stack = HandlerStack::create($handler);

        $baseUri = getenv('AI_API_URL');
        $key = getenv('AI_API_KEY');
        $model = getenv('AI_MODEL');
        if ($baseUri === false || $key === false || $model === false) {
            throw new \RuntimeException('AI_API_URL, AI_API_KEY and AI_MODEL environment variables must be set.');
        }

        // OpenAILike works with any OpenAI-compatible endpoint (the author uses
        // a SiliconFlow-hosted model); swap the provider class for other vendors.
        // Supported providers: https://docs.neuron-ai.dev/the-basics/ai-provider#openai
        return new OpenAILike(
            baseUri: $baseUri,
            key: $key,
            model: $model,
            parameters: [], // Add custom params (temperature, logprobs, etc)
            strict_response: false, // Strict structured output
            httpOptions: new HttpClientOptions(
                timeout: 30,
                handler: $stack
            ),
        );
    }

    /**
     * System instructions (system prompt) for the agent.
     */
    public function instructions(): string
    {
        return (string) new SystemPrompt(
            background: [
                "你是我的女朋友,请用娇羞可爱的语气回答问题",
            ],
            // steps: [
            //     "Get the url of a YouTube video, or ask the user to provide one.",
            //     "Use the tools you have available to retrieve the transcription of the video.",
            //     "Write the summary.",
            // ],
            // output: [
            //     "Write a summary in a paragraph without using lists. Use just fluent text.",
            //     "After the summary add a list of three sentences as the three most important take away from the video.",
            // ]
        );
    }

    /**
     * Tools available to the agent (none in this basic example).
     *
     * @return \NeuronAI\Tools\ToolInterface[]
     */
    protected function tools(): array
    {
        return [];
    }
}

流式输出

这里我使用的是webman框架,根据自己使用的框架调整语法

<?php

namespace app\controller;

use app\agent\OpenAIAgent;
use NeuronAI\Chat\Enums\SourceType;
use NeuronAI\Chat\Messages\ContentBlocks\FileContent;
use NeuronAI\Chat\Messages\ContentBlocks\ImageContent;
use NeuronAI\Chat\Messages\UserMessage;
use support\Request;
use Workerman\Connection\TcpConnection;
use Workerman\Protocols\Http\Chunk;
use Workerman\Timer;

/**
 * Neuron AI chat controller (streaming variant, webman framework).
 *
 * Reference documentation:
 * https://docs.neuron-ai.dev/the-basics/ai-provider
 * https://github.com/neuron-core/neuron-ai
 * https://mp.weixin.qq.com/s/rtjW-13j5Di9jxdgxRpClw
 */
class NeuronAiController
{
    /** @var OpenAIAgent agent shared by the controller actions */
    protected $agent;

    public function __construct()
    {
        $this->agent = new OpenAIAgent();
    }

    /**
     * Chat entry point. Dispatches to streaming or non-streaming output
     * depending on the `stream` request parameter.
     *
     * NOTE(review): chatWithoutStream() is not defined in this class as
     * shown; it must be added as well (see the non-streaming example) or
     * that branch will fail at runtime.
     */
    public function index(Request $request)
    {
        $connection = $request->connection;
        $message = $request->input('message');
        $stream = $request->input('stream');
        if ($message == '') {
            return json(['code' => 500, 'reply' => '请输入内容']);
        }
        return $stream ? $this->chatWithStream($connection, $message) : $this->chatWithoutStream($connection, $message);
    }


    // Chat page
    public function chat(Request $request)
    {
        return view('index/neuron_chat');
    }

    /**
     * Streaming output: returns a chunked HTTP response immediately and
     * pushes model output to the client from a one-shot timer callback.
     */
    public function chatWithStream($connection, $message)
    {
        // Schedule the work on a timer so this method can return the chunked
        // response headers first; persist=false makes the timer one-shot.
        $id = Timer::add(0.01, function () use ($connection, &$id, $message) {
            // Defensive cleanup; the timer is already non-persistent.
            if ($id !== null) {
                Timer::del($id);
            }
            // The client may have disconnected before the timer fired.
            if ($connection->getStatus() !== TcpConnection::STATUS_ESTABLISHED) {
                return;
            }

            try {
                $handler = $this->agent->make()->stream(
                    new UserMessage($message)
                );
                foreach ($handler as $chunk) {
                    // Neuron AI yields JSON strings for usage and regular strings for content
                    $content = $chunk;
                    if (!is_string($chunk)) {
                        $content = $chunk->content;
                    }

                    if (empty($content)) {
                        continue;
                    }

                    // Forward the fragment as one newline-delimited JSON chunk.
                    $connection->send(new Chunk(json_encode([
                        'reply' => $content,
                        'model' => getenv('AI_MODEL'),
                    ], JSON_UNESCAPED_UNICODE) . "\n"));
                }

                // The generator's return value is the final response object,
                // which carries the accumulated token usage.
                $response = $handler->getReturn();
                $usage = $response->getUsage();
                $totalUsage = $usage ? $usage->getTotal() : 0;

                // Send completion event with total usage
                $connection->send(new Chunk(json_encode([
                    'event' => 'completed',
                    'reply' => '',
                    'usage' => $totalUsage,
                ], JSON_UNESCAPED_UNICODE) . "\n"));

                // Final empty chunk to signal end of stream
                $connection->send(new Chunk(''));
            } catch (\Exception $e) {
                // Report the failure to the client and terminate the stream.
                $connection->send(new Chunk(json_encode([
                    'event' => 'error',
                    'reply' => $e->getMessage(),
                ], JSON_UNESCAPED_UNICODE) . "\n"));
                $connection->send(new Chunk(''));
            }
        }, [], false);

        return response()->withHeaders([
            'Content-Type' => 'application/json',
            'Transfer-Encoding' => 'chunked',
            'Cache-Control' => 'no-cache',
            'Connection' => 'keep-alive',
            'X-Accel-Buffering' => 'no',
        ]);
    }

}

html参考页面

<!DOCTYPE html>
<html lang="zh-CN">
<head>
    <meta charset="UTF-8">
    <meta name="viewport" content="width=device-width, initial-scale=1.0">
    <title>AI聊天助手</title>
    <style>
        * {
            margin: 0;
            padding: 0;
            box-sizing: border-box;
            font-family: 'Segoe UI', Tahoma, Geneva, Verdana, sans-serif;
        }
        
        body {
            background: linear-gradient(135deg, #1a2a6c, #b21f1f, #fdbb2d);
            min-height: 100vh;
            display: flex;
            justify-content: center;
            align-items: center;
            padding: 20px;
        }
        
        .container {
            width: 100%;
            max-width: 900px;
            background-color: rgba(255, 255, 255, 0.92);
            border-radius: 20px;
            box-shadow: 0 10px 30px rgba(0, 0, 0, 0.3);
            overflow: hidden;
            display: flex;
            flex-direction: column;
            height: 90vh;
        }
        
        header {
            background: linear-gradient(to right, #4b6cb7, #182848);
            color: white;
            padding: 20px;
            text-align: center;
            position: relative;
        }
        
        h1 {
            font-size: 2.2rem;
            margin-bottom: 5px;
            letter-spacing: 1px;
        }
        
        .subtitle {
            font-size: 1rem;
            opacity: 0.9;
        }
        
        .status-bar {
            display: flex;
            justify-content: space-between;
            padding: 10px 20px;
            background-color: #f0f5ff;
            border-bottom: 1px solid #ddd;
            font-size: 0.9rem;
            color: #555;
        }
        
        .chat-container {
            flex: 1;
            overflow-y: auto;
            padding: 20px;
            display: flex;
            flex-direction: column;
            gap: 15px;
            background: url('data:image/svg+xml;utf8,<svg xmlns="http://www.w3.org/2000/svg" width="100" height="100" viewBox="0 0 100 100"><rect width="100" height="100" fill="white"/><path d="M0 50 L100 50 M50 0 L50 100" stroke="rgba(240,245,255,0.8)" stroke-width="1"/></svg>');
        }
        
        .message {
            max-width: 80%;
            padding: 15px;
            border-radius: 18px;
            line-height: 1.5;
            position: relative;
            animation: fadeIn 0.3s ease-out;
        }
        
        @keyframes fadeIn {
            from { opacity: 0; transform: translateY(10px); }
            to { opacity: 1; transform: translateY(0); }
        }
        
        .user-message {
            align-self: flex-end;
            background: linear-gradient(to right, #0072ff, #00c6ff);
            color: white;
            border-bottom-right-radius: 5px;
        }
        
        .ai-message {
            align-self: flex-start;
            background: linear-gradient(to right, #f5f7fa, #e4e7eb);
            color: #333;
            border-bottom-left-radius: 5px;
            box-shadow: 0 2px 5px rgba(0,0,0,0.05);
        }
        
        .message-info {
            font-size: 0.75rem;
            opacity: 0.7;
            margin-top: 5px;
            text-align: right;
        }
        
        .typing-indicator {
            display: none;
            align-self: flex-start;
            background-color: #f0f5ff;
            padding: 15px;
            border-radius: 18px;
            margin-bottom: 10px;
            border-bottom-left-radius: 5px;
        }
        
        .typing-indicator span {
            height: 10px;
            width: 10px;
            float: left;
            margin: 0 2px;
            background-color: #9E9EA1;
            display: block;
            border-radius: 50%;
            opacity: 0.4;
        }
        
        .typing-indicator span:nth-of-type(1) {
            animation: typing 1s infinite;
        }
        
        .typing-indicator span:nth-of-type(2) {
            animation: typing 1s infinite 0.2s;
        }
        
        .typing-indicator span:nth-of-type(3) {
            animation: typing 1s infinite 0.4s;
        }
        
        @keyframes typing {
            0% { transform: translateY(0px); }
            50% { transform: translateY(-5px); opacity: 0.8; }
            100% { transform: translateY(0px); }
        }
        
        .input-area {
            padding: 20px;
            background-color: white;
            border-top: 1px solid #eee;
            display: flex;
            gap: 10px;
        }
        
        textarea {
            flex: 1;
            padding: 15px;
            border: 2px solid #e0e0e0;
            border-radius: 12px;
            resize: none;
            font-size: 1rem;
            outline: none;
            transition: border-color 0.3s;
            height: 60px;
        }
        
        textarea:focus {
            border-color: #4b6cb7;
            box-shadow: 0 0 0 2px rgba(75, 108, 183, 0.2);
        }
        
        button {
            padding: 12px 25px;
            border: none;
            border-radius: 12px;
            font-weight: 600;
            cursor: pointer;
            transition: all 0.3s;
            font-size: 1rem;
            display: flex;
            align-items: center;
            gap: 8px;
        }
        
        .send-btn {
            background: linear-gradient(to right, #4b6cb7, #182848);
            color: white;
        }
        
        .send-btn:hover {
            background: linear-gradient(to right, #3a5ca5, #101f3d);
            transform: translateY(-2px);
            box-shadow: 0 4px 8px rgba(0,0,0,0.1);
        }
        
        .toggle-container {
            display: flex;
            align-items: center;
            gap: 10px;
            background-color: #f0f5ff;
            padding: 0 15px;
            border-radius: 12px;
            height: 60px;
        }
        
        .switch {
            position: relative;
            display: inline-block;
            width: 50px;
            height: 26px;
        }
        
        .switch input {
            opacity: 0;
            width: 0;
            height: 0;
        }
        
        .slider {
            position: absolute;
            cursor: pointer;
            top: 0;
            left: 0;
            right: 0;
            bottom: 0;
            background-color: #ccc;
            transition: .4s;
            border-radius: 34px;
        }
        
        .slider:before {
            position: absolute;
            content: "";
            height: 18px;
            width: 18px;
            left: 4px;
            bottom: 4px;
            background-color: white;
            transition: .4s;
            border-radius: 50%;
        }
        
        input:checked + .slider {
            background: linear-gradient(to right, #4b6cb7, #182848);
        }
        
        input:checked + .slider:before {
            transform: translateX(24px);
        }
        
        .token-info {
            font-size: 0.85rem;
            color: #666;
            padding: 10px 20px;
            background-color: #f9f9f9;
            border-top: 1px solid #eee;
            display: flex;
            justify-content: space-between;
        }
        
        .timestamp {
            font-size: 0.7rem;
            opacity: 0.6;
            margin-top: 5px;
        }
        
        @media (max-width: 768px) {
            .container {
                height: 95vh;
                border-radius: 15px;
            }
            
            .message {
                max-width: 90%;
            }
            
            .input-area {
                flex-wrap: wrap;
            }
            
            .toggle-container {
                order: 3;
                width: 100%;
                justify-content: center;
                margin-top: 10px;
                padding: 10px;
            }
        }
    </style>
</head>
<body>
    <div class="container">
        <header>
            <h1>AI聊天助手</h1>
        </header>
        
        <div class="status-bar">
            <div>模型: <span id="model-name">--</span></div>
        </div>
        
        <div class="chat-container" id="chat-container">
            <div class="message ai-message">
                您好!我是AI助手,有什么我可以帮您的吗?
            </div>
        </div>
        
        <div class="typing-indicator" id="typing-indicator">
            <span></span>
            <span></span>
            <span></span>
        </div>
        
        <div class="input-area">
            <textarea id="user-input" placeholder="输入您的问题..."></textarea>
            <button class="send-btn" id="send-btn">
                <svg xmlns="http://www.w3.org/2000/svg" width="20" height="20" fill="currentColor" viewBox="0 0 16 16">
                    <path d="M15.854.146a.5.5 0 0 1 .11.54l-5.819 14.547a.75.75 0 0 1-1.329.124l-3.178-4.995L.643 7.184a.75.75 0 0 1 .124-1.33L15.314.037a.5.5 0 0 1 .54.11ZM6.636 10.07l2.761 4.338L14.13 2.576 6.636 10.07Zm6.787-8.201L1.591 6.602l4.339 2.76 7.494-7.493Z"/>
                </svg>
                发送
            </button>
            <div class="toggle-container">
                <span>流式输出:</span>
                <label class="switch">
                    <input type="checkbox" id="stream-toggle" checked>
                    <span class="slider"></span>
                </label>
            </div>
        </div>
        
        <div class="token-info">
            <div>Token使用: <span id="token-usage">0</span></div>
            <div>响应时间: <span id="response-time">0</span>ms</div>
        </div>
    </div>

    <script>
        const API_URL = 'http://127.0.0.1:8787/NeuronAi/index';
        document.addEventListener('DOMContentLoaded', function() {
            const chatContainer = document.getElementById('chat-container');
            const userInput = document.getElementById('user-input');
            const sendBtn = document.getElementById('send-btn');
            const streamToggle = document.getElementById('stream-toggle');
            const typingIndicator = document.getElementById('typing-indicator');
            const tokenUsageEl = document.getElementById('token-usage');
            const responseTimeEl = document.getElementById('response-time');
            const modelNameEl = document.getElementById('model-name');
            
            // 添加示例消息
            // addMessage('你好,请介绍一下你自己', 'user');
            
            // Send the user's message and render the reply
            async function sendMessage() {
                const message = userInput.value.trim();
                if (!message) return;

                // Echo the user's message into the chat
                addMessage(message, 'user');
                userInput.value = '';

                // Show the typing indicator while waiting for the reply
                typingIndicator.style.display = 'block';
                chatContainer.scrollTop = chatContainer.scrollHeight;

                const startTime = Date.now();

                try {
                    if (streamToggle.checked) {
                        // Streaming mode
                        await streamResponse(message);
                    } else {
                        // Non-streaming mode
                        await normalResponse(message);
                    }

                    responseTimeEl.textContent = Date.now() - startTime;
                } catch (error) {
                    console.error('Error:', error);
                    addMessage(`请求出错: ${error.message}`, 'ai');
                } finally {
                    typingIndicator.style.display = 'none';
                }
            }
            
            // Consume the newline-delimited JSON stream and render it incrementally
            async function streamResponse(message) {
                const response = await fetch(API_URL, {
                    method: 'POST',
                    headers: {
                        'Content-Type': 'application/json'
                    },
                    body: JSON.stringify({ message, stream: 1 })
                });

                if (!response.ok) {
                    throw new Error(`HTTP error! status: ${response.status}`);
                }

                const reader = response.body.getReader();
                const decoder = new TextDecoder();
                let accumulatedContent = '';
                let aiMessageElement = null;
                let buffer = '';

                while (true) {
                    const { done, value } = await reader.read();
                    if (done) break;

                    buffer += decoder.decode(value, { stream: true });
                    const lines = buffer.split('\n');

                    // The last line may be incomplete; keep it in the buffer.
                    buffer = lines.pop();

                    for (const line of lines) {
                        if (line.trim() === '') continue;
                        try {
                            const data = JSON.parse(line);

                            // Update the displayed model name once it is known
                            // (previously duplicated below; done once here).
                            if (data.model) {
                                modelNameEl.textContent = data.model;
                            }

                            // Extract content from our backend format, or from a
                            // raw OpenAI-style chunk as a fallback.
                            let content = '';
                            if (data.reply) {
                                content = data.reply;
                            } else if (data.choices && data.choices.length > 0) {
                                if (data.choices[0].delta && data.choices[0].delta.content) {
                                    content = data.choices[0].delta.content;
                                } else if (data.choices[0].message && data.choices[0].message.content) {
                                    content = data.choices[0].message.content;
                                }
                            }

                            if (content) {
                                accumulatedContent += content;

                                // Lazily create the AI bubble on first content
                                if (!aiMessageElement) {
                                    aiMessageElement = createMessageElement('', 'ai');
                                    chatContainer.appendChild(aiMessageElement);
                                }

                                aiMessageElement.innerHTML = formatMessage(accumulatedContent);
                            }

                            if (data.usage) {
                                tokenUsageEl.textContent = data.usage;
                            }

                            if (data.error) {
                                addMessage(`Error: ${data.error.message || JSON.stringify(data.error)}`, 'ai');
                            }

                            chatContainer.scrollTop = chatContainer.scrollHeight;
                        } catch (e) {
                            console.error('Error parsing JSON:', e, 'Line:', line);
                        }
                    }
                }
            }
            
            // Handle a non-streaming request/response round trip
            async function normalResponse(message) {
                const response = await fetch(API_URL, {
                    method: 'POST',
                    headers: {
                        'Content-Type': 'application/json'
                    },
                    body: JSON.stringify({ message, stream: 0 })
                });

                if (!response.ok) {
                    throw new Error(`HTTP error! status: ${response.status}`);
                }

                // The backend replies with chunked transfer encoding, so read
                // the raw body manually instead of relying on response.json().
                const bodyReader = response.body.getReader();
                const textDecoder = new TextDecoder();
                let raw = '';
                for (;;) {
                    const chunk = await bodyReader.read();
                    if (chunk.done) {
                        break;
                    }
                    raw += textDecoder.decode(chunk.value);
                }

                const data = JSON.parse(raw.trim());

                // Refresh the displayed model name when provided
                if (data.model) {
                    modelNameEl.textContent = data.model;
                }

                if (data.reply) {
                    addMessage(data.reply, 'ai');

                    if (data.usage) {
                        tokenUsageEl.textContent = data.usage;
                    }
                } else if (data.error) {
                    addMessage(`Error: ${data.error.message || JSON.stringify(data.error)}`, 'ai');
                }
            }
            
            // Append a finished message bubble and keep the view pinned to the bottom
            function addMessage(content, sender) {
                const bubble = createMessageElement(content, sender);
                chatContainer.appendChild(bubble);
                chatContainer.scrollTop = chatContainer.scrollHeight;
            }
            
            // Build a message bubble element for the given sender
            function createMessageElement(content, sender) {
                const messageElement = document.createElement('div');
                messageElement.classList.add('message');

                if (sender === 'user') {
                    messageElement.classList.add('user-message');
                    // Use textContent so raw user input cannot inject markup (XSS);
                    // the previous innerHTML assignment rendered it as HTML.
                    messageElement.textContent = content;
                } else {
                    messageElement.classList.add('ai-message');
                    messageElement.innerHTML = formatMessage(content);
                }

                return messageElement;
            }
            
            // Format AI text for display: escape HTML metacharacters so model
            // output cannot inject markup (XSS), then turn newlines into <br>.
            function formatMessage(text) {
                return String(text)
                    .replace(/&/g, '&amp;')
                    .replace(/</g, '&lt;')
                    .replace(/>/g, '&gt;')
                    .replace(/"/g, '&quot;')
                    .replace(/\n/g, '<br>');
            }
            
            // Wire up the send button and Enter-to-send (Shift+Enter inserts a newline)
            sendBtn.addEventListener('click', sendMessage);
            
            userInput.addEventListener('keydown', function(e) {
                if (e.key === 'Enter' && !e.shiftKey) {
                    e.preventDefault();
                    sendMessage();
                }
            });
            
            // Initialise the token usage display
            tokenUsageEl.textContent = '0';
        });
    </script>
</body>
</html>

非流式输出

<?php

namespace app\controller;

use app\agent\OpenAIAgent;
use NeuronAI\Chat\Enums\SourceType;
use NeuronAI\Chat\Messages\ContentBlocks\FileContent;
use NeuronAI\Chat\Messages\ContentBlocks\ImageContent;
use NeuronAI\Chat\Messages\UserMessage;
use support\Request;
use Workerman\Connection\TcpConnection;
use Workerman\Protocols\Http\Chunk;
use Workerman\Timer;

/**
 * Neuron AI chat controller (non-streaming variant, webman framework).
 *
 * Reference documentation:
 * https://docs.neuron-ai.dev/the-basics/ai-provider
 * https://github.com/neuron-core/neuron-ai
 * https://mp.weixin.qq.com/s/rtjW-13j5Di9jxdgxRpClw
 */
class NeuronAiController
{
    /** @var OpenAIAgent agent shared by the controller actions */
    protected $agent;

    public function __construct()
    {
        $this->agent = new OpenAIAgent();
    }

    /**
     * Chat entry point. Dispatches to streaming or non-streaming output
     * depending on the `stream` request parameter.
     *
     * NOTE(review): chatWithStream() is not defined in this class as shown;
     * it must be added as well (see the streaming example) or that branch
     * will fail at runtime.
     */
    public function index(Request $request)
    {
        $connection = $request->connection;
        $message = $request->input('message');
        $stream = $request->input('stream');
        if ($message == '') {
            return json(['code' => 500, 'reply' => '请输入内容']);
        }
        return $stream ? $this->chatWithStream($connection, $message) : $this->chatWithoutStream($connection, $message);
    }


    // Chat page
    public function chat(Request $request)
    {
        return view('index/neuron_chat');
    }

    /**
     * Non-streaming output: run a complete chat round trip and send the
     * reply (with total token usage) to the client as one JSON payload.
     */
    public function chatWithoutStream($connection, $message)
    {
        try {
            // Build the text message
            $message = new UserMessage($message);
            $reply = $this->agent->make()->chat($message);
            $usage = $reply->getUsage();
            $totalUsage = $usage ? $usage->getTotal() : 0;
            $connection->send(json_encode([
                'reply' => $reply->getContent(),
                'model' => getenv('AI_MODEL'),
                'usage' => $totalUsage,
            ], JSON_UNESCAPED_UNICODE));
        } catch (\Exception $e) {
            // NOTE(review): exposing the file path and line number to the
            // client leaks implementation details; prefer logging them
            // server-side and returning a generic message.
            $connection->send(json_encode([
                'reply' => $e->getFile() . $e->getLine() . $e->getMessage(),
            ], JSON_UNESCAPED_UNICODE));
        }
    }
}

文件消息

这里只举例图片和文件聊天

public function chatWithoutStream($connection, $message)
    {
        try {
            // Build the text message
            $message = new UserMessage($message);
            // Image message content example; requires a vision-capable model.
            // $message->addContent(
            //     new ImageContent(
            //                 content: 'https://img-home.csdnimg.cn/images/20201124032511.png',
            //                 sourceType: SourceType::URL,
            //                 mediaType: 'image/png'
            //             )
            // );
            // File (document) message content example.
            // $message->addContent(
            //     new FileContent(
            //         content: base64_encode(file_get_contents(__DIR__.'/invoice.pdf')),
            //         sourceType: SourceType::BASE64,
            //         mediaType: 'application/pdf'
            //     )
            // );

            $reply = $this->agent->make()->chat($message);
            $usage = $reply->getUsage();
            $totalUsage = $usage ? $usage->getTotal() : 0;
            $connection->send(json_encode([
                'reply' => $reply->getContent(),
                'model' => getenv('AI_MODEL'),
                'usage' => $totalUsage,
            ], JSON_UNESCAPED_UNICODE));
        } catch (\Exception $e) {
            // NOTE(review): exposing the file path and line number to the
            // client leaks implementation details; prefer logging server-side.
            $connection->send(json_encode([
                'reply' => $e->getFile() . $e->getLine() . $e->getMessage(),
            ], JSON_UNESCAPED_UNICODE));
        }
    }

Tools

注意:使用Tools需要大模型支持Tools调用功能, 不然会报错Function call is not supported for this model.

创建Tool

这里以创建一个IP转地址的工具为例,Ip2AddressTools.php

<?php

namespace App\tools;

use GuzzleHttp\Client;
use NeuronAI\Tools\PropertyType;
use NeuronAI\Tools\Tool;
use NeuronAI\Tools\ToolProperty;

/**
 * Tool: resolve an IP address to its region and city via ip-api.com.
 */
class Ip2AddressTools extends Tool
{
    /** @var Client lazily-created HTTP client bound to ip-api.com */
    protected Client $client;

    public function __construct()
    {
        // Define Tool name and description (what the model sees when
        // deciding whether to call this tool).
        parent::__construct(
            'ip_to_address',
            '根据IP地址查询对应的省份和城市信息',
        );
    }

    /**
     * Return the list of properties (the tool's input schema).
     */
    protected function properties(): array
    {
        return [
            new ToolProperty(
                name: 'ip',
                type: PropertyType::STRING,
                description: 'The IP address.',
                required: true
            )
        ];
    }

    /**
     * Tool logic: query ip-api.com and return "<region><city>".
     *
     * Returns an explanatory message instead of emitting warnings when the
     * response cannot be decoded or the API reports a lookup failure
     * (ip-api.com answers {"status":"fail",...} for invalid/private IPs).
     */
    public function __invoke(string $ip): string
    {
        $body = $this->getClient()
            ->get($ip . '?lang=zh-CN')
            ->getBody()
            ->getContents();

        $response = json_decode($body, true);

        // Guard against malformed JSON and failed lookups; the original code
        // dereferenced regionName/city unconditionally.
        if (!is_array($response) || ($response['status'] ?? '') === 'fail') {
            return '无法解析该IP地址: ' . $ip;
        }

        return ($response['regionName'] ?? '') . ($response['city'] ?? '');
    }

    protected function getClient(): Client
    {
        // Create the client on first use and cache it on the instance.
        return $this->client ??= new Client([
            'base_uri' => 'http://ip-api.com/json/',
            'headers' => []
        ]);
    }
}
在Agent中注册
<?php
/**
 * OpenAI-compatible LLM agent definition, with the IP-lookup tool registered.
 */
declare(strict_types=1);
namespace app\agent;

use App\tools\Ip2AddressTools;
use GuzzleHttp\Handler\StreamHandler;
use GuzzleHttp\HandlerStack;
use NeuronAI\Agent\Agent;
use NeuronAI\Agent\SystemPrompt;
use NeuronAI\Providers\AIProviderInterface;
use NeuronAI\Providers\HttpClientOptions;
use NeuronAI\Providers\OpenAILike;

class OpenAIAgent extends Agent
{
    /**
     * Build the AI provider used by this agent.
     *
     * Fails fast with a clear message when the required environment variables
     * are missing: getenv() returns false in that case, which would otherwise
     * surface as an opaque TypeError under strict_types.
     *
     * @throws \RuntimeException when AI_API_URL, AI_API_KEY or AI_MODEL is not set
     */
    protected function provider(): AIProviderInterface
    {
        // Swoole compatibility: force Guzzle's StreamHandler to avoid
        // CurlMultiHandler crashes and to keep streaming output working.
        $handler = new StreamHandler();
        $stack = HandlerStack::create($handler);

        $baseUri = getenv('AI_API_URL');
        $key = getenv('AI_API_KEY');
        $model = getenv('AI_MODEL');
        if ($baseUri === false || $key === false || $model === false) {
            throw new \RuntimeException('AI_API_URL, AI_API_KEY and AI_MODEL environment variables must be set.');
        }

        // OpenAILike works with any OpenAI-compatible endpoint (the author uses
        // a SiliconFlow-hosted model); swap the provider class for other vendors.
        // Supported providers: https://docs.neuron-ai.dev/the-basics/ai-provider#openai
        return new OpenAILike(
            baseUri: $baseUri,
            key: $key,
            model: $model,
            parameters: [], // Add custom params (temperature, logprobs, etc)
            strict_response: false, // Strict structured output
            httpOptions: new HttpClientOptions(
                timeout: 30,
                handler: $stack
            ),
        );
    }

    /**
     * System instructions (system prompt) for the agent.
     */
    public function instructions(): string
    {
        return (string) new SystemPrompt(
            background: [
                "你是我的女朋友,请用娇羞可爱的语气回答问题",
            ],
            // steps: [
            //     "Get the url of a YouTube video, or ask the user to provide one.",
            //     "Use the tools you have available to retrieve the transcription of the video.",
            //     "Write the summary.",
            // ],
            // output: [
            //     "Write a summary in a paragraph without using lists. Use just fluent text.",
            //     "After the summary add a list of three sentences as the three most important take away from the video.",
            // ]
        );
    }

    /**
     * Tools available to the agent.
     *
     * Fixed: the class created earlier is App\tools\Ip2AddressTools; the
     * original snippet imported/called a non-existent Ip2AddressTool.
     *
     * @return \NeuronAI\Tools\ToolInterface[]
     */
    protected function tools(): array
    {
        return [
            // Register the tool; pass constructor arguments through make()
            // when the tool's constructor requires any.
            Ip2AddressTools::make(),
        ];
    }
}
使用

直接在聊天中对AI提问,AI会自动调用这个Tools并获取到查询结果然后作答, 例如:查一下ip地址113.77.25.7是哪个地方的?

历史记录

修改Agent.php,增加chatHistory()

<?php
/**
 * @desc OpenAI大模型服务提供
 */
declare(strict_types=1);
namespace app\agent;

use App\tools\Ip2AddressTools;
use GuzzleHttp\Handler\StreamHandler;
use GuzzleHttp\HandlerStack;
use NeuronAI\Agent\Agent;
use NeuronAI\Agent\SystemPrompt;
use NeuronAI\Chat\History\ChatHistoryInterface;
use NeuronAI\Chat\History\FileChatHistory;
use NeuronAI\Chat\History\InMemoryChatHistory;
use NeuronAI\Providers\AIProviderInterface;
use NeuronAI\Providers\HttpClientOptions;
use NeuronAI\Providers\OpenAILike;
class OpenAIAgent extends Agent
{
    /**
     * Build the OpenAI-compatible provider used by this agent.
     *
     * @return AIProviderInterface configured provider instance
     * @throws \RuntimeException when a required environment variable is missing
     */
    protected function provider(): AIProviderInterface
    {
        // Swoole compatibility: force StreamHandler so the default
        // CurlMultiHandler does not crash, while still supporting
        // streamed responses.
        $stack = HandlerStack::create(new StreamHandler());

        // getenv() returns false when a variable is unset; with
        // strict_types=1 that would surface as a TypeError inside
        // OpenAILike, so validate up front and fail with a clear message.
        $baseUri = getenv('AI_API_URL');
        $apiKey = getenv('AI_API_KEY');
        $model = getenv('AI_MODEL');
        if ($baseUri === false || $apiKey === false || $model === false) {
            throw new \RuntimeException('AI_API_URL, AI_API_KEY and AI_MODEL environment variables must be set.');
        }

        // Replace apiKey/model with your own values. This example targets a
        // SiliconFlow-hosted model, hence the generic OpenAILike provider;
        // switch the Provider class for other vendors.
        // Supported providers: https://docs.neuron-ai.dev/the-basics/ai-provider#openai
        return new OpenAILike(
            baseUri: $baseUri,
            key: $apiKey,
            model: $model,
            parameters: [], // Add custom params (temperature, logprobs, etc)
            strict_response: false, // Strict structured output
            httpOptions: new HttpClientOptions(
                timeout: 30,
                handler: $stack,
            ),
        );
    }

    /**
     * System instructions sent to the model (background left empty here;
     * the commented line shows an example persona).
     */
    public function instructions(): string
    {
        return (string) new SystemPrompt(
            background: [
                // "你是我的女朋友,请用娇羞可爱的语气回答问题",
            ],
            // steps:  an ordered list of actions the agent should follow.
            // output: constraints on the response format.
        );
    }

    /**
     * Tools exposed to the model for function calling.
     *
     * @return \NeuronAI\Tools\ToolInterface[]
     */
    protected function tools(): array
    {
        return [
            Ip2AddressTools::make(), // Register tools; constructor arguments, if any, go into make()
        ];
    }

    /**
     * Chat history backend. Only one strategy is active; the alternatives
     * below are kept as commented reference configurations.
     */
    protected function chatHistory(): ChatHistoryInterface
    {
        // In-memory storage: lives only for the current request and is
        // discarded once the AI has replied.
        // return new InMemoryChatHistory(
        //     contextWindow: 50000 // max stored tokens, default 50000
        // );

        // File storage: persists and restores history; requires a directory.
        return new FileChatHistory(
            directory: '/app/runtime/logs/neuron',
            key: 'USER_ID', // key distinguishes users/sessions and must be unique
            contextWindow: 50000
        );

        // Database storage: requires a configured database and table.
        // return new SQLChatHistory(
        //     thread_id: 'THREAD_ID',
        //     pdo: new \PDO("mysql:host=localhost;dbname=DB_NAME;charset=utf8mb4", "DB_USER", "DB_PASS"),
        //     table: 'chat_history',
        //     contextWindow: 50000
        // );

        // ORM model storage: requires a model class and table.
        // return new EloquentChatHistory(
        //     thread_id: 'THREAD_ID',
        //     modelClass: ChatMessage::class,
        //     contextWindow: 100000
        // );
    }
}

结构化输出

先定义一个包含所有属性的结构类,例如

<?php

namespace app\output;

use NeuronAI\StructuredOutput\SchemaProperty;

// Structured-output schema: one instance is hydrated from the model's JSON reply.
class PersonOutput
{
    // Person's name; the model must always provide this field.
    #[SchemaProperty(
        description: '姓名',
        required: true
    )]
    public string $name;

    // Hobby is optional (required: false). Default to null so reading the
    // property never throws an "uninitialized typed property" Error when
    // the model omits it; null is safe in string concatenation at call sites.
    #[SchemaProperty(
        description: '爱好',
        required: false
    )]
    public ?string $preference = null;
}

然后在聊天的时候不再使用chat,而是使用structured并指定输出结构类

<?php

namespace app\controller;

use app\agent\OpenAIAgent;
use app\output\PersonOutput;
use NeuronAI\Chat\Enums\SourceType;
use NeuronAI\Chat\Messages\ContentBlocks\FileContent;
use NeuronAI\Chat\Messages\ContentBlocks\ImageContent;
use NeuronAI\Chat\Messages\UserMessage;
use support\Request;
use Workerman\Connection\TcpConnection;
use Workerman\Protocols\Http\Chunk;
use Workerman\Timer;

/**
 * Neuron AI开发
 * 
 * 相关文档:
 * https://docs.neuron-ai.dev/the-basics/ai-provider
 * https://github.com/neuron-core/neuron-ai
 * https://mp.weixin.qq.com/s/rtjW-13j5Di9jxdgxRpClw
 */
class NeuronAiController
{
    // Shared agent instance for this controller.
    protected OpenAIAgent $agent;

    public function __construct()
    {
        $this->agent = new OpenAIAgent();
    }

    /**
     * Chat entry point: validates the message and dispatches to the
     * streaming or non-streaming handler based on the `stream` input.
     */
    public function index(Request $request)
    {
        $connection = $request->connection;
        $message = $request->input('message');
        $stream = $request->input('stream');

        // input() yields null when the parameter is absent; reject both
        // null and the empty string explicitly (strict comparison).
        if ($message === null || $message === '') {
            return json(['code' => 500, 'reply' => '请输入内容']);
        }

        return $stream ? $this->chatWithStream($connection, $message) : $this->chatWithoutStream($connection, $message);
    }

    // Chat page
    public function chat(Request $request)
    {
        return view('index/neuron_chat');
    }

    // Non-streaming reply
    public function chatWithoutStream($connection, $message)
    {
        try {
            // Wrap the raw text as a user message.
            $message = new UserMessage($message);

            // $reply = $this->agent->make()->chat($message); // plain chat
            // Structured-output chat: the reply is hydrated into PersonOutput.
            $person = $this->agent->make()->structured($message, PersonOutput::class);
            $connection->send(json_encode([
                'reply' => $person->name . '的爱好是' . $person->preference,
            ], JSON_UNESCAPED_UNICODE));
        } catch (\Throwable $e) {
            // Catch Throwable (not just Exception) so TypeError/Error during
            // hydration still produces a response instead of a dead connection.
            // NOTE(review): echoing file/line/message to the client helps while
            // debugging but leaks internals — use a generic message in production.
            $connection->send(json_encode([
                'reply' => $e->getFile() . $e->getLine() . $e->getMessage(),
            ], JSON_UNESCAPED_UNICODE));
        }
    }
}

这里我碰到一个bug,我使用的是硅基流动上的模型,这样调用会报错/app/vendor/neuron-core/neuron-ai/src/Agent/Nodes/StructuredOutputNode.php:170 The response does not contains a valid JSON Object.,在AI的帮助下修改扩展成功运行

1.在vendor\neuron-core\neuron-ai\src\Providers\OpenAI\HandleChat.php的68行增加判断

// Handle DeepSeek/SiliconFlow reasoning_content
// (these providers return chain-of-thought in a separate message field that
// the stock OpenAI handler ignores; attach it as a ReasoningContent block)
if (isset($result['choices'][0]['message']['reasoning_content'])) {
    $response->addContent(new ReasoningContent($result['choices'][0]['message']['reasoning_content']));
}

2.在vendor\neuron-core\neuron-ai\src\Providers\OpenAI\HandleStream.php的processContentDelta方法中增加判断

// Forward streamed reasoning_content deltas: record them on the current
// content block and also yield them as text chunks so the consumer sees them.
if ($reasoning = $choice['delta']['reasoning_content'] ?? null) {
    $this->streamState->updateContentBlock($choice['index'], new ReasoningContent($reasoning));
    yield new TextChunk($this->streamState->messageId(), $reasoning);
}

调用MCP

修改Agent.php,在tools方法中增加McpConnector即可

<?php
/**
 * @desc OpenAI大模型服务提供
 */
declare(strict_types=1);
namespace app\agent;

use App\tools\Ip2AddressTools;
use GuzzleHttp\Handler\StreamHandler;
use GuzzleHttp\HandlerStack;
use NeuronAI\Agent\Agent;
use NeuronAI\Agent\SystemPrompt;
use NeuronAI\Chat\History\ChatHistoryInterface;
use NeuronAI\Chat\History\FileChatHistory;
use NeuronAI\Chat\History\InMemoryChatHistory;
use NeuronAI\MCP\McpConnector;
use NeuronAI\Providers\AIProviderInterface;
use NeuronAI\Providers\HttpClientOptions;
use NeuronAI\Providers\OpenAILike;
class OpenAIAgent extends Agent
{
    /**
     * Build the OpenAI-compatible provider used by this agent.
     *
     * @return AIProviderInterface configured provider instance
     * @throws \RuntimeException when a required environment variable is missing
     */
    protected function provider(): AIProviderInterface
    {
        // Swoole compatibility: force StreamHandler so the default
        // CurlMultiHandler does not crash, while still supporting
        // streamed responses.
        $stack = HandlerStack::create(new StreamHandler());

        // getenv() returns false when a variable is unset; with
        // strict_types=1 that would surface as a TypeError inside
        // OpenAILike, so validate up front and fail with a clear message.
        $baseUri = getenv('AI_API_URL');
        $apiKey = getenv('AI_API_KEY');
        $model = getenv('AI_MODEL');
        if ($baseUri === false || $apiKey === false || $model === false) {
            throw new \RuntimeException('AI_API_URL, AI_API_KEY and AI_MODEL environment variables must be set.');
        }

        // Replace apiKey/model with your own values. This example targets a
        // SiliconFlow-hosted model, hence the generic OpenAILike provider;
        // switch the Provider class for other vendors.
        // Supported providers: https://docs.neuron-ai.dev/the-basics/ai-provider#openai
        return new OpenAILike(
            baseUri: $baseUri,
            key: $apiKey,
            model: $model,
            parameters: [
                'temperature' => 0.7,
                // NOTE(review): chat-completions style APIs usually expect
                // 'max_tokens'; confirm 'max_output_tokens' is the key your
                // provider / NeuronAI version actually forwards.
                'max_output_tokens' => 2048,
            ], // Add custom params (temperature, logprobs, etc)
            strict_response: false, // Strict structured output
            httpOptions: new HttpClientOptions(
                timeout: 30,
                handler: $stack,
            ),
        );
    }

    /**
     * System instructions sent to the model on every conversation.
     */
    public function instructions(): string
    {
        return (string) new SystemPrompt(
            background: [
                "请用最快的速度回答问题,不要输出思考过程,直接给出答案,答案尽量简单明了",
            ],
        );
    }

    /**
     * Tools exposed to the model, including tools discovered from MCP servers.
     *
     * @return \NeuronAI\Tools\ToolInterface[]
     */
    protected function tools(): array
    {
        return [
            // Ip2AddressTools::make(), // Register tools; constructor arguments, if any, go into make()
            // Local MCP server (spawned as a subprocess)
            ...McpConnector::make([
                'command' => 'npx',
                'args' => ["-y", "@liushoukai/rust-mcp-client"],
            ])->tools(),

            // Remote MCP server
            // ...McpConnector::make([
            //     'url' => 'https://mcp.example.com',
            //     'token' => 'BEARER_TOKEN',
            //     'timeout' => 30,
            //     'headers' => [
            //         //'x-custom-header' => 'value'
            //     ]
            // ])->tools(),
        ];
    }
}

其他

其他功能就不一一列举了,可以参考官方文档

参考文档

Openai 异步客户端 支持ChatGPT Dall.E等模型-webman

webman-php/openai: OpenAI PHP asynchronous client for workerman and webman.

Logo

有“AI”的1024 = 2048,欢迎大家加入2048 AI社区

更多推荐