...
|
...
|
@@ -24,6 +24,7 @@ use App\Models\User\User; |
|
|
use App\Util\OpenAI\src\OpenAi;
|
|
|
use Exception;
|
|
|
use GuzzleHttp\Client;
|
|
|
use Illuminate\Support\Arr;
|
|
|
use Illuminate\Support\Facades\Cache;
|
|
|
use Illuminate\Support\Facades\Log;
|
|
|
use Illuminate\Support\Str;
|
...
|
...
|
@@ -71,19 +72,27 @@ trait ChatTrait |
|
|
$consume_times = $prepare['consume_times'];
|
|
|
$cache_key = $prepare['cache_key'];
|
|
|
|
|
|
// Select the response mode: streaming replies are flushed to the client
// chunk-by-chunk as tokens arrive ('chunked'); otherwise we wait for the
// complete answer in a single response ('general').
//
// NOTE(review): the previous code issued TWO OpenAI requests — one inside
// an if/else (which also echoed chunked output) and a second unconditional
// one afterwards that overwrote $answer — and the two paths disagreed on
// which mode went with $stream. Collapsed to a single request here;
// confirm the chunked-when-streaming mapping against the client contract.
$type = $stream === true ? 'chunked' : 'general';

$open_ai = new OpenAi($open_api_key);

// $open_ai->setProxy("http://47.251.45.96");

$answer = $this->sendRequest($open_ai, $send_data, $type);
|
|
|
|
|
|
if ($context == 1) {
|
|
|
// 响应消息
|
...
|
...
|
@@ -271,6 +280,7 @@ trait ChatTrait |
|
|
*/
|
|
|
public static function sendRequest($open_ai, $send_data, $type = 'chunked')
|
|
|
{
|
|
|
// dump($type);
|
|
|
$answer = '';
|
|
|
if ($type === 'chunked') {
|
|
|
// 设置响应头信息
|
...
|
...
|
@@ -307,8 +317,8 @@ trait ChatTrait |
|
|
}
|
|
|
foreach ($data as $message) {
|
|
|
if ('[DONE]' === $message) {
|
|
|
echo "0\r\n\r\n";
|
|
|
// echo "[EVENT:STOPPED|DATA:DONE]\r\n\r\n";
|
|
|
// echo "[DONE]\r\n\r\n";
|
|
|
echo "[EVENT:STOPPED|DATA:DONE]\r\n\r\n";
|
|
|
} else {
|
|
|
$message = json_decode($message, true);
|
|
|
$content = $message['choices'][0]['delta']['content'] ?? '';
|
...
|
...
|
@@ -399,11 +409,15 @@ trait ChatTrait |
|
|
// echo "event: stop\n";
|
|
|
// echo "data: stopped\n\n";
|
|
|
} elseif ($type === 'general') {
    // Non-streaming mode: perform one chat-completion request and decode
    // the full JSON payload into an associative array.
    //
    // NOTE(review): removed a leftover Tectalic chatCompletions() call that
    // fired a second, unused API request, and removed
    // `$answer = $response->choices[0]->message->content;` which read a
    // property on the decoded *array* — a fatal Error in PHP 8.
    $response = $open_ai->chat($send_data);
    $response = json_decode($response, true);

    // First choice -> message.content; Arr::get() yields null if the
    // response is malformed instead of throwing.
    $answer = Arr::get(Arr::first(Arr::get($response, 'choices')), 'message.content');
}
|
|
|
|
|
|
return $answer;
|
...
|
...
|
|