In "OpenAI APIを使う" (Using the OpenAI API) I looked at how to use the API for the chat feature. The result can either be fetched in a single response, or received incrementally, streaming-style, via Server-Sent Events. To receive Server-Sent Events, the stream parameter in the request body has to be set to true.
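For reference, the request body that openai-chat-create-request builds later in this post looks like this, and the streamed response then arrives as a series of data: lines (the chunk payloads below are abbreviated; the actual objects carry more fields):

{
  "model": "gpt-4o-2024-05-13",
  "messages": [
    {"role": "system", "content": "..."},
    {"role": "user", "content": "..."}
  ],
  "stream": true
}

data: {"choices":[{"delta":{"content":"Hel"}}]}
data: {"choices":[{"delta":{"content":"lo"}}]}
data: [DONE]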
This time I used the streaming approach to extend Emacs so that it feels similar to using ChatGPT.
When a chat is started with openai-chat-question and a region is active, the region text is now pre-filled as the question with "> " inserted at the beginning of each line, like a quote:
(list (read-string-from-buffer
       "Question"
       (if (region-active-p)
           (let ((txt (buffer-substring-no-properties
                       (region-beginning) (region-end))))
             (with-temp-buffer
               (insert txt)
               (goto-char (point-min))
               (replace-regexp "^" "> ")
               (buffer-string)))
         "")))
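As a quick check, the same line quoting can be reproduced in a scratch buffer. The helper below is only an illustration and not part of openai.el; it uses re-search-forward and replace-match, which is equivalent to the replace-regexp call above:

;; Prefix every line of TXT with "> ", like the interactive spec above.
(defun my/openai-quote-lines (txt)
  (with-temp-buffer
    (insert txt)
    (goto-char (point-min))
    (while (re-search-forward "^" nil t)
      (replace-match "> "))
    (buffer-string)))

(my/openai-quote-lines "first line\nsecond line")
;; => "> first line\n> second line"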
Here is the Emacs Lisp.
openai.el
;;; openai.el --- OpenAI API Utility -*- lexical-binding: t -*-
;; Copyright (C) 2024 TakesxiSximada
;; Author: TakesxiSximada <[email protected]>
;; Maintainer: TakesxiSximada <[email protected]>
;; Repository:
;; Version: 2
;; Package-Version: 20240515.0000
;; Package-Requires: ((emacs "28.0"))
;; Date: 2024-05-15
;; This file is part of openai.el.
;; This program is free software: you can redistribute it and/or modify
;; it under the terms of the GNU General Public License as published by
;; the Free Software Foundation, either version 3 of the License, or
;; (at your option) any later version.
;; This program is distributed in the hope that it will be useful,
;; but WITHOUT ANY WARRANTY; without even the implied warranty of
;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
;; GNU General Public License for more details.
;; You should have received a copy of the GNU General Public License
;; along with this program. If not, see <http://www.gnu.org/licenses/>.
;;; Code:
(defvar openai-api-key nil)
(defvar openai-chat-result-buffer-name "*ChatGPT Result*")
(defvar openai-chat-request-buffer-name "*ChatGPT Request*")
(defvar openai-chat-response-buffer-name "*ChatGPT Response*")
(defvar openai-chat-response-error-buffer-name "*ChatGPT Response Error*")
(defvar openai-chat-send-request-process nil)
(defvar openai-chat-chars-q nil)
(defvar openai-chat-chars-timer nil)
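;; Streamed content fragments are appended to openai-chat-chars-q by
;; openai-chat-parse-response and flushed into the result buffer by
;; openai-chat-sync-results, either directly from the process filter or
;; from the optional idle timer below.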
(defun openai-chat-sync-results ()
  "Flush queued response fragments into the result buffer and display it."
  (while openai-chat-chars-q
    (with-current-buffer (get-buffer-create openai-chat-result-buffer-name)
      (goto-char (point-max))
      (let ((ch (car openai-chat-chars-q)))
        (if (string-equal "" ch)
            (insert "\n")
          (insert ch)))
      (setq openai-chat-chars-q (cdr openai-chat-chars-q))))
  (pop-to-buffer openai-chat-result-buffer-name))
(defun openai-chat-chars-start-timer ()
  "Start an idle timer that periodically flushes queued fragments."
  (interactive)
  (unless openai-chat-chars-timer
    (setq openai-chat-chars-timer
          (run-with-idle-timer 1 t #'openai-chat-sync-results))))
(defun openai-chat-chars-cancel-timer ()
  "Cancel the idle timer started by `openai-chat-chars-start-timer'."
  (interactive)
  (when openai-chat-chars-timer
    (cancel-timer openai-chat-chars-timer)
    (setq openai-chat-chars-timer nil)))
(require 'json)
(require 'seq)     ; seq-first
(require 'subr-x)  ; if-let, string-blank-p
(defcustom openai-chat-system-pre-sentence nil
  "Preamble text sent to the API as the system message."
  :type 'string
  :group 'openai)
(defun openai-chat-create-request (sentence)
  "Record SENTENCE in the result buffer and build the JSON request body."
  (interactive "sSentence: ")
  (with-current-buffer (get-buffer-create openai-chat-result-buffer-name)
    (goto-char (point-max))
    (insert "\n\n------------------------------------------------\n")
    (insert "ME: ")
    (insert sentence)
    (insert "\n------------------------------------------------\n")
    (insert "AI: "))
  (with-current-buffer (get-buffer-create openai-chat-request-buffer-name)
    (erase-buffer)
    (insert
     (format "{
  \"model\": \"gpt-4o-2024-05-13\",
  \"messages\": [
    {
      \"role\": \"system\",
      \"content\": %s
    },
    {
      \"role\": \"user\",
      \"content\": %s
    }
  ],
  \"stream\": true
}"
             (json-encode-string (or openai-chat-system-pre-sentence ""))
             (json-encode-string sentence)))))
(defun openai-chat-parse-response ()
  "Move complete \"data:\" lines from the response buffer onto the queue."
  (interactive)
  (with-current-buffer openai-chat-response-buffer-name
    (save-excursion
      (goto-char (point-min))
      (while (re-search-forward "^data: \\(.*\\)\n" nil t)
        (if-let ((txt (buffer-substring-no-properties
                       (match-beginning 1) (match-end 1))))
            (progn
              (delete-region (point-min) (match-end 1))
              (unless (string-equal txt "[DONE]")
                (if-let ((content
                          (cdr (assoc 'content
                                      (cdr (assoc 'delta
                                                  (seq-first
                                                   (cdr (assoc 'choices
                                                               (json-read-from-string txt))))))))))
                    (setq openai-chat-chars-q
                          (append openai-chat-chars-q (list content)))))))))))
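;; The request is sent with curl running as an asynchronous process.  The
;; JSON body is piped to curl's stdin ("-d @-"), and the process filter
;; parses the output incrementally as it arrives.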
(defun openai-chat-send-request ()
  "Send the request buffer to the API with curl and stream the response."
  (interactive)
  (if-let ((req-body (with-current-buffer openai-chat-request-buffer-name
                       (buffer-string))))
      (progn
        (setq openai-chat-send-request-process
              (make-process
               :name "*OpenAI Chat*"
               :buffer openai-chat-response-buffer-name
               :command `("curl" "https://api.openai.com/v1/chat/completions"
                          "-X" "POST"
                          "-H" "Content-Type: application/json"
                          "-H" ,(format "Authorization: Bearer %s" openai-api-key)
                          "-d" "@-")
               :connection-type 'pipe
               :coding 'utf-8
               :filter (lambda (process output)
                         (with-current-buffer (process-buffer process)
                           (goto-char (point-max))
                           (insert output)
                           (openai-chat-parse-response)
                           (openai-chat-sync-results)))
               :sentinel (lambda (process event)
                           (openai-chat-parse-response)
                           (openai-chat-sync-results))
               :stderr openai-chat-response-error-buffer-name))
        (process-send-string openai-chat-send-request-process req-body)
        (process-send-eof openai-chat-send-request-process))))
;;;###autoload
(defun openai-chat-question (question)
  "Ask QUESTION; with an active region, pre-fill it quoted with \"> \"."
  (interactive
   (list (read-string-from-buffer
          "Question"
          (if (region-active-p)
              (let ((txt (buffer-substring-no-properties
                          (region-beginning) (region-end))))
                (with-temp-buffer
                  (insert txt)
                  (goto-char (point-min))
                  (replace-regexp "^" "> ")
                  (buffer-string)))
            ""))))
  (when (and question (not (string-blank-p question)))
    (openai-chat-create-request question)
    (openai-chat-send-request)))
;;;###autoload
(defun openai-chat-create-and-send-request-for-assist (content)
  "Create a request from CONTENT and send it."
  (interactive "sContent: ")
  (when content
    (openai-chat-create-request content)
    (openai-chat-send-request)))
(provide 'openai)
;;; openai.el ends here
(Addendum, 2024-05-15) Updated the code to use a newer model.
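A minimal setup sketch for trying it out; the key value and the preamble text here are placeholders, so adjust them to your environment:

;; Assumes openai.el is somewhere on load-path.
(require 'openai)

;; Used by openai-chat-send-request for the Authorization header.
;; Placeholder value; load it from auth-source or an environment
;; variable in practice.
(setq openai-api-key "sk-...")

;; Optional system-message preamble sent with every request.
(setq openai-chat-system-pre-sentence "You are a helpful assistant.")

;; Then run M-x openai-chat-question, or select a region first to have
;; it quoted with "> " as the initial question text.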