poc: removing openai dependency
madox2 committed Apr 13, 2023
1 parent 0058b3f commit 9a87280
Showing 3 changed files with 35 additions and 20 deletions.
6 changes: 1 addition & 5 deletions py/chat.py
@@ -5,8 +5,6 @@
 config_options = vim.eval("options")
 config_ui = vim.eval("ui")
 
-openai.api_key = load_api_key()
-
 def initialize_chat_window():
     lines = vim.eval('getline(1, "$")')
     contains_user_prompt = '>>> user' in lines
@@ -78,7 +76,7 @@ def parse_chat_header_options():
         **request_options
     }
     printDebug("[chat] request: {}", request)
-    response = openai.ChatCompletion.create(**request)
+    response = openai_request('https://api.openai.com/v1/chat/completions', request)
     def map_chunk(resp):
         printDebug("[chat] response: {}", resp)
         return resp['choices'][0]['delta'].get('content', '')
@@ -89,5 +87,3 @@ def map_chunk(resp):
     vim.command("redraw")
 except KeyboardInterrupt:
     vim.command("normal! a Ctrl-C...")
-except openai.error.Timeout:
-    vim.command("normal! aRequest timeout...")
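
The new openai_request helper (added in py/utils.py below) yields the objects parsed from the stream's "data: " lines, so each chunk keeps essentially the same dict shape the SDK's streaming call produced and map_chunk works unchanged. A small self-contained illustration (chunk shapes abbreviated here, not an exact API transcript):

# Each yielded chat chunk looks roughly like the dicts below; the final chunk
# carries an empty delta, which is why map_chunk falls back to ''.
def map_chunk(resp):
    return resp['choices'][0]['delta'].get('content', '')

chunk = {'choices': [{'delta': {'content': 'Hel'}, 'index': 0}]}
final_chunk = {'choices': [{'delta': {}, 'index': 0, 'finish_reason': 'stop'}]}

print(map_chunk(chunk))        # -> 'Hel'
print(map_chunk(final_chunk))  # -> '' ('content' missing, default used)
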
8 changes: 2 additions & 6 deletions py/complete.py
@@ -8,16 +8,14 @@
 
 prompt = vim.eval("prompt").strip()
 
-openai.api_key = load_api_key()
-
 def complete_engine(prompt):
     request = {
         'stream': True,
         'prompt': prompt,
         **request_options
     }
     printDebug("[engine-complete] request: {}", request)
-    response = openai.Completion.create(**request)
+    response = openai_request('https://api.openai.com/v1/completions', request)
     def map_chunk(resp):
         printDebug("[engine-complete] response: {}", resp)
         return resp['choices'][0].get('text', '')
@@ -35,7 +33,7 @@ def chat_engine(prompt):
         **request_options
     }
     printDebug("[engine-chat] request: {}", request)
-    response = openai.ChatCompletion.create(**request)
+    response = openai_request('https://api.openai.com/v1/chat/completions', request)
     def map_chunk(resp):
         printDebug("[engine-chat] response: {}", resp)
         return resp['choices'][0]['delta'].get('content', '')
@@ -52,5 +50,3 @@ def map_chunk(resp):
     render_text_chunks(text_chunks)
 except KeyboardInterrupt:
     vim.command("normal! a Ctrl-C...")
-except openai.error.Timeout:
-    vim.command("normal! aRequest timeout...")
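
Both engines share the same lazy pipeline: map() wraps the response generator and render_text_chunks pulls chunks one at a time, so text appears as the HTTP stream arrives. A self-contained sketch of that pipeline, with a stand-in generator and a print-based renderer in place of the real openai_request and the Vim-buffer renderer from py/utils.py:

# Stand-in for openai_request(...): yields already-parsed completion chunks.
def fake_response():
    yield {'choices': [{'text': 'Hello'}]}
    yield {'choices': [{'text': ', world'}]}

# Completions chunks carry plain 'text' (chat chunks use 'delta.content' instead).
def map_chunk(resp):
    return resp['choices'][0].get('text', '')

# Stand-in renderer; the real one writes into the Vim buffer.
def render_text_chunks(chunks):
    for text in chunks:
        print(text, end='', flush=True)

text_chunks = map(map_chunk, fake_response())  # lazy: nothing is read yet
render_text_chunks(text_chunks)                # prints "Hello, world" incrementally
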
41 changes: 32 additions & 9 deletions py/utils.py
@@ -1,14 +1,9 @@
 import datetime
 import sys
 import os
-
-try:
-    import openai
-except ImportError:
-    raise Exception("OpenAI module not found. Please install it with pip.")
-
-is_debugging = vim.eval("g:vim_ai_debug") == "1"
-debug_log_file = vim.eval("g:vim_ai_debug_log_file")
+import json
+import urllib.error
+import urllib.request
 
 def load_api_key():
     config_file_path = os.path.join(os.path.expanduser("~"), ".config/openai.token")
@@ -22,12 +17,14 @@ def load_api_key():
         raise Exception("Missing OpenAI API key")
     return api_key.strip()
 
+is_debugging = vim.eval("g:vim_ai_debug") == "1"
+debug_log_file = vim.eval("g:vim_ai_debug_log_file")
+
 def make_request_options(options):
     request_options = {}
     request_options['model'] = options['model']
     request_options['max_tokens'] = int(options['max_tokens'])
     request_options['temperature'] = float(options['temperature'])
-    request_options['request_timeout'] = float(options['request_timeout'])
     return request_options
 
 def render_text_chunks(chunks):
@@ -71,3 +68,29 @@ def printDebug(text, *args):
         return
     with open(debug_log_file, "a") as file:
         file.write(f"[{datetime.datetime.now()}] " + text.format(*args) + "\n")
+
+OPENAI_RESP_DATA_PREFIX = 'data: '
+OPENAI_RESP_DONE = '[DONE]'
+OPENAI_API_KEY = load_api_key()
+
+def openai_request(url, data):
+    headers = {
+        "Content-Type": "application/json",
+        "Authorization": f"Bearer {OPENAI_API_KEY}"
+    }
+    req = urllib.request.Request(
+        url,
+        data=json.dumps(data).encode("utf-8"),
+        headers=headers,
+        method="POST",
+    )
+    with urllib.request.urlopen(req) as response:
+        for line_bytes in response:
+            line = line_bytes.decode("utf-8", errors="replace")
+            if line.startswith(OPENAI_RESP_DATA_PREFIX):
+                line_data = line[len(OPENAI_RESP_DATA_PREFIX):-1]
+                if line_data == OPENAI_RESP_DONE:
+                    pass
+                else:
+                    openai_obj = json.loads(line_data)
+                    yield openai_obj
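
One behavioural consequence of this change: make_request_options no longer forwards request_timeout, and the openai.error.Timeout handlers are gone from chat.py and complete.py, so the plain urlopen() call above blocks with no deadline. If a timeout were wanted back, urlopen() itself accepts one; the following is only a sketch of that possibility, not code from this commit, and the function name, parameters, and parsing tweaks are assumptions for illustration:

# Hypothetical timeout-aware variant of openai_request (not part of this commit).
# urllib.request.urlopen() takes a timeout in seconds; a read that times out
# raises socket.timeout (the same as the built-in TimeoutError on Python 3.10+).
import json
import urllib.request

def openai_request_with_timeout(url, data, api_key, timeout=20):
    req = urllib.request.Request(
        url,
        data=json.dumps(data).encode("utf-8"),
        headers={
            "Content-Type": "application/json",
            "Authorization": f"Bearer {api_key}",
        },
        method="POST",
    )
    with urllib.request.urlopen(req, timeout=timeout) as response:
        for line_bytes in response:
            line = line_bytes.decode("utf-8", errors="replace")
            if line.startswith('data: '):
                line_data = line[len('data: '):].strip()
                if line_data != '[DONE]':
                    yield json.loads(line_data)

A caller would then wrap the iteration in try/except socket.timeout (connection-phase timeouts may instead surface as urllib.error.URLError) and print the same "Request timeout..." message the removed handlers used.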
