
Commit e854ada

Merge pull request #5 from dasmy/dev/conversation_history
Dev/conversation history
2 parents 211cc87 + e839ee9 · commit e854ada

File tree

9 files changed: +139 -122 lines

frontend/src/App.tsx

Lines changed: 1 addition & 14 deletions

@@ -41,7 +41,7 @@ function App() {
   let [messages, setMessages] = useState<Array<MessageDict>>(
     Array.from([
       {
-        text: "Hello! I'm a GPT Code assistant. Ask me to do something for you! Pro tip: you can upload a file and I'll be able to use it.",
+        text: "Hello! I am a GPT Code assistant. Ask me to do something for you! Pro tip: you can upload a file and I'll be able to use it.",
         role: "generator",
         type: "message",
       },
@@ -161,19 +161,6 @@ function App() {
   function completeUpload(message: string) {
     addMessage({ text: message, type: "message", role: "upload" });
     setWaitingForSystem(WaitingStates.Idle);
-
-    // Inform prompt server
-    fetch(`${Config.WEB_ADDRESS}/inject-context`, {
-      method: "POST",
-      headers: {
-        "Content-Type": "application/json",
-      },
-      body: JSON.stringify({
-        prompt: message,
-      }),
-    })
-      .then(() => {})
-      .catch((error) => console.error("Error:", error));
   }

   function startUpload(_: string) {

frontend/src/components/Chat.tsx

Lines changed: 20 additions & 2 deletions

@@ -72,15 +72,33 @@ function Message(props: {
           <div className="cell-output" dangerouslySetInnerHTML={{ __html: text }}></div>
         ))}

-      {(props.type == "message_raw") &&
+      {props.type == "message_error" &&
+        (props.showLoader ? (
+          <div>
+            {text} {props.showLoader ? <div className="loader"></div> : null}
+          </div>
+        ) : (
+          <div>
+            Execution Error:
+            <SyntaxHighlighter
+              {...props}
+              children={text}
+              wrapLongLines={true}
+              language={"python"}
+              PreTag="div"
+            />
+          </div>
+        ))}
+
+      {props.type == "message_raw" &&
         (props.showLoader ? (
           <div>
             {text} {props.showLoader ? <div className="loader"></div> : null}
           </div>
         ) : (
           <div className="cell-output" dangerouslySetInnerHTML={{ __html: text }}></div>
         ))}
-
+
       {props.type == "image/png" &&
         <div className="cell-output-image" dangerouslySetInnerHTML={{ __html: `<img src='data:image/png;base64,${text}' />` }}></div>
       }

gpt_code_ui/kernel_program/config.py

Lines changed: 1 addition & 1 deletion

@@ -15,4 +15,4 @@ def get_logger():
     logger = logging.getLogger(__name__)
     if "DEBUG" in os.environ:
         logger.setLevel(logging.DEBUG)
-    return logger
+    return logger

gpt_code_ui/kernel_program/kernel_manager.py

Lines changed: 3 additions & 3 deletions

@@ -56,7 +56,7 @@ def cleanup_spawned_processes():
                 os.kill(pid, signal.CTRL_BREAK_EVENT)
             else:
                 os.kill(pid, signal.SIGKILL)
-
+
             # After successful kill, cleanup pid file
             os.remove(fp)

@@ -149,7 +149,7 @@ def flush_kernel_msgs(kc, tries=1, timeout=0.2):
             elif msg["msg_type"] == "error":
                 send_message(
                     utils.escape_ansi("\n".join(msg["content"]["traceback"])),
-                    "message_raw",
+                    "message_error",
                 )
         except queue.Empty:
             hit_empty += 1
@@ -221,4 +221,4 @@ def start_kernel():

 if __name__ == "__main__":
     kc = start_kernel()
-    start_snakemq(kc)
+    start_snakemq(kc)
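
Aside, not part of the commit: the "message_error" tag introduced above is attached to Jupyter's iopub error replies. Below is a rough sketch of what such a reply looks like and how the hunk above turns it into a frontend message; the concrete exception values are made up, and the commented send_message call only mirrors the diff.

# Hedged sketch: approximate shape of a Jupyter "error" iopub message.
# Only msg_type and content["traceback"] are used in the hunk above;
# the values below are illustrative.
error_msg = {
    "msg_type": "error",
    "content": {
        "ename": "ZeroDivisionError",
        "evalue": "division by zero",
        "traceback": [
            "\x1b[0;31mZeroDivisionError\x1b[0m  Traceback (most recent call last)",
            "\x1b[0;31mZeroDivisionError\x1b[0m: division by zero",
        ],
    },
}

# Mirrors flush_kernel_msgs(): join the coloured lines, strip ANSI escapes,
# and tag the result "message_error" so Chat.tsx renders it in the new branch.
payload = "\n".join(error_msg["content"]["traceback"])
# send_message(utils.escape_ansi(payload), "message_error")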
Lines changed: 1 addition & 1 deletion

@@ -1,4 +1,4 @@
 if __name__ == "__main__":
     from ipykernel import kernelapp as app

-    app.launch_new_instance()
+    app.launch_new_instance()

gpt_code_ui/kernel_program/main.py

Lines changed: 13 additions & 12 deletions

@@ -7,7 +7,6 @@
 import time

 import asyncio
-import json
 import threading

 from queue import Queue
@@ -47,6 +46,7 @@
 app = Flask(__name__)
 CORS(app)

+
 def start_kernel_manager():
     global kernel_manager_process

@@ -62,9 +62,11 @@ def start_kernel_manager():
     with open(os.path.join(config.KERNEL_PID_DIR, "%d.pid" % kernel_manager_process.pid), "w") as p:
         p.write("kernel_manager")

+
 def cleanup_kernel_program():
     kernel_manager.cleanup_spawned_processes()

+
 async def start_snakemq():
     global messaging

@@ -77,11 +79,11 @@ def on_recv(conn, ident, message):
         if message["value"] == "ready":
             logger.debug("Kernel is ready.")
             result_queue.put({
-                "value":"Kernel is ready.",
-                "type": "message"
+                "value": "Kernel is ready.",
+                "type": "message_status"
             })

-        elif message["type"] in ["message", "message_raw", "image/png", "image/jpeg"]:
+        elif message["type"] in ["message", "message_raw", "message_error", "image/png", "image/jpeg"]:
             # TODO: 1:1 kernel <> channel mapping
             logger.debug("%s of type %s" % (message["value"], message["type"]))

@@ -97,8 +99,9 @@ def send_queued_messages():
     while True:
         if send_queue.qsize() > 0:
             message = send_queue.get()
-            utils.send_json(messaging,
-                {"type": "execute", "value": message["command"]},
+            utils.send_json(
+                messaging,
+                {"type": "execute", "value": message["command"]},
                 config.IDENT_KERNEL_MANAGER
             )
         time.sleep(0.1)
@@ -117,7 +120,7 @@ async def async_link_loop():

 @app.route("/api", methods=["POST", "GET"])
 def handle_request():
-
+
     if request.method == "GET":
         # Handle GET requests by sending everything that's in the receive_queue
         results = [result_queue.get() for _ in range(result_queue.qsize())]
@@ -128,7 +131,8 @@ def handle_request():
         send_queue.put(data)

     return jsonify({"result": "success"})
-
+
+
 @app.route("/restart", methods=["POST"])
 def handle_restart():

@@ -152,9 +156,6 @@ async def main():
 def run_flask_app():
     app.run(host="0.0.0.0", port=APP_PORT)

+
 if __name__ == "__main__":
     asyncio.run(main())
-
-
-
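
Aside, not part of the commit: the GET branch of handle_request() above empties result_queue with a list comprehension. A self-contained sketch of that drain pattern; the queued dicts are illustrative.

from queue import Queue

result_queue = Queue()
result_queue.put({"value": "Kernel is ready.", "type": "message_status"})
result_queue.put({"value": "hello", "type": "message"})

# qsize() is sampled once, then exactly that many items are popped, so
# messages arriving mid-drain simply wait for the next poll.
results = [result_queue.get() for _ in range(result_queue.qsize())]
print(results)               # the two dicts above, in FIFO order
print(result_queue.qsize())  # 0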

gpt_code_ui/kernel_program/utils.py

Lines changed: 3 additions & 1 deletion

@@ -7,6 +7,7 @@

 import gpt_code_ui.kernel_program.config as config

+
 def escape_ansi(line):
     ansi_escape = re.compile(r"(?:\x1B[@-_]|[\x80-\x9F])[0-?]*[ -/]*[@-~]")
     return ansi_escape.sub("", line)
@@ -16,6 +17,7 @@ def send_json(messaging, message, identity):
     message = snakemq.message.Message(json.dumps(message).encode("utf-8"), ttl=600)
     messaging.send_message(identity, message)

+
 def init_snakemq(ident, init_type="listen"):
     link = snakemq.link.Link()
     packeter = snakemq.packeter.Packeter(link)
@@ -26,4 +28,4 @@ def init_snakemq(ident, init_type="listen"):
         link.add_connector(("localhost", config.SNAKEMQ_PORT))
     else:
         raise Exception("Unsupported init type.")
-    return messaging, link
+    return messaging, link
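
Aside, not part of the commit: escape_ansi() shown above can be exercised on its own. A minimal sketch, with a made-up coloured input line.

import re

# Same pattern as escape_ansi() in the diff above.
ansi_escape = re.compile(r"(?:\x1B[@-_]|[\x80-\x9F])[0-?]*[ -/]*[@-~]")


def escape_ansi(line):
    return ansi_escape.sub("", line)


# A traceback line coloured the way a kernel would emit it.
colored = "\x1b[0;31mZeroDivisionError\x1b[0m: division by zero"
print(escape_ansi(colored))  # -> ZeroDivisionError: division by zero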

gpt_code_ui/main.py

Lines changed: 28 additions & 24 deletions

@@ -16,20 +16,23 @@

 APP_URL = "http://localhost:%s" % APP_PORT

+
 def run_webapp():
     try:
         app.run(host="0.0.0.0", port=APP_PORT, use_reloader=False)
-    except Exception as e:
-        logging.exception("Error running the webapp:")
+    except Exception:
+        logging.exception("Error running the webapp")
         sys.exit(1)

+
 def run_kernel_program():
     try:
         asyncio.run(kernel_program_main())
-    except Exception as e:
-        logging.exception("Error running the kernel_program:")
+    except Exception:
+        logging.exception("Error running the kernel_program")
         sys.exit(1)

+
 def setup_logging():
     log_format = "%(asctime)s [%(levelname)s]: %(message)s"
     logging.basicConfig(level=logging.INFO, format=log_format)
@@ -38,32 +41,33 @@ def setup_logging():
     file_handler.setFormatter(logging.Formatter(log_format))
     logging.getLogger().addHandler(file_handler)

+
 def print_color(text, color="gray"):
     # Default to gray
-    code="242"
+    code = "242"

     if color == "green":
-        code="35"
-
+        code = "35"
+
     gray_code = "\033[38;5;%sm" % code
     reset_code = "\033[0m"
     print(f"{gray_code}{text}{reset_code}")


 def print_banner():
-
-    print("""
+    print("""
 █▀▀ █▀█ ▀█▀ ▄▄ █▀▀ █▀█ █▀▄ █▀▀
 █▄█ █▀▀ ░█░ ░░ █▄▄ █▄█ █▄▀ ██▄
-    """)
+    """)
+
+    print("> Open GPT-Code UI in your browser %s" % APP_URL)
+    print("")
+    print("You can inspect detailed logs in app.log.")
+    print("")
+    print("Find your OpenAI API key at https://platform.openai.com/account/api-keys")
+    print("")
+    print_color("Contribute to GPT-Code UI at https://github.com/ricklamers/gpt-code-ui")

-    print("> Open GPT-Code UI in your browser %s" % APP_URL)
-    print("")
-    print("You can inspect detailed logs in app.log.")
-    print("")
-    print("Find your OpenAI API key at https://platform.openai.com/account/api-keys")
-    print("")
-    print_color("Contribute to GPT-Code UI at https://github.com/ricklamers/gpt-code-ui")

 def main():
     setup_logging()
@@ -80,20 +84,19 @@ def main():
             try:
                 app.test_client().get("/")
                 break
-            except:
+            except Exception:
                 time.sleep(0.1)
-
-        print_banner()
-
+
+        print_banner()
+
         webbrowser.open(APP_URL)

         webapp_process.join()
         kernel_program_process.join()

-
     except KeyboardInterrupt:
         print("Terminating processes...")
-
+
         cleanup_kernel_program()
         kernel_program_process.terminate()

@@ -103,6 +106,7 @@ def main():
         kernel_program_process.join()

         print("Processes terminated.")
-
+
+
 if __name__ == '__main__':
     main()
