-
Notifications
You must be signed in to change notification settings - Fork 305
Expand file tree
/
Copy pathChatbot.py
More file actions
149 lines (125 loc) · 6.05 KB
/
Chatbot.py
File metadata and controls
149 lines (125 loc) · 6.05 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
from chatbot.chatbot_thread import OllamaWorker
from PyQt5.QtWidgets import QWidget, QHBoxLayout, QTextEdit, QVBoxLayout, QLineEdit, QPushButton
from PyQt5.QtCore import QSize
from PyQt5.QtGui import QIcon
from PyQt5.QtWidgets import QApplication
from configuration.Appconfig import Appconfig
import os

# Platform-dependent setup: `pathmagic` is imported for its side effects
# (sys.path adjustment — hence the noqa), and `init_path` is the prefix
# later prepended to image asset paths (see the clear-button icon below).
# NOTE(review): the '../../' prefix presumably points at the project root
# relative to the non-Windows working directory — confirm against launcher.
if os.name == 'nt':
    from frontEnd import pathmagic  # noqa:F401
    init_path = ''
else:
    import pathmagic  # noqa:F401
    init_path = '../../'
class ChatbotGUI(QWidget):
    """Floating AI-chatbot window for eSim.

    Sends user queries (optionally augmented with the current project's
    SPICE netlist) to an ``OllamaWorker`` thread and displays the replies.
    The window is fixed-size and docks itself to the bottom-right corner
    of the screen.
    """

    def __init__(self):
        super().__init__()
        self.setWindowTitle("AI Chatbot")
        self.setFixedSize(400, 250)
        # Rolling transcript; trimmed to the last 4 entries in ask_ollama().
        self.chat_history = []

        layout = QVBoxLayout(self)

        # Read-only transcript area.
        self.chat_display = QTextEdit(self, readOnly=True)
        layout.addWidget(self.chat_display)

        # Input row: line edit (Enter submits) + clear-session button.
        input_layout = QHBoxLayout()
        self.user_input = QLineEdit(self, placeholderText="Type your query here...")
        self.user_input.setStyleSheet("font-size: 14px;")
        self.user_input.returnPressed.connect(self.ask_ollama)
        input_layout.addWidget(self.user_input)

        self.clear_button = QPushButton(self, icon=QIcon(init_path + 'images/clear.png'))
        self.clear_button.setIconSize(QSize(18, 18))
        self.clear_button.setStyleSheet("font-size: 14px; padding: 5px;")
        self.clear_button.clicked.connect(self.clear_session)
        input_layout.addWidget(self.clear_button)

        layout.addLayout(input_layout)
        self.move_to_bottom_right()

    def get_netlist_content(self):
        """Find and read the current project's ``.cir`` file.

        Returns the netlist text, or ``None`` when no project is open,
        the file is missing, or reading fails (best-effort: errors are
        logged to stdout, never raised to the GUI).
        """
        try:
            self.obj_appconfig = Appconfig()
            proj_info = self.obj_appconfig.current_project
            if proj_info and "ProjectName" in proj_info:
                # "ProjectName" actually holds the project *directory*;
                # the .cir file inside it shares the directory's basename.
                proj_dir = proj_info["ProjectName"]
                proj_name = os.path.basename(proj_dir.rstrip(os.sep))
                netlist_path = os.path.join(proj_dir, f"{proj_name}.cir")
                if os.path.exists(netlist_path):
                    with open(netlist_path, "r") as f:
                        return f.read()
        except Exception as e:
            # Deliberate best-effort: the chatbot still works without context.
            print(f"Error fetching netlist: {e}")
        return None

    def ask_ollama(self):
        """Submit the typed query (with netlist context, if any) to the worker."""
        user_text = self.user_input.text().strip()
        if not user_text:
            return

        # 1. Fetch netlist context for a project-aware answer.
        netlist = self.get_netlist_content()

        # 2. Update history (bounded to the last 4 entries) and echo input.
        self.chat_history = (self.chat_history + [f"User: {user_text}"])[-4:]
        self.chat_display.append(f"You: {user_text}")

        # 3. Build a context-aware prompt when a netlist is available.
        if netlist:
            context_prompt = (
                f"Analyze this eSim Netlist:\n{netlist}\n\n"
                f"User Question: {user_text}"
            )
        else:
            context_prompt = user_text

        # 4. Run the request off the GUI thread; keep a reference on self
        #    so the QThread is not garbage-collected mid-run.
        self.worker = OllamaWorker(context_prompt)
        self.worker.response_signal.connect(self.display_response)
        self.worker.start()
        self.user_input.clear()

    def move_to_bottom_right(self):
        """Move the chatbot window to the bottom-right corner of the screen."""
        screen = QApplication.desktop().screenGeometry()
        widget = self.geometry()
        x = screen.width() - widget.width() - 10   # 10px margin from the right
        y = screen.height() - widget.height() - 50  # 50px margin from the bottom
        self.move(x, y)

    def display_response(self, bot_response):
        """Slot: append the bot's response to the display and the history."""
        self.chat_display.append(f"Bot: {bot_response}\n")
        self.chat_history.append(f"Bot: {bot_response}\n")

    def clear_session(self):
        """Clear the chat display and forget the conversation history."""
        self.chat_display.clear()
        self.chat_history = []

    def debug_ollama(self):
        """Send the error log AND netlist to Ollama for failure analysis.

        Expects the (filtered) simulation log to already be in
        ``self.user_input`` — see ``debug_error``.
        """
        # Fixed: was an f-string with no placeholders (F541).
        self.chat_display.append("============ Simulation Failed =============\n")
        error_log = self.user_input.text().strip()

        # Include the netlist so the model can relate errors to components.
        netlist = self.get_netlist_content()
        if netlist:
            combined_query = (
                f"SIMULATION ERROR LOG:\n{error_log}\n\n"
                f"CORRESPONDING NETLIST:\n{netlist}\n\n"
                "Please analyze the error based on this netlist."
            )
        else:
            combined_query = error_log

        self.worker = OllamaWorker(combined_query)
        self.worker.response_signal.connect(self.display_response)
        self.worker.start()
        self.user_input.clear()

    def debug_error(self, log):
        """Filter the ngspice log file and forward it to ``debug_ollama``.

        Keeps the lines before "No compatibility mode selected!" and the
        lines between "Circuit:" and "Total CPU time (seconds)", writes
        them to ``erroroutput.txt`` in the project directory, and triggers
        the AI analysis.
        """
        self.chat_history = []
        if os.path.exists(log):
            with open(log, "r") as f:
                lines = [line for line in f.readlines() if line.strip()]

            no_compat_index = next((i for i, line in enumerate(lines) if "No compatibility mode selected!" in line), None)
            circuit_index = next((i for i, line in enumerate(lines) if "Circuit:" in line), None)
            total_cpu_index = next((i for i, line in enumerate(lines) if "Total CPU time (seconds)" in line), None)

            # Fixed: compare against None explicitly — the old truthiness
            # test dropped the slice when the marker sat at index 0.
            before_no_compat = lines[:no_compat_index] if no_compat_index is not None else []
            between_circuit_and_cpu = lines[circuit_index + 1:total_cpu_index] if circuit_index is not None and total_cpu_index is not None else []

            filtered_lines = before_no_compat + between_circuit_and_cpu
            combined_text = "".join(filtered_lines)
            self.user_input.setText(combined_text)

            self.obj_appconfig = Appconfig()
            self.projDir = self.obj_appconfig.current_project["ProjectName"]
            output_file = os.path.join(self.projDir, "erroroutput.txt")
            with open(output_file, "w") as f:
                f.writelines(filtered_lines)

            self.debug_ollama()