Skip to content

Commit da39811

Browse files
committed
Refactor and enhance AI sidekick feature in CLI
This commit refactors the AI sidekick feature in the CLI, improving its functionality and user interaction. The changes include:

- Replacing the `task` option with `request` for better clarity.
- Generating the prompt with the appropriate context using the new `generate_sidekick_prompt` function.
- Setting up a continuous loop so the user can ask multiple questions.
- Adding an option for the user to edit their question in their editor.
- Improving the live Markdown display with the `RichLiveCallbackHandler`.

Additionally, a new `prompts` module has been added to handle prompt generation, starting with the sidekick prompt.
1 parent f6266f9 commit da39811

File tree

3 files changed

+65
-21
lines changed

3 files changed

+65
-21
lines changed

aicodebot/cli.py

Lines changed: 44 additions & 21 deletions
Original file line numberDiff line numberDiff line change
@@ -1,10 +1,11 @@
11
from aicodebot import version as aicodebot_version
2-
from aicodebot.agents import get_agent
32
from aicodebot.helpers import exec_and_get_output, get_token_length, git_diff_context
3+
from aicodebot.prompts import generate_sidekick_prompt
44
from dotenv import load_dotenv
55
from langchain.callbacks.base import BaseCallbackHandler
66
from langchain.chains import LLMChain
77
from langchain.chat_models import ChatOpenAI
8+
from langchain.memory import ConversationTokenBufferMemory
89
from langchain.prompts import load_prompt
910
from openai.api_resources import engine
1011
from pathlib import Path
@@ -264,31 +265,53 @@ def review(commit, verbose):
264265

265266

266267
@cli.command
@click.option("--request", "-r", help="What to ask your sidekick to do")
@click.option("-v", "--verbose", count=True)
def sidekick(request, verbose):
    """EXPERIMENTAL: Coding help from your AI sidekick"""
    console.print("This is an experimental feature. Play with it, but don't count on it.", style=warning_style)

    setup_environment()

    # Generate the prompt with the appropriate context, then pick a model
    # large enough to hold the rendered template.
    prompt = generate_sidekick_prompt(request)
    model = get_llm_model(get_token_length(prompt.template))

    llm = ChatOpenAI(
        model=model,
        temperature=PRECISE_TEMPERATURE,
        max_tokens=DEFAULT_MAX_TOKENS * 2,
        verbose=verbose,
        streaming=True,  # stream tokens so the live Markdown display updates as they arrive
    )

    # Set up the chain with a token-bounded conversation memory so a long
    # session doesn't blow out the model's context window.
    chain = LLMChain(
        llm=llm,
        prompt=prompt,
        memory=ConversationTokenBufferMemory(llm=llm, max_token_limit=DEFAULT_MAX_TOKENS),
        verbose=verbose,
    )

    while True:  # continuous loop for multiple questions
        if request:
            user_input = request
            request = None  # clear the command line request once we've handled it
        else:
            user_input = click.prompt(
                "Enter a question OR (q) quit, OR (e) edit for entering a question in your editor\n>>>",
                prompt_suffix="",
            )
            if user_input.lower() == "q":
                break
            elif user_input.lower() == "e":
                user_input = click.edit()
                # BUG FIX: click.edit() returns None when the editor is closed
                # without saving. Don't hand None (or a blank edit) to the
                # chain — just re-prompt.
                if user_input is None or not user_input.strip():
                    continue

        # Stream the response into a live-updating Markdown panel.
        with Live(Markdown(""), auto_refresh=True) as live:
            callback = RichLiveCallbackHandler(live)
            callback.buffer = []  # reset the token buffer for each new question
            llm.callbacks = [callback]
            chain.run(user_input)
292315

293316

294317
# ---------------------------------------------------------------------------- #
@@ -393,7 +416,7 @@ def __init__(self, live):
393416

394417
def on_llm_new_token(self, token, **kwargs):
395418
self.buffer.append(token)
396-
self.live.update(Markdown("".join(self.buffer)))
419+
self.live.update(Markdown("".join(self.buffer), style=bot_style))
397420

398421

399422
if __name__ == "__main__":

aicodebot/prompts/__init__.py

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,3 @@
1+
from .sidekick import generate_sidekick_prompt
2+
3+
__all__ = ["generate_sidekick_prompt"]

aicodebot/prompts/sidekick.py

Lines changed: 18 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,18 @@
1+
from langchain import PromptTemplate
from pathlib import Path

SIDEKICK_PREFIX = """You are a pair programming assistant named AICodeBot, acting as a sidekick to a human developer.
If you aren't sure what to do, you can ask the human for more clarification.

Relevant history:
{history}
End History

Conversation with the human developer:
Human: {task}
AICodeBot:
"""


def generate_sidekick_prompt(task, files=None):
    """Generates a prompt for the sidekick workflow.

    Args:
        task: The user's request. NOTE: currently unused here — the question is
            supplied at chain-run time through the ``{task}`` template variable.
        files: Optional iterable of file paths whose contents are prepended to
            the prompt as extra context. Previously this parameter was silently
            ignored; it is now honored, and omitting it preserves the old
            behavior exactly.

    Returns:
        A PromptTemplate expecting ``history`` and ``task`` variables.

    Raises:
        OSError: If one of the paths in ``files`` cannot be read.
    """
    template = SIDEKICK_PREFIX
    if files:
        # Escape braces so arbitrary file contents can't be mistaken for
        # PromptTemplate variables (which would raise at render time).
        file_context = "\n".join(
            f"=== Contents of {file_name} ===\n"
            + Path(file_name).read_text().replace("{", "{{").replace("}", "}}")
            for file_name in files
        )
        template = f"{file_context}\n\n{template}"
    return PromptTemplate(template=template, input_variables=["history", "task"])

0 commit comments

Comments
 (0)