Ai

This command sends information about the current debugging context to a large language model (OpenAI's GPT-3 completion models by default, with backends for OpenAI chat models and Anthropic's Claude, per the query functions below) and asks it a question supplied by the user. The model's answer is then displayed to the user.
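For example, the question is given as free-form text at the debugger prompt (a hypothetical session; the answer depends on the chosen model and the current context):

```
pwndbg> ai why did this instruction cause a segfault?
```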

dummy = False module-attribute

last_answer: List[str] = [] module-attribute

last_command = None module-attribute

last_pc = None module-attribute

last_question: List[str] = [] module-attribute

parser = argparse.ArgumentParser(description='Ask GPT-3 a question about the current debugging context.') module-attribute

verbosity = 0 module-attribute

ai(question, model, temperature, max_tokens, verbose, list_models=False, command=None)
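A rough sketch of how the pieces listed on this page fit together, written as if inside this module so the other documented names are in scope; the control flow is an assumption based on the signatures, not the actual implementation:

```python
def ai(question, model, temperature, max_tokens, verbose, list_models=False, command=None):
    # Optionally just list the models the configured API key can use.
    if list_models:
        print("\n".join(get_openai_models()))
        return

    # Pair the user's question with the current debugging context
    # (or with the output of a specific command, if one was given).
    prompt = build_prompt(question, command=command)

    # Hand the prompt to whichever backend serves the chosen model.
    answer = query(prompt, model=model, max_tokens=max_tokens, temperature=temperature)

    # Remember the exchange (cf. last_question / last_answer above) so a
    # follow-up question can build on it.
    last_question.append(question)
    last_answer.append(answer)

    print(answer)
```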

build_command_prompt_body(command)

build_context_prompt_body()

build_prompt(question, command=None)
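The three prompt-building helpers above suggest roughly the following shape; the chat-message structure and the system-message wording are assumptions (build_prompt() may equally return a flat string):

```python
def build_prompt(question, command=None):
    # Describe either the whole debugging context (registers, disassembly,
    # stack, ...) or the output of one specific GDB command.
    body = build_command_prompt_body(command) if command is not None else build_context_prompt_body()

    # Chat-style messages; flatten_prompt() below can turn these into a
    # single string for completion-style backends.
    return [
        {"role": "system", "content": "You are a helpful debugging assistant."},  # assumed wording
        {"role": "user", "content": f"{body}\n\n{question}"},
    ]
```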

flatten_prompt(conversation)
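flatten_prompt() presumably turns such a conversation into one block of text for backends that accept only a plain prompt; a minimal sketch under that assumption:

```python
def flatten_prompt(conversation):
    # Join role-tagged messages into a single prompt string, e.g. for
    # query_openai_completions() or query_anthropic().
    return "\n\n".join(f"{message['role']}: {message['content']}" for message in conversation)
```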

get_anthropic_api_key()

get_openai_api_key()

get_openai_models()
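The key getters most plausibly read the conventional environment variables (the real command may also consult a GDB/pwndbg setting, which is not shown here), and get_openai_models() can be served by OpenAI's public /v1/models endpoint:

```python
import os
import requests

def get_openai_api_key():
    # Assumed source: the OPENAI_API_KEY environment variable.
    return os.environ.get("OPENAI_API_KEY")

def get_anthropic_api_key():
    # Assumed source: the ANTHROPIC_API_KEY environment variable.
    return os.environ.get("ANTHROPIC_API_KEY")

def get_openai_models():
    # Ask OpenAI which model IDs the configured key can access.
    response = requests.get(
        "https://api.openai.com/v1/models",
        headers={"Authorization": f"Bearer {get_openai_api_key()}"},
        timeout=30,
    )
    response.raise_for_status()
    return sorted(model["id"] for model in response.json()["data"])
```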

query(prompt, model='text-davinci-003', max_tokens=100, temperature=0.0)
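query() looks like the backend-agnostic entry point; one way to route by model name (the routing rule and the dummy-mode behaviour are assumptions):

```python
def query(prompt, model="text-davinci-003", max_tokens=100, temperature=0.0):
    if dummy:
        # Dummy mode (see set_dummy_mode below): skip the paid API entirely.
        return "dummy mode is enabled; no request was sent"

    if model.startswith("claude"):
        return query_anthropic(flatten_prompt(prompt), model=model,
                               max_tokens=max_tokens, temperature=temperature)
    if model.startswith("gpt-"):
        return query_openai_chat(prompt, model=model,
                                 max_tokens=max_tokens, temperature=temperature)
    return query_openai_completions(flatten_prompt(prompt), model=model,
                                    max_tokens=max_tokens, temperature=temperature)
```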

query_anthropic(prompt, model='claude-v1', max_tokens=100, temperature=0.0)
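A sketch of the Claude backend against Anthropic's legacy text-completion endpoint (the claude-v1 default above predates the newer Messages API); the Human/Assistant prompt framing and stop sequence follow that legacy format:

```python
import requests

def query_anthropic(prompt, model="claude-v1", max_tokens=100, temperature=0.0):
    response = requests.post(
        "https://api.anthropic.com/v1/complete",
        headers={"x-api-key": get_anthropic_api_key(), "Content-Type": "application/json"},
        json={
            "model": model,
            # The legacy API expects a Human/Assistant-framed prompt.
            "prompt": f"\n\nHuman: {prompt}\n\nAssistant:",
            "max_tokens_to_sample": max_tokens,
            "temperature": temperature,
            "stop_sequences": ["\n\nHuman:"],
        },
        timeout=60,
    )
    response.raise_for_status()
    return response.json()["completion"].strip()
```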

query_openai_chat(prompt, model='gpt-3.5-turbo', max_tokens=100, temperature=0.0)
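A minimal sketch of the chat backend using the public Chat Completions REST endpoint directly (no particular OpenAI client library is assumed):

```python
import requests

def query_openai_chat(prompt, model="gpt-3.5-turbo", max_tokens=100, temperature=0.0):
    response = requests.post(
        "https://api.openai.com/v1/chat/completions",
        headers={"Authorization": f"Bearer {get_openai_api_key()}"},
        json={
            "model": model,
            "messages": prompt,  # chat-style [{"role": ..., "content": ...}, ...]
            "max_tokens": max_tokens,
            "temperature": temperature,
        },
        timeout=60,
    )
    response.raise_for_status()
    return response.json()["choices"][0]["message"]["content"].strip()
```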

query_openai_completions(prompt, model='text-davinci-003', max_tokens=100, temperature=0.0)
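The completions backend differs only in endpoint and payload shape, taking a flat text prompt and returning the first choice's text:

```python
import requests

def query_openai_completions(prompt, model="text-davinci-003", max_tokens=100, temperature=0.0):
    response = requests.post(
        "https://api.openai.com/v1/completions",
        headers={"Authorization": f"Bearer {get_openai_api_key()}"},
        json={"model": model, "prompt": prompt,
              "max_tokens": max_tokens, "temperature": temperature},
        timeout=60,
    )
    response.raise_for_status()
    return response.json()["choices"][0]["text"].strip()
```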

set_dummy_mode(d=True)
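set_dummy_mode() presumably flips the module-level dummy flag so the command can be exercised (for example in tests) without contacting a paid API; a sketch of that assumption:

```python
dummy = False  # mirrors the module attribute above

def set_dummy_mode(d=True):
    # When enabled, query() short-circuits instead of calling OpenAI/Anthropic.
    global dummy
    dummy = d
```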