Skip to content
This repository was archived by the owner on Feb 4, 2025. It is now read-only.

Talk codebase #79

Open
wants to merge 2 commits into
base: main
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Empty file added age
Empty file.
16 changes: 10 additions & 6 deletions poetry.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

6 changes: 3 additions & 3 deletions requirements.txt
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@ gitdb==4.0.10
GitPython==3.1.41
gpt4all==0.2.3
halo==0.0.31
huggingface-hub==0.14.1
huggingface-hub
idna==3.7
Jinja2==3.1.3
joblib==1.2.0
Expand All @@ -32,7 +32,7 @@ mypy-extensions==1.0.0
networkx==3.1
nltk==3.8.1
numexpr==2.8.4
numpy==1.24.3
numpy==1.26.2
openai==0.27.7
openapi-schema-pydantic==1.2.4
packaging==23.1
Expand All @@ -56,7 +56,7 @@ tenacity==8.2.2
termcolor==2.3.0
threadpoolctl==3.1.0
tiktoken==0.4.0
tokenizers==0.13.3
tokenizers
torch==2.0.1
torchvision==0.15.2
tqdm==4.65.0
Expand Down
76 changes: 58 additions & 18 deletions talk_codebase/cli.py
Original file line number Diff line number Diff line change
@@ -1,10 +1,12 @@
import sys
import subprocess
import requests
import shutil

import fire

from talk_codebase.config import CONFIGURE_STEPS, save_config, get_config, config_path, remove_api_key, \
remove_model_type, remove_model_name_local
from talk_codebase.consts import DEFAULT_CONFIG
from talk_codebase.config import CONFIGURE_STEPS, save_config, get_config, config_path, configure, remove_configuration
from talk_codebase.consts import DEFAULT_CONFIG, MODEL_TYPES
from talk_codebase.llm import factory_llm
from talk_codebase.utils import get_repo

Expand All @@ -15,45 +17,83 @@ def check_python_version():
sys.exit(1)


def check_ollama_installed():
    """Return True if the ``ollama`` executable can be located on the user's PATH."""
    ollama_path = shutil.which("ollama")
    return ollama_path is not None


def check_ollama_running():
    """Return True if a local Ollama server answers on its default API port (11434)."""
    try:
        response = requests.get("http://localhost:11434/api/tags")
    except requests.RequestException:
        # Connection refused / timeout etc. — treat any transport failure as "not running".
        return False
    return response.status_code == 200


def update_config(config):
    """Fill in any missing keys of *config* from DEFAULT_CONFIG, in place, and return it."""
    for default_key, default_value in DEFAULT_CONFIG.items():
        # setdefault only writes when the key is absent, preserving user-set values.
        config.setdefault(default_key, default_value)
    return config


def configure(reset=True):
    """Run the interactive configuration wizard.

    When *reset* is true, previously stored credentials and model choices are
    cleared first; the wizard steps then re-populate them and the result is
    persisted via save_config.
    """
    if reset:
        remove_api_key()
        remove_model_type()
        remove_model_name_local()
    cfg = update_config(get_config())
    for configure_step in CONFIGURE_STEPS:
        configure_step(cfg)
    save_config(cfg)


def chat_loop(llm):
    """Read queries from stdin and forward each to *llm* until the user types exit/quit."""
    print("\n🤖 I'm here to help you understand the codebase. Feel free to ask any questions!")
    while True:
        query = input("👉 ").lower().strip()
        # An empty string can never match the exit words, so checking them first is safe.
        if query in {"exit", "quit"}:
            break
        if not query:
            print("🤖 Please enter a query")
            continue
        print("\n🤖 Analyzing the codebase to provide the best possible answer...")
        llm.send_query(query)


def chat():
    """Entry point for the chat command.

    Ensures a valid configuration exists, verifies the working directory is a
    git repository, validates the selected model backend (API key / endpoint
    for OpenAI-style backends, a running daemon for Ollama), then builds the
    LLM and enters the interactive loop. Exits with status 1 on any
    validation failure.
    """
    # NOTE(review): the original diff left configure()/get_config() and
    # factory_llm()/chat_loop() duplicated (called both unconditionally and
    # again below); this version keeps a single coherent flow.
    config = get_config()
    if not config.get("embedding_model_type") or not config.get("chat_model_type"):
        print("🤖 Configuration not found. Running configuration process...")
        configure(False)
        config = get_config()

    repo = get_repo()
    if not repo:
        print("🤖 Git repository not found")
        sys.exit(1)

    # Mixed backends are unsupported: embeddings and chat must come from the
    # same provider so the vector store and the model agree.
    if config.get("embedding_model_type") != config.get("chat_model_type"):
        print("Error: Embedding and chat model types must be the same.")
        print("Please run 'talk-codebase configure' to set up your configuration correctly.")
        sys.exit(1)

    model_type = config.get("embedding_model_type")

    if model_type in [MODEL_TYPES["OPENAI"], MODEL_TYPES["OPENAI_COMPATIBLE"]]:
        if not config.get("openai_compatible_api_key"):
            print("Error: API key is missing. Please run 'talk-codebase configure' to set up your API key.")
            sys.exit(1)

        if model_type == MODEL_TYPES["OPENAI_COMPATIBLE"] and not config.get("openai_compatible_endpoint"):
            print("Error: API endpoint is missing for OpenAI-compatible setup. Please run 'talk-codebase configure' to set up your API endpoint.")
            sys.exit(1)

    elif model_type == MODEL_TYPES["OLLAMA"]:
        if not check_ollama_installed():
            print("⚠️ Ollama is not found in PATH. Please ensure Ollama is installed and added to your system PATH.")
            print("You can download Ollama from: https://ollama.ai/download")
            sys.exit(1)

        if not check_ollama_running():
            print("⚠️ Ollama is installed but not running. Please start Ollama with 'ollama serve' command.")
            sys.exit(1)

    try:
        llm = factory_llm(repo.working_dir, config)
        chat_loop(llm)
    except ValueError as e:
        # factory_llm raises ValueError for unusable configurations.
        print(f"Error: {str(e)}")
        print("Please run 'talk-codebase configure' to set up your configuration correctly.")
        sys.exit(1)


def main():
Expand Down
Loading