diff --git a/config/temi_config.json b/config/temi_config.json
deleted file mode 100644
index 3dbc720..0000000
--- a/config/temi_config.json
+++ /dev/null
@@ -1,4 +0,0 @@
-{
-    "model_path": "~/llm_models/model.gguf",
-    "type": "local"
-}
diff --git a/configs/sevrer_config.json b/configs/sevrer_config.json
new file mode 100644
index 0000000..14a676a
--- /dev/null
+++ b/configs/sevrer_config.json
@@ -0,0 +1,3 @@
+{
+    "url": "http://127.0.0.1:8080"
+}
\ No newline at end of file
diff --git a/playground/sever_config.json b/playground/sever_config.json
new file mode 100644
index 0000000..9025df2
--- /dev/null
+++ b/playground/sever_config.json
@@ -0,0 +1,5 @@
+{
+    "server_executable": "llama.cpp/build/bin/server",
+    "model_path": "/home/namtd/workspace/personal/kicopilot/playground/models/model.gguf",
+    "c": 2048
+}
\ No newline at end of file
diff --git a/scripts/build_llamacpp.sh b/scripts/build_llamacpp.sh
deleted file mode 100755
index f93dd2b..0000000
--- a/scripts/build_llamacpp.sh
+++ /dev/null
@@ -1,9 +0,0 @@
-cd llama.cpp
-mkdir -p build
-rm -rf build/*
-cd build
-cmake .. -DLLAMA_BLAS=ON -DLLAMA_BLAS_VENDOR=OpenBLAS
-cmake --build . --config Release -- -j$(nproc)
-
-cd ../..
-cp llama.cpp/build/bin/main temi-packaging/usr/local/bin/temicore
diff --git a/scripts/install_temi.sh b/scripts/install_temi.sh
deleted file mode 100755
index ddc21fc..0000000
--- a/scripts/install_temi.sh
+++ /dev/null
@@ -1,2 +0,0 @@
-dpkg-deb --build temi-packaging temi_v1.0.1.deb
-sudo dpkg -i temi_v1.0.1.deb
diff --git a/temi-packaging/usr/local/bin/prompt.txt b/scripts/llamacpp_docker.sh
similarity index 100%
rename from temi-packaging/usr/local/bin/prompt.txt
rename to scripts/llamacpp_docker.sh
diff --git a/scripts/llamacpp_server.sh b/scripts/llamacpp_server.sh
new file mode 100755
index 0000000..087af7c
--- /dev/null
+++ b/scripts/llamacpp_server.sh
@@ -0,0 +1,13 @@
+if [ ! -d llama.cpp/build ]; then
+    echo "Building llama.cpp ..."
+    cd llama.cpp
+    mkdir -p build
+    rm -rf build/*
+    cd build
+    cmake .. -DLLAMA_BLAS=ON -DLLAMA_BLAS_VENDOR=OpenBLAS
+    cmake --build . --config Release -- -j$(nproc)
+    cd ../..
+fi
+
+echo "Starting the llama.cpp server ..."
+./llama.cpp/build/bin/server -m /home/namtd/workspace/personal/kicopilot/playground/models/model.gguf -c 128 --host 0.0.0.0 --port 8080
diff --git a/scripts/update_llamacpp.sh b/scripts/llamacpp_update.sh
similarity index 100%
rename from scripts/update_llamacpp.sh
rename to scripts/llamacpp_update.sh
diff --git a/setup.py b/setup.py
new file mode 100644
index 0000000..2e8e13e
--- /dev/null
+++ b/setup.py
@@ -0,0 +1,28 @@
+from setuptools import setup, find_packages
+
+setup(
+    name='temi',
+    version='2.0.0',
+    packages=find_packages(),
+    entry_points={
+        'console_scripts': [
+            'temi = src.temi_interface:main',
+        ],
+    },
+    # Add all necessary package requirements here
+    install_requires=[
+        'requests',
+        # Add other dependencies needed for your package
+    ],
+    # Metadata
+    author='Nam Tran',
+    author_email='trannam.ase@gmail.com',
+    description='An assistant in your terminal powered by llama.cpp',
+    long_description=open('README.md').read(),
+    long_description_content_type='text/markdown',
+    url='https://github.com/namtranase/terminalmind',  # Use the URL to the github repo.
+    project_urls={
+        'Source': 'https://github.com/namtranase/terminalmind',
+        # Add any other relevant links here
+    },
+)
diff --git a/src/__init__.py b/src/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/src/modules/__init__.py b/src/modules/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/src/modules/assistant/__init__.py b/src/modules/assistant/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/src/modules/assistant/assistant.py b/src/modules/assistant/assistant.py
new file mode 100644
index 0000000..a16a7cd
--- /dev/null
+++ b/src/modules/assistant/assistant.py
@@ -0,0 +1,30 @@
+class AssistantGit:
+    def __init__(self) -> None:
+        pass
+
+    def get_response(self, api_func, user_input):
+        completion_options = {
+            "temperature": 0.7,
+            "top_k": 50,
+            # ... include other options as needed ...
+        }
+        prompt = f"### Human: {user_input}\n### Assistant: "
+        response = api_func(prompt, **completion_options)
+
+        return response
+
+
+class AssistantTerminal:
+    def __init__(self) -> None:
+        pass
+
+    def get_response(self, api_func, user_input):
+        completion_options = {
+            "temperature": 0.7,
+            "top_k": 50,
+            # ... include other options as needed ...
+        }
+        prompt = f"### Human: {user_input}\n### Assistant: "
+        response = api_func(prompt, **completion_options)
+
+        return response
\ No newline at end of file
diff --git a/src/modules/chat/chat_model.py b/src/modules/chat/chat_model.py
new file mode 100644
index 0000000..ec6c7ba
--- /dev/null
+++ b/src/modules/chat/chat_model.py
@@ -0,0 +1,17 @@
+class ChatModel:
+    def __init__(self, instruction):
+        self.instruction = instruction
+        self.chat_history = []
+
+    def format_chat_prompt(self, user_input):
+        formatted_chat = f"{self.instruction}\n"
+        for i, message in enumerate(self.chat_history):
+            speaker = "Human" if i % 2 == 0 else "Assistant"
+            formatted_chat += f"### {speaker}: {message}\n"
+        formatted_chat += f"### Human: {user_input}\n### Assistant: "
+        return formatted_chat
+
+    def format_prompt(self, user_input):
+        formatted_chat = f"{self.instruction}\n"
+        formatted_chat += f"### Human: {user_input}\n### Assistant: "
+        return formatted_chat
diff --git a/src/modules/function_calling/__init__.py b/src/modules/function_calling/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/src/modules/function_calling/function_model.py b/src/modules/function_calling/function_model.py
new file mode 100644
index 0000000..e70a2ce
--- /dev/null
+++ b/src/modules/function_calling/function_model.py
@@ -0,0 +1,21 @@
+class ChatModel:
+    def __init__(self, instruction):
+        self.instruction = instruction
+        self.chat_history = []
+
+    def format_chat_prompt(self, user_input):
+        formatted_chat = f"{self.instruction}\n"
+        for i, message in enumerate(self.chat_history):
+            speaker = "Human" if i % 2 == 0 else "Assistant"
+            formatted_chat += f"### {speaker}: {message}\n"
+        formatted_chat += f"### Human: {user_input}\n### Assistant: "
+        return formatted_chat
+
+    def format_prompt(self, user_input):
+        formatted_chat = f"{self.instruction}\n"
+        formatted_chat += f"### Human: {user_input}\n### Assistant: "
+        return formatted_chat
+
+class TermFuncCall:
+    def __init__(self) -> None:
+        pass
\ No newline at end of file
diff --git a/src/prompt_wrapper.py b/src/prompt_wrapper.py
new file mode 100644
index 0000000..60a5754
--- /dev/null
+++ b/src/prompt_wrapper.py
@@ -0,0 +1,9 @@
+class PromptWrapper:
+    def __init__(self) -> None:
+        pass
+
+    def get_assistant_prompt(self):
+        return None
+
+    def get_function_call_prompt(self):
+        return None
\ No newline at end of file
diff --git a/src/llamacpp_api/server_wrapper.py b/src/server_wrapper.py
similarity index 100%
rename from src/llamacpp_api/server_wrapper.py
rename to src/server_wrapper.py
diff --git a/src/temi_interface.py b/src/temi_interface.py
new file mode 100644
index 0000000..6ab33bb
--- /dev/null
+++ b/src/temi_interface.py
@@ -0,0 +1,62 @@
+import json
+
+from .server_wrapper import ServerWrapper
+from .prompt_wrapper import PromptWrapper
+from .modules.assistant.assistant import AssistantTerminal, AssistantGit
+from .modules.function_calling.function_model import TermFuncCall
+
+import argparse
+
+class TemiInterface:
+    def __init__(self, config_path="configs/sevrer_config.json"):
+        config = None
+        with open(config_path, "r", encoding="utf-8") as config_file:
+            config = json.load(config_file)
+
+        self.config = config
+        self.server_wrapper = ServerWrapper(config=config)
+        self.prompt_wrapper = PromptWrapper()
+        self.ast_terminal = AssistantTerminal()
+        self.ast_git = AssistantGit()
+        self.term_func = TermFuncCall()
+
+    def check_server_status(self):
+        status = self.server_wrapper.check_server()
+        return status
+
+    def handle_assistant(self, user_input):
+        print(user_input)
+        if "git" in user_input:
+            return self.ast_git.get_response(self.server_wrapper.generate_completion, user_input)
+        else:
+            return self.ast_terminal.get_response(self.server_wrapper.generate_completion, user_input)
+
+
+def main():
+    parser = argparse.ArgumentParser(description='temi - Your Terminal Assistant')
+    subparsers = parser.add_subparsers(dest='command')
+
+    # Parser for "check server" command
+    server_parser = subparsers.add_parser('check', help='Check the temi server status')
+    server_parser.add_argument('status', nargs='?', help='The "status" argument to check the server')
+
+    # Parser for "assistant" command
+    query_parser = subparsers.add_parser('assistant', help='Assistant mode')
+    query_parser.add_argument('user_input', nargs='+', help='The question or command for temi to process')
+
+    args = parser.parse_args()
+    temi_interface = TemiInterface("/home/namtd/workspace/personal/terminalmind/configs/sevrer_config.json")
+
+    if args.command == 'check':
+        status = temi_interface.check_server_status()
+        print(status)
+    elif args.command == 'assistant':
+        # Join the list of user_input into a single string
+        user_input = ' '.join(args.user_input)
+        answer = temi_interface.handle_assistant(user_input)
+        print(answer)
+    else:
+        parser.print_help()
+
+if __name__ == "__main__":
+    main()
diff --git a/src/utils/__init__.py b/src/utils/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/src/utils/helpers.py b/src/utils/helpers.py
new file mode 100644
index 0000000..4108127
--- /dev/null
+++ b/src/utils/helpers.py
@@ -0,0 +1,2 @@
+def trim(s):
+    return s.strip()
diff --git a/temi-packaging/DEBIAN/control b/temi-packaging/DEBIAN/control
deleted file mode 100644
index 2c5f360..0000000
--- a/temi-packaging/DEBIAN/control
+++ /dev/null
@@ -1,8 +0,0 @@
-Package: temi
-Version: 1.0.2
-Section: base
-Priority: optional
-Architecture: amd64
-Maintainer: Nam Tran
-Depends: python3, python3-requests, python3-bs4
-Description: AI assistant command line tool
diff --git a/temi-packaging/DEBIAN/postinst b/temi-packaging/DEBIAN/postinst
deleted file mode 100755
index b4cd8d4..0000000
--- a/temi-packaging/DEBIAN/postinst
+++ /dev/null
@@ -1,14 +0,0 @@
-#!/bin/bash
-
-# Function to check if PyPDF2 is installed
-check_and_install_pypdf2() {
-    if ! python3 -c "import PyPDF2" &> /dev/null; then
-        echo "PyPDF2 is not installed. Installing it now..."
-        pip3 install PyPDF2
-    else
-        echo "PyPDF2 is already installed."
-    fi
-}
-
-# Call the function
-check_and_install_pypdf2
diff --git a/temi-packaging/usr/local/bin/config_setup.sh b/temi-packaging/usr/local/bin/config_setup.sh
deleted file mode 100644
index 0c55de8..0000000
--- a/temi-packaging/usr/local/bin/config_setup.sh
+++ /dev/null
@@ -1,61 +0,0 @@
-#!/bin/bash
-
-# Define the directory where the script is located
-SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" &>/dev/null && pwd)"
-
-# Path to the configuration directory and default configuration file
-CONFIG_DIR="$HOME/.config/temi"
-CONFIG_FILE="$CONFIG_DIR/temi_config.json"
-
-# Ensure jq is installed
-if ! command -v jq &>/dev/null; then
-    echo "jq is not installed. Please install jq to use this script."
-    exit 1
-fi
-
-# Function to load the configuration from the JSON file
-load_config() {
-    if [ ! -f "$CONFIG_FILE" ]; then
-        # Provide a default configuration if the config file is missing
-        mkdir -p "$CONFIG_DIR"
-        echo '{"model_path": "~/llm_models/model.gguf", "type": "local"}' > "$CONFIG_FILE"
-        echo "Default configuration created at '$CONFIG_FILE'."
-    fi
-
-    MODEL_PATH=$(jq -r '.model_path' "$CONFIG_FILE")
-    MODEL_TYPE=$(jq -r '.type' "$CONFIG_FILE")
-
-    # Resolve tilde and parameter expansion
-    MODEL_PATH=$(eval echo "$MODEL_PATH")
-
-    # Check if the model path is valid
-    if [ "$MODEL_TYPE" == "local" ] && [ ! -f "$MODEL_PATH" ]; then
-        echo "Invalid model path in configuration: '$MODEL_PATH'"
-    fi
-}
-
-# Function to update the configuration file
-update_config() {
-    local new_config_path="$1"
-
-    if [ ! -f "$new_config_path" ]; then
-        echo "The provided configuration file does not exist: '$new_config_path'"
-        exit 1
-    fi
-
-    # Copy the new configuration file to the standard location
-    cp "$new_config_path" "$CONFIG_FILE"
-    echo "Configuration updated successfully."
-
-    # Reload the configuration
-    load_config
-}
-
-# Check if the user wants to update the configuration
-if [[ "$1" == "update_config" ]]; then
-    update_config "$2"
-    exit 0
-fi
-
-# Load configuration settings
-load_config
diff --git a/temi-packaging/usr/local/bin/extract_pdf_content.py b/temi-packaging/usr/local/bin/extract_pdf_content.py
deleted file mode 100644
index d1b5a9a..0000000
--- a/temi-packaging/usr/local/bin/extract_pdf_content.py
+++ /dev/null
@@ -1,29 +0,0 @@
-"""Extract PDF content based on user's input.
-"""
-import sys
-import PyPDF2
-
-
-def extract_text_from_pdf(pdf_path):
-    """Extract text from pdf file."""
-    with open(pdf_path, "rb") as file:
-        reader = PyPDF2.PdfReader(file)
-        text = ""
-        for page in reader.pages:
-            text += page.extract_text() + "\n"
-    return text
-
-
-def main():
-    """Main function to extract txt by keyword."""
-    if len(sys.argv) != 2:
-        print("Usage: extract_text.py ")
-        sys.exit(1)
-
-    pdf_path = sys.argv[1]
-    text = extract_text_from_pdf(pdf_path)
-    print(text)
-
-
-if __name__ == "__main__":
-    main()
diff --git a/temi-packaging/usr/local/bin/fetch_article_content.py b/temi-packaging/usr/local/bin/fetch_article_content.py
deleted file mode 100755
index bd93a94..0000000
--- a/temi-packaging/usr/local/bin/fetch_article_content.py
+++ /dev/null
@@ -1,34 +0,0 @@
-#!/usr/bin/env python3
-"""Fetch the article content based on user's url.
-"""
-import sys
-import requests
-from bs4 import BeautifulSoup
-
-
-def fetch_article_content(url):
-    """Fetch article content."""
-    try:
-        response = requests.get(url)
-        response.raise_for_status()
-        soup = BeautifulSoup(response.text, "html.parser")
-        article = soup.find("article")
-        paragraphs = article.find_all("p")
-        return " ".join([para.get_text() for para in paragraphs])
-    except Exception as e:
-        return f"Error: {e}"
-
-
-def main():
-    """Fetch the article content."""
-    if len(sys.argv) != 2:
-        print("Usage: fetch_article_content.py ")
-        sys.exit(1)
-
-    url = sys.argv[1]
-    content = fetch_article_content(url)
-    print(content)
-
-
-if __name__ == "__main__":
-    main()
diff --git a/temi-packaging/usr/local/bin/temi b/temi-packaging/usr/local/bin/temi
deleted file mode 100755
index 9e9a6f1..0000000
--- a/temi-packaging/usr/local/bin/temi
+++ /dev/null
@@ -1,176 +0,0 @@
-#!/bin/bash
-
-# Get the directory of the current script
-SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" &>/dev/null && pwd)"
-
-EXTRACT_PDF_SCRIPT="$SCRIPT_DIR/extract_pdf_content.py"
-FETCH_ARTICLE_SCRIPT="$SCRIPT_DIR/fetch_article_content.py"
-PROMPT_FILE="$SCRIPT_DIR/prompt.txt"
-TEMICORE="$SCRIPT_DIR/temicore"
-
-# Source the configuration setup script
-source "$SCRIPT_DIR/config_setup.sh"
-
-# Function to validate if the model path ends with .gguf
-validate_model_path() {
-    if [[ $1 != *.gguf ]]; then
-        echo "temi only supports .gguf models for now."
-        exit 1
-    fi
-}
-
-show_help() {
-    echo -e "\033[1mUsage:\033[0m"
-    echo "  temi [COMMAND] [ARGUMENTS]... [OPTIONS]"
-    echo
-    echo -e "\033[1mCommands:\033[0m"
-    echo "  summary|summarize              Summarize the content from a given URL."
-    echo "  RAG|retrieve|Retrieve KEYWORD  Retrieve information given keyword from a PDF file."
-    echo "  change_model                   Change the model configuration."
-    echo "  update_config FILE             Update the configuration with the given JSON file."
-    echo
-    echo -e "\033[1mOptions:\033[0m"
-    echo "  --help                         Show this help message and exit."
-    echo
-    echo -e "\033[1mExamples:\033[0m"
-    echo "  temi summary https://example.com/article"
-    echo "  temi retrieve with keyword: test, path/to/file.pdf"
-    echo "  temi change_model"
-    echo "  temi update_config path/to/config.json"
-    echo
-    echo "For more information, visit: https://github.com/namtranase/terminalmind"
-    echo
-    echo -e "\033[1mNote:\033[0m Replace [COMMAND] with one of the commands above,"
-    echo "and [ARGUMENTS]... with the appropriate input for the command."
-}
-
-# Check for --help option
-if [[ "$1" == "--help" ]]; then
-    show_help
-    exit 0
-fi
-
-# Function to extract URL from input
-extract_url() {
-    echo "$1" | grep -o 'http[s]*://[^ ]*'
-}
-
-# Function to extract PDF file path or URL from input
-extract_pdf_path() {
-    # Use grep with a regular expression to find paths or URLs ending in .pdf
-    # This pattern matches both local paths and URLs
-    echo "$1" | grep -o -E '(http[s]*://[^ ]*\.pdf|/[^ ]*\.pdf)'
-}
-
-# Function to extract the keyword phrase enclosed in curly braces {}
-extract_keyword() {
-    local input=$1
-    local keyword=""
-
-    # Extract the keyword following 'keyword:' and ending at the comma
-    if echo "$input" | grep -q 'keyword:'; then
-        keyword=$(echo "$input" | grep -o 'keyword: [^,]*' | cut -d':' -f2 | cut -d',' -f1 | xargs)
-    fi
-
-    echo "$keyword"
-}
-
-change_model() {
-    echo "Please enter the new absolute path to your .gguf model file:"
-    read -r new_model_path
-
-    # Validate the provided model path
-    validate_model_path "$new_model_path"
-
-    # Save the new model path to the configuration file
-    echo "MODEL_PATH=\"$new_model_path\"" > "$CONFIG_FILE"
-    MODEL_PATH="$new_model_path"
-
-    echo "Model path changed successfully."
-}
-
-# Check if the user wants to change the model
-if [[ $1 == "change_model" ]]; then
-    change_model
-    exit 0
-fi
-
-# Check if the model path is set and valid
-if [ -z "$MODEL_PATH" ] || [ ! -f "$MODEL_PATH" ]; then
-    echo "Model file not found. Please enter the absolute path to your .gguf model file:"
-    read -r user_model_path
-
-    # Validate the provided model path
-    validate_model_path "$user_model_path"
-
-    # Save the model path to the configuration file
-    echo "MODEL_PATH=\"$user_model_path\"" > "$CONFIG_FILE"
-    MODEL_PATH="$user_model_path"
-fi
-
-# Concatenate all input arguments into a single string
-input_string="$*"
-
-if [[ ! $input_string =~ summary|summarize|RAG|Retrieve|retrieve ]]; then
-    output=$($TEMICORE 2>/dev/null -m $MODEL_PATH -n 128 -t 0.8 -p "[INST] $* - Write at most 50 words in new line [/INST]" | sed 's/\[INST\].*\[\/INST\]//g')
-    echo "$output"
-# Check if input contains 'summary' or 'summarize'
-elif [[ $input_string =~ summary|summarize ]]; then
-    # Extract URL from input
-    url=$(extract_url "$input_string")
-    if [ -z "$url" ]; then
-        echo "No URL found in input."
-        exit 1
-    fi
-
-    # Fetch article content using Python script
-    content=$(python3 $FETCH_ARTICLE_SCRIPT "$url")
-
-    # Check if content was successfully fetched
-    if [[ $content == Error:* ]]; then
-        echo "$content"
-        exit 1
-    fi
-    # Prepare the prompt and write to a file
-    {
-        echo "[INST] Summary the content: ${content} - Write at most 50 words in new line [/INST]"
-    } &> $PROMPT_FILE
-
-
-    # Call LLM model with content
-    output=$($TEMICORE 2>/dev/null -m $MODEL_PATH -n 128 -t 0.8 --file $PROMPT_FILE | sed 's/\[INST\].*\[\/INST\]//g')
-    echo "$output"
-
-# RAG with PDF Input Use Case
-elif [[ $input_string =~ RAG|retrieve|Retrieve ]]; then
-    pdf_path=$(extract_pdf_path "$input_string")
-    if [ -z "$pdf_path" ]; then
-        echo "No PDF file path found in input."
-        exit 1
-    fi
-    keyword=$(extract_keyword "$input_string")
-    if [ -z "$keyword" ]; then
-        echo "No keyword found in input."
-        exit 1
-    fi
-    echo "PDF file: $pdf_path"
-    echo "Search keywork: $keyword"
-    content=$(python3 $EXTRACT_PDF_SCRIPT "$pdf_path")
-    if [[ -z $content ]]; then
-        echo "Failed to extract text from the PDF."
-        exit 1
-    fi
-    # Prepare the prompt and write to a file
-    {
-        echo "[INST] Get the ${keyword} information from: ${content} - Write at most 50 words in new line [/INST]"
-    } &> $PROMPT_FILE
-
-    # Call LLM model with content
-    output=$($TEMICORE 2>/dev/null -m $MODEL_PATH -n 128 -t 0.8 --file $PROMPT_FILE | sed 's/\[INST\].*\[\/INST\]//g')
-    echo "$output"
-
-# Other cases or default behavior
-else
-    # Handle any other cases or provide a default behavior
-    echo "Command not recognized. Please provide a valid command."
-fi
diff --git a/temi-packaging/usr/local/bin/temicore b/temi-packaging/usr/local/bin/temicore
deleted file mode 100755
index 6517bef..0000000
Binary files a/temi-packaging/usr/local/bin/temicore and /dev/null differ
diff --git a/tests/test_server_api.py b/tests/test_server_api.py
new file mode 100644
index 0000000..4b884e8
--- /dev/null
+++ b/tests/test_server_api.py
@@ -0,0 +1 @@
+#TODO: Add API test functions
\ No newline at end of file