File size: 1,172 Bytes
6ee47c4
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
6b447a5
6ee47c4
6b447a5
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
from model.propmt.prompt_handler import *
from model.llm.llm import *
from model.rag.rag_handler import *
from config import *

class Chat:
    """A single chat session that augments LLM answers with RAG-retrieved context."""

    def __init__(self, chat_id, rag_handler) -> None:
        # Identifier for this conversation.
        self.chat_id = chat_id
        # Parallel logs of incoming user messages and outgoing formatted replies.
        self.message_history = []
        self.response_history = []
        # Prompt builder and LLM client ("gilas" backend); retriever is injected.
        self.prompt_handler = Prompt()
        self.llm = LLM_API_Call("gilas")
        self.rag_handler = rag_handler

    def response(self, message: str) -> str:
        """Answer *message*: retrieve supporting cases, query the LLM, and
        return a markdown reply that appends the retrieved material.

        The raw message and the final reply are recorded in the session
        histories as a side effect.
        """
        self.message_history.append(message)

        retrieved = self.rag_handler.get_information(message)
        answer = self.llm.get_LLM_response(
            prompt=self.prompt_handler.get_prompt(message, retrieved)
        )

        parts = [f"**Response**:\n{answer}\n\n"]
        if retrieved:
            parts.append("The following legal cases and information were retrieved and considered:\n")
            for case_no, item in enumerate(retrieved, start=1):
                # Strip the retriever's "[end]" sentinel from the case body.
                body = item['text'].replace("[end]", "")
                parts.append(f"\n**Case {case_no}:** {item['title']}\n{body}\n")

        reply = "".join(parts)
        self.response_history.append(reply)
        return reply