Carlos Isael Ramírez González committed on
Commit 5fc4ff1 · 1 Parent(s): a04ffe2

Changed the memory back to a previous version

Files changed (1)
  1. memory.py +44 -23
memory.py CHANGED
@@ -1,26 +1,47 @@
- from langchain.vectorstores import Chroma
- from langchain.embeddings import HuggingFaceEmbeddings
- from langchain_core.documents import Document

-
- class Memory:
-     def __init__(self):
-         self.embedding_model = HuggingFaceEmbeddings(
-             model_name="sentence-transformers/all-MiniLM-L6-v2"
-         )
-         self.vector_store = Chroma(
-             persist_directory="./chroma_db", embedding_function=self.embedding_model
-         )
          self.schema_cache = None

-     def add_interaction(self, question: str, answer: str, sql: str):
-         document = Document(
-             page_content=f"Pregunta: {question}\nRespuesta: {answer}\nSQL: {sql}",
-             metadata={"source": "interaction"},
-         )
-         self.vector_store.add_documents([document])
-
-     def get_relevant_interactions(self, question: str, top_k=3):
-         results = self.vector_store.similarity_search(question, k=top_k)
-         return "\n".join([d.page_content for d in results])
-
+ from collections import deque
+ import pandas as pd
+ from config import Config

+ class ConversationMemory:
+     def __init__(self, max_history: int = Config.MAX_HISTORY):
+         self.history = deque(maxlen=max_history)
          self.schema_cache = None
+
+     def add_interaction(self, question: str, sql: str, result: str):
+         self.history.append({
+             "question": question,
+             "sql": sql,
+             "result_summary": self._summarize_result(result)
+         })
+
+     def _summarize_result(self, result) -> str:
+         """Executive summary for the context memory."""
+         if isinstance(result, pd.DataFrame):
+             # Focus on KEY data, not on metadata
+             if len(result) == 1:
+                 return f"Único resultado: {result.iloc[0].to_dict()}"
+             elif 'Cliente' in result.columns:
+                 top = result.nlargest(3, 'Neto') if 'Neto' in result.columns else result.head(3)
+                 return f"Top clientes: {top['Cliente'].tolist()}"
+             else:
+                 return f"Filas: {len(result)}, Columnas: {list(result.columns)}"
+         return str(result)
+
+     def get_context(self, current_question: str) -> str:
+         if not self.history:
+             return ""
+         last_relevant = []
+         for interaction in self.history:
+             if "producto" in interaction['question'].lower() and "producto" in current_question.lower():
+                 last_relevant.append(interaction)
+             elif "cliente" in interaction['question'].lower() and "cliente" in current_question.lower():
+                 last_relevant.append(interaction)
+
+         context = ""
+         for i, interaction in enumerate(last_relevant[-1:], 1):  # only the last relevant interaction
+             context += (
+                 f"Interacción #{i}: {interaction['question'][:50]}...\n"
+                 f"SQL: {interaction['sql'][:70]}...\n"
+                 f"Resultado: {interaction['result_summary']}\n\n"
+             )
+         return context