Spaces:
Sleeping
Sleeping
Carlos Isael Ramírez González
committed on
Commit
·
5fc4ff1
1
Parent(s):
a04ffe2
Cambie la memoria por una version anterior
Browse files
memory.py
CHANGED
|
@@ -1,26 +1,47 @@
|
|
| 1 |
-
from
|
| 2 |
-
|
| 3 |
-
from
|
| 4 |
|
| 5 |
-
|
| 6 |
-
|
| 7 |
-
|
| 8 |
-
self.embedding_model = HuggingFaceEmbeddings(
|
| 9 |
-
model_name="sentence-transformers/all-MiniLM-L6-v2"
|
| 10 |
-
)
|
| 11 |
-
self.vector_store = Chroma(
|
| 12 |
-
persist_directory="./chroma_db", embedding_function=self.embedding_model
|
| 13 |
-
)
|
| 14 |
self.schema_cache = None
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 15 |
|
| 16 |
-
|
| 17 |
-
|
| 18 |
-
|
| 19 |
-
|
| 20 |
-
|
| 21 |
-
|
| 22 |
-
|
| 23 |
-
|
| 24 |
-
results = self.vector_store.similarity_search(question, k=top_k)
|
| 25 |
-
return "\n".join([d.page_content for d in results])
|
| 26 |
-
|
|
|
|
| 1 |
+
from collections import deque
|
| 2 |
+
import pandas as pd
|
| 3 |
+
from config import Config
|
| 4 |
|
| 5 |
+
class ConversationMemory:
    """Bounded short-term memory for a conversational SQL assistant.

    Stores the most recent (question, SQL, result-summary) interactions in a
    fixed-size deque and renders a compact context string from the last
    interaction that shares a keyword ("producto"/"cliente") with the
    current question.
    """

    def __init__(self, max_history=None):
        """Create the memory buffer.

        Args:
            max_history: Maximum number of interactions kept. Defaults to
                ``Config.MAX_HISTORY``, looked up at call time rather than as
                a default-argument expression, so the configured value is not
                frozen at import time and an explicit ``max_history`` never
                touches ``Config`` at all.
        """
        if max_history is None:
            # Deferred lookup: the original evaluated Config.MAX_HISTORY in
            # the signature, capturing whatever value it had at import time.
            max_history = Config.MAX_HISTORY
        # deque(maxlen=...) silently evicts the oldest interaction on append.
        self.history = deque(maxlen=max_history)
        # Populated by external code; kept for backward compatibility.
        self.schema_cache = None

    def add_interaction(self, question: str, sql: str, result) -> None:
        """Record one question/SQL/result triple.

        ``result`` may be a pandas DataFrame or any other object (the former
        ``result: str`` annotation was wrong — ``_summarize_result``
        explicitly branches on DataFrame). Only a short summary is stored.
        """
        self.history.append({
            "question": question,
            "sql": sql,
            "result_summary": self._summarize_result(result),
        })

    def _summarize_result(self, result) -> str:
        """Executive summary of a query result for context memory."""
        if isinstance(result, pd.DataFrame):
            # Focus on KEY data, not on metadata.
            if len(result) == 1:
                return f"Único resultado: {result.iloc[0].to_dict()}"
            elif 'Cliente' in result.columns:
                # Rank by 'Neto' when that column exists, else take the head.
                top = result.nlargest(3, 'Neto') if 'Neto' in result.columns else result.head(3)
                return f"Top clientes: {top['Cliente'].tolist()}"
            else:
                return f"Filas: {len(result)}, Columnas: {list(result.columns)}"
        return str(result)

    def get_context(self, current_question: str) -> str:
        """Return a context snippet from the most recent relevant interaction.

        An interaction is relevant when it shares the keyword "producto" or
        "cliente" with the current question. Only the last relevant one is
        rendered; returns "" when history is empty or nothing matches.
        """
        if not self.history:
            return ""
        current = current_question.lower()
        relevant = [
            interaction
            for interaction in self.history
            if any(
                keyword in interaction['question'].lower() and keyword in current
                for keyword in ("producto", "cliente")
            )
        ]
        if not relevant:
            return ""
        last = relevant[-1]
        # The original enumerated last_relevant[-1:] starting at 1, so the
        # rendered index was provably always 1.
        return (
            f"Interacción #1: {last['question'][:50]}...\n"
            f"SQL: {last['sql'][:70]}...\n"
            f"Resultado: {last['result_summary']}\n\n"
        )
|
|
|
|
|
|
|
|
|