Skip to content

Instantly share code, notes, and snippets.

@y-lan
Last active September 4, 2023 14:24
Show Gist options
  • Save y-lan/627e4acab3ec78e419451b77cd6d56c9 to your computer and use it in GitHub Desktop.
Save the prompt and response with LLMs in LangChain
from typing import Any, Dict, List, Optional
from uuid import UUID
from langchain.callbacks.base import BaseCallbackHandler
from langchain.schema import LLMResult
import json
import time
class LLMLoggingCallbackHandler(BaseCallbackHandler):
    """LangChain callback handler that logs each LLM prompt/response pair
    as one JSON line in a log file.

    Prompts are captured in ``on_llm_start`` keyed by ``run_id`` and paired
    with their generations in ``on_llm_end``, then appended to ``log_file``.
    """

    def __init__(self, log_file: str = 'llm.log') -> None:
        """
        Args:
            log_file: Path of the JSON-lines file records are appended to.
        """
        # Maps an in-flight run_id -> the list of prompts sent for that run.
        # Entries are removed in on_llm_end once the run completes.
        self._log: Dict[UUID, List[str]] = {}
        self.log_file = log_file
        super().__init__()

    def write_log(self, record: Dict[str, Any]) -> None:
        """Append one record to the log file as a single JSON line."""
        # Explicit utf-8 + ensure_ascii=False: platform-default encodings and
        # \uXXXX escaping would otherwise mangle non-ASCII prompts/outputs.
        with open(self.log_file, 'a', encoding='utf-8') as f:
            f.write(json.dumps(record, ensure_ascii=False) + '\n')

    @property
    def always_verbose(self) -> bool:
        # Fire callbacks even when the chain/LLM is not running verbose.
        return True

    def on_llm_end(self, response: LLMResult,
                   *,
                   run_id: UUID,
                   parent_run_id: Optional[UUID] = None,
                   tags: Optional[List[str]] = None,
                   **kwargs: Any,
                   ) -> None:
        """Pair the stored prompts with the generations and write them out."""
        # pop() rather than a plain lookup: the original left finished runs
        # in _log forever, leaking one entry per LLM call.
        prompts = self._log.pop(run_id, None)
        if prompts is None:
            # No matching on_llm_start was seen (e.g. handler attached late).
            return
        for prompt, generation in zip(prompts, response.generations):
            self.write_log({
                'id': str(run_id),
                'timestamp': time.time(),
                'prompt': prompt,
                'outputs': [g.text for g in generation],
            })

    def on_llm_start(
        self,
        serialized: Dict[str, Any],
        prompts: List[str],
        *,
        run_id: UUID,
        parent_run_id: Optional[UUID] = None,
        tags: Optional[List[str]] = None,
        metadata: Optional[Dict[str, Any]] = None,
        **kwargs: Any,
    ) -> None:
        """Remember the prompts of this run so on_llm_end can log them."""
        self._log[run_id] = prompts
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment