# convosim-ui/models/databricks/scenario_sim_biz.py
# Provenance: feat/MVP_GCT_SP (#2), commit 9ff00d4 (ivnban27-ctl), 2.11 kB
import os
import json
import requests
import logging
from models.custom_parsers import CustomStringOutputParser
from langchain.chains import ConversationChain
from langchain_core.callbacks.manager import CallbackManagerForLLMRun
from langchain_core.language_models.llms import LLM
from langchain.prompts import PromptTemplate
from typing import Any, List, Mapping, Optional, Dict
class DatabricksCustomLLM(LLM):
    """LangChain LLM wrapper around a Databricks model-serving endpoint.

    Each call POSTs the prompt plus scenario metadata (issue, language,
    temperature) to the endpoint configured via the DATABRICKS_URL /
    DATABRICKS_TOKEN environment variables and returns the generated text.
    """

    # Crisis scenario/issue forwarded to the served model.
    issue: str
    # Language the simulated texter should respond in.
    language: str
    # Sampling temperature forwarded to the served model.
    temperature: float = 0.8
    # Endpoint URL; a missing DATABRICKS_URL fails fast at import time (KeyError).
    db_url: str = os.environ['DATABRICKS_URL']
    # Bearer-token auth; token resolves to None if DATABRICKS_TOKEN is unset.
    headers: Mapping[str, str] = {
        'Authorization': f'Bearer {os.environ.get("DATABRICKS_TOKEN")}',
        'Content-Type': 'application/json',
    }
    # Seconds to wait for the serving endpoint before raising; new keyword
    # field with a default, so existing constructor calls are unaffected.
    request_timeout: float = 60.0

    @property
    def _llm_type(self) -> str:
        return "custom_databricks"

    def _call(
        self,
        prompt: str,
        stop: Optional[List[str]] = None,
        run_manager: Optional[CallbackManagerForLLMRun] = None,
        **kwargs: Any,
    ) -> str:
        """POST `prompt` to the Databricks endpoint and return the generated text.

        Raises:
            Exception: if the endpoint returns a non-200 status.
            requests.Timeout: if the endpoint does not answer within
                `request_timeout` seconds.
        """
        payload = {'inputs': {
            'prompt': [prompt],
            'issue': [self.issue],
            'language': [self.language],
            'temperature': [self.temperature],
        }}
        # Serialize explicitly (rather than requests' json=) to keep the
        # allow_nan behavior of the original implementation.
        response = requests.post(
            self.db_url,
            headers=self.headers,
            data=json.dumps(payload, allow_nan=True),
            # A request without a timeout can hang the UI forever on a
            # dead endpoint.
            timeout=self.request_timeout,
        )
        if response.status_code != 200:
            raise Exception(f'Request failed with status {response.status_code}, {response.text}')
        return response.json()["predictions"][0]["generated_text"]
# Conversation prompt template: full transcript so far ({history}) followed by
# the helper's newest turn ({input}); the model completes the "texter:" line.
_DATABRICKS_TEMPLATE_ = """{history}
helper: {input}
texter:"""
def get_databricks_chain(issue, language, memory, temperature=0.8):
    """Build a ConversationChain backed by the Databricks scenario-sim endpoint.

    Args:
        issue: scenario/issue identifier forwarded to the served model.
        language: language the simulated texter responds in.
        memory: LangChain memory object supplying the {history} variable.
        temperature: sampling temperature for the served model (default 0.8).

    Returns:
        Tuple of (ConversationChain, "helper:"); the second element is the
        speaker prefix the caller uses for the human (helper) side.
    """
    prompt = PromptTemplate(
        input_variables=['history', 'input'],
        template=_DATABRICKS_TEMPLATE_,
    )
    llm = DatabricksCustomLLM(
        issue=issue,
        language=language,
        temperature=temperature,
    )
    chain = ConversationChain(
        llm=llm,
        prompt=prompt,
        memory=memory,
        # Strips everything after the texter's turn from the raw completion.
        output_parser=CustomStringOutputParser(),
    )
    # Plain string: the original used an f-string with no placeholders (F541).
    logging.debug("loaded Databricks Scenario Sim model")
    return chain, "helper:"