Danielrahmai1991 committed on
Commit 2732655
1 Parent(s): 9eb3567

Update app.py

Files changed (1):
  app.py +1 -49
app.py CHANGED
@@ -32,55 +32,7 @@ print("creating llm ended")
 
 def greet(question, model_type):
     print(f"question is {question}")
-    if model_type == "With memory":
-        retriever = TFIDFRetriever.from_texts(
-            ["Finatial AI"])
-
-
-        template = """You are the Finiantial expert:
-        {history}
-        {context}
-        ### Instruction:
-        {question}
-
-        ### Input:
-
-
-        ### Response:
-        """
-
-        prompt1 = PromptTemplate(
-            input_variables=["history", "context", "question"],
-            template=template,
-        )
-
-        llm_chain_model = RetrievalQA.from_chain_type(
-            llm=llm,
-            chain_type='stuff',
-            retriever=retriever,
-            verbose=False,
-            chain_type_kwargs={
-                "verbose": False,
-                "prompt": prompt1,
-                "memory": ConversationBufferMemory(
-                    memory_key="history",
-                    input_key="question"),
-            }
-        )
-        print("creating model created")
-    else:
-        template = """You are the Finiantial expert:
-        ### Instruction:
-        {question}
-        ### Input:
-        ### Response:
-        """
-
-        prompt = PromptTemplate(template=template, input_variables=["question"])
-
-        llm_chain_model = LLMChain(prompt=prompt, llm=llm)
-    out_gen = llm_chain_model.run(question)
-    print(f"out is: {out_gen}")
+    out_gen = "testsetestestetsetsets"
     return out_gen
 
 demo = gr.Interface(fn=greet, inputs=["text", gr.Dropdown(
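
For reference, the branch removed here wired LangChain's RetrievalQA to a TF-IDF retriever with conversation memory. Below is a minimal, self-contained sketch of that setup, not the app's actual code: it assumes the classic (pre-0.1) langchain package, supplies the imports app.py must have carried elsewhere, and uses FakeListLLM as a stand-in for the `llm` the app builds before this function. The prompt strings, including the original "Finatial"/"Finiantial" spellings, are kept verbatim from the diff.

# Minimal sketch of the removed "With memory" branch (assumptions:
# classic pre-0.1 LangChain APIs; FakeListLLM stands in for the real llm).
from langchain.llms.fake import FakeListLLM
from langchain.memory import ConversationBufferMemory
from langchain.prompts import PromptTemplate
from langchain.chains import RetrievalQA
from langchain.retrievers import TFIDFRetriever  # requires scikit-learn

llm = FakeListLLM(responses=["stub answer"])  # replace with the real model

# TF-IDF retriever over the single seed text used by the removed code.
retriever = TFIDFRetriever.from_texts(["Finatial AI"])

# Prompt kept verbatim: {context} receives the retrieved documents,
# {history} the conversation memory, {question} the user input.
template = """You are the Finiantial expert:
{history}
{context}
### Instruction:
{question}

### Input:


### Response:
"""
prompt = PromptTemplate(
    input_variables=["history", "context", "question"],
    template=template,
)

# "stuff" chain: retrieved docs are concatenated into {context};
# ConversationBufferMemory replays prior turns into {history}.
qa = RetrievalQA.from_chain_type(
    llm=llm,
    chain_type="stuff",
    retriever=retriever,
    chain_type_kwargs={
        "prompt": prompt,
        "memory": ConversationBufferMemory(
            memory_key="history", input_key="question"
        ),
    },
)

print(qa.run("What does the fund invest in?"))

Passing prompt and memory through chain_type_kwargs forwards them to the underlying stuff-documents chain, which was the documented recipe for adding conversation memory to RetrievalQA.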
 
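
The diff cuts off inside the gr.Interface call. A hedged sketch of how the stubbed greet presumably plugs into Gradio follows; the Dropdown choices and label are assumptions inferred from the removed `model_type == "With memory"` check, not recovered from the file.

# Hypothetical continuation of the truncated gr.Interface call; the
# Dropdown choices and label are assumptions, not recovered from app.py.
import gradio as gr

def greet(question, model_type):
    print(f"question is {question}")
    out_gen = "testsetestestetsetsets"  # stub response left by this commit
    return out_gen

demo = gr.Interface(fn=greet, inputs=["text", gr.Dropdown(
    ["With memory", "Without memory"],  # assumed choices
    label="model_type",                 # assumed label
)], outputs="text")

demo.launch()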