burak committed on
Commit 8ba6a85
Parent: f656118

Upload 4 files

Files changed (4)
  1. README.md +13 -0
  2. app.py +138 -0
  3. icon.jpg +0 -0
  4. requirements.txt +4 -0
README.md ADDED
@@ -0,0 +1,13 @@
+ ---
+ title: Turkish Chatbot
+ emoji: 💬
+ colorFrom: purple
+ colorTo: pink
+ sdk: streamlit
+ sdk_version: 1.31.1
+ app_file: app.py
+ pinned: false
+ license: mit
+ ---
+
+ Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
app.py ADDED
@@ -0,0 +1,138 @@
+ import streamlit as st
+ from openai import OpenAI
+ import os
+ import sys
+ from dotenv import load_dotenv, dotenv_values
+ load_dotenv()
+
+
+
+ # initialize the client
+ client = OpenAI(
+     base_url="https://hbzt3461o9xzjqxb.us-east-1.aws.endpoints.huggingface.cloud/v1/",
+     api_key=os.environ.get('HUGGINGFACEHUB_API_TOKEN')  # "hf_xxx" # Replace with your token
+ )
+
+
+
+ # Create model
+ model_links = {
+
+     "Turkish-7b-mix": "burak/Trendyol-Turkcell-stock"
+
+ }
+
+ # Pull info about the model to display
+ model_info = {
+
+     "Turkish-7b-mix":
+     {'description': """Turkish-7b-Mix is a merge of pre-trained language models created using **mergekit**.\n \
+ ### Merge Method\n \
+
+ This model was merged using the [Model Stock](https://arxiv.org/abs/2403.19522) merge method using [Trendyol/Trendyol-LLM-7b-chat-dpo-v1.0](https://huggingface.co/Trendyol/Trendyol-LLM-7b-chat-dpo-v1.0) as a base.\n \
+
+ ### Models Merged\n \
+
+ The following models were included in the merge:\n \
+ * [TURKCELL/Turkcell-LLM-7b-v1](https://huggingface.co/TURKCELL/Turkcell-LLM-7b-v1)\n \
+ * [Trendyol/Trendyol-LLM-7b-chat-v1.0](https://huggingface.co/Trendyol/Trendyol-LLM-7b-chat-v1.0)\n""",
+
+      'logo': 'https://huggingface.co/spaces/burak/SimpleChatbot/resolve/main/icon.jpg'
+
+     },
+
+ }
+
+ def reset_conversation():
+     '''
+     Resets Conversation
+     '''
+     st.session_state.conversation = []
+     st.session_state.messages = []
+     return None
+
+
+
+
+ # Define the available models
+ models = [key for key in model_links.keys()]
+
+ # Create the sidebar with the dropdown for model selection
+ selected_model = st.sidebar.selectbox("Select Model", models)
+
+ # Create a temperature slider
+ temp_values = st.sidebar.slider('Select a temperature value', 0.0, 1.0, (0.5))
+
+
+ # Add reset button to clear conversation
+ st.sidebar.button('Reset Chat', on_click=reset_conversation)  # Reset button
+
+
+ # Create model description
+ st.sidebar.write(f"You're now chatting with **{selected_model}**")
+ st.sidebar.markdown(model_info[selected_model]['description'])
+ st.sidebar.image(model_info[selected_model]['logo'])
+ st.sidebar.markdown("*Generated content may be inaccurate or false.*")
+
+
+
+ if "prev_option" not in st.session_state:
+     st.session_state.prev_option = selected_model
+
+ if st.session_state.prev_option != selected_model:
+     st.session_state.messages = []
+     # st.write(f"Changed to {selected_model}")
+     st.session_state.prev_option = selected_model
+     reset_conversation()
+
+
+
+ # Pull in the model we want to use
+ repo_id = model_links[selected_model]
+
+
+ st.subheader(f'AI - {selected_model}')
+ # st.title(f'ChatBot Using {selected_model}')
+
+ # Set a default model
+ if selected_model not in st.session_state:
+     st.session_state[selected_model] = model_links[selected_model]
+
+ # Initialize chat history
+ if "messages" not in st.session_state:
+     st.session_state.messages = []
+
+
+ # Display chat messages from history on app rerun
+ for message in st.session_state.messages:
+     with st.chat_message(message["role"]):
+         st.markdown(message["content"])
+
+
+
+ # Accept user input
+ if prompt := st.chat_input(f"Hi I'm {selected_model}, ask me a question"):
+
+     # Display user message in chat message container
+     with st.chat_message("user"):
+         st.markdown(prompt)
+     # Add user message to chat history
+     st.session_state.messages.append({"role": "user", "content": prompt})
+
+
+     # Display assistant response in chat message container
+     with st.chat_message("assistant"):
+         stream = client.chat.completions.create(
+             model=model_links[selected_model],
+             messages=[
+                 {"role": m["role"], "content": m["content"]}
+                 for m in st.session_state.messages
+             ],
+             temperature=temp_values,  # 0.5,
+             stream=True,
+             max_tokens=500,
+
+         )
+
+         response = st.write_stream(stream)
+     st.session_state.messages.append({"role": "assistant", "content": response})
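
app.py streams chat completions from a dedicated Hugging Face Inference Endpoint through the OpenAI-compatible client, with the served model chosen from `model_links`. Below is a minimal sketch (not part of the commit) for checking the endpoint and token outside Streamlit; it reuses the endpoint URL, model id, and `HUGGINGFACEHUB_API_TOKEN` variable from app.py, while the prompt text is only illustrative:

```python
# Minimal connectivity check, assuming HUGGINGFACEHUB_API_TOKEN is set
# (for example via a local .env file). The endpoint URL and model id are
# copied from app.py; the prompt is made up for illustration.
import os

from dotenv import load_dotenv
from openai import OpenAI

load_dotenv()

client = OpenAI(
    base_url="https://hbzt3461o9xzjqxb.us-east-1.aws.endpoints.huggingface.cloud/v1/",
    api_key=os.environ.get("HUGGINGFACEHUB_API_TOKEN"),
)

# Request a short streamed reply and print chunks as they arrive.
stream = client.chat.completions.create(
    model="burak/Trendyol-Turkcell-stock",
    messages=[{"role": "user", "content": "Merhaba, kendini tanıtır mısın?"}],
    temperature=0.5,
    max_tokens=100,
    stream=True,
)

for chunk in stream:
    delta = chunk.choices[0].delta.content
    if delta:
        print(delta, end="", flush=True)
print()
```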
icon.jpg ADDED
requirements.txt ADDED
@@ -0,0 +1,4 @@
+ openai
+ langchain
+ python-dotenv
+ langchain-community
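
The dependencies are left unpinned, and `streamlit` itself is supplied by the Space runtime declared in README.md (`sdk: streamlit`), so it is not listed here. For running app.py outside Spaces, `python-dotenv` is what lets the token be read from a local file. A small sketch of that flow, assuming a `.env` file next to app.py (file name and placeholder value are illustrative):

```python
# Illustrative only: app.py's load_dotenv() call reads a local .env file such as
#   HUGGINGFACEHUB_API_TOKEN=hf_xxx   (placeholder value)
# after which the token is visible through the process environment.
import os

from dotenv import load_dotenv

load_dotenv()  # returns False and does nothing if no .env file is found
print("token configured:", os.environ.get("HUGGINGFACEHUB_API_TOKEN") is not None)
```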