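"""Gradio demo Space for chatting with the C4AI Command R+ model through the Cohere API."""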
import os
import uuid

import cohere
import gradio as gr



# Cohere client; expects COHERE_API_KEY to be set in the environment.
cohere_api_key = os.getenv("COHERE_API_KEY")
co = cohere.Client(cohere_api_key)

# Conversation ID shared across turns so the Cohere API keeps server-side chat history.
cid = str(uuid.uuid4())

def trigger_example(example):
    # Re-use the streaming generator so example prompts behave like typed input.
    for chat, updated_history in generate_response(example):
        yield chat, updated_history
        
def generate_response(user_message, history=None):
    if history is None:
        history = []

    # History is a flat list of strings alternating user / assistant turns.
    history.append(user_message)

    stream = co.chat_stream(
        message=user_message,
        conversation_id=cid,
        model='command-r-plus',
        connectors=[],
        temperature=0.3,
    )

    output = ""
    chat = []

    for idx, response in enumerate(stream):
        if response.event_type == "text-generation":
            output += response.text
        if idx == 0:
            # First event: reserve a slot in history for the assistant reply.
            history.append(" " + output)
        else:
            history[-1] = output
        # Pair user / assistant turns into (user, bot) tuples for the Chatbot component.
        chat = [
            (history[i].strip(), history[i + 1].strip())
            for i in range(0, len(history) - 1, 2)
        ]
        yield chat, history

    return chat, history
    

def clear_chat():
    # Start a new server-side conversation and reset the UI chat state.
    global cid
    cid = str(uuid.uuid4())
    return [], []


examples = [
    "What are some good questions to get to know a stranger?",
    "Create a list of unusual excuses people might use to get out of a work meeting",
    "Write a python code to reverse a string",
    "Explain the relativity theory in French",
    "Como sair de um helicóptero que caiu na água?",
    "Formally introduce the transformer architecture with notation.",
    "¿Cómo le explicarías el aprendizaje automático a un extraterrestre?",
    "Summarize recent news about the North American tech job market"
]

custom_css = """
#logo-img {
    border: none !important;
}
#chat-message {
    font-size: 14px;
    min-height: 300px;
}
"""

with gr.Blocks(analytics_enabled=False, css=custom_css) as demo:
    
    with gr.Row():
        with gr.Column(scale=1):
            gr.Image("logoplus.png", elem_id="logo-img", show_label=False, show_share_button=False, show_download_button=False)
        with gr.Column(scale=3):
            gr.Markdown("""C4AI Command R+ is a research open-weights release of a 103B parameter model with highly advanced Retrieval Augmented Generation (RAG) capabilities and tool use to automate sophisticated tasks, and it excels at 10 key business languages. Command R+ is optimized for a variety of use cases including reasoning, summarization, and question answering.
            <br/><br/>
            **Model**: [c4ai-command-r-plus-v01](https://huggingface.co/CohereForAI/c4ai-command-r-plus-v01)
            <br/> 
            **Developed by**: [Cohere](https://cohere.com/) and [Cohere for AI](https://cohere.com/research)
            <br/>
            **License**: CC-BY-NC; use also requires adherence to [C4AI's Acceptable Use Policy](https://docs.cohere.com/docs/c4ai-acceptable-use-policy)
            """
            )
            
    with gr.Column():
        with gr.Row():
            chatbot = gr.Chatbot(show_label=False)
        
        with gr.Row():
            user_message = gr.Textbox(lines=1, placeholder="Ask anything ...", label="Input", show_label=False)

      
        with gr.Row():
            submit_button = gr.Button("Submit")
            clear_button = gr.Button("Clear chat")

                        
        # Per-session chat history (list of alternating user / assistant strings).
        history = gr.State([])

        
        user_message.submit(fn=generate_response, inputs=[user_message, history], outputs=[chatbot, history], concurrency_limit=32)

        submit_button.click(fn=generate_response, inputs=[user_message, history], outputs=[chatbot, history], concurrency_limit=32)
        clear_button.click(fn=clear_chat, inputs=None, outputs=[chatbot, history], concurrency_limit=32)

        with gr.Row():
            gr.Examples(
                examples=examples,
                inputs=[user_message],
                cache_examples=False,
                fn=trigger_example,
                outputs=[chatbot, history],
            )

if __name__ == "__main__":
    # demo.launch(debug=True)
    demo.queue(api_open=False).launch()