Spaces:
Paused
Paused
Add descriptions
Browse files
app.py
CHANGED
@@ -10,7 +10,18 @@ import torch
|
|
10 |
from transformers import AutoModelForCausalLM, AutoTokenizer, TextIteratorStreamer
|
11 |
from peft import PeftModel, PeftConfig
|
12 |
|
13 |
-
DESCRIPTION = "
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
14 |
|
15 |
if not torch.cuda.is_available():
|
16 |
DESCRIPTION += "\n<p>Running on CPU 🥶 This demo does not work on CPU.</p>"
|
@@ -45,6 +56,7 @@ def generate(
|
|
45 |
|
46 |
history = current_input
|
47 |
current_input += message
|
|
|
48 |
|
49 |
device = "cuda:0"
|
50 |
input_ids = tokenizer(current_input, return_tensors="pt").input_ids.to(device)
|
@@ -122,11 +134,9 @@ chat_interface = gr.ChatInterface(
|
|
122 |
],
|
123 |
stop_btn=None,
|
124 |
examples=[
|
125 |
-
["<<<<<<<\
|
126 |
-
["<<<<<<<\n
|
127 |
-
["<<<<<<<\n
|
128 |
-
["How many hours does it take a man to eat a Helicopter?"],
|
129 |
-
["Write a 100-word article on 'Benefits of Open-Source in AI research'"],
|
130 |
],
|
131 |
)
|
132 |
|
|
|
10 |
from transformers import AutoModelForCausalLM, AutoTokenizer, TextIteratorStreamer
|
11 |
from peft import PeftModel, PeftConfig
|
12 |
|
13 |
+
DESCRIPTION = "This is a conversational interface powered by the MergeLlama-7b model, a fine-tune of CodeLlama-7b designed to assist developers in resolving merge conflicts in their code. "
|
14 |
+
DESCRIPTION += "It leverages the capabilities of deep learning to provide suggestions for reconciling code differences, presenting potential resolutions for highlighted changes.\n"
|
15 |
+
DESCRIPTION += "The feedback from this space will help develop future versions, including more powerful 13b and 34b variants."
|
16 |
+
|
17 |
+
DESCRIPTION += "\n# How to use: \n"
|
18 |
+
DESCRIPTION += "1. Input your merge conflict in the chat in the following format:\n```\n<<<<<<<\n[change]\n=======\n[base]\n>>>>>>>\n```\n"
|
19 |
+
DESCRIPTION += "The model will generate the merge resolution. Context can be added before the conflict and multiple conflicts/resolutions can be chained together for context.\n"
|
20 |
+
DESCRIPTION += "**Additional Information:**\n"
|
21 |
+
DESCRIPTION += "- The model behind this tool is based on the MergeLlama dataset, which can be found [here](https://huggingface.co/datasets/codys12/MergeLlama).\n"
|
22 |
+
DESCRIPTION += "- For more information about the MergeLlama-7b model, visit [here](https://huggingface.co/codys12/MergeLlama-7b).\n"
|
23 |
+
DESCRIPTION += "- If you are interested in supporting the larger versions of this model, such as the 13b and 34b variants, you can check them out [here](https://www.dreamcatcher.co/ProjectPage?projectId=uibaxk4sfzetpkg7ch71ui).\n"
|
24 |
+
DESCRIPTION += "- This model was trained on [DreamcatcherAI](https://www.dreamcatcher.co/Discover)\n"
|
25 |
|
26 |
if not torch.cuda.is_available():
|
27 |
DESCRIPTION += "\n<p>Running on CPU 🥶 This demo does not work on CPU.</p>"
|
|
|
56 |
|
57 |
history = current_input
|
58 |
current_input += message
|
59 |
+
current_input += "\n"
|
60 |
|
61 |
device = "cuda:0"
|
62 |
input_ids = tokenizer(current_input, return_tensors="pt").input_ids.to(device)
|
|
|
134 |
],
|
135 |
stop_btn=None,
|
136 |
examples=[
|
137 |
+
["<<<<<<<\n var visibleSets = beatmapSets.Where(s => !s.Filtered).ToList();\n if (!visibleSets.Any())\n return;\n\n=======\n\n var visible = beatmapSets.Where(s => !s.Filtered).ToList();\n if (!visible.Any())\n return false;\n\n>>>>>>>"],
|
138 |
+
["<<<<<<<\n// Related to JDK7\nimport java.nio.channels.FileChannel;\n\n=======\n\n// Branch-dependent imports\nimport java.nio.channels.SeekableByteChannel;\n\n>>>>>>>"],
|
139 |
+
["<<<<<<<\n bind(BlobDirectoryAccess.class, DefaultBlobDirectoryAccess.class);\n\n=======\n\n bind(new TypeLiteral<UpdateStepRepositoryMetadataAccess<Path>>() {}).to(new TypeLiteral<MetadataStore>() {});\n\n>>>>>>>"],
|
|
|
|
|
140 |
],
|
141 |
)
|
142 |
|