---
title: Mmmm
emoji: 🚀
colorFrom: red
colorTo: indigo
sdk: docker
sdk_version: 5.4.0
app_file: app.py
pinned: false
license: bigscience-openrail-m
duplicated_from: ysharma/ChatGPT4
disable_embedding: true
datasets:
- allenai/WildChat-1M
- allenai/WildChat-1M-Full
- allenai/WildChat
models:
- allenai/WildLlama-7b-user-assistant
- allenai/WildLlama-7b-assistant-only
short_description: nbb
---
Related papers:

- https://arxiv.org/abs/2405.01470
- https://arxiv.org/abs/2409.03753
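The WildChat datasets listed in the metadata above can be loaded with the `datasets` library. A minimal sketch, assuming you have accepted the dataset terms on the Hub and are authenticated (the datasets are gated), and that a `train` split exists:

```python
from datasets import load_dataset

# Gated dataset: accept the terms on the Hub and authenticate first.
wildchat = load_dataset("allenai/WildChat-1M", split="train")
print(wildchat[0])
```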
The WildLlama model weights can be downloaded ahead of time with `huggingface-cli`, e.g. `huggingface-cli download allenai/WildLlama-7b-assistant-only`.
Use a pipeline as a high-level helper:

```python
from transformers import pipeline

pipe = pipeline("text-generation", model="allenai/WildLlama-7b-assistant-only")
```
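A minimal usage sketch; the prompt string and decoding settings are illustrative assumptions, so check the model card for the chat format WildLlama actually expects:

```python
# Illustrative prompt format (assumption); adjust to the model's expected template.
result = pipe("USER: What is WildChat?\nASSISTANT:", max_new_tokens=64, do_sample=False)
print(result[0]["generated_text"])
```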
Or load the tokenizer and model directly:

```python
from transformers import AutoTokenizer, AutoModelForCausalLM

tokenizer = AutoTokenizer.from_pretrained("allenai/WildLlama-7b-assistant-only")
model = AutoModelForCausalLM.from_pretrained("allenai/WildLlama-7b-assistant-only")
```
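A minimal generation sketch with the directly loaded model; the prompt and generation parameters are assumptions for illustration only:

```python
# Illustrative prompt format (assumption); see the model card for the expected template.
inputs = tokenizer("USER: Hello!\nASSISTANT:", return_tensors="pt")
output_ids = model.generate(**inputs, max_new_tokens=64)
print(tokenizer.decode(output_ids[0], skip_special_tokens=True))
```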