pcuenq HF staff committed on
Commit
587fb65
1 Parent(s): b97eb37

Download from hub

Browse files
Files changed (4) hide show
  1. README.md +2 -0
  2. app.py +6 -2
  3. model_600000.pt +0 -3
  4. requirements.txt +1 -0
README.md CHANGED
@@ -7,6 +7,8 @@ sdk: gradio
7
  sdk_version: 3.9.1
8
  app_file: app.py
9
  pinned: false
 
 
10
  license: mit
11
  ---
12
 
 
7
  sdk_version: 3.9.1
8
  app_file: app.py
9
  pinned: false
10
+ models:
11
+ - pcuenq/Paella
12
  license: mit
13
  ---
14
 
app.py CHANGED
@@ -5,9 +5,12 @@ from PIL import Image
5
  from open_clip import tokenizer
6
  from rudalle import get_vae
7
  from einops import rearrange
 
8
  from modules import DenoiseUNet
9
 
10
- model_id = "./model_600000.pt"
 
 
11
  device = "cuda" if torch.cuda.is_available() else "cpu"
12
 
13
  batch_size = 4
@@ -103,7 +106,8 @@ def decode(img_seq, shape=(32,32)):
103
  img = (img.clamp(-1., 1.) + 1) * 0.5
104
  return img
105
 
106
- state_dict = torch.load(model_id, map_location=device)
 
107
  model = DenoiseUNet(num_labels=8192).to(device)
108
  model.load_state_dict(state_dict)
109
  model.eval().requires_grad_()
 
5
  from open_clip import tokenizer
6
  from rudalle import get_vae
7
  from einops import rearrange
8
+ from huggingface_hub import hf_hub_download
9
  from modules import DenoiseUNet
10
 
11
+ model_repo = "pcuenq/Paella"
12
+ model_file = "model_600000.pt"
13
+
14
  device = "cuda" if torch.cuda.is_available() else "cpu"
15
 
16
  batch_size = 4
 
106
  img = (img.clamp(-1., 1.) + 1) * 0.5
107
  return img
108
 
109
+ model_path = hf_hub_download(repo_id=model_repo, filename=model_file)
110
+ state_dict = torch.load(model_path, map_location=device)
111
  model = DenoiseUNet(num_labels=8192).to(device)
112
  model.load_state_dict(state_dict)
113
  model.eval().requires_grad_()
model_600000.pt DELETED
@@ -1,3 +0,0 @@
1
- version https://git-lfs.github.com/spec/v1
2
- oid sha256:bb2b512cc353a30144700324edaa55a31543ad472804e8c61117cb1e79000cbe
3
- size 2312246971
 
 
 
 
requirements.txt CHANGED
@@ -4,3 +4,4 @@ rudalle
4
  open_clip_torch
5
  einops
6
  Pillow
 
 
4
  open_clip_torch
5
  einops
6
  Pillow
7
+ huggingface_hub