Commit 6cb080f
Committed by: Romain Graux
Parent(s): 40b0967

Add torch extra-index + log

Files changed:
- app.py +16 -10
- requirements.txt +1 -0
app.py CHANGED

@@ -9,27 +9,33 @@
 # TODO : add the training of the vae
 # TODO : add the description of the settings
 
-import sys
 
+
+
+import gradio as gr
+import json
 import numpy as np
+import shutil
+import sys
+import tempfile
+import torch
+
 from PIL import Image, ImageDraw
-import gradio as gr
-from app.tiff_utils import extract_physical_metadata
 from app.dl_inference import inference_fn
 from app.knn import knn, segment_image, bokeh_plot_knn, color_palette
-
-import tempfile
-import shutil
-import json
-from zipfile import ZipFile
-from datetime import datetime
-
+from app.tiff_utils import extract_physical_metadata
 from collections import namedtuple
+from datetime import datetime
+from zipfile import ZipFile
 
 block_state_entry = namedtuple(
     "block_state", ["results", "knn_results", "physical_metadata"]
 )
 
+if torch_availbale := torch.cuda.is_available():
+    print(f"CUDA device: {torch.cuda.get_device_name(torch.cuda.current_device())}")
+    print(f"Is CUDA available: {torch_availbale}")
+
 if ".." not in sys.path:
     sys.path.append("..")
 
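For reference, the CUDA logging added above can be exercised on its own. The sketch below assumes only that torch is installed; the CPU branch is an illustrative addition, not part of the commit (the committed version prints its two messages only when CUDA is available, since both calls sit inside the if block).

import torch

# Mirrors the logging added in app.py: report whether CUDA is usable
# and, if so, which GPU PyTorch would pick.
if torch.cuda.is_available():
    print("Is CUDA available: True")
    print(f"CUDA device: {torch.cuda.get_device_name(torch.cuda.current_device())}")
else:
    # Illustrative fallback, not part of the commit: note that we are on CPU.
    print("Is CUDA available: False (running on CPU)")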
requirements.txt CHANGED

@@ -170,6 +170,7 @@ tifffile==2023.2.28
 tinycss2==1.2.1
 tomli==2.0.1
 toolz==0.12.0
+--extra-index-url https://download.pytorch.org/whl/cu113
 torch==2.2.0
 torchvision==0.17.0
 tornado==6.2
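Note on the requirements.txt change: the --extra-index-url line tells pip to consult https://download.pytorch.org/whl/cu113 in addition to PyPI when resolving the pinned torch and torchvision wheels, so a plain pip install -r requirements.txt can pick up CUDA-enabled builds from the PyTorch index.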