Spaces:
Running
Running
super-unsupervised and utils
Browse files
- images/probability_od.png +0 -0
- images/unsupervised_learning.png +0 -0
- pages/supervised_unsupervised_page.py +1 -1
- utils.py +15 -2
images/probability_od.png
ADDED
images/unsupervised_learning.png
ADDED
pages/supervised_unsupervised_page.py
CHANGED
@@ -49,7 +49,7 @@ with col2:
|
|
49 |
- They are also used to identify unusual patterns or **outliers**.
|
50 |
""", unsafe_allow_html=True)
|
51 |
st.markdown(" ")
|
52 |
-
st.image("images/
|
53 |
|
54 |
st.markdown(" ")
|
55 |
|
|
|
49 |
- They are also used to identify unusual patterns or **outliers**.
|
50 |
""", unsafe_allow_html=True)
|
51 |
st.markdown(" ")
|
52 |
+
st.image("images/unsupervised_learning.png", caption="An example of unsupervised Learning")
|
53 |
|
54 |
st.markdown(" ")
|
55 |
|
utils.py
CHANGED
@@ -5,10 +5,12 @@ import base64
|
|
5 |
import streamlit as st
|
6 |
import pandas as pd
|
7 |
|
8 |
-
#from google.oauth2 import service_account
|
9 |
-
#from googleapiclient.discovery import build
|
10 |
from htbuilder import HtmlElement, div, hr, a, p, img, styles
|
11 |
from pathlib import Path
|
|
|
|
|
|
|
|
|
12 |
|
13 |
|
14 |
|
@@ -34,6 +36,17 @@ def load_model_pickle(path, file):
|
|
34 |
return model
|
35 |
|
36 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
37 |
#################### LOAD DATA FROM GOOGLE DRIVE ###################
|
38 |
|
39 |
# @st.cache_data(ttl=3600, show_spinner=False)
|
|
|
5 |
import streamlit as st
|
6 |
import pandas as pd
|
7 |
|
|
|
|
|
8 |
from htbuilder import HtmlElement, div, hr, a, p, img, styles
|
9 |
from pathlib import Path
|
10 |
+
from huggingface_hub.inference_api import InferenceApi
|
11 |
+
#from google.oauth2 import service_account
|
12 |
+
#from googleapiclient.discovery import build
|
13 |
+
|
14 |
|
15 |
|
16 |
|
|
|
36 |
return model
|
37 |
|
38 |
|
39 |
+
|
40 |
+
###################### LOAD MODEL HUGGINGFACE #############################
|
41 |
+
|
42 |
+
@st.cache_data(ttl=3600)
|
43 |
+
def load_model_huggingface(repo_id, token, task=None):
|
44 |
+
""" Load model using Huggingface's Inference API
|
45 |
+
"""
|
46 |
+
model = InferenceApi(repo_id=repo_id, token=token, task=task)
|
47 |
+
return model
|
48 |
+
|
49 |
+
|
50 |
#################### LOAD DATA FROM GOOGLE DRIVE ###################
|
51 |
|
52 |
# @st.cache_data(ttl=3600, show_spinner=False)
|