Update app.py
app.py
CHANGED
@@ -8,7 +8,7 @@ from transformers import (
     AutoModelForImageClassification,
     AutoTokenizer,
     AutoModelForSeq2SeqLM,
-    pipeline
+    pipeline
 )
 import requests
 from geopy.geocoders import Nominatim
@@ -22,10 +22,10 @@ st.set_page_config(page_title="Skin Cancer Dashboard", layout="wide")
 
 # --- Configuration ---
 # Ensure you have set your Hugging Face token as an environment variable:
-export HF_TOKEN="YOUR_TOKEN_HERE"
+#export HF_TOKEN="YOUR_TOKEN_HERE"
 MODEL_NAME = "Anwarkh1/Skin_Cancer-Image_Classification"
 LLM_NAME = "google/flan-t5-xl"
-
+HF_TOKEN = os.environ.get("HF_TOKEN")
 DATA_DIR = "data/harvard_dataset"  # Path where you download and unpack the Harvard Dataverse dataset
 DIARY_CSV = "diary.csv"
|