File size: 4,298 Bytes
83074cb
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
511630e
 
 
 
 
 
 
 
 
83074cb
511630e
 
83074cb
 
511630e
83074cb
 
 
 
 
 
 
 
 
 
 
 
 
 
511630e
83074cb
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
511630e
83074cb
 
511630e
83074cb
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
# import joblib
# import pandas as pd
# import gradio as gr

# # Load the scaler and models
# scaler = joblib.load("models/scaler.joblib")
# models = {
#     "processing": joblib.load("models/svm_model_processing.joblib"),
#     "perception": joblib.load("models/svm_model_perception.joblib"),
#     "input": joblib.load("models/svm_model_input.joblib"),
#     "understanding": joblib.load("models/svm_model_understanding.joblib")
# }

# def predict(course_overview, reading_file, abstract_materiale, concrete_material, visual_materials,
#            self_assessment, exercises_submit, quiz_submitted, playing, paused, unstarted, buffering):
#     try:
#         input_data = {
#             "course overview": [course_overview],
#             "reading file": [reading_file],
#             "abstract materiale": [abstract_materiale],
#             "concrete material": [concrete_material],
#             "visual materials": [visual_materials],
#             "self-assessment": [self_assessment],
#             "exercises submit": [exercises_submit],
#             "quiz submitted": [quiz_submitted],
#             "playing": [playing],
#             "paused": [paused],
#             "unstarted": [unstarted],
#             "buffering": [buffering]
#         }

#         input_df = pd.DataFrame(input_data)
#         input_scaled = scaler.transform(input_df)

#         predictions = {}
#         for target, model in models.items():
#             pred = model.predict(input_scaled)
#             predictions[target] = pred[0]  # Return as is, without converting to int

#         return predictions

#     except Exception as e:
#         return {"error": str(e)}

# # Define Gradio interface using the latest syntax
# iface = gr.Interface(
#     fn=predict,
#     inputs=[
#         gr.Number(label="Course Overview"),
#         gr.Number(label="Reading File"),
#         gr.Number(label="Abstract Materiale"),
#         gr.Number(label="Concrete Material"),
#         gr.Number(label="Visual Materials"),
#         gr.Number(label="Self Assessment"),
#         gr.Number(label="Exercises Submit"),
#         gr.Number(label="Quiz Submitted"),
#         gr.Number(label="Playing"),
#         gr.Number(label="Paused"),
#         gr.Number(label="Unstarted"),
#         gr.Number(label="Buffering")
#     ],
#     outputs=gr.JSON(),
#     title="SVM Multi-Target Prediction",
#     description="Enter the feature values to get predictions for processing, perception, input, and understanding."
# )

# if __name__ == "__main__":
#     iface.launch()

from fastapi import FastAPI, HTTPException
from pydantic import BaseModel
import joblib
import pandas as pd

# Load the scaler and models
scaler = joblib.load("models/scaler.joblib")
models = {
    "processing": joblib.load("models/svm_model_processing.joblib"),
    "perception": joblib.load("models/svm_model_perception.joblib"),
    "input": joblib.load("models/svm_model_input.joblib"),
    "understanding": joblib.load("models/svm_model_understanding.joblib"),
}

# Initialize the FastAPI app
app = FastAPI(title="SVM Multi-Target Prediction API")

# Define the input data model
class InputData(BaseModel):
    """Request schema for POST /predict: twelve numeric activity features.

    NOTE(review): field names here use underscores, while the commented-out
    training/UI code earlier in this file builds the feature columns with
    spaces and a hyphen (e.g. "course overview", "self-assessment").
    Confirm the scaler's expected feature names before passing these field
    names straight through as DataFrame columns.
    """
    course_overview: float
    reading_file: float
    abstract_materiale: float
    concrete_material: float
    visual_materials: float
    self_assessment: float
    exercises_submit: float
    quiz_submitted: float
    playing: float
    paused: float
    unstarted: float
    buffering: float

# Define the prediction endpoint
@app.post("/predict")
def predict(input_data: InputData):
    """
    Predict the four target labels (processing, perception, input,
    understanding) from one observation of the twelve activity features.

    Parameters
    ----------
    input_data : InputData
        The twelve numeric feature values for a single learner.

    Returns
    -------
    dict
        {"predictions": {target_name: predicted_label}} for the four targets.

    Raises
    ------
    HTTPException
        400 on a value/feature-name error from scaling or prediction,
        500 on any unexpected failure.
    """
    # API field names use underscores, but the scaler and models were fitted
    # on columns named with spaces and a hyphen (see the commented-out
    # training/Gradio code earlier in this file, which loads the same model
    # files). Rename so sklearn's feature-name validation accepts the frame.
    feature_columns = {
        "course_overview": "course overview",
        "reading_file": "reading file",
        "abstract_materiale": "abstract materiale",
        "concrete_material": "concrete material",
        "visual_materials": "visual materials",
        "self_assessment": "self-assessment",
        "exercises_submit": "exercises submit",
        "quiz_submitted": "quiz submitted",
        "playing": "playing",
        "paused": "paused",
        "unstarted": "unstarted",
        "buffering": "buffering",
    }
    try:
        # Pydantic v2 renamed .dict() to .model_dump(); support both.
        payload = getattr(input_data, "model_dump", input_data.dict)()
        input_df = pd.DataFrame([payload]).rename(columns=feature_columns)

        # Apply the same scaling used at training time.
        input_scaled = scaler.transform(input_df)

        # One prediction per target model. Convert numpy scalars to native
        # Python values so FastAPI's JSON encoder handles them reliably.
        predictions = {}
        for target, model in models.items():
            pred = model.predict(input_scaled)[0]
            predictions[target] = pred.item() if hasattr(pred, "item") else pred
        return {"predictions": predictions}

    except ValueError as ve:
        raise HTTPException(status_code=400, detail=f"Input value error: {ve}")
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Unexpected error: {e}")