import pandas as pd
import numpy as np
from sklearn.model_selection import train_test_split, cross_val_score, KFold
from sklearn.preprocessing import LabelEncoder, StandardScaler, PolynomialFeatures
from sklearn.ensemble import GradientBoostingRegressor, RandomForestRegressor
from sklearn.metrics import mean_squared_error, r2_score, mean_absolute_error
from sklearn.multioutput import MultiOutputRegressor
import joblib
import logging
import gradio as gr
from typing import Tuple, Dict, Any, Optional

# Import custom size/fit helper libraries
from libraries.fits.shirts_lib import get_fit as get_shirt_fit
from libraries.sizes.shirts_lib import get_best_size as get_shirt_size

# Setup logging
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

class EnhancedBodyMeasurementPredictor:
    def __init__(self):
        self.model = None
        self.scaler = None
        self.poly_features = None
        self.label_encoder = None
        self.y_columns = None
        self.feature_columns = None
        self.model_metrics = {}

    def create_polynomial_features(self, X: pd.DataFrame) -> np.ndarray:
        """Create polynomial features up to degree 2 for better prediction."""
        if self.poly_features is None:
            self.poly_features = PolynomialFeatures(degree=2, include_bias=False)
            return self.poly_features.fit_transform(X)
        return self.poly_features.transform(X)

    def preprocess_data(self, data: pd.DataFrame) -> Tuple[np.ndarray, pd.DataFrame]:
        """Preprocess the data with enhanced feature engineering."""
        # Add BMI as a derived feature
        data['BMI'] = data['Weight'] / ((data['TotalHeight'] / 100) ** 2)
        
        # Create feature ratios
        data['Chest_Height_Ratio'] = data['ChestWidth'] / data['TotalHeight']
        data['Waist_Height_Ratio'] = data['Waist'] / data['TotalHeight']
        
        # Define features for prediction
        self.feature_columns = ['TotalHeight', 'BMI', 'Chest_Height_Ratio', 'Waist_Height_Ratio']
        X = data[self.feature_columns]
        
        # Create polynomial features
        X_poly = self.create_polynomial_features(X)
        
        # Scale features
        if self.scaler is None:
            self.scaler = StandardScaler()
            X_scaled = self.scaler.fit_transform(X_poly)
        else:
            X_scaled = self.scaler.transform(X_poly)
        
        # Prepare target variables: everything not used as a feature
        # ('BMI' is already in feature_columns, so it is dropped here too)
        y = data.drop(columns=self.feature_columns)
        
        return X_scaled, y

    def train_model(self, data: pd.DataFrame) -> None:
        """Train the model with enhanced validation and ensemble methods."""
        logger.info("Starting model training...")
        
        # Preprocess data
        X_scaled, y = self.preprocess_data(data)
        self.y_columns = y.columns
        
        # Encode categorical variables
        self.label_encoder = LabelEncoder()
        y['Size'] = self.label_encoder.fit_transform(y['Size'])
        
        # Split data
        X_train, X_test, y_train, y_test = train_test_split(
            X_scaled, y, test_size=0.2, random_state=42
        )
        
        # Candidate base models; only the first is fitted below, the
        # random forest is kept as a drop-in alternative
        base_models = [
            GradientBoostingRegressor(
                n_estimators=100,
                learning_rate=0.1,
                max_depth=5,
                random_state=42
            ),
            RandomForestRegressor(
                n_estimators=100,
                max_depth=10,
                random_state=42
            )
        ]
        
        # Fit one gradient boosting regressor per target column
        self.model = MultiOutputRegressor(base_models[0])
        self.model.fit(X_train, y_train)
        
        # Evaluate model
        self._evaluate_model(X_test, y_test)
        
        logger.info("Model training completed")

    def _evaluate_model(self, X_test: np.ndarray, y_test: pd.DataFrame) -> None:
        """Evaluate model performance with multiple metrics."""
        y_pred = self.model.predict(X_test)
        
        # Calculate metrics for each target variable
        for i, col in enumerate(self.y_columns):
            self.model_metrics[col] = {
                'r2': r2_score(y_test.iloc[:, i], y_pred[:, i]),
                'mse': mean_squared_error(y_test.iloc[:, i], y_pred[:, i]),
                'mae': mean_absolute_error(y_test.iloc[:, i], y_pred[:, i])
            }
        
        # Log evaluation results
        logger.info("Model Evaluation Results:")
        for col, metrics in self.model_metrics.items():
            logger.info(f"{col}: R² = {metrics['r2']:.4f}, MAE = {metrics['mae']:.4f}")

    def predict(self, total_height: float, weight: Optional[float] = None) -> Dict[str, Any]:
        """Predict body measurements from height (and optionally weight)."""
        # Prepare input features
        input_data = pd.DataFrame({
            'TotalHeight': [total_height],
            'Weight': [weight if weight is not None else 0]  # zero weight yields BMI 0 below
        })
        
        # Calculate BMI and other derived features
        input_data['BMI'] = (
            input_data['Weight'] / ((input_data['TotalHeight'] / 100) ** 2)
            if weight is not None else 0
        )
        
        # The ratio features depend on measurements we are trying to predict,
        # so they are zero-filled here; see predict_refined below for an
        # iterative refinement sketch
        input_data['Chest_Height_Ratio'] = 0
        input_data['Waist_Height_Ratio'] = 0
        
        # Transform features
        X_poly = self.create_polynomial_features(input_data[self.feature_columns])
        X_scaled = self.scaler.transform(X_poly)
        
        # Make prediction
        prediction = self.model.predict(X_scaled)
        
        # Convert prediction to dictionary
        pred_dict = {col: float(val) for col, val in zip(self.y_columns, prediction[0])}
        
        # Decode size back to the original labels, clamping the rounded
        # regression output to the encoder's valid index range
        size_idx = int(np.clip(round(pred_dict['Size']), 0, len(self.label_encoder.classes_) - 1))
        pred_dict['Size'] = self.label_encoder.inverse_transform([size_idx])[0]
        
        return pred_dict
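
    def predict_refined(self, total_height: float, weight: Optional[float] = None,
                        n_iterations: int = 2) -> Dict[str, Any]:
        """Hypothetical refinement sketch: the ratio features are zero-filled on
        the first pass, so re-derive them from the predicted chest/waist values
        and predict again. The iteration count is an arbitrary assumption."""
        pred = self.predict(total_height, weight)
        for _ in range(n_iterations - 1):
            input_data = pd.DataFrame({
                'TotalHeight': [total_height],
                'BMI': [weight / ((total_height / 100) ** 2) if weight is not None else 0],
                'Chest_Height_Ratio': [pred['ChestWidth'] / total_height],
                'Waist_Height_Ratio': [pred['Waist'] / total_height],
            })
            X_scaled = self.scaler.transform(
                self.poly_features.transform(input_data[self.feature_columns])
            )
            prediction = self.model.predict(X_scaled)
            pred = {col: float(val) for col, val in zip(self.y_columns, prediction[0])}
            size_idx = int(np.clip(round(pred['Size']), 0, len(self.label_encoder.classes_) - 1))
            pred['Size'] = self.label_encoder.inverse_transform([size_idx])[0]
        return pred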

# Initialize predictor as a global variable
predictor = EnhancedBodyMeasurementPredictor()

def gradio_predict(total_height: float, weight: Optional[float] = None):
    result = predictor.predict(total_height, weight)
    return result

def gradio_predict_important(total_height: float, weight: Optional[float] = None, fit_type_input: Optional[str] = None):
    prediction = predictor.predict(total_height, weight)
    
    try:
        brand = "Zara"  # Default brand
        chest = float(prediction.get("ChestWidth"))
        waist = float(prediction.get("Waist"))
        shoulder = float(prediction.get("ShoulderWidth"))

        recommended_size, size_details = get_shirt_size(
            brand, int(round(chest)), int(round(waist)), int(round(shoulder))
        )

        # An empty Gradio textbox arrives as "", not None, so treat any
        # falsy value as "not provided"
        computed_fit = (
            fit_type_input if fit_type_input
            else get_shirt_fit(shoulder, chest, waist)
        )

        response = {
            "Brand": brand,
            "RecommendedSize": recommended_size,
            "SizeDetails": size_details,
            "Fit": computed_fit,
            "PredictedMeasurements": prediction
        }
        
        return response
        
    except (TypeError, ValueError) as e:
        return {"error": f"Error in size/fit calculation: {str(e)}"}

# Load dataset and train the model
try:
    data = pd.read_csv("./data/bdm.csv")
    data = data.dropna()
    predictor.train_model(data)
    logger.info("Model initialization completed successfully")
except Exception as e:
    logger.error(f"Error during model initialization: {str(e)}")
    raise
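
# Optional persistence helpers: a sketch using the otherwise unused joblib
# import; the "./models/predictor.joblib" default path is an assumption.
def save_predictor(path: str = "./models/predictor.joblib") -> None:
    joblib.dump(predictor, path)

def load_predictor(path: str = "./models/predictor.joblib") -> None:
    global predictor
    predictor = joblib.load(path)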

# Create Gradio interfaces
predict_interface = gr.Interface(
    fn=gradio_predict, 
    inputs=[
        gr.Number(label="Total Height (cm)"), 
        gr.Number(label="Weight (kg)")
    ], 
    outputs="json",
    title="Body Measurement Prediction"
)

predict_important_interface = gr.Interface(
    fn=gradio_predict_important, 
    inputs=[
        gr.Number(label="Total Height (cm)"), 
        gr.Number(label="Weight (kg)"),
        gr.Textbox(label="Fit Type")
    ], 
    outputs="json",
    title="Important Body Measurement Prediction"
)

# Launch Gradio app
gr.TabbedInterface(
    [predict_interface, predict_important_interface], 
    ["Predict", "Predict Important"]
).launch()