Commit 5292003
Parent(s): 5307707
Finalized testing

Files changed:
- app.py +66 -45
- prediction_data_prepped.csv +15 -0
app.py
CHANGED
@@ -22,15 +22,13 @@ def load_model():
 
         MODEL = ort.InferenceSession(str(model_path))
 
-        #
-
-
-
-
-        FEATURE_COLUMNS = pd.read_csv("prediction_data_prepped.csv").columns.tolist()
-    except:
-        return "Error: Could not find prediction data files to determine feature structure"
+        # Use the known list of features
+        FEATURE_COLUMNS = ['mmr', 'p1', 'p2', 'p3', 'p4', 'p5', 'count', 'mean', 'std', 'min', 'max',
+                           'sum', 'total_games_played', 'total_winrate'] + \
+                          [f'games_{i}' for i in range(1, 139)] + \
+                          [f'winrate_{i}' for i in range(1, 139)]
 
+        print(f"Number of features loaded: {len(FEATURE_COLUMNS)}")
         return "Model loaded successfully"
     except Exception as e:
         return f"Error loading model: {str(e)}"
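The replacement FEATURE_COLUMNS list is static: 14 scalar features plus games_ and winrate_ entries for hero IDs 1-138, i.e. 290 names in total. Since the ONNX session is already loaded at this point, that count can be sanity-checked against the model's declared input shape. A minimal sketch, assuming MODEL takes a single 2-D float input of shape [batch, n_features]; the helper name is hypothetical:

import onnxruntime as ort

def check_feature_count(model: ort.InferenceSession, feature_columns: list) -> None:
    # The first (and assumed only) input of the graph carries the feature vector.
    model_input = model.get_inputs()[0]
    n_expected = model_input.shape[-1]  # last dim = feature count; may be symbolic (a string)
    if isinstance(n_expected, int) and n_expected != len(feature_columns):
        raise ValueError(
            f"Model expects {n_expected} features, but FEATURE_COLUMNS has {len(feature_columns)}"
        )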
@@ -38,69 +36,92 @@ def load_model():
 def process_player_data(player_id, mmr, comf_1, comf_2, comf_3, comf_4, comf_5):
     """Process player data similar to training pipeline"""
     try:
-        # Define expected columns based on the model's requirements
-        expected_columns = ['mmr', 'p1', 'p2', 'p3', 'p4', 'p5', 'count', 'mean', 'std', 'min']
-        # Add hero-specific columns
-        for hero_id in range(1, 139):  # Based on max hero ID 138 from your data
-            if hero_id in [139, 140, 141, 142, 143, 144]:  # Skip any known gaps
-                continue
-            expected_columns.extend([f'games_{hero_id}', f'winrate_{hero_id}'])
-
-        print(f"\nExpected columns: {len(expected_columns)}")
-
         # Clean player ID from URL if needed
         if "/" in player_id:
             player_id = player_id.split("/")[-1]
 
         # Create initial player series
         player_data = {
+            "player_id": player_id,
             "mmr": float(mmr),
             "p1": int(comf_1),
             "p2": int(comf_2),
             "p3": int(comf_3),
             "p4": int(comf_4),
-            "p5": int(comf_5)
-            "count": 0,
-            "mean": 0,
-            "std": 0,
-            "min": 0
+            "p5": int(comf_5)
         }
 
+        # Read the example row from prediction_data_prepped.csv to get the expected structure
+        try:
+            pred_data = pd.read_csv("prediction_data_prepped.csv")
+            if not pred_data.empty:
+                # Get column structure from the first row
+                for col in pred_data.columns:
+                    if col not in player_data:
+                        player_data[col] = 0
+        except Exception as e:
+            print(f"Warning - Error reading prediction data template: {str(e)}")
+
         # Get hero statistics using OpenDota API
         try:
             hero_stats = hero_information(player_id)
-
+            player_data.update(hero_stats.to_dict())
+
+            # Add season identifier to match training data format
+            player_season = f"{player_id}_S34"  # Assuming current season is 34
+            temp_dict = {}
+            temp_dict[player_season] = 1.0  # Set current season flag to 1.0
+            player_data.update(temp_dict)
 
-            # Add hero-specific stats
-            for col in expected_columns:
-                if col.startswith('games_') or col.startswith('winrate_'):
-                    if col not in hero_data:
-                        player_data[col] = 0
-                    else:
-                        player_data[col] = hero_data[col]
-
         except Exception as e:
             print(f"Warning - Error fetching hero data: {str(e)}")
-            # If hero stats fail, add placeholder values
-
-
-
-
-
+            # If hero stats fail, add placeholder values
+            player_data.update({
+                "total_games_played": 0,
+                "total_winrate": 0.0
+            })
+
+        # Convert to DataFrame for consistency with training
         df = pd.DataFrame([player_data])
 
-        #
-
-
-
-
+        # Load reference data structure if available
+        try:
+            ref_data = pd.read_csv("result_prediction_data_prepped.csv")
+            if not ref_data.empty:
+                # Get all columns from reference data
+                for col in ref_data.columns:
+                    if col not in df.columns:
+                        df[col] = 0
+                # Reorder columns to match reference data
+                df = df[ref_data.columns]
+        except Exception as e:
+            print(f"Warning - Error matching reference data structure: {str(e)}")
+
+        # Load the expected columns from your prediction data
+        pred_data = pd.read_csv("prediction_data_prepped.csv")
+        expected_columns = pred_data.columns.tolist()
+
+        # Debug print
+        print(f"\nNumber of expected columns: {len(expected_columns)}")
+        print(f"Number of current columns: {len(df.columns)}")
+
+        # Find missing columns
+        missing_columns = [col for col in expected_columns if col not in df.columns]
+        extra_columns = [col for col in df.columns if col not in expected_columns]
+
+        print(f"\nMissing columns: {missing_columns}")
+        print(f"Extra columns: {extra_columns}")
+
+        # Ensure all expected columns exist
+        for col in expected_columns:
+            if col not in df.columns:
                 df[col] = 0
 
-        #
+        # Remove any extra columns
         df = df[expected_columns]
 
         print(f"\nFinal number of columns: {len(df.columns)}")
-        print(f"
+        print(f"First few columns: {list(df.columns)[:5]}")
 
         return df
     except Exception as e:
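The column-alignment block at the end of the hunk (report missing and extra columns, add the missing ones as 0, then reorder with df = df[expected_columns]) can be collapsed into a single pandas call. A sketch of the equivalent step, assuming expected_columns already holds the feature names in the order the model was trained on:

import pandas as pd

def align_to_features(df: pd.DataFrame, expected_columns: list) -> pd.DataFrame:
    # Adds any missing expected columns filled with 0, drops columns not in the
    # expected list, and returns the frame with columns in the expected order.
    return df.reindex(columns=expected_columns, fill_value=0)

The debug prints for missing_columns and extra_columns can stay as they are; reindex only replaces the add-and-reorder loop.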
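hero_information is not part of this diff, so only its call site is visible: it must return something with a .to_dict() method and is expected to supply the games_<id>, winrate_<id>, total_games_played and total_winrate features. A plausible sketch of such a helper against OpenDota's public GET /api/players/{account_id}/heroes endpoint, returning a pandas Series; everything below is an assumption about the helper, not the repository's actual implementation:

import requests
import pandas as pd

def hero_information(player_id: str) -> pd.Series:
    # Per-hero games and wins for the account, as reported by OpenDota.
    resp = requests.get(
        f"https://api.opendota.com/api/players/{player_id}/heroes", timeout=10
    )
    resp.raise_for_status()
    heroes = resp.json()  # list of dicts with 'hero_id', 'games', 'win', ...

    stats = {}
    total_games = 0
    total_wins = 0
    for hero in heroes:
        hero_id = int(hero["hero_id"])
        games = hero["games"]
        wins = hero["win"]
        stats[f"games_{hero_id}"] = games
        stats[f"winrate_{hero_id}"] = wins / games if games else 0.0
        total_games += games
        total_wins += wins

    stats["total_games_played"] = total_games
    stats["total_winrate"] = total_wins / total_games if total_games else 0.0
    return pd.Series(stats)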
prediction_data_prepped.csv
ADDED
@@ -0,0 +1,15 @@
,188649776_S34,917074310_S34,99929152_S34,143663588_S34,101387753_S34,40436072_S34,87992033_S34,153864932_S34,159164400_S34,67028556_S34,92001890_S34,130527149_S34,120052382_S34,101647591_S34,167829403_S34,240889153_S34,57011991_S34,45626568_S34,64041417_S34,477791331_S34,65495278_S34,170539030_S34,90793653_S34,152985237_S34,118858955_S34,177129466_S34,75864841_S34,108050692_S34,45226038_S34,74883563_S34,16710765_S34,83833103_S34,84060273_S34,57880458_S34,110119494_S34,519770_S34
cost,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
player_id,188649776,917074310,99929152,143663588,101387753,40436072,87992033,153864932,159164400,67028556,92001890,130527149,120052382,101647591,167829403,240889153,57011991,45626568,64041417,477791331,65495278,170539030,90793653,152985237,118858955,177129466,75864841,108050692,45226038,74883563,16710765,83833103,84060273,57880458,110119494,519770
mmr,6812,6492,6358,6050,6000,6000,5886,5714,5701,5698,5685,5640,5538,5431,5420,5307,5200,5164,4997,4876,4650,4630,4627,4627,4580,4560,4500,4251,4181,4124,3777,3754,3408,2889,2621,1144
p1,5,5,3,3,4,5,4,1,3,4,4,1,5,4,5,5,1,1,1,5,1,5,2,1,1,1,3,2,1,2,3,1,1,4,1,1
p2,5,2,5,5,3,1,5,3,2,2,5,1,1,2,5,2,1,1,2,2,2,4,2,1,1,1,3,2,1,5,5,1,1,4,1,1
p3,4,5,4,4,2,1,3,5,5,5,4,1,5,5,5,5,2,2,3,4,4,3,5,1,1,2,3,3,2,3,5,1,4,4,2,2
p4,2,5,1,2,3,1,3,4,3,4,2,5,5,3,5,5,4,5,4,4,5,3,4,2,5,4,3,5,4,2,2,1,4,5,3,4
p5,1,5,1,2,2,1,3,4,4,5,2,5,5,2,5,1,5,5,5,2,5,2,4,3,5,5,3,3,4,3,2,5,3,2,3,5
count,9.0,9.0,9.0,9.0,9.0,9.0,9.0,9.0,9.0,9.0,9.0,9.0,9.0,9.0,9.0,9.0,9.0,9.0,9.0,9.0,9.0,9.0,9.0,9.0,9.0,9.0,9.0,9.0,9.0,9.0,9.0,9.0,9.0,9.0,9.0,9.0
mean,489.3333333333333,489.3333333333333,489.3333333333333,489.3333333333333,489.3333333333333,489.3333333333333,489.3333333333333,489.3333333333333,489.3333333333333,489.3333333333333,489.3333333333333,489.3333333333333,489.3333333333333,489.3333333333333,489.3333333333333,489.3333333333333,489.3333333333333,489.3333333333333,489.3333333333333,489.3333333333333,489.3333333333333,489.3333333333333,489.3333333333333,489.3333333333333,489.3333333333333,489.3333333333333,489.3333333333333,489.3333333333333,489.3333333333333,489.3333333333333,489.3333333333333,489.3333333333333,489.3333333333333,489.3333333333333,489.3333333333333,489.3333333333333
std,77.4483698989204,77.4483698989204,77.4483698989204,77.4483698989204,77.4483698989204,77.4483698989204,77.4483698989204,77.4483698989204,77.4483698989204,77.4483698989204,77.4483698989204,77.4483698989204,77.4483698989204,77.4483698989204,77.4483698989204,77.4483698989204,77.4483698989204,77.4483698989204,77.4483698989204,77.4483698989204,77.4483698989204,77.4483698989204,77.4483698989204,77.4483698989204,77.4483698989204,77.4483698989204,77.4483698989204,77.4483698989204,77.4483698989204,77.4483698989204,77.4483698989204,77.4483698989204,77.4483698989204,77.4483698989204,77.4483698989204,77.4483698989204
min,352.0,352.0,352.0,352.0,352.0,352.0,352.0,352.0,352.0,352.0,352.0,352.0,352.0,352.0,352.0,352.0,352.0,352.0,352.0,352.0,352.0,352.0,352.0,352.0,352.0,352.0,352.0,352.0,352.0,352.0,352.0,352.0,352.0,352.0,352.0,352.0
max,593.0,593.0,593.0,593.0,593.0,593.0,593.0,593.0,593.0,593.0,593.0,593.0,593.0,593.0,593.0,593.0,593.0,593.0,593.0,593.0,593.0,593.0,593.0,593.0,593.0,593.0,593.0,593.0,593.0,593.0,593.0,593.0,593.0,593.0,593.0,593.0
sum,4404.0,4404.0,4404.0,4404.0,4404.0,4404.0,4404.0,4404.0,4404.0,4404.0,4404.0,4404.0,4404.0,4404.0,4404.0,4404.0,4404.0,4404.0,4404.0,4404.0,4404.0,4404.0,4404.0,4404.0,4404.0,4404.0,4404.0,4404.0,4404.0,4404.0,4404.0,4404.0,4404.0,4404.0,4404.0,4404.0
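Note that prediction_data_prepped.csv is stored transposed relative to a model input frame: the feature names (cost, player_id, mmr, p1-p5, count, mean, std, min, max, sum) run down the first column, and each <player_id>_S34 identifier is a column, matching the player_season one-hot flag set in process_player_data. Read as-is, pd.read_csv(...).columns.tolist() therefore yields player-season IDs rather than feature names. A sketch of reading it as a feature template, assuming that is how the file is meant to be consumed:

import pandas as pd

# Feature names become the index, player-season IDs the columns; transposing gives
# one row per player-season with the features as columns.
prepped = pd.read_csv("prediction_data_prepped.csv", index_col=0).T
feature_columns = prepped.columns.tolist()
print(f"{len(feature_columns)} template features: {feature_columns[:5]} ...")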