kbberendsen committed on
Commit
850b42f
·
1 Parent(s): 1c12a50

update support texts and light gbm model names

Browse files
dashboard/app.py CHANGED
@@ -44,8 +44,8 @@ app_ui = ui.page_fluid(
44
  ui.markdown("**Ranking**"),
45
  ui.input_select(
46
  "model_select_rank", "Select ranking model:",
47
- choices = ["Light XGBM 1", "Light XGBM 2", "Light XGBM 3"],
48
- selected = "Light XGBM 3"
49
  ),
50
  ui.input_select(
51
  "rank_cutoff_select", "Select minimum prediction score (%):",
 
44
  ui.markdown("**Ranking**"),
45
  ui.input_select(
46
  "model_select_rank", "Select ranking model:",
47
+ choices = ["Light GBM 1", "Light GBM 2", "Light GBM 3"],
48
+ selected = "Light GBM 3"
49
  ),
50
  ui.input_select(
51
  "rank_cutoff_select", "Select minimum prediction score (%):",
dashboard/modules/ranking.py CHANGED
@@ -154,11 +154,11 @@ def rank_campaign(CAMPAIGN_ID, ranker=lgbm_model_3, rank_cutoff=50):
154
 
155
  # Rank single lead
156
  def rank_single_lead(CAMPAIGN_ID, LEAD_ID, rank_cutoff=50, ranker=lgbm_model_3):
157
- if ranker == "Light XGBM 1":
158
  ranker = lgbm_model_1
159
- elif ranker == "Light XGBM 2":
160
  ranker = lgbm_model_2
161
- elif ranker == "Light XGBM 3":
162
  ranker = lgbm_model_3
163
 
164
  # Selecting single lead data and combine text columns used for ranking
 
154
 
155
  # Rank single lead
156
  def rank_single_lead(CAMPAIGN_ID, LEAD_ID, rank_cutoff=50, ranker=lgbm_model_3):
157
+ if ranker == "Light GBM 1":
158
  ranker = lgbm_model_1
159
+ elif ranker == "Light GBM 2":
160
  ranker = lgbm_model_2
161
+ elif ranker == "Light GBM 3":
162
  ranker = lgbm_model_3
163
 
164
  # Selecting single lead data and combine text columns used for ranking
dashboard/modules/support_texts.py CHANGED
@@ -61,7 +61,7 @@ ranking_intro_3 = '''
61
  '''
62
 
63
  models_test_ranking_1 = '''
64
- The ranking models are based on LightGMB, which is a gradient boosting framework that uses tree based learning algorithms. From this framework, models are created using the LGBMRanker. Here, the objective
65
  parameter is set to 'lambdarank' and the boosting type parameter is set to 'Gradient Boosting Decision Tree'. The combination of these two parameters mean that the models use a lambdaMART algorithm. The models
66
  available in this dashboard are described below. More information about, for example, the specific training parameters can be found in the source code.
67
  <br>
 
61
  '''
62
 
63
  models_test_ranking_1 = '''
64
+ The ranking models are based on LightGBM, which is a gradient boosting framework that uses tree based learning algorithms. From this framework, models are created using the LGBMRanker. Here, the objective
65
  parameter is set to 'lambdarank' and the boosting type parameter is set to 'Gradient Boosting Decision Tree'. The combination of these two parameters mean that the models use a lambdaMART algorithm. The models
66
  available in this dashboard are described below. More information about, for example, the specific training parameters can be found in the source code.
67
  <br>