Aye10032 committed on
Commit 59d0cdb · 1 Parent(s): 1d99022
Files changed (2)
  1. app.py +3 -3
  2. top5_error_rate.py +2 -0
app.py CHANGED
@@ -13,7 +13,7 @@ def compute(data):
     print(data)
     # return metric.compute()
     result = {
-        "predictions": [list(map(int, pred.split(","))) for pred in data["predictions"]],
+        "predictions": [list(map(float, pred.split(","))) for pred in data["predictions"]],
         "references": data["references"].cast(pl.Int64).to_list()
     }
     print(result)
@@ -23,8 +23,8 @@ def compute(data):
 local_path = Path(sys.path[0])
 
 default_value = pl.DataFrame({
-    'predictions': ['1,2,3,4,5', '1,2,3,4,5', '1,2,3,4,5'],
-    'references': ['0', '1', '2']
+    'predictions': ['0.82,0.95,0.6,0.14,0.15,0.70', '0.67,0.31,0.01,0.60,0.44,0.51', '0.57,0.06,0.69,0.07,0.96,0.72'],
+    'references': ['1', '3', '4']
 })
 
 iface = gr.Interface(
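For context, this change swaps the parsed predictions from integer class indices to per-class float scores, matching the new default_value rows. A minimal sketch of the parsing step, using hypothetical example data in the same shape as the new defaults (not the Space's actual inputs):

import polars as pl

# Hypothetical example data mirroring the shape of the new default_value.
data = pl.DataFrame({
    "predictions": ["0.82,0.95,0.6,0.14,0.15,0.70", "0.67,0.31,0.01,0.60,0.44,0.51"],
    "references": ["1", "3"],
})

# Each prediction string is split into a list of per-class float scores;
# references are cast from strings to integer class labels.
result = {
    "predictions": [list(map(float, pred.split(","))) for pred in data["predictions"]],
    "references": data["references"].cast(pl.Int64).to_list(),
}
print(result)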
top5_error_rate.py CHANGED
@@ -66,6 +66,8 @@ class Top5ErrorRate(evaluate.Metric):
         # to numpy array
         outputs = np.array(predictions)
         labels = np.array(references)
+        print(outputs)
+        print(labels)
 
         # Top-1 ACC
         pred = outputs.argmax(axis=1)
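For reference, the metric works on one row of class scores per sample (hence the switch to float parsing above) and integer labels. A minimal sketch of how Top-1 and Top-5 error rates can be derived from such arrays with NumPy; everything beyond the outputs and labels arrays is illustrative and not taken from the module:

import numpy as np

# Hypothetical inputs: per-class scores for each sample, plus integer labels.
outputs = np.array([[0.82, 0.95, 0.6, 0.14, 0.15, 0.70],
                    [0.67, 0.31, 0.01, 0.60, 0.44, 0.51]])
labels = np.array([1, 3])

# Top-1: the single highest-scoring class must equal the label.
top1_pred = outputs.argmax(axis=1)
top1_error = 1.0 - (top1_pred == labels).mean()

# Top-5: the label must appear among the five highest-scoring classes.
top5_pred = np.argsort(outputs, axis=1)[:, -5:]
top5_error = 1.0 - np.any(top5_pred == labels[:, None], axis=1).mean()

print(top1_error, top5_error)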