kpfadnis committed
Commit bac04f0 · Parent: 8bab137

feat (metric behavior): Allow user to drill down in metric behavior view. (#12)

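At a high level, the drill-down added here is wired through a new onTaskSelection callback: clicking a cell in the metric-behavior heatmap narrows the evaluations to that cell's value range, renders them in a tasks table below the chart, and clicking a table row reports the task id back to the parent view. A minimal sketch of that wiring, using the prop names visible in the diff below; the surrounding component, its data shape, and the loose typings are illustrative assumptions, not the actual Example.tsx.

// Sketch only: how a parent view might consume MetricBehavior's new onTaskSelection prop.
import { useState } from 'react';
import MetricBehavior from '@/src/views/metric-behavior/MetricBehavior';

export default function ParentSketch({ data }: { data: any }) {
  // The task the user drilled into via the metric-behavior tasks table.
  const [selectedTaskId, setSelectedTaskId] = useState<string | undefined>();

  return selectedTaskId ? (
    // The parent decides what drilling in means; here it just shows the id.
    <div>Selected task: {selectedTaskId}</div>
  ) : (
    <MetricBehavior
      {...data.metricBehaviorProps} // placeholder for the view's other required props
      // New prop from this commit: called with the clicked row's task id.
      onTaskSelection={(taskId: string) => setSelectedTaskId(taskId)}
    />
  );
}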
src/processor.ts CHANGED
@@ -344,6 +344,7 @@ export function exportData(
   let dataToExport: RawData = {
     name: data.name,
     ...(data.exampleId && { exampleId: data.exampleId }),
+    ...(data.filters && { filters: data.filters }),
     models: data.models,
     metrics: data.metrics,
     ...(data.documents && {
@@ -403,6 +404,7 @@ export function exportData(
     dataToExport = {
       name: data.name,
       ...(data.exampleId && { exampleId: data.exampleId }),
+      ...(data.filters && { filters: data.filters }),
       models: data.models,
       metrics: data.metrics,
       ...(relevantDocuments.size !== 0 && {
@@ -426,6 +428,7 @@ export function exportData(
     dataToExport = {
       name: data.name,
       ...(data.exampleId && { exampleId: data.exampleId }),
+      ...(data.filters && { filters: data.filters }),
       models: data.models,
       metrics: data.metrics,
       ...(data.documents && {
src/views/data-characteristics/DataCharacteristics.tsx CHANGED
@@ -295,9 +295,9 @@ async function computeStatistics(
   // ===================================================================================
   // RENDER FUNCTIONS
   // ===================================================================================
-  function SkeletonGraphs({ key }: { key: string }) {
+  function SkeletonGraphs({ keyValue }: { keyValue: string }) {
     return (
-      <div key={key} className={classes.row}>
+      <div key={keyValue} className={classes.row}>
         <ButtonSkeleton className={classes.viewBtn} />
         &nbsp;
         <div className={classes.graphsGrid}>
@@ -711,8 +711,8 @@ export default function DataCharacteristics({ tasks, filters }: Props) {
     <div className={classes.page}>
       {loading ? (
         <div className={classes.row}>
-          <SkeletonGraphs key={'skeleton-graphs-1'} />
-          <SkeletonGraphs key={'skeleton-graphs-2'} />
+          <SkeletonGraphs keyValue={'skeleton-graphs-1'} />
+          <SkeletonGraphs keyValue={'skeleton-graphs-2'} />
         </div>
       ) : (
         <>
src/views/example/Example.tsx CHANGED
@@ -230,7 +230,7 @@ export default memo(function Example({ data }: { data: Data }) {
     ];
   }, [data.evaluations, data.tasks, data.models, eligibleMetricsMap]);
 
-  const { } = useBackButton();
+  const {} = useBackButton();
 
   // Step 3: Return
   return (
@@ -331,9 +331,7 @@ export default memo(function Example({ data }: { data: Data }) {
         <TabPanel key={'model-comparator-panel'}>
           {data.models.length == 1 ? (
             <DisabledTab
-              message={
-                'Nothing to see here in absence of multiple models.'
-              }
+              message={'Nothing to see here in absence of multiple models.'}
             />
           ) : (
             <ModelComparator
@@ -356,6 +354,9 @@ export default memo(function Example({ data }: { data: Data }) {
              models={data.models}
              metrics={eligibleMetrics}
              filters={{}}
+              onTaskSelection={(taskId) => {
+                setSelectedTaskId(taskId);
+              }}
            ></MetricBehavior>
          )}
        </TabPanel>
src/views/metric-behavior/MetricBehavior.module.scss CHANGED
@@ -117,17 +117,9 @@
   color: $gray-40;
 }
 
-.table {
-  display: table;
-  border-collapse: separate;
-  border-spacing: 50px;
-}
-
-.tableRow {
-  display: table-row;
-}
-
-.tableCell {
-  display: table-cell;
-  width: 600px;
+.tasksTableContainer {
+  margin: $spacing-05 0;
+  display: flex;
+  flex-direction: column;
+  row-gap: $spacing-03;
 }
src/views/metric-behavior/MetricBehavior.tsx CHANGED
@@ -19,8 +19,10 @@
 'use client';
 
 import { isEmpty } from 'lodash';
-import { useState, useMemo, useEffect, memo } from 'react';
+import { useState, useMemo, useEffect, useRef, memo } from 'react';
+
 import { WarningAlt } from '@carbon/icons-react';
+import { FilterableMultiSelect, Tag } from '@carbon/react';
 import { HeatmapChart } from '@carbon/charts-react';
 import { ColorLegendType, ScaleTypes } from '@carbon/charts';
 
@@ -38,12 +40,11 @@ import { areObjectsIntersecting } from '@/src/utilities/objects';
 
 import Filters from '@/src/components/filters/Filters';
 import MetricSelector from '@/src/components/selectors/MetricSelector';
+import TasksTable from '@/src/views/tasks-table/TasksTable';
 
 import '@carbon/charts-react/styles.css';
 import classes from './MetricBehavior.module.scss';
 
-import { FilterableMultiSelect, Tag } from '@carbon/react';
-
 // ===================================================================================
 // TYPES
 // ===================================================================================
@@ -52,6 +53,7 @@ interface Props {
   models: Model[];
   metrics: Metric[];
   filters: { [key: string]: string[] };
+  onTaskSelection: Function;
 }
 
 // ===================================================================================
@@ -147,6 +149,13 @@ function calculateCorrelation(
   return correlationMap;
 }
 
+/**
+ * Calculate overlap matrix between metric values
+ * @param evaluationsPerMetric
+ * @param metricA
+ * @param metricB
+ * @returns
+ */
 function calculateOverlap(
   evaluationsPerMetric: { [key: string]: TaskEvaluation[] },
   metricA: Metric,
@@ -196,7 +205,35 @@ function calculateOverlap(
       console.log(`Check the data in evaluation B for taskId ${taskId}`);
     }
   });
-  return overlapMap;
+
+  // Step 4: Sorted MxN matrix (where M and N are ranges of metricA and metricB)
+  const sortedOverlapMap: { [key: string]: { [key: string]: number } } = {};
+  // Only sort keys when metric is of 'numerical' type
+  if (metricA.type === 'numerical') {
+    Object.keys(overlapMap)
+      .sort()
+      .forEach((a) => {
+        if (metricB.type === 'numerical') {
+          sortedOverlapMap[a] = Object.fromEntries(
+            Object.keys(overlapMap[a])
+              .sort()
+              .map((b) => [b, overlapMap[a][b]]),
+          );
+        } else {
+          sortedOverlapMap[a] = overlapMap[a];
+        }
+      });
+  } else {
+    Object.entries(overlapMap).forEach(([a, b]) => {
+      sortedOverlapMap[a] = Object.fromEntries(
+        Object.keys(b)
+          .sort()
+          .map((key) => [key, b[key]]),
+      );
+    });
+  }
+
+  return sortedOverlapMap;
 }
 
 function sortMetricAggregatedValues(values: string[], metric: Metric) {
@@ -236,14 +273,16 @@ export function prepareHeatMapData(
   let count: number = 0;
   sortedMetricAVals.forEach((metricValA) => {
     sortedMetricBVals.forEach((metricValB) => {
-      temp.push({
-        metricA: extractMetricDisplayValue(metricValA, metricA.values),
-        metricB: extractMetricDisplayValue(metricValB, metricB.values),
-        value: heatMap[metricValA][metricValB]
-          ? heatMap[metricValA][metricValB]
-          : 0,
-      });
-      count += heatMap[metricValA][metricValB];
+      if (heatMap[metricValA][metricValB]) {
+        temp.push({
+          metricA: extractMetricDisplayValue(metricValA, metricA.values),
+          metricB: extractMetricDisplayValue(metricValB, metricB.values),
+          value: heatMap[metricValA][metricValB]
+            ? heatMap[metricValA][metricValB]
+            : 0,
+        });
+        count += heatMap[metricValA][metricValB];
+      }
     });
   });
 
@@ -256,6 +295,7 @@ export function prepareHeatMapData(
 
     return temp2;
   }
+
   return temp;
 }
 
@@ -267,6 +307,7 @@ export default memo(function MetricBehavior({
   models,
   metrics,
   filters,
+  onTaskSelection,
 }: Props) {
   // Step 1: Initialize state and necessary variables
   const [WindowWidth, setWindowWidth] = useState<number>(
@@ -276,15 +317,19 @@ export default memo(function MetricBehavior({
     global?.window && window.innerHeight,
   );
   const [selectedModels, setSelectedModels] = useState<Model[]>(models);
-  const [selectedMetricA, setSelectedMetricA] = useState<Metric | undefined>(
-    undefined,
-  );
-  const [selectedMetricB, setSelectedMetricB] = useState<Metric | undefined>(
-    undefined,
-  );
+  const [selectedMetricA, setSelectedMetricA] = useState<Metric | undefined>();
+  const [selectedMetricB, setSelectedMetricB] = useState<Metric | undefined>();
   const [selectedFilters, setSelectedFilters] = useState<{
     [key: string]: string[];
   }>({});
+  const [selectedMetricARange, setSelectedMetricARange] = useState<
+    number[] | string
+  >();
+  const [selectedMetricBRange, setSelectedMetricBRange] = useState<
+    number[] | string
+  >();
+  const tableRef = useRef(null);
+  const chartRef = useRef(null);
 
   // Step 2: Run effects
   // Step 2.a: Window resizing
@@ -341,6 +386,239 @@ export default memo(function MetricBehavior({
     }
   }, [filteredEvaluationsPerMetric, selectedMetricA, selectedMetricB]);
 
+  // Step 2.e: Reset ranges on selected metric change
+  useEffect(() => {
+    setSelectedMetricARange(undefined);
+    setSelectedMetricBRange(undefined);
+  }, [selectedMetricA, selectedMetricB]);
+
+  // Step 2.f: Identify visible evaluations
+  const visibleEvaluations: TaskEvaluation[] = useMemo(() => {
+    if (selectedMetricA && selectedMetricB) {
+      // Step 1: Initialize necessary variables
+      const selectedModelIds = selectedModels.map((model) => model.modelId);
+
+      const evaluationsPerTask: { [key: string]: TaskEvaluation } = {};
+
+      // Step 2: Add eligible evaluations for 1st selected metric (A)
+      filteredEvaluationsPerMetric[selectedMetricA.name].forEach(
+        (evaluation) => {
+          if (selectedModelIds.includes(evaluation.modelId)) {
+            const UUID = `${evaluation.taskId}<::>${evaluation.modelId}`;
+            if (selectedMetricARange) {
+              if (typeof selectedMetricARange === 'string') {
+                if (
+                  extractMetricDisplayValue(
+                    evaluation[`${selectedMetricA.name}_agg`].value,
+                    selectedMetricA.values,
+                  ) === selectedMetricARange
+                ) {
+                  if (evaluationsPerTask.hasOwnProperty(UUID)) {
+                    evaluationsPerTask[UUID] = {
+                      ...evaluationsPerTask[UUID],
+                      [`${selectedMetricA.name}`]:
+                        evaluation[`${selectedMetricA.name}`],
+                      [`${selectedMetricA.name}_agg`]:
+                        evaluation[`${selectedMetricA.name}_agg`],
+                    };
+                  } else {
+                    evaluationsPerTask[UUID] = evaluation;
+                  }
+                }
+              } else if (Array.isArray(selectedMetricARange)) {
+                if (
+                  evaluation[`${selectedMetricA.name}_agg`].value >=
+                    selectedMetricARange[0] &&
+                  evaluation[`${selectedMetricA.name}_agg`].value <=
+                    selectedMetricARange[1]
+                ) {
+                  if (evaluationsPerTask.hasOwnProperty(UUID)) {
+                    evaluationsPerTask[UUID] = {
+                      ...evaluationsPerTask[UUID],
+                      [`${selectedMetricA.name}`]:
+                        evaluation[`${selectedMetricA.name}`],
+                      [`${selectedMetricA.name}_agg`]:
+                        evaluation[`${selectedMetricA.name}_agg`],
+                    };
+                  } else {
+                    evaluationsPerTask[UUID] = evaluation;
+                  }
+                }
+              }
+            } else {
+              if (evaluationsPerTask.hasOwnProperty(UUID)) {
+                evaluationsPerTask[UUID] = {
+                  ...evaluationsPerTask[UUID],
+                  [`${selectedMetricA.name}`]:
+                    evaluation[`${selectedMetricA.name}`],
+                  [`${selectedMetricA.name}_agg`]:
+                    evaluation[`${selectedMetricA.name}_agg`],
+                };
+              } else {
+                evaluationsPerTask[UUID] = evaluation;
+              }
+            }
+          }
+        },
+      );
+
+      // Step 2: Add eligible evaluations for 2nd selected metric (B)
+      filteredEvaluationsPerMetric[selectedMetricB.name].forEach(
+        (evaluation) => {
+          if (selectedModelIds.includes(evaluation.modelId)) {
+            const UUID = `${evaluation.taskId}<::>${evaluation.modelId}`;
+            if (selectedMetricBRange) {
+              if (typeof selectedMetricBRange === 'string') {
+                if (
+                  extractMetricDisplayValue(
+                    evaluation[`${selectedMetricB.name}_agg`].value,
+                    selectedMetricB.values,
+                  ) === selectedMetricBRange
+                ) {
+                  if (evaluationsPerTask.hasOwnProperty(UUID)) {
+                    evaluationsPerTask[UUID] = {
+                      ...evaluationsPerTask[UUID],
+                      [`${selectedMetricB.name}`]:
+                        evaluation[`${selectedMetricB.name}`],
+                      [`${selectedMetricB.name}_agg`]:
+                        evaluation[`${selectedMetricB.name}_agg`],
+                    };
+                  } else {
+                    evaluationsPerTask[UUID] = evaluation;
+                  }
+                }
+              } else if (Array.isArray(selectedMetricBRange)) {
+                if (
+                  evaluation[`${selectedMetricB.name}_agg`].value >=
+                    selectedMetricBRange[0] &&
+                  evaluation[`${selectedMetricB.name}_agg`].value <=
+                    selectedMetricBRange[1]
+                ) {
+                  if (evaluationsPerTask.hasOwnProperty(UUID)) {
+                    evaluationsPerTask[UUID] = {
+                      ...evaluationsPerTask[UUID],
+                      [`${selectedMetricB.name}`]:
+                        evaluation[`${selectedMetricB.name}`],
+                      [`${selectedMetricB.name}_agg`]:
+                        evaluation[`${selectedMetricB.name}_agg`],
+                    };
+                  } else {
+                    evaluationsPerTask[UUID] = evaluation;
+                  }
+                }
+              }
+            } else {
+              if (evaluationsPerTask.hasOwnProperty(UUID)) {
+                evaluationsPerTask[UUID] = {
+                  ...evaluationsPerTask[UUID],
+                  [`${selectedMetricB.name}`]:
+                    evaluation[`${selectedMetricB.name}`],
+                  [`${selectedMetricB.name}_agg`]:
+                    evaluation[`${selectedMetricB.name}_agg`],
+                };
+              } else {
+                evaluationsPerTask[UUID] = evaluation;
+              }
+            }
+          }
+        },
+      );
+
+      // Step 3: Only retain evaluation tasks where both metric values are available
+      return Object.values(evaluationsPerTask).filter(
+        (evaluation) =>
+          evaluation.hasOwnProperty(`${selectedMetricA.name}`) &&
+          evaluation.hasOwnProperty(`${selectedMetricA.name}_agg`) &&
+          evaluation.hasOwnProperty(`${selectedMetricB.name}`) &&
+          evaluation.hasOwnProperty(`${selectedMetricB.name}_agg`),
+      );
+    }
+    return [];
+  }, [
+    filteredEvaluationsPerMetric,
+    selectedModels,
+    selectedMetricA,
+    selectedMetricARange,
+    selectedMetricB,
+    selectedMetricBRange,
+  ]);
+
+  // Step 2.g: Add chart event
+  useEffect(() => {
+    // Step 2.g.i: Update function
+    function onClick(event) {
+      // Set range for 1st selected metric (A)
+      if (selectedMetricA?.type === 'numerical') {
+        if (event.detail.datum['metricA'].substring(1).includes('-')) {
+          const match = event.detail.datum['metricA'].match(
+            /^(-?\d*\.?\d*)-(-?\d*\.?\d*)$/,
+          );
+          setSelectedMetricARange([parseFloat(match[1]), parseFloat(match[2])]);
+        } else {
+          setSelectedMetricARange([
+            parseFloat(event.detail.datum['metricA']),
+            parseFloat(event.detail.datum['metricA']),
+          ]);
+        }
+      } else {
+        setSelectedMetricARange(event.detail.datum['metricA']);
+      }
+
+      // Set range for 2nd selected metric (B)
+      if (selectedMetricB?.type === 'numerical') {
+        if (event.detail.datum['metricB'].substring(1).includes('-')) {
+          const match = event.detail.datum['metricB'].match(
+            /^(-?\d*\.?\d*)-(-?\d*\.?\d*)$/,
+          );
+          setSelectedMetricBRange([parseFloat(match[1]), parseFloat(match[2])]);
+        } else {
+          setSelectedMetricBRange([
+            parseFloat(event.detail.datum['metricB']),
+            parseFloat(event.detail.datum['metricB']),
+          ]);
+        }
+      } else {
+        setSelectedMetricBRange(event.detail.datum['metricB']);
+      }
+    }
+
+    // Step 2.g.ii: Local copy of reference
+    let ref = null;
+
+    // Step 2.g.iii: Update reference and add event
+    if (chartRef && chartRef.current) {
+      ref = chartRef.current;
+
+      //@ts-ignore
+      ref.chart.services.events.addEventListener('heatmap-click', onClick);
+    }
+
+    // Step 2.g.iv: Cleanup function
+    return () => {
+      if (ref) {
+        //@ts-ignore
+        ref.chart.services.events.removeEventListener('heatmap-click', onClick);
+      }
+    };
+  }, [chartRef, selectedMetricA, selectedMetricB, metricToMetricOverlap]);
+
+  // Step 2.h: Scroll task table into view
+  useEffect(() => {
+    if (
+      selectedMetricARange &&
+      selectedMetricBRange &&
+      tableRef &&
+      tableRef.current
+    ) {
+      //@ts-ignore
+      tableRef.current.scrollIntoView({
+        behavior: 'smooth',
+        block: 'end',
+        inline: 'center',
+      });
+    }
+  }, [tableRef, selectedMetricARange, selectedMetricBRange]);
+
   // Step 3: Render
   return (
     <div className={classes.page}>
@@ -443,7 +721,6 @@ export default memo(function MetricBehavior({
               type: ColorLegendType.QUANTIZE,
             },
           },
-          experimental: false,
           width: Math.round(WindowWidth * 0.6) + 'px',
           height: Math.round(WindowHeight * 0.6) + 'px',
           toolbar: {
@@ -488,6 +765,7 @@ export default memo(function MetricBehavior({
             {extractMetricDisplayName(selectedMetricB)})
           </h4>
           <HeatmapChart
+            ref={chartRef}
            data={prepareHeatMapData(
              selectedMetricA,
              selectedMetricB,
@@ -524,6 +802,26 @@ export default memo(function MetricBehavior({
          </div>
        )
      ) : null}
+
+      {selectedMetricA && selectedMetricB && !isEmpty(visibleEvaluations) ? (
+        <div ref={tableRef} className={classes.tasksTableContainer}>
+          <h4>
+            Tasks<sup>*</sup>
+          </h4>
+
+          <TasksTable
+            metrics={[selectedMetricA, selectedMetricB]}
+            evaluations={visibleEvaluations}
+            models={selectedModels}
+            filters={filters}
+            onClick={onTaskSelection}
+          />
+          <span className={classes.tasksTableWarning}>
+            <sup>*</sup> Only tasks with aggregate scores in the selected range
+            are shown in the above table.
+          </span>
+        </div>
+      ) : null}
     </div>
   );
 });
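The chart wiring above (and the matching change in ModelComparator.tsx below) captures the chart ref in a local variable so the cleanup function removes the listener from the same chart instance it was attached to. A condensed sketch of that pattern, reusing the chart.services.events calls and the 'heatmap-click' event name exactly as they appear in the diff; the component, its props, and the loose typings are illustrative assumptions.

// Sketch only: subscribe/cleanup pattern for Carbon chart click events.
import { useEffect, useRef } from 'react';
import { HeatmapChart } from '@carbon/charts-react';

function ClickableHeatmap({ data, options, onCell }: any) {
  const chartRef = useRef<any>(null);

  useEffect(() => {
    // Copy the ref so cleanup targets the instance the listener was added to.
    const ref = chartRef.current;
    const onClick = (event: any) => onCell(event.detail.datum);

    if (ref) {
      ref.chart.services.events.addEventListener('heatmap-click', onClick);
    }
    return () => {
      if (ref) {
        ref.chart.services.events.removeEventListener('heatmap-click', onClick);
      }
    };
  }, [onCell]);

  return <HeatmapChart ref={chartRef} data={data} options={options} />;
}

export default ClickableHeatmap;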
src/views/model-behavior/ModelBehavior.tsx CHANGED
@@ -836,7 +836,7 @@ export default function ModelBehavior({
           <div className={classes.row}>
             <h4>Tasks</h4>
             <TasksTable
-              metric={selectedMetric}
+              metrics={[selectedMetric]}
               evaluations={visibleEvaluations}
               models={selectedModels}
               filters={filters}
src/views/model-comparator/ModelComparator.tsx CHANGED
@@ -581,9 +581,15 @@ export default function ModelComparator({
 
   // Step 2.i: Add chart event
   useEffect(() => {
+    // Step 2.i.*: Local copy of reference
+    let ref = null;
+
+    // Step 2.i.**: Update reference and add event
     if (chartRef && chartRef.current) {
+      ref = chartRef.current;
+
       //@ts-ignore
-      chartRef.current.chart.services.events.addEventListener(
+      ref.chart.services.events.addEventListener(
         'scatter-click',
         ({ detail }) => {
           onTaskSelection(detail.datum.taskId);
@@ -591,10 +597,11 @@ export default function ModelComparator({
       );
     }
 
+    // Step 2.i.***: Cleanup function
     return () => {
-      if (chartRef && chartRef.current) {
+      if (ref) {
        //@ts-ignore
-        chartRef.current.chart.services.events.removeEventListener(
+        ref.chart.services.events.removeEventListener(
          'scatter-click',
          ({ detail }) => {
            onTaskSelection(detail.datum.taskId);
@@ -956,7 +963,7 @@ export default function ModelComparator({
         {filteredEvaluations ? (
           <>
             <TasksTable
-              metric={selectedMetric}
+              metrics={[selectedMetric]}
               evaluations={filteredEvaluations}
               models={[modelA, modelB]}
               filters={filters}
src/views/tasks-table/TasksTable.module.scss CHANGED
@@ -35,6 +35,19 @@
   align-items: center;
 }
 
+.tableCellValue {
+  display: flex;
+  column-gap: $spacing-03;
+  align-items: center;
+  border-left: 1px solid var(--cds-border-inverse);
+  padding-left: $spacing-03;
+}
+
+.tableCellValue:first-child {
+  border-left: none;
+  padding-left: 0;
+}
+
 .majorityValue {
   font-size: 14px;
   line-height: 18px;
src/views/tasks-table/TasksTable.tsx CHANGED
@@ -62,7 +62,7 @@ type EvaluationRow = {
   taskId: string;
 };
 interface Props {
-  metric: Metric;
+  metrics: Metric[];
   evaluations: TaskEvaluation[];
   models: Model[];
   filters: { [key: string]: string[] };
@@ -81,7 +81,7 @@ interface Props {
  */
 function populateTable(
   evaluations: TaskEvaluation[],
-  metric: Metric,
+  metrics: Metric[],
   models: Model[],
   taskInputMap: { [key: string]: any },
   filters: { [key: string]: string[] },
@@ -115,15 +115,22 @@ function populateTable(
     }
 
     // Add annotations
-    entry[`${evaluation.modelId}::value`] = annotator
-      ? extractMetricDisplayValue(
-          evaluation[metric.name][annotator].value,
-          metric.values,
-        )
-      : extractMetricDisplayValue(
-          evaluation[`${metric.name}_agg`].value,
-          metric.values,
-        );
+    entry[`${evaluation.modelId}::value`] = {};
+    metrics.forEach((metric) => {
+      if (annotator) {
+        entry[`${evaluation.modelId}::value`][metric.name] =
+          extractMetricDisplayValue(
+            evaluation[metric.name][annotator].value,
+            metric.values,
+          );
+      } else {
+        entry[`${evaluation.modelId}::value`][metric.name] =
+          extractMetricDisplayValue(
+            evaluation[`${metric.name}_agg`].value,
+            metric.values,
+          );
+      }
+    });
 
     // Step 1.b: Save updated entry into evaluations map
     evaluationsMap.set(evaluation.taskId, entry);
@@ -258,7 +265,7 @@ function sparkline(
 // MAIN FUNCTION
 // ===================================================================================
 export default function TasksTable({
-  metric,
+  metrics,
   evaluations,
   models,
   filters,
@@ -285,10 +292,12 @@ export default function TasksTable({
     return Object.fromEntries(
       evaluations.map((evaluation) => [
        `${evaluation.taskId}:${evaluation.modelId}`,
-        evaluation[metric.name],
+        Object.fromEntries(
+          metrics.map((metric) => [metric.name, evaluation[metric.name]]),
+        ),
      ]),
    );
-  }, [evaluations, metric]);
+  }, [evaluations, metrics]);
 
   // Step 2.e: Build tasks map
   const taskInputMap = useMemo(() => {
@@ -306,13 +315,13 @@ export default function TasksTable({
     () =>
       populateTable(
         evaluations,
-        metric,
+        metrics,
         models,
         taskInputMap,
         filters,
         annotator,
       ),
-    [evaluations, metric, models, filters, taskInputMap, annotator],
+    [evaluations, metrics, models, filters, taskInputMap, annotator],
   );
 
   // Step 2.g: Identify visible rows
@@ -538,22 +547,57 @@ export default function TasksTable({
              ) : (
                <TableCell key={cell.id}>
                  <div className={classes.tableCell}>
-                    <div className={classes.majorityValue}>
-                      {cell.value
-                        ? Array.isArray(cell.value)
-                          ? cell.value.join(', ')
-                          : cell.value
-                        : '-'}
-                    </div>
-                    {!annotator &&
-                      cell.value &&
-                      sparkline(
-                        evaluationsMap[
-                          cell.id.split('::value', 1)[0]
-                        ],
-                        metric,
-                        theme,
-                      )}
+                    {cell.value ? (
+                      typeof cell.value === 'object' ? (
+                        <>
+                          {metrics.map((metric) => {
+                            return (
+                              <>
+                                <div
+                                  className={
+                                    classes.tableCellValue
+                                  }
+                                  key={`${cell.id}::${metric.name}`}
+                                >
+                                  <div
+                                    className={
+                                      classes.majorityValue
+                                    }
+                                  >
+                                    {cell.value[metric.name]}
+                                  </div>
+                                  {!annotator &&
+                                  evaluationsMap[
+                                    cell.id.split('::value', 1)[0]
+                                  ]
+                                    ? sparkline(
+                                        evaluationsMap[
+                                          cell.id.split(
+                                            '::value',
+                                            1,
+                                          )[0]
+                                        ][metric.name],
+                                        metric,
+                                        theme,
+                                      )
+                                    : null}
+                                </div>
+                              </>
+                            );
+                          })}
+                        </>
+                      ) : (
+                        <div className={classes.majorityValue}>
+                          {Array.isArray(cell.value)
+                            ? cell.value.join(', ')
+                            : cell.value}
+                        </div>
+                      )
+                    ) : (
+                      <div className={classes.majorityValue}>
+                        -
+                      </div>
+                    )}
                  </div>
                </TableCell>
              ),
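For callers, the TasksTable change above amounts to one thing: the old metric prop is now metrics, an array, and each model cell renders one value (and sparkline) per metric. A hedged usage sketch with placeholder data, using only the props that appear in this diff; the wrapper component and its loose typings are illustrative assumptions.

// Sketch only: passing one or more metrics to the updated TasksTable.
import TasksTable from '@/src/views/tasks-table/TasksTable';

function DrillDownTasks({ metricA, metricB, evaluations, models }: any) {
  return (
    <TasksTable
      metrics={[metricA, metricB]} // single-metric callers pass [metric]
      evaluations={evaluations}
      models={models}
      filters={{}}
      onClick={(taskId: string) => {
        // The parent decides how to open the selected task.
        console.log('selected task', taskId);
      }}
    />
  );
}

export default DrillDownTasks;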