darabos committed
Commit 9cc1fee · 1 Parent(s): 29d6ac1

Working but slow LynxScribe in LynxKite.

data/LynxScribe demo ADDED
@@ -0,0 +1,973 @@
1
+ {
2
+ "env": "LynxScribe",
3
+ "nodes": [
4
+ {
5
+ "id": "Input chat 1",
6
+ "type": "basic",
7
+ "data": {
8
+ "title": "Input chat",
9
+ "params": {
10
+ "chat": "mi az elet titka"
11
+ },
12
+ "display": null,
13
+ "error": null,
14
+ "meta": {
15
+ "name": "Input chat",
16
+ "params": {
17
+ "chat": {
18
+ "name": "chat",
19
+ "default": null,
20
+ "type": {
21
+ "type": "<class 'str'>"
22
+ }
23
+ }
24
+ },
25
+ "inputs": {},
26
+ "outputs": {
27
+ "output": {
28
+ "name": "output",
29
+ "type": {
30
+ "type": "None"
31
+ },
32
+ "position": "right"
33
+ }
34
+ },
35
+ "type": "basic",
36
+ "sub_nodes": null
37
+ }
38
+ },
39
+ "position": {
40
+ "x": -292.9043729325095,
41
+ "y": 59.46898452253504
42
+ },
43
+ "parentId": null
44
+ },
45
+ {
46
+ "id": "View 1",
47
+ "type": "table_view",
48
+ "data": {
49
+ "title": "View",
50
+ "params": {},
51
+ "display": {
52
+ "dataframes": {
53
+ "df": {
54
+ "columns": [
55
+ "response"
56
+ ],
57
+ "data": [
58
+ [
59
+ "Az élet titka sokak számára különböző lehet, és sok tényezőtől függ. Néhány kulcselem, ami segíthet megtalálni az élet értelmét vagy titkát:\n\n- **Kapcsolatok**: A barátok és a család közelsége fontos az érzelmi jólét szempontjából.\n- **Önmegvalósítás**: Mindenkinek más a célja és álma, érdemes dolgozni azon, hogy elérjük őket.\n- **Tanulás**: Folyamatosan fejlődjünk és tanuljunk, hogy jobban megértsük a világot.\n- **Egészség**: A fizikai és mentális egészség megőrzése kulcsfontosságú az életminőség szempontjából.\n- **Kibékülés**: Békélj meg a múltaddal, és tanulj meg megbocsátani önmagadnak és másoknak.\n\nEzek az elemek hozzájárulhatnak ahhoz, hogy az életet gazdagabbnak és értékesebbnek érezd. Van valami konkrét aspektus az élet titkáról, amiről szívesen beszélnél?\n\nPlease visit <a href='https://www.linkedin.com/in/g%c3%a1bor-benedek-95578717' target='_blank'>https://www.linkedin.com/in/g%c3%a1bor-benedek-95578717</a> for further information."
60
+ ]
61
+ ]
62
+ }
63
+ }
64
+ },
65
+ "error": null,
66
+ "meta": {
67
+ "name": "View",
68
+ "params": {},
69
+ "inputs": {
70
+ "input": {
71
+ "name": "input",
72
+ "type": {
73
+ "type": "<class 'inspect._empty'>"
74
+ },
75
+ "position": "left"
76
+ }
77
+ },
78
+ "outputs": {},
79
+ "type": "table_view",
80
+ "sub_nodes": null
81
+ }
82
+ },
83
+ "position": {
84
+ "x": 472.96030572661607,
85
+ "y": 44.15182379992555
86
+ },
87
+ "parentId": null
88
+ },
89
+ {
90
+ "id": "LLM 1",
91
+ "type": "basic",
92
+ "data": {
93
+ "title": "LLM",
94
+ "params": {
95
+ "name": "openai"
96
+ },
97
+ "display": null,
98
+ "error": null,
99
+ "meta": {
100
+ "name": "LLM",
101
+ "params": {
102
+ "name": {
103
+ "name": "name",
104
+ "default": "openai",
105
+ "type": {
106
+ "type": "<class 'str'>"
107
+ }
108
+ }
109
+ },
110
+ "inputs": {},
111
+ "outputs": {
112
+ "output": {
113
+ "name": "output",
114
+ "type": {
115
+ "type": "None"
116
+ },
117
+ "position": "right"
118
+ }
119
+ },
120
+ "type": "basic",
121
+ "sub_nodes": null
122
+ }
123
+ },
124
+ "position": {
125
+ "x": -996.3183837866898,
126
+ "y": 1329.9037704510513
127
+ },
128
+ "parentId": null
129
+ },
130
+ {
131
+ "id": "Scenario selector 1",
132
+ "type": "basic",
133
+ "data": {
134
+ "title": "Scenario selector",
135
+ "params": {
136
+ "scenario_file": "/home/darabos/nvme/lynxscribe/examples/chat_api/scenarios.yaml",
137
+ "node_types": "intent_cluster"
138
+ },
139
+ "display": null,
140
+ "error": null,
141
+ "meta": {
142
+ "name": "Scenario selector",
143
+ "params": {
144
+ "scenario_file": {
145
+ "name": "scenario_file",
146
+ "default": null,
147
+ "type": {
148
+ "type": "<class 'str'>"
149
+ }
150
+ },
151
+ "node_types": {
152
+ "name": "node_types",
153
+ "default": "intent_cluster",
154
+ "type": {
155
+ "type": "<class 'str'>"
156
+ }
157
+ }
158
+ },
159
+ "inputs": {},
160
+ "outputs": {
161
+ "output": {
162
+ "name": "output",
163
+ "type": {
164
+ "type": "None"
165
+ },
166
+ "position": "right"
167
+ }
168
+ },
169
+ "type": "basic",
170
+ "sub_nodes": null
171
+ }
172
+ },
173
+ "position": {
174
+ "x": -1076.3726648689906,
175
+ "y": 1126.1701539825485
176
+ },
177
+ "parentId": null
178
+ },
179
+ {
180
+ "id": "Chat API 1",
181
+ "type": "basic",
182
+ "data": {
183
+ "title": "Chat API",
184
+ "params": {
185
+ "model": "gpt-4o-mini"
186
+ },
187
+ "display": null,
188
+ "error": null,
189
+ "meta": {
190
+ "name": "Chat API",
191
+ "params": {
192
+ "model": {
193
+ "name": "model",
194
+ "default": "gpt-4o-mini",
195
+ "type": {
196
+ "type": "<class 'str'>"
197
+ }
198
+ }
199
+ },
200
+ "inputs": {
201
+ "chatbot": {
202
+ "name": "chatbot",
203
+ "type": {
204
+ "type": "<class 'inspect._empty'>"
205
+ },
206
+ "position": "bottom"
207
+ },
208
+ "chat_processor": {
209
+ "name": "chat_processor",
210
+ "type": {
211
+ "type": "<class 'inspect._empty'>"
212
+ },
213
+ "position": "bottom"
214
+ },
215
+ "knowledge_base": {
216
+ "name": "knowledge_base",
217
+ "type": {
218
+ "type": "<class 'inspect._empty'>"
219
+ },
220
+ "position": "left"
221
+ }
222
+ },
223
+ "outputs": {
224
+ "output": {
225
+ "name": "output",
226
+ "type": {
227
+ "type": "None"
228
+ },
229
+ "position": "right"
230
+ }
231
+ },
232
+ "type": "basic",
233
+ "sub_nodes": null
234
+ }
235
+ },
236
+ "position": {
237
+ "x": -57.80584961387282,
238
+ "y": 235.19823621492515
239
+ },
240
+ "parentId": null
241
+ },
242
+ {
243
+ "id": "Knowledge base 1",
244
+ "type": "basic",
245
+ "data": {
246
+ "title": "Knowledge base",
247
+ "params": {
248
+ "nodes_path": "/home/darabos/nvme/lynxscribe/examples/chat_api/data/lynx/nodes.pickle",
249
+ "edges_path": "/home/darabos/nvme/lynxscribe/examples/chat_api/data/lynx/edges.pickle",
250
+ "template_cluster_path": "/home/darabos/nvme/lynxscribe/examples/chat_api/data/lynx/tempclusters.pickle"
251
+ },
252
+ "display": null,
253
+ "error": null,
254
+ "meta": {
255
+ "name": "Knowledge base",
256
+ "params": {
257
+ "nodes_path": {
258
+ "name": "nodes_path",
259
+ "default": "nodes.pickle",
260
+ "type": {
261
+ "type": "<class 'str'>"
262
+ }
263
+ },
264
+ "edges_path": {
265
+ "name": "edges_path",
266
+ "default": "edges.pickle",
267
+ "type": {
268
+ "type": "<class 'str'>"
269
+ }
270
+ },
271
+ "template_cluster_path": {
272
+ "name": "template_cluster_path",
273
+ "default": "tempclusters.pickle",
274
+ "type": {
275
+ "type": "<class 'str'>"
276
+ }
277
+ }
278
+ },
279
+ "inputs": {},
280
+ "outputs": {
281
+ "output": {
282
+ "name": "output",
283
+ "type": {
284
+ "type": "None"
285
+ },
286
+ "position": "right"
287
+ }
288
+ },
289
+ "type": "basic",
290
+ "sub_nodes": null
291
+ }
292
+ },
293
+ "position": {
294
+ "x": -428.0531718264389,
295
+ "y": 174.62875974530755
296
+ },
297
+ "parentId": null
298
+ },
299
+ {
300
+ "id": "RAG chatbot 1",
301
+ "type": "basic",
302
+ "data": {
303
+ "title": "RAG chatbot",
304
+ "params": {
305
+ "negative_answer": "I'm sorry, but the data I've been trained on does not contain any information related to your question.",
306
+ "min_information": 2,
307
+ "max_information": 3,
308
+ "min_summary": 2,
309
+ "max_summary": 3,
310
+ "strict_limits": true,
311
+ "max_results": 5
312
+ },
313
+ "display": null,
314
+ "error": null,
315
+ "meta": {
316
+ "name": "RAG chatbot",
317
+ "params": {
318
+ "negative_answer": {
319
+ "name": "negative_answer",
320
+ "default": "I'm sorry, but the data I've been trained on does not contain any information related to your question.",
321
+ "type": {
322
+ "type": "<class 'str'>"
323
+ }
324
+ },
325
+ "min_information": {
326
+ "name": "min_information",
327
+ "default": 2,
328
+ "type": {
329
+ "type": "<class 'int'>"
330
+ }
331
+ },
332
+ "max_information": {
333
+ "name": "max_information",
334
+ "default": 3,
335
+ "type": {
336
+ "type": "<class 'int'>"
337
+ }
338
+ },
339
+ "min_summary": {
340
+ "name": "min_summary",
341
+ "default": 2,
342
+ "type": {
343
+ "type": "<class 'int'>"
344
+ }
345
+ },
346
+ "max_summary": {
347
+ "name": "max_summary",
348
+ "default": 3,
349
+ "type": {
350
+ "type": "<class 'int'>"
351
+ }
352
+ },
353
+ "strict_limits": {
354
+ "name": "strict_limits",
355
+ "default": true,
356
+ "type": {
357
+ "type": "<class 'bool'>"
358
+ }
359
+ },
360
+ "max_results": {
361
+ "name": "max_results",
362
+ "default": 5,
363
+ "type": {
364
+ "type": "<class 'int'>"
365
+ }
366
+ }
367
+ },
368
+ "inputs": {
369
+ "rag_graph": {
370
+ "name": "rag_graph",
371
+ "type": {
372
+ "type": "<class 'inspect._empty'>"
373
+ },
374
+ "position": "bottom"
375
+ },
376
+ "scenario_selector": {
377
+ "name": "scenario_selector",
378
+ "type": {
379
+ "type": "<class 'inspect._empty'>"
380
+ },
381
+ "position": "bottom"
382
+ },
383
+ "llm": {
384
+ "name": "llm",
385
+ "type": {
386
+ "type": "<class 'inspect._empty'>"
387
+ },
388
+ "position": "bottom"
389
+ }
390
+ },
391
+ "outputs": {
392
+ "output": {
393
+ "name": "output",
394
+ "type": {
395
+ "type": "None"
396
+ },
397
+ "position": "right"
398
+ }
399
+ },
400
+ "type": "basic",
401
+ "sub_nodes": null
402
+ },
403
+ "beingResized": false
404
+ },
405
+ "position": {
406
+ "x": -647.9563055161224,
407
+ "y": 528.5816378646354
408
+ },
409
+ "parentId": null,
410
+ "width": 417,
411
+ "height": 494
412
+ },
413
+ {
414
+ "id": "RAG graph 1",
415
+ "type": "basic",
416
+ "data": {
417
+ "title": "RAG graph",
418
+ "params": {},
419
+ "display": null,
420
+ "error": null,
421
+ "meta": {
422
+ "name": "RAG graph",
423
+ "params": {},
424
+ "inputs": {
425
+ "vector_store": {
426
+ "name": "vector_store",
427
+ "type": {
428
+ "type": "<class 'inspect._empty'>"
429
+ },
430
+ "position": "bottom"
431
+ },
432
+ "text_embedder": {
433
+ "name": "text_embedder",
434
+ "type": {
435
+ "type": "<class 'inspect._empty'>"
436
+ },
437
+ "position": "bottom"
438
+ }
439
+ },
440
+ "outputs": {
441
+ "output": {
442
+ "name": "output",
443
+ "type": {
444
+ "type": "None"
445
+ },
446
+ "position": "right"
447
+ }
448
+ },
449
+ "type": "basic",
450
+ "sub_nodes": null
451
+ }
452
+ },
453
+ "position": {
454
+ "x": -1018.3991667849547,
455
+ "y": 882.7108232430365
456
+ },
457
+ "parentId": null
458
+ },
459
+ {
460
+ "id": "Vector store 1",
461
+ "type": "basic",
462
+ "data": {
463
+ "title": "Vector store",
464
+ "params": {
465
+ "name": "chromadb",
466
+ "collection_name": "lynx"
467
+ },
468
+ "display": null,
469
+ "error": null,
470
+ "meta": {
471
+ "name": "Vector store",
472
+ "params": {
473
+ "name": {
474
+ "name": "name",
475
+ "default": "chromadb",
476
+ "type": {
477
+ "type": "<class 'str'>"
478
+ }
479
+ },
480
+ "collection_name": {
481
+ "name": "collection_name",
482
+ "default": "lynx",
483
+ "type": {
484
+ "type": "<class 'str'>"
485
+ }
486
+ }
487
+ },
488
+ "inputs": {},
489
+ "outputs": {
490
+ "output": {
491
+ "name": "output",
492
+ "type": {
493
+ "type": "None"
494
+ },
495
+ "position": "right"
496
+ }
497
+ },
498
+ "type": "basic",
499
+ "sub_nodes": null
500
+ },
501
+ "beingResized": false
502
+ },
503
+ "position": {
504
+ "x": -1992.8382657219915,
505
+ "y": 898.0883240074281
506
+ },
507
+ "parentId": null,
508
+ "width": 275,
509
+ "height": 227
510
+ },
511
+ {
512
+ "id": "Text embedder 2",
513
+ "type": "basic",
514
+ "data": {
515
+ "title": "Text embedder",
516
+ "params": {
517
+ "model": "text-embedding-ada-002"
518
+ },
519
+ "display": null,
520
+ "error": null,
521
+ "meta": {
522
+ "name": "Text embedder",
523
+ "params": {
524
+ "model": {
525
+ "name": "model",
526
+ "default": "text-embedding-ada-002",
527
+ "type": {
528
+ "type": "<class 'str'>"
529
+ }
530
+ }
531
+ },
532
+ "inputs": {
533
+ "llm": {
534
+ "name": "llm",
535
+ "type": {
536
+ "type": "<class 'inspect._empty'>"
537
+ },
538
+ "position": "bottom"
539
+ }
540
+ },
541
+ "outputs": {
542
+ "output": {
543
+ "name": "output",
544
+ "type": {
545
+ "type": "None"
546
+ },
547
+ "position": "right"
548
+ }
549
+ },
550
+ "type": "basic",
551
+ "sub_nodes": null
552
+ }
553
+ },
554
+ "position": {
555
+ "x": -1601.7383061140106,
556
+ "y": 1187.790118541483
557
+ },
558
+ "parentId": null
559
+ },
560
+ {
561
+ "id": "LLM 2",
562
+ "type": "basic",
563
+ "data": {
564
+ "title": "LLM",
565
+ "params": {
566
+ "name": "openai"
567
+ },
568
+ "display": null,
569
+ "error": null,
570
+ "meta": {
571
+ "name": "LLM",
572
+ "params": {
573
+ "name": {
574
+ "name": "name",
575
+ "default": "openai",
576
+ "type": {
577
+ "type": "<class 'str'>"
578
+ }
579
+ }
580
+ },
581
+ "inputs": {},
582
+ "outputs": {
583
+ "output": {
584
+ "name": "output",
585
+ "type": {
586
+ "type": "None"
587
+ },
588
+ "position": "right"
589
+ }
590
+ },
591
+ "type": "basic",
592
+ "sub_nodes": null
593
+ }
594
+ },
595
+ "position": {
596
+ "x": -1830.5553990810897,
597
+ "y": 1406.239623213993
598
+ },
599
+ "parentId": null
600
+ },
601
+ {
602
+ "id": "Truncate history 1",
603
+ "type": "basic",
604
+ "data": {
605
+ "title": "Truncate history",
606
+ "params": {
607
+ "max_tokens": 10000,
608
+ "language": "English"
609
+ },
610
+ "display": null,
611
+ "error": null,
612
+ "meta": {
613
+ "name": "Truncate history",
614
+ "params": {
615
+ "max_tokens": {
616
+ "name": "max_tokens",
617
+ "default": 10000,
618
+ "type": {
619
+ "type": "<class 'int'>"
620
+ }
621
+ },
622
+ "language": {
623
+ "name": "language",
624
+ "default": "English",
625
+ "type": {
626
+ "type": "<class 'str'>"
627
+ }
628
+ }
629
+ },
630
+ "inputs": {},
631
+ "outputs": {
632
+ "output": {
633
+ "name": "output",
634
+ "type": {
635
+ "type": "None"
636
+ },
637
+ "position": "right"
638
+ }
639
+ },
640
+ "type": "basic",
641
+ "sub_nodes": null
642
+ }
643
+ },
644
+ "position": {
645
+ "x": -206.839661001107,
646
+ "y": 954.055575798662
647
+ },
648
+ "parentId": null
649
+ },
650
+ {
651
+ "id": "Chat processor 1",
652
+ "type": "basic",
653
+ "data": {
654
+ "title": "Chat processor",
655
+ "params": {},
656
+ "display": null,
657
+ "error": null,
658
+ "meta": {
659
+ "name": "Chat processor",
660
+ "params": {},
661
+ "inputs": {
662
+ "processor": {
663
+ "name": "processor",
664
+ "type": {
665
+ "type": "<class 'inspect._empty'>"
666
+ },
667
+ "position": "left"
668
+ }
669
+ },
670
+ "outputs": {
671
+ "output": {
672
+ "name": "output",
673
+ "type": {
674
+ "type": "None"
675
+ },
676
+ "position": "right"
677
+ }
678
+ },
679
+ "type": "basic",
680
+ "sub_nodes": null
681
+ }
682
+ },
683
+ "position": {
684
+ "x": 266.23062579739207,
685
+ "y": 931.8796075565143
686
+ },
687
+ "parentId": null
688
+ },
689
+ {
690
+ "id": "Test Chat API 1",
691
+ "type": "basic",
692
+ "data": {
693
+ "title": "Test Chat API",
694
+ "params": {},
695
+ "display": null,
696
+ "error": null,
697
+ "meta": {
698
+ "name": "Test Chat API",
699
+ "params": {},
700
+ "inputs": {
701
+ "message": {
702
+ "name": "message",
703
+ "type": {
704
+ "type": "<class 'inspect._empty'>"
705
+ },
706
+ "position": "left"
707
+ },
708
+ "chat_api": {
709
+ "name": "chat_api",
710
+ "type": {
711
+ "type": "<class 'inspect._empty'>"
712
+ },
713
+ "position": "bottom"
714
+ }
715
+ },
716
+ "outputs": {
717
+ "output": {
718
+ "name": "output",
719
+ "type": {
720
+ "type": "None"
721
+ },
722
+ "position": "right"
723
+ }
724
+ },
725
+ "type": "basic",
726
+ "sub_nodes": null
727
+ }
728
+ },
729
+ "position": {
730
+ "x": 30.070483767962628,
731
+ "y": 30.871931330853627
732
+ },
733
+ "parentId": null
734
+ },
735
+ {
736
+ "id": "Mask 1",
737
+ "type": "basic",
738
+ "data": {
739
+ "title": "Mask",
740
+ "params": {
741
+ "name": "email",
742
+ "regex": "([a-z0-9!#$%&'*+\\/=?^_`{|.}~-]+@(?:[a-z0-9](?:[a-z0-9-]*[a-z0-9])?\\.)+[a-z0-9](?:[a-z0-9-]*[a-z0-9])?)",
743
744
+ "mask_pattern": "masked_email_address_{}"
745
+ },
746
+ "display": null,
747
+ "error": null,
748
+ "meta": {
749
+ "name": "Mask",
750
+ "params": {
751
+ "name": {
752
+ "name": "name",
753
+ "default": "",
754
+ "type": {
755
+ "type": "None"
756
+ }
757
+ },
758
+ "regex": {
759
+ "name": "regex",
760
+ "default": "",
761
+ "type": {
762
+ "type": "None"
763
+ }
764
+ },
765
+ "exceptions": {
766
+ "name": "exceptions",
767
+ "default": "",
768
+ "type": {
769
+ "type": "None"
770
+ }
771
+ },
772
+ "mask_pattern": {
773
+ "name": "mask_pattern",
774
+ "default": "",
775
+ "type": {
776
+ "type": "None"
777
+ }
778
+ }
779
+ },
780
+ "inputs": {},
781
+ "outputs": {
782
+ "output": {
783
+ "name": "output",
784
+ "type": {
785
+ "type": "None"
786
+ },
787
+ "position": "right"
788
+ }
789
+ },
790
+ "type": "basic",
791
+ "sub_nodes": null
792
+ }
793
+ },
794
+ "position": {
795
+ "x": -202.83662881345157,
796
+ "y": 1123.8190429357237
797
+ },
798
+ "parentId": null
799
+ },
800
+ {
801
+ "id": "Mask 2",
802
+ "type": "basic",
803
+ "data": {
804
+ "title": "Mask",
805
+ "params": {
806
+ "name": "credit_card",
807
+ "regex": "((?:(?:\\\\d{4}[- ]?){3}\\\\d{4}|\\\\d{15,16}))(?![\\\\d])",
808
+ "exceptions": "",
809
+ "mask_pattern": "masked_credit_card_number_{}"
810
+ },
811
+ "display": null,
812
+ "error": null,
813
+ "meta": {
814
+ "name": "Mask",
815
+ "params": {
816
+ "name": {
817
+ "name": "name",
818
+ "default": "",
819
+ "type": {
820
+ "type": "None"
821
+ }
822
+ },
823
+ "regex": {
824
+ "name": "regex",
825
+ "default": "",
826
+ "type": {
827
+ "type": "None"
828
+ }
829
+ },
830
+ "exceptions": {
831
+ "name": "exceptions",
832
+ "default": "",
833
+ "type": {
834
+ "type": "None"
835
+ }
836
+ },
837
+ "mask_pattern": {
838
+ "name": "mask_pattern",
839
+ "default": "",
840
+ "type": {
841
+ "type": "None"
842
+ }
843
+ }
844
+ },
845
+ "inputs": {},
846
+ "outputs": {
847
+ "output": {
848
+ "name": "output",
849
+ "type": {
850
+ "type": "None"
851
+ },
852
+ "position": "right"
853
+ }
854
+ },
855
+ "type": "basic",
856
+ "sub_nodes": null
857
+ }
858
+ },
859
+ "position": {
860
+ "x": -192.5486444668937,
861
+ "y": 1414.696184081429
862
+ },
863
+ "parentId": null
864
+ }
865
+ ],
866
+ "edges": [
867
+ {
868
+ "id": "xy-edge__Knowledge base 1output-Chat API 1knowledge_base",
869
+ "source": "Knowledge base 1",
870
+ "target": "Chat API 1",
871
+ "sourceHandle": "output",
872
+ "targetHandle": "knowledge_base"
873
+ },
874
+ {
875
+ "id": "xy-edge__RAG chatbot 1output-Chat API 1chatbot",
876
+ "source": "RAG chatbot 1",
877
+ "target": "Chat API 1",
878
+ "sourceHandle": "output",
879
+ "targetHandle": "chatbot"
880
+ },
881
+ {
882
+ "id": "xy-edge__LLM 1output-RAG chatbot 1llm",
883
+ "source": "LLM 1",
884
+ "target": "RAG chatbot 1",
885
+ "sourceHandle": "output",
886
+ "targetHandle": "llm"
887
+ },
888
+ {
889
+ "id": "xy-edge__Scenario selector 1output-RAG chatbot 1scenario_selector",
890
+ "source": "Scenario selector 1",
891
+ "target": "RAG chatbot 1",
892
+ "sourceHandle": "output",
893
+ "targetHandle": "scenario_selector"
894
+ },
895
+ {
896
+ "id": "xy-edge__RAG graph 1output-RAG chatbot 1rag_graph",
897
+ "source": "RAG graph 1",
898
+ "target": "RAG chatbot 1",
899
+ "sourceHandle": "output",
900
+ "targetHandle": "rag_graph"
901
+ },
902
+ {
903
+ "id": "xy-edge__Vector store 1output-RAG graph 1vector_store",
904
+ "source": "Vector store 1",
905
+ "target": "RAG graph 1",
906
+ "sourceHandle": "output",
907
+ "targetHandle": "vector_store"
908
+ },
909
+ {
910
+ "id": "xy-edge__Text embedder 2output-RAG graph 1text_embedder",
911
+ "source": "Text embedder 2",
912
+ "target": "RAG graph 1",
913
+ "sourceHandle": "output",
914
+ "targetHandle": "text_embedder"
915
+ },
916
+ {
917
+ "id": "xy-edge__LLM 2output-Text embedder 2llm",
918
+ "source": "LLM 2",
919
+ "target": "Text embedder 2",
920
+ "sourceHandle": "output",
921
+ "targetHandle": "llm"
922
+ },
923
+ {
924
+ "id": "xy-edge__Truncate history 1output-Chat processor 1processor",
925
+ "source": "Truncate history 1",
926
+ "target": "Chat processor 1",
927
+ "sourceHandle": "output",
928
+ "targetHandle": "processor"
929
+ },
930
+ {
931
+ "id": "xy-edge__Chat processor 1output-Chat API 1chat_processor",
932
+ "source": "Chat processor 1",
933
+ "target": "Chat API 1",
934
+ "sourceHandle": "output",
935
+ "targetHandle": "chat_processor"
936
+ },
937
+ {
938
+ "id": "xy-edge__Chat API 1output-Test Chat API 1chat_api",
939
+ "source": "Chat API 1",
940
+ "target": "Test Chat API 1",
941
+ "sourceHandle": "output",
942
+ "targetHandle": "chat_api"
943
+ },
944
+ {
945
+ "id": "xy-edge__Test Chat API 1output-View 1input",
946
+ "source": "Test Chat API 1",
947
+ "target": "View 1",
948
+ "sourceHandle": "output",
949
+ "targetHandle": "input"
950
+ },
951
+ {
952
+ "id": "xy-edge__Input chat 1output-Test Chat API 1message",
953
+ "source": "Input chat 1",
954
+ "target": "Test Chat API 1",
955
+ "sourceHandle": "output",
956
+ "targetHandle": "message"
957
+ },
958
+ {
959
+ "id": "xy-edge__Mask 1output-Chat processor 1processor",
960
+ "source": "Mask 1",
961
+ "target": "Chat processor 1",
962
+ "sourceHandle": "output",
963
+ "targetHandle": "processor"
964
+ },
965
+ {
966
+ "id": "xy-edge__Mask 2output-Chat processor 1processor",
967
+ "source": "Mask 2",
968
+ "target": "Chat processor 1",
969
+ "sourceHandle": "output",
970
+ "targetHandle": "processor"
971
+ }
972
+ ]
973
+ }
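The saved workspace above is plain JSON with top-level "env", "nodes" and "edges" keys, so it can be inspected or post-processed outside LynxKite. A minimal sketch using only the standard library (the path is the file added above; the field names are taken from the JSON itself):

import json

# Load the workspace file added in this commit.
with open("data/LynxScribe demo") as f:
    ws = json.load(f)

print(ws["env"])                  # "LynxScribe"
print(len(ws["nodes"]), "boxes")  # one entry per box on the canvas
print(len(ws["edges"]), "edges")  # connections between handles

# Each node carries its operation title and parameter values.
for node in ws["nodes"]:
    print(node["id"], "->", node["data"]["params"])

# Edges reference node ids plus source/target handle names.
for edge in ws["edges"]:
    print(f'{edge["source"]}.{edge["sourceHandle"]} -> {edge["target"]}.{edge["targetHandle"]}')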
server/executors/one_by_one.py CHANGED
@@ -37,7 +37,7 @@ def register(env: str, cache: bool = True):
37
  ops.EXECUTORS[env] = lambda ws: execute(ws, ops.CATALOGS[env], cache=cache)
38
 
39
  def get_stages(ws, catalog):
40
- '''Inputs on top are batch inputs. We decompose the graph into a DAG of components along these edges.'''
41
  nodes = {n.id: n for n in ws.nodes}
42
  batch_inputs = {}
43
  inputs = {}
@@ -46,7 +46,7 @@ def get_stages(ws, catalog):
46
  node = nodes[edge.target]
47
  op = catalog[node.data.title]
48
  i = op.inputs[edge.targetHandle]
49
- if i.position == 'top':
50
  batch_inputs.setdefault(edge.target, []).append(edge.source)
51
  stages = []
52
  for bt, bss in batch_inputs.items():
@@ -77,7 +77,7 @@ def execute(ws, catalog, cache=None):
77
  node.data.error = None
78
  op = catalog[node.data.title]
79
  # Start tasks for nodes that have no non-batch inputs.
80
- if all([i.position == 'top' for i in op.inputs.values()]):
81
  tasks[node.id] = [NO_INPUT]
82
  batch_inputs = {}
83
  # Run the rest until we run out of tasks.
@@ -99,7 +99,7 @@ def execute(ws, catalog, cache=None):
99
  for task in ts:
100
  try:
101
  inputs = [
102
- batch_inputs[(n, i.name)] if i.position == 'top' else task
103
  for i in op.inputs.values()]
104
  if cache:
105
  key = json.dumps(fastapi.encoders.jsonable_encoder((inputs, params)))
@@ -126,7 +126,7 @@ def execute(ws, catalog, cache=None):
126
  t = nodes[edge.target]
127
  op = catalog[t.data.title]
128
  i = op.inputs[edge.targetHandle]
129
- if i.position == 'top':
130
  batch_inputs.setdefault((edge.target, edge.targetHandle), []).extend(results)
131
  else:
132
  tasks.setdefault(edge.target, []).extend(results)
 
37
  ops.EXECUTORS[env] = lambda ws: execute(ws, ops.CATALOGS[env], cache=cache)
38
 
39
  def get_stages(ws, catalog):
40
+ '''Inputs on top/bottom are batch inputs. We decompose the graph into a DAG of components along these edges.'''
41
  nodes = {n.id: n for n in ws.nodes}
42
  batch_inputs = {}
43
  inputs = {}
 
46
  node = nodes[edge.target]
47
  op = catalog[node.data.title]
48
  i = op.inputs[edge.targetHandle]
49
+ if i.position in ('top', 'bottom'):
50
  batch_inputs.setdefault(edge.target, []).append(edge.source)
51
  stages = []
52
  for bt, bss in batch_inputs.items():
 
77
  node.data.error = None
78
  op = catalog[node.data.title]
79
  # Start tasks for nodes that have no non-batch inputs.
80
+ if all([i.position in ('top', 'bottom') for i in op.inputs.values()]):
81
  tasks[node.id] = [NO_INPUT]
82
  batch_inputs = {}
83
  # Run the rest until we run out of tasks.
 
99
  for task in ts:
100
  try:
101
  inputs = [
102
+ batch_inputs[(n, i.name)] if i.position in ('top', 'bottom') else task
103
  for i in op.inputs.values()]
104
  if cache:
105
  key = json.dumps(fastapi.encoders.jsonable_encoder((inputs, params)))
 
126
  t = nodes[edge.target]
127
  op = catalog[t.data.title]
128
  i = op.inputs[edge.targetHandle]
129
+ if i.position in ('top', 'bottom'):
130
  batch_inputs.setdefault((edge.target, edge.targetHandle), []).extend(results)
131
  else:
132
  tasks.setdefault(edge.target, []).extend(results)
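The executor change above widens the batch-input test from inputs on the top of a box to inputs on the top or bottom; that is how the LynxScribe components in the workspace above (chatbot, chat_processor, vector_store, ...) hang off their parent boxes, while "left" inputs such as knowledge_base still carry per-task values. A minimal sketch of the intended predicate, using a simplified Input stand-in for the catalog objects (illustration only, not the real classes):

from dataclasses import dataclass

# Simplified stand-in for the op-catalog input descriptors.
@dataclass
class Input:
    name: str
    position: str  # 'left', 'right', 'top' or 'bottom'

def is_batch_input(i: Input) -> bool:
    # Top/bottom inputs collect the whole output of the upstream stage;
    # side inputs are fed one task at a time.
    return i.position in ('top', 'bottom')

assert is_batch_input(Input('chatbot', 'bottom'))
assert not is_batch_input(Input('knowledge_base', 'left'))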
server/lynxscribe_ops.py CHANGED
@@ -1,52 +1,150 @@
1
- '''An example of passive ops. Just using LynxKite to describe the configuration of a complex system.'''
2
- from .ops import passive_op_registration, Parameter as P, MULTI_INPUT
3
-
4
- reg = passive_op_registration('LynxScribe')
5
- reg('Scrape documents', params=[P.basic('url', '')])
6
- reg('Conversation logs')
7
- reg('Extract graph', inputs=['input'])
8
- reg('Compute embeddings', inputs=['input'], params=[P.options('method', ['OpenAI', 'graph', 'Yi-34b']), P.basic('dimensions', 1234)])
9
- reg('Vector DB', inputs=[MULTI_INPUT], params=[P.options('backend', ['FAISS', 'ANN', 'HNSW'])])
10
- reg('Chat UI', outputs=[], inputs=['input'])
11
- reg('Chat backend')
12
- reg('WhatsApp')
13
- reg('PII removal', inputs=['input'])
14
- reg('Intent classification', inputs=['input'])
15
- reg('System prompt', params=[P.basic('prompt', 'You are a helpful chatbot.')])
16
- reg('LLM', inputs=[MULTI_INPUT], params=[P.options('backend', ['GPT-4', 'Yi-34b', 'Claude 3 Opus', 'Google Gemini'])])
17
-
18
- # From Marton's mock-up.
19
- yi = 'Yi-34B (triton)'
20
- reg('Chat Input', params=[
21
- P.options('load_mode', ['augmented']),
22
- P.options('model', [yi]),
23
- P.options('embedder', ['GritLM-7b (triton)']),
24
- ])
25
- reg('k-NN Intent Classifier', inputs=['qa_embs', 'rag_graph'], params=[
26
- P.options('distance', ['cosine', 'euclidean']),
27
- P.basic('max_dist', 0.3),
28
- P.basic('k', 3),
29
- P.options('voting', ['most common', 'weighted']),
30
- ])
31
- reg('Chroma Graph RAG Loader', inputs=[], params=[
32
- P.options('location', ['GCP']),
33
- P.collapsed('bucket', ''),
34
- P.collapsed('folder', ''),
35
- P.options('embedder', ['GritLM-7b (triton)']),
36
- ])
37
- reg('Scenario Builder', inputs=['input'], params=[
38
- P.collapsed('scenario', ''),
39
- ])
40
- reg('Graph RAG Answer', inputs=['qa_embs', 'intent', 'rag_graph', 'prompt_dict'], params=[
41
- P.options('answer_llm', [yi]),
42
- P.basic('faq_dist', 0.12),
43
- P.basic('max_dist', 0.25),
44
- P.basic('ctx_tokens', 2800),
45
- P.options('distance', ['cosine', 'euclidean']),
46
- P.collapsed('graph_rag_params', ''),
47
- ])
48
- reg('Answer Post Processing', inputs=['qa_embs', 'rag_graph'], params=[
49
- P.options('distance', ['cosine', 'euclidean']),
50
- P.basic('min_conf', 0.78),
51
- ])
52
- reg('Chat Output', inputs=['input'], outputs=[])
 
1
+ """
2
+ LynxScribe configuration and testing in LynxKite.
3
+ """
4
+ from lynxscribe.core.llm.base import get_llm_engine
5
+ from lynxscribe.core.vector_store.base import get_vector_store
6
+ from lynxscribe.common.config import load_config
7
+ from lynxscribe.components.text_embedder import TextEmbedder
8
+ from lynxscribe.components.rag.rag_graph import RAGGraph
9
+ from lynxscribe.components.rag.knowledge_base_graph import PandasKnowledgeBaseGraph
10
+ from lynxscribe.components.rag.rag_chatbot import Scenario, ScenarioSelector, RAGChatbot
11
+ from lynxscribe.components.chat_processor.base import ChatProcessor
12
+ from lynxscribe.components.chat_processor.processors import MaskTemplate, TruncateHistory
13
+ from lynxscribe.components.chat_api import ChatAPI, ChatAPIRequest, ChatAPIResponse
14
+
15
+ from . import ops
16
+ import asyncio
17
+ from .executors import one_by_one
18
+
19
+ ENV = 'LynxScribe'
20
+ one_by_one.register(ENV)
21
+ op = ops.op_registration(ENV)
22
+
23
+ @op("Vector store")
24
+ def vector_store(*, name='chromadb', collection_name='lynx'):
25
+ vector_store = get_vector_store(name=name, collection_name=collection_name)
26
+ return {'vector_store': vector_store}
27
+
28
+ @op("LLM")
29
+ def llm(*, name='openai'):
30
+ llm = get_llm_engine(name=name)
31
+ return {'llm': llm}
32
+
33
+ @ops.input_position(llm="bottom")
34
+ @op("Text embedder")
35
+ def text_embedder(llm, *, model='text-embedding-ada-002'):
36
+ llm = llm[0]['llm']
37
+ text_embedder = TextEmbedder(llm=llm, model=model)
38
+ return {'text_embedder': text_embedder}
39
+
40
+ @ops.input_position(vector_store="bottom", text_embedder="bottom")
41
+ @op("RAG graph")
42
+ def rag_graph(vector_store, text_embedder):
43
+ vector_store = vector_store[0]['vector_store']
44
+ text_embedder = text_embedder[0]['text_embedder']
45
+ rag_graph = RAGGraph(
46
+ PandasKnowledgeBaseGraph(vector_store=vector_store, text_embedder=text_embedder)
47
+ )
48
+ return {'rag_graph': rag_graph}
49
+
50
+ @op("Scenario selector")
51
+ def scenario_selector(*, scenario_file: str, node_types='intent_cluster'):
52
+ scenarios = load_config(scenario_file)
53
+ node_types = [t.strip() for t in node_types.split(',')]
54
+ scenario_selector = ScenarioSelector(
55
+ scenarios=[Scenario(**scenario) for scenario in scenarios],
56
+ node_types=node_types,
57
+ )
58
+ return {'scenario_selector': scenario_selector}
59
+
60
+ DEFAULT_NEGATIVE_ANSWER = "I'm sorry, but the data I've been trained on does not contain any information related to your question."
61
+
62
+ @ops.input_position(rag_graph="bottom", scenario_selector="bottom", llm="bottom")
63
+ @op("RAG chatbot")
64
+ def rag_chatbot(
65
+ rag_graph, scenario_selector, llm, *,
66
+ negative_answer=DEFAULT_NEGATIVE_ANSWER,
67
+ min_information=2, max_information=3,
68
+ min_summary=2, max_summary=3,
69
+ strict_limits=True, max_results=5):
70
+ rag_graph = rag_graph[0]['rag_graph']
71
+ scenario_selector = scenario_selector[0]['scenario_selector']
72
+ llm = llm[0]['llm']
73
+ rag_chatbot = RAGChatbot(
74
+ rag_graph=rag_graph,
75
+ scenario_selector=scenario_selector,
76
+ llm=llm,
77
+ negative_answer=negative_answer,
78
+ limits_by_type=dict(information=[min_information, max_information], summary=[min_summary, max_summary]),
79
+ strict_limits=strict_limits,
80
+ max_results=max_results,
81
+ )
82
+ return {'chatbot': rag_chatbot}
83
+
84
+ @op("Chat processor")
85
+ def chat_processor(processor, *, _ctx: one_by_one.Context):
86
+ cfg = _ctx.last_result or {'question_processors': [], 'answer_processors': [], 'masks': []}
87
+ for f in ['question_processor', 'answer_processor', 'mask']:
88
+ if f in processor:
89
+ cfg[f + 's'].append(processor[f])
90
+ question_processors = cfg['question_processors'][:]
91
+ answer_processors = cfg['answer_processors'][:]
92
+ masking_templates = {}
93
+ for mask in cfg['masks']:
94
+ masking_templates[mask['name']] = mask
95
+ if masking_templates:
96
+ question_processors.append(MaskTemplate(masking_templates=masking_templates))
97
+ answer_processors.append(MaskTemplate(masking_templates=masking_templates))
98
+ chat_processor = ChatProcessor(question_processors=question_processors, answer_processors=answer_processors)
99
+ return {'chat_processor': chat_processor, **cfg}
100
+
101
+ @op("Truncate history")
102
+ def truncate_history(*, max_tokens=10000, language='English'):
103
+ return {'question_processor': TruncateHistory(max_tokens=max_tokens, language=language.lower())}
104
+
105
+ @op("Mask")
106
+ def mask(*, name='', regex='', exceptions='', mask_pattern=''):
107
+ exceptions = [e.strip() for e in exceptions.split(',') if e.strip()]
108
+ return {'mask': {'name': name, 'regex': regex, 'exceptions': exceptions, 'mask_pattern': mask_pattern}}
109
+
110
+ @ops.input_position(chat_api="bottom")
111
+ @op("Test Chat API")
112
+ def test_chat_api(message, chat_api):
113
+ chat_api = chat_api[0]['chat_api']
114
+ request = ChatAPIRequest(session_id="b43215a0-428f-11ef-9454-0242ac120002", question=message['text'], history=[])
115
+ response = asyncio.run(chat_api.answer(request))
116
+ return {'response': response.answer}
117
+
118
+ @op("Input chat")
119
+ def input_chat(*, chat: str):
120
+ return {'text': chat}
121
+
122
+ @ops.input_position(chatbot="bottom", chat_processor="bottom")
123
+ @op("Chat API")
124
+ def chat_api(chatbot, chat_processor, knowledge_base, *, model='gpt-4o-mini'):
125
+ chatbot = chatbot[0]['chatbot']
126
+ chat_processor = chat_processor[0]['chat_processor']
127
+ c = ChatAPI(
128
+ chatbot=chatbot,
129
+ chat_processor=chat_processor,
130
+ model=model,
131
+ )
132
+ if knowledge_base:
133
+ c.chatbot.rag_graph.kg_base.load_v1_knowledge_base(**knowledge_base)
134
+ c.chatbot.scenario_selector.check_compatibility(c.chatbot.rag_graph)
135
+ return {'chat_api': c}
136
+
137
+ @op("Knowledge base")
138
+ def knowledge_base(*, nodes_path='nodes.pickle', edges_path='edges.pickle', template_cluster_path='tempclusters.pickle'):
139
+ return {'nodes_path': nodes_path, 'edges_path': edges_path, 'template_cluster_path': template_cluster_path}
140
+
141
+ @op("View", view="table_view")
142
+ def view(input):
143
+ columns = [str(c) for c in input.keys() if not str(c).startswith('_')]
144
+ v = {
145
+ 'dataframes': { 'df': {
146
+ 'columns': columns,
147
+ 'data': [[input[c] for c in columns]],
148
+ }}
149
+ }
150
+ return v
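New LynxScribe boxes follow the same pattern as the ops above: register under the LynxScribe environment with @op, mark batch inputs with @ops.input_position, and return a dict whose keys become the output handles that downstream boxes unpack (e.g. llm[0]['llm']). A hypothetical example in that style, reusing the "System prompt" idea from the old passive registry; it is not part of this commit:

# Hypothetical ops sketched in the same style as the ones above; not in this commit.
@op("System prompt")
def system_prompt(*, prompt='You are a helpful chatbot.'):
    # The returned key is the handle name other boxes read.
    return {'prompt': prompt}

@ops.input_position(prompt="bottom")
@op("Prompt logger")
def prompt_logger(prompt):
    # Bottom ("batch") inputs arrive as a list of upstream results.
    text = prompt[0]['prompt']
    print(text)  # just demonstrates unpacking; a real op would use the value
    return {'prompt': text}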
web/package-lock.json CHANGED
@@ -25,6 +25,7 @@
25
  "sass": "^1.77.2",
26
  "svelte": "^4.2.12",
27
  "svelte-check": "^3.6.9",
 
28
  "tslib": "^2.6.2",
29
  "typescript": "^5.4.4",
30
  "unplugin-icons": "^0.18.5",
@@ -1081,6 +1082,13 @@
1081
  "resolved": "https://registry.npmjs.org/@types/geojson/-/geojson-7946.0.14.tgz",
1082
  "integrity": "sha512-WCfD5Ht3ZesJUsONdhvm84dmzWOiOzOAqOncN0++w0lBw1o8OuDNJF2McvvCef/yBqb/HYRahp1BYtODFQ8bRg=="
1083
  },
1084
  "node_modules/@types/pug": {
1085
  "version": "2.0.10",
1086
  "resolved": "https://registry.npmjs.org/@types/pug/-/pug-2.0.10.tgz",
@@ -1844,6 +1852,19 @@
1844
  "@jridgewell/sourcemap-codec": "^1.4.15"
1845
  }
1846
  },
1847
  "node_modules/mdn-data": {
1848
  "version": "2.0.30",
1849
  "resolved": "https://registry.npmjs.org/mdn-data/-/mdn-data-2.0.30.tgz",
@@ -2460,6 +2481,20 @@
2460
  "svelte": "^3.19.0 || ^4.0.0"
2461
  }
2462
  },
2463
  "node_modules/svelte-preprocess": {
2464
  "version": "5.1.4",
2465
  "resolved": "https://registry.npmjs.org/svelte-preprocess/-/svelte-preprocess-5.1.4.tgz",
 
25
  "sass": "^1.77.2",
26
  "svelte": "^4.2.12",
27
  "svelte-check": "^3.6.9",
28
+ "svelte-markdown": "^0.4.1",
29
  "tslib": "^2.6.2",
30
  "typescript": "^5.4.4",
31
  "unplugin-icons": "^0.18.5",
 
1082
  "resolved": "https://registry.npmjs.org/@types/geojson/-/geojson-7946.0.14.tgz",
1083
  "integrity": "sha512-WCfD5Ht3ZesJUsONdhvm84dmzWOiOzOAqOncN0++w0lBw1o8OuDNJF2McvvCef/yBqb/HYRahp1BYtODFQ8bRg=="
1084
  },
1085
+ "node_modules/@types/marked": {
1086
+ "version": "5.0.2",
1087
+ "resolved": "https://registry.npmjs.org/@types/marked/-/marked-5.0.2.tgz",
1088
+ "integrity": "sha512-OucS4KMHhFzhz27KxmWg7J+kIYqyqoW5kdIEI319hqARQQUTqhao3M/F+uFnDXD0Rg72iDDZxZNxq5gvctmLlg==",
1089
+ "dev": true,
1090
+ "license": "MIT"
1091
+ },
1092
  "node_modules/@types/pug": {
1093
  "version": "2.0.10",
1094
  "resolved": "https://registry.npmjs.org/@types/pug/-/pug-2.0.10.tgz",
 
1852
  "@jridgewell/sourcemap-codec": "^1.4.15"
1853
  }
1854
  },
1855
+ "node_modules/marked": {
1856
+ "version": "5.1.2",
1857
+ "resolved": "https://registry.npmjs.org/marked/-/marked-5.1.2.tgz",
1858
+ "integrity": "sha512-ahRPGXJpjMjwSOlBoTMZAK7ATXkli5qCPxZ21TG44rx1KEo44bii4ekgTDQPNRQ4Kh7JMb9Ub1PVk1NxRSsorg==",
1859
+ "dev": true,
1860
+ "license": "MIT",
1861
+ "bin": {
1862
+ "marked": "bin/marked.js"
1863
+ },
1864
+ "engines": {
1865
+ "node": ">= 16"
1866
+ }
1867
+ },
1868
  "node_modules/mdn-data": {
1869
  "version": "2.0.30",
1870
  "resolved": "https://registry.npmjs.org/mdn-data/-/mdn-data-2.0.30.tgz",
 
2481
  "svelte": "^3.19.0 || ^4.0.0"
2482
  }
2483
  },
2484
+ "node_modules/svelte-markdown": {
2485
+ "version": "0.4.1",
2486
+ "resolved": "https://registry.npmjs.org/svelte-markdown/-/svelte-markdown-0.4.1.tgz",
2487
+ "integrity": "sha512-pOlLY6EruKJaWI9my/2bKX8PdTeP5CM0s4VMmwmC2prlOkjAf+AOmTM4wW/l19Y6WZ87YmP8+ZCJCCwBChWjYw==",
2488
+ "dev": true,
2489
+ "license": "MIT",
2490
+ "dependencies": {
2491
+ "@types/marked": "^5.0.1",
2492
+ "marked": "^5.1.2"
2493
+ },
2494
+ "peerDependencies": {
2495
+ "svelte": "^4.0.0"
2496
+ }
2497
+ },
2498
  "node_modules/svelte-preprocess": {
2499
  "version": "5.1.4",
2500
  "resolved": "https://registry.npmjs.org/svelte-preprocess/-/svelte-preprocess-5.1.4.tgz",
web/package.json CHANGED
@@ -15,6 +15,7 @@
15
  "sass": "^1.77.2",
16
  "svelte": "^4.2.12",
17
  "svelte-check": "^3.6.9",
 
18
  "tslib": "^2.6.2",
19
  "typescript": "^5.4.4",
20
  "unplugin-icons": "^0.18.5",
 
15
  "sass": "^1.77.2",
16
  "svelte": "^4.2.12",
17
  "svelte-check": "^3.6.9",
18
+ "svelte-markdown": "^0.4.1",
19
  "tslib": "^2.6.2",
20
  "typescript": "^5.4.4",
21
  "unplugin-icons": "^0.18.5",