<!-- Automatically generated by emacsconf-publish-after-page -->


<a name="llm-mainVideo-transcript"></a>
# Transcript


[[!template new="1" text="""Intro to the Talk""" start="00:00:00.000" video="mainVideo-llm" id="subtitle"]]

[[!template text="""Hello, I'm Andrew Hyatt and I'm going to talk to you""" start="00:00:00.000" video="mainVideo-llm" id="subtitle"]]
[[!template text="""about large language models and how""" start="00:00:04.160" video="mainVideo-llm" id="subtitle"]]
[[!template text="""they relate to Emacs.""" start="00:00:06.440" video="mainVideo-llm" id="subtitle"]]
[[!template text="""And I'm going to talk to you about the technology""" start="00:00:11.080" video="mainVideo-llm" id="subtitle"]]
[[!template text="""and how we're going to use it in Emacs.""" start="00:00:14.920" video="mainVideo-llm" id="subtitle"]]
[[!template text="""There'll be demos and there'll be talks about,""" start="00:00:18.280" video="mainVideo-llm" id="subtitle"]]
[[!template text="""I'll finish up by kind of talking about where""" start="00:00:21.160" video="mainVideo-llm" id="subtitle"]]
[[!template text="""I think this should go in the future.""" start="00:00:22.880" video="mainVideo-llm" id="subtitle"]]

[[!template new="1" text="""What are LLMs?""" start="00:00:25.080" video="mainVideo-llm" id="subtitle"]]

[[!template text="""So to start off with, let's just talk like,""" start="00:00:25.080" video="mainVideo-llm" id="subtitle"]]
[[!template text="""I just want to make sure everyone's on the same page.""" start="00:00:28.240" video="mainVideo-llm" id="subtitle"]]
[[!template text="""What are large language models?""" start="00:00:29.760" video="mainVideo-llm" id="subtitle"]]
[[!template text="""Not everyone may be caught up on this.""" start="00:00:30.920" video="mainVideo-llm" id="subtitle"]]
[[!template text="""Large language models are a way... Basically,""" start="00:00:34.640" video="mainVideo-llm" id="subtitle"]]
[[!template text="""the current versions of large language models""" start="00:00:39.000" video="mainVideo-llm" id="subtitle"]]
[[!template text="""are all based on the similar architecture""" start="00:00:43.000" video="mainVideo-llm" id="subtitle"]]
[[!template text="""called the transformer.""" start="00:00:44.480" video="mainVideo-llm" id="subtitle"]]
[[!template text="""It's just an efficient way to train and produce output.""" start="00:00:45.280" video="mainVideo-llm" id="subtitle"]]
[[!template text="""So these things are basically models""" start="00:00:48.720" video="mainVideo-llm" id="subtitle"]]
[[!template text="""that predict the next word or something like that.""" start="00:00:51.920" video="mainVideo-llm" id="subtitle"]]
[[!template text="""And they're trained on an enormous corpus of information""" start="00:00:58.080" video="mainVideo-llm" id="subtitle"]]
[[!template text="""and they get extremely good""" start="00:01:02.120" video="mainVideo-llm" id="subtitle"]]
[[!template text="""at predicting the next word.""" start="00:01:04.320" video="mainVideo-llm" id="subtitle"]]
[[!template text="""And from that basic ability, you can train""" start="00:01:06.080" video="mainVideo-llm" id="subtitle"]]
[[!template text="""through further tuning from human input,""" start="00:01:09.680" video="mainVideo-llm" id="subtitle"]]
[[!template text="""human ratings and things like that.""" start="00:01:12.440" video="mainVideo-llm" id="subtitle"]]
[[!template text="""You can train different models based on that""" start="00:01:13.960" video="mainVideo-llm" id="subtitle"]]
[[!template text="""that will do question answering.""" start="00:01:17.480" video="mainVideo-llm" id="subtitle"]]
[[!template text="""And this is how basically ChatGPT works.""" start="00:01:18.760" video="mainVideo-llm" id="subtitle"]]
[[!template text="""There's a base LLM, like GPT.""" start="00:01:22.520" video="mainVideo-llm" id="subtitle"]]
[[!template text="""And then you have a chat version of that,""" start="00:01:25.600" video="mainVideo-llm" id="subtitle"]]
[[!template text="""which is just trained to just... You give""" start="00:01:27.800" video="mainVideo-llm" id="subtitle"]]
[[!template text="""it a prompt, like what do you want it to do?""" start="00:01:29.960" video="mainVideo-llm" id="subtitle"]]
[[!template text="""And it gives you an output that does what you told it to do,""" start="00:01:32.200" video="mainVideo-llm" id="subtitle"]]
[[!template text="""or at least attempts to do it.""" start="00:01:37.280" video="mainVideo-llm" id="subtitle"]]
[[!template text="""Those are the power of large language models is""" start="00:01:39.920" video="mainVideo-llm" id="subtitle"]]
[[!template text="""they're extremely, extremely impressive.""" start="00:01:42.080" video="mainVideo-llm" id="subtitle"]]
[[!template text="""Certainly this is, in AI,""" start="00:01:45.640" video="mainVideo-llm" id="subtitle"]]
[[!template text="""this has been the biggest thing to happen""" start="00:01:47.200" video="mainVideo-llm" id="subtitle"]]
[[!template text="""probably in my lifetime,""" start="00:01:49.080" video="mainVideo-llm" id="subtitle"]]
[[!template text="""or at least my lifetime as my working lifetime.""" start="00:01:51.560" video="mainVideo-llm" id="subtitle"]]

[[!template new="1" text="""Power of LLMs (Magit Demo)""" start="00:01:56.360" video="mainVideo-llm" id="subtitle"]]

[[!template text="""So let me give you a demonstration of""" start="00:01:56.360" video="mainVideo-llm" id="subtitle"]]
[[!template text="""what kinds of stuff it could do in Emacs.""" start="00:02:02.560" video="mainVideo-llm" id="subtitle"]]
[[!template text="""So here I have a Emacs file.""" start="00:02:06.680" video="mainVideo-llm" id="subtitle"]]
[[!template text="""So this is my Emacs init file.""" start="00:02:09.040" video="mainVideo-llm" id="subtitle"]]
[[!template text="""I have a change.""" start="00:02:12.480" video="mainVideo-llm" id="subtitle"]]
[[!template text="""Let's commit that change.""" start="00:02:13.600" video="mainVideo-llm" id="subtitle"]]
[[!template text="""And, you know, I don't like writing commit messages,""" start="00:02:16.880" video="mainVideo-llm" id="subtitle"]]
[[!template text="""so I can generate it.""" start="00:02:19.440" video="mainVideo-llm" id="subtitle"]]
[[!template text="""And it did an actually just looking.""" start="00:02:23.040" video="mainVideo-llm" id="subtitle"]]
[[!template text="""So all it does is it's looking, it's just reading the diff.""" start="00:02:27.480" video="mainVideo-llm" id="subtitle"]]
[[!template text="""I'm just feeding it the diff with some instructions.""" start="00:02:29.760" video="mainVideo-llm" id="subtitle"]]
[[!template text="""And it is this a incredible commit message?""" start="00:02:32.480" video="mainVideo-llm" id="subtitle"]]
[[!template text="""It's not bad, actually.""" start="00:02:37.760" video="mainVideo-llm" id="subtitle"]]
[[!template text="""You can see that it actually has really extracted""" start="00:02:39.400" video="mainVideo-llm" id="subtitle"]]
[[!template text="""the meaning of what I'm doing and has written""" start="00:02:42.320" video="mainVideo-llm" id="subtitle"]]
[[!template text="""a reasonably good commit message.""" start="00:02:46.440" video="mainVideo-llm" id="subtitle"]]
[[!template text="""Now I have to edit it because this is not quite correct.""" start="00:02:48.880" video="mainVideo-llm" id="subtitle"]]
[[!template text="""But it's kind of impressive how good it is.""" start="00:02:53.160" video="mainVideo-llm" id="subtitle"]]
[[!template text="""And my editing, it's kind of easier for me to edit this""" start="00:02:55.160" video="mainVideo-llm" id="subtitle"]]
[[!template text="""than just to write a new one.""" start="00:03:00.040" video="mainVideo-llm" id="subtitle"]]
[[!template text="""And quite often it's good enough to just submit as is.""" start="00:03:01.880" video="mainVideo-llm" id="subtitle"]]
[[!template text="""So this is kind of, you know, you could say""" start="00:03:04.480" video="mainVideo-llm" id="subtitle"]]
[[!template text="""this is just commit messages.""" start="00:03:08.120" video="mainVideo-llm" id="subtitle"]]
[[!template text="""You could respond to emails.""" start="00:03:09.360" video="mainVideo-llm" id="subtitle"]]
[[!template text="""You could, you know, using your own custom instructions""" start="00:03:10.720" video="mainVideo-llm" id="subtitle"]]
[[!template text="""about what you want your email to say.""" start="00:03:15.320" video="mainVideo-llm" id="subtitle"]]
[[!template text="""It'll write the email for you.""" start="00:03:17.840" video="mainVideo-llm" id="subtitle"]]
[[!template text="""It could do like this""" start="00:03:19.040" video="mainVideo-llm" id="subtitle"]]
[[!template text="""Emacs is a way to interact with buffers.""" start="00:03:19.840" video="mainVideo-llm" id="subtitle"]]
[[!template text="""This could basically just output text.""" start="00:03:22.520" video="mainVideo-llm" id="subtitle"]]
[[!template text="""So it's super useful for""" start="00:03:24.200" video="mainVideo-llm" id="subtitle"]]
[[!template text="""understanding something and outputting text based on that,""" start="00:03:27.760" video="mainVideo-llm" id="subtitle"]]
[[!template text="""which is just useful for Emacs.""" start="00:03:30.320" video="mainVideo-llm" id="subtitle"]]

[[!template new="1" text="""Drawbacks of LLMs (regex demo)""" start="00:03:32.240" video="mainVideo-llm" id="subtitle"]]

[[!template text="""So the drawback is, yeah, it's good,""" start="00:03:32.240" video="mainVideo-llm" id="subtitle"]]
[[!template text="""but it's not that reliable.""" start="00:03:39.920" video="mainVideo-llm" id="subtitle"]]
[[!template text="""And you'd think it's very easy to get caught up in like,""" start="00:03:43.360" video="mainVideo-llm" id="subtitle"]]
[[!template text="""oh my gosh, like this is so powerful.""" start="00:03:45.680" video="mainVideo-llm" id="subtitle"]]
[[!template text="""I bet it could work this, whatever idea could work.""" start="00:03:47.640" video="mainVideo-llm" id="subtitle"]]
[[!template text="""And these ideas, like they almost can.""" start="00:03:50.600" video="mainVideo-llm" id="subtitle"]]
[[!template text="""For example, I was thinking, you know what I could do?""" start="00:03:52.920" video="mainVideo-llm" id="subtitle"]]
[[!template text="""I don't like writing regexes.""" start="00:03:55.640" video="mainVideo-llm" id="subtitle"]]
[[!template text="""Why can't I have a regex replace that's powered by LLMs?""" start="00:03:57.240" video="mainVideo-llm" id="subtitle"]]
[[!template text="""And that way I could give just an instruction""" start="00:04:01.200" video="mainVideo-llm" id="subtitle"]]
[[!template text="""to regex replace.""" start="00:04:03.440" video="mainVideo-llm" id="subtitle"]]
[[!template text="""And so for example, I could do Emacs LLM regex replace.""" start="00:04:07.400" video="mainVideo-llm" id="subtitle"]]
[[!template text="""This is not checked in anywhere.""" start="00:04:12.080" video="mainVideo-llm" id="subtitle"]]
[[!template text="""These are just my own kind of private functions.""" start="00:04:12.880" video="mainVideo-llm" id="subtitle"]]
[[!template text="""My description lowercase all the org headings.""" start="00:04:17.200" video="mainVideo-llm" id="subtitle"]]
[[!template text="""Let's see if it works.""" start="00:04:19.240" video="mainVideo-llm" id="subtitle"]]
[[!template text="""It might work.""" start="00:04:20.440" video="mainVideo-llm" id="subtitle"]]
[[!template text="""No, it doesn't work.""" start="00:04:21.040" video="mainVideo-llm" id="subtitle"]]
[[!template text="""So if I, I'm not going to bother to show you""" start="00:04:22.960" video="mainVideo-llm" id="subtitle"]]
[[!template text="""what it actually came up with, but it's something,""" start="00:04:26.160" video="mainVideo-llm" id="subtitle"]]
[[!template text="""if you looked at it, it'd be like, wow,""" start="00:04:28.160" video="mainVideo-llm" id="subtitle"]]
[[!template text="""this is very close to being...""" start="00:04:29.880" video="mainVideo-llm" id="subtitle"]]
[[!template text="""It looks like it should work, but it doesn't.""" start="00:04:31.640" video="mainVideo-llm" id="subtitle"]]
[[!template text="""Okay.""" start="00:04:34.240" video="mainVideo-llm" id="subtitle"]]
[[!template text="""It's not quite good enough to get it right.""" start="00:04:35.840" video="mainVideo-llm" id="subtitle"]]
[[!template text="""And it's possible that perhaps by giving it""" start="00:04:38.720" video="mainVideo-llm" id="subtitle"]]
[[!template text="""a few examples of, or explaining more""" start="00:04:41.600" video="mainVideo-llm" id="subtitle"]]
[[!template text="""what makes Emacs regexes different.""" start="00:04:43.640" video="mainVideo-llm" id="subtitle"]]
[[!template text="""It could do a better job""" start="00:04:46.440" video="mainVideo-llm" id="subtitle"]]
[[!template text="""and maybe could solve these problems,""" start="00:04:47.960" video="mainVideo-llm" id="subtitle"]]
[[!template text="""but it's always a little bit random.""" start="00:04:49.280" video="mainVideo-llm" id="subtitle"]]
[[!template text="""You're never quite sure what you're going to get.""" start="00:04:50.680" video="mainVideo-llm" id="subtitle"]]
[[!template text="""So this is the drawback.""" start="00:04:52.360" video="mainVideo-llm" id="subtitle"]]
[[!template text="""Like there's a lot of things that look like you could do it,""" start="00:04:54.840" video="mainVideo-llm" id="subtitle"]]
[[!template text="""but when it actually comes down to trying it,""" start="00:04:58.480" video="mainVideo-llm" id="subtitle"]]
[[!template text="""it's surprisingly hard.""" start="00:05:01.000" video="mainVideo-llm" id="subtitle"]]
[[!template text="""And, you know, and whatever you're doing,""" start="00:05:03.400" video="mainVideo-llm" id="subtitle"]]
[[!template text="""it's surprisingly hard to get something""" start="00:05:06.320" video="mainVideo-llm" id="subtitle"]]
[[!template text="""that is repeatably, that's, that is always good.""" start="00:05:09.000" video="mainVideo-llm" id="subtitle"]]
[[!template text="""So yeah, that's currently the problem.""" start="00:05:13.880" video="mainVideo-llm" id="subtitle"]]

[[!template new="1" text="""Embeddings""" start="00:05:20.120" video="mainVideo-llm" id="subtitle"]]

[[!template text="""So I want to talk about embeddings.""" start="00:05:20.120" video="mainVideo-llm" id="subtitle"]]
[[!template text="""They're another thing that LLMs offer""" start="00:05:23.400" video="mainVideo-llm" id="subtitle"]]
[[!template text="""and that are extremely useful.""" start="00:05:26.920" video="mainVideo-llm" id="subtitle"]]
[[!template text="""They are, what they do is they encode from""" start="00:05:28.600" video="mainVideo-llm" id="subtitle"]]
[[!template text="""a input text that could be a word, a sentence,""" start="00:05:33.120" video="mainVideo-llm" id="subtitle"]]
[[!template text="""a small document.""" start="00:05:38.960" video="mainVideo-llm" id="subtitle"]]
[[!template text="""It encodes a vector about what the meaning,""" start="00:05:42.160" video="mainVideo-llm" id="subtitle"]]
[[!template text="""the semantic meaning of that is.""" start="00:05:45.400" video="mainVideo-llm" id="subtitle"]]
[[!template text="""That means you could, something that is,""" start="00:05:46.920" video="mainVideo-llm" id="subtitle"]]
[[!template text="""uses completely different words,""" start="00:05:51.080" video="mainVideo-llm" id="subtitle"]]
[[!template text="""but is basically talking about the same thing,""" start="00:05:52.280" video="mainVideo-llm" id="subtitle"]]
[[!template text="""perhaps in a different language, should be pretty close""" start="00:05:54.160" video="mainVideo-llm" id="subtitle"]]
[[!template text="""as a vector to the other vector.""" start="00:05:57.840" video="mainVideo-llm" id="subtitle"]]
[[!template text="""You know, as long as they're similarly semantic things,""" start="00:06:02.000" video="mainVideo-llm" id="subtitle"]]
[[!template text="""like the words""" start="00:06:05.400" video="mainVideo-llm" id="subtitle"]]
[[!template text="""highway and Camino are two different words.""" start="00:06:12.240" video="mainVideo-llm" id="subtitle"]]
[[!template text="""They mean the same thing.""" start="00:06:18.960" video="mainVideo-llm" id="subtitle"]]
[[!template text="""They should have very similar embeddings.""" start="00:06:19.640" video="mainVideo-llm" id="subtitle"]]
[[!template text="""So it is a way to kind of encode this""" start="00:06:21.320" video="mainVideo-llm" id="subtitle"]]
[[!template text="""and then you could use this for search.""" start="00:06:25.120" video="mainVideo-llm" id="subtitle"]]
[[!template text="""For example, I haven't tried to do this yet,""" start="00:06:26.200" video="mainVideo-llm" id="subtitle"]]
[[!template text="""but you could probably just make an embedding""" start="00:06:28.920" video="mainVideo-llm" id="subtitle"]]
[[!template text="""for every paragraph in the Emacs manual""" start="00:06:31.480" video="mainVideo-llm" id="subtitle"]]
[[!template text="""and the Elisp manual.""" start="00:06:33.920" video="mainVideo-llm" id="subtitle"]]
[[!template text="""And then, and then there's a very standard technique.""" start="00:06:36.240" video="mainVideo-llm" id="subtitle"]]
[[!template text="""You just... You find that you have a query,""" start="00:06:39.440" video="mainVideo-llm" id="subtitle"]]
[[!template text="""oh, how do I do whatever, whatever in Emacs again?""" start="00:06:43.440" video="mainVideo-llm" id="subtitle"]]
[[!template text="""And you could, you just find that 20 things""" start="00:06:45.800" video="mainVideo-llm" id="subtitle"]]
[[!template text="""that are closest to whatever you're""" start="00:06:49.480" video="mainVideo-llm" id="subtitle"]]
[[!template text="""trying to... the embedding of your query.""" start="00:06:50.320" video="mainVideo-llm" id="subtitle"]]
[[!template text="""You send those things to the LLM, as you know,""" start="00:06:51.840" video="mainVideo-llm" id="subtitle"]]
[[!template text="""with the original query,""" start="00:06:55.280" video="mainVideo-llm" id="subtitle"]]
[[!template text="""and you're basically telling the--asking the LLM,""" start="00:06:57.800" video="mainVideo-llm" id="subtitle"]]
[[!template text="""look, the user is trying to do this.""" start="00:06:59.920" video="mainVideo-llm" id="subtitle"]]
[[!template text="""Here's what I found in the Emacs manual.""" start="00:07:01.280" video="mainVideo-llm" id="subtitle"]]
[[!template text="""That's on the Elisp manual.""" start="00:07:03.040" video="mainVideo-llm" id="subtitle"]]
[[!template text="""That's close to what they're trying to do.""" start="00:07:04.640" video="mainVideo-llm" id="subtitle"]]
[[!template text="""So can you kind of just tell the user what to do?""" start="00:07:07.440" video="mainVideo-llm" id="subtitle"]]
[[!template text="""And from this, and you could say,""" start="00:07:12.160" video="mainVideo-llm" id="subtitle"]]
[[!template text="""just use things from this, you know, that I give you.""" start="00:07:14.480" video="mainVideo-llm" id="subtitle"]]
[[!template text="""Don't just make up your own idea.""" start="00:07:17.640" video="mainVideo-llm" id="subtitle"]]
[[!template text="""You know, don't use your own ideas,""" start="00:07:20.680" video="mainVideo-llm" id="subtitle"]]
[[!template text="""because sometimes it likes to do that""" start="00:07:21.840" video="mainVideo-llm" id="subtitle"]]
[[!template text="""and those things are wrong.""" start="00:07:23.800" video="mainVideo-llm" id="subtitle"]]
[[!template text="""So you could try to, you know, do this and you get,""" start="00:07:24.360" video="mainVideo-llm" id="subtitle"]]
[[!template text="""you could get quite good results using this.""" start="00:07:26.720" video="mainVideo-llm" id="subtitle"]]
[[!template text="""So no one has done this yet,""" start="00:07:28.720" video="mainVideo-llm" id="subtitle"]]
[[!template text="""but that should not be hard to do.""" start="00:07:30.000" video="mainVideo-llm" id="subtitle"]]

[[!template new="1" text="""Image Generation""" start="00:07:32.800" video="mainVideo-llm" id="subtitle"]]

[[!template text="""Image generation is something that's, you know,""" start="00:07:32.800" video="mainVideo-llm" id="subtitle"]]
[[!template text="""it's not quite an LLM in the sense of...""" start="00:07:34.880" video="mainVideo-llm" id="subtitle"]]
[[!template text="""These are... It's a different technology,""" start="00:07:38.480" video="mainVideo-llm" id="subtitle"]]
[[!template text="""but these things are kind of packaged together""" start="00:07:43.080" video="mainVideo-llm" id="subtitle"]]
[[!template text="""in a sense.""" start="00:07:48.440" video="mainVideo-llm" id="subtitle"]]
[[!template text="""And you'll see that when I talk about Emacs packages,""" start="00:07:49.040" video="mainVideo-llm" id="subtitle"]]
[[!template text="""a lot of them bundle image generation""" start="00:07:51.640" video="mainVideo-llm" id="subtitle"]]
[[!template text="""and large language models.""" start="00:07:54.280" video="mainVideo-llm" id="subtitle"]]
[[!template text="""You know, the APIs are often bundled together by providers.""" start="00:07:55.440" video="mainVideo-llm" id="subtitle"]]
[[!template text="""And the general idea is it's kind of similar""" start="00:07:59.040" video="mainVideo-llm" id="subtitle"]]
[[!template text="""because it's very similar to large, you know,""" start="00:08:02.680" video="mainVideo-llm" id="subtitle"]]
[[!template text="""doing a chat thing where you, you know,""" start="00:08:04.400" video="mainVideo-llm" id="subtitle"]]
[[!template text="""the chat is like, you give it a text request,""" start="00:08:06.560" video="mainVideo-llm" id="subtitle"]]
[[!template text="""like write me a sonnet about, you know,""" start="00:08:09.761" video="mainVideo-llm" id="subtitle"]]
[[!template text="""the battle between Emacs and vi.""" start="00:08:12.760" video="mainVideo-llm" id="subtitle"]]
[[!template text="""And it could, it could do it.""" start="00:08:14.880" video="mainVideo-llm" id="subtitle"]]
[[!template text="""It could do a very good job of that.""" start="00:08:15.840" video="mainVideo-llm" id="subtitle"]]
[[!template text="""But you could also say, you know,""" start="00:08:17.160" video="mainVideo-llm" id="subtitle"]]
[[!template text="""draw me a picture of Emacs and vi as boxers,""" start="00:08:22.520" video="mainVideo-llm" id="subtitle"]]
[[!template text="""as a character-character boxing in a ring,""" start="00:08:27.600" video="mainVideo-llm" id="subtitle"]]
[[!template text="""like a, you know, political cartoon style.""" start="00:08:30.360" video="mainVideo-llm" id="subtitle"]]
[[!template text="""And it can do that as well.""" start="00:08:32.240" video="mainVideo-llm" id="subtitle"]]
[[!template text="""And so you could basically think of this""" start="00:08:35.000" video="mainVideo-llm" id="subtitle"]]
[[!template text="""as just sort of... it's kind of the""" start="00:08:37.680" video="mainVideo-llm" id="subtitle"]]
[[!template text="""same thing with what you're doing""" start="00:08:39.440" video="mainVideo-llm" id="subtitle"]]
[[!template text="""with large language models,""" start="00:08:42.400" video="mainVideo-llm" id="subtitle"]]
[[!template text="""but instead of outputting a text,""" start="00:08:43.360" video="mainVideo-llm" id="subtitle"]]
[[!template text="""you're outputting a picture.""" start="00:08:44.800" video="mainVideo-llm" id="subtitle"]]

[[!template new="1" text="""Fine-tuning""" start="00:08:48.480" video="mainVideo-llm" id="subtitle"]]

[[!template text="""There's also, I want to mention the concept of fine-tuning.""" start="00:08:48.480" video="mainVideo-llm" id="subtitle"]]
[[!template text="""Fine-tuning is a way to take your--""" start="00:08:51.080" video="mainVideo-llm" id="subtitle"]]
[[!template text="""take a corpus of inputs and outputs and just from""" start="00:08:55.200" video="mainVideo-llm" id="subtitle"]]
[[!template text="""a large language model, you're like, okay,""" start="00:08:59.760" video="mainVideo-llm" id="subtitle"]]
[[!template text="""given this base large language model,""" start="00:09:01.600" video="mainVideo-llm" id="subtitle"]]
[[!template text="""I want to make sure that when I give you input,""" start="00:09:03.600" video="mainVideo-llm" id="subtitle"]]
[[!template text="""you give me something like output.""" start="00:09:06.680" video="mainVideo-llm" id="subtitle"]]
[[!template text="""And this is what I'm just going to""" start="00:09:08.480" video="mainVideo-llm" id="subtitle"]]
[[!template text="""train you further on these,""" start="00:09:10.120" video="mainVideo-llm" id="subtitle"]]
[[!template text="""these mappings between input and output.""" start="00:09:11.800" video="mainVideo-llm" id="subtitle"]]
[[!template text="""And for example, you could do this. Like,""" start="00:09:14.880" video="mainVideo-llm" id="subtitle"]]
[[!template text="""let's say you wanted to fix that regex demo""" start="00:09:16.400" video="mainVideo-llm" id="subtitle"]]
[[!template text="""I had to make it good.""" start="00:09:18.040" video="mainVideo-llm" id="subtitle"]]
[[!template text="""I don't think it, I think it'd be""" start="00:09:21.000" video="mainVideo-llm" id="subtitle"]]
[[!template text="""relatively effective to train,""" start="00:09:23.480" video="mainVideo-llm" id="subtitle"]]
[[!template text="""to have regex descriptions""" start="00:09:25.040" video="mainVideo-llm" id="subtitle"]]
[[!template text="""and regex examples, Emacs regex examples""" start="00:09:27.040" video="mainVideo-llm" id="subtitle"]]
[[!template text="""as inputs and outputs.""" start="00:09:30.120" video="mainVideo-llm" id="subtitle"]]
[[!template text="""You could get, you know, maybe a hundred,""" start="00:09:31.240" video="mainVideo-llm" id="subtitle"]]
[[!template text="""a few hundreds of these things.""" start="00:09:34.000" video="mainVideo-llm" id="subtitle"]]
[[!template text="""You could train it.""" start="00:09:35.360" video="mainVideo-llm" id="subtitle"]]
[[!template text="""I think that is a reasonable way to,""" start="00:09:38.640" video="mainVideo-llm" id="subtitle"]]
[[!template text="""let's just say, I don't know how well it would work,""" start="00:09:40.760" video="mainVideo-llm" id="subtitle"]]
[[!template text="""but these things definitely work some of the time""" start="00:09:43.880" video="mainVideo-llm" id="subtitle"]]
[[!template text="""and produce pretty good results.""" start="00:09:46.840" video="mainVideo-llm" id="subtitle"]]
[[!template text="""And you could do this on your own machine.""" start="00:09:48.000" video="mainVideo-llm" id="subtitle"]]
[[!template text="""Corporations like OpenAI offer APIs with, you know,""" start="00:09:53.040" video="mainVideo-llm" id="subtitle"]]
[[!template text="""to build your fine tunes on top of OpenAI.""" start="00:09:59.000" video="mainVideo-llm" id="subtitle"]]
[[!template text="""And I think, I'm not a hundred percent sure,""" start="00:10:01.520" video="mainVideo-llm" id="subtitle"]]
[[!template text="""but I think then you can share your model""" start="00:10:04.160" video="mainVideo-llm" id="subtitle"]]
[[!template text="""with other people.""" start="00:10:05.720" video="mainVideo-llm" id="subtitle"]]
[[!template text="""But if not, then you just, you know,""" start="00:10:06.520" video="mainVideo-llm" id="subtitle"]]
[[!template text="""you could use your model for your own specialized purposes.""" start="00:10:08.520" video="mainVideo-llm" id="subtitle"]]
[[!template text="""But in the world of models that you could run,""" start="00:10:10.840" video="mainVideo-llm" id="subtitle"]]
[[!template text="""for example, based on Llama, which is like...""" start="00:10:14.040" video="mainVideo-llm" id="subtitle"]]
[[!template text="""Llama is this model you can run on your own machine from Meta.""" start="00:10:16.875" video="mainVideo-llm" id="subtitle"]]
[[!template text="""There's many fine-tuned models that you could download""" start="00:10:23.580" video="mainVideo-llm" id="subtitle"]]
[[!template text="""and you could run on your own.""" start="00:10:26.881" video="mainVideo-llm" id="subtitle"]]
[[!template text="""They can do very different things too.""" start="00:10:28.961" video="mainVideo-llm" id="subtitle"]]
[[!template text="""Some output Python programs, for example,""" start="00:10:30.840" video="mainVideo-llm" id="subtitle"]]
[[!template text="""that you could just run.""" start="00:10:33.400" video="mainVideo-llm" id="subtitle"]]
[[!template text="""So you just say...""" start="00:10:34.280" video="mainVideo-llm" id="subtitle"]]
[[!template text="""Tell me how old... Let's just say""" start="00:10:37.960" video="mainVideo-llm" id="subtitle"]]
[[!template text="""you have a random task, like""" start="00:10:40.640" video="mainVideo-llm" id="subtitle"]]
[[!template text="""tell me how old these five cities are in minutes,""" start="00:10:42.000" video="mainVideo-llm" id="subtitle"]]
[[!template text="""based on historical evidence.""" start="00:10:48.120" video="mainVideo-llm" id="subtitle"]]
[[!template text="""It's kind of a weird query, but it probably can figure,""" start="00:10:49.800" video="mainVideo-llm" id="subtitle"]]
[[!template text="""it could probably run that for you.""" start="00:10:53.640" video="mainVideo-llm" id="subtitle"]]
[[!template text="""It'll encode its knowledge into whatever""" start="00:10:55.120" video="mainVideo-llm" id="subtitle"]]
[[!template text="""the Python program, then use the Python program""" start="00:10:57.240" video="mainVideo-llm" id="subtitle"]]
[[!template text="""to do the correct calculations.""" start="00:10:59.600" video="mainVideo-llm" id="subtitle"]]
[[!template text="""So pretty, pretty useful stuff.""" start="00:11:01.040" video="mainVideo-llm" id="subtitle"]]

[[!template new="1" text="""Open Source""" start="00:11:08.160" video="mainVideo-llm" id="subtitle"]]

[[!template text="""So I also want to mention open source""" start="00:11:08.160" video="mainVideo-llm" id="subtitle"]]
[[!template text="""and basically free software here.""" start="00:11:10.400" video="mainVideo-llm" id="subtitle"]]
[[!template text="""These LLMs are mostly not free software.""" start="00:11:12.680" video="mainVideo-llm" id="subtitle"]]
[[!template text="""They're sometimes open source,""" start="00:11:17.600" video="mainVideo-llm" id="subtitle"]]
[[!template text="""but they're generally not free""" start="00:11:19.160" video="mainVideo-llm" id="subtitle"]]
[[!template text="""without restrictions to use.""" start="00:11:21.960" video="mainVideo-llm" id="subtitle"]]
[[!template text="""Most of these things, even Llama,""" start="00:11:23.800" video="mainVideo-llm" id="subtitle"]]
[[!template text="""which you can use on your own machine,""" start="00:11:27.280" video="mainVideo-llm" id="subtitle"]]
[[!template text="""have restrictions that you cannot use it""" start="00:11:28.680" video="mainVideo-llm" id="subtitle"]]
[[!template text="""to train your own model.""" start="00:11:31.440" video="mainVideo-llm" id="subtitle"]]
[[!template text="""This is something that, you know,""" start="00:11:32.520" video="mainVideo-llm" id="subtitle"]]
[[!template text="""it costs millions and millions of dollars""" start="00:11:35.120" video="mainVideo-llm" id="subtitle"]]
[[!template text="""to train and produce these models.""" start="00:11:37.520" video="mainVideo-llm" id="subtitle"]]
[[!template text="""And that's just computation costs.""" start="00:11:40.760" video="mainVideo-llm" id="subtitle"]]
[[!template text="""They do not want you""" start="00:11:42.320" video="mainVideo-llm" id="subtitle"]]
[[!template text="""stealing all that work by training your own models""" start="00:11:45.520" video="mainVideo-llm" id="subtitle"]]
[[!template text="""based on their output.""" start="00:11:47.840" video="mainVideo-llm" id="subtitle"]]
[[!template text="""But there are research LLMs that do, I believe,""" start="00:11:48.800" video="mainVideo-llm" id="subtitle"]]
[[!template text="""conform to free software principles.""" start="00:11:55.360" video="mainVideo-llm" id="subtitle"]]
[[!template text="""They're just not as good yet.""" start="00:11:58.000" video="mainVideo-llm" id="subtitle"]]
[[!template text="""And I think that might change in the future.""" start="00:11:59.520" video="mainVideo-llm" id="subtitle"]]

[[!template new="1" text="""The Future""" start="00:12:02.840" video="mainVideo-llm" id="subtitle"]]

[[!template text="""So speaking of the future,""" start="00:12:02.840" video="mainVideo-llm" id="subtitle"]]
[[!template text="""one of the things I'd like to point out""" start="00:12:04.120" video="mainVideo-llm" id="subtitle"]]
[[!template text="""is that like the demos I showed you are based on,""" start="00:12:07.520" video="mainVideo-llm" id="subtitle"]]
[[!template text="""I'm using OpenAI 3.5 model.""" start="00:12:09.640" video="mainVideo-llm" id="subtitle"]]
[[!template text="""That's more than, well, no,""" start="00:12:13.520" video="mainVideo-llm" id="subtitle"]]
[[!template text="""it's like a year old basically at this point.""" start="00:12:16.440" video="mainVideo-llm" id="subtitle"]]
[[!template text="""And things are moving fast.""" start="00:12:18.200" video="mainVideo-llm" id="subtitle"]]
[[!template text="""They came out with 4.0.""" start="00:12:21.080" video="mainVideo-llm" id="subtitle"]]
[[!template text="""4.0 is significantly better.""" start="00:12:22.040" video="mainVideo-llm" id="subtitle"]]
[[!template text="""I don't have access to it.""" start="00:12:23.320" video="mainVideo-llm" id="subtitle"]]
[[!template text="""Even though I'm using the API and I'm paying money for it,""" start="00:12:24.320" video="mainVideo-llm" id="subtitle"]]
[[!template text="""you only can get access to 4.0""" start="00:12:30.840" video="mainVideo-llm" id="subtitle"]]
[[!template text="""if you can spend a dollar.""" start="00:12:33.640" video="mainVideo-llm" id="subtitle"]]
[[!template text="""And I've never been able to spend,""" start="00:12:34.440" video="mainVideo-llm" id="subtitle"]]
[[!template text="""use so much API use that I've spent a dollar.""" start="00:12:36.320" video="mainVideo-llm" id="subtitle"]]
[[!template text="""So I have, I don't have 4.0, but I've tried it""" start="00:12:38.200" video="mainVideo-llm" id="subtitle"]]
[[!template text="""because I do pay for this""" start="00:12:44.480" video="mainVideo-llm" id="subtitle"]]
[[!template text="""so I could get access to 4.0""" start="00:12:46.640" video="mainVideo-llm" id="subtitle"]]
[[!template text="""and it is substantially better.""" start="00:12:48.341" video="mainVideo-llm" id="subtitle"]]
[[!template text="""By all reports, it's,""" start="00:12:49.600" video="mainVideo-llm" id="subtitle"]]
[[!template text="""the difference is extremely significant.""" start="00:12:50.520" video="mainVideo-llm" id="subtitle"]]
[[!template text="""I would not be surprised""" start="00:12:53.840" video="mainVideo-llm" id="subtitle"]]
[[!template text="""if some of the limitations and drawbacks I described""" start="00:12:55.160" video="mainVideo-llm" id="subtitle"]]
[[!template text="""mostly went away with 4.0.""" start="00:12:59.760" video="mainVideo-llm" id="subtitle"]]
[[!template text="""We're probably at a stage""" start="00:13:02.040" video="mainVideo-llm" id="subtitle"]]
[[!template text="""where regexes will work maybe 5% of the time""" start="00:13:06.680" video="mainVideo-llm" id="subtitle"]]
[[!template text="""if you try them.""" start="00:13:09.240" video="mainVideo-llm" id="subtitle"]]
[[!template text="""But with 4.0, it could work like 80% of the time.""" start="00:13:10.120" video="mainVideo-llm" id="subtitle"]]
[[!template text="""Now, is that good enough?""" start="00:13:13.640" video="mainVideo-llm" id="subtitle"]]
[[!template text="""Probably not, but it's a,""" start="00:13:14.560" video="mainVideo-llm" id="subtitle"]]
[[!template text="""I wouldn't be surprised if you got results like that.""" start="00:13:17.280" video="mainVideo-llm" id="subtitle"]]
[[!template text="""And in a year's time, in two years time,""" start="00:13:20.320" video="mainVideo-llm" id="subtitle"]]
[[!template text="""no one knows how much this is going to play out""" start="00:13:22.920" video="mainVideo-llm" id="subtitle"]]
[[!template text="""before progress stalls,""" start="00:13:26.680" video="mainVideo-llm" id="subtitle"]]
[[!template text="""but there are a lot of interesting research.""" start="00:13:27.520" video="mainVideo-llm" id="subtitle"]]
[[!template text="""I don't think, research wise,""" start="00:13:32.320" video="mainVideo-llm" id="subtitle"]]
[[!template text="""I don't think things have slowed down.""" start="00:13:34.280" video="mainVideo-llm" id="subtitle"]]
[[!template text="""You're still seeing a lot of advances.""" start="00:13:35.760" video="mainVideo-llm" id="subtitle"]]
[[!template text="""You're still seeing a lot of models coming out""" start="00:13:38.720" video="mainVideo-llm" id="subtitle"]]
[[!template text="""and that will come out.""" start="00:13:41.000" video="mainVideo-llm" id="subtitle"]]
[[!template text="""That will be each one, one upping the other one""" start="00:13:41.840" video="mainVideo-llm" id="subtitle"]]
[[!template text="""in terms of quality.""" start="00:13:46.280" video="mainVideo-llm" id="subtitle"]]
[[!template text="""It'll be really interesting to see how this all plays out.""" start="00:13:49.960" video="mainVideo-llm" id="subtitle"]]
[[!template text="""I think that message here is that""" start="00:13:52.760" video="mainVideo-llm" id="subtitle"]]
[[!template text="""we're at the beginning here.""" start="00:13:55.920" video="mainVideo-llm" id="subtitle"]]
[[!template text="""This is why I think this talk is important.""" start="00:13:58.000" video="mainVideo-llm" id="subtitle"]]
[[!template text="""I think this is why we should be""" start="00:14:01.240" video="mainVideo-llm" id="subtitle"]]
[[!template text="""paying attention to this stuff.""" start="00:14:02.280" video="mainVideo-llm" id="subtitle"]]

[[!template new="1" text="""LLMs in Emacs - existing packages""" start="00:14:08.200" video="mainVideo-llm" id="subtitle"]]

[[!template text="""Let's talk about the existing packages.""" start="00:14:08.200" video="mainVideo-llm" id="subtitle"]]
[[!template text="""Because there's a lot out there, people have,""" start="00:14:11.040" video="mainVideo-llm" id="subtitle"]]
[[!template text="""I think people have been integrating with""" start="00:14:13.200" video="mainVideo-llm" id="subtitle"]]
[[!template text="""these LLMs that often have a relatively easy to use API.""" start="00:14:17.040" video="mainVideo-llm" id="subtitle"]]
[[!template text="""So it's kind of natural that people""" start="00:14:21.240" video="mainVideo-llm" id="subtitle"]]
[[!template text="""have already put out a lot of packages.""" start="00:14:24.040" video="mainVideo-llm" id="subtitle"]]
[[!template text="""Coming off this problem from a lot of different angles,""" start="00:14:25.680" video="mainVideo-llm" id="subtitle"]]
[[!template text="""I don't have time to go through""" start="00:14:28.320" video="mainVideo-llm" id="subtitle"]]
[[!template text="""all of these packages.""" start="00:14:30.640" video="mainVideo-llm" id="subtitle"]]
[[!template text="""These are great packages though.""" start="00:14:31.960" video="mainVideo-llm" id="subtitle"]]
[[!template text="""If you're not familiar with them,""" start="00:14:33.560" video="mainVideo-llm" id="subtitle"]]
[[!template text="""please check them out.""" start="00:14:35.280" video="mainVideo-llm" id="subtitle"]]
[[!template text="""And they all are doing slightly different things.""" start="00:14:37.680" video="mainVideo-llm" id="subtitle"]]
[[!template text="""Some of these are relatively straightforward.""" start="00:14:41.000" video="mainVideo-llm" id="subtitle"]]
[[!template text="""Interactions, just a way to""" start="00:14:43.960" video="mainVideo-llm" id="subtitle"]]
[[!template text="""almost in a comment sort of way to kind of""" start="00:14:47.920" video="mainVideo-llm" id="subtitle"]]
[[!template text="""have just an interaction,""" start="00:14:52.680" video="mainVideo-llm" id="subtitle"]]
[[!template text="""long running interaction with an LLM""" start="00:14:54.200" video="mainVideo-llm" id="subtitle"]]
[[!template text="""where you kind of build off previous responses,""" start="00:14:55.480" video="mainVideo-llm" id="subtitle"]]
[[!template text="""kind of like the OpenAI's UI.""" start="00:14:59.040" video="mainVideo-llm" id="subtitle"]]
[[!template text="""Two very more Emacsy things where you can sort of""" start="00:15:01.800" video="mainVideo-llm" id="subtitle"]]
[[!template text="""embed these LLM responses within a org-mode block""" start="00:15:08.560" video="mainVideo-llm" id="subtitle"]]
[[!template text="""using the org-mode's context.""" start="00:15:13.680" video="mainVideo-llm" id="subtitle"]]
[[!template text="""Or GitHub Copilot integration where you can use it""" start="00:15:15.240" video="mainVideo-llm" id="subtitle"]]
[[!template text="""for auto completion in a very powerful,""" start="00:15:20.960" video="mainVideo-llm" id="subtitle"]]
[[!template text="""you know, this stuff is very useful if it could figure out""" start="00:15:23.320" video="mainVideo-llm" id="subtitle"]]
[[!template text="""what you're trying to do based on the context.""" start="00:15:27.320" video="mainVideo-llm" id="subtitle"]]
[[!template text="""It's quite effective.""" start="00:15:29.200" video="mainVideo-llm" id="subtitle"]]
[[!template text="""But I want to kind of call out one thing""" start="00:15:31.840" video="mainVideo-llm" id="subtitle"]]
[[!template text="""that I'd like to see change.""" start="00:15:36.360" video="mainVideo-llm" id="subtitle"]]
[[!template text="""Which is that users right now,""" start="00:15:38.240" video="mainVideo-llm" id="subtitle"]]
[[!template text="""not all of these have a choice of,""" start="00:15:42.600" video="mainVideo-llm" id="subtitle"]]
[[!template text="""first of all, there's a lot of them.""" start="00:15:45.200" video="mainVideo-llm" id="subtitle"]]
[[!template text="""Each one of them is doing their own calls.""" start="00:15:47.960" video="mainVideo-llm" id="subtitle"]]
[[!template text="""And each one of them is, so each one of them""" start="00:15:49.640" video="mainVideo-llm" id="subtitle"]]
[[!template text="""has their own interfaces.""" start="00:15:54.000" video="mainVideo-llm" id="subtitle"]]
[[!template text="""They're rewriting the interface to OpenAI or wherever.""" start="00:15:55.240" video="mainVideo-llm" id="subtitle"]]
[[!template text="""And they're not, they don't, most of these""" start="00:15:57.720" video="mainVideo-llm" id="subtitle"]]
[[!template text="""do not make it that configurable or at all configurable""" start="00:16:00.120" video="mainVideo-llm" id="subtitle"]]
[[!template text="""what LLM use.""" start="00:16:05.120" video="mainVideo-llm" id="subtitle"]]
[[!template text="""This is not good.""" start="00:16:06.600" video="mainVideo-llm" id="subtitle"]]
[[!template text="""It is important that we use,""" start="00:16:07.240" video="mainVideo-llm" id="subtitle"]]
[[!template text="""we give the user a way to change the LLM they use.""" start="00:16:09.680" video="mainVideo-llm" id="subtitle"]]
[[!template text="""And that is because you might not be comfortable""" start="00:16:15.680" video="mainVideo-llm" id="subtitle"]]
[[!template text="""sending your requests over to a private corporation""" start="00:16:21.080" video="mainVideo-llm" id="subtitle"]]
[[!template text="""where you don't get to see how they use their data.""" start="00:16:24.440" video="mainVideo-llm" id="subtitle"]]
[[!template text="""Your data, really.""" start="00:16:27.800" video="mainVideo-llm" id="subtitle"]]
[[!template text="""That's especially true with things like embeddings""" start="00:16:29.800" video="mainVideo-llm" id="subtitle"]]
[[!template text="""where you might be sending over your documents.""" start="00:16:33.320" video="mainVideo-llm" id="subtitle"]]
[[!template text="""You're just giving them your documents, basically.""" start="00:16:35.040" video="mainVideo-llm" id="subtitle"]]
[[!template text="""And, you know, that does happen.""" start="00:16:37.520" video="mainVideo-llm" id="subtitle"]]
[[!template text="""I don't think really that there's a reason""" start="00:16:40.760" video="mainVideo-llm" id="subtitle"]]
[[!template text="""to be uncomfortable with this,""" start="00:16:43.600" video="mainVideo-llm" id="subtitle"]]
[[!template text="""but that, you know, people are uncomfortable and that's okay.""" start="00:16:44.640" video="mainVideo-llm" id="subtitle"]]
[[!template text="""People might want to use a local machine,""" start="00:16:51.440" video="mainVideo-llm" id="subtitle"]]
[[!template text="""a local LLM for maximum privacy.""" start="00:16:53.240" video="mainVideo-llm" id="subtitle"]]
[[!template text="""That's something we should allow.""" start="00:16:58.360" video="mainVideo-llm" id="subtitle"]]
[[!template text="""People might want to especially use free software.""" start="00:17:00.640" video="mainVideo-llm" id="subtitle"]]
[[!template text="""That's something we should definitely allow.""" start="00:17:04.520" video="mainVideo-llm" id="subtitle"]]
[[!template text="""This is Emacs.""" start="00:17:05.840" video="mainVideo-llm" id="subtitle"]]
[[!template text="""We need to encourage that.""" start="00:17:07.280" video="mainVideo-llm" id="subtitle"]]
[[!template text="""But right now, as most of these things are written,""" start="00:17:08.240" video="mainVideo-llm" id="subtitle"]]
[[!template text="""you can't do it.""" start="00:17:12.160" video="mainVideo-llm" id="subtitle"]]
[[!template text="""And they're spending precious time""" start="00:17:13.960" video="mainVideo-llm" id="subtitle"]]
[[!template text="""just doing things themselves.""" start="00:17:17.840" video="mainVideo-llm" id="subtitle"]]
[[!template text="""This is why I wrote LLM, which is...""" start="00:17:18.880" video="mainVideo-llm" id="subtitle"]]
[[!template text="""it will just make that connection to the LLM for you""" start="00:17:20.840" video="mainVideo-llm" id="subtitle"]]
[[!template text="""and it will connect to, you know, it has plugins.""" start="00:17:23.040" video="mainVideo-llm" id="subtitle"]]
[[!template text="""So if you can, the user can configure what plugin""" start="00:17:26.720" video="mainVideo-llm" id="subtitle"]]
[[!template text="""it actually goes to.""" start="00:17:30.280" video="mainVideo-llm" id="subtitle"]]
[[!template text="""Does it go to OpenAI?""" start="00:17:31.360" video="mainVideo-llm" id="subtitle"]]
[[!template text="""Does it go to Google Cloud Vertex?""" start="00:17:32.400" video="mainVideo-llm" id="subtitle"]]
[[!template text="""Does it go to Llama on your machine?""" start="00:17:35.240" video="mainVideo-llm" id="subtitle"]]
[[!template text="""We're using Ollama,""" start="00:17:37.000" video="mainVideo-llm" id="subtitle"]]
[[!template text="""which is just a way to run Llama locally.""" start="00:17:38.400" video="mainVideo-llm" id="subtitle"]]
[[!template text="""And more things in the future, I hope.""" start="00:17:41.000" video="mainVideo-llm" id="subtitle"]]
[[!template text="""So this is, I'm hoping that we use this.""" start="00:17:47.960" video="mainVideo-llm" id="subtitle"]]
[[!template text="""It's designed to be sort of maximally usable.""" start="00:17:52.080" video="mainVideo-llm" id="subtitle"]]
[[!template text="""You don't need to install anything.""" start="00:17:54.840" video="mainVideo-llm" id="subtitle"]]
[[!template text="""It's on GNU ELPA.""" start="00:17:56.280" video="mainVideo-llm" id="subtitle"]]
[[!template text="""So even if you write something""" start="00:17:58.360" video="mainVideo-llm" id="subtitle"]]
[[!template text="""that you want to contribute to GNU ELPA,""" start="00:17:59.880" video="mainVideo-llm" id="subtitle"]]
[[!template text="""you can use it because it's on GNU ELPA.""" start="00:18:01.080" video="mainVideo-llm" id="subtitle"]]
[[!template text="""It's part of the Emacs package, Emacs core packages.""" start="00:18:02.880" video="mainVideo-llm" id="subtitle"]]
[[!template text="""So, but it has no functionality.""" start="00:18:06.440" video="mainVideo-llm" id="subtitle"]]
[[!template text="""It's really just there as a library""" start="00:18:09.880" video="mainVideo-llm" id="subtitle"]]
[[!template text="""to use by other things offering functionality. Okay.""" start="00:18:11.720" video="mainVideo-llm" id="subtitle"]]

[[!template new="1" text="""Abstracting LLM challenges""" start="00:18:15.960" video="mainVideo-llm" id="subtitle"]]

[[!template text="""And it's a little bit difficult to abstract.""" start="00:18:15.960" video="mainVideo-llm" id="subtitle"]]
[[!template text="""I want to point this out""" start="00:18:19.840" video="mainVideo-llm" id="subtitle"]]
[[!template text="""because I think it's an important point""" start="00:18:21.160" video="mainVideo-llm" id="subtitle"]]
[[!template text="""is that the, it's, some of these LLMs, for example,""" start="00:18:23.600" video="mainVideo-llm" id="subtitle"]]
[[!template text="""have image generation.""" start="00:18:29.520" video="mainVideo-llm" id="subtitle"]]
[[!template text="""Some do not.""" start="00:18:30.440" video="mainVideo-llm" id="subtitle"]]
[[!template text="""Some of them have very large context windows, even for chat.""" start="00:18:31.280" video="mainVideo-llm" id="subtitle"]]
[[!template text="""You say, okay, all these things can do chat.""" start="00:18:35.320" video="mainVideo-llm" id="subtitle"]]
[[!template text="""Okay.""" start="00:18:37.000" video="mainVideo-llm" id="subtitle"]]
[[!template text="""Yeah, kind of.""" start="00:18:37.320" video="mainVideo-llm" id="subtitle"]]
[[!template text="""Some of these things you could pass a book to,""" start="00:18:38.080" video="mainVideo-llm" id="subtitle"]]
[[!template text="""like Anthropic's API.""" start="00:18:40.000" video="mainVideo-llm" id="subtitle"]]
[[!template text="""Most, you cannot.""" start="00:18:41.240" video="mainVideo-llm" id="subtitle"]]
[[!template text="""So there really are big differences""" start="00:18:43.040" video="mainVideo-llm" id="subtitle"]]
[[!template text="""in how these things work.""" start="00:18:45.560" video="mainVideo-llm" id="subtitle"]]
[[!template text="""I hope those differences diminish in the future.""" start="00:18:46.400" video="mainVideo-llm" id="subtitle"]]
[[!template text="""But it's just one of the challenges""" start="00:18:51.540" video="mainVideo-llm" id="subtitle"]]
[[!template text="""that I hope we can work through in the LLM library.""" start="00:18:53.801" video="mainVideo-llm" id="subtitle"]]
[[!template text="""So it's compatible, but there's definitely""" start="00:18:57.521" video="mainVideo-llm" id="subtitle"]]
[[!template text="""limits to that compatibility.""" start="00:19:02.161" video="mainVideo-llm" id="subtitle"]]

[[!template new="1" text="""Emacs is the ideal interface for LLMs""" start="00:19:04.080" video="mainVideo-llm" id="subtitle"]]

[[!template text="""I want to point out just to finish off,""" start="00:19:04.080" video="mainVideo-llm" id="subtitle"]]
[[!template text="""Emacs is the, Emacs has real power here""" start="00:19:06.161" video="mainVideo-llm" id="subtitle"]]
[[!template text="""that nothing else I think in the industry is offering.""" start="00:19:12.880" video="mainVideo-llm" id="subtitle"]]
[[!template text="""First of all, people that use Emacs""" start="00:19:15.680" video="mainVideo-llm" id="subtitle"]]
[[!template text="""tend to do a lot of things in Emacs.""" start="00:19:19.280" video="mainVideo-llm" id="subtitle"]]
[[!template text="""We have our to-dos in Emacs with the org mode.""" start="00:19:20.440" video="mainVideo-llm" id="subtitle"]]
[[!template text="""We have mail.""" start="00:19:22.160" video="mainVideo-llm" id="subtitle"]]
[[!template text="""We, you know, we might read email and we might,""" start="00:19:23.000" video="mainVideo-llm" id="subtitle"]]
[[!template text="""and respond to email in Emacs.""" start="00:19:25.720" video="mainVideo-llm" id="subtitle"]]
[[!template text="""We might have notes in Emacs.""" start="00:19:27.680" video="mainVideo-llm" id="subtitle"]]
[[!template text="""This is very powerful.""" start="00:19:29.200" video="mainVideo-llm" id="subtitle"]]
[[!template text="""Using... there's not other stuff like that.""" start="00:19:31.360" video="mainVideo-llm" id="subtitle"]]
[[!template text="""And you could feed this stuff to an LLM.""" start="00:19:34.160" video="mainVideo-llm" id="subtitle"]]
[[!template text="""You could do interesting things""" start="00:19:35.760" video="mainVideo-llm" id="subtitle"]]
[[!template text="""using a combination of all this data.""" start="00:19:37.040" video="mainVideo-llm" id="subtitle"]]
[[!template text="""No one else could do this.""" start="00:19:38.560" video="mainVideo-llm" id="subtitle"]]
[[!template text="""We need to start thinking about it.""" start="00:19:40.400" video="mainVideo-llm" id="subtitle"]]
[[!template text="""Secondly, Emacs can execute commands.""" start="00:19:41.760" video="mainVideo-llm" id="subtitle"]]
[[!template text="""This might be a bad idea.""" start="00:19:45.040" video="mainVideo-llm" id="subtitle"]]
[[!template text="""This might be how the robots take over,""" start="00:19:46.240" video="mainVideo-llm" id="subtitle"]]
[[!template text="""but you could have the LLMs respond with Emacs""" start="00:19:48.400" video="mainVideo-llm" id="subtitle"]]
[[!template text="""commands and run those Emacs commands""" start="00:19:51.800" video="mainVideo-llm" id="subtitle"]]
[[!template text="""and tell the LLM the response and have it do things""" start="00:19:54.200" video="mainVideo-llm" id="subtitle"]]
[[!template text="""as your agent in the editor.""" start="00:19:57.080" video="mainVideo-llm" id="subtitle"]]
[[!template text="""I think we need to explore ideas like this.""" start="00:19:58.680" video="mainVideo-llm" id="subtitle"]]

[[!template new="1" text="""Outro""" start="00:20:01.960" video="mainVideo-llm" id="subtitle"]]

[[!template text="""And I think we need to share these ideas""" start="00:20:01.960" video="mainVideo-llm" id="subtitle"]]
[[!template text="""and we need to make sure that we're pushing the""" start="00:20:04.280" video="mainVideo-llm" id="subtitle"]]
[[!template text="""envelope for Emacs and actually, you know, doing things,""" start="00:20:07.040" video="mainVideo-llm" id="subtitle"]]
[[!template text="""sharing ideas, sharing progress,""" start="00:20:10.520" video="mainVideo-llm" id="subtitle"]]
[[!template text="""and kind of seeing how far we can push this stuff.""" start="00:20:12.960" video="mainVideo-llm" id="subtitle"]]
[[!template text="""Let's really help Emacs out, be sort of,""" start="00:20:15.200" video="mainVideo-llm" id="subtitle"]]
[[!template text="""take advantage of this super powerful technique.""" start="00:20:20.640" video="mainVideo-llm" id="subtitle"]]
[[!template text="""Thank you for listening.""" start="00:20:24.520" video="mainVideo-llm" id="subtitle"]]



Captioner: bala

<a name="llm-qanda-transcript"></a>
# Q&A transcript (unedited)

[[!template text="""[Speaker 0]: Okay. Hello, everyone.""" start="00:00:13.099" video="qanda-llm" id="subtitle"]]
[[!template text="""I think this is the start of the Q&A session.""" start="00:00:16.560" video="qanda-llm" id="subtitle"]]
[[!template text="""So people can just ask me questions here.""" start="00:00:25.119" video="qanda-llm" id="subtitle"]]
[[!template text="""Or I think maybe these questions are going to""" start="00:00:28.259" video="qanda-llm" id="subtitle"]]
[[!template text="""be read by someone. Yes,""" start="00:00:30.560" video="qanda-llm" id="subtitle"]]
[[!template text="""thank you. Should I start doing that?""" start="00:00:34.680" video="qanda-llm" id="subtitle"]]
[[!template text="""I also know that there's questions in the""" start="00:00:39.280" video="qanda-llm" id="subtitle"]]
[[!template text="""either pad room, so I could start out""" start="00:00:41.320" video="qanda-llm" id="subtitle"]]
[[!template text="""answering those as well.""" start="00:00:42.280" video="qanda-llm" id="subtitle"]]
[[!template text="""[Speaker 1]: Right, sure. Whichever way you prefer.""" start="00:00:45.020" video="qanda-llm" id="subtitle"]]
[[!template text="""If you prefer to read the questions yourself,""" start="00:00:46.860" video="qanda-llm" id="subtitle"]]
[[!template text="""by all means, or if you would prefer me to""" start="00:00:48.940" video="qanda-llm" id="subtitle"]]
[[!template text="""read them to you, that also works.""" start="00:00:50.080" video="qanda-llm" id="subtitle"]]
[[!template text="""[Speaker 2]: Oh, I see.""" start="00:00:50.580" video="qanda-llm" id="subtitle"]]
[[!template text="""[Speaker 0]: Why don't you read them to me?""" start="00:00:51.600" video="qanda-llm" id="subtitle"]]
[[!template text="""I think it'll just be more interesting then.""" start="00:00:53.260" video="qanda-llm" id="subtitle"]]
[[!template text="""[Speaker 1]: Sure. OK, let's see. The first question is,""" start="00:00:56.820" video="qanda-llm" id="subtitle"]]
[[!template text="""what is your use case for embedding,""" start="00:00:58.360" video="qanda-llm" id="subtitle"]]
[[!template text="""mainly for searching?""" start="00:01:00.060" video="qanda-llm" id="subtitle"]]
[[!template text="""[Speaker 0]: Yeah, I mean, I think the use case really is""" start="00:01:06.180" video="qanda-llm" id="subtitle"]]
[[!template text="""searching. And I think it is very useful when""" start="00:01:12.100" video="qanda-llm" id="subtitle"]]
[[!template text="""you're searching for something in a vague""" start="00:01:15.060" video="qanda-llm" id="subtitle"]]
[[!template text="""way. Just to give you an example,""" start="00:01:18.280" video="qanda-llm" id="subtitle"]]
[[!template text="""I have a note system called EKG.""" start="00:01:23.860" video="qanda-llm" id="subtitle"]]
[[!template text="""I type all my notes on it.""" start="00:01:25.760" video="qanda-llm" id="subtitle"]]
[[!template text="""You can find it on GitHub and Melba.""" start="00:01:28.620" video="qanda-llm" id="subtitle"]]
[[!template text="""But I wrote something at some point a year""" start="00:01:34.140" video="qanda-llm" id="subtitle"]]
[[!template text="""ago or something. I wrote something that I""" start="00:01:35.840" video="qanda-llm" id="subtitle"]]
[[!template text="""just vaguely remembered.""" start="00:01:36.600" video="qanda-llm" id="subtitle"]]
[[!template text="""Oh, this was about a certain kind of""" start="00:01:38.800" video="qanda-llm" id="subtitle"]]
[[!template text="""communication. I wanted communicating to""" start="00:01:41.580" video="qanda-llm" id="subtitle"]]
[[!template text="""large audiences. There's some interesting tip""" start="00:01:43.280" video="qanda-llm" id="subtitle"]]
[[!template text="""that I wrote down that was really cool.""" start="00:01:44.700" video="qanda-llm" id="subtitle"]]
[[!template text="""And I was like, well, I need to find it.""" start="00:01:49.920" video="qanda-llm" id="subtitle"]]
[[!template text="""So I did an embedding search for something""" start="00:01:52.260" video="qanda-llm" id="subtitle"]]
[[!template text="""like, you know, tips for communicating.""" start="00:01:55.479" video="qanda-llm" id="subtitle"]]
[[!template text="""Like those words may not have been in what I""" start="00:01:58.979" video="qanda-llm" id="subtitle"]]
[[!template text="""was trying to find at all,""" start="00:02:00.020" video="qanda-llm" id="subtitle"]]
[[!template text="""But it was able to find it.""" start="00:02:02.680" video="qanda-llm" id="subtitle"]]
[[!template text="""And that is something that's very hard to do""" start="00:02:05.840" video="qanda-llm" id="subtitle"]]
[[!template text="""in other ways. Like, you know,""" start="00:02:07.200" video="qanda-llm" id="subtitle"]]
[[!template text="""if you had to do this with normal search,""" start="00:02:08.520" video="qanda-llm" id="subtitle"]]
[[!template text="""you have to do synonyms.""" start="00:02:09.199" video="qanda-llm" id="subtitle"]]
[[!template text="""And like maybe those synonyms wouldn't cover""" start="00:02:10.940" video="qanda-llm" id="subtitle"]]
[[!template text="""it. Like with embedding,""" start="00:02:11.960" video="qanda-llm" id="subtitle"]]
[[!template text="""you can basically get at like the vague""" start="00:02:13.940" video="qanda-llm" id="subtitle"]]
[[!template text="""sentiment. You're like,""" start="00:02:14.960" video="qanda-llm" id="subtitle"]]
[[!template text="""you know, you're, you know,""" start="00:02:17.320" video="qanda-llm" id="subtitle"]]
[[!template text="""you can really query on like what things are""" start="00:02:19.520" video="qanda-llm" id="subtitle"]]
[[!template text="""about as opposed to what words they have.""" start="00:02:21.760" video="qanda-llm" id="subtitle"]]
[[!template text="""Also, it's super good for similarity search.""" start="00:02:25.600" video="qanda-llm" id="subtitle"]]
[[!template text="""So you could say, look,""" start="00:02:27.720" video="qanda-llm" id="subtitle"]]
[[!template text="""I have a bunch of things that are encoded""" start="00:02:30.040" video="qanda-llm" id="subtitle"]]
[[!template text="""with embeddings that I want to show.""" start="00:02:31.400" video="qanda-llm" id="subtitle"]]
[[!template text="""For example, you can make an embedding for""" start="00:02:34.120" video="qanda-llm" id="subtitle"]]
[[!template text="""every buffer. You'd be like,""" start="00:02:35.220" video="qanda-llm" id="subtitle"]]
[[!template text="""well, show me buffers that are similar to""" start="00:02:37.060" video="qanda-llm" id="subtitle"]]
[[!template text="""this buffer. That doesn't sound super useful,""" start="00:02:38.740" video="qanda-llm" id="subtitle"]]
[[!template text="""but this is the kind of thing you could do.""" start="00:02:40.440" video="qanda-llm" id="subtitle"]]
[[!template text="""And so if you have a bunch of notes or""" start="00:02:45.300" video="qanda-llm" id="subtitle"]]
[[!template text="""something else that you want to search on,""" start="00:02:46.720" video="qanda-llm" id="subtitle"]]
[[!template text="""you'd be like, what's similar to this buffer?""" start="00:02:48.240" video="qanda-llm" id="subtitle"]]
[[!template text="""Or what notes are similar to each other?""" start="00:02:51.500" video="qanda-llm" id="subtitle"]]
[[!template text="""What buffers are similar to each other?""" start="00:02:53.040" video="qanda-llm" id="subtitle"]]
[[!template text="""It's super good for this sort of thing.""" start="00:02:55.380" video="qanda-llm" id="subtitle"]]
[[!template text="""And it's also good for this kind of retrieval""" start="00:03:00.780" video="qanda-llm" id="subtitle"]]
[[!template text="""augmented generation, where you sort of,""" start="00:03:03.080" video="qanda-llm" id="subtitle"]]
[[!template text="""you retrieve things and the purpose is not""" start="00:03:05.080" video="qanda-llm" id="subtitle"]]
[[!template text="""for you to see them, but then you pass that""" start="00:03:06.600" video="qanda-llm" id="subtitle"]]
[[!template text="""to the LLM. And then it's able to be a little""" start="00:03:12.040" video="qanda-llm" id="subtitle"]]
[[!template text="""bit more accurate because it has the actual""" start="00:03:14.340" video="qanda-llm" id="subtitle"]]
[[!template text="""text that you're trying to,""" start="00:03:15.760" video="qanda-llm" id="subtitle"]]
[[!template text="""that is relevant, and it can cite from and""" start="00:03:18.960" video="qanda-llm" id="subtitle"]]
[[!template text="""things like that. And then it could give you""" start="00:03:20.720" video="qanda-llm" id="subtitle"]]
[[!template text="""a much better answer that's kind of,""" start="00:03:22.260" video="qanda-llm" id="subtitle"]]
[[!template text="""you know, not just from its own little neural""" start="00:03:25.520" video="qanda-llm" id="subtitle"]]
[[!template text="""nets and memory.""" start="00:03:26.320" video="qanda-llm" id="subtitle"]]
[[!template text="""[Speaker 1]: Cool, thanks. Let's see,""" start="00:03:31.920" video="qanda-llm" id="subtitle"]]
[[!template text="""next question. What do you think about embed""" start="00:03:35.740" video="qanda-llm" id="subtitle"]]
[[!template text="""Emacs manual versus GPT's Emacs manual?""" start="00:03:40.160" video="qanda-llm" id="subtitle"]]
[[!template text="""[Speaker 0]: I'm not exactly sure what this question is""" start="00:03:45.480" video="qanda-llm" id="subtitle"]]
[[!template text="""trying to say. So I mean,""" start="00:03:46.980" video="qanda-llm" id="subtitle"]]
[[!template text="""if someone wrote that and wants to expand on""" start="00:03:51.000" video="qanda-llm" id="subtitle"]]
[[!template text="""it a little bit, but I think that maybe""" start="00:03:55.080" video="qanda-llm" id="subtitle"]]
[[!template text="""you're saying like you could embed,""" start="00:03:58.420" video="qanda-llm" id="subtitle"]]
[[!template text="""have embeddings for like various,""" start="00:04:00.280" video="qanda-llm" id="subtitle"]]
[[!template text="""like every paragraph or something of the""" start="00:04:02.520" video="qanda-llm" id="subtitle"]]
[[!template text="""Emacs manual. But it's also the case that""" start="00:04:04.540" video="qanda-llm" id="subtitle"]]
[[!template text="""like GPT is already for sure already read it,""" start="00:04:06.500" video="qanda-llm" id="subtitle"]]
[[!template text="""right? And so you could ask questions that""" start="00:04:09.760" video="qanda-llm" id="subtitle"]]
[[!template text="""are about Emacs and our ELISP or whatever""" start="00:04:13.460" video="qanda-llm" id="subtitle"]]
[[!template text="""part of the manual you want to find.""" start="00:04:15.200" video="qanda-llm" id="subtitle"]]
[[!template text="""And it will do a reasonably good job,""" start="00:04:19.760" video="qanda-llm" id="subtitle"]]
[[!template text="""especially the better models will do a""" start="00:04:22.280" video="qanda-llm" id="subtitle"]]
[[!template text="""reasonably good job of saying you something""" start="00:04:24.620" video="qanda-llm" id="subtitle"]]
[[!template text="""that is vaguely accurate.""" start="00:04:26.040" video="qanda-llm" id="subtitle"]]
[[!template text="""But if you do this retrieval augmented""" start="00:04:29.440" video="qanda-llm" id="subtitle"]]
[[!template text="""generation with embeddings,""" start="00:04:30.580" video="qanda-llm" id="subtitle"]]
[[!template text="""you can get something that is very accurate.""" start="00:04:32.640" video="qanda-llm" id="subtitle"]]
[[!template text="""At least I think. I haven't tried it,""" start="00:04:36.700" video="qanda-llm" id="subtitle"]]
[[!template text="""but this is a technique that works in other""" start="00:04:38.760" video="qanda-llm" id="subtitle"]]
[[!template text="""similar cases. So you can also imagine like,""" start="00:04:43.040" video="qanda-llm" id="subtitle"]]
[[!template text="""oh, this whole thing I said,""" start="00:04:44.320" video="qanda-llm" id="subtitle"]]
[[!template text="""like, oh, you can query for vague things and""" start="00:04:47.860" video="qanda-llm" id="subtitle"]]
[[!template text="""get parts of the manual,""" start="00:04:49.140" video="qanda-llm" id="subtitle"]]
[[!template text="""perhaps. I'm not exactly sure if that would""" start="00:04:52.680" video="qanda-llm" id="subtitle"]]
[[!template text="""be useful, but maybe. Usually when I'm""" start="00:04:55.120" video="qanda-llm" id="subtitle"]]
[[!template text="""looking things up in the Emacs manual or""" start="00:04:57.040" video="qanda-llm" id="subtitle"]]
[[!template text="""Elist manual, I have something extremely""" start="00:04:58.320" video="qanda-llm" id="subtitle"]]
[[!template text="""specific and I kind of know where to look.""" start="00:05:00.020" video="qanda-llm" id="subtitle"]]
[[!template text="""But having other ways to get at this""" start="00:05:02.960" video="qanda-llm" id="subtitle"]]
[[!template text="""information is always good.""" start="00:05:04.000" video="qanda-llm" id="subtitle"]]
[[!template text="""[Speaker 1]: Right. Looks like they added a clarification""" start="00:05:10.240" video="qanda-llm" id="subtitle"]]
[[!template text="""if you would like to read that yourself,""" start="00:05:12.280" video="qanda-llm" id="subtitle"]]
[[!template text="""or would you like me to read it for you?""" start="00:05:14.180" video="qanda-llm" id="subtitle"]]
[[!template text="""[Speaker 0]: Yeah. Yes, OK. It says,""" start="00:05:17.640" video="qanda-llm" id="subtitle"]]
[[!template text="""I've never tried. Yeah,""" start="00:05:20.460" video="qanda-llm" id="subtitle"]]
[[!template text="""the question is like OK,""" start="00:05:21.500" video="qanda-llm" id="subtitle"]]
[[!template text="""there is a difference between the kind of""" start="00:05:23.100" video="qanda-llm" id="subtitle"]]
[[!template text="""thing as I just described.""" start="00:05:23.860" video="qanda-llm" id="subtitle"]]
[[!template text="""I have not tried the difference with the EMAX""" start="00:05:26.200" video="qanda-llm" id="subtitle"]]
[[!template text="""manual itself. It'd be interesting to see""" start="00:05:31.560" video="qanda-llm" id="subtitle"]]
[[!template text="""what this is, but I would expect like these""" start="00:05:33.700" video="qanda-llm" id="subtitle"]]
[[!template text="""techniques, the retrieval augmented""" start="00:05:35.140" video="qanda-llm" id="subtitle"]]
[[!template text="""generation is generally pretty good.""" start="00:05:38.840" video="qanda-llm" id="subtitle"]]
[[!template text="""And I suspect it would,""" start="00:05:41.240" video="qanda-llm" id="subtitle"]]
[[!template text="""I would bet money on the fact that it's gonna""" start="00:05:43.580" video="qanda-llm" id="subtitle"]]
[[!template text="""give you, you know, better results than just,""" start="00:05:45.820" video="qanda-llm" id="subtitle"]]
[[!template text="""you know, doing a free form query without any""" start="00:05:48.160" video="qanda-llm" id="subtitle"]]
[[!template text="""retrieval augmented generation.""" start="00:05:49.440" video="qanda-llm" id="subtitle"]]
[[!template text="""[Speaker 1]: Cool. Let's see. Next question.""" start="00:05:54.240" video="qanda-llm" id="subtitle"]]
[[!template text="""When deferring commit messages to an LLM,""" start="00:05:56.380" video="qanda-llm" id="subtitle"]]
[[!template text="""what, if anything, do you find you might have""" start="00:05:59.700" video="qanda-llm" id="subtitle"]]
[[!template text="""[Speaker 0]: lost? Yeah, it's a good question.""" start="00:06:02.940" video="qanda-llm" id="subtitle"]]
[[!template text="""When deferring anything to a computer,""" start="00:06:06.060" video="qanda-llm" id="subtitle"]]
[[!template text="""like, you know, I used to have to remember""" start="00:06:08.860" video="qanda-llm" id="subtitle"]]
[[!template text="""how to get places, and now,""" start="00:06:11.200" video="qanda-llm" id="subtitle"]]
[[!template text="""you know, on the few occasions which I drive,""" start="00:06:14.540" video="qanda-llm" id="subtitle"]]
[[!template text="""like, It could just tell me how to get""" start="00:06:16.560" video="qanda-llm" id="subtitle"]]
[[!template text="""places. So similar things could occur here""" start="00:06:21.960" video="qanda-llm" id="subtitle"]]
[[!template text="""where like, okay, I'm just leaving the LLM.""" start="00:06:24.960" video="qanda-llm" id="subtitle"]]
[[!template text="""And so I'm kind of missing out on some""" start="00:06:27.380" video="qanda-llm" id="subtitle"]]
[[!template text="""opportunity to think coherently about a""" start="00:06:30.040" video="qanda-llm" id="subtitle"]]
[[!template text="""particular commit. Particular commits are""" start="00:06:32.440" video="qanda-llm" id="subtitle"]]
[[!template text="""kind of low level. I don't think it's usually""" start="00:06:36.140" video="qanda-llm" id="subtitle"]]
[[!template text="""relatively obvious and what they're doing.""" start="00:06:39.340" video="qanda-llm" id="subtitle"]]
[[!template text="""And in this case, I think there's not much""" start="00:06:42.600" video="qanda-llm" id="subtitle"]]
[[!template text="""loss. But for sure, in other cases,""" start="00:06:44.220" video="qanda-llm" id="subtitle"]]
[[!template text="""if you're starting to get into situations""" start="00:06:46.400" video="qanda-llm" id="subtitle"]]
[[!template text="""where it's writing your emails and all this""" start="00:06:48.640" video="qanda-llm" id="subtitle"]]
[[!template text="""stuff. First of all, it's in 1 sense,""" start="00:06:52.920" video="qanda-llm" id="subtitle"]]
[[!template text="""I'm not sure you might be losing something by""" start="00:06:55.580" video="qanda-llm" id="subtitle"]]
[[!template text="""delegating things. On the other hand,""" start="00:06:57.520" video="qanda-llm" id="subtitle"]]
[[!template text="""you know, when you're interacting with these""" start="00:06:59.120" video="qanda-llm" id="subtitle"]]
[[!template text="""LLMs, you have to be extremely specific about""" start="00:07:01.280" video="qanda-llm" id="subtitle"]]
[[!template text="""what you want, or else it's just not going to""" start="00:07:03.120" video="qanda-llm" id="subtitle"]]
[[!template text="""do a good job. And that might actually be a""" start="00:07:07.540" video="qanda-llm" id="subtitle"]]
[[!template text="""good thing. So the question might be that""" start="00:07:09.440" video="qanda-llm" id="subtitle"]]
[[!template text="""maybe you might gain things by using an LLM""" start="00:07:11.820" video="qanda-llm" id="subtitle"]]
[[!template text="""to do your work. It might not actually even""" start="00:07:13.860" video="qanda-llm" id="subtitle"]]
[[!template text="""save you that much time,""" start="00:07:15.060" video="qanda-llm" id="subtitle"]]
[[!template text="""at least initially, because you have to kind""" start="00:07:18.480" video="qanda-llm" id="subtitle"]]
[[!template text="""of practice again super specific about what""" start="00:07:20.460" video="qanda-llm" id="subtitle"]]
[[!template text="""you want to get out of the output it's going""" start="00:07:22.740" video="qanda-llm" id="subtitle"]]
[[!template text="""to give you so like oh I'm you know maybe you""" start="00:07:26.940" video="qanda-llm" id="subtitle"]]
[[!template text="""know you're on the emacs devel mailing list""" start="00:07:29.600" video="qanda-llm" id="subtitle"]]
[[!template text="""and you're like okay write this email about""" start="00:07:31.780" video="qanda-llm" id="subtitle"]]
[[!template text="""this about this And here's what I want to""" start="00:07:34.000" video="qanda-llm" id="subtitle"]]
[[!template text="""say. And here's the kind of tone I want to""" start="00:07:35.370" video="qanda-llm" id="subtitle"]]
[[!template text="""use. And here's the like,""" start="00:07:36.020" video="qanda-llm" id="subtitle"]]
[[!template text="""oh, you might want to specify like everything""" start="00:07:37.660" video="qanda-llm" id="subtitle"]]
[[!template text="""that you kind of want to get into this.""" start="00:07:39.620" video="qanda-llm" id="subtitle"]]
[[!template text="""Usually it's easier just to write the email.""" start="00:07:42.180" video="qanda-llm" id="subtitle"]]
[[!template text="""But I think that practice of kind of""" start="00:07:45.600" video="qanda-llm" id="subtitle"]]
[[!template text="""understanding what you want is not something""" start="00:07:48.080" video="qanda-llm" id="subtitle"]]
[[!template text="""you normally do. And I think it's going to be""" start="00:07:52.680" video="qanda-llm" id="subtitle"]]
[[!template text="""an interesting exercise that will help people""" start="00:07:56.040" video="qanda-llm" id="subtitle"]]
[[!template text="""understand. That said,""" start="00:07:57.280" video="qanda-llm" id="subtitle"]]
[[!template text="""I haven't done that much of that,""" start="00:07:58.860" video="qanda-llm" id="subtitle"]]
[[!template text="""so I can't say, oh, yeah,""" start="00:07:59.900" video="qanda-llm" id="subtitle"]]
[[!template text="""I've done this and it works for me.""" start="00:08:01.080" video="qanda-llm" id="subtitle"]]
[[!template text="""Maybe. I think it's an interesting thing to""" start="00:08:03.000" video="qanda-llm" id="subtitle"]]
[[!template text="""explore.""" start="00:08:03.120" video="qanda-llm" id="subtitle"]]
[[!template text="""[Speaker 1]: Sure. Thanks. Let's see.""" start="00:08:07.720" video="qanda-llm" id="subtitle"]]
[[!template text="""Let's see. Can you share your font settings""" start="00:08:10.840" video="qanda-llm" id="subtitle"]]
[[!template text="""in your Emacs config? Those are some nice""" start="00:08:13.440" video="qanda-llm" id="subtitle"]]
[[!template text="""fonts for reading.""" start="00:08:14.200" video="qanda-llm" id="subtitle"]]
[[!template text="""[Speaker 0]: Yeah, I think I was using Menlo at the time.""" start="00:08:18.900" video="qanda-llm" id="subtitle"]]
[[!template text="""Unfortunately, I don't save those kinds of""" start="00:08:20.840" video="qanda-llm" id="subtitle"]]
[[!template text="""things, like a history of this.""" start="00:08:21.940" video="qanda-llm" id="subtitle"]]
[[!template text="""I've kind of switched now to,""" start="00:08:24.000" video="qanda-llm" id="subtitle"]]
[[!template text="""what was that? I think I wrote it down in""" start="00:08:27.340" video="qanda-llm" id="subtitle"]]
[[!template text="""the, I switched to MunaSpace,""" start="00:08:29.440" video="qanda-llm" id="subtitle"]]
[[!template text="""which just came out like a week or 2 ago,""" start="00:08:31.920" video="qanda-llm" id="subtitle"]]
[[!template text="""and is also pretty cool.""" start="00:08:33.340" video="qanda-llm" id="subtitle"]]
[[!template text="""So I think it's Menlo.""" start="00:08:35.440" video="qanda-llm" id="subtitle"]]
[[!template text="""The internal question,""" start="00:08:37.380" video="qanda-llm" id="subtitle"]]
[[!template text="""what font are you using?""" start="00:08:38.400" video="qanda-llm" id="subtitle"]]
[[!template text="""[Speaker 1]: Indeed, yeah. It looks like someone guessed""" start="00:08:42.020" video="qanda-llm" id="subtitle"]]
[[!template text="""as well that it might be Menlo.""" start="00:08:43.780" video="qanda-llm" id="subtitle"]]
[[!template text="""OK, Cool. Yeah, next question.""" start="00:08:47.680" video="qanda-llm" id="subtitle"]]
[[!template text="""In terms of standardization,""" start="00:08:48.900" video="qanda-llm" id="subtitle"]]
[[!template text="""do you see a need for the medium to large""" start="00:08:53.260" video="qanda-llm" id="subtitle"]]
[[!template text="""scale effort needed? And then they also""" start="00:08:55.840" video="qanda-llm" id="subtitle"]]
[[!template text="""elaborate about it.""" start="00:08:56.960" video="qanda-llm" id="subtitle"]]
[[!template text="""[Speaker 0]: Yeah, I mean, I do think,""" start="00:09:03.600" video="qanda-llm" id="subtitle"]]
[[!template text="""I don't know if it's large scale,""" start="00:09:06.040" video="qanda-llm" id="subtitle"]]
[[!template text="""but at least it's probably medium scale.""" start="00:09:08.000" video="qanda-llm" id="subtitle"]]
[[!template text="""There's a lot of things that are missing that""" start="00:09:10.320" video="qanda-llm" id="subtitle"]]
[[!template text="""we don't have right now in emacs when you're""" start="00:09:12.260" video="qanda-llm" id="subtitle"]]
[[!template text="""dealing with LLMs. 1 is,""" start="00:09:13.660" video="qanda-llm" id="subtitle"]]
[[!template text="""a prompting system. And by that,""" start="00:09:18.240" video="qanda-llm" id="subtitle"]]
[[!template text="""I mean, you know, prompts are just like big""" start="00:09:21.820" video="qanda-llm" id="subtitle"]]
[[!template text="""blocks of text, but there's also senses that""" start="00:09:24.520" video="qanda-llm" id="subtitle"]]
[[!template text="""like prompts need to be composable and you""" start="00:09:28.260" video="qanda-llm" id="subtitle"]]
[[!template text="""need to be able to iterate on parts of the""" start="00:09:30.480" video="qanda-llm" id="subtitle"]]
[[!template text="""prompt. And so it's also customizable.""" start="00:09:36.600" video="qanda-llm" id="subtitle"]]
[[!template text="""Users might want to customize it.""" start="00:09:38.940" video="qanda-llm" id="subtitle"]]
[[!template text="""On the other hand, it's not super easy to""" start="00:09:41.260" video="qanda-llm" id="subtitle"]]
[[!template text="""write the prompt. So you want really good""" start="00:09:43.820" video="qanda-llm" id="subtitle"]]
[[!template text="""defaults. So the whole prompt system is kind""" start="00:09:47.900" video="qanda-llm" id="subtitle"]]
[[!template text="""of complicated. That needs to be kind of""" start="00:09:51.360" video="qanda-llm" id="subtitle"]]
[[!template text="""standardized, because I don't think there's""" start="00:09:52.580" video="qanda-llm" id="subtitle"]]
[[!template text="""any tools for doing something like that right""" start="00:09:54.380" video="qanda-llm" id="subtitle"]]
[[!template text="""now. I personally use my system,""" start="00:09:58.380" video="qanda-llm" id="subtitle"]]
[[!template text="""my note system for EKG.""" start="00:10:00.220" video="qanda-llm" id="subtitle"]]
[[!template text="""I don't think that's appropriate for""" start="00:10:01.720" video="qanda-llm" id="subtitle"]]
[[!template text="""everyone, but it does,""" start="00:10:02.800" video="qanda-llm" id="subtitle"]]
[[!template text="""I did write it to have some of these""" start="00:10:04.480" video="qanda-llm" id="subtitle"]]
[[!template text="""capabilities of composability that I think""" start="00:10:06.540" video="qanda-llm" id="subtitle"]]
[[!template text="""are useful for a prompt generation.""" start="00:10:08.360" video="qanda-llm" id="subtitle"]]
[[!template text="""It'd be nice to have a system like that,""" start="00:10:11.940" video="qanda-llm" id="subtitle"]]
[[!template text="""but for general use. I don't,""" start="00:10:15.660" video="qanda-llm" id="subtitle"]]
[[!template text="""this is something I've been meaning to think""" start="00:10:17.840" video="qanda-llm" id="subtitle"]]
[[!template text="""about, like how to do it,""" start="00:10:18.840" video="qanda-llm" id="subtitle"]]
[[!template text="""but like this, you know,""" start="00:10:19.760" video="qanda-llm" id="subtitle"]]
[[!template text="""if someone's interested in getting this area,""" start="00:10:21.260" video="qanda-llm" id="subtitle"]]
[[!template text="""like, I would love to chat about that or,""" start="00:10:26.120" video="qanda-llm" id="subtitle"]]
[[!template text="""you know, I think there's a lot of""" start="00:10:27.600" video="qanda-llm" id="subtitle"]]
[[!template text="""interesting ideas that we could have to have""" start="00:10:31.020" video="qanda-llm" id="subtitle"]]
[[!template text="""a system that allows us to make progress""" start="00:10:34.080" video="qanda-llm" id="subtitle"]]
[[!template text="""here. And also, I think there's more to""" start="00:10:38.860" video="qanda-llm" id="subtitle"]]
[[!template text="""standardization to be done.""" start="00:10:40.520" video="qanda-llm" id="subtitle"]]
[[!template text="""1 thing I'd also like to see that we haven't""" start="00:10:42.820" video="qanda-llm" id="subtitle"]]
[[!template text="""done yet is a system for standardizing on""" start="00:10:47.020" video="qanda-llm" id="subtitle"]]
[[!template text="""getting structured output.""" start="00:10:48.060" video="qanda-llm" id="subtitle"]]
[[!template text="""This is gonna be super useful.""" start="00:10:49.640" video="qanda-llm" id="subtitle"]]
[[!template text="""I have this for open AIs API,""" start="00:10:52.280" video="qanda-llm" id="subtitle"]]
[[!template text="""cause they support it.""" start="00:10:53.560" video="qanda-llm" id="subtitle"]]
[[!template text="""And it's really nice, cause then you can""" start="00:10:55.940" video="qanda-llm" id="subtitle"]]
[[!template text="""write elist functions that like,""" start="00:10:57.440" video="qanda-llm" id="subtitle"]]
[[!template text="""okay, I'm going to call the LLM.""" start="00:10:59.380" video="qanda-llm" id="subtitle"]]
[[!template text="""I'm gonna get structured output.""" start="00:11:00.760" video="qanda-llm" id="subtitle"]]
[[!template text="""I know what that structure is going to be.""" start="00:11:02.040" video="qanda-llm" id="subtitle"]]
[[!template text="""It's not going to be just a big block of""" start="00:11:03.480" video="qanda-llm" id="subtitle"]]
[[!template text="""text. I could turn it into a,""" start="00:11:05.660" video="qanda-llm" id="subtitle"]]
[[!template text="""you know, a P list or something.""" start="00:11:07.000" video="qanda-llm" id="subtitle"]]
[[!template text="""And then I could get the values out of that P""" start="00:11:09.280" video="qanda-llm" id="subtitle"]]
[[!template text="""list. And I know that way I could do,""" start="00:11:11.880" video="qanda-llm" id="subtitle"]]
[[!template text="""I could write actual apps that are,""" start="00:11:14.220" video="qanda-llm" id="subtitle"]]
[[!template text="""you know, very, very sort of,""" start="00:11:18.300" video="qanda-llm" id="subtitle"]]
[[!template text="""you know, useful for very specific purposes""" start="00:11:20.200" video="qanda-llm" id="subtitle"]]
[[!template text="""and not just for text generation.""" start="00:11:21.900" video="qanda-llm" id="subtitle"]]
[[!template text="""And I think that's 1 of the most important""" start="00:11:24.000" video="qanda-llm" id="subtitle"]]
[[!template text="""things we want to do. And I have some ideas""" start="00:11:27.100" video="qanda-llm" id="subtitle"]]
[[!template text="""about how to do it. I just haven't pursued""" start="00:11:28.840" video="qanda-llm" id="subtitle"]]
[[!template text="""those yet. But if other people have ideas,""" start="00:11:31.640" video="qanda-llm" id="subtitle"]]
[[!template text="""I think this would be really interesting to""" start="00:11:34.340" video="qanda-llm" id="subtitle"]]
[[!template text="""add to the LLM package.""" start="00:11:35.520" video="qanda-llm" id="subtitle"]]
[[!template text="""So contact me there.""" start="00:11:37.260" video="qanda-llm" id="subtitle"]]
[[!template text="""[Speaker 1]: Awesome. Quick note before we continue.""" start="00:11:42.100" video="qanda-llm" id="subtitle"]]
[[!template text="""So I'm not sure how long we're going to be on""" start="00:11:44.440" video="qanda-llm" id="subtitle"]]
[[!template text="""stream for, because this is the last talk""" start="00:11:46.040" video="qanda-llm" id="subtitle"]]
[[!template text="""before the break. If we are on the stream""" start="00:11:48.640" video="qanda-llm" id="subtitle"]]
[[!template text="""long-term, then great.""" start="00:11:49.840" video="qanda-llm" id="subtitle"]]
[[!template text="""But if not, folks are welcome to continue""" start="00:11:51.820" video="qanda-llm" id="subtitle"]]
[[!template text="""writing questions on the pad.""" start="00:11:53.320" video="qanda-llm" id="subtitle"]]
[[!template text="""And hopefully, Andrew will get to them at""" start="00:11:55.140" video="qanda-llm" id="subtitle"]]
[[!template text="""some point. Or if Andrew maybe has some extra""" start="00:11:58.020" video="qanda-llm" id="subtitle"]]
[[!template text="""time available and wants to stay on""" start="00:11:59.960" video="qanda-llm" id="subtitle"]]
[[!template text="""BigBlueButton here, then folks are also""" start="00:12:01.640" video="qanda-llm" id="subtitle"]]
[[!template text="""welcome to join here and chat with Andrew""" start="00:12:03.940" video="qanda-llm" id="subtitle"]]
[[!template text="""directly as well. Okay,""" start="00:12:08.940" video="qanda-llm" id="subtitle"]]
[[!template text="""awesome. So yeah, the next question is,""" start="00:12:10.740" video="qanda-llm" id="subtitle"]]
[[!template text="""what are your thoughts on the carbon""" start="00:12:12.040" video="qanda-llm" id="subtitle"]]
[[!template text="""footprint of LLM usage?""" start="00:12:14.060" video="qanda-llm" id="subtitle"]]
[[!template text="""[Speaker 0]: Yeah, it's a really interesting question.""" start="00:12:17.200" video="qanda-llm" id="subtitle"]]
[[!template text="""I don't have any particular knowledge or""" start="00:12:23.180" video="qanda-llm" id="subtitle"]]
[[!template text="""opinions about that. It's something I think""" start="00:12:25.440" video="qanda-llm" id="subtitle"]]
[[!template text="""we should all be educating ourselves more""" start="00:12:26.980" video="qanda-llm" id="subtitle"]]
[[!template text="""about. It is really, I mean,""" start="00:12:32.240" video="qanda-llm" id="subtitle"]]
[[!template text="""there's 2 parts of this,""" start="00:12:33.040" video="qanda-llm" id="subtitle"]]
[[!template text="""right? They take a, there's a huge amount of""" start="00:12:35.380" video="qanda-llm" id="subtitle"]]
[[!template text="""carbon footprint involved in training these""" start="00:12:37.160" video="qanda-llm" id="subtitle"]]
[[!template text="""things. Then running them is relatively""" start="00:12:38.720" video="qanda-llm" id="subtitle"]]
[[!template text="""lightweight. So the question is not""" start="00:12:42.540" video="qanda-llm" id="subtitle"]]
[[!template text="""necessarily like once it's trained,""" start="00:12:44.440" video="qanda-llm" id="subtitle"]]
[[!template text="""like I don't feel like it's a big deal to""" start="00:12:46.480" video="qanda-llm" id="subtitle"]]
[[!template text="""keep using it, but like training these things""" start="00:12:48.280" video="qanda-llm" id="subtitle"]]
[[!template text="""is kind of like the big carbon cost of it.""" start="00:12:50.680" video="qanda-llm" id="subtitle"]]
[[!template text="""But like right now, the way everything's""" start="00:12:53.680" video="qanda-llm" id="subtitle"]]
[[!template text="""going, like every, you know,""" start="00:12:56.040" video="qanda-llm" id="subtitle"]]
[[!template text="""all, you know, the top 5 or 6 tech companies""" start="00:12:59.060" video="qanda-llm" id="subtitle"]]
[[!template text="""are all training their LLMs,""" start="00:13:00.900" video="qanda-llm" id="subtitle"]]
[[!template text="""and this is all costing a giant amount of""" start="00:13:03.580" video="qanda-llm" id="subtitle"]]
[[!template text="""carbon probably. On the other hand these same""" start="00:13:06.820" video="qanda-llm" id="subtitle"]]
[[!template text="""companies are pretty good about using the""" start="00:13:08.560" video="qanda-llm" id="subtitle"]]
[[!template text="""least amount of carbon necessary you know""" start="00:13:10.260" video="qanda-llm" id="subtitle"]]
[[!template text="""they have their own their tricks for doing""" start="00:13:12.340" video="qanda-llm" id="subtitle"]]
[[!template text="""things very efficiently.""" start="00:13:13.260" video="qanda-llm" id="subtitle"]]
[[!template text="""[Speaker 1]: Cool next question, LLMs are slow and""" start="00:13:22.100" video="qanda-llm" id="subtitle"]]
[[!template text="""responding. Do you think Emacs should provide""" start="00:13:24.000" video="qanda-llm" id="subtitle"]]
[[!template text="""more async primitives to keep it responsive?""" start="00:13:26.680" video="qanda-llm" id="subtitle"]]
[[!template text="""Like the URL retrieve is quite bad at""" start="00:13:29.380" video="qanda-llm" id="subtitle"]]
[[!template text="""building API clients with it.""" start="00:13:31.720" video="qanda-llm" id="subtitle"]]
[[!template text="""Building API clients with it?""" start="00:13:31.920" video="qanda-llm" id="subtitle"]]
[[!template text="""[Speaker 0]: Yeah. Well, OK, so first of all,""" start="00:13:36.400" video="qanda-llm" id="subtitle"]]
[[!template text="""people should be using the LLM client.""" start="00:13:40.240" video="qanda-llm" id="subtitle"]]
[[!template text="""And So right now, 1 thing I should have""" start="00:13:48.740" video="qanda-llm" id="subtitle"]]
[[!template text="""mentioned at the top is that there are new""" start="00:13:50.220" video="qanda-llm" id="subtitle"]]
[[!template text="""packages that I recorded this talk that you""" start="00:13:52.500" video="qanda-llm" id="subtitle"]]
[[!template text="""just saw several months ago.""" start="00:13:54.480" video="qanda-llm" id="subtitle"]]
[[!template text="""And so like Elama, there's this package Elama""" start="00:13:57.780" video="qanda-llm" id="subtitle"]]
[[!template text="""that came out that is using the LM package.""" start="00:13:59.700" video="qanda-llm" id="subtitle"]]
[[!template text="""And so for example, it doesn't need to worry""" start="00:14:02.440" video="qanda-llm" id="subtitle"]]
[[!template text="""about this sort of thing because it just uses""" start="00:14:05.140" video="qanda-llm" id="subtitle"]]
[[!template text="""LLM and package and the LLM package worries""" start="00:14:07.560" video="qanda-llm" id="subtitle"]]
[[!template text="""about this. And while I'm on the subject of""" start="00:14:11.680" video="qanda-llm" id="subtitle"]]
[[!template text="""things I forgot to mention,""" start="00:14:12.540" video="qanda-llm" id="subtitle"]]
[[!template text="""I also should just mention very quickly that""" start="00:14:15.140" video="qanda-llm" id="subtitle"]]
[[!template text="""there is now an open source model,""" start="00:14:17.020" video="qanda-llm" id="subtitle"]]
[[!template text="""Mistral. And so that's kind of this new thing""" start="00:14:21.680" video="qanda-llm" id="subtitle"]]
[[!template text="""on the scene that happened after I recorded""" start="00:14:23.860" video="qanda-llm" id="subtitle"]]
[[!template text="""my talk. And I think it's super important to""" start="00:14:26.240" video="qanda-llm" id="subtitle"]]
[[!template text="""the community and important that we have the""" start="00:14:28.660" video="qanda-llm" id="subtitle"]]
[[!template text="""opportunity to use that if we want to.""" start="00:14:30.620" video="qanda-llm" id="subtitle"]]
[[!template text="""Okay, but to answer the actual question,""" start="00:14:33.160" video="qanda-llm" id="subtitle"]]
[[!template text="""there has been some talk about the problems""" start="00:14:37.660" video="qanda-llm" id="subtitle"]]
[[!template text="""with URL retrieve in the URL package in""" start="00:14:40.680" video="qanda-llm" id="subtitle"]]
[[!template text="""general in EmacsDevEl.""" start="00:14:42.200" video="qanda-llm" id="subtitle"]]
[[!template text="""It's not great. I would like to have better""" start="00:14:46.760" video="qanda-llm" id="subtitle"]]
[[!template text="""primitives. And I've asked the author of""" start="00:14:50.900" video="qanda-llm" id="subtitle"]]
[[!template text="""Please PLZ to kind of provide some necessary""" start="00:14:54.060" video="qanda-llm" id="subtitle"]]
[[!template text="""callbacks. I think that's a great library.""" start="00:14:56.120" video="qanda-llm" id="subtitle"]]
[[!template text="""And I'd like to see that kind of like,""" start="00:15:00.280" video="qanda-llm" id="subtitle"]]
[[!template text="""It's nice that we have options,""" start="00:15:01.320" video="qanda-llm" id="subtitle"]]
[[!template text="""and that is an option that uses curl on the""" start="00:15:03.340" video="qanda-llm" id="subtitle"]]
[[!template text="""back end, and that has some benefits.""" start="00:15:05.140" video="qanda-llm" id="subtitle"]]
[[!template text="""So there's this big debate about whether we""" start="00:15:09.060" video="qanda-llm" id="subtitle"]]
[[!template text="""should have primitives or just use curl.""" start="00:15:10.600" video="qanda-llm" id="subtitle"]]
[[!template text="""I'm not exactly sure what the right call is,""" start="00:15:13.340" video="qanda-llm" id="subtitle"]]
[[!template text="""but there has been discussions about this.""" start="00:15:15.320" video="qanda-llm" id="subtitle"]]
[[!template text="""[Speaker 1]: Excellent. And someone commented that GPTEL""" start="00:15:19.540" video="qanda-llm" id="subtitle"]]
[[!template text="""is async and apparently very good at tracking""" start="00:15:21.820" video="qanda-llm" id="subtitle"]]
[[!template text="""the point.""" start="00:15:22.300" video="qanda-llm" id="subtitle"]]
[[!template text="""[Speaker 0]: Yes, yes, GPTEL has similar functionalities""" start="00:15:26.680" video="qanda-llm" id="subtitle"]]
[[!template text="""to LLM, although I believe it's going to move""" start="00:15:29.800" video="qanda-llm" id="subtitle"]]
[[!template text="""to LLM itself sometime soon.""" start="00:15:33.040" video="qanda-llm" id="subtitle"]]
[[!template text="""[Speaker 1]: Next question, speaking of which,""" start="00:15:39.480" video="qanda-llm" id="subtitle"]]
[[!template text="""anyone trained or fine-tuned or prompted a""" start="00:15:42.440" video="qanda-llm" id="subtitle"]]
[[!template text="""model with their org data yet and applied it""" start="00:15:44.680" video="qanda-llm" id="subtitle"]]
[[!template text="""to interesting use cases like planning,""" start="00:15:46.560" video="qanda-llm" id="subtitle"]]
[[!template text="""scheduling, et cetera,""" start="00:15:47.920" video="qanda-llm" id="subtitle"]]
[[!template text="""and maybe care to comment?""" start="00:15:49.320" video="qanda-llm" id="subtitle"]]
[[!template text="""[Speaker 0]: I don't know anyone who is doing that.""" start="00:15:54.620" video="qanda-llm" id="subtitle"]]
[[!template text="""I think it is interesting.""" start="00:15:55.860" video="qanda-llm" id="subtitle"]]
[[!template text="""Like this is what I kind of mentioned at the""" start="00:15:57.800" video="qanda-llm" id="subtitle"]]
[[!template text="""very end of the talk. There is a lot of stuff""" start="00:16:01.060" video="qanda-llm" id="subtitle"]]
[[!template text="""there like you could you know if you""" start="00:16:02.440" video="qanda-llm" id="subtitle"]]
[[!template text="""especially mean an LLM can kind of work as""" start="00:16:04.760" video="qanda-llm" id="subtitle"]]
[[!template text="""sort of like a secretary kind of person that""" start="00:16:07.940" video="qanda-llm" id="subtitle"]]
[[!template text="""could help you prioritize Still it's a""" start="00:16:12.180" video="qanda-llm" id="subtitle"]]
[[!template text="""slightly unclear how what the best way to use""" start="00:16:14.760" video="qanda-llm" id="subtitle"]]
[[!template text="""it is So I think there's more of a question""" start="00:16:16.480" video="qanda-llm" id="subtitle"]]
[[!template text="""for the community about like what people have""" start="00:16:18.340" video="qanda-llm" id="subtitle"]]
[[!template text="""been trying. I see someone has mentioned that""" start="00:16:21.140" video="qanda-llm" id="subtitle"]]
[[!template text="""they are using it for weekly review.""" start="00:16:23.400" video="qanda-llm" id="subtitle"]]
[[!template text="""And it's kind of nice to like,""" start="00:16:26.940" video="qanda-llm" id="subtitle"]]
[[!template text="""maybe you could read your agenda or maybe""" start="00:16:29.060" video="qanda-llm" id="subtitle"]]
[[!template text="""this for like weekly review.""" start="00:16:30.480" video="qanda-llm" id="subtitle"]]
[[!template text="""It could like read all the stuff you've done""" start="00:16:32.040" video="qanda-llm" id="subtitle"]]
[[!template text="""and ask you questions about it.""" start="00:16:33.340" video="qanda-llm" id="subtitle"]]
[[!template text="""And like, what should happen next?""" start="00:16:35.020" video="qanda-llm" id="subtitle"]]
[[!template text="""Or like, is this going to cause a problem?""" start="00:16:36.520" video="qanda-llm" id="subtitle"]]
[[!template text="""Like, I can, I can understand if that could""" start="00:16:39.060" video="qanda-llm" id="subtitle"]]
[[!template text="""happen? That's like, that's kind of nice.""" start="00:16:40.860" video="qanda-llm" id="subtitle"]]
[[!template text="""And this kind of people have had good success""" start="00:16:43.660" video="qanda-llm" id="subtitle"]]
[[!template text="""out of using these LLMs to bounce ideas off""" start="00:16:48.540" video="qanda-llm" id="subtitle"]]
[[!template text="""of are, you know, for,""" start="00:16:49.920" video="qanda-llm" id="subtitle"]]
[[!template text="""you know, I've seen people say that like they""" start="00:16:52.680" video="qanda-llm" id="subtitle"]]
[[!template text="""want, they use it for reading and they kind""" start="00:16:55.360" video="qanda-llm" id="subtitle"]]
[[!template text="""of dialogue with the LM to kind of like do""" start="00:16:58.520" video="qanda-llm" id="subtitle"]]
[[!template text="""sort of active reading.""" start="00:16:59.500" video="qanda-llm" id="subtitle"]]
[[!template text="""So you can imagine doing something similar""" start="00:17:02.500" video="qanda-llm" id="subtitle"]]
[[!template text="""with your tasks where it's sort of you're""" start="00:17:04.400" video="qanda-llm" id="subtitle"]]
[[!template text="""engaged in dialogue about like planning your""" start="00:17:06.560" video="qanda-llm" id="subtitle"]]
[[!template text="""tax with some with a alum that could kind of""" start="00:17:08.880" video="qanda-llm" id="subtitle"]]
[[!template text="""understand what those are and ask you some""" start="00:17:10.800" video="qanda-llm" id="subtitle"]]
[[!template text="""questions I think it. You know,""" start="00:17:13.780" video="qanda-llm" id="subtitle"]]
[[!template text="""if it'd be nice. So, the problem is like""" start="00:17:16.839" video="qanda-llm" id="subtitle"]]
[[!template text="""there's no great way to share all this stuff.""" start="00:17:18.480" video="qanda-llm" id="subtitle"]]
[[!template text="""I guess if you have something like this,""" start="00:17:20.720" video="qanda-llm" id="subtitle"]]
[[!template text="""put it on Reddit. If you don't have Reddit,""" start="00:17:23.300" video="qanda-llm" id="subtitle"]]
[[!template text="""I don't know what to do.""" start="00:17:24.599" video="qanda-llm" id="subtitle"]]
[[!template text="""I would say put it somewhere.""" start="00:17:26.000" video="qanda-llm" id="subtitle"]]
[[!template text="""At the very least, I could maybe open up like""" start="00:17:28.840" video="qanda-llm" id="subtitle"]]
[[!template text="""an LLM discussion session on the LLM package""" start="00:17:31.320" video="qanda-llm" id="subtitle"]]
[[!template text="""GitHub, But not everyone likes to use GitHub.""" start="00:17:34.000" video="qanda-llm" id="subtitle"]]
[[!template text="""I don't know. It'd be nice if there's a""" start="00:17:36.100" video="qanda-llm" id="subtitle"]]
[[!template text="""mailing list or IRC chat for this sort of""" start="00:17:38.940" video="qanda-llm" id="subtitle"]]
[[!template text="""thing. But there isn't at the moment.""" start="00:17:40.840" video="qanda-llm" id="subtitle"]]
[[!template text="""[Speaker 1]: All right. Let's see. I think that's the end""" start="00:17:46.560" video="qanda-llm" id="subtitle"]]
[[!template text="""of the questions on the pad so far.""" start="00:17:48.080" video="qanda-llm" id="subtitle"]]
[[!template text="""There was also some discussion or some""" start="00:17:51.020" video="qanda-llm" id="subtitle"]]
[[!template text="""chatter, I believe, on IRC.""" start="00:17:52.260" video="qanda-llm" id="subtitle"]]
[[!template text="""I'm not sure. Andrew, are you on IRC right""" start="00:17:54.560" video="qanda-llm" id="subtitle"]]
[[!template text="""[Speaker 0]: I am, but I don't think I'm on any place that""" start="00:18:00.060" video="qanda-llm" id="subtitle"]]
[[!template text="""has the chatter. So if there's chatter,""" start="00:18:01.400" video="qanda-llm" id="subtitle"]]
[[!template text="""then I'm not seeing it.""" start="00:18:02.440" video="qanda-llm" id="subtitle"]]
[[!template text="""[Speaker 1]: now? Okay. Yeah, it was in the emacsconf-dev""" start="00:18:04.600" video="qanda-llm" id="subtitle"]]
[[!template text="""channel.""" start="00:18:06.760" video="qanda-llm" id="subtitle"]]
[[!template text="""[Speaker 0]: Okay, let me see if I can.""" start="00:18:09.600" video="qanda-llm" id="subtitle"]]
[[!template text="""Oh, yes. I mean, I could see the channel,""" start="00:18:25.600" video="qanda-llm" id="subtitle"]]
[[!template text="""but I missed whatever came before.""" start="00:18:27.520" video="qanda-llm" id="subtitle"]]
[[!template text="""So if there's anything you want to kind of""" start="00:18:29.340" video="qanda-llm" id="subtitle"]]
[[!template text="""call out, I can try to answer it here.""" start="00:18:30.840" video="qanda-llm" id="subtitle"]]
[[!template text="""[Speaker 1]: OK, cool. I believe at least 2 other folks""" start="00:18:35.320" video="qanda-llm" id="subtitle"]]
[[!template text="""who are participating in the discussion there""" start="00:18:37.500" video="qanda-llm" id="subtitle"]]
[[!template text="""who have also joined here on BigBlueButton,""" start="00:18:40.120" video="qanda-llm" id="subtitle"]]
[[!template text="""Codin Quark and AeonTurn92.""" start="00:18:42.440" video="qanda-llm" id="subtitle"]]
[[!template text="""So you folks, if Andrew is still available""" start="00:18:47.000" video="qanda-llm" id="subtitle"]]
[[!template text="""and has time, you're welcome to chat here and""" start="00:18:50.460" video="qanda-llm" id="subtitle"]]
[[!template text="""ask questions or discuss here as well.""" start="00:18:53.000" video="qanda-llm" id="subtitle"]]
[[!template text="""[Speaker 0]: 1 Thank you. Thank you for your help,""" start="00:18:55.580" video="qanda-llm" id="subtitle"]]
[[!template text="""and thank you for reading all the questions.""" start="00:18:57.740" video="qanda-llm" id="subtitle"]]
[[!template text="""[Speaker 1]: AUDIENCE 2 Cheers, and thanks to you for a""" start="00:18:59.700" video="qanda-llm" id="subtitle"]]
[[!template text="""great talk and the discussion.""" start="00:19:00.540" video="qanda-llm" id="subtitle"]]
[[!template text="""[Speaker 0]: AUDIENCE AUDIENCE 1 Thank you.""" start="00:19:01.880" video="qanda-llm" id="subtitle"]]
[[!template text="""[Speaker 1]: AUDIENCE 2 Cheers.""" start="00:19:03.140" video="qanda-llm" id="subtitle"]]
[[!template text="""[Speaker 0]: So I'll just, I will wait here and see if""" start="00:19:07.900" video="qanda-llm" id="subtitle"]]
[[!template text="""there's any questions.""" start="00:19:08.320" video="qanda-llm" id="subtitle"]]
[[!template text="""If not, I will log off after a few minutes.""" start="00:19:10.760" video="qanda-llm" id="subtitle"]]
[[!template text="""[Speaker 2]: Well, I guess since we were mentioned that""" start="00:19:15.900" video="qanda-llm" id="subtitle"]]
[[!template text="""there was a small chat about local alarms.""" start="00:19:18.480" video="qanda-llm" id="subtitle"]]
[[!template text="""Because chat dpt is nice,""" start="00:19:22.640" video="qanda-llm" id="subtitle"]]
[[!template text="""no, but privacy concerns,""" start="00:19:25.600" video="qanda-llm" id="subtitle"]]
[[!template text="""and it's not free and stuff.""" start="00:19:27.380" video="qanda-llm" id="subtitle"]]
[[!template text="""Which, so The question is,""" start="00:19:31.000" video="qanda-llm" id="subtitle"]]
[[!template text="""what is the promise for local models?""" start="00:19:36.960" video="qanda-llm" id="subtitle"]]
[[!template text="""[Speaker 0]: Yeah, so local is definitely...""" start="00:19:39.660" video="qanda-llm" id="subtitle"]]
[[!template text="""[Speaker 2]: Or at least open source.""" start="00:19:41.380" video="qanda-llm" id="subtitle"]]
[[!template text="""[Speaker 0]: Yeah, so there is a local open source model,""" start="00:19:45.680" video="qanda-llm" id="subtitle"]]
[[!template text="""Misral, which you could run.""" start="00:19:47.960" video="qanda-llm" id="subtitle"]]
[[!template text="""The LLM package allows you to use,""" start="00:19:51.340" video="qanda-llm" id="subtitle"]]
[[!template text="""I think there's 3 kind of local things you""" start="00:19:56.120" video="qanda-llm" id="subtitle"]]
[[!template text="""could use. Like many of these things,""" start="00:19:58.100" video="qanda-llm" id="subtitle"]]
[[!template text="""there's like many kind of ways to do the same""" start="00:20:00.220" video="qanda-llm" id="subtitle"]]
[[!template text="""sort of thing. So LLM is supporting OLAMMA""" start="00:20:03.960" video="qanda-llm" id="subtitle"]]
[[!template text="""and LLAMMA-CPP. And let's see,""" start="00:20:10.240" video="qanda-llm" id="subtitle"]]
[[!template text="""1 other. Which 1 is it?""" start="00:20:12.240" video="qanda-llm" id="subtitle"]]
[[!template text="""And maybe that's it. Maybe the,""" start="00:20:18.420" video="qanda-llm" id="subtitle"]]
[[!template text="""oh, GPT for all. So each 1 of these kind of""" start="00:20:21.820" video="qanda-llm" id="subtitle"]]
[[!template text="""has slightly different functionality.""" start="00:20:23.100" video="qanda-llm" id="subtitle"]]
[[!template text="""For example, I think GPT for all doesn't""" start="00:20:26.820" video="qanda-llm" id="subtitle"]]
[[!template text="""support embeddings. And I hear that Olama's""" start="00:20:31.780" video="qanda-llm" id="subtitle"]]
[[!template text="""embeddings are kind of currently broken.""" start="00:20:33.740" video="qanda-llm" id="subtitle"]]
[[!template text="""But basically they should support everything.""" start="00:20:35.920" video="qanda-llm" id="subtitle"]]
[[!template text="""And the open source models are,""" start="00:20:39.100" video="qanda-llm" id="subtitle"]]
[[!template text="""so the local models are reasonably good.""" start="00:20:43.180" video="qanda-llm" id="subtitle"]]
[[!template text="""Like I don't think you'd use them and be""" start="00:20:44.760" video="qanda-llm" id="subtitle"]]
[[!template text="""like, what is this horrible nonsense?""" start="00:20:46.200" video="qanda-llm" id="subtitle"]]
[[!template text="""Like it's, it gives you relatively good""" start="00:20:50.200" video="qanda-llm" id="subtitle"]]
[[!template text="""results. Like it's not gonna be at the level""" start="00:20:51.820" video="qanda-llm" id="subtitle"]]
[[!template text="""of like GPT 3.5 or 4, but it's not far away""" start="00:20:56.060" video="qanda-llm" id="subtitle"]]
[[!template text="""from GPT 3.5, I think.""" start="00:20:57.720" video="qanda-llm" id="subtitle"]]
[[!template text="""[Speaker 2]: I'm just saying that Olam has like a presets""" start="00:21:02.380" video="qanda-llm" id="subtitle"]]
[[!template text="""for connecting the actual working servers for""" start="00:21:05.940" video="qanda-llm" id="subtitle"]]
[[!template text="""Olama?""" start="00:21:06.300" video="qanda-llm" id="subtitle"]]
[[!template text="""[Speaker 0]: So, I'll try. Yeah, so you could,""" start="00:21:08.560" video="qanda-llm" id="subtitle"]]
[[!template text="""what you could do is you could like for""" start="00:21:09.860" video="qanda-llm" id="subtitle"]]
[[!template text="""example you could download Olama which is""" start="00:21:11.940" video="qanda-llm" id="subtitle"]]
[[!template text="""just a way of setting up local models and""" start="00:21:15.780" video="qanda-llm" id="subtitle"]]
[[!template text="""running local models on your machine.""" start="00:21:17.320" video="qanda-llm" id="subtitle"]]
[[!template text="""So typically what it does,""" start="00:21:18.580" video="qanda-llm" id="subtitle"]]
[[!template text="""you like download a program,""" start="00:21:19.720" video="qanda-llm" id="subtitle"]]
[[!template text="""let's say Olama. Then Olama will have the""" start="00:21:23.720" video="qanda-llm" id="subtitle"]]
[[!template text="""ability to download models.""" start="00:21:24.940" video="qanda-llm" id="subtitle"]]
[[!template text="""And so you could choose from just a host of""" start="00:21:27.240" video="qanda-llm" id="subtitle"]]
[[!template text="""different models. Each 1 of these things has""" start="00:21:29.280" video="qanda-llm" id="subtitle"]]
[[!template text="""a bunch of different models.""" start="00:21:30.200" video="qanda-llm" id="subtitle"]]
[[!template text="""So it downloads all these things to your""" start="00:21:31.920" video="qanda-llm" id="subtitle"]]
[[!template text="""machine. But I would say that the key problem""" start="00:21:36.600" video="qanda-llm" id="subtitle"]]
[[!template text="""here is that it requires a fairly beefy""" start="00:21:40.200" video="qanda-llm" id="subtitle"]]
[[!template text="""machine.""" start="00:21:40.580" video="qanda-llm" id="subtitle"]]
[[!template text="""[Speaker 2]: So. Yeah, yeah, of course.""" start="00:21:42.600" video="qanda-llm" id="subtitle"]]
[[!template text="""Why I was asking, because you briefly""" start="00:21:45.060" video="qanda-llm" id="subtitle"]]
[[!template text="""mentioned that there are some Israeli""" start="00:21:46.440" video="qanda-llm" id="subtitle"]]
[[!template text="""servers. I understand that they run it like a""" start="00:21:52.300" video="qanda-llm" id="subtitle"]]
[[!template text="""government or stuff like that?""" start="00:21:53.680" video="qanda-llm" id="subtitle"]]
[[!template text="""No, no, sorry. People want everyone?""" start="00:21:55.440" video="qanda-llm" id="subtitle"]]
[[!template text="""[Speaker 0]: I don't, I mean, maybe you've said something""" start="00:21:59.340" video="qanda-llm" id="subtitle"]]
[[!template text="""that sounded like Israeli servers.""" start="00:22:00.620" video="qanda-llm" id="subtitle"]]
[[!template text="""[Speaker 2]: Okay, okay.""" start="00:22:01.620" video="qanda-llm" id="subtitle"]]
[[!template text="""[Speaker 0]: I think- There's no government LLMs as far as""" start="00:22:04.920" video="qanda-llm" id="subtitle"]]
[[!template text="""I know. Although, I'm sure the governments""" start="00:22:06.820" video="qanda-llm" id="subtitle"]]
[[!template text="""are working on their own LLMs,""" start="00:22:08.200" video="qanda-llm" id="subtitle"]]
[[!template text="""et cetera. But yeah, basically your choices""" start="00:22:10.980" video="qanda-llm" id="subtitle"]]
[[!template text="""are spend a, I mean, if you use open AI or""" start="00:22:15.060" video="qanda-llm" id="subtitle"]]
[[!template text="""something or anything else,""" start="00:22:16.080" video="qanda-llm" id="subtitle"]]
[[!template text="""you're really not spending any money.""" start="00:22:17.960" video="qanda-llm" id="subtitle"]]
[[!template text="""Like I've never been able to spend any money""" start="00:22:20.560" video="qanda-llm" id="subtitle"]]
[[!template text="""on OpenAI. Like unless you're doing something""" start="00:22:23.680" video="qanda-llm" id="subtitle"]]
[[!template text="""very intensive and really are using it to,""" start="00:22:25.840" video="qanda-llm" id="subtitle"]]
[[!template text="""you know, if you're using it for your""" start="00:22:28.000" video="qanda-llm" id="subtitle"]]
[[!template text="""personal use, it's just hard to spend any""" start="00:22:29.620" video="qanda-llm" id="subtitle"]]
[[!template text="""money. But on the other hand,""" start="00:22:31.720" video="qanda-llm" id="subtitle"]]
[[!template text="""it's not free. So you can,""" start="00:22:32.780" video="qanda-llm" id="subtitle"]]
[[!template text="""you know,""" start="00:22:33.040" video="qanda-llm" id="subtitle"]]
[[!template text="""[Speaker 2]: Actually, it's rather cheap.""" start="00:22:36.300" video="qanda-llm" id="subtitle"]]
[[!template text="""There's no question about that.""" start="00:22:37.680" video="qanda-llm" id="subtitle"]]
[[!template text="""The problem is that it has a bad track record""" start="00:22:40.580" video="qanda-llm" id="subtitle"]]
[[!template text="""on privacy.""" start="00:22:41.580" video="qanda-llm" id="subtitle"]]
[[!template text="""[Speaker 0]: Yes, that's, I think that is a key problem.""" start="00:22:45.540" video="qanda-llm" id="subtitle"]]
[[!template text="""This is probably the number 1 reason why you""" start="00:22:48.120" video="qanda-llm" id="subtitle"]]
[[!template text="""might want to use a local AI,""" start="00:22:51.840" video="qanda-llm" id="subtitle"]]
[[!template text="""a local LLM. Another 1 is like,""" start="00:22:54.720" video="qanda-llm" id="subtitle"]]
[[!template text="""you may not agree with the decisions.""" start="00:22:57.400" video="qanda-llm" id="subtitle"]]
[[!template text="""You know, there's a lot of trust and safety""" start="00:23:00.360" video="qanda-llm" id="subtitle"]]
[[!template text="""stuff that these companies have to do.""" start="00:23:05.140" video="qanda-llm" id="subtitle"]]
[[!template text="""Like they don't want like the LMs to kind of""" start="00:23:09.020" video="qanda-llm" id="subtitle"]]
[[!template text="""like give you, like tell you how you can make""" start="00:23:11.400" video="qanda-llm" id="subtitle"]]
[[!template text="""meth or how you can make a bomb,""" start="00:23:13.180" video="qanda-llm" id="subtitle"]]
[[!template text="""which they would do. They would totally do""" start="00:23:14.960" video="qanda-llm" id="subtitle"]]
[[!template text="""it. So, But each time you kind of restrict""" start="00:23:19.580" video="qanda-llm" id="subtitle"]]
[[!template text="""what is happening with what you can get out""" start="00:23:22.540" video="qanda-llm" id="subtitle"]]
[[!template text="""of the LM, it gets a little worse.""" start="00:23:23.860" video="qanda-llm" id="subtitle"]]
[[!template text="""So some people""" start="00:23:24.960" video="qanda-llm" id="subtitle"]]
[[!template text="""[Speaker 2]: want to have local. That's expected.""" start="00:23:27.040" video="qanda-llm" id="subtitle"]]
[[!template text="""I guess even open source language modules""" start="00:23:31.080" video="qanda-llm" id="subtitle"]]
[[!template text="""will soon have HR spaces because it's simply""" start="00:23:33.540" video="qanda-llm" id="subtitle"]]
[[!template text="""a legal issue.""" start="00:23:34.720" video="qanda-llm" id="subtitle"]]
[[!template text="""[Speaker 0]: I think that's true. I also think that there""" start="00:23:40.760" video="qanda-llm" id="subtitle"]]
[[!template text="""probably will be, although I don't know of""" start="00:23:42.880" video="qanda-llm" id="subtitle"]]
[[!template text="""any offhand, that will are completely""" start="00:23:45.060" video="qanda-llm" id="subtitle"]]
[[!template text="""uncensored. I know people are interested and""" start="00:23:46.960" video="qanda-llm" id="subtitle"]]
[[!template text="""are running uncensored models.""" start="00:23:48.240" video="qanda-llm" id="subtitle"]]
[[!template text="""I don't know how to do it.""" start="00:23:49.440" video="qanda-llm" id="subtitle"]]
[[!template text="""I think it's a little bit dubious,""" start="00:23:52.280" video="qanda-llm" id="subtitle"]]
[[!template text="""but some people do want to do it.""" start="00:23:54.960" video="qanda-llm" id="subtitle"]]
[[!template text="""There's another reason for using local""" start="00:23:56.280" video="qanda-llm" id="subtitle"]]
[[!template text="""servers. Do you have any recommendation for""" start="00:24:02.280" video="qanda-llm" id="subtitle"]]
[[!template text="""models to run locally and also comments on""" start="00:24:05.500" video="qanda-llm" id="subtitle"]]
[[!template text="""whether a GPU is required?""" start="00:24:06.780" video="qanda-llm" id="subtitle"]]
[[!template text="""Usually a GPU, well, you can run it without a""" start="00:24:14.040" video="qanda-llm" id="subtitle"]]
[[!template text="""GPU, but it does run much better.""" start="00:24:16.960" video="qanda-llm" id="subtitle"]]
[[!template text="""Like for example, I think when I used,""" start="00:24:19.480" video="qanda-llm" id="subtitle"]]
[[!template text="""Lama is sort of like a standard.""" start="00:24:22.560" video="qanda-llm" id="subtitle"]]
[[!template text="""This was the model for that Facebook came out""" start="00:24:27.160" video="qanda-llm" id="subtitle"]]
[[!template text="""with for local use. And It was,""" start="00:24:31.380" video="qanda-llm" id="subtitle"]]
[[!template text="""yeah, it's good. It's,""" start="00:24:37.260" video="qanda-llm" id="subtitle"]]
[[!template text="""but it's now it's I think,""" start="00:24:40.400" video="qanda-llm" id="subtitle"]]
[[!template text="""Mistral is kind of like has a better""" start="00:24:44.620" video="qanda-llm" id="subtitle"]]
[[!template text="""performance, But there's also different model""" start="00:24:46.480" video="qanda-llm" id="subtitle"]]
[[!template text="""sizes. There's 7B, like the Lama 7B is OK.""" start="00:24:51.000" video="qanda-llm" id="subtitle"]]
[[!template text="""The Mistral 7B, 7 billion,""" start="00:24:52.940" video="qanda-llm" id="subtitle"]]
[[!template text="""are like, basically it'll take like,""" start="00:24:54.800" video="qanda-llm" id="subtitle"]]
[[!template text="""you can run it with like 16 gigs of RAM,""" start="00:24:58.380" video="qanda-llm" id="subtitle"]]
[[!template text="""is pretty good. It's probably about as equal""" start="00:25:02.720" video="qanda-llm" id="subtitle"]]
[[!template text="""to the LLAMA13B. Those are the number of""" start="00:25:06.900" video="qanda-llm" id="subtitle"]]
[[!template text="""parameters, if I remember correctly.""" start="00:25:08.360" video="qanda-llm" id="subtitle"]]
[[!template text="""And then there's a 7B,""" start="00:25:10.680" video="qanda-llm" id="subtitle"]]
[[!template text="""which I've never been able to run.""" start="00:25:12.340" video="qanda-llm" id="subtitle"]]
[[!template text="""And even if the 7B, if you run it without a""" start="00:25:16.120" video="qanda-llm" id="subtitle"]]
[[!template text="""GPU, it takes quite a while to answer.""" start="00:25:19.640" video="qanda-llm" id="subtitle"]]
[[!template text="""I think I've had experiences where it took""" start="00:25:22.080" video="qanda-llm" id="subtitle"]]
[[!template text="""literally like several,""" start="00:25:23.940" video="qanda-llm" id="subtitle"]]
[[!template text="""like 5 minutes before it even started""" start="00:25:26.480" video="qanda-llm" id="subtitle"]]
[[!template text="""responding, but you do eventually get""" start="00:25:28.880" video="qanda-llm" id="subtitle"]]
[[!template text="""something. And it could be that like things""" start="00:25:32.220" video="qanda-llm" id="subtitle"]]
[[!template text="""have gotten better since the last time I""" start="00:25:33.840" video="qanda-llm" id="subtitle"]]
[[!template text="""tried this, because things are moving fast.""" start="00:25:35.440" video="qanda-llm" id="subtitle"]]
[[!template text="""But it is super recommended to have a GPU.""" start="00:25:38.360" video="qanda-llm" id="subtitle"]]
[[!template text="""This is the problem. It's kind of like,""" start="00:25:42.440" video="qanda-llm" id="subtitle"]]
[[!template text="""yes, free software is great.""" start="00:25:43.840" video="qanda-llm" id="subtitle"]]
[[!template text="""But if free software is requiring that you""" start="00:25:46.880" video="qanda-llm" id="subtitle"]]
[[!template text="""have these kind of beefy servers and have all""" start="00:25:50.460" video="qanda-llm" id="subtitle"]]
[[!template text="""this hardware, that's not great.""" start="00:25:52.000" video="qanda-llm" id="subtitle"]]
[[!template text="""I think there's a case to be made.""" start="00:25:53.600" video="qanda-llm" id="subtitle"]]
[[!template text="""[Speaker 1]: a hardware""" start="00:25:55.680" video="qanda-llm" id="subtitle"]]
[[!template text="""[Speaker 0]: with slots instead of a laptop.""" start="00:25:59.040" video="qanda-llm" id="subtitle"]]
[[!template text="""Yeah, yeah, that's right.""" start="00:26:01.560" video="qanda-llm" id="subtitle"]]
[[!template text="""[Speaker 2]: Ideally, you can have Ideally,""" start="00:26:03.660" video="qanda-llm" id="subtitle"]]
[[!template text="""it would be nice if FSL for all things could""" start="00:26:07.400" video="qanda-llm" id="subtitle"]]
[[!template text="""run something for open source model.""" start="00:26:12.040" video="qanda-llm" id="subtitle"]]
[[!template text="""And not free, but the key point is that it's""" start="00:26:16.320" video="qanda-llm" id="subtitle"]]
[[!template text="""Libre?""" start="00:26:16.640" video="qanda-llm" id="subtitle"]]
[[!template text="""[Speaker 0]: Yes, so actually I think Google does do that.""" start="00:26:22.580" video="qanda-llm" id="subtitle"]]
[[!template text="""I'll have to look it up,""" start="00:26:24.720" video="qanda-llm" id="subtitle"]]
[[!template text="""but I haven't explored this yet.""" start="00:26:27.560" video="qanda-llm" id="subtitle"]]
[[!template text="""But Google's server, which LLM does support,""" start="00:26:31.220" video="qanda-llm" id="subtitle"]]
[[!template text="""supports arbitrary models.""" start="00:26:33.800" video="qanda-llm" id="subtitle"]]
[[!template text="""So you can run LLMA or things like that.""" start="00:26:36.420" video="qanda-llm" id="subtitle"]]
[[!template text="""The problem is that even if you're running""" start="00:26:38.940" video="qanda-llm" id="subtitle"]]
[[!template text="""Mistral, which has no restrictions.""" start="00:26:40.320" video="qanda-llm" id="subtitle"]]
[[!template text="""So this is the kind of thing that like the""" start="00:26:42.940" video="qanda-llm" id="subtitle"]]
[[!template text="""Free Software Foundation cares a lot about.""" start="00:26:44.900" video="qanda-llm" id="subtitle"]]
[[!template text="""Like you want it to be like no restrictions,""" start="00:26:47.240" video="qanda-llm" id="subtitle"]]
[[!template text="""legal restrictions on you as you run the""" start="00:26:49.740" video="qanda-llm" id="subtitle"]]
[[!template text="""model. So even if it's running Mistral,""" start="00:26:52.080" video="qanda-llm" id="subtitle"]]
[[!template text="""just by using the server,""" start="00:26:54.800" video="qanda-llm" id="subtitle"]]
[[!template text="""the company server, it will impose some""" start="00:26:58.460" video="qanda-llm" id="subtitle"]]
[[!template text="""restrictions on you probably,""" start="00:26:59.440" video="qanda-llm" id="subtitle"]]
[[!template text="""right? There's gonna be some license that you""" start="00:27:02.320" video="qanda-llm" id="subtitle"]]
[[!template text="""have to, or something you have to abide by.""" start="00:27:04.760" video="qanda-llm" id="subtitle"]]
[[!template text="""So I think, yes, it depends on how much you""" start="00:27:08.480" video="qanda-llm" id="subtitle"]]
[[!template text="""care about it, I guess.""" start="00:27:09.280" video="qanda-llm" id="subtitle"]]
[[!template text="""I should find out more about that and make""" start="00:27:19.500" video="qanda-llm" id="subtitle"]]
[[!template text="""sure that it's a good point that I should,""" start="00:27:21.580" video="qanda-llm" id="subtitle"]]
[[!template text="""you know, people should be able to run free""" start="00:27:23.980" video="qanda-llm" id="subtitle"]]
[[!template text="""models over the server.""" start="00:27:25.920" video="qanda-llm" id="subtitle"]]
[[!template text="""So I should make sure we support that in the""" start="00:27:28.320" video="qanda-llm" id="subtitle"]]
[[!template text="""LLM package. So, is there any other questions""" start="00:27:40.360" video="qanda-llm" id="subtitle"]]
[[!template text="""Or is otherwise we can end the session.""" start="00:27:48.240" video="qanda-llm" id="subtitle"]]
[[!template text="""Yeah, all right. Thank you.""" start="00:28:00.800" video="qanda-llm" id="subtitle"]]
[[!template text="""Thank you. Thank you everyone who listened.""" start="00:28:02.440" video="qanda-llm" id="subtitle"]]
[[!template text="""I'm super happy like I,""" start="00:28:04.540" video="qanda-llm" id="subtitle"]]
[[!template text="""the interest is great.""" start="00:28:06.560" video="qanda-llm" id="subtitle"]]
[[!template text="""I think there's great stuff to be done here""" start="00:28:08.900" video="qanda-llm" id="subtitle"]]
[[!template text="""and I'm kind of super excited what we're""" start="00:28:10.960" video="qanda-llm" id="subtitle"]]
[[!template text="""going to do in the next year,""" start="00:28:11.940" video="qanda-llm" id="subtitle"]]
[[!template text="""so hopefully, like next year,""" start="00:28:13.140" video="qanda-llm" id="subtitle"]]
[[!template text="""and the conference we have something even""" start="00:28:14.600" video="qanda-llm" id="subtitle"]]
[[!template text="""more exciting to say about LLM and how they""" start="00:28:16.440" video="qanda-llm" id="subtitle"]]
[[!template text="""can be used with Emacs.""" start="00:28:17.320" video="qanda-llm" id="subtitle"]]
[[!template text="""So thank""" start="00:28:19.620" video="qanda-llm" id="subtitle"]]
[[!template text="""you""" start="00:28:30.060" video="qanda-llm" id="subtitle"]]

Questions or comments? Please e-mail [ahyatt@gmail.com](mailto:ahyatt@gmail.com?subject=Comment%20for%20EmacsConf%202023%20llm%3A%20LLM%20clients%20in%20Emacs%2C%20functionality%20and%20standardization)


<!-- End of emacsconf-publish-after-page -->