author    Sacha Chua <sacha@sachachua.com>    2022-12-13 00:50:32 -0500
committer Sacha Chua <sacha@sachachua.com>    2022-12-13 00:50:32 -0500
commit    ae3f835e219c5d6a21565247b05a112dffb4ee9e (patch)
tree      ad732bf6a963e6ec10dff6670cb301d2af0a0853 /2022/info
parent    b1b6bd421b12ff266eadf7f6a7353721fc813379 (diff)
download  emacsconf-wiki-ae3f835e219c5d6a21565247b05a112dffb4ee9e.tar.xz
          emacsconf-wiki-ae3f835e219c5d6a21565247b05a112dffb4ee9e.zip
Automated commit
Diffstat (limited to '2022/info')
-rw-r--r--  2022/info/grail-after.md   638
-rw-r--r--  2022/info/grail-before.md   32
2 files changed, 666 insertions, 4 deletions
diff --git a/2022/info/grail-after.md b/2022/info/grail-after.md
index f721544b..98d5a049 100644
--- a/2022/info/grail-after.md
+++ b/2022/info/grail-after.md
@@ -1,6 +1,644 @@
<!-- Automatically generated by emacsconf-publish-after-page -->
+<a name="grail-mainVideo-transcript"></a>
+# Transcript
+
+[[!template new="1" text="""Thank you for joining me today. I'm Sameer Pradhan""" start="00:00:00.000" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""from the Linguistic Data Consortium""" start="00:00:05.840" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""at the University of Pennsylvania""" start="00:00:07.800" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""and founder of cemantix.org .""" start="00:00:10.080" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""Today we'll be addressing research""" start="00:00:14.520" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""in computational linguistics,""" start="00:00:16.880" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""also known as natural language processing""" start="00:00:18.720" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""a sub area of artificial intelligence""" start="00:00:22.040" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""with a focus on modeling and predicting""" start="00:00:24.720" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""complex linguistic structures from various signals.""" start="00:00:27.760" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""The work we present is limited to text and speech signals.""" start="00:00:31.920" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""but it can be extended to other signals.""" start="00:00:35.800" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""We propose an architecture,""" start="00:00:38.640" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""and we call it GRAIL, which allows""" start="00:00:40.800" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""the representation and aggregation""" start="00:00:42.960" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""of such rich structures in a systematic fashion.""" start="00:00:44.640" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""I'll demonstrate a proof of concept""" start="00:00:50.200" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""for representing and manipulating data and annotations""" start="00:00:52.680" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""for the specific purpose of building""" start="00:00:56.560" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""machine learning models that simulate understanding.""" start="00:00:58.520" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""These technologies have the potential for impact""" start="00:01:02.880" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""in almost every conceivable field""" start="00:01:05.680" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""that generates and uses data.""" start="00:01:09.120" video="mainVideo-grail" id="subtitle"]]
+[[!template new="1" text="""We process human language""" start="00:01:13.400" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""when our brains receive and assimilate""" start="00:01:15.040" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""various signals which are then manipulated""" start="00:01:16.720" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""and interpreted within a syntactic structure.""" start="00:01:20.080" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""it's a complex process that I have simplified here""" start="00:01:23.880" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""for the purpose of comparison to machine learning.""" start="00:01:27.320" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""Recent machine learning models tend to require""" start="00:01:30.760" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""a large amount of raw, naturally occurring data""" start="00:01:33.960" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""and a varying amount of manually enriched data,""" start="00:01:37.040" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""commonly known as &quot;annotations&quot;.""" start="00:01:40.200" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""Owing to the complex and numerous nature""" start="00:01:43.200" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""of linguistic phenomena, we have most often used""" start="00:01:45.960" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""a divide and conquer approach.""" start="00:01:49.960" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""The strength of this approach is that it allows us""" start="00:01:53.000" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""to focus on a single, or perhaps a few related""" start="00:01:55.400" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""linguistic phenomena.""" start="00:01:58.160" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""The weaknesses are the universe of these phenomena""" start="00:02:00.440" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""keep expanding, as language itself""" start="00:02:03.880" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""evolves and changes over time,""" start="00:02:07.240" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""and second, this approach requires an additional task""" start="00:02:09.360" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""of aggregating the interpretations,""" start="00:02:13.120" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""creating more opportunities for computer error.""" start="00:02:14.840" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""Our challenge, then, is to find the sweet spot""" start="00:02:18.360" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""that allows us to encode complex information""" start="00:02:21.520" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""without the use of manual annotation,""" start="00:02:25.240" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""or without the additional task of aggregation by computers.""" start="00:02:27.720" video="mainVideo-grail" id="subtitle"]]
+[[!template new="1" text="""So what do I mean by &quot;annotation&quot;?""" start="00:02:34.560" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""In this talk the word annotation refers to""" start="00:02:37.120" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""the manual assignment of certain attributes""" start="00:02:39.760" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""to portions of a signal which is necessary""" start="00:02:43.520" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""to perform the end task.""" start="00:02:48.640" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""For example, in order for the algorithm""" start="00:02:51.640" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""to accurately interpret a pronoun,""" start="00:02:54.440" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""it needs to know that pronoun,""" start="00:02:57.440" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""what that pronoun refers back to.""" start="00:03:00.280" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""We may find this task trivial, however,""" start="00:03:03.800" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""current algorithms repeatedly fail in this task.""" start="00:03:06.720" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""So the complexities of understanding""" start="00:03:10.600" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""in computational linguistics require annotation.""" start="00:03:13.320" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""The world annotation itself is a useful example,""" start="00:03:16.640" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""because it also reminds us""" start="00:03:20.800" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""that words have multiple meetings""" start="00:03:22.680" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""as annotation itself does—""" start="00:03:25.120" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""just as I needed to define it in this context,""" start="00:03:27.520" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""so that my message won't be misinterpreted.""" start="00:03:30.560" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""So, too, must annotators do this for algorithms""" start="00:03:33.800" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""through the manual intervention.""" start="00:03:39.040" video="mainVideo-grail" id="subtitle"]]
+[[!template new="1" text="""Learning from raw data""" start="00:03:43.240" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""(commonly known as unsupervised learning)""" start="00:03:44.760" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""poses limitations for machine learning.""" start="00:03:47.040" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""As I described, modeling complex phenomena""" start="00:03:50.080" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""need manual annotations.""" start="00:03:53.040" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""The learning algorithm uses these annotations""" start="00:03:55.560" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""as examples to build statistical models.""" start="00:03:58.560" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""This is called supervised learning.""" start="00:04:01.320" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""Without going into too much detail,""" start="00:04:04.880" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""I'll simply note that the recent popularity""" start="00:04:06.320" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""of the concept of deep learning""" start="00:04:10.040" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""is that evolutionary step""" start="00:04:12.520" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""where we have learned to train models""" start="00:04:14.680" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""using trillions of parameters in ways that they can""" start="00:04:17.320" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""learn richer hierarchical structures""" start="00:04:20.800" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""from very large amounts of annotate, unannotated data.""" start="00:04:25.080" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""These models can then be fine-tuned,""" start="00:04:29.400" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""using varying amounts of annotated examples""" start="00:04:32.320" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""depending on the complexity of the task""" start="00:04:35.600" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""to generate better predictions.""" start="00:04:37.640" video="mainVideo-grail" id="subtitle"]]
+[[!template new="1" text="""As you might imagine, manually annotating""" start="00:04:39.680" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""complex, linguistic phenomena""" start="00:04:44.920" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""can be very specific, labor-intensive task.""" start="00:04:47.360" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""For example, imagine if we were""" start="00:04:51.720" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""to go back through this presentation""" start="00:04:54.280" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""and connect all the pronouns""" start="00:04:56.400" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""with the nouns to which they refer.""" start="00:04:58.400" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""Even for a short 18 min presentation,""" start="00:04:59.920" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""this would require hundreds of annotations.""" start="00:05:03.240" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""The models we build are only as good""" start="00:05:05.240" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""as the quality of the annotations we make.""" start="00:05:08.520" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""We need guidelines""" start="00:05:11.120" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""that ensure that the annotations are done""" start="00:05:12.680" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""by at least two humans who have substantial agreement""" start="00:05:15.760" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""with each other in their interpretations.""" start="00:05:19.720" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""We know that if we try to trade a model using annotations""" start="00:05:22.120" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""that are very subjective, or have more noise,""" start="00:05:25.600" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""we will receive poor predictions.""" start="00:05:28.520" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""Additionally, there is the concern of introducing""" start="00:05:30.920" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""various unexpected biases into one's models.""" start="00:05:33.680" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""So annotation is really both an art and a science.""" start="00:05:37.080" video="mainVideo-grail" id="subtitle"]]
+[[!template new="1" text="""In the remaining time,""" start="00:05:44.400" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""we will turn to two fundamental questions.""" start="00:05:47.440" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""First, how can we develop a unified representation""" start="00:05:50.000" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""of data and annotations""" start="00:05:54.240" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""that encompasses arbitrary levels of linguistic information?""" start="00:05:55.600" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""There is a long history of attempting to answer""" start="00:05:59.760" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""this first question.""" start="00:06:03.840" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""This history is documented in our recent article,""" start="00:06:04.840" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""and you can refer to that article.""" start="00:06:08.840" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""It will be on the website.""" start="00:06:11.520" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""It is as if we, as a community,""" start="00:06:16.720" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""have been searching for our own Holy Grail.""" start="00:06:19.000" video="mainVideo-grail" id="subtitle"]]
+[[!template new="1" text="""The second question we will pose is""" start="00:06:22.520" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""what role might Emacs, along with Org mode,""" start="00:06:26.520" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""play in this process?""" start="00:06:30.160" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""Well, the solution itself may not be tied to Emacs.""" start="00:06:31.920" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""Emacs has built in capabilities""" start="00:06:35.360" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""that could be useful for evaluating potential solutions.""" start="00:06:38.360" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""It's also one of the most extensively documented""" start="00:06:42.600" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""pieces of software and the most customizable""" start="00:06:45.760" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""piece of software that I have ever come across,""" start="00:06:48.520" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""and many would agree with that.""" start="00:06:51.600" video="mainVideo-grail" id="subtitle"]]
+[[!template new="1" text="""In order to approach this second question,""" start="00:06:55.280" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""we turn to the complex structure of language itself.""" start="00:07:00.640" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""At first glance, language appears to us""" start="00:07:03.920" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""as a series of words.""" start="00:07:07.680" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""Words form sentences, sentences form paragraphs,""" start="00:07:09.880" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""and paragraphs form completed text.""" start="00:07:13.440" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""If this was a sufficient description""" start="00:07:16.240" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""of the complexity of language,""" start="00:07:19.040" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""all of us would be able to speak and read""" start="00:07:21.160" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""at least ten different languages.""" start="00:07:24.200" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""We know it is much more complex than this.""" start="00:07:26.560" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""There is a rich, underlying recursive tree structure--""" start="00:07:29.280" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""in fact, many possible tree structures""" start="00:07:33.200" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""which makes a particular sequence meaningful""" start="00:07:36.440" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""and many others meaningless.""" start="00:07:39.440" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""One of the better understood tree structures""" start="00:07:42.080" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""is the syntactic structure.""" start="00:07:45.240" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""While natural language""" start="00:07:47.120" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""has rich ambiguities and complexities,""" start="00:07:49.440" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""programming languages are designed to be parsed""" start="00:07:51.680" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""and interpreted deterministically.""" start="00:07:55.120" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""Emacs has been used for programming very effectively.""" start="00:07:57.000" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""So there is a potential for using Emacs""" start="00:08:02.160" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""as a tool for annotation.""" start="00:08:05.360" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""This would significantly improve our current set of tools.""" start="00:08:06.560" video="mainVideo-grail" id="subtitle"]]
+[[!template new="1" text="""It is important to note that most of the annotation tools""" start="00:08:10.800" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""that have been developed over the past few decades""" start="00:08:16.560" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""have relied on graphical interfaces,""" start="00:08:19.640" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""even those used for enriching textual information.""" start="00:08:22.880" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""Most of the tools in current use""" start="00:08:26.920" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""are designed for a end user to add very specific,""" start="00:08:30.400" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""very restricted information.""" start="00:08:36.160" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""We have not really made use of the potential""" start="00:08:38.640" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""that an editor or a rich editing environment like Emacs""" start="00:08:42.800" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""can add to the mix.""" start="00:08:45.640" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""Emacs has long enabled the editing of, the manipulation of""" start="00:08:47.240" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""complex embedded tree structures abundant in source code.""" start="00:08:52.480" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""So it's not difficult to imagine that it would have""" start="00:08:56.360" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""many capabilities that we we need""" start="00:08:58.600" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""to represent actual language.""" start="00:09:00.360" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""In fact, it already does that with features""" start="00:09:02.600" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""that allow us to quickly navigate""" start="00:09:04.760" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""through sentences and paragraphs,""" start="00:09:06.400" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""and we don't need a few key strokes.""" start="00:09:07.920" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""Or to add various text properties to text spans""" start="00:09:09.800" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""to create overlays, to name but a few.""" start="00:09:13.600" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""Emacs figured out this way to handle Unicode,""" start="00:09:17.040" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""so you don't even have to worry about the complexity""" start="00:09:22.720" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""of managing multiple languages.""" start="00:09:26.800" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""It's built into Emacs. In fact, this is not the first time""" start="00:09:29.440" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""Emacs has been used for linguistic analysis.""" start="00:09:34.040" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""One of the breakthrough moments in language,""" start="00:09:37.400" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""natural language processing was the creation""" start="00:09:41.160" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""of manually created syntactic trees""" start="00:09:44.440" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""for a 1 million word collection""" start="00:09:48.640" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""of Wall Street Journal articles.""" start="00:09:50.440" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""This was else around 1992""" start="00:09:52.400" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""before Java or graphical interfaces were common.""" start="00:09:54.880" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""The tool that was used to create that corpus was Emacs.""" start="00:09:59.280" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""It was created at UPenn, and is famously known as""" start="00:10:03.280" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""the Penn Treebank. '92 was about when""" start="00:10:08.960" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""the Linguistic Data Consortium was also established,""" start="00:10:12.720" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""and it's been about 30 years""" start="00:10:16.440" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""that it has been creating various""" start="00:10:18.040" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""language-related resources.""" start="00:10:20.720" video="mainVideo-grail" id="subtitle"]]
+[[!template new="1" text="""Org mode--in particular, the outlining mode,""" start="00:10:22.360" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""or rather the enhanced form of outlining mode--""" start="00:10:28.520" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""allows us to create rich outlines,""" start="00:10:32.400" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""attaching properties to nodes,""" start="00:10:35.600" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""and provides commands for easily customizing""" start="00:10:37.800" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""sorting of various pieces of information""" start="00:10:41.120" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""as per one's requirement.""" start="00:10:43.880" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""This can also be a very useful tool.""" start="00:10:45.640" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""This enhanced form of outline-mode adds more power to Emacs.""" start="00:10:50.240" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""It provides commands for easily customizing""" start="00:10:59.160" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""and filtering information,""" start="00:11:03.360" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""while at the same time hiding unnecessary context.""" start="00:11:05.160" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""It also allows structural editing.""" start="00:11:09.000" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""This can be a very useful tool to enrich corpora""" start="00:11:11.920" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""where we are focusing on limited amount of phenomena.""" start="00:11:16.040" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""The two together allow us to create""" start="00:11:20.920" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""a rich representation""" start="00:11:24.520" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""that can simultaneously capture multiple possible sequences,""" start="00:11:27.200" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""capture details necessary to recreate the original source,""" start="00:11:33.000" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""allow the creation of hierarchical representation,""" start="00:11:38.760" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""provide structural editing capabilities""" start="00:11:42.080" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""that can take advantage of the concept of inheritance""" start="00:11:44.680" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""within the tree structure.""" start="00:11:47.440" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""Together they allow local manipulations of structures,""" start="00:11:49.000" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""thereby minimizing data coupling.""" start="00:11:54.280" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""The concept of tags in Org mode""" start="00:11:56.200" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""complement the hierarchy part.""" start="00:11:59.120" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""Hierarchies can be very rigid,""" start="00:12:01.600" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""but to tags on hierarchies,""" start="00:12:03.840" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""we can have a multifaceted representations.""" start="00:12:06.040" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""As a matter of fact, Org mode has the ability for the tags""" start="00:12:08.840" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""to have their own hierarchical structure""" start="00:12:12.760" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""which further enhances the representational power.""" start="00:12:15.040" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""All of this can be done as a sequence""" start="00:12:18.640" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""of mostly functional data transformations,""" start="00:12:22.640" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""because most of the capabilities""" start="00:12:25.680" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""can be configured and customized.""" start="00:12:27.440" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""It is not necessary to do everything at once.""" start="00:12:29.760" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""Instead, it allows us to incrementally increase""" start="00:12:32.800" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""the complexity of the representation.""" start="00:12:36.200" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""Finally, all of this can be done""" start="00:12:37.920" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""in plain-text representation""" start="00:12:39.800" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""which comes with its own advantages.""" start="00:12:42.360" video="mainVideo-grail" id="subtitle"]]
+[[!template new="1" text="""Now let's take a simple example.""" start="00:12:45.480" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""This is a a short video that I'll play.""" start="00:12:50.680" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""The sentence is &quot;I saw the moon with a telescope,&quot;""" start="00:12:56.000" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""and let's just make a copy of the sentence.""" start="00:12:59.680" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""What we can do now is to see:""" start="00:13:04.000" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""what does this sentence comprise?""" start="00:13:09.200" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""It has a noun phrase &quot;I,&quot;""" start="00:13:11.880" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""followed by a word &quot;saw.&quot;""" start="00:13:13.680" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""Then &quot;the moon&quot; is another noun phrase,""" start="00:13:17.480" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""and &quot;with the telescope&quot; is a prepositional phrase.""" start="00:13:21.360" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""Now one thing that you might remember,""" start="00:13:24.840" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""from grammar school or syntax is that""" start="00:13:30.760" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""there is a syntactic structure.""" start="00:13:36.120" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""And if you in this particular case--""" start="00:13:41.280" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""because we know that the moon is not typically""" start="00:13:44.360" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""something that can hold the telescope,""" start="00:13:47.920" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""that the seeing must be done by me or &quot;I,&quot;""" start="00:13:51.680" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""and the telescope must be in my hand,""" start="00:13:56.240" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""or &quot;I&quot; am viewing the moon with a telescope.""" start="00:14:01.040" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""However, it is possible that in a different context""" start="00:14:04.480" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""the moon could be referring to an animated character""" start="00:14:13.520" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""in a animated series, and could actually hold the telescope.""" start="00:14:17.160" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""And this is one of the most--""" start="00:14:22.320" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""the oldest and one of the most--""" start="00:14:23.480" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""and in that case the situation might be""" start="00:14:24.840" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""that I'm actually seeing the moon holding a telescope...""" start="00:14:26.320" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""I mean. The moon is holding the telescope,""" start="00:14:30.960" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""and I'm just seeing the moon holding the telescope.""" start="00:14:36.080" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""Complex linguistic ambiguity or linguistic""" start="00:14:40.960" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""phenomena that requires world knowledge,""" start="00:14:48.000" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""and it's called the PP attachment problem""" start="00:14:53.600" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""where the propositional phrase attachment""" start="00:14:55.720" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""can be ambiguous, and various different contextual cues""" start="00:14:59.240" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""have to be used to resolve the ambiguity.""" start="00:15:04.600" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""So in this case, as you saw,""" start="00:15:06.880" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""both the readings are technically true,""" start="00:15:09.080" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""depending on different contexts.""" start="00:15:11.200" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""So one thing we could do is just""" start="00:15:13.960" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""to cut the tree and duplicate it,""" start="00:15:16.600" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""and then let's create another node""" start="00:15:19.920" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""and call it an &quot;OR&quot; node.""" start="00:15:21.600" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""And because we are saying,""" start="00:15:24.480" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""this is one of the two interpretations.""" start="00:15:26.120" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""Now let's call one interpretation &quot;a&quot;,""" start="00:15:28.360" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""and that interpretation essentially""" start="00:15:32.160" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""is this child of that node &quot;a&quot;""" start="00:15:36.160" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""and that says that the moon""" start="00:15:39.320" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""is holding the telescope.""" start="00:15:41.800" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""Now we can create another representation &quot;b&quot;""" start="00:15:44.000" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""where we capture the other interpretation,""" start="00:15:46.360" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""where this, the act, the moon or--I am actually""" start="00:15:53.920" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""holding the telescope,""" start="00:15:59.960" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""and watching the moon using it.""" start="00:16:00.520" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""So now we have two separate interpretations""" start="00:16:06.800" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""in the same structure,""" start="00:16:09.200" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""and all we do--we're able to do is with this,""" start="00:16:11.680" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""with very quick key strokes now...""" start="00:16:15.520" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""While we are at it, let's add another interesting thing,""" start="00:16:18.160" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""this node that represents &quot;I&quot;:""" start="00:16:22.440" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""&quot;He.&quot; It can be &quot;She&quot;.""" start="00:16:25.160" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""It can be &quot;the children,&quot; or it can be &quot;The people&quot;.""" start="00:16:28.920" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""Basically, any entity that has the capability to &quot;see&quot;""" start="00:16:35.760" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""can be substituted in this particular node.""" start="00:16:45.040" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""Let's see what we have here now.""" start="00:16:53.360" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""We just are getting sort of a zoom view""" start="00:16:57.400" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""of the entire structure, what we created,""" start="00:17:01.240" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""and essentially you can see that""" start="00:17:04.600" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""by just, you know, using a few keystrokes,""" start="00:17:08.040" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""we were able to capture two different interpretations""" start="00:17:11.880" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""of a a simple sentence,""" start="00:17:17.840" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""and they are also able to add""" start="00:17:20.880" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""these alternate pieces of information""" start="00:17:23.760" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""that could help machine learning algorithms""" start="00:17:27.800" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""generalize better.""" start="00:17:30.560" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""All right.""" start="00:17:32.440" video="mainVideo-grail" id="subtitle"]]
+[[!template new="1" text="""Now, let's look at the next thing. So in a sense,""" start="00:17:36.240" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""we can use this power of functional data structures""" start="00:17:40.360" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""to represent various potentially conflicting""" start="00:17:46.680" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""and structural readings of that piece of text.""" start="00:17:50.240" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""In addition to that, we can also create more texts,""" start="00:17:55.560" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""each with different structure,""" start="00:17:58.080" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""and have them all in the same place.""" start="00:17:59.800" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""This allows us to address the interpretation""" start="00:18:01.560" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""of a static sentence that might be occurring in the world,""" start="00:18:04.240" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""while simultaneously inserting information""" start="00:18:06.880" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""that would add more value to it.""" start="00:18:09.640" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""This makes the enrichment process also very efficient.""" start="00:18:11.520" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""Additionally, we can envision""" start="00:18:15.000" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""a power user of the future, or present,""" start="00:18:19.520" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""who can not only annotate a span,""" start="00:18:24.000" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""but also edit the information in situ""" start="00:18:27.480" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""in a way that would help machine algorithms""" start="00:18:31.280" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""generalize better by making more efficient use""" start="00:18:34.640" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""of the annotations.""" start="00:18:36.880" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""So together, Emacs and Org mode can speed up""" start="00:18:37.720" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""the enrichment of the signals""" start="00:18:41.520" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""in a way that allows us""" start="00:18:42.960" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""to focus on certain aspects and ignore others.""" start="00:18:44.520" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""Extremely complex landscape of rich structures""" start="00:18:47.720" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""can be captured consistently,""" start="00:18:50.840" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""in a fashion that allows computers""" start="00:18:53.040" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""to understand language.""" start="00:18:55.640" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""We can then build tools to enhance the tasks""" start="00:18:56.760" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""that we do in our everyday life.""" start="00:19:00.880" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""YAMR is acronym, or the file's type or specification""" start="00:19:03.320" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""that we are creating to capture this new""" start="00:19:10.760" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""rich representation.""" start="00:19:15.240" video="mainVideo-grail" id="subtitle"]]
+[[!template new="1" text="""We'll now look at an example of spontaneous speech""" start="00:19:17.680" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""that occurs in spoken conversations.""" start="00:19:21.960" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""Conversations frequently contain errors in speech:""" start="00:19:24.800" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""interruptions, disfluencies,""" start="00:19:28.600" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""verbal sounds such as cough or laugh,""" start="00:19:30.800" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""and other noises.""" start="00:19:33.960" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""In this sense, spontaneous speech is similar""" start="00:19:35.040" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""to a functional data stream.""" start="00:19:38.200" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""We cannot take back words that come out of our mouth,""" start="00:19:39.800" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""but we tend to make mistakes, and we correct ourselves""" start="00:19:42.760" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""as soon as we realize that we have made--""" start="00:19:47.240" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""we have misspoken.""" start="00:19:49.040" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""This process manifests through a combination""" start="00:19:50.680" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""of a handful of mechanisms, including immediate correction""" start="00:19:53.160" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""after an error, and we do this unconsciously.""" start="00:19:56.280" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""Computers, on the other hand,""" start="00:20:00.960" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""must be taught to understand these cases.""" start="00:20:02.720" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""What we see here is a example document or outline,""" start="00:20:06.640" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""or part of a document that illustrates""" start="00:20:12.800" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""various different aspects of the representation.""" start="00:20:18.120" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""We don't have a lot of time to go through""" start="00:20:22.920" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""many of the details.""" start="00:20:25.920" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""I would highly encourage you to play a...""" start="00:20:28.240" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""I'm planning on making some videos, or ascii cinemas,""" start="00:20:31.760" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""that I'll be posting, and you can,""" start="00:20:39.160" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""if you're interested, you can go through those.""" start="00:20:42.560" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""The idea here is to try to do""" start="00:20:46.760" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""a slightly more complex use case.""" start="00:20:50.360" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""But again, given the time constraint""" start="00:20:54.600" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""and the amount of information""" start="00:20:57.640" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""that needs to fit in the screen,""" start="00:21:00.280" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""this may not be very informative,""" start="00:21:01.520" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""but at least it will give you some idea""" start="00:21:05.560" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""of what can be possible.""" start="00:21:08.400" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""And in this particular case, what you're seeing is that""" start="00:21:10.440" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""there is a sentence which is &quot;What I'm I'm tr- telling now.&quot;""" start="00:21:13.280" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""Essentially, there is a repetition of the word &quot;I'm&quot;,""" start="00:21:18.320" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""and then there is a partial word""" start="00:21:21.160" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""that somebody tried to say &quot;telling&quot;,""" start="00:21:23.280" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""but started saying &quot;tr-&quot;, and then corrected themselves""" start="00:21:25.160" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""and said, &quot;telling now.&quot;""" start="00:21:29.600" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""So in this case, you see, we can capture words""" start="00:21:30.960" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""or a sequence of words, or a sequence of tokens.""" start="00:21:39.240" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""One thing to... An interesting thing to note is that in NLP,""" start="00:21:44.920" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""sometimes we have to break typically""" start="00:21:52.280" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""words that don't have spaces into two separate words,""" start="00:21:55.320" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""especially contractions like &quot;I'm&quot;,""" start="00:22:01.200" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""so the syntactic parser needs needs two separate nodes.""" start="00:22:04.120" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""But anyway, so I'll... You can see that here.""" start="00:22:08.200" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""The other... This view. What this view shows is that""" start="00:22:11.200" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""with each of the nodes in the sentence""" start="00:22:15.760" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""or in the representation,""" start="00:22:19.760" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""you can have a lot of different properties""" start="00:22:23.080" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""that you can attach to them,""" start="00:22:26.080" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""and these properties are typically hidden,""" start="00:22:27.560" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""like you saw in the earlier slide.""" start="00:22:30.120" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""But you can make use of all these properties""" start="00:22:32.720" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""to do various kind of searches and filtering.""" start="00:22:35.600" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""And on the right hand side here--""" start="00:22:39.440" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""this is actually not a legitimate syntax--""" start="00:22:43.520" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""but on the right are descriptions""" start="00:22:48.800" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""of what each of these represent.""" start="00:22:51.280" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""All the information is also available in the article.""" start="00:22:53.480" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""You can see there... It shows how much rich context""" start="00:22:57.320" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""you can capture.""" start="00:23:04.280" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""This is just a closer snapshot""" start="00:23:05.880" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""of the properties on the node,""" start="00:23:08.800" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""and you can see we can have things like,""" start="00:23:10.160" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""whether the word is a token or not,""" start="00:23:13.120" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""or that it's incomplete, whether some words""" start="00:23:14.800" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""might want to be filtered out for parsing,""" start="00:23:17.360" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""and we can say this: PARSE_IGNORE,""" start="00:23:19.960" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""or some words or restart markers...""" start="00:23:23.040" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""We can mark, add a RESTART_MARKER, or sometimes,""" start="00:23:25.520" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""some of these might have durations. Things like that.""" start="00:23:29.240" video="mainVideo-grail" id="subtitle"]]
+[[!template new="1" text="""The other fascinating thing of this representation""" start="00:23:32.000" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""is that you can edit properties in the column view.""" start="00:23:38.800" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""And suddenly, you have this tabular data structure""" start="00:23:42.600" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""combined with the hierarchical data structure.""" start="00:23:45.400" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""And as you can--you may not be able to see it here,""" start="00:23:48.880" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""but what has also happened here is that""" start="00:23:53.120" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""some of the tags have been inherited""" start="00:23:56.880" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""from the earlier nodes.""" start="00:24:01.160" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""And so you get a much fuller picture of things.""" start="00:24:02.480" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""Essentially you, can filter out things""" start="00:24:07.920" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""that you want to process,""" start="00:24:13.920" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""process them, and then reintegrate it into the whole.""" start="00:24:15.320" video="mainVideo-grail" id="subtitle"]]
+[[!template new="1" text="""So, in conclusion, today we have proposed and demonstrated""" start="00:24:20.280" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""the use of an architecture (GRAIL),""" start="00:24:25.480" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""which allows the representation, manipulation,""" start="00:24:27.560" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""and aggregation of rich linguistic structures""" start="00:24:31.320" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""in a systematic fashion.""" start="00:24:34.760" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""We have shown how GRAIL advances the tools""" start="00:24:36.520" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""available for building machine learning models""" start="00:24:41.360" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""that simulate understanding.""" start="00:24:44.600" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""Thank you very much for your time and attention today.""" start="00:24:46.880" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""My contact information is on this slide.""" start="00:24:51.680" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""If you are interested in an additional example""" start="00:24:54.640" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""that demonstrates the representation""" start="00:25:02.600" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""of speech and written text together,""" start="00:25:05.440" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""please continue watching.""" start="00:25:08.040" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""Otherwise, you can stop here""" start="00:25:10.720" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""and enjoy the rest of the conference.""" start="00:25:12.200" video="mainVideo-grail" id="subtitle"]]
+[[!template new="1" text="""Welcome to the bonus material.""" start="00:25:15.280" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""I'm glad for those of you who are stuck around.""" start="00:25:39.080" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""We are now going to examine an instance""" start="00:25:43.960" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""of speech and text signals together""" start="00:25:46.560" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""that produce multiple layers.""" start="00:25:49.160" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""When we have--when we take a spoken conversation""" start="00:25:51.480" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""and use the best language processing models available,""" start="00:25:54.840" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""we suddenly hit a hard spot""" start="00:25:58.720" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""because the tools are typically not trained""" start="00:26:00.680" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""to filter out the unnecessary cruft""" start="00:26:03.240" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""in order to automatically interpret""" start="00:26:05.360" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""the part of what is being said""" start="00:26:07.560" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""that is actually relevant.""" start="00:26:09.560" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""Over time, language researchers""" start="00:26:11.800" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""have created many interdependent layers of annotations,""" start="00:26:14.640" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""yet the assumptions underlying them are seldom the same.""" start="00:26:17.720" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""Piecing together such related but disjointed annotations""" start="00:26:21.040" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""on their predictions poses a huge challenge.""" start="00:26:25.040" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""This is another place where we can leverage""" start="00:26:28.040" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""the data model underlying the Emacs editor,""" start="00:26:30.720" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""along with the structural editing capabilities""" start="00:26:33.120" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""of Org mode to improve current tools.""" start="00:26:35.360" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""Let's take this very simple looking utterance.""" start="00:26:38.520" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""&quot;Um \{lipsmack\} and that's it. (\{laugh\})&quot;""" start="00:26:42.840" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""Looks like the person-- so this is--""" start="00:26:48.040" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""what you are seeing here is a transcript of an audio signal""" start="00:26:50.320" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""that has a lip smack and a laugh as part of it,""" start="00:26:54.520" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""and there is also a &quot;Um&quot; like interjection.""" start="00:27:00.760" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""So this has a few interesting noises""" start="00:27:04.200" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""and specific things that would be illustrative""" start="00:27:08.200" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""of what we are going to, how we are going to represent it.""" start="00:27:14.000" video="mainVideo-grail" id="subtitle"]]
+[[!template new="1" text="""Okay. So let's say you want to have""" start="00:27:20.480" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""a syntactic analysis of this sentence or utterance.""" start="00:27:25.840" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""One common technique people use""" start="00:27:28.880" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""is just to remove the cruft, and, you know,""" start="00:27:30.960" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""write some rules, clean up the utterance,""" start="00:27:32.880" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""make it look like it's proper English,""" start="00:27:35.080" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""and then, you know, tokenize it,""" start="00:27:36.720" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""and basically just use standard tools to process it.""" start="00:27:40.240" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""But in that process, they end up eliminating""" start="00:27:43.080" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""valid pieces of signal that have meaning to others""" start="00:27:47.280" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""studying different phenomena of language.""" start="00:27:51.120" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""Here you have the rich transcript,""" start="00:27:52.800" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""the input to the syntactic parser.""" start="00:27:56.480" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""As you can see, there is a little tokenization happening""" start="00:28:00.120" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""where you'll be inserting space""" start="00:28:05.920" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""between &quot;that&quot; and the contracted is ('s),""" start="00:28:07.200" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""and between the period and the &quot;it,&quot;""" start="00:28:12.120" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""and the output of the syntactic parser is shown below.""" start="00:28:15.600" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""which (surprise) is a S-expression.""" start="00:28:18.200" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""Like I said, the parse trees, when they were created,""" start="00:28:21.640" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""and still largely when they are used, are S-expressions,""" start="00:28:24.920" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""and most of the viewers here""" start="00:28:29.800" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""should not have much problem reading it.""" start="00:28:33.000" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""You can see this tree structure""" start="00:28:35.120" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""of this syntactic parser here.""" start="00:28:37.280" video="mainVideo-grail" id="subtitle"]]
+[[!template new="1" text="""Now let's say you want to integrate""" start="00:28:39.280" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""phonetic information or phonetic layer""" start="00:28:40.920" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""that's in the audio signal, and do some analysis.""" start="00:28:44.480" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""Now, it would need you to do a few-- take a few steps.""" start="00:28:49.120" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""First, you would need to align the transcript""" start="00:28:57.520" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""with the audio. This process is called forced alignment,""" start="00:29:01.680" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""where you already know what the transcript is,""" start="00:29:06.480" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""and you have the audio, and you can get a good alignment""" start="00:29:10.400" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""using both pieces of information.""" start="00:29:14.600" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""And this is typically a technique that is used to""" start="00:29:17.600" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""create training data for training""" start="00:29:20.120" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""automatic speech recognizers.""" start="00:29:23.080" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""One interesting thing is that in order to do""" start="00:29:25.840" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""this forced alignment, you have to keep""" start="00:29:29.640" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""the non-speech events in transcript,""" start="00:29:32.880" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""because they consume some audio signal,""" start="00:29:35.800" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""and if you don't have that signal,""" start="00:29:39.080" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""the alignment process doesn't know exactly...""" start="00:29:41.400" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""you know, it doesn't do a good job,""" start="00:29:44.400" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""because it needs to align all parts of the signal""" start="00:29:45.760" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""with something, either pause or silence or noise or words.""" start="00:29:50.040" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""Interestingly, punctuations really don't factor in,""" start="00:29:55.000" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""because we don't speak in punctuations.""" start="00:29:59.720" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""So one of the things that you need to do""" start="00:30:01.560" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""is remove most of the punctuations,""" start="00:30:04.240" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""although you'll see there are some punctuations""" start="00:30:05.680" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""that can be kept, or that are to be kept.""" start="00:30:08.040" video="mainVideo-grail" id="subtitle"]]
+[[!template new="1" text="""And the other thing is that the alignment has to be done""" start="00:30:12.600" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""before tokenization, as it impacts pronunciation.""" start="00:30:15.320" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""To show an example: Here you see &quot;that's&quot;.""" start="00:30:20.160" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""When it's one word,""" start="00:30:24.400" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""it has a slightly different pronunciation""" start="00:30:26.920" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""than when it is two words, which is &quot;that is&quot;,""" start="00:30:31.960" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""like you can see &quot;is.&quot; And so,""" start="00:30:35.680" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""if you split the tokens or split the words""" start="00:30:38.400" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""in order for syntactic parser to process it,""" start="00:30:44.280" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""you would end up getting the wrong phonetic analysis.""" start="00:30:48.120" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""And if you have--if you process it""" start="00:30:51.600" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""through the phonetic analysis,""" start="00:30:54.240" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""and you don't know how to integrate it""" start="00:30:55.320" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""with the tokenized syntax, you can, you know,""" start="00:30:59.160" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""that can be pretty tricky. And a lot of time,""" start="00:31:02.720" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""people write one-off pieces of code that handle these,""" start="00:31:07.520" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""but the idea here is to try to have a general architecture""" start="00:31:10.760" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""that seamlessly integrates all these pieces.""" start="00:31:14.280" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""Then you do the syntactic parsing of the remaining tokens.""" start="00:31:17.240" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""Then you align the data and the two annotations,""" start="00:31:21.320" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""and then integrate the two layers.""" start="00:31:24.800" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""Once that is done, then you can do all kinds of""" start="00:31:27.960" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""interesting analysis, and test various hypotheses""" start="00:31:31.360" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""and generate the statistics,""" start="00:31:33.920" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""but without that you only are dealing""" start="00:31:35.280" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""with one or the other part.""" start="00:31:39.360" video="mainVideo-grail" id="subtitle"]]
+[[!template new="1" text="""Let's just take a quick look at how each of the layers""" start="00:31:42.880" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""that are involved look like.""" start="00:31:48.320" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""So this is &quot;Um \{lipsmack\}, and that's it. \{laugh\}&quot;""" start="00:31:51.160" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""This is the transcript, and on the right hand side,""" start="00:31:56.720" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""you see the same thing as a transcript""" start="00:32:00.160" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""listed in a vertical in a column.""" start="00:32:04.200" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""You'll see why, in just a second.""" start="00:32:06.240" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""And there are some place--""" start="00:32:08.200" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""there are some rows that are empty,""" start="00:32:09.880" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""some rows that are wider than the others, and we'll see why.""" start="00:32:11.280" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""The next is the tokenized sentence""" start="00:32:15.080" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""where you have space added,""" start="00:32:19.320" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""you know space between these two tokens:""" start="00:32:20.960" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""&quot;that&quot; and the apostrophe &quot;s&quot; ('s),""" start="00:32:23.600" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""and the &quot;it&quot; and the &quot;period&quot;.""" start="00:32:26.600" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""And you see on the right hand side""" start="00:32:28.080" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""that the tokens have attributes.""" start="00:32:30.680" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""So there is a token index, and there are 1, 2,""" start="00:32:33.560" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""you know 0, 1, 2, 3, 4, 5 tokens,""" start="00:32:36.440" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""and each token has a start and end character,""" start="00:32:38.840" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""and space (sp) also has a start and end character,""" start="00:32:41.480" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""and space is represented by a &quot;sp&quot;. And there are""" start="00:32:45.800" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""these other things that we removed,""" start="00:32:50.400" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""like the &quot;\{LS\}&quot; which is for &quot;\{lipsmack\}&quot;""" start="00:32:54.320" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""and &quot;\{LG\}&quot; which is &quot;\{laugh\}&quot; are showing grayed out,""" start="00:32:56.240" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""and you'll see why some of these things are grayed out""" start="00:32:59.400" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""in a little bit.""" start="00:33:02.440" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""This is what the forced alignment tool produces.""" start="00:33:03.400" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""Basically, it takes the transcript,""" start="00:33:11.920" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""and this is the transcript""" start="00:33:17.160" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""that has slightly different symbols,""" start="00:33:19.160" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""because different tools use different symbols""" start="00:33:24.120" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""and their various configurational things.""" start="00:33:26.240" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""But this is what is used to get an alignment""" start="00:33:28.160" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""or time alignment with phones.""" start="00:33:33.680" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""So this column shows the phones, and so each word...""" start="00:33:36.040" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""So, for example, &quot;and&quot; has been aligned with these phones,""" start="00:33:40.080" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""and these on the start and end""" start="00:33:43.880" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""are essentially temporal or time stamps that it aligned--""" start="00:33:46.880" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""that has been aligned to it.""" start="00:33:52.960" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""Interestingly, sometimes we don't really have any pause""" start="00:33:54.280" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""or any time duration between some words""" start="00:34:00.760" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""and those are highlighted as gray here.""" start="00:34:05.160" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""See, there's this space... Actually""" start="00:34:08.200" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""it does not have any temporal content,""" start="00:34:12.760" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""whereas this other space has some duration.""" start="00:34:17.800" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""So the ones that have some duration are captured,""" start="00:34:21.320" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""while the others are the ones that in the earlier diagram""" start="00:34:24.840" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""we saw were left out.""" start="00:34:29.520" video="mainVideo-grail" id="subtitle"]]
+[[!template new="1" text="""And the aligner actually produces multiple files.""" start="00:34:31.320" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""One of the files has a different, slightly different""" start="00:34:37.640" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""variation on the same information,""" start="00:34:44.400" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""and in this case, you can see""" start="00:34:46.680" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""that the punctuation is missing,""" start="00:34:50.000" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""and the punctuation is, you know, deliberately missing,""" start="00:34:52.400" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""because there is no time associated with it,""" start="00:34:57.600" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""and you see that it's not the tokenized sentence--""" start="00:35:02.280" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""a tokenized word. This... Now it gives you a full table,""" start="00:35:06.440" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""and you can't really look into it very carefully.""" start="00:35:17.120" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""But we can focus on the part that seems legible,""" start="00:35:21.240" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""or, you know, properly written sentence,""" start="00:35:25.880" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""process it and reincorporate it back into the whole.""" start="00:35:28.560" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""So if somebody wants to look at, for example,""" start="00:35:32.880" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""how many pauses the person made while they were talking,""" start="00:35:35.880" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""And they can actually measure the pause, the number,""" start="00:35:39.680" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""the duration, and make connections between that""" start="00:35:42.920" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""and the rich syntactic structure that is being produced.""" start="00:35:46.280" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""And in order to do that, you have to get these layers""" start="00:35:49.640" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""to align with each other,""" start="00:35:57.280" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""and this table is just a tabular representation""" start="00:35:59.040" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""of the information that we'll be storing in the YAMR file.""" start="00:36:04.360" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""Congratulations! You have reached""" start="00:36:08.680" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""the end of this demonstration.""" start="00:36:11.720" video="mainVideo-grail" id="subtitle"]]
+[[!template text="""Thank you for your time and attention.""" start="00:36:13.480" video="mainVideo-grail" id="subtitle"]]
+
Questions or comments? Please e-mail [emacsconf-org-private@gnu.org](mailto:emacsconf-org-private@gnu.org?subject=Comment%20for%20EmacsConf%202022%20grail%3A%20GRAIL---A%20Generalized%20Representation%20and%20Aggregation%20of%20Information%20Layers)
diff --git a/2022/info/grail-before.md b/2022/info/grail-before.md
index 93a2cd3b..30c5a22d 100644
--- a/2022/info/grail-before.md
+++ b/2022/info/grail-before.md
@@ -1,7 +1,7 @@
<!-- Automatically generated by emacsconf-publish-before-page -->
The following image shows where the talk is in the schedule for Sun 2022-12-04. Solid lines show talks with Q&A via BigBlueButton. Dashed lines show talks with Q&A via IRC or Etherpad.<div class="schedule-in-context schedule-svg-container" data-slug="grail">
-<svg width="800" height="150" version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink"> <title> Schedule for Sunday</title> <rect width="800" height="150" x="0" y="0" fill="white"></rect> <text font-size="10" fill="black" y="12" x="3"> Sunday</text> <a href="/2022/talks/sun-open" title="Sunday opening remarks" data-slug="sun-open"> <title> 9:00- 9:05 Sunday opening remarks</title> <rect x="0" y="15" opacity="0.5" width="7" height="59" stroke="black" stroke-dasharray="" fill="peachpuff"></rect> <g transform="translate(5,73)"> <text fill="black" x="0" y="0" font-size="10" transform="rotate(-90)"> sun-open</text></g></a> <a href="/2022/talks/survey" title="Results of the 2022 Emacs Survey" data-slug="survey"> <title> 9:06- 9:26 Results of the 2022 Emacs Survey</title> <rect x="9" y="15" opacity="0.5" width="31" height="59" stroke="black" stroke-dasharray="5,5,5" fill="peachpuff"></rect> <g transform="translate(38,73)"> <text fill="black" x="0" y="0" font-size="10" transform="rotate(-90)"> survey</text></g></a> <a href="/2022/talks/orgyear" title="This Year in Org" data-slug="orgyear"> <title> 9:35- 9:45 This Year in Org</title> <rect x="54" y="15" opacity="0.5" width="15" height="59" stroke="black" stroke-dasharray="5,5,5" fill="peachpuff"></rect> <g transform="translate(67,73)"> <text fill="black" x="0" y="0" font-size="10" transform="rotate(-90)"> orgyear</text></g></a> <a href="/2022/talks/rolodex" title="Build a Zettelkasten with the Hyperbole Rolodex" data-slug="rolodex"> <title> 9:57-10:22 Build a Zettelkasten with the Hyperbole Rolodex</title> <rect x="89" y="15" opacity="0.5" width="39" height="59" stroke="black" stroke-dasharray="" fill="peachpuff"></rect> <g transform="translate(126,73)"> <text fill="black" x="0" y="0" font-size="10" transform="rotate(-90)"> rolodex</text></g></a> <a href="/2022/talks/orgsuperlinks" title="Linking headings with org-super-links (poor-man's Zettelkasten)" data-slug="orgsuperlinks"> <title> 10:40-10:50 Linking headings with org-super-links (poor-man's Zettelkasten)</title> <rect x="156" y="15" opacity="0.5" width="15" height="59" stroke="black" stroke-dasharray="" fill="peachpuff"></rect> <g transform="translate(169,73)"> <text fill="black" x="0" y="0" font-size="10" transform="rotate(-90)"> orgsuperlinks</text></g></a> <a href="/2022/talks/orgvm" title="orgvm: a simple HTTP server for org" data-slug="orgvm"> <title> 11:10-11:20 orgvm: a simple HTTP server for org</title> <rect x="203" y="15" opacity="0.5" width="15" height="59" stroke="black" stroke-dasharray="" fill="peachpuff"></rect> <g transform="translate(216,73)"> <text fill="black" x="0" y="0" font-size="10" transform="rotate(-90)"> orgvm</text></g></a> <a href="/2022/talks/hyperorg" title="Powerful productivity with Hyperbole and Org Mode" data-slug="hyperorg"> <title> 1:00- 1:30 Powerful productivity with Hyperbole and Org Mode</title> <rect x="376" y="15" opacity="0.5" width="47" height="59" stroke="black" stroke-dasharray="" fill="peachpuff"></rect> <g transform="translate(421,73)"> <text fill="black" x="0" y="0" font-size="10" transform="rotate(-90)"> hyperorg</text></g></a> <a href="/2022/talks/workflows" title="Org workflows for developers" data-slug="workflows"> <title> 1:50- 2:15 Org workflows for developers</title> <rect x="454" y="15" opacity="0.5" width="39" height="59" stroke="black" stroke-dasharray="" fill="peachpuff"></rect> <g transform="translate(491,73)"> <text fill="black" x="0" y="0" font-size="10" transform="rotate(-90)"> 
workflows</text></g></a> <a href="/2022/talks/grail" title="GRAIL---A Generalized Representation and Aggregation of Information Layers" data-slug="grail"> <title> 2:35- 2:55 GRAIL---A Generalized Representation and Aggregation of Information Layers</title> <rect stroke-width="3" x="525" y="15" opacity="0.8" width="31" height="59" stroke="black" stroke-dasharray="" fill="peachpuff"></rect> <g transform="translate(554,73)"> <text font-weight="bold" fill="black" x="0" y="0" font-size="10" transform="rotate(-90)"> grail</text></g></a> <a href="/2022/talks/indieweb" title="Putting Org Mode on the Indieweb" data-slug="indieweb"> <title> 3:25- 3:45 Putting Org Mode on the Indieweb</title> <rect x="603" y="15" opacity="0.5" width="31" height="59" stroke="black" stroke-dasharray="" fill="peachpuff"></rect> <g transform="translate(632,73)"> <text fill="black" x="0" y="0" font-size="10" transform="rotate(-90)"> indieweb</text></g></a> <a href="/2022/talks/devel" title="Emacs development updates" data-slug="devel"> <title> 4:05- 4:15 Emacs development updates</title> <rect x="666" y="15" opacity="0.5" width="15" height="59" stroke="black" stroke-dasharray="5,5,5" fill="peachpuff"></rect> <g transform="translate(679,73)"> <text fill="black" x="0" y="0" font-size="10" transform="rotate(-90)"> devel</text></g></a> <a href="/2022/talks/fanfare" title="Fanfare for the Common Emacs User" data-slug="fanfare"> <title> 4:25- 4:35 Fanfare for the Common Emacs User</title> <rect x="698" y="15" opacity="0.5" width="15" height="59" stroke="black" stroke-dasharray="" fill="peachpuff"></rect> <g transform="translate(711,73)"> <text fill="black" x="0" y="0" font-size="10" transform="rotate(-90)"> fanfare</text></g></a> <a href="/2022/talks/sun-close" title="Sunday closing remarks" data-slug="sun-close"> <title> 4:50- 5:00 Sunday closing remarks</title> <rect x="737" y="15" opacity="0.5" width="15" height="59" stroke="black" stroke-dasharray="" fill="peachpuff"></rect> <g transform="translate(750,73)"> <text fill="black" x="0" y="0" font-size="10" transform="rotate(-90)"> sun-close</text></g></a> <a href="/2022/talks/rde" title="rde Emacs introduction" data-slug="rde"> <title> 10:00-10:25 rde Emacs introduction</title> <rect x="94" y="75" opacity="0.5" width="39" height="59" stroke="black" stroke-dasharray="" fill="skyblue"></rect> <g transform="translate(131,133)"> <text fill="black" x="0" y="0" font-size="10" transform="rotate(-90)"> rde</text></g></a> <a href="/2022/talks/justl" title="justl: Driving recipes within Emacs" data-slug="justl"> <title> 10:50-11:05 justl: Driving recipes within Emacs</title> <rect x="172" y="75" opacity="0.5" width="23" height="59" stroke="black" stroke-dasharray="5,5,5" fill="skyblue"></rect> <g transform="translate(193,133)"> <text fill="black" x="0" y="0" font-size="10" transform="rotate(-90)"> justl</text></g></a> <a href="/2022/talks/rms" title="What I'd like to see in Emacs" data-slug="rms"> <title> 11:15-11:35 What I'd like to see in Emacs</title> <rect x="211" y="75" opacity="0.5" width="31" height="59" stroke="black" stroke-dasharray="5,5,5" fill="skyblue"></rect> <g transform="translate(240,133)"> <text fill="black" x="0" y="0" font-size="10" transform="rotate(-90)"> rms</text></g></a> <a href="/2022/talks/detached" title="Getting detached from Emacs" data-slug="detached"> <title> 1:01- 1:16 Getting detached from Emacs</title> <rect x="378" y="75" opacity="0.5" width="23" height="59" stroke="black" stroke-dasharray="" fill="skyblue"></rect> <g transform="translate(399,133)"> 
<text fill="black" x="0" y="0" font-size="10" transform="rotate(-90)"> detached</text></g></a> <a href="/2022/talks/eshell" title="Top 10 reasons why you should be using Eshell" data-slug="eshell"> <title> 1:40- 1:55 Top 10 reasons why you should be using Eshell</title> <rect x="439" y="75" opacity="0.5" width="23" height="59" stroke="black" stroke-dasharray="" fill="skyblue"></rect> <g transform="translate(460,133)"> <text fill="black" x="0" y="0" font-size="10" transform="rotate(-90)"> eshell</text></g></a> <a href="/2022/talks/async" title="Emacs was async before async was cool" data-slug="async"> <title> 2:20- 2:40 Emacs was async before async was cool</title> <rect x="501" y="75" opacity="0.5" width="31" height="59" stroke="black" stroke-dasharray="" fill="skyblue"></rect> <g transform="translate(530,133)"> <text fill="black" x="0" y="0" font-size="10" transform="rotate(-90)"> async</text></g></a> <a href="/2022/talks/dbus" title="The Wheels on D-Bus" data-slug="dbus"> <title> 3:15- 3:35 The Wheels on D-Bus</title> <rect x="588" y="75" opacity="0.5" width="31" height="59" stroke="black" stroke-dasharray="" fill="skyblue"></rect> <g transform="translate(617,133)"> <text fill="black" x="0" y="0" font-size="10" transform="rotate(-90)"> dbus</text></g></a> <a href="/2022/talks/localizing" title="Pre-localizing Emacs" data-slug="localizing"> <title> 4:00- 4:10 Pre-localizing Emacs</title> <rect x="658" y="75" opacity="0.5" width="15" height="59" stroke="black" stroke-dasharray="" fill="skyblue"></rect> <g transform="translate(671,133)"> <text fill="black" x="0" y="0" font-size="10" transform="rotate(-90)"> localizing</text></g></a> <a href="/2022/talks/python" title="Short hyperlinks to Python docs" data-slug="python"> <title> 4:30- 4:35 Short hyperlinks to Python docs</title> <rect x="705" y="75" opacity="0.5" width="7" height="59" stroke="black" stroke-dasharray="5,5,5" fill="skyblue"></rect> <g transform="translate(710,133)"> <text fill="black" x="0" y="0" font-size="10" transform="rotate(-90)"> python</text></g></a> <g transform="translate(0,15)"> <line stroke="darkgray" x1="0" y1="0" x2="0" y2="120"></line> <text fill="black" x="0" y="133" font-size="10" text-anchor="left"> 9 AM</text></g> <g transform="translate(94,15)"> <line stroke="darkgray" x1="0" y1="0" x2="0" y2="120"></line> <text fill="black" x="0" y="133" font-size="10" text-anchor="left"> 10 AM</text></g> <g transform="translate(188,15)"> <line stroke="darkgray" x1="0" y1="0" x2="0" y2="120"></line> <text fill="black" x="0" y="133" font-size="10" text-anchor="left"> 11 AM</text></g> <g transform="translate(282,15)"> <line stroke="darkgray" x1="0" y1="0" x2="0" y2="120"></line> <text fill="black" x="0" y="133" font-size="10" text-anchor="left"> 12 PM</text></g> <g transform="translate(376,15)"> <line stroke="darkgray" x1="0" y1="0" x2="0" y2="120"></line> <text fill="black" x="0" y="133" font-size="10" text-anchor="left"> 1 PM</text></g> <g transform="translate(470,15)"> <line stroke="darkgray" x1="0" y1="0" x2="0" y2="120"></line> <text fill="black" x="0" y="133" font-size="10" text-anchor="left"> 2 PM</text></g> <g transform="translate(564,15)"> <line stroke="darkgray" x1="0" y1="0" x2="0" y2="120"></line> <text fill="black" x="0" y="133" font-size="10" text-anchor="left"> 3 PM</text></g> <g transform="translate(658,15)"> <line stroke="darkgray" x1="0" y1="0" x2="0" y2="120"></line> <text fill="black" x="0" y="133" font-size="10" text-anchor="left"> 4 PM</text></g> <g transform="translate(752,15)"> <line stroke="darkgray" 
x1="0" y1="0" x2="0" y2="120"></line> <text fill="black" x="0" y="133" font-size="10" text-anchor="left"> 5 PM</text></g></svg>
+<svg width="800" height="150" version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink"> <title> Schedule for Sunday</title> <rect width="800" height="150" x="0" y="0" fill="white"></rect> <text font-size="10" fill="black" y="12" x="3"> Sunday</text> <a href="/2022/talks/sun-open" title="Sunday opening remarks" data-slug="sun-open"> <title> 9:00- 9:05 Sunday opening remarks</title> <rect x="0" y="15" opacity="0.5" width="7" height="59" stroke="black" stroke-dasharray="" fill="gray"></rect> <g transform="translate(5,73)"> <text fill="black" x="0" y="0" font-size="10" transform="rotate(-90)"> sun-open</text></g></a> <a href="/2022/talks/survey" title="Results of the 2022 Emacs Survey" data-slug="survey"> <title> 9:06- 9:26 Results of the 2022 Emacs Survey</title> <rect x="9" y="15" opacity="0.5" width="31" height="59" stroke="black" stroke-dasharray="5,5,5" fill="gray"></rect> <g transform="translate(38,73)"> <text fill="black" x="0" y="0" font-size="10" transform="rotate(-90)"> survey</text></g></a> <a href="/2022/talks/orgyear" title="This Year in Org" data-slug="orgyear"> <title> 9:35- 9:45 This Year in Org</title> <rect x="54" y="15" opacity="0.5" width="15" height="59" stroke="black" stroke-dasharray="5,5,5" fill="gray"></rect> <g transform="translate(67,73)"> <text fill="black" x="0" y="0" font-size="10" transform="rotate(-90)"> orgyear</text></g></a> <a href="/2022/talks/rolodex" title="Build a Zettelkasten with the Hyperbole Rolodex" data-slug="rolodex"> <title> 9:57-10:22 Build a Zettelkasten with the Hyperbole Rolodex</title> <rect x="89" y="15" opacity="0.5" width="39" height="59" stroke="black" stroke-dasharray="" fill="gray"></rect> <g transform="translate(126,73)"> <text fill="black" x="0" y="0" font-size="10" transform="rotate(-90)"> rolodex</text></g></a> <a href="/2022/talks/orgsuperlinks" title="Linking headings with org-super-links (poor-man's Zettelkasten)" data-slug="orgsuperlinks"> <title> 10:40-10:50 Linking headings with org-super-links (poor-man's Zettelkasten)</title> <rect x="156" y="15" opacity="0.5" width="15" height="59" stroke="black" stroke-dasharray="" fill="gray"></rect> <g transform="translate(169,73)"> <text fill="black" x="0" y="0" font-size="10" transform="rotate(-90)"> orgsuperlinks</text></g></a> <a href="/2022/talks/orgvm" title="orgvm: a simple HTTP server for org" data-slug="orgvm"> <title> 11:10-11:20 orgvm: a simple HTTP server for org</title> <rect x="203" y="15" opacity="0.5" width="15" height="59" stroke="black" stroke-dasharray="" fill="gray"></rect> <g transform="translate(216,73)"> <text fill="black" x="0" y="0" font-size="10" transform="rotate(-90)"> orgvm</text></g></a> <a href="/2022/talks/hyperorg" title="Powerful productivity with Hyperbole and Org Mode" data-slug="hyperorg"> <title> 1:00- 1:30 Powerful productivity with Hyperbole and Org Mode</title> <rect x="376" y="15" opacity="0.5" width="47" height="59" stroke="black" stroke-dasharray="" fill="gray"></rect> <g transform="translate(421,73)"> <text fill="black" x="0" y="0" font-size="10" transform="rotate(-90)"> hyperorg</text></g></a> <a href="/2022/talks/workflows" title="Org workflows for developers" data-slug="workflows"> <title> 1:50- 2:15 Org workflows for developers</title> <rect x="454" y="15" opacity="0.5" width="39" height="59" stroke="black" stroke-dasharray="" fill="gray"></rect> <g transform="translate(491,73)"> <text fill="black" x="0" y="0" font-size="10" transform="rotate(-90)"> workflows</text></g></a> <a href="/2022/talks/grail" 
title="GRAIL---A Generalized Representation and Aggregation of Information Layers" data-slug="grail"> <title> 2:35- 2:55 GRAIL---A Generalized Representation and Aggregation of Information Layers</title> <rect stroke-width="3" x="525" y="15" opacity="0.8" width="31" height="59" stroke="black" stroke-dasharray="" fill="gray"></rect> <g transform="translate(554,73)"> <text font-weight="bold" fill="black" x="0" y="0" font-size="10" transform="rotate(-90)"> grail</text></g></a> <a href="/2022/talks/indieweb" title="Putting Org Mode on the Indieweb" data-slug="indieweb"> <title> 3:25- 3:45 Putting Org Mode on the Indieweb</title> <rect x="603" y="15" opacity="0.5" width="31" height="59" stroke="black" stroke-dasharray="" fill="gray"></rect> <g transform="translate(632,73)"> <text fill="black" x="0" y="0" font-size="10" transform="rotate(-90)"> indieweb</text></g></a> <a href="/2022/talks/devel" title="Emacs development updates" data-slug="devel"> <title> 4:05- 4:15 Emacs development updates</title> <rect x="666" y="15" opacity="0.5" width="15" height="59" stroke="black" stroke-dasharray="5,5,5" fill="gray"></rect> <g transform="translate(679,73)"> <text fill="black" x="0" y="0" font-size="10" transform="rotate(-90)"> devel</text></g></a> <a href="/2022/talks/fanfare" title="Fanfare for the Common Emacs User" data-slug="fanfare"> <title> 4:25- 4:35 Fanfare for the Common Emacs User</title> <rect x="698" y="15" opacity="0.5" width="15" height="59" stroke="black" stroke-dasharray="" fill="gray"></rect> <g transform="translate(711,73)"> <text fill="black" x="0" y="0" font-size="10" transform="rotate(-90)"> fanfare</text></g></a> <a href="/2022/talks/sun-close" title="Sunday closing remarks" data-slug="sun-close"> <title> 4:50- 5:00 Sunday closing remarks</title> <rect x="737" y="15" opacity="0.5" width="15" height="59" stroke="black" stroke-dasharray="" fill="gray"></rect> <g transform="translate(750,73)"> <text fill="black" x="0" y="0" font-size="10" transform="rotate(-90)"> sun-close</text></g></a> <a href="/2022/talks/rde" title="rde Emacs introduction" data-slug="rde"> <title> 10:00-10:25 rde Emacs introduction</title> <rect x="94" y="75" opacity="0.5" width="39" height="59" stroke="black" stroke-dasharray="" fill="gray"></rect> <g transform="translate(131,133)"> <text fill="black" x="0" y="0" font-size="10" transform="rotate(-90)"> rde</text></g></a> <a href="/2022/talks/justl" title="justl: Driving recipes within Emacs" data-slug="justl"> <title> 10:50-11:05 justl: Driving recipes within Emacs</title> <rect x="172" y="75" opacity="0.5" width="23" height="59" stroke="black" stroke-dasharray="5,5,5" fill="gray"></rect> <g transform="translate(193,133)"> <text fill="black" x="0" y="0" font-size="10" transform="rotate(-90)"> justl</text></g></a> <a href="/2022/talks/rms" title="What I'd like to see in Emacs" data-slug="rms"> <title> 11:15-11:35 What I'd like to see in Emacs</title> <rect x="211" y="75" opacity="0.5" width="31" height="59" stroke="black" stroke-dasharray="5,5,5" fill="gray"></rect> <g transform="translate(240,133)"> <text fill="black" x="0" y="0" font-size="10" transform="rotate(-90)"> rms</text></g></a> <a href="/2022/talks/detached" title="Getting detached from Emacs" data-slug="detached"> <title> 1:01- 1:16 Getting detached from Emacs</title> <rect x="378" y="75" opacity="0.5" width="23" height="59" stroke="black" stroke-dasharray="" fill="gray"></rect> <g transform="translate(399,133)"> <text fill="black" x="0" y="0" font-size="10" transform="rotate(-90)"> 
detached</text></g></a> <a href="/2022/talks/eshell" title="Top 10 reasons why you should be using Eshell" data-slug="eshell"> <title> 1:40- 1:55 Top 10 reasons why you should be using Eshell</title> <rect x="439" y="75" opacity="0.5" width="23" height="59" stroke="black" stroke-dasharray="" fill="gray"></rect> <g transform="translate(460,133)"> <text fill="black" x="0" y="0" font-size="10" transform="rotate(-90)"> eshell</text></g></a> <a href="/2022/talks/async" title="Emacs was async before async was cool" data-slug="async"> <title> 2:20- 2:40 Emacs was async before async was cool</title> <rect x="501" y="75" opacity="0.5" width="31" height="59" stroke="black" stroke-dasharray="" fill="gray"></rect> <g transform="translate(530,133)"> <text fill="black" x="0" y="0" font-size="10" transform="rotate(-90)"> async</text></g></a> <a href="/2022/talks/dbus" title="The Wheels on D-Bus" data-slug="dbus"> <title> 3:15- 3:35 The Wheels on D-Bus</title> <rect x="588" y="75" opacity="0.5" width="31" height="59" stroke="black" stroke-dasharray="" fill="gray"></rect> <g transform="translate(617,133)"> <text fill="black" x="0" y="0" font-size="10" transform="rotate(-90)"> dbus</text></g></a> <a href="/2022/talks/localizing" title="Pre-localizing Emacs" data-slug="localizing"> <title> 4:00- 4:10 Pre-localizing Emacs</title> <rect x="658" y="75" opacity="0.5" width="15" height="59" stroke="black" stroke-dasharray="" fill="gray"></rect> <g transform="translate(671,133)"> <text fill="black" x="0" y="0" font-size="10" transform="rotate(-90)"> localizing</text></g></a> <a href="/2022/talks/python" title="Short hyperlinks to Python docs" data-slug="python"> <title> 4:30- 4:35 Short hyperlinks to Python docs</title> <rect x="705" y="75" opacity="0.5" width="7" height="59" stroke="black" stroke-dasharray="5,5,5" fill="gray"></rect> <g transform="translate(710,133)"> <text fill="black" x="0" y="0" font-size="10" transform="rotate(-90)"> python</text></g></a> <g transform="translate(0,15)"> <line stroke="darkgray" x1="0" y1="0" x2="0" y2="120"></line> <text fill="black" x="0" y="133" font-size="10" text-anchor="left"> 9 AM</text></g> <g transform="translate(94,15)"> <line stroke="darkgray" x1="0" y1="0" x2="0" y2="120"></line> <text fill="black" x="0" y="133" font-size="10" text-anchor="left"> 10 AM</text></g> <g transform="translate(188,15)"> <line stroke="darkgray" x1="0" y1="0" x2="0" y2="120"></line> <text fill="black" x="0" y="133" font-size="10" text-anchor="left"> 11 AM</text></g> <g transform="translate(282,15)"> <line stroke="darkgray" x1="0" y1="0" x2="0" y2="120"></line> <text fill="black" x="0" y="133" font-size="10" text-anchor="left"> 12 PM</text></g> <g transform="translate(376,15)"> <line stroke="darkgray" x1="0" y1="0" x2="0" y2="120"></line> <text fill="black" x="0" y="133" font-size="10" text-anchor="left"> 1 PM</text></g> <g transform="translate(470,15)"> <line stroke="darkgray" x1="0" y1="0" x2="0" y2="120"></line> <text fill="black" x="0" y="133" font-size="10" text-anchor="left"> 2 PM</text></g> <g transform="translate(564,15)"> <line stroke="darkgray" x1="0" y1="0" x2="0" y2="120"></line> <text fill="black" x="0" y="133" font-size="10" text-anchor="left"> 3 PM</text></g> <g transform="translate(658,15)"> <line stroke="darkgray" x1="0" y1="0" x2="0" y2="120"></line> <text fill="black" x="0" y="133" font-size="10" text-anchor="left"> 4 PM</text></g> <g transform="translate(752,15)"> <line stroke="darkgray" x1="0" y1="0" x2="0" y2="120"></line> <text fill="black" x="0" y="133" font-size="10" 
text-anchor="left"> 5 PM</text></g></svg>
</div>
[[!toc ]]
@@ -13,16 +13,40 @@ Status: Q&A finished, IRC and pad will be archived on this page
# Talk
-[[!template id="vid" vidid="grail-mainVideo" src="https://media.emacsconf.org/2022/emacsconf-2022-grail--graila-generalized-representation-and-aggregation-of-information-layers--sameer-pradhan--main.webm" poster="https://media.emacsconf.org/2022/emacsconf-2022-grail--graila-generalized-representation-and-aggregation-of-information-layers--sameer-pradhan--main.png" ${captions}
-size="91M" duration="36:42" other_resources="""[Download --main.webm (91MB)](https://media.emacsconf.org/2022/emacsconf-2022-grail--graila-generalized-representation-and-aggregation-of-information-layers--sameer-pradhan--main.webm)
+[[!template id="vid" vidid="grail-mainVideo" src="https://media.emacsconf.org/2022/emacsconf-2022-grail--graila-generalized-representation-and-aggregation-of-information-layers--sameer-pradhan--main.webm" poster="${poster}" ${captions}
+size="60M" duration="36:42" other_resources="""[Download --main.webm (81MB)](https://media.emacsconf.org/2022/emacsconf-2022-grail--graila-generalized-representation-and-aggregation-of-information-layers--sameer-pradhan--main.webm)
+[Download --main--chapters.vtt](https://media.emacsconf.org/2022/emacsconf-2022-grail--graila-generalized-representation-and-aggregation-of-information-layers--sameer-pradhan--main--chapters.vtt)
[View on Toobnix](https://toobnix.org/w/deH3ktCLYo19Fh19eLo79D)
"""]]
+[[!template id="chapters" vidid="grail-mainVideo" data="""
+00:00:00.000 Introduction
+01:13.400 Processing language
+02:34.560 Annotation
+03:43.240 Learning from data
+04:39.680 Manual annotation
+05:44.400 How can we develop a unified representation?
+06:22.520 What role might Emacs and Org mode play?
+06:55.280 The complex structure of language
+08:10.800 Annotation tools
+10:22.360 Org mode
+12:45.480 Example
+17:36.240 Different readings
+19:17.680 Spontaneous speech
+23:32.000 Editing properties in column view
+24:20.280 Conclusion
+25:15.280 Bonus material
+27:20.480 Syntactic analysis
+28:39.280 Forced alignment
+30:12.600 Alignment before tokenization
+31:42.880 Layers
+34:31.320 Variations
+"""]]
# Q&A
-[[!template id="vid" vidid="grail-qanda" src="https://media.emacsconf.org/2022/emacsconf-2022-grail--graila-generalized-representation-and-aggregation-of-information-layers--sameer-pradhan--answers.webm" poster="https://media.emacsconf.org/2022/emacsconf-2022-grail--graila-generalized-representation-and-aggregation-of-information-layers--sameer-pradhan--answers.png" ${captions}
+[[!template id="vid" vidid="grail-qanda" src="https://media.emacsconf.org/2022/emacsconf-2022-grail--graila-generalized-representation-and-aggregation-of-information-layers--sameer-pradhan--answers.webm" poster="${poster}" ${captions}
size="41M" duration="36:42" other_resources=""""""]]