%% Created for CS TAN at 2022-05-23 18:43:50 +0100
%%
%% Saved with string encoding Unicode (UTF-8)

@inproceedings{Verbatim,
  author    = {Egolf, Derek and Lasser, Sam and Fisher, Kathleen},
  title     = {Verbatim: A Verified Lexer Generator},
  booktitle = {2021 {IEEE} Security and Privacy Workshops ({SPW})},
  year      = {2021},
  pages     = {92--100},
  doi       = {10.1109/SPW53761.2021.00022},
}
@inproceedings{Verbatimpp,
  author    = {Egolf, Derek and Lasser, Sam and Fisher, Kathleen},
  title     = {Verbatim++: Verified, Optimized, and Semantically Rich Lexing with Derivatives},
  booktitle = {Proceedings of the 11th {ACM} {SIGPLAN} International Conference on Certified Programs and Proofs},
  series    = {CPP 2022},
  location  = {Philadelphia, PA, USA},
  pages     = {27--39},
  numpages  = {13},
  year      = {2022},
  isbn      = {9781450391825},
  publisher = {Association for Computing Machinery},
  address   = {New York, NY, USA},
  doi       = {10.1145/3497775.3503694},
  url       = {https://doi.org/10.1145/3497775.3503694},
  keywords  = {Brzozowski derivatives, formal verification, lexical analysis, semantic actions},
  abstract  = {Lexers and parsers are attractive targets for attackers because they often sit at the boundary between a software system's internals and the outside world. Formally verified lexers can reduce the attack surface of these systems, thus making them more secure. One recent step in this direction is the development of Verbatim, a verified lexer based on the concept of Brzozowski derivatives. Two limitations restrict the tool's usefulness. First, its running time is quadratic in the length of its input string. Second, the lexer produces tokens with a simple "tag and string" representation, which limits the tool's ability to integrate with parsers that operate on more expressive token representations. In this work, we present a suite of extensions to Verbatim that overcomes these limitations while preserving the tool's original correctness guarantees. The lexer achieves effectively linear performance on a JSON benchmark through a combination of optimizations that, to our knowledge, has not been previously verified. The enhanced version of Verbatim also enables users to augment their lexical specifications with custom semantic actions, and it uses these actions to produce semantically rich tokens---i.e., tokens that carry values with arbitrary, user-defined types. All extensions were implemented and verified with the Coq Proof Assistant.},
}
11 @article{Turo_ov__2020, |
30 @article{Turo_ov__2020, |
12 author = {Lenka Turo{\v{n}}ov{\'{a}} and Luk{\'{a}}{\v{s}} Hol{\'{\i}}k and Ond{\v{r}}ej Leng{\'{a}}l and Olli Saarikivi and Margus Veanes and Tom{\'{a}}{\v{s}} Vojnar}, |
31 author = {Lenka Turo{\v{n}}ov{\'{a}} and Luk{\'{a}}{\v{s}} Hol{\'{\i}}k and Ond{\v{r}}ej Leng{\'{a}}l and Olli Saarikivi and Margus Veanes and Tom{\'{a}}{\v{s}} Vojnar}, |
13 date-added = {2022-05-23 18:43:04 +0100}, |
32 date-added = {2022-05-23 18:43:04 +0100}, |