From e07efb3798c7c6ec54aac9093ab50fddfc6c6a5b Mon Sep 17 00:00:00 2001
From: Hugo Herbelin
Date: Mon, 5 Apr 2021 16:40:10 +0200
Subject: Relying on the abstract notion of streams with location for parsing.

We also get rid of ploc.ml, now useless, relying a priori on more robust
code in lStream.ml for location reporting (see e.g. parse_parsable in
grammar.ml).
---
 printing/proof_diffs.ml | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

(limited to 'printing')

diff --git a/printing/proof_diffs.ml b/printing/proof_diffs.ml
index 9ba64717d9..804fbdea85 100644
--- a/printing/proof_diffs.ml
+++ b/printing/proof_diffs.ml
@@ -91,7 +91,7 @@ let tokenize_string s =
      But I don't understand how it's used--it looks like things get appended to it
      but it never gets cleared. *)
   let rec stream_tok acc str =
-    let e = Stream.next str in
+    let e = LStream.next str in
     if Tok.(equal e EOI) then
       List.rev acc
     else
@@ -101,7 +101,7 @@ let tokenize_string s =
   try
     let istr = Stream.of_string s in
     let lex = CLexer.LexerDiff.tok_func istr in
-    let toks = stream_tok [] (fst lex) in
+    let toks = stream_tok [] lex in
     CLexer.Lexer.State.set st;
     toks
   with exn ->
--
cgit v1.2.3
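
For context, a minimal OCaml sketch of how the patched tokenize_string flow fits together once CLexer.LexerDiff.tok_func is used as a located token stream directly. The module names (LStream, Tok, CLexer) are the Coq-internal ones that appear in the hunks above; the recursive else-branch, the saved lexer state st, and the exception handler are not shown in the hunks and are assumptions added here for illustration only.

```ocaml
(* Sketch only, not the exact source: parts marked "assumed" do not appear
   in the hunks above. *)
let rec stream_tok acc str =
  let e = LStream.next str in         (* pull the next token from the located stream *)
  if Tok.(equal e EOI) then
    List.rev acc                      (* end of input: return tokens in order *)
  else
    stream_tok (e :: acc) str         (* assumed: accumulate and continue *)

let tokenize_string s =
  let st = CLexer.Lexer.State.get () in   (* assumed: save the lexer state *)
  try
    let istr = Stream.of_string s in
    let lex = CLexer.LexerDiff.tok_func istr in
    (* After the patch, tok_func's result is the token stream itself, so there
       is no longer a (stream, location-table) pair to project with fst. *)
    let toks = stream_tok [] lex in
    CLexer.Lexer.State.set st;
    toks
  with exn ->
    CLexer.Lexer.State.set st;            (* assumed: restore state on failure *)
    raise exn
```

The point of the change is visible in the two edited lines: token access goes through LStream.next, which carries location information with the stream, rather than through a bare Stream.t paired with a separate location structure (the role previously played by ploc.ml).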