[Date Prev][Date Next][Thread Prev][Thread Next][Date Index][Thread Index]
[PATCH for Dlang support] d: change api.token.raw default value to true
From: Adela Vais
Subject: [PATCH for Dlang support] d: change api.token.raw default value to true
Date: Sat, 3 Oct 2020 17:01:14 +0300
Generate consecutive values for enum TokenKind, as D's yylex()
returns TokenKind and collisions can't happen.
* data/skeletons/d.m4: Change default value.
* tests/scanner.at, tests/d.at: Check it.
---
data/skeletons/d.m4 | 1 +
tests/d.at | 76 +++++++++++++++++++++++++++++++++++++++++++++
tests/scanner.at | 3 +-
3 files changed, 79 insertions(+), 1 deletion(-)
diff --git a/data/skeletons/d.m4 b/data/skeletons/d.m4
index dbc2e934..8d454d1c 100644
--- a/data/skeletons/d.m4
+++ b/data/skeletons/d.m4
@@ -172,6 +172,7 @@ private static immutable b4_int_type_for([$2])[[]] yy$1_ =
## ------------- ##
m4_define([b4_symbol(-2, id)], [[YYEMPTY]])
+b4_percent_define_default([[api.token.raw]], [[true]])
# b4_token_enum(TOKEN-NAME, TOKEN-NUMBER)
# ---------------------------------------
diff --git a/tests/d.at b/tests/d.at
index d38e8130..5ed45ee5 100644
--- a/tests/d.at
+++ b/tests/d.at
@@ -38,6 +38,59 @@ AT_CHECK([[grep '[mb]4_' YYParser.y]], [1], [ignore])
AT_COMPILE_D([[YYParser]])
])
+# AT_CHECK_D_MINIMAL_W_LEXER([1:DIRECTIVES],
+# [2:YYLEX_ACTION], [3:LEXER_BODY], [4:PARSER_ACTION], [5:VALUE_TYPE],
+# [6:POSITION_TYPE], [7:LOCATION_TYPE])
+# ---------------------------------------------------------------------
+# Check that a minimal parser with DIRECTIVES and a body for yylex()
+# compiles in D.
+m4_define([AT_CHECK_D_MINIMAL_W_LEXER],
+[AT_CHECK_D_MINIMAL([$1], [], [], [
+
+import std.range.primitives;
+import std.stdio;
+
+auto calcLexer(R)(R range)
+ if (isInputRange!R && is (ElementType!R : dchar))
+{
+ return new CalcLexer!R(range);
+}
+
+auto calcLexer (File f)
+{
+ import std.algorithm : map, joiner;
+ import std.utf : byDchar;
+
+ return f.byChunk(1024) // avoid making a syscall roundtrip per char
+ .map!(chunk => cast(char[]) chunk) // because byChunk returns ubyte[]
+ .joiner // combine chunks into a single virtual range of char
+ .calcLexer; // forward to other overload
+}
+
+class CalcLexer(R) : Lexer
+ if (isInputRange!R && is (ElementType!R : dchar))
+{
+ R input;
+
+ this(R r) {
+ input = r;
+ }
+
+ void yyerror(string s) {}
+
+ YYSemanticType semanticVal_;
+ YYSemanticType semanticVal() @property { return semanticVal_; }
+
+ TokenKind yylex()
+ {
+ $2
+ }
+}
+]
+[
+ $3
+], [$4], [$6])])
+
# AT_CHECK_D_GREP([LINE], [COUNT=1])
# -------------------------------------
# Check that YYParser.d contains exactly COUNT lines matching ^LINE$
@@ -80,3 +133,26 @@ interface Interface2 {}
AT_CHECK_D_GREP([[class YYParser : BaseClass, Interface1, Interface2]])
AT_CLEANUP
+
+## --------------------------------------------- ##
+## D parser class api.token.raw true by default. ##
+## --------------------------------------------- ##
+
+AT_SETUP([D parser class api.token.raw true by default])
+AT_KEYWORDS([d])
+
+AT_CHECK_D_MINIMAL_W_LEXER([
+%define api.token.raw true
+%union { int ival; }], [return TokenKind.END;])
+AT_CHECK_D_GREP([[ END = 3,]])
+
+AT_CHECK_D_MINIMAL_W_LEXER([
+%define api.token.raw false
+%union { int ival; }], [return TokenKind.END;])
+AT_CHECK_D_GREP([[ END = 258,]])
+
+AT_CHECK_D_MINIMAL_W_LEXER([
+%union { int ival; }], [return TokenKind.END;])
+AT_CHECK_D_GREP([[ END = 3,]])
+
+AT_CLEANUP
diff --git a/tests/scanner.at b/tests/scanner.at
index 2ec2cd78..b65895c9 100644
--- a/tests/scanner.at
+++ b/tests/scanner.at
@@ -321,7 +321,8 @@ AT_FULL_COMPILE([input])
# lalr1.java uses 'byte[] translate_table_ =' (and yytranslate_).
AT_CHECK([[$EGREP -c 'yytranslate\[\]|translate_table\[\]|translate_table =|translate_table_ =' input.]AT_LANG_EXT],
[ignore],
- [AT_TOKEN_RAW_IF([0], [1])[
+ [AT_D_IF([AT_TOKEN_RAW_IF([0], [0])],
+ [AT_TOKEN_RAW_IF([0], [1])])[
]])
--
2.17.1
- [PATCH for Dlang support] d: change api.token.raw default value to true,
Adela Vais <=