summary | shortlog | log | commit | commitdiff | tree
raw | patch | inline | side by side (parent: 6b2e9c8)
raw | patch | inline | side by side (parent: 6b2e9c8)
author | Ketmar Dark <ketmar@ketmar.no-ip.org> | |
Tue, 21 Jun 2016 02:30:12 +0000 (05:30 +0300) | ||
committer | Ketmar Dark <ketmar@ketmar.no-ip.org> | |
Tue, 21 Jun 2016 02:43:38 +0000 (05:43 +0300) |
src/tools/mapiogen/lexer.d | [new file with mode: 0644] | patch | blob |
src/tools/mapiogen/mapiogen.d | [new file with mode: 0644] | patch | blob |
diff --git a/src/tools/mapiogen/lexer.d b/src/tools/mapiogen/lexer.d
--- /dev/null
@@ -0,0 +1,607 @@
+/* coded by Ketmar // Invisible Vector <ketmar@ketmar.no-ip.org>
+ * Understanding is not required. Only obedience.
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+module lexer;
+// fixed message typo: "as least" -> "at least"
+static assert(__VERSION__ >= 2071, "you need at least DMD 2.071 to compile this code");
+
+
+// ////////////////////////////////////////////////////////////////////////// //
+// fallback alias; defined only when the build doesn't already provide `usize`
+static if (!is(typeof(usize))) private alias usize = size_t;
+
+
+// ////////////////////////////////////////////////////////////////////////// //
+// source location: file name, 1-based line/column, absolute byte offset
+public struct Loc {
+ string file;
+ int line, col;
+ uint tpos;
+
+ string toString () const { import std.string : format; return "%s (%s,%s)".format(file, line, col); }
+ string toStringNoFile () const { import std.string : format; return "(%s,%s)".format(line, col); }
+
+ // a default-initialized Loc (line == col == 0) is reported as invalid
+ @property bool valid () const pure nothrow @safe @nogc { pragma(inline, true); return (line > 0 && col > 0); }
+}
+
+
+// ////////////////////////////////////////////////////////////////////////// //
+// lexing/parsing error carrying the offending source location in `loc`
+public class ErrorAt : Exception {
+ Loc loc;
+
+ this (string msg, Throwable next=null, string file=__FILE__, usize line=__LINE__) pure nothrow @safe @nogc { super(msg, file, line, next); }
+ this (in Loc aloc, string msg, Throwable next=null, string file=__FILE__, usize line=__LINE__) pure nothrow @safe @nogc { loc = aloc; super(msg, file, line, next); }
+}
+
+
+// ////////////////////////////////////////////////////////////////////////// //
+// a single lexed token; `tkstr` is a slice of the original source text
+public struct Token {
+public:
+ enum Type {
+ EOF = -1,
+ Id,
+ Str,
+ Num,
+ Spec,
+ }
+
+private:
+ const(char)[] tkstr;
+
+public:
+ Loc loc, eloc; // token start, token end (after last char)
+ Type type = Type.EOF; // token type
+ long num; // should be enough for everyone
+
+@safe:
+ // throws `ErrorAt` at this token's location unless the token has type `tp`
+ void mustbeType (Token.Type tp, string msg="identifier expected", string file=__FILE__, usize line=__LINE__) {
+ pragma(inline, true);
+ if (type != tp) throw new ErrorAt(loc, msg, null, file, line);
+ }
+ void mustbeId (string msg="identifier expected", string file=__FILE__, usize line=__LINE__) { pragma(inline, true); mustbeType(Type.Id, msg, file, line); }
+ void mustbeStr (string msg="string expected", string file=__FILE__, usize line=__LINE__) { pragma(inline, true); mustbeType(Type.Str, msg, file, line); }
+ void mustbeNum (string msg="number expected", string file=__FILE__, usize line=__LINE__) { pragma(inline, true); mustbeType(Type.Num, msg, file, line); }
+ void mustbeSpec (string msg="punctuation expected", string file=__FILE__, usize line=__LINE__) { pragma(inline, true); mustbeType(Type.Spec, msg, file, line); }
+
+ // human-readable dump: "(line,col): Kind:payload"; mostly for debugging
+ string toString () const @trusted {
+ import std.string : format;
+ final switch (type) with (Type) {
+ case EOF: return "(%s,%d): <EOF>".format(loc.line, loc.col);
+ case Id: return "(%s,%d): Id:%s".format(loc.line, loc.col, tkstr);
+ case Str: return "(%s,%d): Str:%s".format(loc.line, loc.col, Lexer.quote(tkstr));
+ case Num: return "(%s,%d): Num:%s".format(loc.line, loc.col, num);
+ case Spec: return "(%s,%d): Spec:<%s>".format(loc.line, loc.col, tkstr);
+ }
+ assert(0);
+ }
+
+nothrow:
+ // get immutable string
+ // this converts id to `string` via `.idup`, use with caution!
+ // `.idup` is used to not anchor the whole source string
+ @property string istr () const { pragma(inline, true); return (tkstr.length ? tkstr.idup : null); }
+
+const pure nothrow @nogc @property:
+ const(char)[] str () { pragma(inline, true); return tkstr; }
+ bool isId () { pragma(inline, true); return (type == Type.Id); }
+ bool isStr () { pragma(inline, true); return (type == Type.Str); }
+ bool isNum () { pragma(inline, true); return (type == Type.Num); }
+ bool isSpec () { pragma(inline, true); return (type == Type.Spec); }
+ bool isEOF () { pragma(inline, true); return (type == Type.EOF); }
+}
+
+
+// ////////////////////////////////////////////////////////////////////////// //
+// streaming tokenizer with arbitrary lookahead (see `peek`/`opIndex`)
+public final class Lexer {
+private:
+ const(char)[] text; // whole input; never copied, tokens slice it
+ uint tpos; // offset of the next unread char
+ Loc cpos; // position for last `getChar()`
+ Loc pend; // end of previous token, for better error messages
+ bool eof;
+ bool lastWasEOL = true; // so the first `getChar()` starts line 1, col 1
+ Token[] lookup; // lookahead buffer; [0] is the current token
+ Token tokeof; // will be fixed by `nextToken()`
+
+public:
+ // `atext` is kept by reference (tokens slice it), so it must outlive the lexer;
+ // `afname` is only used for error locations and is duped unless already a `string`
+ this(T) (const(char)[] atext, T afname=null) if (is(T : const(char)[])) {
+ text = atext;
+ if (afname.length > 0) { static if (is(T == string)) cpos.file = afname; else cpos.file = afname.idup; }
+ tokeof.loc.file = cpos.file;
+ nextToken();
+ pend.line = 1;
+ pend.col = 1;
+ pend.tpos = 0;
+ }
+
+ // raise `ErrorAt` positioned at the current token (or last known location on EOF)
+ void error (string msg, string file=__FILE__, usize line=__LINE__) {
+ pragma(inline, true);
+ throw new ErrorAt((lookup.length == 0 ? loc : lookup[0].loc), msg, null, file, line);
+ }
+
+ // raise `ErrorAt` positioned at token `tk`
+ static private void error (in ref Token tk, string msg, string file=__FILE__, usize line=__LINE__) {
+ pragma(inline, true);
+ throw new ErrorAt(tk.loc, msg, null, file, line);
+ }
+
+ // raise `ErrorAt` positioned at an explicit location
+ static private void error() (in auto ref Loc loc, string msg, string file=__FILE__, usize line=__LINE__) {
+ pragma(inline, true);
+ throw new ErrorAt(loc, msg, null, file, line);
+ }
+
+ // return source line `idx` (1-based; 0 is treated as 1) with trailing
+ // whitespace stripped, or null when `idx` is past the end of the text;
+ // used for printing carets under error locations
+ const(char)[] line (uint idx) {
+ if (idx == 0) ++idx;
+ uint pos = 0;
+ while (--idx > 0) {
+ while (pos < text.length && text.ptr[pos] != '\n') ++pos;
+ ++pos;
+ }
+ if (pos >= text.length) return null;
+ uint epos = pos;
+ while (epos < text.length && text.ptr[epos] != '\n') ++epos;
+ while (epos > pos && text.ptr[epos-1] <= ' ') --epos;
+ return text[pos..epos];
+ }
+
+ // drop the current token, remember its end in `pend`, and lex one more
+ void popFront () {
+ if (lookup.length > 0) {
+ pend = lookup.ptr[0].eloc;
+ ++pend.col; // for better error messages
+ ++pend.tpos; // to be consistent
+ foreach (immutable idx; 1..lookup.length) lookup.ptr[idx-1] = lookup.ptr[idx];
+ lookup.length -= 1;
+ lookup.assumeSafeAppend;
+ }
+ nextToken();
+ }
+
+ @property pure nothrow @safe @nogc {
+ bool empty () const { pragma(inline, true); return (lookup.length == 0); }
+ // current token; a synthetic EOF token once the input is exhausted
+ ref inout(Token) front () inout { pragma(inline, true); return (lookup.length ? lookup.ptr[0] : tokeof); }
+ // current token's loc
+ auto loc () inout { pragma(inline, true); return front.loc; }
+ auto eloc () inout { pragma(inline, true); return front.eloc; }
+ auto peloc () inout { pragma(inline, true); return pend; }
+
+ bool isId () const { pragma(inline, true); return front.isId; }
+ bool isStr () const { pragma(inline, true); return front.isStr; }
+ bool isNum () const { pragma(inline, true); return front.isNum; }
+ bool isSpec () const { pragma(inline, true); return front.isSpec; }
+ }
+
+ // this eats identifier
+ void expect (const(char)[] id, string file=__FILE__, usize line=__LINE__) {
+ if (!front.isId || front.str != id) error(loc, "`"~id.idup~"` expected", file, line);
+ popFront();
+ }
+
+ // this eats identifier
+ void expectCI (const(char)[] id, string file=__FILE__, usize line=__LINE__) {
+ if (front.isId && id.length == front.str.length) {
+ bool ok = true;
+ foreach (immutable idx, char ch; front.str) {
+ if (ch >= 'A' && ch <= 'Z') ch += 32; // poor man's `tolower()`
+ char c1 = id[idx];
+ if (c1 >= 'A' && c1 <= 'Z') c1 += 32; // poor man's `tolower()`
+ if (ch != c1) { ok = false; break; }
+ }
+ if (ok) { popFront(); return; }
+ }
+ error(loc, "`"~id.idup~"` expected", file, line);
+ }
+
+ auto expectSpec (string msg="punctuation expected", string file=__FILE__, usize line=__LINE__) {
+ mustbeSpec(msg, file, line);
+ auto res = lookup[0].str;
+ popFront();
+ return res;
+ }
+
+ // this converts id to `string` via `.idup`, use with caution!
+ // `.idup` is used to not anchor the whole source string
+ string expectId (string msg="identifier expected", string file=__FILE__, usize line=__LINE__) {
+ mustbeId(msg, file, line);
+ auto res = lookup[0].istr;
+ popFront();
+ return res;
+ }
+
+ // this converts id to `string` via `.idup`, use with caution!
+ // `.idup` is used to not anchor the whole source string
+ string expectStr (string msg="string expected", string file=__FILE__, usize line=__LINE__) {
+ //pragma(inline, true);
+ mustbeStr(msg, file, line);
+ auto res = lookup[0].istr;
+ popFront();
+ return res;
+ }
+
+ // `mustbe` doesn't eat token
+ void mustbeType (Token.Type tp, string msg="identifier expected", string file=__FILE__, usize line=__LINE__) { pragma(inline, true); return front.mustbeType(tp, msg, file, line); }
+ void mustbeId (string msg="identifier expected", string file=__FILE__, usize line=__LINE__) { pragma(inline, true); return front.mustbeId(msg, file, line); }
+ void mustbeStr (string msg="string expected", string file=__FILE__, usize line=__LINE__) { pragma(inline, true); return front.mustbeStr(msg, file, line); }
+ void mustbeNum (string msg="number expected", string file=__FILE__, usize line=__LINE__) { pragma(inline, true); return front.mustbeNum(msg, file, line); }
+ void mustbeSpec (string msg="punctuation expected", string file=__FILE__, usize line=__LINE__) { pragma(inline, true); return front.mustbeSpec(msg, file, line); }
+
+ // eat identifier `id` if it is the current token; returns false without consuming otherwise
+ bool eat (const(char)[] id) {
+ if (front.isId && front.str == id) { popFront(); return true; }
+ return false;
+ }
+
+ // case-insensitive `eat`; returns the matched source text, or null on no match
+ const(char)[] eatCI (const(char)[] id) {
+ if (front.isId && id.length == front.str.length) {
+ bool ok = true;
+ foreach (immutable idx, char ch; front.str) {
+ if (ch >= 'A' && ch <= 'Z') ch += 32; // poor man's `tolower()`
+ char c1 = id[idx];
+ if (c1 >= 'A' && c1 <= 'Z') c1 += 32; // poor man's `tolower()`
+ if (ch != c1) { ok = false; break; }
+ }
+ if (ok) { auto res = front.str; popFront(); return res; }
+ }
+ return null;
+ }
+
+ // look `dist` tokens ahead (0 == current), lexing on demand; EOF token past the end
+ ref Token peek (uint dist) {
+ while (!eof && lookup.length <= dist) nextToken();
+ return (dist < lookup.length ? lookup.ptr[dist] : tokeof);
+ }
+
+ ref Token opIndex (usize dist) { pragma(inline, true); return peek(dist); }
+
+ // return loc for next `getChar()`
+ Loc nextLoc () nothrow @safe @nogc {
+ Loc res = cpos;
+ if (lastWasEOL) { ++res.line; res.col = 1; } else ++res.col;
+ return res;
+ }
+
+ // look `dist` chars ahead without consuming; '\0' past the end, embedded NULs read as ' '
+ char peekChar (uint dist=0) nothrow @trusted @nogc {
+ pragma(inline, true);
+ return (tpos+dist >= text.length ? '\0' : (text.ptr[tpos+dist] ? text.ptr[tpos+dist] : ' '));
+ }
+
+ // return char or 0
+ char getChar () nothrow @trusted @nogc {
+ if (tpos >= text.length) { tpos = text.length; eof = true; }
+ if (eof) return '\0';
+ cpos.tpos = tpos;
+ char ch = text.ptr[tpos++];
+ if (ch == '\0') ch = ' ';
+ if (lastWasEOL) { ++cpos.line; cpos.col = 1; } else ++cpos.col;
+ lastWasEOL = (ch == '\n');
+ return ch;
+ }
+
+ // skip blanks and comments
+ //TODO: make special "comment" token(s)?
+ void skipBlanks () @safe {
+ for (;;) {
+ char ch = peekChar;
+ if (ch == '/' && peekChar(1) == '/') {
+ // single-line comment
+ do { ch = getChar(); } while (ch != 0 && ch != '\n');
+ continue;
+ } else if (ch == '(' && peekChar(1) == '*') {
+ getChar(); // skip starting char
+ auto lc = cpos;
+ getChar(); // skip star
+ char pch = ' ';
+ ch = ' '; // we need this
+ for (;;) {
+ pch = ch;
+ ch = getChar();
+ if (ch == 0) error(lc, "unterminated comment");
+ if (ch == ')' && pch == '*') break;
+ }
+ continue;
+ } else if (ch == '{') {
+ getChar(); // skip starting char
+ auto lc = cpos;
+ do {
+ ch = getChar();
+ if (ch == 0) error(lc, "unterminated comment");
+ } while (ch != '}');
+ continue;
+ }
+ if (ch == 0 || ch > 32) return;
+ getChar();
+ }
+ }
+
+  // lex one more token into the `lookup` buffer; on end of input sets `eof`
+  // and fixes the location of the synthetic EOF token
+  private void nextToken () {
+    if (eof) return;
+
+    skipBlanks();
+    if (peekChar == '\0') {
+      eof = true;
+      tokeof.loc = cpos;
+      tokeof.eloc = cpos;
+      return;
+    }
+
+    Token tk;
+    auto tkspos = tpos;
+    char ch = getChar(); // the token's first char is consumed right here
+    tk.loc = cpos;
+
+    // quoted string (no escapes; closing quote must match the opening one)
+    if (ch == '"' || ch == '\'') {
+      char ech = ch;
+      tk.type = Token.Type.Str;
+      ++tkspos; // skip quote
+      for (;;) {
+        ch = getChar();
+        if (ch == 0) error(tk, "unterminated string");
+        if (ch == ech) break;
+      }
+      tk.tkstr = text[tkspos..tpos-1]; // -1 due to eaten quote
+      tk.eloc = cpos;
+      lookup ~= tk;
+      return;
+    }
+
+    // hex number (Pascal `$ff` form)
+    if (ch == '$') {
+      long n = 0;
+      tk.type = Token.Type.Num;
+      // BUGFIX: the dollar sign was already consumed by `getChar()` above
+      // (cf. the string branch, which compensates with `++tkspos`); the old
+      // extra `getChar()` here silently ate and dropped the first hex digit,
+      // so `$A` was rejected and `$FF` parsed as 15
+      int dv = digitValue(peekChar);
+      if (dv < 0 || dv > 15) error(tk, "hex number expected");
+      for (;;) {
+        dv = digitValue(peekChar);
+        if (dv < 0 || dv > 15) break;
+        n = n*16+dv;
+        getChar();
+      }
+      ch = peekChar;
+      if (isIdChar(ch) || ch == '.') error(tk, "hex number expected");
+      tk.num = n;
+      tk.tkstr = text[tkspos..tpos];
+      tk.eloc = cpos;
+      lookup ~= tk;
+      return;
+    }
+
+    // decimal number
+    if (isDigit(ch)) {
+      long n = ch-'0';
+      tk.type = Token.Type.Num;
+      for (;;) {
+        if (!isDigit(peekChar)) break;
+        ch = getChar();
+        n = n*10+ch-'0';
+      }
+      tk.num = n;
+      tk.tkstr = text[tkspos..tpos];
+      tk.eloc = cpos;
+      ch = peekChar;
+      if (isIdChar(ch)) error(tk, "invalid number");
+      lookup ~= tk;
+      return;
+    }
+
+    // identifier
+    if (isIdStart(ch)) {
+      tk.type = Token.Type.Id;
+      while (isIdChar(peekChar)) getChar();
+      tk.tkstr = text[tkspos..tpos];
+      tk.eloc = cpos;
+      lookup ~= tk;
+      return;
+    }
+
+    // multi-char punctuation recognized by the greedy matcher below
+    static immutable string[9] longSpecs = [
+      "<=",
+      ">=",
+      ":=",
+      "<>",
+      "+=",
+      "-=",
+      "*=",
+      "/=",
+      "..",
+    ];
+    enum MaxSpecLength = {
+      int ml = 0;
+      foreach (string s; longSpecs) if (s.length > ml) ml = cast(int)s.length;
+      return ml;
+    }();
+
+    // delimiter: extend char by char while some `longSpecs` entry still matches;
+    // anything that doesn't start a long spec stays a one-char token
+    char[MaxSpecLength] dbuf;
+    dbuf[0] = ch;
+    uint len = 0;
+    for (;;) {
+      ch = dbuf[len];
+      bool found = false;
+      foreach (string s; longSpecs) if (len < s.length && s[len] == ch) { found = true; break; }
+      if (!found) break;
+      if (len > 0) getChar(); // this char should be eaten
+      if (++len >= MaxSpecLength) break;
+      dbuf[len] = peekChar(0);
+    }
+    tk.type = Token.Type.Spec;
+    tk.tkstr = text[tkspos..tpos];
+    tk.eloc = cpos;
+    lookup ~= tk;
+  }
+
+ // token-driven dispatch over the current token; see `selectN`
+ auto select(RetType, string mode="peek", A...) (scope A args) { pragma(inline, true); return selectN!(RetType, mode)(0, args); }
+
+ // dispatch on the token `n` positions ahead: `args` is a flat list of
+ // (matcher, delegate) pairs, optionally ending with a single default delegate;
+ // `mode` controls whether the matched token is consumed
+ // NOTE(review): `Keyword` and `tk.Kw` are not declared anywhere in this module;
+ // the Keyword branches compile only because templates are instantiated lazily —
+ // confirm this template is never used with Keyword matchers here, or define Keyword
+ auto selectN(RetType, string mode="peek", A...) (usize n, scope A args) {
+ import std.traits : ReturnType;
+
+ static assert(mode == "peek" || mode == "pop" || mode == "pop-nondefault", "selectN: invalid mode: '"~mode~"'");
+
+ template isGoodDg(usize idx, T) {
+ private import std.traits;
+ static if (idx < A.length && isCallable!(A[idx]) && arity!(args[idx]) == 1) {
+ enum isGoodDg = is(Parameters!(A[idx])[0] == T);
+ } else {
+ enum isGoodDg = false;
+ }
+ }
+
+ template isGoodArglessDg(usize idx) {
+ private import std.traits;
+ static if (idx < A.length && isCallable!(A[idx]) && arity!(args[idx]) == 0) {
+ enum isGoodArglessDg = true;
+ } else {
+ enum isGoodArglessDg = false;
+ }
+ }
+
+ // sorry, but this has to be string mixin, due to possible empty `arg`
+ enum DoCallDg(string arg) =
+ "static if (!is(ReturnType!(A[xidx]) == void)) return cast(RetType)(args[xidx]("~arg~")); else { args[xidx]("~arg~"); return RetType.init; }";
+
+ // we can't have inner mixin templates, so... sorry, it's string again
+ enum CallDg = q{
+ static if (isGoodDg!(xidx, Token)) { mixin(DoCallDg!"tk"); }
+ else static if (isGoodDg!(xidx, Loc)) { mixin(DoCallDg!"tk.loc"); }
+ else static if (isGoodDg!(xidx, Token.Type)) { mixin(DoCallDg!"tk.type"); }
+ else static if (isGoodDg!(xidx, Keyword)) { mixin(DoCallDg!"tk.Kw"); }
+ else static if (isGoodArglessDg!(xidx)) { mixin(DoCallDg!""); }
+ else static assert(0, "selectN: invalid delegate #"~xidx.stringof);
+ };
+
+ auto tk = peek(n);
+ bool found = false;
+ foreach (immutable aidx, immutable arg; args) {
+ static if (aidx%2 == 0) {
+ static if (is(typeof(arg) == Keyword) || is(typeof(arg) == Token.Type)) {
+ static if (is(typeof(arg) == Keyword)) found = (tk == arg);
+ else static if (is(typeof(arg) == Token.Type)) found = (tk.type == arg);
+ else static assert(0, "wtf?!");
+ if (found) {
+ // process `mode`
+ static if (mode != "peek") popFront();
+ // call delegate
+ enum xidx = aidx+1;
+ mixin(CallDg);
+ }
+ } else {
+ // default
+ // process `mode`
+ static if (mode == "pop") popFront();
+ // call delegate
+ enum xidx = aidx;
+ mixin(CallDg);
+ }
+ }
+ }
+ error(tk, "selectN is out of nodes");
+ assert(0);
+ }
+
+static:
+ // digit value per ASCII char: 0-9, A-Z/a-z as 10..35, -1 for non-digits;
+ // callers clamp to the base they need (e.g. <= 15 for hex)
+ private immutable byte[256] digitValues = {
+ byte[256] res = -1;
+ foreach (ubyte idx; '0'..'9'+1) res[idx] = cast(byte)(idx-'0');
+ foreach (ubyte idx; 'A'..'Z'+1) res[idx] = cast(byte)(idx-'A'+10);
+ foreach (ubyte idx; 'a'..'z'+1) res[idx] = cast(byte)(idx-'a'+10);
+ return res;
+ }();
+
+ // chars that may start an identifier: ASCII letters and underscore
+ private immutable bool[256] idStartChars = {
+ bool[256] res = false;
+ foreach (ubyte idx; 'A'..'Z'+1) res[idx] = true;
+ foreach (ubyte idx; 'a'..'z'+1) res[idx] = true;
+ res['_'] = true;
+ return res;
+ }();
+
+ // chars that may continue an identifier: letters, digits, underscore
+ private immutable bool[256] idChars = {
+ bool[256] res = false;
+ foreach (ubyte idx; '0'..'9'+1) res[idx] = true;
+ foreach (ubyte idx; 'A'..'Z'+1) res[idx] = true;
+ foreach (ubyte idx; 'a'..'z'+1) res[idx] = true;
+ res['_'] = true;
+ return res;
+ }();
+
+ bool isDigit() (char ch) { pragma(inline, true); return (ch >= '0' && ch <= '9'); }
+ int digitValue() (char ch) { pragma(inline, true); return digitValues.ptr[cast(ubyte)ch]; }
+ bool isIdStart() (char ch) { pragma(inline, true); return idStartChars.ptr[cast(ubyte)ch]; }
+ bool isIdChar() (char ch) { pragma(inline, true); return idChars.ptr[cast(ubyte)ch]; }
+
+ // quote `s` as a GML string expression: printable runs become "..." literals,
+ // control chars / DEL / '"' become chr(NNN) calls, all joined with '+';
+ // empty input yields `""`
+ string gmlQuote (const(char)[] s) {
+ import std.array : appender;
+ auto res = appender!string();
+ enum Prev { Nothing, Char, Spec }
+ Prev prev = Prev.Nothing;
+ foreach (char ch; s) {
+ if (ch < ' ' || ch == 127 || ch == '"') {
+ import std.conv : to;
+ final switch (prev) with (Prev) {
+ case Nothing: break;
+ case Char: res.put(`"+`); break;
+ case Spec: res.put(`+`); break;
+ }
+ prev = Prev.Spec;
+ res.put("chr(");
+ res.put(to!string(cast(uint)ch));
+ res.put(")");
+ } else {
+ final switch (prev) with (Prev) {
+ case Nothing: res.put('"'); break;
+ case Char: break;
+ case Spec: res.put(`+"`); break;
+ }
+ prev = Prev.Char;
+ res.put(ch);
+ }
+ }
+ if (prev == Prev.Nothing) return `""`;
+ if (prev == Prev.Char) res.put('"');
+ return res.data;
+ }
+
+ /// quote string: append double quotes, screen all special chars;
+ /// so quoted string forms valid D string literal.
+ /// allocates.
+ string quote (const(char)[] s) {
+ import std.array : appender;
+ import std.format : formatElement, FormatSpec;
+ auto res = appender!string();
+ FormatSpec!char fspc; // defaults to 's'
+ formatElement(res, s, fspc);
+ return res.data;
+ }
+}
+
+
+// smoke test: lex a real Pascal source and dump every token
+version(lexer_test) unittest {
+  import std.file;
+  import std.stdio;
+  //enum FName = "z00.txt";
+  enum FName = "shared/MAPDEF.pas";
+  string s;
+  {
+    auto fl = File(FName);
+    auto buf = new char[](cast(uint)fl.size);
+    fl.rawRead(buf[]);
+    s = cast(string)buf;
+  }
+  auto lex = new Lexer(s, FName);
+  try {
+    while (!lex.empty) {
+      writeln(lex.front);
+      lex.popFront();
+    }
+  } catch (ErrorAt e) {
+    // BUGFIX: `e.line` is Throwable's throw-site source line, not the
+    // diagnostic; print the message, then the input location
+    writeln("PARSE ERROR: ", e.msg);
+    writeln(e.loc);
+  }
+}
diff --git a/src/tools/mapiogen/mapiogen.d b/src/tools/mapiogen/mapiogen.d
--- /dev/null
@@ -0,0 +1,679 @@
+/* coded by Ketmar // Invisible Vector <ketmar@ketmar.no-ip.org>
+ * Understanding is not required. Only obedience.
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+module mapiogen;
+// fixed message typo: "as least" -> "at least"
+static assert(__VERSION__ >= 2071, "you need at least DMD 2.071 to compile this code");
+
+import std.stdio;
+
+import lexer;
+
+
+// ////////////////////////////////////////////////////////////////////////// //
+bool useDelphiAlignment = false; // when set, TPoint fields skip the padding step in Record.finalize
+ubyte[string] triggers; // TRIGGER_XXX constant name -> numeric value
+string[ubyte] trignums; // numeric value -> TRIGGER_XXX constant name (for duplicate detection)
+
+
+// ////////////////////////////////////////////////////////////////////////// //
+// one parsed Pascal record field: name, declared type name, byte size,
+// and the offset computed by `Record.finalize`
+struct Field {
+ enum Type { Bytes, Chars, Integral, TPoint, Boolean }
+
+ string name;
+ string typename;
+ uint size;
+ uint ofs;
+ Type type;
+}
+
+// a parsed record; `ids` holds the trigger constant names (or the record
+// type name when `normal` is set), `size` is computed by `finalize`
+struct Record {
+ string[] ids; // null: default
+ Field[] fields;
+ uint size; // max size
+ bool normal;
+
+ // capitalized suffix used in generated `xreadXxx`/`xwriteXxx` helper names;
+ // for trigger records the leading "TRIGGER_" (8 chars) is stripped first
+ string getRWName () const {
+ import std.string : capitalize;
+ if (ids.length == 0) return "Default";
+ if (normal) return ids[0].capitalize;
+ return ids[0][8..$].capitalize;
+ }
+
+ // calc field offsets and record size
+ void finalize (bool packed=false) {
+ // calculate offsets
+ uint ofs = 0;
+ foreach (immutable idx, ref fld; fields) {
+ fld.ofs = ofs;
+ // delphi does this (roughly)
+ if (!packed && fld.size != 1) {
+ if (useDelphiAlignment && fld.type == Field.Type.TPoint) {
+ } else {
+ //ubyte pd = (fld.size > 4 ? 2 : fld.size > 2 ? 4 : 2);
+ ubyte pd = (fld.size > 2 ? 4 : 2);
+ if (fld.type == Field.Type.Chars && fld.size > 1) pd = 2;
+ fld.ofs += ofs.padding(pd);
+ }
+ }
+ ofs = fld.ofs+fld.size;
+ }
+ size = ofs;
+ //if (fields.length > 0 && fields[$-1].size != 1 && fields[$-1].type == Field.Type.Integral) size += size.padding(2); // just in case
+ }
+}
+
+
+Record[] tgrecords;
+Record[] records;
+
+
+// ////////////////////////////////////////////////////////////////////////// //
+// 0 128 Default (Byte128)
+// print a record's layout to stdout, one field per line: offset, size, name, declared type
+void dumpRecord (in ref Record rec) {
+  foreach (const ref f; rec.fields) {
+    writefln("%3s %3s %s (%s)", f.ofs, f.size, f.name, f.typename);
+  }
+}
+
+
+// dump the layout of every parsed trigger record
+void dumpRecords () {
+  foreach (const ref rec; tgrecords) dumpRecord(rec);
+}
+
+
+// ////////////////////////////////////////////////////////////////////////// //
+// emit the shared Pascal helper procedures: byte-wise little-endian
+// load/store routines used by every generated reader/writer
+void genMisc (File fo) {
+ fo.write(
+q{procedure getBytesAt (var dest; const buf; ofs, len: Integer);
+begin
+ Move((PChar(@buf)+ofs)^, dest, len);
+end;
+
+procedure getWordAt (var dest; const buf; ofs: Integer);
+type PWord = ^Word; PByte = ^Byte;
+var
+ p: PByte;
+ d: PWord;
+begin
+ p := PByte(@buf); Inc(p, ofs);
+ d := PWord(@dest);
+ d^ := p^;
+ Inc(p);
+ d^ := (d^) or ((p^) shl 8);
+end;
+
+procedure getIntAt (var dest; const buf; ofs: Integer);
+type PInt = ^LongWord; PByte = ^Byte;
+var
+ p: PByte;
+ d: PInt;
+begin
+ p := PByte(@buf); Inc(p, ofs);
+ d := PInt(@dest);
+ d^ := p^;
+ Inc(p);
+ d^ := (d^) or ((p^) shl 8);
+ Inc(p);
+ d^ := (d^) or ((p^) shl 16);
+ Inc(p);
+ d^ := (d^) or ((p^) shl 24);
+end;
+
+procedure putBytesAt (var buf; ofs: Integer; const src; len: Integer);
+begin
+ Move(src, (PChar(@buf)+ofs)^, len);
+end;
+
+procedure putWordAt (var buf; ofs: Integer; const src);
+type PWord = ^Word; PByte = ^Byte;
+var
+ p: PByte;
+ d: PWord;
+begin
+ p := PByte(PChar(@buf)+ofs);
+ d := PWord(@src);
+ p^ := (d^) and $ff;
+ Inc(p);
+ p^ := ((d^) shr 8) and $ff;
+end;
+
+procedure putIntAt (var buf; ofs: Integer; const src);
+type PInt = ^LongWord; PByte = ^Byte;
+var
+ p: PByte;
+ d: PInt;
+begin
+ p := PByte(PChar(@buf)+ofs);
+ d := PInt(@src);
+ p^ := (d^) and $ff;
+ Inc(p);
+ p^ := ((d^) shr 8) and $ff;
+ Inc(p);
+ p^ := ((d^) shr 16) and $ff;
+ Inc(p);
+ p^ := ((d^) shr 24) and $ff;
+end;
+
+});
+}
+
+
+// emit the nested Pascal `xreadXxx` procedure that deserializes one record
+// field by field using the helpers from `genMisc`
+void genReader (File fo, in ref Record rec) {
+ fo.write(
+ " procedure xread", rec.getRWName, " ();\n"~
+ " begin\n"
+ );
+ foreach (const ref fld; rec.fields) {
+ final switch (fld.type) {
+ case Field.Type.Bytes:
+ case Field.Type.Chars:
+ case Field.Type.Boolean:
+ fo.writeln(" getBytesAt(tr.", fld.name, ", buf, ", fld.ofs, ", ", fld.size, ");");
+ break;
+ case Field.Type.Integral:
+ switch (fld.size) {
+ case 1: fo.writeln(" getBytesAt(tr.", fld.name, ", buf, ", fld.ofs, ", ", fld.size, ");"); break;
+ case 2: fo.writeln(" getWordAt(tr.", fld.name, ", buf, ", fld.ofs, ");"); break;
+ case 4: fo.writeln(" getIntAt(tr.", fld.name, ", buf, ", fld.ofs, ");"); break;
+ default: assert(0);
+ }
+ break;
+ case Field.Type.TPoint:
+ // TPoint is serialized as two consecutive 4-byte ints (x, then y)
+ fo.writeln(" getIntAt(tr.", fld.name, ".x, buf, ", fld.ofs, ");");
+ fo.writeln(" getIntAt(tr.", fld.name, ".y, buf, ", fld.ofs+4, ");");
+ break;
+ }
+ }
+ fo.writeln(" end;\n");
+}
+
+
+// emit `mb_Read_TriggerData` (dispatching on trigger type) plus one
+// `mb_Read_XXX` procedure per plain record
+void genReaders (File fo) {
+  fo.write(
+    "procedure mb_Read_TriggerData (var tr: TTriggerData; ttype: Integer; const buf; bufsize: Integer);\n"
+  );
+  uint maxsize = 0;
+  foreach (const ref rec; tgrecords) {
+    if (rec.ids.length == 0) continue;
+    if (rec.size > maxsize) maxsize = rec.size;
+    fo.genReader(rec);
+  }
+  fo.write(
+    "begin\n"~
+    " if (bufsize < ", maxsize, ") then raise Exception.Create('invalid buffer size in mb_Read_TriggerData');\n"
+  );
+  foreach (const ref rec; tgrecords) {
+    foreach (string id; rec.ids) {
+      fo.writeln(" if (ttype = ", id, ") then begin xread", rec.getRWName, "(); exit; end;");
+    }
+  }
+  fo.writeln(" raise Exception.Create('invalid trigger type in mb_Read_TriggerData');");
+  fo.writeln("end;\n\n");
+  foreach (ref rec; records) {
+    assert(rec.normal);
+    fo.writeln("procedure mb_Read_", rec.ids[0], " (var tr: ", rec.ids[0], "; const buf; bufsize: Integer);");
+    fo.genReader(rec);
+    fo.write(
+      "begin\n"~
+      // BUGFIX: `~` was missing after this literal — the next line's literal was
+      // adjacent with no operator (implicit concatenation is deprecated/invalid D)
+      " if (bufsize < ", rec.size, ") then raise Exception.Create('invalid buffer size in read", rec.ids[0], "');\n"~
+      " xread", rec.getRWName, "();\n"~
+      "end;\n\n"
+    );
+  }
+}
+
+
+// emit the nested Pascal `xwriteXxx` procedure that serializes one record
+// field by field; mirror image of `genReader`
+void genWriter (File fo, in ref Record rec) {
+ fo.write(
+ " procedure xwrite", rec.getRWName, " ();\n"~
+ " begin\n"
+ );
+ foreach (const ref fld; rec.fields) {
+ final switch (fld.type) {
+ case Field.Type.Bytes:
+ case Field.Type.Chars:
+ case Field.Type.Boolean:
+ fo.writeln(" putBytesAt(buf, ", fld.ofs, ", tr.", fld.name, ", ", fld.size, ");");
+ break;
+ case Field.Type.Integral:
+ switch (fld.size) {
+ case 1: fo.writeln(" putBytesAt(buf, ", fld.ofs, ", tr.", fld.name, ", ", fld.size, ");"); break;
+ case 2: fo.writeln(" putWordAt(buf, ", fld.ofs, ", tr.", fld.name, ");"); break;
+ case 4: fo.writeln(" putIntAt(buf, ", fld.ofs, ", tr.", fld.name, ");"); break;
+ default: assert(0);
+ }
+ break;
+ case Field.Type.TPoint:
+ // TPoint is serialized as two consecutive 4-byte ints (x, then y)
+ fo.writeln(" putIntAt(buf, ", fld.ofs , ", tr.", fld.name, ".x);");
+ fo.writeln(" putIntAt(buf, ", fld.ofs+4, ", tr.", fld.name, ".y);");
+ break;
+ }
+ }
+ fo.writeln(" end;\n");
+}
+
+
+// emit `mb_Write_TriggerData` (dispatching on trigger type) plus one
+// `mb_Write_XXX` procedure per plain record; mirror image of `genReaders`
+void genWriters (File fo) {
+  fo.write(
+    "procedure mb_Write_TriggerData (var buf; bufsize: Integer; ttype: Integer; var tr: TTriggerData);\n"
+  );
+  uint maxsize = 0;
+  foreach (const ref rec; tgrecords) {
+    assert(!rec.normal);
+    if (rec.ids.length == 0) continue;
+    if (rec.size > maxsize) maxsize = rec.size;
+    fo.genWriter(rec);
+  }
+  fo.write(
+    "begin\n"~
+    " if (bufsize < ", maxsize, ") then raise Exception.Create('invalid buffer size in mb_Write_TriggerData');\n"
+  );
+  foreach (const ref rec; tgrecords) {
+    foreach (string id; rec.ids) {
+      fo.writeln(" if (ttype = ", id, ") then begin xwrite", rec.getRWName, "(); exit; end;");
+    }
+  }
+  fo.writeln(" raise Exception.Create('invalid trigger type in mb_Write_TriggerData');");
+  fo.writeln("end;\n\n");
+  foreach (ref rec; records) {
+    assert(rec.normal);
+    fo.writeln("procedure mb_Write_", rec.ids[0], " (var buf; bufsize: Integer; var tr: ", rec.ids[0], ");");
+    fo.genWriter(rec);
+    fo.write(
+      "begin\n"~
+      // BUGFIX: `~` was missing after this literal — the next line's literal was
+      // adjacent with no operator (implicit concatenation is deprecated/invalid D)
+      " if (bufsize < ", rec.size, ") then raise Exception.Create('invalid buffer size in write", rec.ids[0], "');\n"~
+      " xwrite", rec.getRWName, "();\n"~
+      "end;\n\n"
+    );
+  }
+}
+
+
+// ////////////////////////////////////////////////////////////////////////// //
+// print the source line containing `loc` followed by a `^` caret under its column
+void printCaret (Lexer lex, Loc loc, File ofile=stdout) {
+ auto line = lex.line(loc.line);
+ if (line.length == 0) return;
+ ofile.writeln(line);
+ foreach (immutable _; 1..loc.col) ofile.write(' ');
+ ofile.writeln('^');
+}
+
+
+// ////////////////////////////////////////////////////////////////////////// //
+// number of pad bytes needed to round `size` up to the next multiple of `alg`
+ubyte padding (uint size, ubyte alg) {
+  immutable rem = size%alg;
+  return cast(ubyte)(rem == 0 ? 0 : alg-rem);
+}
+
+
+// ////////////////////////////////////////////////////////////////////////// //
+// fill `fld.type`/`fld.size` from a Pascal type name (case-insensitive);
+// recognizes ByteNNN/CharNNN (1..32767), TPoint, Boolean, Integer, Word,
+// ShortInt; anything else is a lexer error at the current token
+void parseType (ref Field fld, const(char)[] typestr, Lexer lex) {
+ import std.algorithm : startsWith;
+ import std.string : toLower;
+ auto type = typestr.toLower;
+ if (type.startsWith("byte") || type.startsWith("char")) {
+ import std.conv : to;
+ fld.type = (type[0] == 'b' ? Field.Type.Bytes : Field.Type.Chars);
+ if (type.length == 4) {
+ fld.size = 1;
+ return;
+ }
+ try {
+ auto sz = to!uint(type[4..$]);
+ if (sz < 1 || sz > 32767) throw new Exception("invalid size");
+ fld.size = sz;
+ return;
+ } catch (Exception) {}
+ // a bad suffix falls through to the generic "invalid type" error below
+ } else if (type == "tpoint") {
+ fld.type = Field.Type.TPoint;
+ fld.size = 4*2;
+ return;
+ } else if (type == "boolean") {
+ fld.type = Field.Type.Boolean;
+ fld.size = 1;
+ return;
+ } else if (type == "integer") {
+ fld.type = Field.Type.Integral;
+ fld.size = 4;
+ return;
+ } else if (type == "word") {
+ fld.type = Field.Type.Integral;
+ fld.size = 2;
+ return;
+ } else if (type == "shortint") {
+ fld.type = Field.Type.Integral;
+ fld.size = 1;
+ return;
+ }
+ lex.error("invalid type: '"~typestr.idup~"'");
+}
+
+
+/*
+(TargetPoint: TPoint;
+ d2d_teleport: Boolean;
+ silent_teleport: Boolean;
+ TlpDir: Byte);
+*/
+// parse a parenthesized field list: `(name[, name...]: Type; ...)` with an
+// optional trailing `;`; returns one Field per declared name
+Field[] parseFields (Lexer lex) {
+ Field[] res;
+ if (!lex.isSpec || lex.front.str != "(") lex.error("'(' expected");
+ lex.popFront();
+ for (;;) {
+ if (lex.isSpec && lex.front.str == ")") { lex.popFront(); break; }
+ string[] names;
+ for (;;) {
+ names ~= lex.expectId();
+ if (lex.isSpec && lex.front.str == ":") break;
+ if (lex.isSpec && lex.front.str == ",") { lex.popFront(); continue; }
+ lex.error("':' expected");
+ }
+ if (!lex.isSpec || lex.front.str != ":") lex.error("':' expected");
+ lex.popFront();
+ auto type = lex.expectId();
+ //writeln(" ", names[], ": <", type, ">");
+ foreach (string name; names) {
+ Field fld;
+ fld.name = name;
+ fld.typename = type;
+ fld.parseType(type, lex);
+ res ~= fld;
+ }
+ if (!lex.isSpec) lex.error("';' or ')' expected");
+ if (lex.front.str == ";") { lex.popFront(); continue; }
+ if (lex.front.str != ")") lex.error("';' or ')' expected");
+ }
+ if (lex.isSpec && lex.front.str == ";") lex.popFront();
+ return res;
+}
+
+
+/*
+TargetPoint: TPoint;
+ d2d_teleport: Boolean;
+ silent_teleport: Boolean;
+ TlpDir: Byte;
+ end;
+*/
+// parse record-body field declarations (`name[, name...]: Type;` ...)
+// terminated by the `end` keyword (matched case-insensitively)
+Field[] parseRecFields (Lexer lex) {
+ Field[] res;
+ for (;;) {
+ if (lex.eatCI("end") !is null) break;
+ string[] names;
+ for (;;) {
+ names ~= lex.expectId();
+ if (lex.isSpec && lex.front.str == ":") break;
+ if (lex.isSpec && lex.front.str == ",") { lex.popFront(); continue; }
+ lex.error("':' expected");
+ }
+ if (!lex.isSpec || lex.front.str != ":") lex.error("':' expected");
+ lex.popFront();
+ auto type = lex.expectId();
+ foreach (string name; names) {
+ Field fld;
+ fld.name = name;
+ fld.typename = type;
+ fld.parseType(type, lex);
+ res ~= fld;
+ }
+ if (lex.eatCI("end") !is null) break;
+ if (!lex.isSpec || lex.front.str != ";") lex.error("';' expected");
+ lex.popFront();
+ }
+ return res;
+}
+
+
+// ////////////////////////////////////////////////////////////////////////// //
+// a "good" trigger constant looks like `TRIGGER_SOMETHING`: exactly one
+// underscore, and the `TRIGGER_MAX` sentinel is excluded
+bool isGoodTriggerName (const(char)[] id) {
+  import std.algorithm : startsWith;
+  import std.string : indexOf;
+  if (!id.startsWith("TRIGGER_") || id == "TRIGGER_MAX") return false;
+  return (id[8..$].indexOf('_') < 0);
+}
+
+
+// ////////////////////////////////////////////////////////////////////////// //
/* Parses "MAPDEF.pas": collects TRIGGER_xxx constants and the TTriggerData
 * variant record.
 *
 * Only the unit's interface section is scanned. In "const" sections, each
 * identifier accepted by `isGoodTriggerName` is registered in the global
 * `triggers`/`trignums` maps; its value must fit in a byte, and both
 * duplicate names and duplicate values are rejected. In "type" sections,
 * every definition except TTRIGGERDATA is skipped; TTRIGGERDATA must be a
 * "record case Byte of" variant record, and each of its variants is appended
 * to the global `tgrecords` array.
 *
 * Errors are reported via `lex.error` (presumably throws ErrorAt, as caught
 * in `main` -- TODO confirm). */
void parseMapDef (string fname) {
  import std.string : format, toLower, toUpper;
  Lexer lex;
  {
    // slurp the whole file and wrap it in a lexer
    auto fl = File(fname);
    auto buf = new char[](cast(uint)fl.size);
    fl.rawRead(buf[]);
    lex = new Lexer(cast(string)buf, fname);
  }
  // skip everything up to the "interface" keyword
  while (!lex.empty) {
    if (!lex.front.isId) { lex.popFront(); continue; }
    if (lex.front.str.toLower == "interface") break;
    lex.popFront();
  }
  if (lex.empty) throw new Exception("where is my interface?!");
  enum Section { Unknown, Const, Type }
  Section section;
  while (!lex.empty) {
    if (lex.front.isId) {
      auto kw = lex.front.str.toLower;
      if (kw == "implementation") break; // interface section is over
      if (kw == "const") {
        section = Section.Const;
        lex.popFront();
        continue;
      }
      if (kw == "type") {
        section = Section.Type;
        lex.popFront();
        continue;
      }
    }
    if (section == Section.Const) {
      // "<id> = <value>;" -- only TRIGGER_xxx constants are interesting
      if (!lex.isId) lex.error("identifier expected");
      auto id = lex.front.istr.toUpper;
      lex.popFront();
      auto lc = lex.loc;
      if (lex.expectSpec() != "=") lex.error(lc, "'=' expected");
      if (isGoodTriggerName(id)) {
        lex.mustbeNum();
        auto lcn = lex.loc;
        auto n = lex.front.num;
        lex.popFront();
        // trigger ids are stored in a byte
        if (n < 0 || n > 255) lex.error(lcn, "invalid value (%s) for '%s'".format(n, id));
        auto b = cast(ubyte)n;
        if (id in triggers) lex.error(lc, "duplicate constant '%s'".format(id));
        if (auto tg = b in trignums) lex.error(lcn, "same value (%s) for triggers '%s' and '%s'".format(n, id, *tg));
        triggers[id] = b;
        trignums[b] = id;
      } else {
        // not a trigger: skip the initializer up to the terminating ";"
        while (!lex.empty) {
          if (lex.front.isSpec && lex.front.str == ";") break;
          lex.popFront();
        }
      }
      lc = lex.loc;
      if (lex.expectSpec() != ";") lex.error(lc, "';' expected");
      continue;
    }
    if (section == Section.Type) {
      if (!lex.isId) lex.error("identifier expected");
      auto id = lex.front.istr.toUpper;
      lex.popFront();
      auto lc = lex.loc;
      if (lex.expectSpec() != "=") lex.error(lc, "'=' expected");
      if (id != "TTRIGGERDATA") {
        // not interesting: skip the definition up to its "end;"
        while (!lex.empty) {
          if (lex.eatCI("end") !is null) break;
          lex.popFront();
        }
        lc = lex.loc;
        if (lex.expectSpec() != ";") lex.error(lc, "';' expected");
        continue;
      } else {
        // TTriggerData = record case Byte of ... end;
        lex.expectCI("record");
        lex.expectCI("case");
        lex.expectCI("byte");
        lex.expectCI("of");
        // parse the variant branches
        for (;;) {
          if (lex.eatCI("end") !is null) break;
          string[] ids;
          Field[] fields;
          if (lex.isNum) {
            // "0: (...)": the default (unnamed) variant.
            // BUGFIX: this error used the stale `lc` location (it still
            // pointed at the "=" sign); report at the current token instead,
            // like every other immediate-token error here.
            if (lex.front.num != 0) lex.error("'0' expected");
            lex.popFront();
            if (!lex.isSpec || lex.front.str != ":") lex.error("':' expected");
            lex.popFront();
            ids = null;
            fields = lex.parseFields();
          } else {
            // "NAME[, NAME...]: (...)": a named variant branch
            for (;;) {
              ids ~= lex.expectId();
              if (lex.isSpec && lex.front.str == ":") { lex.popFront(); break; }
              if (lex.isSpec && lex.front.str == ",") { lex.popFront(); continue; }
              lex.error("',' or ':' expected");
            }
            fields = lex.parseFields();
          }
          tgrecords ~= Record(ids, fields);
          tgrecords[$-1].finalize;
        }
        lc = lex.loc;
        if (lex.expectSpec() != ";") lex.error(lc, "';' expected");
        break; // we are done
      }
    }
    lex.popFront();
    continue;
  }
}
+
+
+// ////////////////////////////////////////////////////////////////////////// //
/* Parses "MAPSTRUCT.pas": collects every "Txxx..._1 = packed record ... end;"
 * definition from the unit's interface section into the global `records`
 * array (marked `normal` and finalized with padding allowed).
 *
 * Errors are reported via `lex.error` (presumably throws ErrorAt, as caught
 * in `main` -- TODO confirm). */
void parseMapStruct (string fname) {
  import std.string : format, toLower, toUpper;

  // accept only "T...something..._1" type names (case-insensitive)
  static bool isGoodRecName (const(char)[] id) {
    import std.algorithm : startsWith, endsWith;
    import std.string : toUpper; // dropped unused `indexOf` import
    id = id.toUpper;
    if (!id.startsWith("T") || !id.endsWith("_1")) return false;
    return true;
  }

  // true if the lookahead is "= packed record" (i.e. a definition we want)
  static bool isGoodDef (Lexer lex) {
    import std.string : toLower;
    auto tk = lex.peek(1);
    if (!tk.isSpec || tk.str != "=") return false;
    tk = lex.peek(2);
    if (!tk.isId || tk.str.toLower != "packed") return false;
    tk = lex.peek(3);
    if (!tk.isId || tk.str.toLower != "record") return false;
    return true;
  }

  Lexer lex;
  {
    // slurp the whole file and wrap it in a lexer
    auto fl = File(fname);
    auto buf = new char[](cast(uint)fl.size);
    fl.rawRead(buf[]);
    lex = new Lexer(cast(string)buf, fname);
  }
  // skip everything up to the "interface" keyword
  while (!lex.empty) {
    if (!lex.front.isId) { lex.popFront(); continue; }
    if (lex.front.str.toLower == "interface") break;
    lex.popFront();
  }
  if (lex.empty) throw new Exception("where is my interface?!");
  enum Section { Unknown, Type }
  Section section;
  while (!lex.empty) {
    if (lex.front.isId) {
      auto kw = lex.front.str.toLower;
      if (kw == "implementation") break; // interface section is over
      if (kw == "type") {
        section = Section.Type;
        lex.popFront();
        continue;
      }
    }
    if (section == Section.Type) {
      if (lex.isId && isGoodRecName(lex.front.str) && isGoodDef(lex)) {
        string origId = lex.front.istr; // keep original spelling for output
        lex.popFront();
        lex.popFront(); // skip "="
        lex.expectCI("packed");
        lex.expectCI("record");
        // now parse fields
        Record rec;
        rec.ids ~= origId;
        rec.fields = lex.parseRecFields();
        rec.normal = true;
        rec.finalize(true);
        records ~= rec;
        {
          auto lc = lex.loc;
          if (lex.expectSpec() != ";") lex.error(lc, "';' expected");
        }
        continue;
      }
    }
    lex.popFront();
    continue;
  }
}
+
+
+// ////////////////////////////////////////////////////////////////////////// //
/* Entry point: parse the two Pascal units, then either dump the parsed
 * records (debug builds) or generate the two include files. */
void main () {
  try {
    parseMapDef("../../shared/MAPDEF.pas");
    parseMapStruct("../../shared/MAPSTRUCT.pas");
    debug {
      // debug builds only show what was parsed
      dumpRecords();
    } else {
      {
        // reader/writer implementations
        auto fio = File("mapstructio.inc", "w");
        fio.genMisc();
        fio.genReaders();
        fio.genWriters();
      }
      {
        // record sizes and forward declarations
        auto fsz = File("mapstructsizes.inc", "w");
        fsz.writeln("const");
        foreach (ref r; records) fsz.writeln(" SizeOf_", r.ids[0], " = ", r.size, ";");
        fsz.writeln();
        fsz.writeln("procedure mb_Read_TriggerData (var tr: TTriggerData; ttype: Integer; const buf; bufsize: Integer);");
        fsz.writeln("procedure mb_Write_TriggerData (var buf; bufsize: Integer; ttype: Integer; var tr: TTriggerData);");
        foreach (ref r; records) {
          fsz.writeln("procedure mb_Read_", r.ids[0], " (var tr: ", r.ids[0], "; const buf; bufsize: Integer);");
          fsz.writeln("procedure mb_Write_", r.ids[0], " (var buf; bufsize: Integer; var tr: ", r.ids[0], ");");
        }
      }
    }
  } catch (ErrorAt e) {
    writeln("PARSE ERROR: ", e.loc, ": ", e.msg);
  }
}