author    Ketmar Dark <ketmar@ketmar.no-ip.org>  Wed, 30 Aug 2017 16:36:00 +0000 (19:36 +0300)
committer Ketmar Dark <ketmar@ketmar.no-ip.org>  Wed, 30 Aug 2017 16:36:44 +0000 (19:36 +0300)
13 files changed:
src/mapdef/mapdef.txt            [moved from src/shared/mapdef.txt with 100% similarity]
src/shared/MAPDEF.pas
src/shared/mapdef.inc
src/shared/tests/test_hash.dpr   [moved from src/shared/zhash00.dpr with 100% similarity]
src/shared/tests/test_heap.dpr   [new file with mode: 0644]
src/shared/tests/test_idpool.dpr [moved from src/shared/ztest_idpool.dpr with 99% similarity]
src/shared/utils.pas
src/shared/xparser.pas
src/tools/mapcvt.dpr             [new file with mode: 0644]
src/tools/mapgen.dpr             [moved from src/shared/zmapgen.dpr with 53% similarity]
src/tools/mapiogen/lexer.d       [deleted file]
src/tools/mapiogen/mapiogen.d    [deleted file]
src/tools/wadcvt.dpr             [moved from src/sfs/wadcvt.dpr with 99% similarity]
diff --git a/src/shared/mapdef.txt b/src/mapdef/mapdef.txt
diff --git a/src/shared/MAPDEF.pas b/src/shared/MAPDEF.pas
index 12e05628ab16110b1582075be182cc15c0b5cb6f..e3bdddc085c1d4caf412770bbedec55ea438dfa8 100644 (file)
--- a/src/shared/MAPDEF.pas
+++ b/src/shared/MAPDEF.pas
implementation
uses
- e_log, xparser, xstreams;
+ {e_log,} xparser, xstreams;
function GetMapHeader (rec: TDynRecord): TMapHeaderRec_1;
diff --git a/src/shared/mapdef.inc b/src/shared/mapdef.inc
index 595dff4259cf1510deb98b31f8e90dc60fa0c2ad..a25866fb33c674ae5bbb90b3779ad303fe8bf740 100644 (file)
--- a/src/shared/mapdef.inc
+++ b/src/shared/mapdef.inc
// *** WARNING! ***
-// regenerate this part directly from "mapdef.txt" with 'zmapgen', NEVER manually change anything here!
+// regenerate this part directly from "mapdef.txt" with 'mapgen', NEVER manually change anything here!
// ////////////////////////////////////////////////////////////////////////// //
diff --git a/src/shared/tests/test_heap.dpr b/src/shared/tests/test_heap.dpr
--- /dev/null
+++ b/src/shared/tests/test_heap.dpr
@@ -0,0 +1,41 @@
+{$INCLUDE ../a_modes.inc}
+uses
+ SysUtils,
+ binheap in '../binheap.pas';
+
+
+var
+ heap: TBinaryHeapInt;
+begin
+ writeln('================');
+ heap := binHeapNewIntLess();
+ heap.insert(666);
+ heap.insert(42);
+ heap.insert(69);
+ heap.insert(-666);
+ heap.insert(8);
+
+ while (heap.count > 0) do
+ begin
+ writeln(heap.front);
+ heap.popFront();
+ end;
+
+ heap.Free();
+
+ writeln('================');
+ heap := binHeapNewIntGreat();
+ heap.insert(666);
+ heap.insert(42);
+ heap.insert(69);
+ heap.insert(-666);
+ heap.insert(8);
+
+ while (heap.count > 0) do
+ begin
+ writeln(heap.front);
+ heap.popFront();
+ end;
+
+ heap.Free();
+end.
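If binHeapNewIntLess() builds a min-heap and binHeapNewIntGreat() a max-heap (an assumption based on the constructor names; binheap.pas itself is not touched by this commit), the new test_heap.dpr should pop the inserted values in ascending and then descending order:

================
-666
8
42
69
666
================
666
69
42
8
-666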
similarity index 99%
rename from src/shared/ztest_idpool.dpr
rename to src/shared/tests/test_idpool.dpr
index 32d0fd05540476db2ac05aee79e34808360e057a..4938eb5252cbf61deeaf0a616727aeb0e20de745 100644 (file)
{$INCLUDE a_modes.inc}
{$DEFINE IDPOOL_CHECKS}
uses
- SysUtils, idpool;
+ SysUtils,
+ idpool in '../idpool.pas';
// ////////////////////////////////////////////////////////////////////////// //
diff --git a/src/shared/utils.pas b/src/shared/utils.pas
index 49f7ef1e4f1a9b2ec39be9cdd79760cb929ba538..6bfe7915453d93af3b465d0fb5b0a7fbd97e46df 100644 (file)
--- a/src/shared/utils.pas
+++ b/src/shared/utils.pas
// ////////////////////////////////////////////////////////////////////////// //
+function getFilenameExt (const fn: AnsiString): AnsiString;
+function setFilenameExt (const fn, ext: AnsiString): AnsiString;
+function forceFilenameExt (const fn, ext: AnsiString): AnsiString;
+
+// strips the name from `fn`, leaving the trailing slash
+function getFilenamePath (const fn: AnsiString): AnsiString;
+
+// ends with '/' or '\'?
+function isFilenamePath (const fn: AnsiString): Boolean;
+
+// strips extra trailing slashes in `path`, and extra leading slashes in `fn`
+// will add slash to `path`, even if `fn` is empty!
+function filenameConcat (const path, fn: AnsiString): AnsiString;
+
// does filename have one of ".wad", ".pk3", ".zip" extensions?
-function hasWadExtension (fn: AnsiString): Boolean;
+function hasWadExtension (const fn: AnsiString): Boolean;
// does filepath have ".XXX:\" in it?
-function isWadPath (fn: AnsiString): Boolean;
+function isWadPath (const fn: AnsiString): Boolean;
// adds ".wad" extension if filename doesn't have one of ".wad", ".pk3", ".zip"
-function addWadExtension (fn: AnsiString): AnsiString;
+function addWadExtension (const fn: AnsiString): AnsiString;
// convert number to string with nice commas
function Int64ToStrComma (i: Int64): AnsiString;
// ////////////////////////////////////////////////////////////////////////// //
-function hasWadExtension (fn: AnsiString): Boolean;
+function getFilenameExt (const fn: AnsiString): AnsiString;
+var
+ pos: Integer;
+ ch: AnsiChar;
+begin
+ pos := Length(fn);
+ while (pos > 0) do
+ begin
+ ch := fn[pos];
+ if (ch = '.') then
+ begin
+ if (pos = Length(fn)) then result := '' else result := Copy(fn, pos, Length(fn)-pos+1);
+ exit;
+ end;
+ if (ch = '/') or (ch = '\') then break;
+ Dec(pos);
+ end;
+ result := ''; // no extension
+end;
+
+
+function setFilenameExt (const fn, ext: AnsiString): AnsiString;
+var
+ pos: Integer;
+ ch: AnsiChar;
+begin
+ result := fn;
+ if (Length(ext) = 0) or (ext = '.') then exit;
+ pos := Length(fn);
+ while (pos > 0) do
+ begin
+ ch := fn[pos];
+ if (ch = '.') then exit;
+ if (ch = '/') or (ch = '\') then break;
+ Dec(pos);
+ end;
+ if (ext[1] <> '.') then result += '.';
+ result += ext;
+end;
+
+
+function forceFilenameExt (const fn, ext: AnsiString): AnsiString;
+var
+ pos: Integer;
+ ch: AnsiChar;
+begin
+ result := fn;
+ pos := Length(fn);
+ while (pos > 0) do
+ begin
+ ch := fn[pos];
+ if (ch = '.') then
+ begin
+ if (Length(ext) = 0) or (ext = '.') then
+ begin
+ result := Copy(fn, 1, pos-1);
+ end
+ else
+ begin
+ if (ext[1] = '.') then result := Copy(fn, 1, pos-1) else result := Copy(fn, 1, pos);
+ result += ext;
+ exit;
+ end;
+ end;
+ if (ch = '/') or (ch = '\') then break;
+ Dec(pos);
+ end;
+ if (Length(ext) > 0) then
+ begin
+ if (ext[1] <> '.') then result += '.';
+ result += ext;
+ end;
+end;
+
+
+// strips the name from `fn`, leaving the trailing slash
+function getFilenamePath (const fn: AnsiString): AnsiString;
+var
+ pos: Integer;
+ ch: AnsiChar;
begin
- fn := ExtractFileExt(fn);
- result := StrEquCI1251(fn, '.wad') or StrEquCI1251(fn, '.pk3') or StrEquCI1251(fn, '.zip');
+ if (Length(fn) = 0) then begin result := './'; exit; end;
+ if (fn[Length(fn)] = '/') or (fn[Length(fn)] = '\') then begin result := fn; exit; end;
+ pos := Length(fn);
+ while (pos > 0) do
+ begin
+ ch := fn[pos];
+ if (ch = '/') or (ch = '\') then begin result := Copy(fn, 1, pos); exit; end;
+ Dec(pos);
+ end;
+ result := './'; // no path -> current dir
end;
-function addWadExtension (fn: AnsiString): AnsiString;
+// ends with '/' or '\'?
+function isFilenamePath (const fn: AnsiString): Boolean;
+begin
+ if (Length(fn) = 0) then
+ begin
+ result := false;
+ end
+ else
+ begin
+ result := (fn[Length(fn)] = '/') or (fn[Length(fn)] = '\');
+ end;
+end;
+
+
+// strips extra trailing slashes in `path`, and extra leading slashes in `fn`
+// will add slash to `path`, even if `fn` is empty!
+function filenameConcat (const path, fn: AnsiString): AnsiString;
+var
+ pos: Integer;
+begin
+ pos := 1;
+ while (pos <= Length(fn)) and ((fn[pos] = '/') or (fn[pos] = '\')) do Inc(pos);
+ result := path;
+ if (Length(result) > 0) and ((result[Length(result)] <> '/') and (result[Length(result)] <> '\')) then result += '/';
+ if (pos <= Length(fn)) then
+ begin
+ result += Copy(fn, pos, Length(fn)-pos+1);
+ //FIXME: make this faster!
+ while (Length(result) > 0) and ((result[Length(result)] = '/') or (result[Length(result)] = '\')) do
+ begin
+ Delete(result, Length(result), 1);
+ end;
+ if (fn[Length(fn)] = '/') or (fn[Length(fn)] = '\') then result += '/';
+ end;
+end;
+
+
+function hasWadExtension (const fn: AnsiString): Boolean;
+var
+ ext: AnsiString;
+begin
+ ext := getFilenameExt(fn);
+ result := StrEquCI1251(ext, '.wad') or StrEquCI1251(ext, '.pk3') or StrEquCI1251(ext, '.zip');
+end;
+
+
+function addWadExtension (const fn: AnsiString): AnsiString;
begin
result := fn;
if not hasWadExtension(result) then result := result+'.wad';
end;
-function isWadPath (fn: AnsiString): Boolean;
+function isWadPath (const fn: AnsiString): Boolean;
var
- p: Integer;
+ pos: Integer;
s: AnsiString;
begin
result := false;
- while true do
+ pos := 1;
+ while (pos <= Length(fn)) do
begin
- p := Pos(':', fn);
- if (p = 0) or (length(fn)-p < 1) then break;
- if (p-4 > 1) and (fn[p-4] = '.') and ((fn[p+1] = '\') or (fn[p+1] = '/')) then
+ if (fn[pos] = ':') then
begin
- s := Copy(fn, p-4, 4);
- if StrEquCI1251(s, '.wad') or StrEquCI1251(s, '.pk3') or StrEquCI1251(s, '.zip') then
+ if (Length(fn)-pos < 1) then break;
+ if (pos-4 > 1) and (fn[pos-4] = '.') and ((fn[pos+1] = '\') or (fn[pos+1] = '/')) then
begin
- result := true;
- exit;
+ s := Copy(fn, pos-4, 4);
+ if StrEquCI1251(s, '.wad') or StrEquCI1251(s, '.pk3') or StrEquCI1251(s, '.zip') then
+ begin
+ result := true;
+ exit;
+ end;
end;
end;
- Delete(fn, 1, p);
+ Inc(pos);
end;
end;
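A minimal usage sketch for the new filename helpers in utils.pas (not part of the commit; the layout mirrors the src/tools programs, and the results in the comments are inferred from the implementations above):

{$INCLUDE ../shared/a_modes.inc}
uses
  utils in '../shared/utils.pas';
begin
  writeln(getFilenameExt('maps/megawad.wad'));           // '.wad'
  writeln(setFilenameExt('maps/megawad', 'wad'));        // 'maps/megawad.wad' (no-op if an extension is already there)
  writeln(forceFilenameExt('maps/megawad.txt', '.map')); // 'maps/megawad.map'
  writeln(getFilenamePath('maps/megawad.wad'));          // 'maps/' ('./' when there is no path at all)
  writeln(isFilenamePath('maps/'));                      // TRUE
  writeln(filenameConcat('maps/', '/megawad.wad'));      // 'maps/megawad.wad'
  writeln(addWadExtension('megawad'));                   // 'megawad.wad'
  writeln(hasWadExtension('stuff.pk3'));                 // TRUE
end.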
diff --git a/src/shared/xparser.pas b/src/shared/xparser.pas
index 7263b7db60037eef69dba6c49ffa9b6cac5a3108..3f1d1db5b462a6ed9fc69fa7ff8284029e2270b6 100644 (file)
--- a/src/shared/xparser.pas
+++ b/src/shared/xparser.pas
procedure loadNextChar (); virtual; abstract; // loads next char into mNextChar; #0 means 'eof'
public
- constructor Create (loadToken: Boolean=true);
+ constructor Create ();
destructor Destroy (); override;
function isEOF (): Boolean; inline;
type
TFileTextParser = class(TTextParser)
private
- const BufSize = 65536;
+ const BufSize = 16384;
private
mFile: TStream;
+ mStreamOwned: Boolean;
mBuffer: PChar;
mBufLen: Integer;
mBufPos: Integer;
procedure loadNextChar (); override; // loads next char into mNextChar; #0 means 'eof'
public
- constructor Create (const fname: AnsiString; loadToken: Boolean=true);
- constructor Create (st: TStream; loadToken: Boolean=true); // will take ownership on st
+ constructor Create (const fname: AnsiString);
+ constructor Create (st: TStream; astOwned: Boolean=true); // will take ownership of st by default
destructor Destroy (); override;
end;
procedure loadNextChar (); override; // loads next char into mNextChar; #0 means 'eof'
public
- constructor Create (const astr: AnsiString; loadToken: Boolean=true);
+ constructor Create (const astr: AnsiString);
destructor Destroy (); override;
end;
public
constructor Create ();
+ procedure flush (); virtual;
+
procedure put (const s: AnsiString); overload;
procedure put (v: Byte); overload;
procedure put (v: Integer); overload;
// ////////////////////////////////////////////////////////////////////////// //
type
TFileTextWriter = class(TTextWriter)
+ private
+ const BufSize = 16384;
+
private
mFile: TStream;
+ mStreamOwned: Boolean;
+ mBuffer: PAnsiChar;
+ mBufUsed: Integer;
protected
procedure putBuf (constref buf; len: SizeUInt); override;
public
constructor Create (const fname: AnsiString);
+ constructor Create (ast: TStream; astOwned: Boolean=true); // will own the stream by default
destructor Destroy (); override;
+
+ procedure flush (); override;
end;
@@ -182,45 +194,7 @@ function StrEqu (const a, b: AnsiString): Boolean; inline; begin result := (a =
// ////////////////////////////////////////////////////////////////////////// //
-var
- wc2shitmap: array[0..65535] of AnsiChar;
- wc2shitmapInited: Boolean = false;
-
-
-// ////////////////////////////////////////////////////////////////////////// //
-procedure initShitMap ();
-const
- cp1251: array[0..127] of Word = (
- $0402,$0403,$201A,$0453,$201E,$2026,$2020,$2021,$20AC,$2030,$0409,$2039,$040A,$040C,$040B,$040F,
- $0452,$2018,$2019,$201C,$201D,$2022,$2013,$2014,$003F,$2122,$0459,$203A,$045A,$045C,$045B,$045F,
- $00A0,$040E,$045E,$0408,$00A4,$0490,$00A6,$00A7,$0401,$00A9,$0404,$00AB,$00AC,$00AD,$00AE,$0407,
- $00B0,$00B1,$0406,$0456,$0491,$00B5,$00B6,$00B7,$0451,$2116,$0454,$00BB,$0458,$0405,$0455,$0457,
- $0410,$0411,$0412,$0413,$0414,$0415,$0416,$0417,$0418,$0419,$041A,$041B,$041C,$041D,$041E,$041F,
- $0420,$0421,$0422,$0423,$0424,$0425,$0426,$0427,$0428,$0429,$042A,$042B,$042C,$042D,$042E,$042F,
- $0430,$0431,$0432,$0433,$0434,$0435,$0436,$0437,$0438,$0439,$043A,$043B,$043C,$043D,$043E,$043F,
- $0440,$0441,$0442,$0443,$0444,$0445,$0446,$0447,$0448,$0449,$044A,$044B,$044C,$044D,$044E,$044F
- );
-var
- f: Integer;
-begin
- for f := 0 to High(wc2shitmap) do wc2shitmap[f] := '?';
- for f := 0 to 127 do wc2shitmap[f] := AnsiChar(f);
- for f := 0 to 127 do wc2shitmap[cp1251[f]] := AnsiChar(f+128);
- wc2shitmapInited := true;
-end;
-
-
-// ////////////////////////////////////////////////////////////////////////// //
-// TODO: make a hash or something
-function wcharTo1251 (wc: WideChar): AnsiChar; inline;
-begin
- if not wc2shitmapInited then initShitMap();
- if (LongWord(wc) > 65535) then result := '?' else result := wc2shitmap[LongWord(wc)];
-end;
-
-
-// ////////////////////////////////////////////////////////////////////////// //
-constructor TTextParser.Create (loadToken: Boolean=true);
+constructor TTextParser.Create ();
begin
mLine := 1;
mCol := 1;
mTokInt := 0;
mAllowSignedNumbers := true;
warmup(); // change `mAllowSignedNumbers` there, if necessary
- if loadToken then skipToken();
+ skipToken();
end;
// ////////////////////////////////////////////////////////////////////////// //
-constructor TFileTextParser.Create (const fname: AnsiString; loadToken: Boolean=true);
+constructor TFileTextParser.Create (const fname: AnsiString);
begin
mBuffer := nil;
mFile := openDiskFileRO(fname);
+ mStreamOwned := true;
GetMem(mBuffer, BufSize);
mBufPos := 0;
mBufLen := mFile.Read(mBuffer^, BufSize);
if (mBufLen < 0) then raise Exception.Create('TFileTextParser: read error');
- inherited Create(loadToken);
+ inherited Create();
end;
-constructor TFileTextParser.Create (st: TStream; loadToken: Boolean=true);
+constructor TFileTextParser.Create (st: TStream; astOwned: Boolean=true);
begin
if (st = nil) then raise Exception.Create('cannot create parser for nil stream');
mFile := st;
+ mStreamOwned := astOwned;
GetMem(mBuffer, BufSize);
mBufPos := 0;
mBufLen := mFile.Read(mBuffer^, BufSize);
if (mBufLen < 0) then raise Exception.Create('TFileTextParser: read error');
- inherited Create(loadToken);
+ inherited Create();
end;
destructor TFileTextParser.Destroy ();
begin
if (mBuffer <> nil) then FreeMem(mBuffer);
- mFile.Free();
+ mBuffer := nil;
+ mBufPos := 0;
+ mBufLen := 0;
+ if mStreamOwned then mFile.Free();
+ mFile := nil;
inherited;
end;
// ////////////////////////////////////////////////////////////////////////// //
-constructor TStrTextParser.Create (const astr: AnsiString; loadToken: Boolean=true);
+constructor TStrTextParser.Create (const astr: AnsiString);
begin
mStr := astr;
mPos := 1;
- inherited Create(loadToken);
+ inherited Create();
end;
// ////////////////////////////////////////////////////////////////////////// //
constructor TTextWriter.Create (); begin mIndent := 0; end;
+procedure TTextWriter.flush (); begin end;
procedure TTextWriter.put (const s: AnsiString); overload; begin if (Length(s) > 0) then putBuf((@(s[1]))^, Length(s)); end;
procedure TTextWriter.put (v: Byte); overload; begin put('%d', [v]); end;
procedure TTextWriter.put (v: Integer); overload; begin put('%d', [v]); end;
constructor TFileTextWriter.Create (const fname: AnsiString);
begin
mFile := createDiskFile(fname);
+ mStreamOwned := true;
+ mBufUsed := 0;
+ GetMem(mBuffer, BufSize);
+ assert(mBuffer <> nil);
inherited Create();
end;
+constructor TFileTextWriter.Create (ast: TStream; astOwned: Boolean=true);
+begin
+ if (ast = nil) then raise Exception.Create('cannot write to nil stream');
+ mFile := ast;
+ mStreamOwned := astOwned;
+ mBufUsed := 0;
+ GetMem(mBuffer, BufSize);
+ assert(mBuffer <> nil);
+end;
+
+
destructor TFileTextWriter.Destroy ();
begin
- mFile.Free();
+ flush();
+ if (mBuffer <> nil) then FreeMem(mBuffer);
+ mBufUsed := 0;
+ mBuffer := nil;
+ if (mStreamOwned) then mFile.Free();
+ mFile := nil;
inherited;
end;
+procedure TFileTextWriter.flush ();
+begin
+ if (mFile <> nil) and (mBufUsed > 0) then
+ begin
+ mFile.WriteBuffer(mBuffer^, mBufUsed);
+ end;
+ mBufUsed := 0;
+end;
+
+
procedure TFileTextWriter.putBuf (constref buf; len: SizeUInt);
var
pc: PChar;
+ left: Integer;
begin
- if (len > 0) then
+ if (len = 0) then exit;
+ pc := @buf;
+ while (len > 0) do
begin
- pc := @buf;
- mFile.WriteBuffer(pc^, len);
- {
- while (len > 0) do
+ left := BufSize-mBufUsed;
+ if (left = 0) then
begin
- write(pc^);
- Inc(pc);
- Dec(len);
+ flush();
+ left := BufSize-mBufUsed;
+ assert(left > 0);
end;
- }
+ if (left > len) then left := Integer(len);
+ Move(pc^, (mBuffer+mBufUsed)^, left);
+ Inc(mBufUsed, left);
+ pc += left;
+ len -= left;
end;
end;
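A short sketch of the reworked xparser API (not part of the commit): the constructors no longer take a loadToken flag and always pre-load the first token, and both the parser and the writer can now wrap a caller-owned stream. That skipToken() is callable from outside the class is an assumption here; the stream-ownership and flush() behaviour follows the declarations above.

{$INCLUDE ../shared/a_modes.inc}
uses
  SysUtils, Classes,
  xstreams in '../shared/xstreams.pas',
  xparser in '../shared/xparser.pas';
var
  pr: TTextParser;
  wr: TFileTextWriter;
  st: TMemoryStream;
begin
  // parser over an in-memory string; the first token is already loaded
  pr := TStrTextParser.Create('map "test01" 42;');
  while not pr.isEOF() do pr.skipToken(); // assumed public
  pr.Free();

  // writer over a stream the caller keeps ownership of (astOwned=false)
  st := TMemoryStream.Create();
  wr := TFileTextWriter.Create(st, false);
  wr.put('hello'#10);
  wr.flush(); // push the internal 16 KB buffer out to the stream
  wr.Free();  // does not free `st`, because astOwned was false
  st.Free();
end.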
diff --git a/src/tools/mapcvt.dpr b/src/tools/mapcvt.dpr
--- /dev/null
+++ b/src/tools/mapcvt.dpr
@@ -0,0 +1,116 @@
+{$INCLUDE ../shared/a_modes.inc}
+{$APPTYPE CONSOLE}
+
+uses
+ SysUtils, Classes,
+ xstreams in '../shared/xstreams.pas',
+ xparser in '../shared/xparser.pas',
+ xdynrec in '../shared/xdynrec.pas',
+ xprofiler in '../shared/xprofiler.pas',
+ utils in '../shared/utils.pas',
+ MAPDEF in '../shared/MAPDEF.pas';
+
+
+// ////////////////////////////////////////////////////////////////////////// //
+var
+ pr: TTextParser;
+ dfmapdef: TDynMapDef;
+ wr: TTextWriter;
+ map: TDynRecord;
+ st: TStream;
+ stt: UInt64;
+ inname: AnsiString = '';
+ outname: AnsiString = '';
+ totext: Integer = -1; // <0: guess; force outname extension
+ sign: packed array[0..3] of AnsiChar;
+begin
+ if (ParamCount = 0) then
+ begin
+ writeln('usage: mapcvt inname outname');
+ Halt(1);
+ end;
+
+ inname := ParamStr(1);
+ //writeln('inname: [', inname, ']');
+ if (ParamCount = 1) then
+ begin
+ outname := forceFilenameExt(ParamStr(1), '');
+ end
+ else
+ begin
+ outname := ParamStr(2);
+ if StrEquCI1251(getFilenameExt(outname), '.txt') then totext := 1
+ else if StrEquCI1251(getFilenameExt(outname), '.map') then totext := 0
+ else if StrEquCI1251(getFilenameExt(outname), '.dfmap') then totext := 0
+ else if (Length(getFilenameExt(outname)) = 0) then totext := -1
+ else begin writeln('FATAL: can''t guess output format!'); Halt(1); end;
+ end;
+ //writeln('outname: [', outname, ']; totext=', totext);
+
+ writeln('parsing "mapdef.txt"...');
+ //pr := TFileTextParser.Create('mapdef.txt');
+ pr := TStrTextParser.Create(defaultMapDef);
+ try
+ dfmapdef := TDynMapDef.Create(pr);
+ except on e: Exception do
+ begin
+ writeln('ERROR at (', pr.line, ',', pr.col, '): ', e.message);
+ Halt(1);
+ end;
+ end;
+
+ writeln('parsing "', inname, '"...');
+ st := openDiskFileRO(inname);
+ st.ReadBuffer(sign, 4);
+ st.position := 0;
+ if (sign[0] = 'M') and (sign[1] = 'A') and (sign[2] = 'P') and (sign[3] = #1) then
+ begin
+ // binary map
+ if (totext < 0) then begin outname := forceFilenameExt(outname, '.txt'); totext := 1; end;
+ stt := curTimeMicro();
+ map := dfmapdef.parseBinMap(st);
+ stt := curTimeMicro()-stt;
+ writeln('binary map parsed in ', stt div 1000, '.', stt mod 1000, ' microseconds');
+ end
+ else
+ begin
+ // text map
+ if (totext < 0) then begin outname := forceFilenameExt(outname, '.map'); totext := 0; end;
+ pr := TFileTextParser.Create(st);
+ try
+ stt := curTimeMicro();
+ map := dfmapdef.parseMap(pr);
+ stt := curTimeMicro()-stt;
+ writeln('text map parsed in ', stt div 1000, '.', stt mod 1000, ' microseconds');
+ except on e: Exception do
+ begin
+ writeln('ERROR at (', pr.line, ',', pr.col, '): ', e.message);
+ Halt(1);
+ end;
+ end;
+ pr.Free();
+ end;
+
+ assert(totext >= 0);
+
+ writeln('writing "', outname, '"...');
+ st := createDiskFile(outname);
+ if (totext = 0) then
+ begin
+ // write binary map
+ stt := curTimeMicro();
+ map.writeBinTo(st);
+ stt := curTimeMicro()-stt;
+ writeln('binary map written in ', stt div 1000, '.', stt mod 1000, ' microseconds');
+ end
+ else
+ begin
+ // write text map
+ wr := TFileTextWriter.Create(st);
+ stt := curTimeMicro();
+ map.writeTo(wr);
+ stt := curTimeMicro()-stt;
+ writeln('text map written in ', stt div 1000, '.', stt mod 1000, ' microseconds');
+ wr.Free();
+ end;
+end.
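Given the argument handling and the 'MAP'#1 signature probe above, typical invocations of the new converter would look like this (usage examples, not part of the commit):

  mapcvt MAP01.map MAP01.txt    -- binary map to text (a ".txt" output name forces text)
  mapcvt MAP01.txt MAP01.map    -- text map back to binary (".map" or ".dfmap" force binary)
  mapcvt MAP01.map              -- output name and format derived from the input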
diff --git a/src/shared/zmapgen.dpr b/src/tools/mapgen.dpr
similarity index 53%
rename from src/shared/zmapgen.dpr
rename to src/tools/mapgen.dpr
index e983e43688b3846ecdf3b7a355e69349393676d1..bb38d087d8e153896ac54b725dc86839ad4fb190 100644 (file)
--- a/src/shared/zmapgen.dpr
+++ b/src/tools/mapgen.dpr
-{$INCLUDE a_modes.inc}
-{$M+}
+{$INCLUDE ../shared/a_modes.inc}
+{$APPTYPE CONSOLE}
uses
SysUtils, Classes,
- xparser in 'xparser.pas',
- xdynrec in 'xdynrec.pas',
- utils in 'utils.pas';
+ xstreams in '../shared/xstreams.pas',
+ xparser in '../shared/xparser.pas',
+ xdynrec in '../shared/xdynrec.pas',
+ utils in '../shared/utils.pas';
// ////////////////////////////////////////////////////////////////////////// //
pr: TTextParser;
dfmapdef: TDynMapDef;
fo: TextFile;
- st: TStream;
+ st: TStream = nil;
ch: AnsiChar;
wdt: Integer;
s: AnsiString;
begin
+ //writeln(getFilenamePath(ParamStr(0)), '|');
+
writeln('parsing "mapdef.txt"...');
- pr := TFileTextParser.Create('mapdef.txt');
+ try
+ st := openDiskFileRO('mapdef.txt');
+ writeln('found: local mapdef');
+ except // sorry
+ st := nil;
+ end;
+ try
+ writeln(filenameConcat(getFilenamePath(ParamStr(0)), '../mapdef/mapdef.txt'), '|');
+ st := openDiskFileRO(filenameConcat(getFilenamePath(ParamStr(0)), '../mapdef/mapdef.txt'));
+ writeln('found: system mapdef');
+ except // sorry
+ writeln('FATAL: mapdef not found!');
+ end;
+
+ pr := TFileTextParser.Create(st, false); // don't own
try
dfmapdef := TDynMapDef.Create(pr);
except on e: Exception do
Halt(1);
end;
end;
+ pr.Free();
writeln('writing "mapdef.inc"...');
AssignFile(fo, 'mapdef.inc');
Rewrite(fo);
write(fo, '// *** WARNING! ***'#10);
- write(fo, '// regenerate this part directly from "mapdef.txt" with ''zmapgen'', NEVER manually change anything here!'#10#10#10);
+ write(fo, '// regenerate this part directly from "mapdef.txt" with ''mapgen'', NEVER manually change anything here!'#10#10#10);
write(fo, dfmapdef.pasdef);
- st := openDiskFileRO('mapdef.txt');
+ //st := openDiskFileRO('mapdef.txt');
+ st.position := 0;
write(fo, #10#10'const defaultMapDef: AnsiString = ''''+'#10' ');
wdt := 2;
while true do
diff --git a/src/tools/mapiogen/lexer.d b/src/tools/mapiogen/lexer.d
--- a/src/tools/mapiogen/lexer.d
+++ /dev/null
@@ -1,607 +0,0 @@
-/* coded by Ketmar // Invisible Vector <ketmar@ketmar.no-ip.org>
- * Understanding is not required. Only obedience.
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU General Public License as published by
- * the Free Software Foundation, either version 3 of the License, or
- * (at your option) any later version.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU General Public License for more details.
- *
- * You should have received a copy of the GNU General Public License
- * along with this program. If not, see <http://www.gnu.org/licenses/>.
- */
-module lexer;
-static assert(__VERSION__ >= 2071, "you need as least DMD 2.071 to compile this code");
-
-
-// ////////////////////////////////////////////////////////////////////////// //
-static if (!is(typeof(usize))) private alias usize = size_t;
-
-
-// ////////////////////////////////////////////////////////////////////////// //
-public struct Loc {
- string file;
- int line, col;
- uint tpos;
-
- string toString () const { import std.string : format; return "%s (%s,%s)".format(file, line, col); }
- string toStringNoFile () const { import std.string : format; return "(%s,%s)".format(line, col); }
-
- @property bool valid () const pure nothrow @safe @nogc { pragma(inline, true); return (line > 0 && col > 0); }
-}
-
-
-// ////////////////////////////////////////////////////////////////////////// //
-public class ErrorAt : Exception {
- Loc loc;
-
- this (string msg, Throwable next=null, string file=__FILE__, usize line=__LINE__) pure nothrow @safe @nogc { super(msg, file, line, next); }
- this (in Loc aloc, string msg, Throwable next=null, string file=__FILE__, usize line=__LINE__) pure nothrow @safe @nogc { loc = aloc; super(msg, file, line, next); }
-}
-
-
-// ////////////////////////////////////////////////////////////////////////// //
-public struct Token {
-public:
- enum Type {
- EOF = -1,
- Id,
- Str,
- Num,
- Spec,
- }
-
-private:
- const(char)[] tkstr;
-
-public:
- Loc loc, eloc; // token start, token end (after last char)
- Type type = Type.EOF; // token type
- long num; // should be enough for everyone
-
-@safe:
- void mustbeType (Token.Type tp, string msg="identifier expected", string file=__FILE__, usize line=__LINE__) {
- pragma(inline, true);
- if (type != tp) throw new ErrorAt(loc, msg, null, file, line);
- }
- void mustbeId (string msg="identifier expected", string file=__FILE__, usize line=__LINE__) { pragma(inline, true); mustbeType(Type.Id, msg, file, line); }
- void mustbeStr (string msg="string expected", string file=__FILE__, usize line=__LINE__) { pragma(inline, true); mustbeType(Type.Str, msg, file, line); }
- void mustbeNum (string msg="number expected", string file=__FILE__, usize line=__LINE__) { pragma(inline, true); mustbeType(Type.Num, msg, file, line); }
- void mustbeSpec (string msg="punctuation expected", string file=__FILE__, usize line=__LINE__) { pragma(inline, true); mustbeType(Type.Spec, msg, file, line); }
-
- string toString () const @trusted {
- import std.string : format;
- final switch (type) with (Type) {
- case EOF: return "(%s,%d): <EOF>".format(loc.line, loc.col);
- case Id: return "(%s,%d): Id:%s".format(loc.line, loc.col, tkstr);
- case Str: return "(%s,%d): Str:%s".format(loc.line, loc.col, Lexer.quote(tkstr));
- case Num: return "(%s,%d): Num:%s".format(loc.line, loc.col, num);
- case Spec: return "(%s,%d): Spec:<%s>".format(loc.line, loc.col, tkstr);
- }
- assert(0);
- }
-
-nothrow:
- // get immutable string
- // this converts id to `string` via `.idup`, use with caution!
- // `.idup` is used to not anchor the whole source string
- @property string istr () const { pragma(inline, true); return (tkstr.length ? tkstr.idup : null); }
-
-const pure nothrow @nogc @property:
- const(char)[] str () { pragma(inline, true); return tkstr; }
- bool isId () { pragma(inline, true); return (type == Type.Id); }
- bool isStr () { pragma(inline, true); return (type == Type.Str); }
- bool isNum () { pragma(inline, true); return (type == Type.Num); }
- bool isSpec () { pragma(inline, true); return (type == Type.Spec); }
- bool isEOF () { pragma(inline, true); return (type == Type.EOF); }
-}
-
-
-// ////////////////////////////////////////////////////////////////////////// //
-public final class Lexer {
-private:
- const(char)[] text;
- uint tpos;
- Loc cpos; // position for last `getChar()`
- Loc pend; // end of previous token, for better error messages
- bool eof;
- bool lastWasEOL = true;
- Token[] lookup;
- Token tokeof; // will be fixed by `nextToken()`
-
-public:
- this(T) (const(char)[] atext, T afname=null) if (is(T : const(char)[])) {
- text = atext;
- if (afname.length > 0) { static if (is(T == string)) cpos.file = afname; else cpos.file = afname.idup; }
- tokeof.loc.file = cpos.file;
- nextToken();
- pend.line = 1;
- pend.col = 1;
- pend.tpos = 0;
- }
-
- void error (string msg, string file=__FILE__, usize line=__LINE__) {
- pragma(inline, true);
- throw new ErrorAt((lookup.length == 0 ? loc : lookup[0].loc), msg, null, file, line);
- }
-
- static private void error (in ref Token tk, string msg, string file=__FILE__, usize line=__LINE__) {
- pragma(inline, true);
- throw new ErrorAt(tk.loc, msg, null, file, line);
- }
-
- static private void error() (in auto ref Loc loc, string msg, string file=__FILE__, usize line=__LINE__) {
- pragma(inline, true);
- throw new ErrorAt(loc, msg, null, file, line);
- }
-
- const(char)[] line (uint idx) {
- if (idx == 0) ++idx;
- uint pos = 0;
- while (--idx > 0) {
- while (pos < text.length && text.ptr[pos] != '\n') ++pos;
- ++pos;
- }
- if (pos >= text.length) return null;
- uint epos = pos;
- while (epos < text.length && text.ptr[epos] != '\n') ++epos;
- while (epos > pos && text.ptr[epos-1] <= ' ') --epos;
- return text[pos..epos];
- }
-
- void popFront () {
- if (lookup.length > 0) {
- pend = lookup.ptr[0].eloc;
- ++pend.col; // for better error messages
- ++pend.tpos; // to be consistent
- foreach (immutable idx; 1..lookup.length) lookup.ptr[idx-1] = lookup.ptr[idx];
- lookup.length -= 1;
- lookup.assumeSafeAppend;
- }
- nextToken();
- }
-
- @property pure nothrow @safe @nogc {
- bool empty () const { pragma(inline, true); return (lookup.length == 0); }
- ref inout(Token) front () inout { pragma(inline, true); return (lookup.length ? lookup.ptr[0] : tokeof); }
- // current token's loc
- auto loc () inout { pragma(inline, true); return front.loc; }
- auto eloc () inout { pragma(inline, true); return front.eloc; }
- auto peloc () inout { pragma(inline, true); return pend; }
-
- bool isId () const { pragma(inline, true); return front.isId; }
- bool isStr () const { pragma(inline, true); return front.isStr; }
- bool isNum () const { pragma(inline, true); return front.isNum; }
- bool isSpec () const { pragma(inline, true); return front.isSpec; }
- }
-
- // this eats identifier
- void expect (const(char)[] id, string file=__FILE__, usize line=__LINE__) {
- if (!front.isId || front.str != id) error(loc, "`"~id.idup~"` expected", file, line);
- popFront();
- }
-
- // this eats identifier
- void expectCI (const(char)[] id, string file=__FILE__, usize line=__LINE__) {
- if (front.isId && id.length == front.str.length) {
- bool ok = true;
- foreach (immutable idx, char ch; front.str) {
- if (ch >= 'A' && ch <= 'Z') ch += 32; // poor man's `tolower()`
- char c1 = id[idx];
- if (c1 >= 'A' && c1 <= 'Z') c1 += 32; // poor man's `tolower()`
- if (ch != c1) { ok = false; break; }
- }
- if (ok) { popFront(); return; }
- }
- error(loc, "`"~id.idup~"` expected", file, line);
- }
-
- auto expectSpec (string msg="punctuation expected", string file=__FILE__, usize line=__LINE__) {
- mustbeSpec(msg, file, line);
- auto res = lookup[0].str;
- popFront();
- return res;
- }
-
- // this converts id to `string` via `.idup`, use with caution!
- // `.idup` is used to not anchor the whole source string
- string expectId (string msg="identifier expected", string file=__FILE__, usize line=__LINE__) {
- mustbeId(msg, file, line);
- auto res = lookup[0].istr;
- popFront();
- return res;
- }
-
- // this converts id to `string` via `.idup`, use with caution!
- // `.idup` is used to not anchor the whole source string
- string expectStr (string msg="string expected", string file=__FILE__, usize line=__LINE__) {
- //pragma(inline, true);
- mustbeStr(msg, file, line);
- auto res = lookup[0].istr;
- popFront();
- return res;
- }
-
- // `mustbe` doesn't eat token
- void mustbeType (Token.Type tp, string msg="identifier expected", string file=__FILE__, usize line=__LINE__) { pragma(inline, true); return front.mustbeType(tp, msg, file, line); }
- void mustbeId (string msg="identifier expected", string file=__FILE__, usize line=__LINE__) { pragma(inline, true); return front.mustbeId(msg, file, line); }
- void mustbeStr (string msg="string expected", string file=__FILE__, usize line=__LINE__) { pragma(inline, true); return front.mustbeStr(msg, file, line); }
- void mustbeNum (string msg="number expected", string file=__FILE__, usize line=__LINE__) { pragma(inline, true); return front.mustbeNum(msg, file, line); }
- void mustbeSpec (string msg="punctuation expected", string file=__FILE__, usize line=__LINE__) { pragma(inline, true); return front.mustbeSpec(msg, file, line); }
-
- bool eat (const(char)[] id) {
- if (front.isId && front.str == id) { popFront(); return true; }
- return false;
- }
-
- const(char)[] eatCI (const(char)[] id) {
- if (front.isId && id.length == front.str.length) {
- bool ok = true;
- foreach (immutable idx, char ch; front.str) {
- if (ch >= 'A' && ch <= 'Z') ch += 32; // poor man's `tolower()`
- char c1 = id[idx];
- if (c1 >= 'A' && c1 <= 'Z') c1 += 32; // poor man's `tolower()`
- if (ch != c1) { ok = false; break; }
- }
- if (ok) { auto res = front.str; popFront(); return res; }
- }
- return null;
- }
-
- ref Token peek (uint dist) {
- while (!eof && lookup.length <= dist) nextToken();
- return (dist < lookup.length ? lookup.ptr[dist] : tokeof);
- }
-
- ref Token opIndex (usize dist) { pragma(inline, true); return peek(dist); }
-
- // return loc for next `getChar()`
- Loc nextLoc () nothrow @safe @nogc {
- Loc res = cpos;
- if (lastWasEOL) { ++res.line; res.col = 1; } else ++res.col;
- return res;
- }
-
- char peekChar (uint dist=0) nothrow @trusted @nogc {
- pragma(inline, true);
- return (tpos+dist >= text.length ? '\0' : (text.ptr[tpos+dist] ? text.ptr[tpos+dist] : ' '));
- }
-
- // return char or 0
- char getChar () nothrow @trusted @nogc {
- if (tpos >= text.length) { tpos = text.length; eof = true; }
- if (eof) return '\0';
- cpos.tpos = tpos;
- char ch = text.ptr[tpos++];
- if (ch == '\0') ch = ' ';
- if (lastWasEOL) { ++cpos.line; cpos.col = 1; } else ++cpos.col;
- lastWasEOL = (ch == '\n');
- return ch;
- }
-
- // skip blanks and comments
- //TODO: make special "comment" token(s)?
- void skipBlanks () @safe {
- for (;;) {
- char ch = peekChar;
- if (ch == '/' && peekChar(1) == '/') {
- // single-line comment
- do { ch = getChar(); } while (ch != 0 && ch != '\n');
- continue;
- } else if (ch == '(' && peekChar(1) == '*') {
- getChar(); // skip starting char
- auto lc = cpos;
- getChar(); // skip star
- char pch = ' ';
- ch = ' '; // we need this
- for (;;) {
- pch = ch;
- ch = getChar();
- if (ch == 0) error(lc, "unterminated comment");
- if (ch == ')' && pch == '*') break;
- }
- continue;
- } else if (ch == '{') {
- getChar(); // skip starting char
- auto lc = cpos;
- do {
- ch = getChar();
- if (ch == 0) error(lc, "unterminated comment");
- } while (ch != '}');
- continue;
- }
- if (ch == 0 || ch > 32) return;
- getChar();
- }
- }
-
- private void nextToken () {
- if (eof) return;
-
- skipBlanks();
- if (peekChar == '\0') {
- eof = true;
- tokeof.loc = cpos;
- tokeof.eloc = cpos;
- return;
- }
-
- Token tk;
- auto tkspos = tpos;
- char ch = getChar();
- tk.loc = cpos;
-
- // quoted string
- if (ch == '"' || ch == '\'') {
- char ech = ch;
- tk.type = Token.Type.Str;
- ++tkspos; // skip quote
- for (;;) {
- ch = getChar();
- if (ch == 0) error(tk, "unterminated string");
- if (ch == ech) break;
- }
- tk.tkstr = text[tkspos..tpos-1]; // -1 due to eaten quote
- tk.eloc = cpos;
- lookup ~= tk;
- return;
- }
-
- // hex number
- if (ch == '$') {
- long n = 0;
- tk.type = Token.Type.Num;
- getChar(); // skip dollar
- int dv = digitValue(peekChar);
- if (dv < 0 || dv > 15) error(tk, "hex number expected");
- for (;;) {
- dv = digitValue(peekChar);
- if (dv < 0 || dv > 15) break;
- n = n*16+dv;
- getChar();
- }
- ch = peekChar;
- if (isIdChar(ch) || ch == '.') error(tk, "hex number expected");
- tk.num = n;
- tk.tkstr = text[tkspos..tpos];
- tk.eloc = cpos;
- lookup ~= tk;
- return;
- }
-
- // number
- if (isDigit(ch)) {
- long n = ch-'0';
- tk.type = Token.Type.Num;
- for (;;) {
- if (!isDigit(peekChar)) break;
- ch = getChar();
- n = n*10+ch-'0';
- }
- tk.num = n;
- tk.tkstr = text[tkspos..tpos];
- tk.eloc = cpos;
- ch = peekChar;
- if (isIdChar(ch)) error(tk, "invalid number");
- lookup ~= tk;
- return;
- }
-
- // identifier
- if (isIdStart(ch)) {
- tk.type = Token.Type.Id;
- while (isIdChar(peekChar)) getChar();
- tk.tkstr = text[tkspos..tpos];
- tk.eloc = cpos;
- lookup ~= tk;
- return;
- }
-
- static immutable string[9] longSpecs = [
- "<=",
- ">=",
- ":=",
- "<>",
- "+=",
- "-=",
- "*=",
- "/=",
- "..",
- ];
- enum MaxSpecLength = {
- int ml = 0;
- foreach (string s; longSpecs) if (s.length > ml) ml = cast(int)s.length;
- return ml;
- }();
-
- // delimiter
- char[MaxSpecLength] dbuf;
- dbuf[0] = ch;
- uint len = 0;
- for (;;) {
- ch = dbuf[len];
- bool found = false;
- foreach (string s; longSpecs) if (len < s.length && s[len] == ch) { found = true; break; }
- if (!found) break;
- if (len > 0) getChar(); // this char should be eaten
- if (++len >= MaxSpecLength) break;
- dbuf[len] = peekChar(0);
- }
- tk.type = Token.Type.Spec;
- tk.tkstr = text[tkspos..tpos];
- tk.eloc = cpos;
- lookup ~= tk;
- }
-
- auto select(RetType, string mode="peek", A...) (scope A args) { pragma(inline, true); return selectN!(RetType, mode)(0, args); }
-
- auto selectN(RetType, string mode="peek", A...) (usize n, scope A args) {
- import std.traits : ReturnType;
-
- static assert(mode == "peek" || mode == "pop" || mode == "pop-nondefault", "selectN: invalid mode: '"~mode~"'");
-
- template isGoodDg(usize idx, T) {
- private import std.traits;
- static if (idx < A.length && isCallable!(A[idx]) && arity!(args[idx]) == 1) {
- enum isGoodDg = is(Parameters!(A[idx])[0] == T);
- } else {
- enum isGoodDg = false;
- }
- }
-
- template isGoodArglessDg(usize idx) {
- private import std.traits;
- static if (idx < A.length && isCallable!(A[idx]) && arity!(args[idx]) == 0) {
- enum isGoodArglessDg = true;
- } else {
- enum isGoodArglessDg = false;
- }
- }
-
- // sorry, but this has to be string mixin, due to possible empty `arg`
- enum DoCallDg(string arg) =
- "static if (!is(ReturnType!(A[xidx]) == void)) return cast(RetType)(args[xidx]("~arg~")); else { args[xidx]("~arg~"); return RetType.init; }";
-
- // we can't have inner mixin templates, so... sorry, it's string again
- enum CallDg = q{
- static if (isGoodDg!(xidx, Token)) { mixin(DoCallDg!"tk"); }
- else static if (isGoodDg!(xidx, Loc)) { mixin(DoCallDg!"tk.loc"); }
- else static if (isGoodDg!(xidx, Token.Type)) { mixin(DoCallDg!"tk.type"); }
- else static if (isGoodDg!(xidx, Keyword)) { mixin(DoCallDg!"tk.Kw"); }
- else static if (isGoodArglessDg!(xidx)) { mixin(DoCallDg!""); }
- else static assert(0, "selectN: invalid delegate #"~xidx.stringof);
- };
-
- auto tk = peek(n);
- bool found = false;
- foreach (immutable aidx, immutable arg; args) {
- static if (aidx%2 == 0) {
- static if (is(typeof(arg) == Keyword) || is(typeof(arg) == Token.Type)) {
- static if (is(typeof(arg) == Keyword)) found = (tk == arg);
- else static if (is(typeof(arg) == Token.Type)) found = (tk.type == arg);
- else static assert(0, "wtf?!");
- if (found) {
- // process `mode`
- static if (mode != "peek") popFront();
- // call delegate
- enum xidx = aidx+1;
- mixin(CallDg);
- }
- } else {
- // default
- // process `mode`
- static if (mode == "pop") popFront();
- // call delegate
- enum xidx = aidx;
- mixin(CallDg);
- }
- }
- }
- error(tk, "selectN is out of nodes");
- assert(0);
- }
-
-static:
- private immutable byte[256] digitValues = {
- byte[256] res = -1;
- foreach (ubyte idx; '0'..'9'+1) res[idx] = cast(byte)(idx-'0');
- foreach (ubyte idx; 'A'..'Z'+1) res[idx] = cast(byte)(idx-'A'+10);
- foreach (ubyte idx; 'a'..'z'+1) res[idx] = cast(byte)(idx-'a'+10);
- return res;
- }();
-
- private immutable bool[256] idStartChars = {
- bool[256] res = false;
- foreach (ubyte idx; 'A'..'Z'+1) res[idx] = true;
- foreach (ubyte idx; 'a'..'z'+1) res[idx] = true;
- res['_'] = true;
- return res;
- }();
-
- private immutable bool[256] idChars = {
- bool[256] res = false;
- foreach (ubyte idx; '0'..'9'+1) res[idx] = true;
- foreach (ubyte idx; 'A'..'Z'+1) res[idx] = true;
- foreach (ubyte idx; 'a'..'z'+1) res[idx] = true;
- res['_'] = true;
- return res;
- }();
-
- bool isDigit() (char ch) { pragma(inline, true); return (ch >= '0' && ch <= '9'); }
- int digitValue() (char ch) { pragma(inline, true); return digitValues.ptr[cast(ubyte)ch]; }
- bool isIdStart() (char ch) { pragma(inline, true); return idStartChars.ptr[cast(ubyte)ch]; }
- bool isIdChar() (char ch) { pragma(inline, true); return idChars.ptr[cast(ubyte)ch]; }
-
- string gmlQuote (const(char)[] s) {
- import std.array : appender;
- auto res = appender!string();
- enum Prev { Nothing, Char, Spec }
- Prev prev = Prev.Nothing;
- foreach (char ch; s) {
- if (ch < ' ' || ch == 127 || ch == '"') {
- import std.conv : to;
- final switch (prev) with (Prev) {
- case Nothing: break;
- case Char: res.put(`"+`); break;
- case Spec: res.put(`+`); break;
- }
- prev = Prev.Spec;
- res.put("chr(");
- res.put(to!string(cast(uint)ch));
- res.put(")");
- } else {
- final switch (prev) with (Prev) {
- case Nothing: res.put('"'); break;
- case Char: break;
- case Spec: res.put(`+"`); break;
- }
- prev = Prev.Char;
- res.put(ch);
- }
- }
- if (prev == Prev.Nothing) return `""`;
- if (prev == Prev.Char) res.put('"');
- return res.data;
- }
-
- /// quote string: append double quotes, screen all special chars;
- /// so quoted string forms valid D string literal.
- /// allocates.
- string quote (const(char)[] s) {
- import std.array : appender;
- import std.format : formatElement, FormatSpec;
- auto res = appender!string();
- FormatSpec!char fspc; // defaults to 's'
- formatElement(res, s, fspc);
- return res.data;
- }
-}
-
-
-version(lexer_test) unittest {
- import std.file;
- import std.stdio;
- //enum FName = "z00.txt";
- enum FName = "shared/MAPDEF.pas";
- string s;
- {
- auto fl = File(FName);
- auto buf = new char[](cast(uint)fl.size);
- fl.rawRead(buf[]);
- s = cast(string)buf;
- }
- auto lex = new Lexer(s, FName);
- try {
- while (!lex.empty) {
- writeln(lex.front);
- lex.popFront();
- }
- } catch (ErrorAt e) {
- writeln("PARSE ERROR: ", e.line);
- writeln(e.loc);
- }
-}
diff --git a/src/tools/mapiogen/mapiogen.d b/src/tools/mapiogen/mapiogen.d
--- a/src/tools/mapiogen/mapiogen.d
+++ /dev/null
@@ -1,679 +0,0 @@
-/* coded by Ketmar // Invisible Vector <ketmar@ketmar.no-ip.org>
- * Understanding is not required. Only obedience.
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU General Public License as published by
- * the Free Software Foundation, either version 3 of the License, or
- * (at your option) any later version.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU General Public License for more details.
- *
- * You should have received a copy of the GNU General Public License
- * along with this program. If not, see <http://www.gnu.org/licenses/>.
- */
-module mapiogen;
-static assert(__VERSION__ >= 2071, "you need as least DMD 2.071 to compile this code");
-
-import std.stdio;
-
-import lexer;
-
-
-// ////////////////////////////////////////////////////////////////////////// //
-bool useDelphiAlignment = false;
-ubyte[string] triggers;
-string[ubyte] trignums;
-
-
-// ////////////////////////////////////////////////////////////////////////// //
-struct Field {
- enum Type { Bytes, Chars, Integral, TPoint, Boolean }
-
- string name;
- string typename;
- uint size;
- uint ofs;
- Type type;
-}
-
-struct Record {
- string[] ids; // null: default
- Field[] fields;
- uint size; // max size
- bool normal;
-
- string getRWName () const {
- import std.string : capitalize;
- if (ids.length == 0) return "Default";
- if (normal) return ids[0].capitalize;
- return ids[0][8..$].capitalize;
- }
-
- // calc field offsets and record size
- void finalize (bool packed=false) {
- // calculate offsets
- uint ofs = 0;
- foreach (immutable idx, ref fld; fields) {
- fld.ofs = ofs;
- // delphi does this (roughly)
- if (!packed && fld.size != 1) {
- if (useDelphiAlignment && fld.type == Field.Type.TPoint) {
- } else {
- //ubyte pd = (fld.size > 4 ? 2 : fld.size > 2 ? 4 : 2);
- ubyte pd = (fld.size > 2 ? 4 : 2);
- if (fld.type == Field.Type.Chars && fld.size > 1) pd = 2;
- fld.ofs += ofs.padding(pd);
- }
- }
- ofs = fld.ofs+fld.size;
- }
- size = ofs;
- //if (fields.length > 0 && fields[$-1].size != 1 && fields[$-1].type == Field.Type.Integral) size += size.padding(2); // just in case
- }
-}
-
-
-Record[] tgrecords;
-Record[] records;
-
-
-// ////////////////////////////////////////////////////////////////////////// //
-// 0 128 Default (Byte128)
-void dumpRecord (in ref Record rec) {
- foreach (const ref fld; rec.fields) {
- writefln("%3s %3s %s (%s)", fld.ofs, fld.size, fld.name, fld.typename);
- }
-}
-
-
-void dumpRecords () { foreach (const ref rec; tgrecords) rec.dumpRecord(); }
-
-
-// ////////////////////////////////////////////////////////////////////////// //
-void genMisc (File fo) {
- fo.write(
-q{procedure getBytesAt (var dest; const buf; ofs, len: Integer);
-begin
- Move((PChar(@buf)+ofs)^, dest, len);
-end;
-
-procedure getWordAt (var dest; const buf; ofs: Integer);
-type PWord = ^Word; PByte = ^Byte;
-var
- p: PByte;
- d: PWord;
-begin
- p := PByte(@buf); Inc(p, ofs);
- d := PWord(@dest);
- d^ := p^;
- Inc(p);
- d^ := (d^) or ((p^) shl 8);
-end;
-
-procedure getIntAt (var dest; const buf; ofs: Integer);
-type PInt = ^LongWord; PByte = ^Byte;
-var
- p: PByte;
- d: PInt;
-begin
- p := PByte(@buf); Inc(p, ofs);
- d := PInt(@dest);
- d^ := p^;
- Inc(p);
- d^ := (d^) or ((p^) shl 8);
- Inc(p);
- d^ := (d^) or ((p^) shl 16);
- Inc(p);
- d^ := (d^) or ((p^) shl 24);
-end;
-
-procedure putBytesAt (var buf; ofs: Integer; const src; len: Integer);
-begin
- Move(src, (PChar(@buf)+ofs)^, len);
-end;
-
-procedure putWordAt (var buf; ofs: Integer; const src);
-type PWord = ^Word; PByte = ^Byte;
-var
- p: PByte;
- d: PWord;
-begin
- p := PByte(PChar(@buf)+ofs);
- d := PWord(@src);
- p^ := (d^) and $ff;
- Inc(p);
- p^ := ((d^) shr 8) and $ff;
-end;
-
-procedure putIntAt (var buf; ofs: Integer; const src);
-type PInt = ^LongWord; PByte = ^Byte;
-var
- p: PByte;
- d: PInt;
-begin
- p := PByte(PChar(@buf)+ofs);
- d := PInt(@src);
- p^ := (d^) and $ff;
- Inc(p);
- p^ := ((d^) shr 8) and $ff;
- Inc(p);
- p^ := ((d^) shr 16) and $ff;
- Inc(p);
- p^ := ((d^) shr 24) and $ff;
-end;
-
-});
-}
-
-
-void genReader (File fo, in ref Record rec) {
- fo.write(
- " procedure xread", rec.getRWName, " ();\n"~
- " begin\n"
- );
- foreach (const ref fld; rec.fields) {
- final switch (fld.type) {
- case Field.Type.Bytes:
- case Field.Type.Chars:
- case Field.Type.Boolean:
- fo.writeln(" getBytesAt(tr.", fld.name, ", buf, ", fld.ofs, ", ", fld.size, ");");
- break;
- case Field.Type.Integral:
- switch (fld.size) {
- case 1: fo.writeln(" getBytesAt(tr.", fld.name, ", buf, ", fld.ofs, ", ", fld.size, ");"); break;
- case 2: fo.writeln(" getWordAt(tr.", fld.name, ", buf, ", fld.ofs, ");"); break;
- case 4: fo.writeln(" getIntAt(tr.", fld.name, ", buf, ", fld.ofs, ");"); break;
- default: assert(0);
- }
- break;
- case Field.Type.TPoint:
- fo.writeln(" getIntAt(tr.", fld.name, ".x, buf, ", fld.ofs, ");");
- fo.writeln(" getIntAt(tr.", fld.name, ".y, buf, ", fld.ofs+4, ");");
- break;
- }
- }
- fo.writeln(" end;\n");
-}
-
-
-void genReaders (File fo) {
- fo.write(
- "procedure mb_Read_TriggerData (var tr: TTriggerData; ttype: Integer; const buf; bufsize: Integer);\n"
- );
- uint maxsize = 0;
- foreach (const ref rec; tgrecords) {
- if (rec.ids.length == 0) continue;
- if (rec.size > maxsize) maxsize = rec.size;
- fo.genReader(rec);
- }
- fo.write(
- "begin\n"~
- " if (bufsize < ", maxsize, ") then raise Exception.Create('invalid buffer size in mb_Read_TriggerData');\n"
- );
- foreach (const ref rec; tgrecords) {
- foreach (string id; rec.ids) {
- fo.writeln(" if (ttype = ", id, ") then begin xread", rec.getRWName, "(); exit; end;");
- }
- }
- fo.writeln(" raise Exception.Create('invalid trigger type in mb_Read_TriggerData');");
- fo.writeln("end;\n\n");
- foreach (ref rec; records) {
- assert(rec.normal);
- fo.writeln("procedure mb_Read_", rec.ids[0], " (var tr: ", rec.ids[0], "; const buf; bufsize: Integer);");
- fo.genReader(rec);
- fo.write(
- "begin\n"~
- " if (bufsize < ", rec.size, ") then raise Exception.Create('invalid buffer size in read", rec.ids[0], "');\n"
- " xread", rec.getRWName, "();\n"~
- "end;\n\n"
- );
- }
-}
-
-
-void genWriter (File fo, in ref Record rec) {
- fo.write(
- " procedure xwrite", rec.getRWName, " ();\n"~
- " begin\n"
- );
- foreach (const ref fld; rec.fields) {
- final switch (fld.type) {
- case Field.Type.Bytes:
- case Field.Type.Chars:
- case Field.Type.Boolean:
- fo.writeln(" putBytesAt(buf, ", fld.ofs, ", tr.", fld.name, ", ", fld.size, ");");
- break;
- case Field.Type.Integral:
- switch (fld.size) {
- case 1: fo.writeln(" putBytesAt(buf, ", fld.ofs, ", tr.", fld.name, ", ", fld.size, ");"); break;
- case 2: fo.writeln(" putWordAt(buf, ", fld.ofs, ", tr.", fld.name, ");"); break;
- case 4: fo.writeln(" putIntAt(buf, ", fld.ofs, ", tr.", fld.name, ");"); break;
- default: assert(0);
- }
- break;
- case Field.Type.TPoint:
- fo.writeln(" putIntAt(buf, ", fld.ofs , ", tr.", fld.name, ".x);");
- fo.writeln(" putIntAt(buf, ", fld.ofs+4, ", tr.", fld.name, ".y);");
- break;
- }
- }
- fo.writeln(" end;\n");
-}
-
-
-void genWriters (File fo) {
- fo.write(
- "procedure mb_Write_TriggerData (var buf; bufsize: Integer; ttype: Integer; var tr: TTriggerData);\n"
- );
- uint maxsize = 0;
- foreach (const ref rec; tgrecords) {
- assert(!rec.normal);
- if (rec.ids.length == 0) continue;
- if (rec.size > maxsize) maxsize = rec.size;
- fo.genWriter(rec);
- }
- fo.write(
- "begin\n"~
- " if (bufsize < ", maxsize, ") then raise Exception.Create('invalid buffer size in mb_Write_TriggerData');\n"
- );
- foreach (const ref rec; tgrecords) {
- foreach (string id; rec.ids) {
- fo.writeln(" if (ttype = ", id, ") then begin xwrite", rec.getRWName, "(); exit; end;");
- }
- }
- fo.writeln(" raise Exception.Create('invalid trigger type in mb_Write_TriggerData');");
- fo.writeln("end;\n\n");
- foreach (ref rec; records) {
- assert(rec.normal);
- fo.writeln("procedure mb_Write_", rec.ids[0], " (var buf; bufsize: Integer; var tr: ", rec.ids[0], ");");
- fo.genWriter(rec);
- fo.write(
- "begin\n"~
- " if (bufsize < ", rec.size, ") then raise Exception.Create('invalid buffer size in write", rec.ids[0], "');\n"
- " xwrite", rec.getRWName, "();\n"~
- "end;\n\n"
- );
- }
-}
-
-
-// ////////////////////////////////////////////////////////////////////////// //
-void printCaret (Lexer lex, Loc loc, File ofile=stdout) {
- auto line = lex.line(loc.line);
- if (line.length == 0) return;
- ofile.writeln(line);
- foreach (immutable _; 1..loc.col) ofile.write(' ');
- ofile.writeln('^');
-}
-
-
-// ////////////////////////////////////////////////////////////////////////// //
-ubyte padding (uint size, ubyte alg) {
- uint nsz = (size+alg-1)/alg*alg;
- return cast(ubyte)(nsz-size);
-}
-
-
-// ////////////////////////////////////////////////////////////////////////// //
-void parseType (ref Field fld, const(char)[] typestr, Lexer lex) {
- import std.algorithm : startsWith;
- import std.string : toLower;
- auto type = typestr.toLower;
- if (type.startsWith("byte") || type.startsWith("char")) {
- import std.conv : to;
- fld.type = (type[0] == 'b' ? Field.Type.Bytes : Field.Type.Chars);
- if (type.length == 4) {
- fld.size = 1;
- return;
- }
- try {
- auto sz = to!uint(type[4..$]);
- if (sz < 1 || sz > 32767) throw new Exception("invalid size");
- fld.size = sz;
- return;
- } catch (Exception) {}
- } else if (type == "tpoint") {
- fld.type = Field.Type.TPoint;
- fld.size = 4*2;
- return;
- } else if (type == "boolean") {
- fld.type = Field.Type.Boolean;
- fld.size = 1;
- return;
- } else if (type == "integer") {
- fld.type = Field.Type.Integral;
- fld.size = 4;
- return;
- } else if (type == "word") {
- fld.type = Field.Type.Integral;
- fld.size = 2;
- return;
- } else if (type == "shortint") {
- fld.type = Field.Type.Integral;
- fld.size = 1;
- return;
- }
- lex.error("invalid type: '"~typestr.idup~"'");
-}
-
-
-/*
-(TargetPoint: TPoint;
- d2d_teleport: Boolean;
- silent_teleport: Boolean;
- TlpDir: Byte);
-*/
-Field[] parseFields (Lexer lex) {
- Field[] res;
- if (!lex.isSpec || lex.front.str != "(") lex.error("'(' expected");
- lex.popFront();
- for (;;) {
- if (lex.isSpec && lex.front.str == ")") { lex.popFront(); break; }
- string[] names;
- for (;;) {
- names ~= lex.expectId();
- if (lex.isSpec && lex.front.str == ":") break;
- if (lex.isSpec && lex.front.str == ",") { lex.popFront(); continue; }
- lex.error("':' expected");
- }
- if (!lex.isSpec || lex.front.str != ":") lex.error("':' expected");
- lex.popFront();
- auto type = lex.expectId();
- //writeln(" ", names[], ": <", type, ">");
- foreach (string name; names) {
- Field fld;
- fld.name = name;
- fld.typename = type;
- fld.parseType(type, lex);
- res ~= fld;
- }
- if (!lex.isSpec) lex.error("';' or ')' expected");
- if (lex.front.str == ";") { lex.popFront(); continue; }
- if (lex.front.str != ")") lex.error("';' or ')' expected");
- }
- if (lex.isSpec && lex.front.str == ";") lex.popFront();
- return res;
-}
-
-
-/*
-TargetPoint: TPoint;
- d2d_teleport: Boolean;
- silent_teleport: Boolean;
- TlpDir: Byte;
- end;
-*/
-Field[] parseRecFields (Lexer lex) {
- Field[] res;
- for (;;) {
- if (lex.eatCI("end") !is null) break;
- string[] names;
- for (;;) {
- names ~= lex.expectId();
- if (lex.isSpec && lex.front.str == ":") break;
- if (lex.isSpec && lex.front.str == ",") { lex.popFront(); continue; }
- lex.error("':' expected");
- }
- if (!lex.isSpec || lex.front.str != ":") lex.error("':' expected");
- lex.popFront();
- auto type = lex.expectId();
- foreach (string name; names) {
- Field fld;
- fld.name = name;
- fld.typename = type;
- fld.parseType(type, lex);
- res ~= fld;
- }
- if (lex.eatCI("end") !is null) break;
- if (!lex.isSpec || lex.front.str != ";") lex.error("';' expected");
- lex.popFront();
- }
- return res;
-}
-
-
-// ////////////////////////////////////////////////////////////////////////// //
-bool isGoodTriggerName (const(char)[] id) {
- import std.algorithm : startsWith;
- import std.string : indexOf;
- if (!id.startsWith("TRIGGER_")) return false;
- if (id == "TRIGGER_MAX") return false;
- if (id[8..$].indexOf('_') >= 0) return false;
- return true;
-}
-
-
-// ////////////////////////////////////////////////////////////////////////// //
-void parseMapDef (string fname) {
- import std.string : format, toLower, toUpper;
- Lexer lex;
- {
- auto fl = File(fname);
- auto buf = new char[](cast(uint)fl.size);
- fl.rawRead(buf[]);
- lex = new Lexer(cast(string)buf, fname);
- }
- // find "interface"
- while (!lex.empty) {
- if (!lex.front.isId) { lex.popFront(); continue; }
- if (lex.front.str.toLower == "interface") break;
- lex.popFront();
- }
- if (lex.empty) throw new Exception("where is my interface?!");
- enum Section { Unknown, Const, Type }
- Section section;
- while (!lex.empty) {
- if (lex.front.isId) {
- auto kw = lex.front.str.toLower;
- if (kw == "implementation") break;
- if (kw == "const") {
- //writeln("CONST!");
- section = Section.Const;
- lex.popFront();
- continue;
- }
- if (kw == "type") {
- //writeln("TYPE!");
- section = Section.Type;
- lex.popFront();
- continue;
- }
- }
- if (section == Section.Const) {
- if (!lex.isId) lex.error("identifier expected");
- auto id = lex.front.istr.toUpper;
- lex.popFront();
- auto lc = lex.loc;
- if (lex.expectSpec() != "=") lex.error(lc, "'=' expected");
- if (isGoodTriggerName(id)) {
- lex.mustbeNum();
- auto lcn = lex.loc;
- auto n = lex.front.num;
- lex.popFront();
- if (n < 0 || n > 255) lex.error(lcn, "invalid value (%s) for '%s'".format(n, id));
- auto b = cast(ubyte)n;
- if (id in triggers) lex.error(lc, "duplicate constant '%s'".format(id));
- if (auto tg = b in trignums) lex.error(lcn, "same value (%s) for triggers '%s' and '%s'".format(n, id, *tg));
- triggers[id] = b;
- trignums[b] = id;
- //writeln("trigger: ", id, " (", b, ")");
- } else {
- while (!lex.empty) {
- if (lex.front.isSpec && lex.front.str == ";") break;
- lex.popFront();
- }
- }
- lc = lex.loc;
- if (lex.expectSpec() != ";") lex.error(lc, "';' expected");
- continue;
- }
- if (section == Section.Type) {
- if (!lex.isId) lex.error("identifier expected");
- auto id = lex.front.istr.toUpper;
- lex.popFront();
- auto lc = lex.loc;
- if (lex.expectSpec() != "=") lex.error(lc, "'=' expected");
- //writeln("id: ", id);
- if (id != "TTRIGGERDATA") {
- // skip definition
- while (!lex.empty) {
- if (lex.eatCI("end") !is null) break;
- lex.popFront();
- }
- lc = lex.loc;
- if (lex.expectSpec() != ";") lex.error(lc, "';' expected");
- continue;
- } else {
- lex.expectCI("record");
- lex.expectCI("case");
- lex.expectCI("byte");
- lex.expectCI("of");
- // now parse defs
- for (;;) {
- if (lex.eatCI("end") !is null) break;
- string[] ids;
- Field[] fields;
- if (lex.isNum) {
- if (lex.front.num != 0) lex.error(lc, "'0' expected");
- lex.popFront();
- if (!lex.isSpec || lex.front.str != ":") lex.error("':' expected");
- lex.popFront();
- //writeln("=== DEFAULT ===");
- ids = null;
- fields = lex.parseFields();
- } else {
- for (;;) {
- ids ~= lex.expectId();
- if (lex.isSpec && lex.front.str == ":") { lex.popFront(); break; }
- if (lex.isSpec && lex.front.str == ",") { lex.popFront(); continue; }
- lex.error("',' or ':' expected");
- }
- //writeln("=== ", ids[], " ===");
- fields = lex.parseFields();
- }
- tgrecords ~= Record(ids, fields);
- tgrecords[$-1].finalize;
- //writeln("=== ", ids[], " === : ", rcsize);
- }
- lc = lex.loc;
- if (lex.expectSpec() != ";") lex.error(lc, "';' expected");
- break; // we are done
- }
- }
- lex.popFront();
- continue;
- }
-}
-
-
-// ////////////////////////////////////////////////////////////////////////// //
-void parseMapStruct (string fname) {
- import std.string : format, toLower, toUpper;
-
- static bool isGoodRecName (const(char)[] id) {
- import std.algorithm : startsWith, endsWith;
- import std.string : indexOf, toUpper;
- id = id.toUpper;
- if (!id.startsWith("T") || !id.endsWith("_1")) return false;
- return true;
- }
-
- static bool isGoodDef (Lexer lex) {
- import std.string : toLower;
- auto tk = lex.peek(1);
- if (!tk.isSpec || tk.str != "=") return false;
- tk = lex.peek(2);
- if (!tk.isId || tk.str.toLower != "packed") return false;
- tk = lex.peek(3);
- if (!tk.isId || tk.str.toLower != "record") return false;
- return true;
- }
-
- Lexer lex;
- {
- auto fl = File(fname);
- auto buf = new char[](cast(uint)fl.size);
- fl.rawRead(buf[]);
- lex = new Lexer(cast(string)buf, fname);
- }
- // find "interface"
- while (!lex.empty) {
- if (!lex.front.isId) { lex.popFront(); continue; }
- if (lex.front.str.toLower == "interface") break;
- lex.popFront();
- }
- if (lex.empty) throw new Exception("where is my interface?!");
- enum Section { Unknown, Type }
- Section section;
- while (!lex.empty) {
- if (lex.front.isId) {
- auto kw = lex.front.str.toLower;
- if (kw == "implementation") break;
- if (kw == "type") {
- section = Section.Type;
- lex.popFront();
- continue;
- }
- }
- if (section == Section.Type) {
- if (lex.isId && isGoodRecName(lex.front.str) && isGoodDef(lex)) {
- string origId = lex.front.istr;
- lex.popFront();
- lex.popFront(); // skip "="
- lex.expectCI("packed");
- lex.expectCI("record");
- // now parse fields
- Record rec;
- rec.ids ~= origId;
- rec.fields = lex.parseRecFields();
- rec.normal = true;
- rec.finalize(true);
- records ~= rec;
- {
- auto lc = lex.loc;
- if (lex.expectSpec() != ";") lex.error(lc, "';' expected");
- }
- continue;
- }
- }
- lex.popFront();
- continue;
- }
-}
-
-
-// ////////////////////////////////////////////////////////////////////////// //
-void main () {
- try {
- parseMapDef("../../shared/MAPDEF.pas");
- parseMapStruct("../../shared/MAPSTRUCT.pas");
- debug {
- dumpRecords();
- } else {
- {
- auto fo = File("mapstructio.inc", "w");
- fo.genMisc();
- fo.genReaders();
- fo.genWriters();
- }
- {
- auto fo = File("mapstructsizes.inc", "w");
- fo.writeln("const");
- foreach (ref rec; records) fo.writeln(" SizeOf_", rec.ids[0], " = ", rec.size, ";");
- fo.writeln();
- fo.writeln("procedure mb_Read_TriggerData (var tr: TTriggerData; ttype: Integer; const buf; bufsize: Integer);");
- fo.writeln("procedure mb_Write_TriggerData (var buf; bufsize: Integer; ttype: Integer; var tr: TTriggerData);");
- foreach (ref rec; records) {
- fo.writeln("procedure mb_Read_", rec.ids[0], " (var tr: ", rec.ids[0], "; const buf; bufsize: Integer);");
- fo.writeln("procedure mb_Write_", rec.ids[0], " (var buf; bufsize: Integer; var tr: ", rec.ids[0], ");");
- }
- }
- }
- } catch (ErrorAt e) {
- writeln("PARSE ERROR: ", e.loc, ": ", e.msg);
- //lex.printCaret(e.loc);
- }
-}
diff --git a/src/sfs/wadcvt.dpr b/src/tools/wadcvt.dpr
similarity index 99%
rename from src/sfs/wadcvt.dpr
rename to src/tools/wadcvt.dpr
index 8d02803a8f82b23e903da72fc57113ffe7fda427..452d549ccacc809cbc4c405b050ed69882c02d5c 100644 (file)
--- a/src/sfs/wadcvt.dpr
+++ b/src/tools/wadcvt.dpr
Classes,
utils in '../shared/utils.pas',
xstreams in '../shared/xstreams.pas',
+ xparser in '../shared/xparser.pas',
+ xdynrec in '../shared/xdynrec.pas',
crc,
- sfs,
- sfsPlainFS,
- sfsZipFS,
+ sfs in '../sfs/sfs.pas',
+ sfsPlainFS in '../sfs/sfsPlainFS.pas',
+ sfsZipFS in '../sfs/sfsZipFS.pas',
paszlib,
wadreader in '../shared/wadreader.pas',
conbuf in '../shared/conbuf.pas',
BinEditor in '../shared/BinEditor.pas',
- MAPSTRUCT in '../shared/MAPSTRUCT.pas',
MAPDEF in '../shared/MAPDEF.pas',
CONFIG in '../shared/CONFIG.pas',
e_log in '../engine/e_log.pas',