Compare commits
8 Commits
| Author | SHA1 | Date |
|---|---|---|
| | 86997e60c6 | |
| | 22764e18e3 | |
| | c4bb89dac6 | |
| | 32bdbd7d44 | |
| | d5ff351bec | |
| | 778155c9a8 | |
| | b7c390187b | |
| | 3410ce7baa | |
@@ -6,6 +6,10 @@ Portable narrative scripting language based on Ink.
 Hank is a more portable answer to Inkle's open-source [Ink](http://github.com/inkle/ink)
 engine. It is currently just a proof of concept, but you may use it at your own risk.
 
+## Install
+
+Hank currently requires Haxe 3.4.7. I recommend using [hvm](https://github.com/dpeek/hvm) and typing `hvm use haxe 3.4.7`.
+
 ## Comparison with Ink and Inkjs
 
 |Feature|Ink|Inkjs|Hank|
hank/HankLexer.hx (new file, 145 lines)
@@ -0,0 +1,145 @@
// This file is based on @Zicklag's gist: https://gist.github.com/zicklag/c2a6060452759ce13864e43135e856f3
package hank;

import hxparse.Lexer;
import hxparse.RuleBuilder;

enum HankToken {
    // Brackets
    TParenOpen;
    TParenClose;
    TSquareOpen;
    TSquareClose;
    TCurlyOpen;
    TCurlyClose;
    // Symbols
    TStar;
    TPlus;
    TDash;
    TBang;
    TTilde;
    TArrow;
    TEqual;
    TDoubleEqual;
    TTripleEqual;
    TGlue;
    TLineComment(s:String);
    TBlockComment(s:String);
    TNewline;
    // HInterface
    TBacktick;
    TTripleBacktick;
    TComma;
    TTripleComma;
    // Other
    TInclude(p:String);
    TWord(w:String);
    TEof;
}

class N {
    public static var n = 0;
}

/**
 Lexer for valid tokens inside a Hank script.
**/
class HankLexer extends Lexer implements RuleBuilder {
    // Shared buffer the sub-rulesets below use to accumulate multi-character token text.
    static var buf: StringBuf;

    public static var tok = @:rule [
        // Brackets
        "\\(" => TParenOpen,
        "\\)" => TParenClose,
        "[" => TSquareOpen,
        "]" => TSquareClose,
        "{" => TCurlyOpen,
        "}" => TCurlyClose,
        // Symbols
        "\\*" => TStar,
        "+" => TPlus,
        "-" => TDash,
        "!" => TBang,
        "~" => TTilde,
        "->" => TArrow,
        "=" => TEqual,
        "==" => TDoubleEqual,
        "===" => TTripleEqual,
        "<>" => TGlue,
        "//" => {
            buf = new StringBuf();
            lexer.token(lineComment);
            TLineComment(buf.toString());
        },
        "/\\*" => {
            buf = new StringBuf();
            lexer.token(blockComment);
            TBlockComment(buf.toString());
        },
        "\n" => TNewline,
        // HInterface
        "`" => TBacktick,
        "```" => TTripleBacktick,
        "," => TComma,
        ",,," => TTripleComma,
        // Other
        "INCLUDE " => {
            buf = new StringBuf();
            lexer.token(include);
            TInclude(buf.toString());
        },
        "." => {
            buf = new StringBuf();
            lexer.token(word);
            TWord(buf.toString());
        },
        "" => TEof
    ];

    // Each sub-ruleset below consumes characters into buf until its terminator, then returns control to `tok`.
    public static var lineComment = @:rule [
        '\n' => {
            lexer.curPos().pmax;
        },
        '[^"]' => {
            buf.add(lexer.current);
            lexer.token(lineComment);
        }
    ];

    public static var blockComment = @:rule [
        '\\*/' => {
            lexer.curPos().pmax;
        },
        '[^"]' => {
            buf.add(lexer.current);
            lexer.token(blockComment);
        }
    ];

    public static var include = @:rule [
        '\n' => {
            lexer.curPos().pmax;
        },
        ' ' => {
            lexer.curPos().pmax;
        },
        '[^"]' => {
            buf.add(lexer.current);
            lexer.token(include);
        }
    ];

    public static var word = @:rule [
        '\n' => {
            lexer.curPos().pmax;
        },
        ' ' => {
            lexer.curPos().pmax;
        },
        '[^"]' => {
            buf.add(lexer.current);
            lexer.token(word);
        }
    ];
}
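An aside (not part of the diff): the `tok` ruleset above is meant to be driven through hxparse's `LexerTokenSource`, which is how `tests/HankLexerTest.hx` further down consumes it. Below is a minimal sketch of that pattern against an inline string; the class name and the source string are hypothetical, not taken from the commit.

```haxe
// Sketch only: drive HankLexer over an inline string instead of a file on disk.
import byte.ByteData;
import hxparse.LexerTokenSource;
import hank.HankLexer;
import hank.HankLexer.HankToken;

class LexHankStringSketch {
    static function main() {
        // Hypothetical two-line script; the real examples live under examples/.
        var source = 'INCLUDE extra.hank\n-> start\n';
        var lexer = new HankLexer(ByteData.ofString(source), 'inline');
        var tokens = new LexerTokenSource(lexer, HankLexer.tok);

        // Print every token the ruleset produces until it reports TEof.
        while (true) {
            var t = tokens.token();
            switch (t) {
                case TEof: break;
                default: trace(t);
            }
        }
    }
}
```

The empty-pattern rule `"" => TEof` in `tok` is what terminates the loop at end of input.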
hank/ViewCounts.hx (new file, 17 lines)
@@ -0,0 +1,17 @@
/*
package hank;

import haxe.ds.Option;

/**
 Represents a scope of ViewCounts.
**/
class ViewCounts {

    var counts: Map<String, Int> = new Map();
    var scopes: Map<String, ViewCounts> = new Map();

    // TODO need to define expected behavior for view counts. For example, each time a stitch in a section is viewed, does that increment the section's view count? Do view counts increment when a section loops back on itself, or only when the scope changes from a distinct one to this one?

}
*/
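The TODO in this (still commented-out) class leaves the counting rule open. Purely as an illustration of one possible rule, and an assumption rather than anything this commit decides: visiting a dot-separated scope path could bump every enclosing scope, so viewing a stitch also increments its section.

```haxe
// Hypothetical sketch of one possible view-count rule; ViewCounts itself is
// commented out in this commit and its semantics are still undecided.
class ViewCountsSketch {
    var counts: Map<String, Int> = new Map();

    public function new() {}

    // Record a visit to e.g. "start.intro", incrementing both "start" and "start.intro".
    public function visit(path: String) {
        var prefix = '';
        for (part in path.split('.')) {
            prefix = (prefix == '') ? part : prefix + '.' + part;
            counts.set(prefix, count(prefix) + 1);
        }
    }

    public function count(path: String): Int {
        var c = counts.get(path);
        return c == null ? 0 : c;
    }

    static function main() {
        var vc = new ViewCountsSketch();
        vc.visit('start');
        vc.visit('start.intro');
        trace(vc.count('start'));       // 2 under this rule
        trace(vc.count('start.intro')); // 1
    }
}
```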
@@ -6,9 +6,10 @@
 "description": "Narrative scripting language for HaxeFlixel games based on Inkle's Ink engine",
 "version": "0.0.6",
 "releasenote": "It isn't safe to use this library yet.",
-"contributors": ["NQNStudios"],
+"contributors": ["NQNStudios", "Zicklag"],
 "dependencies": {
 "hscript": "",
+"hxparse": "",
 "utest": ""
 }
 }
@@ -1,4 +1,5 @@
 -lib hscript
 -lib utest
+-lib hxparse
 -main tests.TestMain
 --interp
tests/HankAssert.hx (new file, 14 lines)
@@ -0,0 +1,14 @@
package tests;

import utest.Assert;

class HankAssert {
    /**
     Assert that two complex values (i.e. algebraic enums) are the same.
    **/
    public static function equals(expected: Dynamic, actual: Dynamic, ?pos: String) {
        var failureMessage = 'Assertion that ${actual} is ${expected} failed ${if (pos != null) 'at ${pos}' else ''}';
        Assert.equals(Std.string(Type.typeof(expected)), Std.string(Type.typeof(actual)), failureMessage);
        Assert.equals(Std.string(expected), Std.string(actual), failureMessage);
    }
}
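A usage note (not part of the diff): because HankToken values from HankLexer.hx are algebraic enums, tests compare them through this helper's stringified form rather than raw equality. A minimal sketch of that, with a hypothetical test class name:

```haxe
// Sketch only: asserting that two enum values print identically via HankAssert.
import hank.HankLexer.HankToken;
import tests.HankAssert;

class HankAssertUsageSketch extends utest.Test {
    function testTokenEquality() {
        var expected = TWord('start');
        var actual = TWord('start');
        // Compares both the runtime type name and the Std.string form of each value.
        HankAssert.equals(expected, actual, 'HankAssertUsageSketch.testTokenEquality');
    }
}
```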
tests/HankLexerTest.hx (new file, 31 lines)
@@ -0,0 +1,31 @@
package tests;

import haxe.io.Bytes;
import sys.io.File;
import byte.ByteData;

import utest.Test;
import utest.Assert;

import hxparse.LexerTokenSource;

import hank.HankLexer;
import hank.HankLexer.HankToken;

import tests.HankAssert;

class HankLexerTest extends utest.Test {

    public function testLexMainExample() {
        var lexer = new HankLexer(ByteData.ofString(File.getContent('examples/main/main.hank')), 'testScript');
        var ts = new LexerTokenSource(lexer, HankLexer.tok);
        HankAssert.equals(TInclude("extra.hank"), ts.token());
        HankAssert.equals(TNewline, ts.token());
        HankAssert.equals(TArrow, ts.token());
        HankAssert.equals(TWord("start"), ts.token());
        HankAssert.equals(TLineComment(" This syntax moves the game flow to a new section."), ts.token());
        for (i in 0...100) {
            trace(ts.token());
        }
    }
}
@@ -3,6 +3,6 @@ import utest.Test;
 
 class TestMain extends Test {
 public static function main() {
-utest.UTest.run([new HInterfaceTest()]);
+utest.UTest.run([new HankLexerTest(), new HInterfaceTest()]);
 }
 }