a bunch of cleanup, reworking xml parsing and ui changes

commit 6f46531c59 (parent a03eac8cd9)
@@ -1,5 +1,4 @@
 -js ../htdocs/harmoniouscode.js
 -main JavaScriptTarget
--resource ../data/functions_tokens_cache.hxd
+-resource ../data/all_tokens.hxd
--resource ../data/constant_tokens_cache.hxd
 -cp ../src
@@ -1,5 +1,4 @@
 -main MyTests
 -neko ../neko/my_tests.n
--resource ../data/functions_tokens_cache.hxd
+-resource ../data/all_tokens.hxd
--resource ../data/constant_tokens_cache.hxd
 -cp ../src
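Both targets now embed one combined cache, all_tokens.hxd, instead of one resource per processor. haXe's -resource flag bundles the file into the compiled output under the path string it was given (no @alias appears in these hxml files), which is how TokenProcessor.load_all_from_resource() further down finds it. A minimal sketch of reading such an embedded resource (class name is hypothetical):

class ResourceSketch {
    static function main() {
        // -resource ../data/all_tokens.hxd registers the file under the
        // literal name "../data/all_tokens.hxd".
        var raw = haxe.Resource.getString("../data/all_tokens.hxd");
        trace(raw.length); // serialized Hash<Hash<Dynamic>>; see TokenProcessor below
    }
}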
@@ -12,10 +12,11 @@
 <div id="form" style="display: none">
 <div id="form-holder">
 <form action="" method="post" onsubmit="return false;">
-<textarea name="source" id="source" rows="15" cols="80"></textarea><br />
+<textarea name="source" id="source"></textarea><br />
 <input id="analyze-code-button" type="button" value="Analyze Code" onclick="JavaScriptTarget.do_analysis(this.form.elements.source)" />
 </form>
 </div>
+<div id="processing"></div>
 <div id="output"></div>
 <div id="permanent-ignore" style="display: none">
 To permanently ignore tokens (and globally ignore modules), do one of the following:
@@ -7,13 +7,18 @@ body, td, div, li, p, span {
 font-size: 12px;
 }
 
+h1 {
+text-align: center
+}
+
 div#form-holder {
 text-align: center
 }
 
 input#analyze-code-button {
-width: 200px;
+width: 750px;
 padding: 5px;
+margin-top: 5px;
 }
 
 div#loading {
@@ -63,13 +68,32 @@ th.is-filtering {
 color: blue;
 }
 
+textarea#source {
+width: 750px;
+height: 250px;
+}
+
 div#footer {
 text-align: center;
 font-size: 10px;
 color: #555;
-font-style: oblique
+font-style: oblique;
+border-top: solid #555 1px;
+padding-top: 5px;
+margin-top: 5px
 }
 
 span.ignore-code-holder {
 font-family: monospace
 }
+
+div#processing, div#code-announcement {
+text-align: center;
+font-size: 14px;
+font-weight: bold
+}
+
+div#code-announcement {
+border-bottom: solid #555 1px;
+margin-bottom: 5px
+}
@@ -20,16 +20,9 @@ class CodeParser {
 /**
 Load all possible token processors from disk.
 **/
-public function load_processors_from_disk() {
-for (processor_type_name in processor_types) {
-var processor : TokenProcessor = Type.createInstance(Type.resolveClass(processor_type_name), []);
-
-if (!processor.load_from_cache()) {
-processor.populate_from_file();
-processor.save_to_cache();
-}
-
-this.token_processors.set(processor_type_name, processor);
+public function load_all_processors_from_disk() {
+for (processor in TokenProcessor.load_all_from_cache()) {
+this.token_processors.set(Type.getClassName(Type.getClass(processor)), processor);
 }
 }
 #end
@@ -38,12 +31,8 @@ class CodeParser {
 Load all possible token processors from haXe Resources.
 **/
 public function load_processors_from_resources() {
-for (processor_type_name in processor_types) {
-var processor : TokenProcessor = Type.createInstance(Type.resolveClass(processor_type_name), []);
-
-processor.load_from_resource();
-
-this.token_processors.set(processor_type_name, processor);
+for (processor in TokenProcessor.load_all_from_resource()) {
+this.token_processors.set(Type.getClassName(Type.getClass(processor)), processor);
 }
 }
 
@@ -118,10 +107,12 @@
 if (!tokens_found.exists(token)) {
 if (!flattened_tokens.exists(token)) {
 for (token_processor in this.token_processors.iterator()) {
+if ((token_processor.get_default_token_type() == FunctionToken) == is_function) {
 if (token_processor.tokenHash.exists(token)) {
 results.push(token_processor.tokenHash.get(token).toResult()); break;
 }
 }
+}
 tokens_found.set(token, true);
 }
 }
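The new guard picks processors by token kind with a boolean equality: (token_processor.get_default_token_type() == FunctionToken) == is_function holds for function-token processors exactly when is_function is true, and for every other processor exactly when it is false, so one loop serves both lookups. A standalone illustration of the comparison (names are illustrative):

class FilterSketch {
    static function main() {
        for (is_function in [true, false]) {
            for (kind in ["FunctionToken", "ConstantToken"]) {
                // true/FunctionToken and false/ConstantToken both pass.
                if ((kind == "FunctionToken") == is_function) {
                    trace(is_function + " -> searches " + kind + " processors");
                }
            }
        }
    }
}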
@@ -1,6 +1,4 @@
 class ConstantTokenProcessor extends TokenProcessor {
-public static var cachePath : String = "../data/constant_tokens_cache.hxd";
-override public function get_cache_path() { return ConstantTokenProcessor.cachePath; }
 override public function get_default_token_type() { return ConstantToken; }
 
 public static var source_path : String = "../data";
@@ -1,17 +1,25 @@
 class FunctionTokenProcessor extends TokenProcessor {
-public static var cachePath : String = "../data/functions_tokens_cache.hxd";
-override public function get_cache_path() { return FunctionTokenProcessor.cachePath; }
-public static var sourcePath : String = "../data/phpdoc_function_versions.xml";
+public static var source_path : String = "../data/phpdoc_function_versions.xml";
 override public function get_default_token_type() { return FunctionToken; }
 
 #if neko
 public override function populate_from_file() {
-this.populate_from_string(neko.io.File.getContent(sourcePath));
+this.populate_from_string(neko.io.File.getContent(source_path));
 }
 
 public function populate_from_string(s : String) {
 this.tokenHash = new Hash<Token>();
-for (child in Xml.parse(s).firstElement()) {
+var tokens_parsed = 0;
+
+//
+// haXe XML parsing is slow, as it uses its own non-native parser,
+// so I'll use a hand-rolled string scan for this data.
+//
+/*var start = Date.now();
+var first_element = Xml.parse(s).firstElement();
+var end = Date.now();
+trace(end.getTime() - start.getTime());
+for (child in first_element) {
 if (child.nodeType == Xml.Element) {
 if (child.nodeName == "function") {
 var version = child.get("from");
@@ -19,9 +27,45 @@ class FunctionTokenProcessor extends TokenProcessor {
 version = ~/\:/.replace(version, " ");
 var token = child.get("name");
 this.tokenHash.set(token, new FunctionToken(child.get("name"), version));
+tokens_parsed++;
 }
 }
+}*/
+
+var s_length = s.length;
+var i = 0;
+
+var version_regexp = ~/from=\'([^\']*)\'/i;
+var token_regexp = ~/name=\'([^\']*)\'/i;
+
+while (i < s_length) {
+var new_i = s.indexOf("<function", i);
+if (new_i != -1) {
+var tag_end = s.indexOf(">", new_i);
+if (tag_end != -1) {
+var tag = s.substr(new_i, tag_end - new_i + 1);
+
+if (version_regexp.match(tag) && token_regexp.match(tag)) {
+var version = version_regexp.matched(1);
+var token = token_regexp.matched(1);
+version = ~/PECL /.replace(version, "");
+version = ~/\:/.replace(version, " ");
+
+this.tokenHash.set(token, new FunctionToken(token, version));
+tokens_parsed++;
+i = tag_end;
+} else {
+i++;
 }
+} else {
+i++;
+}
+} else {
+break;
+}
+}
+
+trace("tokens parsed: " + tokens_parsed);
 }
 #end
 }
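For reference, the replacement parser is pure linear string scanning: find each "<function" tag with indexOf, slice to the closing ">", and pull name/from out with two small regexes, never building a DOM. A minimal standalone sketch of the same loop in this codebase's haXe 2-era idiom (Hash; on current Haxe this would be Map<String,String>), advancing past each matched tag rather than re-matching it:

class ScanSketch {
    static function main() {
        var s = "<versions><function name='strlen' from='PHP 4, PHP 5'/></versions>";
        var version_regexp = ~/from='([^']*)'/i;
        var token_regexp = ~/name='([^']*)'/i;
        var found = new Hash<String>();
        var i = 0;
        while (i < s.length) {
            var start = s.indexOf("<function", i);
            if (start == -1) break;              // no more <function ...> tags
            var end = s.indexOf(">", start);
            if (end == -1) break;                // truncated tag: stop scanning
            var tag = s.substr(start, end - start + 1);
            if (version_regexp.match(tag) && token_regexp.match(tag)) {
                found.set(token_regexp.matched(1), version_regexp.matched(1));
            }
            i = end + 1;                         // resume after this tag
        }
        trace(found); // {strlen => PHP 4, PHP 5}
    }
}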
@@ -91,14 +91,12 @@ class JavaScriptTarget {
 static public function display_version_information() {
 var version_info = new CodeVersionInformation(current_results, ignored_modules);
 
-var output = "Your code in requires the following minimum PHP & PECL module versions:";
+var output = "<div id=\"code-announcement\">Your code requires the following minimum PHP & PECL module versions:</div>";
 
 var minimum = version_info.final_versions.get("minimum");
 
 output += "<form action=\"\" onsubmit=\"return false\">";
 
-output += "<ul>";
-
 var all_modules_hash = new Hash<Bool>();
 
 for (module in minimum.keys()) { all_modules_hash.set(module, true); }
@@ -291,8 +289,20 @@ class JavaScriptTarget {
 static public function do_analysis(textarea) {
 show_only_modules = new Hash<Bool>();
 
+js.Lib.document.getElementById('processing').innerHTML = "Analyzing code...";
+untyped {
+js.Lib.document.getElementById('analyze-code-button').disabled = true;
+}
+
+haxe.Timer.delay(function() {
 JavaScriptTarget.get_results(textarea.value);
 JavaScriptTarget.display_version_information();
+
+js.Lib.document.getElementById('processing').innerHTML = "";
+untyped {
+js.Lib.document.getElementById('analyze-code-button').disabled = false;
+}
+}, 100);
 }
 
 static public function toggle_module_and_redraw(module : String) {
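get_results runs synchronously on the browser's single JavaScript thread, so the "Analyzing code..." message and the disabled button would never actually paint if the analysis started immediately. Deferring the heavy calls with haxe.Timer.delay hands control back to the browser for one repaint first. A sketch of the pattern on its own (helper name is hypothetical):

class BusySketch {
    // Show a status message, let the browser repaint, then run blocking work.
    static function run_busy(label : String, work : Void -> Void) {
        var status = js.Lib.document.getElementById("processing");
        status.innerHTML = label;
        haxe.Timer.delay(function() {
            work();                // heavy and synchronous
            status.innerHTML = ""; // clear the message when done
        }, 100);                   // ~100ms is ample time for one paint
    }
}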
@@ -2,6 +2,7 @@ class MyTests {
 static function main() {
 var r = new haxe.unit.TestRunner();
 r.add(new TestToken());
+r.add(new TestTokenProcessor());
 r.add(new TestFunctionToken());
 r.add(new TestFunctionTokenProcessor());
 r.add(new TestConstantToken());
@@ -1,17 +1,3 @@
 class RegenerateDataFiles {
-public static function main() {
-var functionProcessor = new FunctionTokenProcessor();
-if (!functionProcessor.load_from_cache()) {
-neko.Lib.print("Regenerating functions cache...\n");
-functionProcessor.populate_from_file();
-functionProcessor.save_to_cache();
-}
-
-var constantProcessor = new ConstantTokenProcessor();
-if (!constantProcessor.load_from_cache()) {
-neko.Lib.print("Regenerating constants cache...\n");
-constantProcessor.populate_from_file();
-constantProcessor.save_to_cache();
-}
-}
+public static function main() { TokenProcessor.save_all_to_cache(); }
 }
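Note that save_all_to_cache() (see the TokenProcessor hunk below) only writes when ../data/all_tokens.hxd is absent, so regenerating after a data change means deleting the stale cache first. A hypothetical wrapper making that explicit:

class RegenerateSketch {
    static function main() {
        // save_all_to_cache() is a no-op when the cache file already exists,
        // so remove it first to force a rebuild from the source data files.
        if (neko.FileSystem.exists(TokenProcessor.cache_path)) {
            neko.FileSystem.deleteFile(TokenProcessor.cache_path);
        }
        TokenProcessor.save_all_to_cache();
    }
}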
@@ -14,14 +14,14 @@ class TestCodeParser extends haxe.unit.TestCase {
 #if neko
 function testCodeParserLoadTokens() {
 var p = new CodeParser();
-p.load_processors_from_disk();
+p.load_all_processors_from_disk();
 assertTrue(p.token_processors.exists("FunctionTokenProcessor"));
 assertTrue(p.token_processors.exists("ConstantTokenProcessor"));
 }
 
 function testProcessCode() {
 var p = new CodeParser();
-p.load_processors_from_disk();
+p.load_all_processors_from_disk();
 
 for (code in test_code) {
 var result = p.parse(code[0]);
@@ -12,22 +12,4 @@ class TestFunctionTokenProcessor extends haxe.unit.TestCase {
 public function testGenerateSampleToken() {
 assertTrue(token_processor.tokenHash.exists(function_name));
 }
-
-public function testSerializeInfo() {
-var test_xml = "<versions> <function name='one' from='PHP 4, PHP 5' /> <function name='two' from='PHP 4, PHP 5' /> </versions>";
-token_processor.populate_from_string(test_xml);
-
-var target_token_hash = "{one => { version => PHP 4, PHP 5, token => one }, two => { version => PHP 4, PHP 5, token => two }}";
-
-assertEquals(target_token_hash, token_processor.tokenHash.toString());
-
-var unwound_tokens = token_processor.unwind_tokens();
-
-assertTrue(unwound_tokens.toString().length < target_token_hash.length);
-
-token_processor = new FunctionTokenProcessor();
-token_processor.populate_from_unwound_tokens(unwound_tokens);
-
-assertEquals(target_token_hash, token_processor.tokenHash.toString());
-}
 }
src/TestTokenProcessor.hx (new file)
@@ -0,0 +1,35 @@
+class TestTokenProcessor extends haxe.unit.TestCase {
+function testSerializeMultipleProcessors() {
+var token_processor_one = new TokenProcessor();
+token_processor_one.tokenHash.set("one", new Token("one", "version one"));
+token_processor_one.tokenHash.set("two", new Token("two", "version one"));
+token_processor_one.tokenHash.set("three", new Token("three", "version two"));
+
+var token_processor_two = new TokenProcessor();
+token_processor_two.tokenHash.set("four", new Token("four", "version one"));
+token_processor_two.tokenHash.set("five", new Token("five", "version two"));
+token_processor_two.tokenHash.set("six", new Token("six", "version three"));
+
+var normalized_data = TokenProcessor.normalize_processors([token_processor_one, token_processor_two]);
+
+assertTrue(normalized_data.exists("types"));
+assertEquals("{0 => TokenProcessor, 1 => TokenProcessor}", normalized_data.get("types").toString());
+
+assertTrue(normalized_data.exists("versions"));
+assertEquals("{version one => 0, version two => 1, version three => 2}".length, normalized_data.get("versions").toString().length);
+
+assertTrue(normalized_data.exists("processor-0"));
+assertTrue(normalized_data.exists("processor-1"));
+
+var trap_invalid = true;
+try {
+TokenProcessor.unnormalize_processors(new Hash<Hash<Dynamic>>());
+trap_invalid = false;
+} catch (e : Dynamic) {}
+assertTrue(trap_invalid);
+
+var unnormalized_processors = TokenProcessor.unnormalize_processors(normalized_data);
+
+assertTrue(unnormalized_processors.length == 2);
+}
+}
@@ -1,47 +1,109 @@
+import FunctionTokenProcessor;
+import ConstantTokenProcessor;
+
 class TokenProcessor {
 public var tokenHash : Hash<Token>;
-public static var cachePath : String = null;
+public static var cache_path : String = "../data/all_tokens.hxd";
 
 public function new() { this.tokenHash = new Hash<Token>(); }
-public function get_cache_path() { return TokenProcessor.cachePath; }
 public function get_default_token_type() { return Token; }
 
+public static var all_token_processors = [ "FunctionTokenProcessor", "ConstantTokenProcessor" ];
+
 #if neko
-public function load_from_cache() : Bool {
-if (neko.FileSystem.exists(this.get_cache_path())) {
-this.populate_from_unwound_tokens(haxe.Unserializer.run(neko.io.File.getContent(this.get_cache_path())));
-return true;
+public static function load_all_from_cache() : Array<TokenProcessor> {
+if (neko.FileSystem.exists(cache_path)) {
+return unnormalize_processors(haxe.Unserializer.run(neko.io.File.getContent(cache_path)));
 } else {
-return false;
+return null;
 }
 }
 
-public function save_to_cache() {
-var fh = neko.io.File.write(this.get_cache_path(), true);
-fh.writeString(haxe.Serializer.run(this.unwind_tokens()));
+public static function save_all_to_cache() {
+if (!neko.FileSystem.exists(cache_path)) {
+var all_processors = new Array<TokenProcessor>();
+for (processor_class in all_token_processors) {
+var processor : TokenProcessor = Type.createInstance(Type.resolveClass(processor_class), []);
+processor.populate_from_file();
+all_processors.push(processor);
+}
+
+var fh = neko.io.File.write(cache_path, true);
+fh.writeString(haxe.Serializer.run(normalize_processors(all_processors)));
 fh.close();
 }
+}
 
 public function populate_from_file() {}
 #end
 
-public function load_from_resource() {
-this.populate_from_unwound_tokens(haxe.Unserializer.run(haxe.Resource.getString(this.get_cache_path())));
+public static function load_all_from_resource() {
+return unnormalize_processors(haxe.Unserializer.run(haxe.Resource.getString(cache_path)));
 }
 
-public function unwind_tokens() : Hash<String> {
-var unwound_tokens = new Hash<String>();
-for (token in this.tokenHash.keys()) {
-unwound_tokens.set(token, this.tokenHash.get(token).version);
-}
-return unwound_tokens;
-}
-
-public function populate_from_unwound_tokens(unwound_tokens : Hash<String>) {
-this.tokenHash = new Hash<Token>();
-var token_type = get_default_token_type();
-for (token in unwound_tokens.keys()) {
-this.tokenHash.set(token, Type.createInstance(token_type, [ token, unwound_tokens.get(token) ]));
-}
+public static function normalize_processors(processors : Array<TokenProcessor>) : Hash<Hash<Dynamic>> {
+if (processors.length == 0) { throw "no processors specified"; }
+var normalized_data = new Hash<Hash<Dynamic>>();
+
+var types = new Hash<String>();
+var all_versions_with_index = new Hash<Int>();
+
+var version_index = 0;
+for (i in 0...processors.length) {
+var i_string = Std.string(i);
+var tokens_with_version_index = new Hash<Int>();
+types.set(i_string, Type.getClassName(Type.getClass(processors[i])));
+for (token in processors[i].tokenHash.keys()) {
+var version = processors[i].tokenHash.get(token).version;
+if (!all_versions_with_index.exists(version)) {
+all_versions_with_index.set(version, version_index);
+version_index++;
+}
+tokens_with_version_index.set(token, all_versions_with_index.get(version));
+}
+normalized_data.set("processor-" + i_string, tokens_with_version_index);
+}
+
+trace("Unique version strings: " + version_index);
+
+var flipped_versions = new Hash<String>();
+for (version in all_versions_with_index.keys()) {
+flipped_versions.set(Std.string(all_versions_with_index.get(version)), version);
+}
+
+normalized_data.set("versions", flipped_versions);
+normalized_data.set("types", types);
+
+return normalized_data;
+}
+
+public static function unnormalize_processors(normalized_data : Hash<Hash<Dynamic>>) : Array<TokenProcessor> {
+var unnormalized_processors = new Array<TokenProcessor>();
+
+if (!normalized_data.exists("versions")) { throw "versions not defined"; }
+if (!normalized_data.exists("types")) { throw "types not defined"; }
+
+var versions = normalized_data.get("versions");
+var types = normalized_data.get("types");
+
+for (type_key in types.keys()) {
+var i = Std.parseInt(type_key);
+var processor : TokenProcessor = Type.createInstance(Type.resolveClass(types.get(type_key)), []);
+
+var processor_key = "processor-" + type_key;
+if (!normalized_data.exists(processor_key)) { throw "processor " + type_key + " not defined"; }
+
+var processor_tokens = normalized_data.get(processor_key);
+var token_type = processor.get_default_token_type();
+for (token in processor_tokens.keys()) {
+var version_lookup = Std.string(processor_tokens.get(token));
+processor.tokenHash.set(token, Type.createInstance(token_type, [token, versions.get(version_lookup)]));
+}
+
+unnormalized_processors.push(processor);
+}
+
+return unnormalized_processors;
 }
 }
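The combined cache is a serialized Hash<Hash<Dynamic>> with three kinds of entries: "types" maps processor index to class name, "versions" is an interning table mapping index to version string, and one "processor-N" hash per processor maps each token to a version index. Long strings like "PHP 4, PHP 5" are therefore stored once each instead of once per token, which is what shrinks all_tokens.hxd. A small worked sketch (data values are illustrative):

class NormalizeSketch {
    static function main() {
        var p = new FunctionTokenProcessor();
        p.tokenHash.set("strlen", new FunctionToken("strlen", "PHP 4, PHP 5"));
        p.tokenHash.set("count", new FunctionToken("count", "PHP 4, PHP 5"));

        var processors : Array<TokenProcessor> = [p];
        var data = TokenProcessor.normalize_processors(processors);
        // data.get("types")       -> {0 => FunctionTokenProcessor}
        // data.get("versions")    -> {0 => PHP 4, PHP 5}   (interned once)
        // data.get("processor-0") -> {strlen => 0, count => 0}
        trace(haxe.Serializer.run(data).length);
    }
}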