commit ab40ac919b
parent 6f46531c59

    a little bit of cleanup
@@ -39,6 +39,9 @@ class CodeParser {
   public function get_token_processors() { return this.token_processors; }
   public function get_ignored_modules() { return this.ignored_modules; }
 
+  /**
+    Flatten a list of hashes into a single hash.
+  **/
   private function flatten_tokens_to_ignore(tokens_to_ignore : Array<Hash<Bool>>) : Hash<Bool> {
     var flattened_tokens = new Hash<Bool>();
     for (token_hash in tokens_to_ignore) {
@@ -49,6 +52,9 @@ class CodeParser {
     return flattened_tokens;
   }
 
+  /**
+    Parse a block of PHP code, returning the Result set.
+  **/
   public function parse(s : String) : Array<Result> {
     var results = new Array<Result>();
     this.ignored_modules = new Hash<Bool>();
@@ -108,8 +114,8 @@ class CodeParser {
       if (!flattened_tokens.exists(token)) {
         for (token_processor in this.token_processors.iterator()) {
           if ((token_processor.get_default_token_type() == FunctionToken) == is_function) {
-            if (token_processor.tokenHash.exists(token)) {
-              results.push(token_processor.tokenHash.get(token).toResult()); break;
+            if (token_processor.token_hash.exists(token)) {
+              results.push(token_processor.token_hash.get(token).to_result()); break;
             }
           }
         }
@@ -117,6 +123,7 @@ class CodeParser {
         }
       }
     } else {
+      // see if this is a //harmonious ignore indicator
       if (current == "/") {
         if (s.indexOf("//harmonious", index) == index) {
           var end_of_line = s.indexOf("\n", index);
@@ -158,8 +165,10 @@ class CodeParser {
       var token = s.substr(capture_index, index - capture_index);
 
       for (token_processor in this.token_processors.iterator()) {
-        if (token_processor.tokenHash.exists(token)) {
-          results.push(token_processor.tokenHash.get(token).toResult()); break;
+        if ((token_processor.get_default_token_type() == FunctionToken) == false) {
+          if (token_processor.token_hash.exists(token)) {
+            results.push(token_processor.token_hash.get(token).to_result()); break;
+          }
         }
       }
     }
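The boolean comparison in the hunks above — (token_processor.get_default_token_type() == FunctionToken) == is_function — is the routing test: call sites flagged as function calls appear to consult only function-token processors, while bare tokens consult the rest. A minimal, self-contained Haxe sketch of the same trick; the names here are stand-ins, not the repo's classes:

    class RoutingSketch {
      static function main() {
        // true stands in for a processor whose default token type is FunctionToken
        var handles_functions = [true, false];
        for (is_function in [true, false]) {
          for (p in handles_functions) {
            // a processor is consulted only when its kind matches the site's kind
            trace("site:" + is_function + " processor:" + p + " -> consulted: " + (p == is_function));
          }
        }
      }
    }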
@@ -1,5 +1,3 @@
 class ConstantToken extends Token {
-  override public function get_token_type() {
-    return ResultType.Constant;
-  }
+  override public function get_token_type() { return ResultType.Constant; }
 }
@@ -1,3 +1,15 @@
+/**
+  ConstantTokenProcessor attempts to parse the PHP constants documentation
+  to glean version information about constants. This is done by examining
+  the descriptions of constants to see if the string:
+
+    ~/since php ([0-9\.]+)/i
+
+  exists in the description, taking that as the minimum version
+  necessary to use that constant. This method is imperfect and probably
+  inaccurate, and one should always remember to test one's code on
+  the target platform to ensure maximum compatibility.
+**/
 class ConstantTokenProcessor extends TokenProcessor {
   override public function get_default_token_type() { return ConstantToken; }
 
@@ -16,19 +28,25 @@ class ConstantTokenProcessor extends TokenProcessor {
 
   #if neko
   public override function populate_from_file() {
-    this.tokenHash = new Hash<Token>();
+    this.token_hash = new Hash<Token>();
     for (file in neko.FileSystem.readDirectory(source_path)) {
       if (source_file_pattern.match(file)) {
-        trace(file + ": " + this.append_from_string(neko.io.File.getContent(source_path + "/" + file)));
+        //trace(file + ": " + this.append_from_string(neko.io.File.getContent(source_path + "/" + file)));
+        this.append_from_string(neko.io.File.getContent(source_path + "/" + file));
       }
     }
   }
 
+  /**
+    Process an XML string and append any tokens found to the token store
+    for this processor.
+  **/
   public function append_from_string(s : String) : String {
     var type = "none";
     for (child in Xml.parse(s).firstElement()) {
       if (child.nodeType == Xml.Element) {
         var any_skipped;
+        // dig past unnecessary nodes at the top of the tree
         do {
           any_skipped = false;
           for (nodes_to_skip in node_skip_information) {
@@ -69,13 +87,13 @@ class ConstantTokenProcessor extends TokenProcessor {
             } catch (e : Dynamic) {}
           }
           if (token_name != null) {
-            this.tokenHash.set(token_name, new ConstantToken(token_name, "PHP " + token_version));
+            this.token_hash.set(token_name, new ConstantToken(token_name, "PHP " + token_version));
           }
         }
       }
     }
   }
 
   // variablelist
   if (child.nodeName == "variablelist") {
     type = "variablelist";
@@ -105,7 +123,7 @@ class ConstantTokenProcessor extends TokenProcessor {
         }
 
         if (token_name != null) {
-          this.tokenHash.set(token_name, new ConstantToken(token_name, "PHP " + token_version));
+          this.token_hash.set(token_name, new ConstantToken(token_name, "PHP " + token_version));
         }
       }
     }
@@ -149,7 +167,7 @@ class ConstantTokenProcessor extends TokenProcessor {
           }
         }
         if (token_name != null) {
-          this.tokenHash.set(token_name, new ConstantToken(token_name, "PHP " + token_version));
+          this.token_hash.set(token_name, new ConstantToken(token_name, "PHP " + token_version));
         }
       }
 
@@ -160,8 +178,11 @@ class ConstantTokenProcessor extends TokenProcessor {
     return type;
   }
 
+  /**
+    Populate a new token store from the provided XML string.
+  **/
   public function populate_from_string(s : String) {
-    this.tokenHash = new Hash<Token>();
+    this.token_hash = new Hash<Token>();
     this.append_from_string(s);
   }
   #end
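The "since php" heuristic documented at the top of this file is easy to exercise in isolation. A small self-contained sketch, using an invented description string rather than real PHP documentation:

    class SincePhpSketch {
      static function main() {
        var description = "This constant has been available since PHP 5.2.0.";
        var since = ~/since php ([0-9\.]+)/i; // the pattern quoted in the doc comment
        if (since.match(description)) {
          trace("PHP " + since.matched(1)); // PHP 5.2.0
        }
      }
    }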
@@ -1,5 +1,3 @@
 class FunctionToken extends Token {
-  override public function get_token_type() {
-    return ResultType.Function;
-  }
+  override public function get_token_type() { return ResultType.Function; }
 }
@@ -8,7 +8,7 @@ class FunctionTokenProcessor extends TokenProcessor {
   }
 
   public function populate_from_string(s : String) {
-    this.tokenHash = new Hash<Token>();
+    this.token_hash = new Hash<Token>();
     var tokens_parsed = 0;
 
     //
@@ -26,7 +26,7 @@ class FunctionTokenProcessor extends TokenProcessor {
         version = ~/PECL /.replace(version, "");
         version = ~/\:/.replace(version, " ");
         var token = child.get("name");
-        this.tokenHash.set(token, new FunctionToken(child.get("name"), version));
+        this.token_hash.set(token, new FunctionToken(child.get("name"), version));
         tokens_parsed++;
       }
     }
@@ -51,21 +51,19 @@ class FunctionTokenProcessor extends TokenProcessor {
           version = ~/PECL /.replace(version, "");
           version = ~/\:/.replace(version, " ");
 
-          this.tokenHash.set(token, new FunctionToken(token, version));
+          this.token_hash.set(token, new FunctionToken(token, version));
           tokens_parsed++;
           i = tag_end;
         } else {
           i++;
         }
       } else {
         i++;
         break;
       }
     } else {
       break;
     }
   }
 
   trace("tokens parsed: " + tokens_parsed);
 }
 #end
 }
@@ -20,8 +20,8 @@ class TestConstantTokenProcessor extends haxe.unit.TestCase {
     var tokenProcessor = new ConstantTokenProcessor();
     tokenProcessor.populate_from_string(string);
 
-    assertTrue(tokenProcessor.tokenHash.exists(constant_name));
-    assertEquals("PHP " + constant_from, tokenProcessor.tokenHash.get(constant_name).version);
+    assertTrue(tokenProcessor.token_hash.exists(constant_name));
+    assertEquals("PHP " + constant_from, tokenProcessor.token_hash.get(constant_name).version);
   }
 }
@@ -10,6 +10,6 @@ class TestFunctionTokenProcessor extends haxe.unit.TestCase {
   }
 
   public function testGenerateSampleToken() {
-    assertTrue(token_processor.tokenHash.exists(function_name));
+    assertTrue(token_processor.token_hash.exists(function_name));
   }
 }
@@ -1,21 +1,21 @@
 class TestToken extends haxe.unit.TestCase {
-  static var tokenName : String = "test";
-  static var tokenVersion : String = "5.2";
+  static var token_name : String = "test";
+  static var token_version : String = "5.2";
   var t : Token;
 
   public override function setup() {
-    t = new Token(tokenName, tokenVersion);
+    t = new Token(token_name, token_version);
   }
 
   public function testInstantiateToken() {
-    assertEquals(tokenName, t.token);
-    assertEquals(tokenVersion, t.version);
+    assertEquals(token_name, t.token);
+    assertEquals(token_version, t.version);
   }
 
   public function testToResult() {
-    var result = t.toResult();
+    var result = t.to_result();
     assertEquals(ResultType.Generic, result.type);
-    assertEquals(tokenName, result.token);
-    assertEquals(tokenVersion, result.version);
+    assertEquals(token_name, result.token);
+    assertEquals(token_version, result.version);
   }
 }
@@ -1,14 +1,14 @@
 class TestTokenProcessor extends haxe.unit.TestCase {
   function testSerializeMultipleProcessors() {
     var token_processor_one = new TokenProcessor();
-    token_processor_one.tokenHash.set("one", new Token("one", "version one"));
-    token_processor_one.tokenHash.set("two", new Token("two", "version one"));
-    token_processor_one.tokenHash.set("three", new Token("three", "version two"));
+    token_processor_one.token_hash.set("one", new Token("one", "version one"));
+    token_processor_one.token_hash.set("two", new Token("two", "version one"));
+    token_processor_one.token_hash.set("three", new Token("three", "version two"));
 
     var token_processor_two = new TokenProcessor();
-    token_processor_two.tokenHash.set("four", new Token("four", "version one"));
-    token_processor_two.tokenHash.set("five", new Token("five", "version two"));
-    token_processor_two.tokenHash.set("six", new Token("six", "version three"));
+    token_processor_two.token_hash.set("four", new Token("four", "version one"));
+    token_processor_two.token_hash.set("five", new Token("five", "version two"));
+    token_processor_two.token_hash.set("six", new Token("six", "version three"));
 
     var normalized_data = TokenProcessor.normalize_processors([token_processor_one, token_processor_two]);
 
@@ -12,7 +12,7 @@ class Token {
   public function get_version() { return this.version; }
   public function get_token_type() { return ResultType.Generic; }
 
-  public function toResult() {
+  public function to_result() {
     return new Result(this.token_type, this.token, this.version);
   }
 }
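A usage sketch for the renamed to_result(), assuming the repo's Token and Result classes are on the classpath; the constructor arguments and Result fields are the ones exercised by TestToken above:

    class ToResultSketch {
      static function main() {
        var t = new Token("test", "5.2");
        var r = t.to_result();              // Result carries type, token, and version
        trace(r.token + " / " + r.version); // test / 5.2
      }
    }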
@@ -1,16 +1,23 @@
 import FunctionTokenProcessor;
 import ConstantTokenProcessor;
 
+/**
+  Class that loads tokens from PHP documentation and holds them
+  for use by CodeParser.
+**/
 class TokenProcessor {
-  public var tokenHash : Hash<Token>;
+  public var token_hash : Hash<Token>;
   public static var cache_path : String = "../data/all_tokens.hxd";
 
-  public function new() { this.tokenHash = new Hash<Token>(); }
+  public function new() { this.token_hash = new Hash<Token>(); }
   public function get_default_token_type() { return Token; }
 
   public static var all_token_processors = [ "FunctionTokenProcessor", "ConstantTokenProcessor" ];
 
   #if neko
+  /**
+    Load all possible token processors from the cache.
+  **/
   public static function load_all_from_cache() : Array<TokenProcessor> {
     if (neko.FileSystem.exists(cache_path)) {
       return unnormalize_processors(haxe.Unserializer.run(neko.io.File.getContent(cache_path)));
@@ -19,6 +26,9 @@ class TokenProcessor {
     }
   }
 
+  /**
+    If the cache file does not exist, save all token processors to disk.
+  **/
   public static function save_all_to_cache() {
     if (!neko.FileSystem.exists(cache_path)) {
       var all_processors = new Array<TokenProcessor>();
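Taken together, the neko-side methods above imply a simple cache flow. A hedged sketch using only the calls shown in this commit:

    class CacheFlowSketch {
      static function main() {
        TokenProcessor.save_all_to_cache(); // writes ../data/all_tokens.hxd only if it is missing
        var processors = TokenProcessor.load_all_from_cache();
        trace(processors.length + " token processors loaded from cache");
      }
    }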
@@ -34,13 +44,24 @@ class TokenProcessor {
     }
   }
 
+  /**
+    Load the tokens for this type of processor from disk.
+  **/
   public function populate_from_file() {}
   #end
 
+  /**
+    Load all possible token processors from the cache Resource.
+  **/
   public static function load_all_from_resource() {
     return unnormalize_processors(haxe.Unserializer.run(haxe.Resource.getString(cache_path)));
   }
 
+  /**
+    Given an array of TokenProcessors, normalize the version information
+    out of the tokens and into a separate hash, and return the
+    TokenProcessor, version_id => version, and token => version_id information.
+  **/
   public static function normalize_processors(processors : Array<TokenProcessor>) : Hash<Hash<Dynamic>> {
     if (processors.length == 0) { throw "no processors specified"; }
     var normalized_data = new Hash<Hash<Dynamic>>();
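normalize_processors and unnormalize_processors are easiest to read as a round trip: version strings are deduplicated into an id table, each token keeps only its version id, and unnormalizing swaps the ids back for the shared strings. A sketch mirroring TestTokenProcessor above, assuming the classes in this commit:

    class RoundTripSketch {
      static function main() {
        var one = new TokenProcessor();
        one.token_hash.set("one", new Token("one", "version one"));

        var normalized = TokenProcessor.normalize_processors([one]);
        var restored = TokenProcessor.unnormalize_processors(normalized)[0];
        trace(restored.token_hash.get("one").version); // version one
      }
    }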
@@ -53,8 +74,8 @@ class TokenProcessor {
       var i_string = Std.string(i);
       var tokens_with_version_index = new Hash<Int>();
       types.set(i_string, Type.getClassName(Type.getClass(processors[i])));
-      for (token in processors[i].tokenHash.keys()) {
-        var version = processors[i].tokenHash.get(token).version;
+      for (token in processors[i].token_hash.keys()) {
+        var version = processors[i].token_hash.get(token).version;
         if (!all_versions_with_index.exists(version)) {
           all_versions_with_index.set(version, version_index);
           version_index++;
@@ -65,8 +86,6 @@ class TokenProcessor {
       normalized_data.set("processor-" + i_string, tokens_with_version_index);
     }
 
-    trace("Unique version strings: " + version_index);
-
     var flipped_versions = new Hash<String>();
     for (version in all_versions_with_index.keys()) {
       flipped_versions.set(Std.string(all_versions_with_index.get(version)), version);
@@ -78,6 +97,9 @@ class TokenProcessor {
     return normalized_data;
   }
 
+  /**
+    Unnormalize a set of data produced from TokenProcessor#normalize_processors.
+  **/
   public static function unnormalize_processors(normalized_data : Hash<Hash<Dynamic>>) : Array<TokenProcessor> {
     var unnormalized_processors = new Array<TokenProcessor>();
 
@@ -98,7 +120,7 @@ class TokenProcessor {
       var token_type = processor.get_default_token_type();
       for (token in processor_tokens.keys()) {
         var version_lookup = Std.string(processor_tokens.get(token));
-        processor.tokenHash.set(token, Type.createInstance(token_type, [token, versions.get(version_lookup)]));
+        processor.token_hash.set(token, Type.createInstance(token_type, [token, versions.get(version_lookup)]));
       }
 
       unnormalized_processors.push(processor);