|
|
@@ -107,6 +107,13 @@ namespace molasses {
 				effective_snapshots[idx] = type_stack;
 				it = ahead;
 				++idx;
+			} else if(auto ahead = it; ++ahead != consumed_stream.end() and (lexer_state.dictionary.at(*ahead) == "__JUMP_IF__")) {
+				if(type_stack.empty()) return false;
+				if(type_stack.back() != "i64") return false;
+				//type_stack.pop_back(); // JUMP_IF does not consume its test variable
+				effective_snapshots[idx] = type_stack;
+				it = ahead;
+				++idx;
 			} else if(symbol.is_string) {
 				type_stack.emplace_back("u8 ptr");
 			} else if(auto is_int32 = try_parse_int32(symbol_text); is_int32) {
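Aside (not part of the patch): a minimal standalone sketch of the typing rule the new branch enforces, using illustrative names rather than the project's real API. A jump-if expects an i64 on top of the type stack and, unlike a consuming operation, leaves the stack as it found it, which is why the pop_back above stays commented out.

#include <string>
#include <vector>

// Sketch only: mirrors the check in the hunk above, outside the real type checker.
bool jump_if_typechecks(const std::vector<std::string>& type_stack) {
	if(type_stack.empty()) return false;         // nothing to branch on
	if(type_stack.back() != "i64") return false; // test value must be an i64
	return true;                                 // stack is left untouched
}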
|
|
@@ -134,7 +141,8 @@ namespace molasses {
 			PROC_KW,
 			END_KW,
 			LABEL_KW,
-			GOTO_KW
+			GOTO_KW,
+			JUMP_IF_KW
 		};
 
 		lexed_output fake;
|
|
@@ -144,6 +152,7 @@ namespace molasses {
 		fake.dictionary[END_KW] = "__END__";
 		fake.dictionary[LABEL_KW] = "__LABEL__";
 		fake.dictionary[GOTO_KW] = "__GOTO__";
+		fake.dictionary[JUMP_IF_KW] = "__JUMP_IF__";
 
 
 		auto tokens = concatenate(fake, lexer_data);
|
|
@@ -221,10 +230,13 @@ namespace molasses {
 		std::vector<std::pair<size_t, size_t>> sub_bodies;
 		std::map<std::string, size_t> found_labels;
 		std::map<std::string, size_t> found_gotos;
+		std::map<std::string, size_t> found_jump_ifs;
 		while(*it != END_KW) {
-			if(auto ahead = it; ++ahead != tokens.symbols.end() and (*ahead == GOTO_KW or *ahead == LABEL_KW)) {
+			if(auto ahead = it; ++ahead != tokens.symbols.end() and (*ahead == GOTO_KW or *ahead == JUMP_IF_KW or *ahead == LABEL_KW)) {
 				if(*ahead == GOTO_KW) {
 					found_gotos[tokens.dictionary[*it]] = body.size();
+				} else if(*ahead == JUMP_IF_KW) {
+					found_jump_ifs[tokens.dictionary[*it]] = body.size();
 				} else if(*ahead == LABEL_KW) {
 					auto label_value = tokens.dictionary[*it];
 					if(found_labels.contains(label_value)) {
|
|
@@ -251,6 +263,14 @@ namespace molasses {
 				throw orphan_goto_error(body[index], dest);
 			}
 
 			sub_bodies.emplace_back(std::min(index, found_labels[dest]), std::max(index, found_labels[dest]));
 		}
 
+		for(auto& [dest, index] : found_jump_ifs) {
+			if(not found_labels.contains(dest)) {
+				throw orphan_goto_error(body[index], dest);
+			}
+
+			sub_bodies.emplace_back(std::min(index, found_labels[dest]), std::max(index, found_labels[dest]));
+		}
+
|
@@ -290,7 +310,14 @@ namespace molasses {
 		for(auto it = _body.begin(); it != _body.end(); ++it) {
 			auto elem = *it;
 			auto token = lexer_data.dictionary.at(elem);
-			if(auto ahead = it; ++ahead != _body.end() and (lexer_data.dictionary.at(*ahead) == "__GOTO__" or lexer_data.dictionary.at(*ahead) == "__LABEL__")) {
+			if(
+				auto ahead = it;
+				++ahead != _body.end() and (
+					lexer_data.dictionary.at(*ahead) == "__GOTO__"
+					or lexer_data.dictionary.at(*ahead) == "__LABEL__"
+					or lexer_data.dictionary.at(*ahead) == "__JUMP_IF__"
+				)
+			) {
 				if(lexer_data.dictionary.at(*ahead) == "__GOTO__") {
 					for(auto&& instruction : generate_goto(name() + " in " + token)) {
 						ops.push_back(instruction);
|
|
@@ -299,6 +326,10 @@ namespace molasses {
 					for(auto&& instruction : generate_label(name() + " in " + token)) {
 						ops.push_back(instruction);
 					}
+				} else if(lexer_data.dictionary.at(*ahead) == "__JUMP_IF__") {
+					for(auto&& instruction : generate_jump_if(name() + " in " + token)) {
+						ops.push_back(instruction);
+					}
 				}
 				it = ahead;
 			} else if(elem.is_string) {
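For context, a minimal standalone sketch (illustrative names, not the project's API) of the pairing rule the new found_jump_ifs loop applies: every jump-if destination must resolve to a label recorded earlier, otherwise it is reported as an orphan, and each resolved pair yields a [min, max] index range that later bounds a sub-body.

#include <algorithm>
#include <cstddef>
#include <map>
#include <stdexcept>
#include <string>
#include <utility>
#include <vector>

// Sketch only: pairs jump sites with labels the same way the patch pairs
// found_jump_ifs (and found_gotos) against found_labels.
std::vector<std::pair<std::size_t, std::size_t>> pair_jumps_with_labels(
	const std::map<std::string, std::size_t>& labels,
	const std::map<std::string, std::size_t>& jumps) {
	std::vector<std::pair<std::size_t, std::size_t>> ranges;
	for(const auto& [dest, index] : jumps) {
		auto label = labels.find(dest);
		if(label == labels.end()) {
			// stands in for orphan_goto_error in the real code
			throw std::runtime_error("jump to unknown label: " + dest);
		}
		ranges.emplace_back(std::min(index, label->second), std::max(index, label->second));
	}
	return ranges;
}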
|
|
|