Add boilerplate for semantic analysis
Signed-off-by: Username404-59 <w.iron.zombie@gmail.com>
parent ea8902a3cc
commit d4df3aa4ea
@@ -186,7 +186,13 @@ set(CPACK_PACKAGE_INSTALL_DIRECTORY "${PROJECT_NAME} ${CMAKE_PROJECT_VERSION_MAJ
 set(CPACK_PACKAGE_FILE_NAME "${LOWERCASE_PROJECT_NAME}-${PROJECT_VERSION}-${TIME}.${CMAKE_SYSTEM_PROCESSOR}")
 
 include_directories(${CMAKE_CURRENT_LIST_DIR})
-add_executable(${EXENAME} src/main.cpp ${CMAKE_CURRENT_BINARY_DIR}/processed/${PROJECT_NAME}.rc src/etc/filefuncs.cpp src/etc/lexer.cpp src/headers/lex.hpp src/headers/misc.hpp src/headers/parsing/ParseComponents.hpp src/headers/transpiler/Target.hpp src/headers/transpiler/implementations/Lua.hpp src/headers/transpiler/implementations/Js.hpp src/headers/transpiler/implementations/Py.hpp src/headers/parsing/Parser.hpp src/headers/arguments.hpp src/headers/parsing/ReservedIdentifiers.hpp src/headers/emscripten_compatibility.h)
+add_executable(${EXENAME} src/main.cpp ${CMAKE_CURRENT_BINARY_DIR}/processed/${PROJECT_NAME}.rc src/etc/filefuncs.cpp
+        src/etc/lexer.cpp src/headers/lex.hpp src/headers/misc.hpp
+        src/headers/parsing/ParseComponents.hpp src/headers/transpiler/Target.hpp
+        src/headers/transpiler/implementations/Lua.hpp src/headers/transpiler/implementations/Js.hpp src/headers/transpiler/implementations/Py.hpp
+        src/headers/parsing/Parser.hpp src/headers/arguments.hpp src/headers/parsing/ReservedIdentifiers.hpp src/headers/emscripten_compatibility.h
+        src/headers/Tasking.hpp
+        src/headers/SemanticAnalysis.hpp)
 target_compile_definitions(${EXENAME} PRIVATE YBCON_VERSION="${CODENAME} ${PROJECT_VERSION}")
 target_precompile_headers(${EXENAME} PRIVATE src/headers/Yerbacon.hpp)
 if (Threads_FOUND)
@@ -0,0 +1,29 @@
+#ifndef YERBACON_SEMANTICANALYSIS_HPP
+#define YERBACON_SEMANTICANALYSIS_HPP
+
+#include "parsing/ParseComponents.hpp"
+#include "Tasking.hpp"
+
+#define make_task(T, C, F) make_task_base_R(T, C, F)
+
+struct SemanticAnalyser final: public Tasking {
+    const auto& analyseTree(const ParseTree& tree) {
+        const auto& task_map = getTaskMapInstance();
+        for (unsigned int i = 0; i < tree.size(); ++i) {
+            try {
+                task_map.at(tree[i]->getId())(tree, i);
+            } catch (const out_of_range&) {}
+        }
+        return tree;
+    }
+private:
+    unordered_task_map getTaskMapInstance() final {
+        return {
+
+        };
+    };
+};
+
+#undef make_task
+
+#endif //YERBACON_SEMANTICANALYSIS_HPP
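SemanticAnalyser walks the parse tree once and dispatches each node to a task keyed by the node's type_info; nodes with no registered task simply fall through the swallowed out_of_range. The map above is still empty boilerplate, so the following is only a sketch of how a check could later be registered through the three-argument make_task defined in this header — the Define task body and its comments are illustrative assumptions, not part of the commit:

    // Hypothetical example of a registered semantic task (illustrative only):
    unordered_task_map getTaskMapInstance() final {
        return {
            make_task(Define, , {
                // make_task_base_R has already bound the current node:
                //   const Define& parseComponent = reinterpret_cast<Define&>(*parsedTree[index]);
                (void) parseComponent; // e.g. validate the definition here
            })
        };
    };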
@@ -0,0 +1,20 @@
+#ifndef YERBACON_TASKING_HPP
+#define YERBACON_TASKING_HPP
+
+#include <typeindex>
+#include <unordered_map>
+#include "parsing/ParseComponents.hpp"
+
+#define make_task_base(start, type, captures, function_body) make_pair(type_index(typeid(type)), [captures](const ParseTree& parsedTree, unsigned int& index) { start; function_body })
+#define make_task_base_R(T, C, F) make_task_base(const T& parseComponent = reinterpret_cast<T&>(*parsedTree[index]), T, C, F)
+
+class Tasking {
+protected:
+    typedef function<void (const ParseTree& parsedTree, unsigned int& index)> task;
+    typedef unordered_map<type_index, task> unordered_task_map;
+public:
+    virtual unordered_task_map getTaskMapInstance() = 0;
+    virtual ~Tasking() = default;
+};
+
+#endif //YERBACON_TASKING_HPP
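The two macros carry the mechanism: make_task_base builds the (type_index, lambda) pair that goes into an unordered_task_map, and make_task_base_R additionally binds the current node to a reference of the concrete component type before the task body runs. Expanded by hand (whitespace added), make_task_base_R(T, C, F) yields:

    // Hand expansion of make_task_base_R(T, C, F), per the two #defines above:
    make_pair(
        type_index(typeid(T)),
        [C](const ParseTree& parsedTree, unsigned int& index) {
            const T& parseComponent = reinterpret_cast<T&>(*parsedTree[index]);
            F
        }
    )

so every task body can use parseComponent as the already-downcast node and parsedTree/index for surrounding context.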
@@ -135,6 +135,7 @@ namespace Parser {
                 break;
             } else i -= parametersDistance;
 
+            // TODO Move this to semantic analysis
             bool isFinalDefine = nextAre({TAG, DEFINE});
             if (isFinalDefine || next.toktype == DEFINE) {
                 const optional previousDefinition = parseTree.template findReferenceByName<Define>(current.toktext);
@@ -183,6 +184,7 @@ namespace Parser {
             } catch (const NamedIdentifier<true>::identifier_reserved_exception&) {
                 parsingError(current, " is a reserved identifier", true);
             }
+            // TODO Move this to semantic analysis
             if (not parseTree.empty()) {
                 const auto& last = parseTree.cend() - 1;
                 const type_info& lastId = last->get()->getId();
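Both TODO sites flag parser-side validation — the redefinition lookup around isFinalDefine, and the inspection of the previous node's getId() — as candidates to migrate into SemanticAnalyser, which already dispatches per node type. A rough sketch of the shape a relocated check could take; only findReferenceByName comes from the existing parser code, while the name() accessor and the error handling are assumptions:

    // Hypothetical sketch of the redefinition check as a semantic-analysis task:
    make_task(Define, , {
        // same lookup the parser performs today; 'name()' is an assumed accessor
        const optional previousDefinition =
            parsedTree.template findReferenceByName<Define>(parseComponent.name());
        if (previousDefinition.has_value()) {
            // e.g. reject redefining an existing (final) definition here
        }
    })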
@@ -15,8 +15,9 @@
 #endif
 
 #include "../parsing/ParseComponents.hpp"
+#include "../Tasking.hpp"
 
-class Target {
+class Target: protected Tasking {
     constexpr static const char* const interpolationString = "${";
     constexpr static const char* const interpolationCloseString = "}";
 protected:
@@ -65,13 +66,9 @@ protected:
             }
         } else output << openCharacters << view << closeCharacters;
     }
-    typedef function<void (const ParseTree& parsedTree, unsigned int& index)> task;
-    #define make_task_base(start, type, captures, function_body) make_pair(type_index(typeid(type)), [captures](const ParseTree& parsedTree, unsigned int& index) { start; function_body })
-    #define make_task_base_R(T, C, F) make_task_base(const T& parseComponent = reinterpret_cast<T&>(*parsedTree[index]), T, C, F)
     #define make_task(T, F) make_task_base_R(T, this, F)
     #define make_task_noR(T, F) make_task_base(,T, this, F)
     #define make_nonlocal_task(T, F) make_task_base_R(T, , F)
-    typedef unordered_map<type_index, task> unordered_task_map;
     typedef pair<const char*, const char*> print_functions_pair;
     virtual unordered_task_map getTaskMap() = 0;
    virtual print_functions_pair printFunctions() = 0;
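With the shared pieces hoisted into Tasking.hpp, Target keeps only its own macro layer: its make_task(T, F) is a two-argument form that always captures this, so transpiler tasks can reach the emitter's state (output, separator, and the like), whereas SemanticAnalysis.hpp defines a three-argument make_task(T, C, F) where the caller picks the capture list. The task and unordered_task_map typedefs now come from the Tasking base instead of being declared inline here.

    // The two per-consumer macro layers after this commit (both expand through make_task_base_R):
    //   Target.hpp:            #define make_task(T, F)    make_task_base_R(T, this, F)
    //   SemanticAnalysis.hpp:  #define make_task(T, C, F) make_task_base_R(T, C, F)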
@@ -124,7 +121,7 @@ protected:
     string separator;
     static constexpr const char* indentation = " ";
 public:
-    unordered_task_map getTaskMapInstance() {
+    unordered_task_map getTaskMapInstance() final {
         unordered_task_map fullMap = getTaskMap();
         // Default / Shared tasks:
         fullMap.merge(unordered_task_map({
@@ -151,7 +148,7 @@ public:
     };
     explicit Target(const bool& newLines): output(), newLines(newLines), separator() {};
     Target() = delete;
-    virtual ~Target() = default;
+    ~Target() override = default;
 };
 
 #include "implementations/Lua.hpp"
@@ -7,6 +7,7 @@ using namespace std;
 
 #include "headers/misc.hpp"
 #include "headers/arguments.hpp"
+#include "headers/SemanticAnalysis.hpp"
 #include "headers/transpiler/Target.hpp"
 #include "headers/emscripten_compatibility.h"
 
@@ -58,7 +59,9 @@ int main(int argc, char* argv[]) {
     unit_result resultingPair;
     try {
         resultingPair.first = Target::forName(target, newLines)->transpileWithTree(
+            SemanticAnalyser().analyseTree(
                 parseString(text_provided ? string(currentArgument) + '\n' : getFileContent(file_path.string(/* NOTE: This string is expected to finish with a line ending */)))
+            )
         );
         if (!text_provided or output_directory.has_value())
             outputFileContent(
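Because analyseTree both takes and returns a const ParseTree&, the analyser nests into the existing expression without changing what transpileWithTree receives; until tasks are registered it is effectively a pass-through. Staged out for readability (the variable names are illustrative, the calls and their order are the ones in the commit):

    // Equivalent staged form of the call chain in main():
    const ParseTree tree = parseString(sourceText);                    // lex + parse
    const ParseTree& analysed = SemanticAnalyser().analyseTree(tree);  // run registered semantic tasks (none yet)
    resultingPair.first = Target::forName(target, newLines)->transpileWithTree(analysed);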