@marionette-of-u
Last active November 3, 2015 06:37
lexer & parser (lxq)
<lexer> lexer{
mul = "\*";
div = "\/";
add = "\+";
sub = "\-";
left_paren = "\(";
right_paren = "\)";
[make_id] id = "[0-9]+";
}
<token> token{
<right>{ unary_minus; }
<left>{
mul, div;
add, sub;
}
left_paren, right_paren;
id;
}
<parser> parser{
[default_value]
E
: [make_add] E(0) add E(1)
| [make_sub] E(0) sub E(1)
| [make_mlt] E(0) mul E(1)
| [make_div] E(0) div E(1)
| [identity] left_paren E(0) right_paren
| [make_inv] <unary_minus> sub E(0)
| [identity] id(0)
;
}
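Each bracketed name in the grammar ([make_id], [make_add], [make_sub], [make_mlt], [make_div], [make_inv], [identity], [default_value]) is resolved against a user-supplied semantic-action class; the concrete implementation lives in the hand-written driver at the bottom of this gist. The declarations below are only a sketch of the interface those names imply (the name semantic_action_interface is illustrative, and lxq.hpp is the header defined further down), not generated code.

#include "lxq.hpp"
// Sketch only: the member functions the grammar's bracketed action names expect.
struct semantic_action_interface{
    double default_value();
    // [make_id] : build a semantic value from the matched character range.
    template<class Iter>
    lxq::semantic_data *make_id(Iter first, Iter last);
    // Binary rules receive the semantic values of E(0) and E(1).
    lxq::semantic_data *make_add(lxq::semantic_data*, lxq::semantic_data*);
    lxq::semantic_data *make_sub(lxq::semantic_data*, lxq::semantic_data*);
    lxq::semantic_data *make_mlt(lxq::semantic_data*, lxq::semantic_data*);
    lxq::semantic_data *make_div(lxq::semantic_data*, lxq::semantic_data*);
    // Unary rules receive a single semantic value.
    lxq::semantic_data *make_inv(lxq::semantic_data*);
    lxq::semantic_data *identity(lxq::semantic_data*);
};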
// lexer.hpp (auto-generated)
#ifndef LEXER_HPP_
#define LEXER_HPP_
#include <memory>
#include <vector>
#include <exception>
#include <stdexcept>
#include "lxq.hpp"
namespace lxq{
enum class token_id : int{
mul = 1,
div = 2,
add = 3,
sub = 4,
left_paren = 5,
right_paren = 6,
id = 7,
end = 2147483647
};
}
template<class Iter>
struct lexer{
struct token_type{
using identifier_type = lxq::token_id;
token_type() : value(nullptr){}
token_type(const token_type&) = delete;
token_type(token_type &&other) : first(std::move(other.first)), last(std::move(other.last)), line_num(other.line_num), char_num(other.char_num), word_num(other.word_num), identifier(other.identifier), value(std::move(other.value)){}
~token_type() = default;
Iter first, last;
std::size_t line_num, char_num, word_num;
identifier_type identifier;
std::unique_ptr<lxq::semantic_data> value;
};
template<class Action>
static std::vector<token_type> tokenize(Iter iter, Iter end, Action &action){
std::vector<token_type> result;
Iter first = iter;
std::size_t line_num = 0, char_num = 0, word_num = 0;
char c;
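// Hand-rolled DFA start state: dispatch on the first character of the next token.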
state_1:;
if(iter == end){
goto end_of_tokenize;
}
c = *iter;
switch(c){
case 40:
++char_num;
++iter;
goto state_2;
case 41:
++char_num;
++iter;
goto state_3;
case 42:
++char_num;
++iter;
goto state_4;
case 43:
++char_num;
++iter;
goto state_5;
case 45:
++char_num;
++iter;
goto state_6;
case 47:
++char_num;
++iter;
goto state_7;
case 48: case 49: case 50: case 51: case 52: case 53: case 54: case 55:
case 56: case 57:
++char_num;
++iter;
goto state_8;
}
throw std::runtime_error("lexical error : state 1");
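// States 2-7: a single one-character token ('(', ')', '*', '+', '-', '/') has been read; emit it and return to the start state.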
state_2:;
if(iter == end){
token_type t;
t.first = first;
t.last = iter;
t.line_num = line_num;
t.char_num = char_num;
t.word_num = word_num++;
t.identifier = token_type::identifier_type::left_paren;
result.push_back(std::move(t));
goto end_of_tokenize;
}
{
token_type t;
t.first = first;
t.last = iter;
t.line_num = line_num;
t.char_num = char_num;
t.word_num = word_num++;
t.identifier = token_type::identifier_type::left_paren;
result.push_back(std::move(t));
first = iter;
goto state_1;
}
state_3:;
if(iter == end){
token_type t;
t.first = first;
t.last = iter;
t.line_num = line_num;
t.char_num = char_num;
t.word_num = word_num++;
t.identifier = token_type::identifier_type::right_paren;
result.push_back(std::move(t));
goto end_of_tokenize;
}
{
token_type t;
t.first = first;
t.last = iter;
t.line_num = line_num;
t.char_num = char_num;
t.word_num = word_num++;
t.identifier = token_type::identifier_type::right_paren;
result.push_back(std::move(t));
first = iter;
goto state_1;
}
state_4:;
if(iter == end){
token_type t;
t.first = first;
t.last = iter;
t.line_num = line_num;
t.char_num = char_num;
t.word_num = word_num++;
t.identifier = token_type::identifier_type::mul;
result.push_back(std::move(t));
goto end_of_tokenize;
}
{
token_type t;
t.first = first;
t.last = iter;
t.line_num = line_num;
t.char_num = char_num;
t.word_num = word_num++;
t.identifier = token_type::identifier_type::mul;
result.push_back(std::move(t));
first = iter;
goto state_1;
}
state_5:;
if(iter == end){
token_type t;
t.first = first;
t.last = iter;
t.line_num = line_num;
t.char_num = char_num;
t.word_num = word_num++;
t.identifier = token_type::identifier_type::add;
result.push_back(std::move(t));
goto end_of_tokenize;
}
{
token_type t;
t.first = first;
t.last = iter;
t.line_num = line_num;
t.char_num = char_num;
t.word_num = word_num++;
t.identifier = token_type::identifier_type::add;
result.push_back(std::move(t));
first = iter;
goto state_1;
}
state_6:;
if(iter == end){
token_type t;
t.first = first;
t.last = iter;
t.line_num = line_num;
t.char_num = char_num;
t.word_num = word_num++;
t.identifier = token_type::identifier_type::sub;
result.push_back(std::move(t));
goto end_of_tokenize;
}
{
token_type t;
t.first = first;
t.last = iter;
t.line_num = line_num;
t.char_num = char_num;
t.word_num = word_num++;
t.identifier = token_type::identifier_type::sub;
result.push_back(std::move(t));
first = iter;
goto state_1;
}
state_7:;
if(iter == end){
token_type t;
t.first = first;
t.last = iter;
t.line_num = line_num;
t.char_num = char_num;
t.word_num = word_num++;
t.identifier = token_type::identifier_type::div;
result.push_back(std::move(t));
goto end_of_tokenize;
}
{
token_type t;
t.first = first;
t.last = iter;
t.line_num = line_num;
t.char_num = char_num;
t.word_num = word_num++;
t.identifier = token_type::identifier_type::div;
result.push_back(std::move(t));
first = iter;
goto state_1;
}
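// State 8: one or more digits have been read; keep consuming digits, then emit an id token whose value comes from action.make_id.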
state_8:;
if(iter == end){
token_type t;
t.first = first;
t.last = iter;
t.line_num = line_num;
t.char_num = char_num;
t.word_num = word_num++;
t.identifier = token_type::identifier_type::id;
t.value = std::move(std::unique_ptr<lxq::semantic_data>(action.make_id(first, iter)));
result.push_back(std::move(t));
goto end_of_tokenize;
}
c = *iter;
switch(c){
case 48: case 49: case 50: case 51: case 52: case 53: case 54: case 55:
case 56: case 57:
++char_num;
++iter;
goto state_8;
}
{
token_type t;
t.first = first;
t.last = iter;
t.line_num = line_num;
t.char_num = char_num;
t.word_num = word_num++;
t.identifier = token_type::identifier_type::id;
t.value = std::move(std::unique_ptr<lxq::semantic_data>(action.make_id(first, iter)));
result.push_back(std::move(t));
first = iter;
goto state_1;
}
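// Append the end-of-input sentinel token (token_id::end).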
end_of_tokenize:;
{
token_type t;
t.first = iter;
t.last = iter;
t.line_num = 0;
t.char_num = 0;
t.word_num = 0;
t.identifier = token_type::identifier_type::end;
result.push_back(std::move(t));
}
return result;
}
};
#endif // LEXER_HPP_
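// lxq.hpp : shared runtime header defining the polymorphic base class for semantic values.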
#ifndef LXQ_HPP_
#define LXQ_HPP_
namespace lxq{
template<class T = void>
class semantic_data_proto{
public:
virtual ~semantic_data_proto() = default;
};
using semantic_data = semantic_data_proto<>;
}
#endif
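The generated lexer can also be driven on its own. Below is a minimal sketch of that usage as a separate translation unit, assuming lexer.hpp and lxq.hpp above; the null_action type and the sample input are illustrative, not part of the gist.

#include <iostream>
#include <string>
#include <vector>
#include "lxq.hpp"
#include "lexer.hpp"
// Illustrative action object: token values are not needed here, so make_id produces no semantic data.
struct null_action{
    template<class Iter>
    lxq::semantic_data *make_id(Iter, Iter){ return nullptr; }
};
int main(){
    std::string src = "(1+2)*3";
    null_action act;
    auto tokens = lexer<std::string::iterator>::tokenize(src.begin(), src.end(), act);
    // Print each token's numeric identifier and its text (the final end token is empty).
    for(auto const &t : tokens){
        std::cout << static_cast<int>(t.identifier) << " : "
                  << std::string(t.first, t.last) << std::endl;
    }
    return 0;
}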
// parser.hpp (auto-generated)
#ifndef PARSER_HPP_
#define PARSER_HPP_
#include <functional>
#include <exception>
#include <stdexcept>
#include <memory>
#include <sstream>
#include <vector>
#include <map>
#include <cstdlib>
#include <cassert>
#include "lxq.hpp"
#include "lexer.hpp"
namespace parser{
using semantic_data = lxq::semantic_data;
// parser
template<class Lexer, class SemanticDataProc>
class parser{
private:
using term = int;
using token_type = typename Lexer::token_type;
using arg_type = std::vector<std::unique_ptr<semantic_data>>;
using call_function = std::function<std::unique_ptr<semantic_data>(parser&, arg_type const&)>;
struct term_sequence_data{
std::size_t norm;
call_function call;
};
struct parsing_table_item{
enum class enum_action{
shift,
reduce,
accept
};
enum_action action;
std::size_t num;
};
struct parsing_data{
std::size_t first;
std::map<std::size_t, std::pair<term, term_sequence_data>> n2r;
std::map<std::size_t, std::map<term, parsing_table_item>> parsing_table;
std::map<std::size_t, std::map<term, std::size_t>> goto_table;
};
static parsing_data const &parsing_data_storage(){
auto init = [](){
return parsing_data{
0, // first
// n2r : rule number -> (lhs nonterminal id, { rhs length, semantic action }); -2 denotes the nonterminal E
decltype(parsing_data::n2r){
std::make_pair(
0,
std::make_pair(-2, term_sequence_data{ 3, [](parser &p, arg_type const &arg){ return call_make_mlt(p, arg[0], arg[2]); } })
),
std::make_pair(
1,
std::make_pair(-2, term_sequence_data{ 3, [](parser &p, arg_type const &arg){ return call_make_div(p, arg[0], arg[2]); } })
),
std::make_pair(
2,
std::make_pair(-2, term_sequence_data{ 3, [](parser &p, arg_type const &arg){ return call_make_add(p, arg[0], arg[2]); } })
),
std::make_pair(
3,
std::make_pair(-2, term_sequence_data{ 3, [](parser &p, arg_type const &arg){ return call_make_sub(p, arg[0], arg[2]); } })
),
std::make_pair(
4,
std::make_pair(-2, term_sequence_data{ 2, [](parser &p, arg_type const &arg){ return call_make_inv(p, arg[1]); } })
),
std::make_pair(
5,
std::make_pair(-2, term_sequence_data{ 3, [](parser &p, arg_type const &arg){ return call_identity(p, arg[1]); } })
),
std::make_pair(
6,
std::make_pair(-2, term_sequence_data{ 1, [](parser &p, arg_type const &arg){ return call_identity(p, arg[0]); } })
),
},
// parsing_table
decltype(parsing_data::parsing_table){
std::make_pair(
0,
std::map<term, parsing_table_item>{
std::make_pair(4, parsing_table_item{ parsing_table_item::enum_action::shift, 2 }),
std::make_pair(5, parsing_table_item{ parsing_table_item::enum_action::shift, 1 }),
std::make_pair(7, parsing_table_item{ parsing_table_item::enum_action::shift, 14 })
}
),
std::make_pair(
1,
std::map<term, parsing_table_item>{
std::make_pair(4, parsing_table_item{ parsing_table_item::enum_action::shift, 2 }),
std::make_pair(5, parsing_table_item{ parsing_table_item::enum_action::shift, 1 }),
std::make_pair(7, parsing_table_item{ parsing_table_item::enum_action::shift, 14 })
}
),
std::make_pair(
2,
std::map<term, parsing_table_item>{
std::make_pair(4, parsing_table_item{ parsing_table_item::enum_action::shift, 2 }),
std::make_pair(5, parsing_table_item{ parsing_table_item::enum_action::shift, 1 }),
std::make_pair(7, parsing_table_item{ parsing_table_item::enum_action::shift, 14 })
}
),
std::make_pair(
3,
std::map<term, parsing_table_item>{
std::make_pair(4, parsing_table_item{ parsing_table_item::enum_action::shift, 2 }),
std::make_pair(5, parsing_table_item{ parsing_table_item::enum_action::shift, 1 }),
std::make_pair(7, parsing_table_item{ parsing_table_item::enum_action::shift, 14 })
}
),
std::make_pair(
4,
std::map<term, parsing_table_item>{
std::make_pair(4, parsing_table_item{ parsing_table_item::enum_action::shift, 2 }),
std::make_pair(5, parsing_table_item{ parsing_table_item::enum_action::shift, 1 }),
std::make_pair(7, parsing_table_item{ parsing_table_item::enum_action::shift, 14 })
}
),
std::make_pair(
5,
std::map<term, parsing_table_item>{
std::make_pair(4, parsing_table_item{ parsing_table_item::enum_action::shift, 2 }),
std::make_pair(5, parsing_table_item{ parsing_table_item::enum_action::shift, 1 }),
std::make_pair(7, parsing_table_item{ parsing_table_item::enum_action::shift, 14 })
}
),
std::make_pair(
6,
std::map<term, parsing_table_item>{
std::make_pair(4, parsing_table_item{ parsing_table_item::enum_action::shift, 2 }),
std::make_pair(5, parsing_table_item{ parsing_table_item::enum_action::shift, 1 }),
std::make_pair(7, parsing_table_item{ parsing_table_item::enum_action::shift, 14 })
}
),
std::make_pair(
7,
std::map<term, parsing_table_item>{
std::make_pair(1, parsing_table_item{ parsing_table_item::enum_action::shift, 3 }),
std::make_pair(2, parsing_table_item{ parsing_table_item::enum_action::shift, 4 }),
std::make_pair(3, parsing_table_item{ parsing_table_item::enum_action::shift, 5 }),
std::make_pair(4, parsing_table_item{ parsing_table_item::enum_action::shift, 6 }),
std::make_pair(6, parsing_table_item{ parsing_table_item::enum_action::shift, 15 })
}
),
std::make_pair(
8,
std::map<term, parsing_table_item>{
std::make_pair(1, parsing_table_item{ parsing_table_item::enum_action::shift, 3 }),
std::make_pair(2, parsing_table_item{ parsing_table_item::enum_action::shift, 4 }),
std::make_pair(3, parsing_table_item{ parsing_table_item::enum_action::reduce, 4 }),
std::make_pair(4, parsing_table_item{ parsing_table_item::enum_action::reduce, 4 }),
std::make_pair(6, parsing_table_item{ parsing_table_item::enum_action::reduce, 4 }),
std::make_pair(2147483647, parsing_table_item{ parsing_table_item::enum_action::reduce, 4 })
}
),
std::make_pair(
9,
std::map<term, parsing_table_item>{
std::make_pair(1, parsing_table_item{ parsing_table_item::enum_action::reduce, 0 }),
std::make_pair(2, parsing_table_item{ parsing_table_item::enum_action::reduce, 0 }),
std::make_pair(3, parsing_table_item{ parsing_table_item::enum_action::reduce, 0 }),
std::make_pair(4, parsing_table_item{ parsing_table_item::enum_action::reduce, 0 }),
std::make_pair(6, parsing_table_item{ parsing_table_item::enum_action::reduce, 0 }),
std::make_pair(2147483647, parsing_table_item{ parsing_table_item::enum_action::reduce, 0 })
}
),
std::make_pair(
10,
std::map<term, parsing_table_item>{
std::make_pair(1, parsing_table_item{ parsing_table_item::enum_action::reduce, 1 }),
std::make_pair(2, parsing_table_item{ parsing_table_item::enum_action::reduce, 1 }),
std::make_pair(3, parsing_table_item{ parsing_table_item::enum_action::reduce, 1 }),
std::make_pair(4, parsing_table_item{ parsing_table_item::enum_action::reduce, 1 }),
std::make_pair(6, parsing_table_item{ parsing_table_item::enum_action::reduce, 1 }),
std::make_pair(2147483647, parsing_table_item{ parsing_table_item::enum_action::reduce, 1 })
}
),
std::make_pair(
11,
std::map<term, parsing_table_item>{
std::make_pair(1, parsing_table_item{ parsing_table_item::enum_action::shift, 3 }),
std::make_pair(2, parsing_table_item{ parsing_table_item::enum_action::shift, 4 }),
std::make_pair(3, parsing_table_item{ parsing_table_item::enum_action::reduce, 2 }),
std::make_pair(4, parsing_table_item{ parsing_table_item::enum_action::reduce, 2 }),
std::make_pair(6, parsing_table_item{ parsing_table_item::enum_action::reduce, 2 }),
std::make_pair(2147483647, parsing_table_item{ parsing_table_item::enum_action::reduce, 2 })
}
),
std::make_pair(
12,
std::map<term, parsing_table_item>{
std::make_pair(1, parsing_table_item{ parsing_table_item::enum_action::shift, 3 }),
std::make_pair(2, parsing_table_item{ parsing_table_item::enum_action::shift, 4 }),
std::make_pair(3, parsing_table_item{ parsing_table_item::enum_action::reduce, 3 }),
std::make_pair(4, parsing_table_item{ parsing_table_item::enum_action::reduce, 3 }),
std::make_pair(6, parsing_table_item{ parsing_table_item::enum_action::reduce, 3 }),
std::make_pair(2147483647, parsing_table_item{ parsing_table_item::enum_action::reduce, 3 })
}
),
std::make_pair(
13,
std::map<term, parsing_table_item>{
std::make_pair(1, parsing_table_item{ parsing_table_item::enum_action::shift, 3 }),
std::make_pair(2, parsing_table_item{ parsing_table_item::enum_action::shift, 4 }),
std::make_pair(3, parsing_table_item{ parsing_table_item::enum_action::shift, 5 }),
std::make_pair(4, parsing_table_item{ parsing_table_item::enum_action::shift, 6 }),
std::make_pair(2147483647, parsing_table_item{ parsing_table_item::enum_action::accept, 0 })
}
),
std::make_pair(
14,
std::map<term, parsing_table_item>{
std::make_pair(1, parsing_table_item{ parsing_table_item::enum_action::reduce, 6 }),
std::make_pair(2, parsing_table_item{ parsing_table_item::enum_action::reduce, 6 }),
std::make_pair(3, parsing_table_item{ parsing_table_item::enum_action::reduce, 6 }),
std::make_pair(4, parsing_table_item{ parsing_table_item::enum_action::reduce, 6 }),
std::make_pair(6, parsing_table_item{ parsing_table_item::enum_action::reduce, 6 }),
std::make_pair(2147483647, parsing_table_item{ parsing_table_item::enum_action::reduce, 6 })
}
),
std::make_pair(
15,
std::map<term, parsing_table_item>{
std::make_pair(1, parsing_table_item{ parsing_table_item::enum_action::reduce, 5 }),
std::make_pair(2, parsing_table_item{ parsing_table_item::enum_action::reduce, 5 }),
std::make_pair(3, parsing_table_item{ parsing_table_item::enum_action::reduce, 5 }),
std::make_pair(4, parsing_table_item{ parsing_table_item::enum_action::reduce, 5 }),
std::make_pair(6, parsing_table_item{ parsing_table_item::enum_action::reduce, 5 }),
std::make_pair(2147483647, parsing_table_item{ parsing_table_item::enum_action::reduce, 5 })
}
)
},
// goto_table
decltype(parsing_data::goto_table){
std::make_pair(
0,
std::map<term, std::size_t>{
std::make_pair(-2, 13)
}
),
std::make_pair(
1,
std::map<term, std::size_t>{
std::make_pair(-2, 7)
}
),
std::make_pair(
2,
std::map<term, std::size_t>{
std::make_pair(-2, 8)
}
),
std::make_pair(
3,
std::map<term, std::size_t>{
std::make_pair(-2, 9)
}
),
std::make_pair(
4,
std::map<term, std::size_t>{
std::make_pair(-2, 10)
}
),
std::make_pair(
5,
std::map<term, std::size_t>{
std::make_pair(-2, 11)
}
),
std::make_pair(
6,
std::map<term, std::size_t>{
std::make_pair(-2, 12)
}
),
std::make_pair(
7,
std::map<term, std::size_t>{}
),
std::make_pair(
8,
std::map<term, std::size_t>{}
),
std::make_pair(
9,
std::map<term, std::size_t>{}
),
std::make_pair(
10,
std::map<term, std::size_t>{}
),
std::make_pair(
11,
std::map<term, std::size_t>{}
),
std::make_pair(
12,
std::map<term, std::size_t>{}
),
std::make_pair(
13,
std::map<term, std::size_t>{}
),
std::make_pair(
14,
std::map<term, std::size_t>{}
),
std::make_pair(
15,
std::map<term, std::size_t>{}
)
}
};
};
static parsing_data data = init();
return data;
}
static std::unique_ptr<semantic_data> call_make_mlt(parser &p,std::unique_ptr<semantic_data> const &v_0, std::unique_ptr<semantic_data> const &v_1){
return std::move(std::unique_ptr<semantic_data>(p.semantic_data_proc.make_mlt(v_0.get(), v_1.get())));
}
static std::unique_ptr<semantic_data> call_make_div(parser &p,std::unique_ptr<semantic_data> const &v_0, std::unique_ptr<semantic_data> const &v_1){
return std::move(std::unique_ptr<semantic_data>(p.semantic_data_proc.make_div(v_0.get(), v_1.get())));
}
static std::unique_ptr<semantic_data> call_make_add(parser &p,std::unique_ptr<semantic_data> const &v_0, std::unique_ptr<semantic_data> const &v_1){
return std::move(std::unique_ptr<semantic_data>(p.semantic_data_proc.make_add(v_0.get(), v_1.get())));
}
static std::unique_ptr<semantic_data> call_make_sub(parser &p,std::unique_ptr<semantic_data> const &v_0, std::unique_ptr<semantic_data> const &v_1){
return std::move(std::unique_ptr<semantic_data>(p.semantic_data_proc.make_sub(v_0.get(), v_1.get())));
}
static std::unique_ptr<semantic_data> call_make_inv(parser &p,std::unique_ptr<semantic_data> const &v_0){
return std::move(std::unique_ptr<semantic_data>(p.semantic_data_proc.make_inv(v_0.get())));
}
static std::unique_ptr<semantic_data> call_identity(parser &p,std::unique_ptr<semantic_data> const &v_0){
return std::move(std::unique_ptr<semantic_data>(p.semantic_data_proc.identity(v_0.get())));
}
public:
SemanticDataProc &semantic_data_proc;
parser() = delete;
parser(SemanticDataProc &semantic_data_proc) : semantic_data_proc(semantic_data_proc){}
template<class InputIter>
bool parse(std::unique_ptr<semantic_data> &value, InputIter first, InputIter last){
parsing_data const &table = parsing_data_storage();
std::vector<std::size_t> state_stack;
std::vector<std::unique_ptr<semantic_data>> value_stack;
state_stack.push_back(table.first);
while(true){
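// Table-driven LR loop: look up the action for (current state, lookahead terminal).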
token_type &token = *first;
term const &t = static_cast<term>(token.identifier);
std::size_t s = state_stack.back();
auto const &table_second(table.parsing_table.find(s)->second);
auto iter = table_second.find(t);
if(iter == table_second.end()){
std::stringstream ss;
ss << "parsing error, 'no action'. :" << token.line_num << ":" << token.char_num;
throw std::runtime_error(ss.str());
}
parsing_table_item const &i = iter->second;
if(i.action == parsing_table_item::enum_action::shift){
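// Shift: push the next state, take ownership of the token's semantic value, and advance the input.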
state_stack.push_back(i.num);
value_stack.push_back(std::unique_ptr<semantic_data>(nullptr));
value_stack.back().swap(token.value);
++first;
}else if(i.action == parsing_table_item::enum_action::reduce){
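// Reduce: pop the rule's right-hand side, run its semantic action, then follow the goto table for its left-hand side.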
auto &p = *table.n2r.find(i.num);
std::size_t norm = p.second.second.norm;
state_stack.resize(state_stack.size() - norm);
if(state_stack.empty()){
std::stringstream ss;
ss << "parsing error, 'reduce'. :" << token.line_num << ":" << token.char_num;
throw std::runtime_error(ss.str());
}
std::vector<std::unique_ptr<semantic_data>> arg;
arg.reserve(norm);
for(std::size_t i = 0; i < norm; ++i){
arg.push_back(std::move(value_stack[value_stack.size() - norm + i]));
}
value_stack.resize(value_stack.size() - norm);
value_stack.push_back(std::move(p.second.second.call(*this, arg)));
state_stack.push_back(table.goto_table.find(state_stack.back())->second.find(p.second.first)->second);
}else if(i.action == parsing_table_item::enum_action::accept){
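// Accept: the input has been parsed; the single remaining entry in value_stack is the result.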
if(state_stack.empty()){
std::stringstream ss;
ss << "parsing error, 'accept'. :" << token.line_num << ":" << token.char_num;
throw std::runtime_error(ss.str());
}
break;
}
}
value = std::move(value_stack.front());
return true;
}
};
}
#endif
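// Hand-written driver (main): semantic data, semantic actions, and an example run.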
#include <iostream>
#include <string>
#include <memory>
#include <cstdlib>
#include "lxq.hpp"
#include "lexer.hpp"
#include "parser.hpp"
// Semantic data and semantic actions.
// This part is written by hand.
struct semantic_data : public lxq::semantic_data{
semantic_data(double value) : value(value){}
double value;
};
struct semantic_action{
double default_value(){
return 0.0;
}
// For the lexer.
// Generates a value from the matched string (range).
template<class Iter>
lxq::semantic_data *make_id(Iter first, Iter last){
return new semantic_data(static_cast<double>(std::atoi(std::string(first, last).c_str())));
}
// Addition.
lxq::semantic_data *make_add(lxq::semantic_data *x_, lxq::semantic_data *y_){
semantic_data *x = static_cast<semantic_data*>(x_);
semantic_data *y = static_cast<semantic_data*>(y_);
return new semantic_data(x->value + y->value);
}
// Subtraction.
lxq::semantic_data *make_sub(lxq::semantic_data *x_, lxq::semantic_data *y_){
semantic_data *x = static_cast<semantic_data*>(x_);
semantic_data *y = static_cast<semantic_data*>(y_);
return new semantic_data(x->value - y->value);
}
// Multiplication.
lxq::semantic_data *make_mlt(lxq::semantic_data *x_, lxq::semantic_data *y_){
semantic_data *x = static_cast<semantic_data*>(x_);
semantic_data *y = static_cast<semantic_data*>(y_);
return new semantic_data(x->value * y->value);
}
// Division.
lxq::semantic_data *make_div(lxq::semantic_data *x_, lxq::semantic_data *y_){
semantic_data *x = static_cast<semantic_data*>(x_);
semantic_data *y = static_cast<semantic_data*>(y_);
return new semantic_data(x->value / y->value);
}
// Identity.
lxq::semantic_data *identity(lxq::semantic_data *x){
return new semantic_data(static_cast<semantic_data*>(x)->value);
}
// Negates the sign.
lxq::semantic_data *make_inv(lxq::semantic_data *x_){
semantic_data *x = static_cast<semantic_data*>(x_);
return new semantic_data(-x->value);
}
};
int main(){
// The string to be parsed.
std::string str = "-1+2-3*4/5";
// Create an instance that holds the semantic actions.
semantic_action sa;
// Lexical analysis
using lexer = lexer<std::string::iterator>;
std::vector<lexer::token_type> result = lexer::tokenize(str.begin(), str.end(), sa);
// Syntax analysis
std::unique_ptr<lxq::semantic_data> ptr;
parser::parser<lexer, semantic_action> p(sa);
p.parse(ptr, result.begin(), result.end());
// -1 + 2 - 3*4/5 = -1.4
std::cout << static_cast<semantic_data*>(ptr.get())->value << std::endl;
return 0;
}