filter lambda support
This commit is contained in: parent bfa6d161bb · commit 0ddf2cd121
@@ -32,8 +32,21 @@ namespace mstch {
 };
 }
 
+using renderer = std::function<std::string(const std::string&)>;
+class lambda {
+ public:
+  lambda(std::function<std::string(const std::string&,renderer)> fun): fun(fun) {
+  }
+
+  std::string operator()(const std::string& text, renderer renderer) const {
+    return fun(text, renderer);
+  }
+ private:
+  std::function<std::string(const std::string&,renderer)> fun;
+};
+
 using node = boost::make_recursive_variant<
-    boost::blank, std::string, int, bool,
+    boost::blank, std::string, int, bool, lambda,
     std::shared_ptr<internal::object_t<boost::recursive_variant_>>,
     std::map<const std::string,boost::recursive_variant_>,
     std::vector<boost::recursive_variant_>>::type;
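With the lambda type added to the node variant, a callable can be stored in a context just like a string or a map. A minimal usage sketch, not part of this commit (the header path is assumed, and the identity renderer below is only a stand-in for the library's own):

#include <mstch/mstch.hpp>  // assumed header path
#include <algorithm>
#include <cctype>
#include <iostream>
#include <string>

int main() {
  // A filter that upper-cases whatever the inner section renders to.
  mstch::lambda shout{[](const std::string& text, mstch::renderer render) {
    std::string s = render(text);
    std::transform(s.begin(), s.end(), s.begin(),
                   [](unsigned char c) { return std::toupper(c); });
    return s;
  }};
  // A lambda is now a valid alternative of the node variant.
  mstch::node n = shout;
  // Invoke it directly with an identity renderer to exercise operator().
  std::cout << shout("hello", [](const std::string& s) { return s; })
            << std::endl;  // prints: HELLO
}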
@@ -1,4 +1,4 @@
-find_package(Boost 1.54 REQUIRED)
+find_package(Boost 1.46 REQUIRED)
 
 include_directories(
     ${CMAKE_SOURCE_DIR}/include
@@ -12,7 +12,7 @@ state::in_section::in_section(type type, const std::string& section_name):
 
 std::string state::in_section::render(render_context& ctx, const token& token) {
   if(token.token_type() == token::type::section_close) {
-    if(token.content() == section_name && skipped_openings == 0) {
+    if(token.name() == section_name && skipped_openings == 0) {
       auto& node = ctx.get_node(section_name);
       std::string out;
       if(m_type == type::normal) {
@@ -10,23 +10,23 @@ std::string state::outside_section::render(
 {
   switch(token.token_type()) {
     case token::type::section_open:
-      ctx.set_state<in_section>(in_section::type::normal, token.content());
+      ctx.set_state<in_section>(in_section::type::normal, token.name());
       break;
     case token::type::inverted_section_open:
-      ctx.set_state<in_section>(in_section::type::inverted, token.content());
+      ctx.set_state<in_section>(in_section::type::inverted, token.name());
       break;
     case token::type::variable:
       return boost::apply_visitor(
           visitor::render_node(visitor::render_node::flag::escape_html),
-          ctx.get_node(token.content()));
+          ctx.get_node(token.name()));
     case token::type::unescaped_variable:
       return boost::apply_visitor(
           visitor::render_node(visitor::render_node::flag::none),
-          ctx.get_node(token.content()));
+          ctx.get_node(token.name()));
     case token::type::text:
-      return token.content();
+      return token.raw();
     case token::type::partial:
-      return ctx.render_partial(token.content());
+      return ctx.render_partial(token.name());
     default: break;
   }
   return "";
@@ -13,7 +13,6 @@ void template_type::tokenize(const std::string& t) {
   std::string::const_iterator tok_end, tok_start = t.begin();
   parse_state pstate = parse_state::start;
   unsigned int del_pos = 0;
-  bool ws_only = true;
   for (std::string::const_iterator it = t.begin(); it != t.end(); ++it) {
     if (pstate == parse_state::start) {
       if (*it == delim_start[0]) {
@@ -21,11 +20,8 @@ void template_type::tokenize(const std::string& t) {
         tok_end = it;
         del_pos = 1;
       } else if(*it == '\n') {
-        tokens.push_back({false, true, ws_only, {tok_start, it + 1}});
-        ws_only = true;
+        tokens.push_back({{tok_start, it + 1}});
         tok_start = it + 1;
-      } else if (*it != ' ' && *it != '\t') {
-        ws_only = false;
       }
     } else if(pstate == parse_state::in_del_start) {
       if (*it == delim_start[del_pos] && ++del_pos == delim_start.size())
@@ -47,17 +43,15 @@ void template_type::tokenize(const std::string& t) {
     } else if(pstate == parse_state::in_del_end) {
       if (*it == delim_end[del_pos] && ++del_pos == delim_end.size()) {
         pstate = parse_state::start;
-        tokens.push_back({false, false, ws_only, {tok_start, tok_end}});
-        tokens.push_back({true, false, false,
-            {tok_end+delim_start.size(), it-delim_end.size()+1}});
-        ws_only = true;
+        tokens.push_back({{tok_start, tok_end}});
+        tokens.push_back({{tok_end, it + 1}, delim_start.size(), delim_end.size()});
         tok_start = it + 1;
       } else {
         pstate = parse_state::start;
       }
     }
   }
-  tokens.push_back({false, false, ws_only, {tok_start, t.end()}});
+  tokens.push_back({{tok_start, t.end()}});
 }
 
 void template_type::strip_whitespace() {
@@ -71,13 +65,17 @@ void template_type::strip_whitespace() {
     else if (!(*it).ws_only())
       non_space = true;
     if ((*it).eol()) {
-      if (has_tag && !non_space)
-        for (auto line_it = line_begin; line_it != it + 1; ++line_it)
-          if ((*line_it).ws_only()) (*line_it).mark();
+      if (has_tag && !non_space) {
+        auto line_it = line_begin;
+        for (; !(*line_it).eol(); ++line_it)
+          if ((*line_it).ws_only())
+            line_it = tokens.erase(line_it);
+        if ((*line_it).ws_only())
+          line_it = tokens.erase(line_it);
+        it = line_it - 1;
+      }
       non_space = has_tag = false;
       line_begin = it + 1;
     }
   }
-  for (auto it = tokens.begin(); it != tokens.end();)
-    ((*it).marked())?(it = tokens.erase(it)):(++it);
 }
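The reworked strip_whitespace drops the old mark-and-sweep pass and instead erases whitespace-only tokens in place while walking a line. For readers unfamiliar with the pattern, a self-contained illustration of erase-while-iterating on a std::vector (not from the commit):

#include <iostream>
#include <vector>

int main() {
  std::vector<int> v{1, 2, 3, 4, 5, 6};
  // vector::erase returns the iterator following the removed element,
  // so only advance the iterator when nothing was erased.
  for (auto it = v.begin(); it != v.end();) {
    if (*it % 2 == 0)
      it = v.erase(it);
    else
      ++it;
  }
  for (int x : v) std::cout << x << ' ';  // prints: 1 3 5
  std::cout << std::endl;
}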
@@ -15,23 +15,24 @@ token::type token::token_info(char c) {
   }
 }
 
-token::token(bool is_tag, bool eol, bool ws_only, const std::string& str):
-  m_eol(eol), m_ws_only(ws_only), m_marked(false)
+token::token(const std::string& str, std::size_t skip_left, std::size_t skip_right):
+  m_eol(false), m_ws_only(false), m_raw(str)
 {
-  if(is_tag) {
-    if(str[0] == '{' && str[str.size() - 1] == '}') {
+  if(skip_left != 0 && skip_right != 0) {
+    if(str[skip_left] == '{' && str[str.size() - skip_right - 1] == '}') {
       m_type = type::unescaped_variable;
-      m_content = {first_not_ws(str.begin() + 1, str.end()),
-          first_not_ws(str.rbegin() + 1, str.rend()) + 1};
+      m_name = {first_not_ws(str.begin() + skip_left + 1, str.end() - skip_right),
+          first_not_ws(str.rbegin() + 1 + skip_right, str.rend() - skip_left) + 1};
     } else {
-      auto first = first_not_ws(str.begin(), str.end());
+      auto first = first_not_ws(str.begin() + skip_left, str.end() - skip_right);
       m_type = token_info(*first);
       if(m_type != type::variable)
-        first = first_not_ws(first + 1, str.end());
-      m_content = {first, first_not_ws(str.rbegin(), str.rend()) + 1};
+        first = first_not_ws(first + 1, str.end() - skip_right);
+      m_name = {first, first_not_ws(str.rbegin() + skip_right, str.rend() - skip_left) + 1};
     }
   } else {
     m_type = type::text;
-    m_content = str;
+    m_eol = (str.size() > 0 && str[str.size() - 1] == '\n');
+    m_ws_only = (str.find_first_not_of(" \n\t") == std::string::npos);
   }
 }
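A rough sketch of what the reworked constructor yields for typical input (illustrative only, not part of the commit; the internal header path and exact values are assumptions read off the code above). Tag tokens now carry the full raw text, with skip_left and skip_right giving the delimiter lengths to strip when extracting the tag name:

#include <cassert>
#include "token.hpp"  // internal header of the library, path assumed

void token_examples() {
  // Tag token: skip counts match the "{{" and "}}" delimiters.
  mstch::token tag{"{{name}}", 2, 2};
  assert(tag.token_type() == mstch::token::type::variable);
  assert(tag.name() == "name");      // delimiters and padding stripped
  assert(tag.raw() == "{{name}}");   // raw text kept, e.g. for lambda sections

  // Text token: no skips, so it is classified as plain text.
  mstch::token text{"  \n"};
  assert(text.eol());        // ends with a newline
  assert(text.ws_only());    // whitespace only
}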
@@ -9,19 +9,18 @@ namespace mstch {
     text, variable, section_open, section_close, inverted_section_open,
     unescaped_variable, comment, partial
   };
-  token(bool is_tag, bool eol, bool ws_only, const std::string& str);
+  token(const std::string& str, std::size_t skip_left = 0, std::size_t skip_right = 0);
   type token_type() const { return m_type; };
-  const std::string& content() const { return m_content; };
+  const std::string& raw() const { return m_raw; };
+  const std::string& name() const { return m_name; };
   bool eol() const { return m_eol; }
   bool ws_only() const { return m_ws_only; }
-  bool marked() const { return m_marked; }
-  void mark() { m_marked = true; };
 private:
   type m_type;
-  std::string m_content;
+  std::string m_name;
+  std::string m_raw;
   bool m_eol;
   bool m_ws_only;
-  bool m_marked;
   type token_info(char c);
 };
 }
@@ -28,6 +28,18 @@ namespace mstch {
     flag m_flag;
   };
 
+  template<> inline
+  std::string render_section::operator()<lambda>(const lambda& lam) const {
+    std::string section_str;
+    for(auto& token: section)
+      section_str += token.raw();
+    return lam(section_str, [this](const std::string& str) {
+      std::cout << str << std::endl;
+      std::cout << ctx.render(template_type{str}) << std::endl;
+      return ctx.render(template_type{str});
+    });
+  }
+
   template<> inline
   std::string render_section::operator()<array>(const array& arr) const {
     std::string out;
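The specialization above concatenates the section's raw tokens and hands them to the lambda together with a renderer that re-parses whatever string the lambda asks to expand. A sketch of the equivalent data flow with the context renderer stubbed out (illustrative only; the substitution below merely mimics what ctx.render(template_type{str}) would do with {"name": "world"} in scope):

#include <mstch/mstch.hpp>  // assumed header path
#include <iostream>
#include <string>

int main() {
  // For a template like "{{#bold}}Hello {{name}}{{/bold}}", the raw tokens
  // of the section concatenate to:
  std::string section_str = "Hello {{name}}";

  mstch::lambda bold{[](const std::string& text, mstch::renderer render) {
    return "<b>" + render(text) + "</b>";
  }};

  // Stand-in for the real renderer: pretend {{name}} expands to "world".
  auto fake_render = [](const std::string& s) {
    std::string out = s;
    auto pos = out.find("{{name}}");
    if (pos != std::string::npos)
      out.replace(pos, 8, "world");
    return out;
  };

  std::cout << bold(section_str, fake_render) << std::endl;  // <b>Hello world</b>
}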
@@ -1,4 +1,4 @@
-find_package(Boost 1.54 COMPONENTS program_options REQUIRED)
+find_package(Boost 1.46 COMPONENTS program_options REQUIRED)
 
 include_directories(
     ${CMAKE_CURRENT_BINARY_DIR}
@@ -8,10 +8,9 @@ public:
   }
 
   mstch::node bold() {
-    return std::string{""};
-    /*return [](const std::string& text, mstch::renderer render) {
+    return mstch::lambda{[](const std::string& text, mstch::renderer render) {
       return std::string{"<b>"} + render(text) + std::string{"</b>"};
-    };*/
+    }};
   };
 
   mstch::node person() {