Commit 5020e4bd by Aaron Leung

More thorough parsing.

parent 71c31368
/* hey, a comment */
/* hey, another comment */ /* and another */
/* all of these should be kept */

@@ -23,7 +23,6 @@ namespace Sass {
     }
     position = source;
     line_number = 1;
-    last_munch_succeeded = false;
   }
   Document::~Document() {
@@ -18,17 +18,47 @@ namespace Sass {
     map<Token, Node> environment;
     vector<Node> statements;
-    Token top;
-    bool last_munch_succeeded;
+    Token lexed;
     Document(char* _path, char* _source = 0);
     ~Document();
     template <prelexer mx>
-    char* peek() { return mx(position); }
+    char* peek(char* start = 0)
+    {
+      if (!start) start = position;
+      char* after_whitespace;
+      if (mx == block_comment) {
+        after_whitespace =
+          zero_plus< alternatives<spaces, line_comment> >(start);
+      }
+      else if (mx == spaces || mx == ancestor_of) {
+        after_whitespace = spaces(start);
+        if (after_whitespace) {
+          return after_whitespace;
+        }
+        else {
+          return 0;
+        }
+      }
+      else if (mx == optional_spaces) {
+        after_whitespace = optional_spaces(start);
+      }
+      else {
+        after_whitespace = spaces_and_comments(start);
+      }
+      char* after_token = mx(after_whitespace);
+      if (after_token) {
+        return after_token;
+      }
+      else {
+        return 0;
+      }
+    }
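The new peek<mx>() leans on the prelexer convention used throughout the header: a matcher takes a character pointer and returns the position just past what it matched, or 0 on failure, so results can be chained and tested as booleans. Unlike lex<mx>() below, peek only reports where a match would end; it leaves position, lexed, and line_number untouched. A minimal self-contained sketch of that convention (hypothetical code, not part of the commit):

    #include <cctype>
    #include <cstdio>

    // A prelexer-style matcher: returns the position just past its match,
    // or 0 when nothing matches at src.  (Illustrative only.)
    typedef const char* (*prelexer)(const char*);

    const char* match_spaces(const char* src) {
      const char* p = src;
      while (*p && std::isspace((unsigned char)*p)) ++p;
      return p == src ? 0 : p;
    }

    int main() {
      prelexer mx = match_spaces;        // plays the role of the template parameter
      const char* src = "   div";
      const char* after = mx(src);       // peek-style: src itself is not advanced
      std::printf("%s\n", after ? after : "no match");   // prints "div"
    }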
     template <prelexer mx>
-    bool try_munching() {
+    char* lex()
+    {
       char* after_whitespace;
       if (mx == block_comment) {
         after_whitespace =
@@ -37,13 +67,12 @@ namespace Sass {
       else if (mx == spaces || mx == ancestor_of) {
         after_whitespace = spaces(position);
         if (after_whitespace) {
-          top = Token(position, after_whitespace);
           line_number += count_interval<'\n'>(position, after_whitespace);
-          position = after_whitespace;
-          return last_munch_succeeded = true;
+          lexed = Token(position, after_whitespace);
+          return position = after_whitespace;
         }
         else {
-          return last_munch_succeeded = false;
+          return 0;
         }
       }
       else if (mx == optional_spaces) {
@@ -52,26 +81,67 @@ namespace Sass {
       else {
         after_whitespace = spaces_and_comments(position);
       }
-      line_number += count_interval<'\n'>(position, after_whitespace);
       char* after_token = mx(after_whitespace);
       if (after_token) {
-        top = Token(after_whitespace, after_token);
-        position = after_token;
-        return last_munch_succeeded = true;
+        line_number += count_interval<'\n'>(position, after_token);
+        lexed = Token(after_whitespace, after_token);
+        return position = after_token;
       }
       else {
-        return last_munch_succeeded = false;
+        return 0;
       }
     }
+    // template <prelexer mx>
+    // bool lex() {
+    //   char* after_whitespace;
+    //   if (mx == block_comment) {
+    //     after_whitespace =
+    //       zero_plus< alternatives<spaces, line_comment> >(position);
+    //   }
+    //   else if (mx == spaces || mx == ancestor_of) {
+    //     after_whitespace = spaces(position);
+    //     if (after_whitespace) {
+    //       lexed = Token(position, after_whitespace);
+    //       line_number += count_interval<'\n'>(position, after_whitespace);
+    //       position = after_whitespace;
+    //       return true;
+    //     }
+    //     else {
+    //       return false;
+    //     }
+    //   }
+    //   else if (mx == optional_spaces) {
+    //     after_whitespace = optional_spaces(position);
+    //   }
+    //   else {
+    //     after_whitespace = spaces_and_comments(position);
+    //   }
+    //   line_number += count_interval<'\n'>(position, after_whitespace);
+    //   char* after_token = mx(after_whitespace);
+    //   if (after_token) {
+    //     lexed = Token(after_whitespace, after_token);
+    //     position = after_token;
+    //     return true;
+    //   }
+    //   else {
+    //     return false;
+    //   }
+    // }
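Where try_munching() returned a bool, the new lex<mx>() returns the updated cursor (`return position = after_token;`), so callers can still write `if (lex<...>())` while also getting the new position back. A stand-alone illustration of that idiom (hypothetical code, not from the commit):

    #include <cstdio>

    // Illustration of the "return position = after_token;" idiom: assigning
    // and returning in one expression lets callers treat the result either
    // as a success flag or as the new cursor.
    char buffer[] = "   width";
    char* position = buffer;

    char* lex_spaces() {
      char* p = position;
      while (*p == ' ') ++p;
      if (p == position) return 0;   // nothing matched: report failure, no state change
      return position = p;           // commit the new position and hand it back
    }

    int main() {
      if (lex_spaces()) std::printf("cursor now at: %s\n", position);  // "width"
    }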
     void parse_scss();
     Node parse_statement();
     Node parse_var_def();
     Node parse_ruleset();
+    Node parse_selector_group();
     Node parse_selector();
     Node parse_block();
+    Node parse_rule();
     Node parse_values();
+    char* look_for_rule(char* start = 0);
+    char* look_for_values(char* start = 0);
     string emit_css(CSS_Style style);
   };
 }
\ No newline at end of file
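Taken together, the interface above is presumably driven along these lines (a hypothetical sketch: the header name, the input path, and the CSS_Style argument are assumptions, not shown in this commit):

    #include <iostream>
    #include "document.hpp"   // assumed header name

    int main() {
      char path[] = "styles.scss";        // hypothetical input file
      Sass::Document doc(path);           // second constructor argument defaults to 0
      doc.parse_scss();                   // fills doc.statements via parse_statement()
      // std::cout << doc.emit_css(...);  // some CSS_Style value; the enum is not shown here
      return 0;
    }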
@@ -2,13 +2,8 @@
 namespace Sass {
-  void Document::parse_scss() {
-    // try_munching<optional_spaces>();
-    // while (*position) {
-    //   statements.push_back(parse_statement());
-    //   try_munching<optional_spaces>();
-    // }
+  void Document::parse_scss()
+  {
     lex<optional_spaces>();
     while(*position) {
       statements.push_back(parse_statement());
@@ -16,15 +11,8 @@ namespace Sass {
     }
   }
-  Node Document::parse_statement() {
-    // if (try_munching<block_comment>()) {
-    //   return Node(line_number, Node::comment, top);
-    // }
-    // else if (try_munching<variable>()) {
-    //   return parse_var_def();
-    // }
-    // else return parse_ruleset();
+  Node Document::parse_statement()
+  {
     if (lex<block_comment>()) {
       return Node(line_number, Node::comment, lexed);
     }
@@ -34,13 +22,8 @@ namespace Sass {
     else return parse_ruleset();
   }
-  Node Document::parse_var_def() {
-    // const Token key(top);
-    // try_munching<exactly<':'> >();
-    // environment[key] = parse_values();
-    // try_munching<exactly<';'> >();
-    // return Node(line_number, Node::nil, top);
+  Node Document::parse_var_def()
+  {
     const Token key(lexed);
     lex< exactly<':'> >();
     environment[key] = parse_values();
@@ -48,69 +31,124 @@ namespace Sass {
     return Node();
   }
-  Node Document::parse_ruleset() {
+  Node Document::parse_ruleset()
+  {
     Node ruleset(line_number, Node::ruleset, 2);
-    ruleset << parse_selector();
+    ruleset << parse_selector_group();
     ruleset << parse_block();
     return ruleset;
   }
+  Node Document::parse_selector_group()
+  {
+    Node group(line_number, Node::selector_group, 1);
+    group << parse_selector();
+    while (lex< exactly<','> >()) group << parse_selector();
+    return group;
+  }
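parse_selector_group() reads one selector and then keeps appending another for each comma it can lex, so a comma-separated selector list becomes one group node. Roughly (illustrative sketch; the exact Node layout is assumed from the constructors used above):

    // Illustrative only: for "h1, h2, h3 { ... }" the loop above should
    // yield a node shaped roughly like this (one selector child per item):
    //
    //   selector_group
    //     selector "h1"
    //     selector "h2"
    //     selector "h3"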
-  Node Document::parse_selector() {
-    try_munching<identifier>();
-    return Node(line_number, Node::selector, top);
+  Node Document::parse_selector()
+  {
+    lex<identifier>();
+    return Node(line_number, Node::selector, lexed);
   }
-  Node Document::parse_block() {
-    try_munching<exactly<'{'> >();
-    Node decls(line_number, Node::block);
-    while(!try_munching<exactly<'}'> >()) {
-      if (try_munching<block_comment>()) {
-        decls << Node(line_number, Node::comment, top);
+  Node Document::parse_block()
+  {
+    lex< exactly<'{'> >();
+    Node block(line_number, Node::block);
+    while (!lex< exactly<'}'> >()) {
+      if (lex< block_comment >()) {
+        block << Node(line_number, Node::comment, lexed);
+        block.has_comments = true;
         continue;
       }
-      else if (try_munching<variable>()) {
-        decls << parse_var_def();
+      else if (lex< variable >()) {
+        block << parse_var_def();
         continue;
       }
-      try_munching<identifier>();
-      Token id = top;
-      if (try_munching<exactly<':'> >()) {
-        Node rule(line_number, Node::rule, 2);
-        rule << Node(line_number, Node::property, id);
-        rule << parse_values();
-        decls << rule;
-        decls.has_rules = true;
-        try_munching<exactly<';'> >();
+      else if (look_for_rule()) {
+        block << parse_rule();
+        block.has_rules = true;
+        continue;
       }
       else {
-        Node ruleset(line_number, Node::ruleset, 2);
-        ruleset << Node(line_number, Node::selector, id);
-        ruleset << parse_block();
-        decls << ruleset;
-        decls.has_rulesets = true;
+        block << parse_ruleset();
+        block.has_rulesets = true;
+        continue;
       }
     }
-    return decls;
+    return block;
+    // lex< identifier >();
+    // // Token id(lexed);
+    // if (peek< exactly<':'> >()) {
+    //   Node rule(line_number, Node::rule, 2);
+    //   rule << Node(line_number, Node::property, lexed);
+    //   lex< exactly<':'> >();
+    //   rule << parse_values();
+    //   block << rule;
+    //   block.has_rules = true;
+    //   lex< exactly<';'> >();
+    // }
+    // else {
+    //   Node ruleset(line_number, Node::ruleset, 2);
+    //   ruleset << Node(line_number, Node::selector, lexed);
+    //   ruleset << parse_block();
+    //   block << ruleset;
+    //   block.has_rulesets = true;
+    // }
+    // }
+    // return block;
+  }
+  Node Document::parse_rule() {
+    Node rule(line_number, Node::rule, 2);
+    lex< identifier >();
+    rule << Node(line_number, Node::property, lexed);
+    lex< exactly<':'> >();
+    rule << parse_values();
+    return rule;
   }
-  Node Document::parse_values() {
+  Node Document::parse_values()
+  {
     Node values(line_number, Node::values);
-    while(try_munching<identifier>() || try_munching<dimension>() ||
-          try_munching<percentage>() || try_munching<number>() ||
-          try_munching<hex>() || try_munching<string_constant>() ||
-          try_munching<variable>()) {
-      if (top.begin[0] == '$') {
-        Node stuff(environment[top]);
-        for (int i = 0; i < stuff.children->size(); ++i) {
-          values << stuff.children->at(i);
+    while (lex< identifier >() || lex < dimension >() ||
+           lex< percentage >() || lex < number >() ||
+           lex< hex >() || lex < string_constant >() ||
+           lex< variable >()) {
+      if (lexed.begin[0] == '$') {
+        Node fetched(environment[lexed]);
+        for (int i = 0; i < fetched.children->size(); ++i) {
+          values << fetched.children->at(i);
         }
       }
-      else
-      {
-        values << Node(line_number, Node::value, top);
+      else {
+        values << Node(line_number, Node::value, lexed);
       }
     }
     return values;
+  }
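For reference, parse_var_def() stores the value list it parses under its `$name` token, and the '$' branch above splices that stored list back into the values being built. A self-contained model of that substitution (hypothetical code using standard containers instead of Node; not the commit's code):

    #include <iostream>
    #include <map>
    #include <string>
    #include <vector>

    // Model of the environment lookup: "$name" maps to the tokens it was
    // defined with, and a reference to it is replaced by that whole list,
    // while ordinary value tokens pass straight through.
    int main() {
      std::map<std::string, std::vector<std::string> > environment;
      environment["$default-border"].push_back("1px");     // $default-border: 1px solid;
      environment["$default-border"].push_back("solid");

      const char* input[] = { "$default-border", "red" };  // border: $default-border red;
      std::vector<std::string> values;
      for (int i = 0; i < 2; ++i) {
        std::string tok = input[i];
        if (tok[0] == '$') {
          const std::vector<std::string>& fetched = environment[tok];
          values.insert(values.end(), fetched.begin(), fetched.end());
        }
        else values.push_back(tok);
      }
      for (size_t i = 0; i < values.size(); ++i) std::cout << values[i] << ' ';
      std::cout << '\n';                                    // prints: 1px solid red
    }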
+  char* Document::look_for_rule(char* start)
+  {
+    char* p = start ? start : position;
+    (p = peek< identifier >(p)) &&
+    (p = peek< exactly<':'> >(p)) &&
+    (p = look_for_values(p)) &&
+    (p = peek< alternatives< exactly<';'>, exactly<'}'> > >(p));
+    return p;
+  }
+  char* Document::look_for_values(char* start)
+  {
+    char* p = start ? start : position;
+    char* q;
+    while ((q = peek< identifier >(p)) || (q = peek< dimension >(p)) ||
+           (q = peek< percentage >(p)) || (q = peek< number >(p)) ||
+           (q = peek< hex >(p)) || (q = peek< string_constant >(p)) ||
+           (q = peek< variable >(p)))
+    { p = q; }
+    return p == start ? 0 : p;
   }
 }
\ No newline at end of file
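The new lookahead pair is what makes the parsing "more thorough": inside a block, a property declaration and a nested ruleset can both begin with identifier ':' (compare `color: red;` with `a:hover { ... }`), so parse_block() now peeks through the would-be values all the way to a terminating ';' or '}' before committing to parse_rule(). A self-contained sketch of the &&-chained, non-consuming peek idiom (hypothetical simplified matchers, not the commit's prelexers):

    #include <cctype>
    #include <cstdio>

    // Simplified matchers: each peeks from p and returns the position just
    // past the match, or 0.  Nothing is ever consumed.
    const char* peek_identifier(const char* p) {
      const char* q = p;
      while (std::isalpha((unsigned char)*q) || *q == '-') ++q;
      return q == p ? 0 : q;
    }
    const char* peek_char(const char* p, char c) { return *p == c ? p + 1 : 0; }
    const char* skip_spaces(const char* p) { while (*p == ' ') ++p; return p; }

    // Very rough analogue of look_for_rule(): "identifier ':' value ';'".
    // The && chain short-circuits to 0 as soon as one step fails; only
    // pointers are threaded through.
    const char* looks_like_rule(const char* p) {
      (p = peek_identifier(p)) &&
      (p = peek_char(p, ':')) &&
      (p = peek_identifier(skip_spaces(p))) &&
      (p = peek_char(skip_spaces(p), ';'));
      return p;
    }

    int main() {
      std::printf("%d\n", looks_like_rule("color: red;") != 0);  // 1 -> parse_rule
      std::printf("%d\n", looks_like_rule("a:hover { }") != 0);  // 0 -> parse_ruleset
    }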
@@ -94,23 +94,24 @@ namespace Sass {
     vector<Node>* nodes;
     if (type == ruleset) {
       nodes = children->at(1).children;
+      has_comments = children->at(1).has_comments;
       has_rules = children->at(1).has_rules;
       has_rulesets = children->at(1).has_rulesets;
     }
     switch (type) {
     case ruleset:
-      if (has_rules) {
+      if (has_comments || has_rules) {
         buf << indentation;
-        children->at(0).emit_nested_css(buf, prefix, depth); // selector
+        children->at(0).emit_nested_css(buf, prefix, depth); // selector group
         buf << " {";
         for (int i = 0; i < nodes->size(); ++i) {
-          if (nodes->at(i).type == rule) nodes->at(i).emit_nested_css(buf, "", depth + 1); // rules
+          if (nodes->at(i).type == comment || nodes->at(i).type == rule) nodes->at(i).emit_nested_css(buf, "", depth + 1); // rules
         }
         buf << " }" << endl;
       }
       if (has_rulesets) {
         for (int i = 0; i < nodes->size(); ++i) { // do each nested ruleset
-          if (nodes->at(i).type == ruleset) nodes->at(i).emit_nested_css(buf, prefix + (prefix.empty() ? "" : " ") + string((*children)[0].token), depth + (has_rules ? 1 : 0));
+          if (nodes->at(i).type == ruleset) nodes->at(i).emit_nested_css(buf, prefix + (prefix.empty() ? "" : " ") + string((*children)[0].token), depth + (has_comments || has_rules ? 1 : 0));
         }
       }
       if (depth == 0 && prefix.empty()) buf << endl;
@@ -129,6 +130,10 @@ namespace Sass {
         buf << " " << string((*children)[i].token);
       }
       break;
+    case selector_group:
+      // UNFINISHED
+      children->at(0).emit_nested_css(buf, prefix, depth);
+      break;
     case selector:
       buf << prefix << (prefix.empty() ? "" : " ") << string(token);
       break;
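The has_comments flag threaded through here widens the condition for emitting a braced block: a ruleset whose block holds only comments previously produced no output, because has_rules stayed false and comment nodes were skipped in the inner loop. Roughly (illustrative, whitespace approximate):

    // Illustrative only (SCSS input on the left, emitted CSS on the right):
    //
    //   div {                    div { /* keep me */ }
    //     /* keep me */
    //   }
    //
    // Before this change the ruleset had no rules, so the whole
    // "div { ... }" block was skipped and the comment was dropped.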
@@ -69,11 +69,12 @@ namespace Sass {
     mutable vector<Node>* children;
     Token token;
     Type type;
+    bool has_comments;
     bool has_rules;
     bool has_rulesets;
     bool has_propsets;
-    Node() { ++fresh; }
+    Node() : type(nil), children(0) { ++fresh; }
     Node(const Node& n)
     : line_number(n.line_number),

@@ -12,16 +12,15 @@ namespace Sass {
     Token();
     Token(const char* _begin, const char* _end);
-    inline bool is_null() const {
-      return begin == 0 || end == 0 || begin >= end;
-    }
-    inline operator string() const {
-      return string(begin, end - begin);
-    }
+    inline operator string() const
+    { return string(begin, end - begin); }
     void stream_unquoted(std::stringstream& buf) const;
     bool operator<(const Token& rhs) const;
+    operator bool()
+    { return begin && end && begin >= end; }
   };
 }
\ No newline at end of file
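One detail worth flagging: the added operator bool() returns true exactly when begin >= end, which is the condition the removed is_null() used to mean "empty or invalid token". If the conversion is meant to read as "this token holds text", the comparison presumably wants to point the other way (a sketch, not the committed code):

    // Sketch only: a Token-to-bool conversion consistent with the removed
    // is_null() test, true when the token spans at least one character.
    operator bool() const
    { return begin && end && begin < end; }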