Commit 5020e4bd by Aaron Leung

More thorough parsing.

parent 71c31368
/* hey, a comment */
/* hey, another comment */ /* and another */
/* all of these should be kept */
@@ -23,7 +23,6 @@ namespace Sass {
}
position = source;
line_number = 1;
last_munch_succeeded = false;
}
Document::~Document() {
...
@@ -18,17 +18,47 @@ namespace Sass {
map<Token, Node> environment;
vector<Node> statements;
Token top;
bool last_munch_succeeded;
Token lexed;
Document(char* _path, char* _source = 0);
~Document();
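// peek<mx> looks ahead for the token matched by the prelexer mx, starting at
// `start` (or at the current position when no argument is given). It skips the
// same leading whitespace/comments as lex<mx> below, but consumes nothing:
// position, line_number, and lexed are left untouched. It returns a pointer
// just past the prospective match, or 0 if mx does not match there.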
template <prelexer mx>
char* peek() { return mx(position); }
char* peek(char* start = 0)
{
if (!start) start = position;
char* after_whitespace;
if (mx == block_comment) {
after_whitespace =
zero_plus< alternatives<spaces, line_comment> >(start);
}
else if (mx == spaces || mx == ancestor_of) {
after_whitespace = spaces(start);
if (after_whitespace) {
return after_whitespace;
}
else {
return 0;
}
}
else if (mx == optional_spaces) {
after_whitespace = optional_spaces(start);
}
else {
after_whitespace = spaces_and_comments(start);
}
char* after_token = mx(after_whitespace);
if (after_token) {
return after_token;
}
else {
return 0;
}
}
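// lex<mx> is the consuming counterpart of peek<mx>: it skips the same leading
// whitespace/comments, records the matched range in `lexed`, advances
// `position` past it, and returns the new position (0 if mx does not match);
// for most token types it also counts the newlines it crosses into `line_number`.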
template <prelexer mx>
bool try_munching() {
char* lex()
{
char* after_whitespace;
if (mx == block_comment) {
after_whitespace =
@@ -37,13 +67,12 @@ namespace Sass {
else if (mx == spaces || mx == ancestor_of) {
after_whitespace = spaces(position);
if (after_whitespace) {
top = Token(position, after_whitespace);
line_number += count_interval<'\n'>(position, after_whitespace);
position = after_whitespace;
return last_munch_succeeded = true;
lexed = Token(position, after_whitespace);
return position = after_whitespace;
}
else {
return last_munch_succeeded = false;
return 0;
}
}
else if (mx == optional_spaces) {
@@ -52,26 +81,67 @@ namespace Sass {
else {
after_whitespace = spaces_and_comments(position);
}
line_number += count_interval<'\n'>(position, after_whitespace);
char* after_token = mx(after_whitespace);
if (after_token) {
top = Token(after_whitespace, after_token);
position = after_token;
return last_munch_succeeded = true;
line_number += count_interval<'\n'>(position, after_token);
lexed = Token(after_whitespace, after_token);
return position = after_token;
}
else {
return last_munch_succeeded = false;
return 0;
}
}
// template <prelexer mx>
// bool lex() {
// char* after_whitespace;
// if (mx == block_comment) {
// after_whitespace =
// zero_plus< alternatives<spaces, line_comment> >(position);
// }
// else if (mx == spaces || mx == ancestor_of) {
// after_whitespace = spaces(position);
// if (after_whitespace) {
// lexed = Token(position, after_whitespace);
// line_number += count_interval<'\n'>(position, after_whitespace);
// position = after_whitespace;
// return true;
// }
// else {
// return false;
// }
// }
// else if (mx == optional_spaces) {
// after_whitespace = optional_spaces(position);
// }
// else {
// after_whitespace = spaces_and_comments(position);
// }
// line_number += count_interval<'\n'>(position, after_whitespace);
// char* after_token = mx(after_whitespace);
// if (after_token) {
// lexed = Token(after_whitespace, after_token);
// position = after_token;
// return true;
// }
// else {
// return false;
// }
// }
void parse_scss();
Node parse_statement();
Node parse_var_def();
Node parse_ruleset();
Node parse_selector_group();
Node parse_selector();
Node parse_block();
Node parse_rule();
Node parse_values();
char* look_for_rule(char* start = 0);
char* look_for_values(char* start = 0);
string emit_css(CSS_Style style);
};
}
\ No newline at end of file
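
For orientation, a minimal sketch of how this interface might be driven end to end. The header name, the assumption that the constructor loads the file when no source buffer is passed, and the `nested` enumerator for CSS_Style are illustrative guesses, not part of this commit:

#include "document.hpp"   // assumed header name for the Document class above
#include <iostream>
#include <string>

int main() {
  char path[] = "styles.scss";             // hypothetical input file
  Sass::Document doc(path);                // assumes the ctor reads the file when _source is 0
  doc.parse_scss();                        // builds `statements` using lex<> and peek<>
  std::cout << doc.emit_css(Sass::nested); // assumes CSS_Style declares a `nested` enumerator
  return 0;
}
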
@@ -2,13 +2,8 @@
namespace Sass {
void Document::parse_scss() {
// try_munching<optional_spaces>();
// while (*position) {
// statements.push_back(parse_statement());
// try_munching<optional_spaces>();
// }
void Document::parse_scss()
{
lex<optional_spaces>();
while(*position) {
statements.push_back(parse_statement());
@@ -16,15 +11,8 @@ namespace Sass {
}
}
Node Document::parse_statement() {
// if (try_munching<block_comment>()) {
// return Node(line_number, Node::comment, top);
// }
// else if (try_munching<variable>()) {
// return parse_var_def();
// }
// else return parse_ruleset();
Node Document::parse_statement()
{
if (lex<block_comment>()) {
return Node(line_number, Node::comment, lexed);
}
@@ -34,13 +22,8 @@ namespace Sass {
else return parse_ruleset();
}
Node Document::parse_var_def() {
// const Token key(top);
// try_munching<exactly<':'> >();
// environment[key] = parse_values();
// try_munching<exactly<';'> >();
// return Node(line_number, Node::nil, top);
Node Document::parse_var_def()
{
const Token key(lexed);
lex< exactly<':'> >();
environment[key] = parse_values();
@@ -48,69 +31,124 @@ namespace Sass {
return Node();
}
Node Document::parse_ruleset() {
Node Document::parse_ruleset()
{
Node ruleset(line_number, Node::ruleset, 2);
ruleset << parse_selector();
ruleset << parse_selector_group();
ruleset << parse_block();
return ruleset;
}
Node Document::parse_selector_group()
{
Node group(line_number, Node::selector_group, 1);
group << parse_selector();
while (lex< exactly<','> >()) group << parse_selector();
return group;
}
Node Document::parse_selector() {
try_munching<identifier>();
return Node(line_number, Node::selector, top);
Node Document::parse_selector()
{
lex<identifier>();
return Node(line_number, Node::selector, lexed);
}
Node Document::parse_block() {
try_munching<exactly<'{'> >();
Node decls(line_number, Node::block);
while(!try_munching<exactly<'}'> >()) {
if (try_munching<block_comment>()) {
decls << Node(line_number, Node::comment, top);
Node Document::parse_block()
{
lex< exactly<'{'> >();
Node block(line_number, Node::block);
while (!lex< exactly<'}'> >()) {
if (lex< block_comment >()) {
block << Node(line_number, Node::comment, lexed);
block.has_comments = true;
continue;
}
else if (try_munching<variable>()) {
decls << parse_var_def();
else if (lex< variable >()) {
block << parse_var_def();
continue;
}
try_munching<identifier>();
Token id = top;
if (try_munching<exactly<':'> >()) {
Node rule(line_number, Node::rule, 2);
rule << Node(line_number, Node::property, id);
rule << parse_values();
decls << rule;
decls.has_rules = true;
try_munching<exactly<';'> >();
else if (look_for_rule()) {
block << parse_rule();
block.has_rules = true;
continue;
}
else {
Node ruleset(line_number, Node::ruleset, 2);
ruleset << Node(line_number, Node::selector, id);
ruleset << parse_block();
decls << ruleset;
decls.has_rulesets = true;
block << parse_ruleset();
block.has_rulesets = true;
continue;
}
}
return decls;
return block;
// lex< identifier >();
// // Token id(lexed);
// if (peek< exactly<':'> >()) {
// Node rule(line_number, Node::rule, 2);
// rule << Node(line_number, Node::property, lexed);
// lex< exactly<':'> >();
// rule << parse_values();
// block << rule;
// block.has_rules = true;
// lex< exactly<';'> >();
// }
// else {
// Node ruleset(line_number, Node::ruleset, 2);
// ruleset << Node(line_number, Node::selector, lexed);
// ruleset << parse_block();
// block << ruleset;
// block.has_rulesets = true;
// }
// }
// return block;
}
Node Document::parse_rule() {
Node rule(line_number, Node::rule, 2);
lex< identifier >();
rule << Node(line_number, Node::property, lexed);
lex< exactly<':'> >();
rule << parse_values();
return rule;
}
Node Document::parse_values() {
Node Document::parse_values()
{
Node values(line_number, Node::values);
while(try_munching<identifier>() || try_munching<dimension>() ||
try_munching<percentage>() || try_munching<number>() ||
try_munching<hex>() || try_munching<string_constant>() ||
try_munching<variable>()) {
if (top.begin[0] == '$') {
Node stuff(environment[top]);
for (int i = 0; i < stuff.children->size(); ++i) {
values << stuff.children->at(i);
while (lex< identifier >() || lex< dimension >() ||
lex< percentage >() || lex< number >() ||
lex< hex >() || lex< string_constant >() ||
lex< variable >()) {
if (lexed.begin[0] == '$') {
Node fetched(environment[lexed]);
for (int i = 0; i < fetched.children->size(); ++i) {
values << fetched.children->at(i);
}
}
else
{
values << Node(line_number, Node::value, top);
else {
values << Node(line_number, Node::value, lexed);
}
}
return values;
return values;
}
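// look_for_rule checks, via peek alone (consuming no input), whether the text
// at `start` (or at the current position) has the shape of a property rule:
// an identifier, a ':', a run of values, then a ';' or '}'. It returns a
// non-null position on success and 0 otherwise; parse_block uses it to decide
// between parse_rule and a nested parse_ruleset.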
char* Document::look_for_rule(char* start)
{
char* p = start ? start : position;
(p = peek< identifier >(p)) &&
(p = peek< exactly<':'> >(p)) &&
(p = look_for_values(p)) &&
(p = peek< alternatives< exactly<';'>, exactly<'}'> > >(p));
return p;
}
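// look_for_values peeks past as many consecutive value tokens as it can
// (identifiers, dimensions, percentages, numbers, hex colors, string
// constants, variables) and returns the position after the last one,
// or 0 if nothing matched.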
char* Document::look_for_values(char* start)
{
char* p = start ? start : position;
char* q;
while ((q = peek< identifier >(p)) || (q = peek< dimension >(p)) ||
(q = peek< percentage >(p)) || (q = peek< number >(p)) ||
(q = peek< hex >(p)) || (q = peek< string_constant >(p)) ||
(q = peek< variable >(p)))
{ p = q; }
return p == start ? 0 : p;
}
}
\ No newline at end of file
@@ -94,23 +94,24 @@ namespace Sass {
vector<Node>* nodes;
if (type == ruleset) {
nodes = children->at(1).children;
has_comments = children->at(1).has_comments;
has_rules = children->at(1).has_rules;
has_rulesets = children->at(1).has_rulesets;
}
switch (type) {
case ruleset:
if (has_rules) {
if (has_comments || has_rules) {
buf << indentation;
children->at(0).emit_nested_css(buf, prefix, depth); // selector
children->at(0).emit_nested_css(buf, prefix, depth); // selector group
buf << " {";
for (int i = 0; i < nodes->size(); ++i) {
if (nodes->at(i).type == rule) nodes->at(i).emit_nested_css(buf, "", depth + 1); // rules
if (nodes->at(i).type == comment || nodes->at(i).type == rule) nodes->at(i).emit_nested_css(buf, "", depth + 1); // rules
}
buf << " }" << endl;
}
if (has_rulesets) {
for (int i = 0; i < nodes->size(); ++i) { // do each nested ruleset
if (nodes->at(i).type == ruleset) nodes->at(i).emit_nested_css(buf, prefix + (prefix.empty() ? "" : " ") + string((*children)[0].token), depth + (has_rules ? 1 : 0));
if (nodes->at(i).type == ruleset) nodes->at(i).emit_nested_css(buf, prefix + (prefix.empty() ? "" : " ") + string((*children)[0].token), depth + (has_comments || has_rules ? 1 : 0));
}
}
if (depth == 0 && prefix.empty()) buf << endl;
@@ -129,6 +130,10 @@ namespace Sass {
buf << " " << string((*children)[i].token);
}
break;
case selector_group:
// UNFINISHED
children->at(0).emit_nested_css(buf, prefix, depth);
break;
case selector:
buf << prefix << (prefix.empty() ? "" : " ") << string(token);
break;
...
@@ -69,11 +69,12 @@ namespace Sass {
mutable vector<Node>* children;
Token token;
Type type;
bool has_comments;
bool has_rules;
bool has_rulesets;
bool has_propsets;
Node() { ++fresh; }
Node() : type(nil), children(0) { ++fresh; }
Node(const Node& n)
: line_number(n.line_number),
...
@@ -12,16 +12,15 @@ namespace Sass {
Token();
Token(const char* _begin, const char* _end);
inline bool is_null() const {
return begin == 0 || end == 0 || begin >= end;
}
inline operator string() const {
return string(begin, end - begin);
}
inline operator string() const
{ return string(begin, end - begin); }
void stream_unquoted(std::stringstream& buf) const;
bool operator<(const Token& rhs) const;
// a Token converts to true only when it spans a non-empty range (the inverse of is_null above)
operator bool()
{ return begin && end && begin < end; }
};
}
\ No newline at end of file