diff src/tokenizer.cpp @ 19:b24369330483 stable-1-0-7

Fedora 9 compile and const correctness.
author Carl Byington <carl@five-ten-sg.com>
date Thu, 12 Jun 2008 18:17:33 -0700
parents 8ebecad6530f
children
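The const-correctness theme of this changeset, sketched in isolation: Fedora 9 ships gcc 4.3, which warns on every implicit conversion of a string literal to a plain "char *" ("deprecated conversion from string constant to 'char*'"). Interfaces such as TOKEN::TOKEN, TOKEN::include and TOKEN::next only read the strings they handle, so the hunks below move them to "const char *". A minimal, self-contained sketch of the difference; the function names and file name are invented for illustration:

    #include <cstdio>

    // Pre-patch style: a non-const parameter promises the callee may write
    // through the pointer, so a string literal does not bind cleanly.
    static void open_old(char *fn)       { std::printf("open %s\n", fn); }

    // Post-patch style: const char * documents read-only use and lets a
    // literal bind without any diagnostic.
    static void open_new(const char *fn) { std::printf("open %s\n", fn); }

    int main() {
        // open_old("tokens.conf") is what gcc 4.3 complains about, so the
        // caller is forced to keep writable storage around instead.
        char fn[] = "tokens.conf";
        open_old(fn);

        open_new("tokens.conf");    // the literal converts to const char * silently
        return 0;
    }
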
--- a/src/tokenizer.cpp	Fri Mar 21 16:02:40 2008 -0700
+++ b/src/tokenizer.cpp	Thu Jun 12 18:17:33 2008 -0700
@@ -287,7 +287,7 @@
 };
 
 
-TOKEN::TOKEN(char *fn, string_set *includes) {
+TOKEN::TOKEN(const char *fn, string_set *includes) {
 	pushed = false;
 	include_files = includes;
 	include(fn);
@@ -301,11 +301,11 @@
 
 void TOKEN::pop() {
 	ifstream *is = streams.front();
-	char *fn = filenames.front();
+    const char *fn = filenames.front();
 	streams.pop_front();
-	filenames.pop_front();
 	filenamess.erase(fn);
-	linenumbers.pop_front();
+    if (filenames.size() > 1)   filenames.pop_front();
+    if (linenumbers.size() > 1) linenumbers.pop_front();
 	is->close();
 	delete is;
 }
@@ -339,7 +339,7 @@
 }
 
 
-bool TOKEN::include(char *fn) {
+bool TOKEN::include(const char *fn) {
 	string_set::iterator i = filenamess.find(fn);
 	if (i != filenamess.end()) {
 		token_error("redundant or recursive include file detected");
@@ -363,15 +363,15 @@
 }
 
 
-char *TOKEN::next() {
+const char *TOKEN::next() {
 	if (!pending_tokens.empty()) {
-		char *t = pending_tokens.front();
+        const char *t = pending_tokens.front();
 		pending_tokens.pop_front();
 		return t;
 	}
 	if (streams.empty()) return NULL;
 	const int PENDING_LIMIT = 1000;
-	static u_char buffer[PENDING_LIMIT];
+    u_char buffer[PENDING_LIMIT];
 	int count = 0;
 	state st = s_init;
 	while (true) {
@@ -437,7 +437,7 @@
 
 			default: {
 				token_error();
-				token_error("unknown state %d %s \n", st, " ");
+                token_error("unknown state %d %s", st, " ");
 			} break;
 		}
 		if (st == s_init) break;
@@ -445,10 +445,10 @@
 
 	buffer[count] = '\0';
 	if (count == 0) return NULL;
-	char *t = register_string((char*)buffer);
+    const char *t = register_string((char*)buffer);
 	if (t == token_include) {
-		char *f = next();	// should be file name
-		char *s = next();	// should be semicolon
+        const char *f = next();   // should be file name
+        const char *s = next();   // should be semicolon
 		if (s == token_semi) {
 			include(f);
 			return next();
@@ -464,7 +464,7 @@
 
 
 int TOKEN::nextint() {
-	char *t = next();
+    const char *t = next();
 	char *e;
 	long i = strtol(t, &e, 10);
 	if (*e != '\0') {
@@ -509,19 +509,19 @@
 
 void TOKEN::token_error(const char *want, const char *have) {
 	token_error();
-	token_error("expecting %s, found %s \n", want, have);
+    token_error("expecting %s, found %s", want, have);
 }
 
 
 void TOKEN::token_error() {
 	token_error("syntax error at line %d in file %s -- ", cur_line(), cur_fn());
 	line_list::iterator   j = linenumbers.begin();
-	string_list::iterator i = filenames.begin();
+    string_list::const_iterator i = filenames.begin();
 	for (; i!=filenames.end(); i++,j++) {
 		if (i != filenames.begin()) {
-			char *fn = (*i);
+            const char *fn = (*i);
 			int   li = (*j);
-			token_error("\n    included from line %d in file %s -- ", li, fn);
+            token_error("    included from line %d in file %s -- ", li, fn);
 		}
 	}
 }
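
Two smaller changes in the diff deserve a note. In pop(), the filename and line-number lists are now popped only while more than one entry remains, which reads like a guard so that cur_fn() and cur_line() still have something to report after the outermost file is closed; that rationale is inferred from the code, not stated in the changeset. In next(), the token scratch buffer loses its static storage class. A function-static array is one shared object for every call and every TOKEN instance, so whatever one call builds can be overwritten by the next; a stack-local array gives each call private storage, and it suffices here because the buffer is handed to register_string() before next() returns (assuming register_string() copies or interns its argument). A self-contained sketch of the shared-storage hazard, with invented names:

    #include <cstdio>
    #include <cstring>

    // One static buffer shared by every call: the returned pointer always
    // refers to the same storage, so a later call silently rewrites it.
    static const char *scan_static(const char *src) {
        static char buffer[1000];
        std::strncpy(buffer, src, sizeof(buffer) - 1);
        buffer[sizeof(buffer) - 1] = '\0';
        return buffer;
    }

    // A local buffer is private to the call; the result is copied out before
    // the stack frame disappears.
    static void scan_local(const char *src, char *out, std::size_t outlen) {
        char buffer[1000];
        std::strncpy(buffer, src, sizeof(buffer) - 1);
        buffer[sizeof(buffer) - 1] = '\0';
        std::snprintf(out, outlen, "%s", buffer);
    }

    int main() {
        const char *a = scan_static("first");
        const char *b = scan_static("second");      // clobbers what a points at
        std::printf("%s %s\n", a, b);               // prints: second second

        char x[32], y[32];
        scan_local("first",  x, sizeof(x));
        scan_local("second", y, sizeof(y));
        std::printf("%s %s\n", x, y);               // prints: first second
        return 0;
    }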