depend.cpp

00001 /* $Id: depend.cpp 24452 2012-08-01 19:07:04Z rubidium $ */
00002 
00003 /*
00004  * This file is part of OpenTTD.
00005  * OpenTTD is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, version 2.
00006  * OpenTTD is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
00007  * See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with OpenTTD. If not, see <http://www.gnu.org/licenses/>.
00008  */
00009 
00023 #include <stdio.h>
00024 #include <stdlib.h>
00025 #include <string.h>
00026 #include <ctype.h>
00027 #include <limits.h>
00028 #include <unistd.h>
00029 #include <map>
00030 #include <set>
00031 #include <stack>
00032 
00037 static inline void free(const void *ptr)
00038 {
00039   free(const_cast<void *>(ptr));
00040 }
00041 
00042 #ifndef PATH_MAX
00043 
00044 # define PATH_MAX 260
00045 #endif
00046 
00048 struct StringCompare {
00055   bool operator () (const char *a, const char *b) const
00056   {
00057     return strcmp(a, b) < 0;
00058   }
00059 };
00061 typedef std::set<const char*, StringCompare> StringSet;
00063 typedef std::map<const char*, StringSet*, StringCompare> StringMap;
00065 typedef std::pair<const char*, StringSet*> StringMapItem;
00066 
00068 static StringSet _include_dirs; ///< Directories to search for include files (given with -I).
00070 static StringMap _files;        ///< For each object file, the set of headers it depends on.
00072 static StringMap _headers;      ///< For each header file, the set of headers it includes.
00074 static StringSet _defines;      ///< Defines given on the command line (given with -D).
00075 
00079 class File {
00080 public:
00086   File(const char *filename) : filename(filename)
00087   {
00088     this->fp = fopen(filename, "r");
00089     if (this->fp == NULL) {
00090       fprintf(stderr, "Could not open %s for reading\n", filename);
00091       exit(1);
00092     }
00093     this->dirname = strdup(filename);
00094     char *last = strrchr(this->dirname, '/');
00095     if (last != NULL) {
00096       *last = '\0';
00097     } else {
00098       *this->dirname = '\0';
00099     }
00100   }
00101 
00103   ~File()
00104   {
00105     fclose(this->fp);
00106     free(this->dirname);
00107   }
00108 
00114   char GetChar() const
00115   {
00116     int c = fgetc(this->fp);
00117     return (c == EOF) ? '\0' : c;
00118   }
00119 
00124   const char *GetDirname() const
00125   {
00126     return this->dirname;
00127   }
00128 
00129 private:
00130   FILE *fp;             ///< The file being read.
00131   char *dirname;        ///< The directory the file lives in.
00132   const char *filename; ///< The name of the file.
00133 };
00134 
00136 enum Token {
00137   TOKEN_UNKNOWN,    ///< Unknown token.
00138   TOKEN_END,        ///< End of the file.
00139   TOKEN_EOL,        ///< End of the line.
00140   TOKEN_SHARP,      ///< A '#', i.e. the start of a preprocessor directive.
00141   TOKEN_LOCAL,      ///< A local include, i.e. "file".
00142   TOKEN_GLOBAL,     ///< A global include, i.e. <file>.
00143   TOKEN_IDENTIFIER, ///< An identifier.
00144   TOKEN_DEFINE,     ///< (#)define.
00145   TOKEN_IF,         ///< (#)if.
00146   TOKEN_IFDEF,      ///< (#)ifdef.
00147   TOKEN_IFNDEF,     ///< (#)ifndef.
00148   TOKEN_ELIF,       ///< (#)elif.
00149   TOKEN_ELSE,       ///< (#)else.
00150   TOKEN_ENDIF,      ///< (#)endif.
00151   TOKEN_UNDEF,      ///< (#)undef.
00152   TOKEN_OR,         ///< '||'.
00153   TOKEN_AND,        ///< '&&'.
00154   TOKEN_DEFINED,    ///< 'defined'.
00155   TOKEN_OPEN,       ///< '('.
00156   TOKEN_CLOSE,      ///< ')'.
00157   TOKEN_NOT,        ///< '!'.
00158   TOKEN_ZERO,       ///< A numeric constant that evaluates to zero.
00159   TOKEN_INCLUDE,    ///< (#)include.
00160 };
00161 
00163 typedef std::map<const char*, Token, StringCompare> KeywordList;
00164 
00168 class Lexer {
00169 public:
00174   Lexer(const File *file) : file(file), current_char('\0'), string(NULL), token(TOKEN_UNKNOWN)
00175   {
00176     this->keywords["define"]  = TOKEN_DEFINE;
00177     this->keywords["defined"] = TOKEN_DEFINED;
00178     this->keywords["if"]      = TOKEN_IF;
00179     this->keywords["ifdef"]   = TOKEN_IFDEF;
00180     this->keywords["ifndef"]  = TOKEN_IFNDEF;
00181     this->keywords["include"] = TOKEN_INCLUDE;
00182     this->keywords["elif"]    = TOKEN_ELIF;
00183     this->keywords["else"]    = TOKEN_ELSE;
00184     this->keywords["endif"]   = TOKEN_ENDIF;
00185     this->keywords["undef"]   = TOKEN_UNDEF;
00186 
00187     /* Initialise currently read character. */
00188     this->Next();
00189 
00190     /* Allocate the buffer. */
00191     this->buf_len = 32;
00192     this->buf = (char*)malloc(sizeof(*this->buf) * this->buf_len);
00193   }
00194 
00196   ~Lexer()
00197   {
00198     free(this->buf);
00199   }
00200 
00204   void Next()
00205   {
00206     this->current_char = this->file->GetChar();
00207   }
00208 
00213   Token GetToken() const
00214   {
00215     return this->token;
00216   }
00217 
00222   const char *GetString() const
00223   {
00224     return this->string;
00225   }
00226 
00231   void Lex()
00232   {
00233     for (;;) {
00234       free(this->string);
00235       this->string = NULL;
00236       this->token  = TOKEN_UNKNOWN;
00237 
00238       switch (this->current_char) {
00239         /* '\0' means End-Of-File */
00240         case '\0': this->token = TOKEN_END; return;
00241 
00242         /* Skip some chars, as they don't do anything */
00243         case '\t': this->Next(); break;
00244         case '\r': this->Next(); break;
00245         case ' ':  this->Next(); break;
00246 
00247         case '\\':
00248           this->Next();
00249           if (this->current_char == '\n') this->Next();
00250           break;
00251 
00252         case '\n':
00253           this->token = TOKEN_EOL;
00254           this->Next();
00255           return;
00256 
00257         case '#':
00258           this->token = TOKEN_SHARP;
00259           this->Next();
00260           return;
00261 
00262         case '"':
00263           this->ReadString('"', TOKEN_LOCAL);
00264           this->Next();
00265           return;
00266 
00267         case '<':
00268           this->ReadString('>', TOKEN_GLOBAL);
00269           this->Next();
00270           return;
00271 
00272         case '&':
00273           this->Next();
00274           if (this->current_char == '&') {
00275             this->Next();
00276             this->token = TOKEN_AND;
00277             return;
00278           }
00279           break;
00280 
00281         case '|':
00282           this->Next();
00283           if (this->current_char == '|') {
00284             this->Next();
00285             this->token = TOKEN_OR;
00286             return;
00287           }
00288           break;
00289 
00290         case '(':
00291           this->Next();
00292           this->token = TOKEN_OPEN;
00293           return;
00294 
00295         case ')':
00296           this->Next();
00297           this->token = TOKEN_CLOSE;
00298           return;
00299 
00300         case '!':
00301           this->Next();
00302           if (this->current_char != '=') {
00303             this->token = TOKEN_NOT;
00304             return;
00305           }
00306           break;
00307 
00308         /* Possible begin of comment */
00309         case '/':
00310           this->Next();
00311           switch (this->current_char) {
00312             case '*': {
00313               this->Next();
00314               char previous_char = '\0';
00315               while ((this->current_char != '/' || previous_char != '*') && this->current_char != '\0') {
00316                 previous_char = this->current_char;
00317                 this->Next();
00318               }
00319               this->Next();
00320               break;
00321             }
00322             case '/': while (this->current_char != '\n' && this->current_char != '\0') this->Next(); break;
00323             default: break;
00324           }
00325           break;
00326 
00327         default:
00328           if (isalpha(this->current_char) || this->current_char == '_') {
00329             /* If the name starts with a letter, it is an identifier */
00330             this->ReadIdentifier();
00331             return;
00332           }
00333           if (isdigit(this->current_char)) {
00334             bool zero = this->current_char == '0';
00335             this->Next();
00336             if (this->current_char == 'x' || this->current_char == 'X') Next();
00337             while (isdigit(this->current_char) || this->current_char == '.' || (this->current_char >= 'a' && this->current_char <= 'f') || (this->current_char >= 'A' && this->current_char <= 'F')) {
00338               zero &= this->current_char == '0';
00339               this->Next();
00340             }
00341             if (zero) this->token = TOKEN_ZERO;
00342             return;
00343           }
00344           this->Next();
00345           break;
00346       }
00347     }
00348   }
00349 
00350 private:
00356   Token FindKeyword(const char *name) const
00357   {
00358     KeywordList::const_iterator it = this->keywords.find(name);
00359     if (it == this->keywords.end()) return TOKEN_IDENTIFIER;
00360     return (*it).second;
00361   }
00362 
00366   void ReadIdentifier()
00367   {
00368     size_t count = 0;
00369 
00370     /* Read the rest of the identifier */
00371     do {
00372       this->buf[count++] = this->current_char;
00373       this->Next();
00374 
00375       if (count >= buf_len) {
00376         /* Scale the buffer if required */
00377         this->buf_len *= 2;
00378         this->buf = (char *)realloc(this->buf, sizeof(*this->buf) * this->buf_len);
00379       }
00380     } while ((isalpha(this->current_char) || this->current_char == '_' || isdigit(this->current_char)));
00381     this->buf[count] = '\0';
00382 
00383     free(this->string);
00384     this->string = strdup(this->buf);
00385     this->token = FindKeyword(this->string);
00386   }
00387 
00393   void ReadString(char end, Token token)
00394   {
00395     size_t count = 0;
00396     this->Next();
00397     while (this->current_char != end && this->current_char != ')' && this->current_char != '\n' && this->current_char != '\0') {
00398       this->buf[count++] = this->current_char;
00399       this->Next();
00400 
00401       if (count >= this->buf_len) {
00402         /* Scale the buffer if required */
00403         this->buf_len *= 2;
00404         this->buf = (char *)realloc(this->buf, sizeof(*this->buf) * this->buf_len);
00405       }
00406     }
00407     this->buf[count] = '\0';
00408     free(this->string);
00409     this->string = strdup(this->buf);
00410     this->token = token;
00411   }
00412 
00413   const File *file;     ///< The file being lexed.
00414   char current_char;    ///< The character currently being processed.
00415   char *string;         ///< The string/identifier belonging to the current token.
00416   Token token;          ///< The current token.
00417   char *buf;            ///< Temporary buffer for reading identifiers and strings.
00418   size_t buf_len;       ///< Length of the temporary buffer.
00419   KeywordList keywords; ///< All keywords we know of.
00420 };
00421 
00432 const char *GeneratePath(const char *dirname, const char *filename, bool local)
00433 {
00434   if (local) {
00435     if (access(filename, R_OK) == 0) return strdup(filename);
00436 
00437     char path[PATH_MAX];
00438     strcpy(path, dirname);
00439     const char *p = filename;
00440     /* Remove '..' from the begin of the filename. */
00441     while (*p == '.') {
00442       if (*(++p) == '.') {
00443         char *s = strrchr(path, '/');
00444         if (s != NULL) *s = '\0';
00445         p += 2;
00446       }
00447     }
00448     strcat(path, "/");
00449     strcat(path, p);
00450 
00451     if (access(path, R_OK) == 0) return strdup(path);
00452   }
00453 
00454   for (StringSet::iterator it = _include_dirs.begin(); it != _include_dirs.end(); it++) {
00455     char path[PATH_MAX];
00456     strcpy(path, *it);
00457     const char *p = filename;
00458     /* Remove '..' from the begin of the filename. */
00459     while (*p == '.') {
00460       if (*(++p) == '.') {
00461         char *s = strrchr(path, '/');
00462         if (s != NULL) *s = '\0';
00463         p += 2;
00464       }
00465     }
00466     strcat(path, "/");
00467     strcat(path, p);
00468 
00469     if (access(path, R_OK) == 0) return strdup(path);
00470   }
00471 
00472   return NULL;
00473 }
00474 
00482 bool ExpressionDefined(Lexer *lexer, StringSet *defines, bool verbose);
00483 
00491 bool ExpressionOr(Lexer *lexer, StringSet *defines, bool verbose);
00492 
00501 bool ExpressionNot(Lexer *lexer, StringSet *defines, bool verbose)
00502 {
00503   if (lexer->GetToken() == TOKEN_NOT) {
00504     if (verbose) fprintf(stderr, "!");
00505     lexer->Lex();
00506     bool value = !ExpressionDefined(lexer, defines, verbose);
00507     if (verbose) fprintf(stderr, "[%d]", value);
00508     return value;
00509   }
00510 
00511   if (lexer->GetToken() == TOKEN_OPEN) {
00512     if (verbose) fprintf(stderr, "(");
00513     lexer->Lex();
00514     bool value = ExpressionOr(lexer, defines, verbose);
00515     if (verbose) fprintf(stderr, ")[%d]", value);
00516     lexer->Lex();
00517     return value;
00518   }
00519 
00520   if (lexer->GetToken() == TOKEN_ZERO) {
00521     if (verbose) fprintf(stderr, "0");
00522     lexer->Lex();
00523     if (verbose) fprintf(stderr, "[0]");
00524     return false;
00525   }
00526 
00527   bool first = true;
00528   while (lexer->GetToken() == TOKEN_UNKNOWN || lexer->GetToken() == TOKEN_IDENTIFIER) {
00529     if (verbose && first) fprintf(stderr, "<assumed true>");
00530     first = false;
00531     lexer->Lex();
00532   }
00533 
00534   return true;
00535 }
00536 
00544 bool ExpressionDefined(Lexer *lexer, StringSet *defines, bool verbose)
00545 {
00546   bool value = ExpressionNot(lexer, defines, verbose);
00547 
00548   if (lexer->GetToken() != TOKEN_DEFINED) return value;
00549   lexer->Lex();
00550   if (verbose) fprintf(stderr, "defined");
00551   bool open = (lexer->GetToken() == TOKEN_OPEN);
00552   if (open) lexer->Lex();
00553   if (verbose) fprintf(stderr, open ? "(" : " ");
00554   if (lexer->GetToken() == TOKEN_IDENTIFIER) {
00555     if (verbose) fprintf(stderr, "%s", lexer->GetString());
00556     value = defines->find(lexer->GetString()) != defines->end();
00557   }
00558   if (open) {
00559     if (verbose) fprintf(stderr, ")");
00560     lexer->Lex();
00561   }
00562   lexer->Lex();
00563   if (verbose) fprintf(stderr, "[%d]", value);
00564   return value;
00565 }
00566 
00574 bool ExpressionAnd(Lexer *lexer, StringSet *defines, bool verbose)
00575 {
00576   bool value = ExpressionDefined(lexer, defines, verbose);
00577 
00578   for (;;) {
00579     if (lexer->GetToken() != TOKEN_AND) return value;
00580     if (verbose) fprintf(stderr, " && ");
00581     lexer->Lex();
00582     value = value && ExpressionDefined(lexer, defines, verbose);
00583   }
00584 }
00585 
00593 bool ExpressionOr(Lexer *lexer, StringSet *defines, bool verbose)
00594 {
00595   bool value = ExpressionAnd(lexer, defines, verbose);
00596 
00597   for (;;) {
00598     if (lexer->GetToken() != TOKEN_OR) return value;
00599     if (verbose) fprintf(stderr, " || ");
00600     lexer->Lex();
00601     value = value || ExpressionAnd(lexer, defines, verbose);
00602   }
00603 }
00604 
00606 enum Ignore {
00607   NOT_IGNORE,         ///< No ignoring; this branch is active.
00608   IGNORE_UNTIL_ELSE,  ///< Ignore until a #else or #endif is reached.
00609   IGNORE_UNTIL_ENDIF, ///< Ignore until a #endif is reached.
00610 };
00611 
00619 void ScanFile(const char *filename, const char *ext, bool header, bool verbose)
00620 {
00621   static StringSet defines;
00622   static std::stack<Ignore> ignore;
00623   /* Copy in the default defines (parameters of depend) */
00624   if (!header) {
00625     for (StringSet::iterator it = _defines.begin(); it != _defines.end(); it++) {
00626       defines.insert(strdup(*it));
00627     }
00628   }
00629 
00630   File file(filename);
00631   Lexer lexer(&file);
00632 
00633   /* Start the lexing! */
00634   lexer.Lex();
00635 
00636   while (lexer.GetToken() != TOKEN_END) {
00637     switch (lexer.GetToken()) {
00638       /* We reached the end of the file... yay, we're done! */
00639       case TOKEN_END: break;
00640 
00641       /* The line started with a # (minus whitespace) */
00642       case TOKEN_SHARP:
00643         lexer.Lex();
00644         switch (lexer.GetToken()) {
00645           case TOKEN_INCLUDE:
00646             if (verbose) fprintf(stderr, "%s #include ", filename);
00647             lexer.Lex();
00648             switch (lexer.GetToken()) {
00649               case TOKEN_LOCAL:
00650               case TOKEN_GLOBAL: {
00651                 if (verbose) fprintf(stderr, "%s", lexer.GetString());
00652                 if (!ignore.empty() && ignore.top() != NOT_IGNORE) {
00653                   if (verbose) fprintf(stderr, " (ignored)");
00654                   break;
00655                 }
00656                 const char *h = GeneratePath(file.GetDirname(), lexer.GetString(), lexer.GetToken() == TOKEN_LOCAL);
00657                 if (h != NULL) {
00658                   StringMap::iterator it = _headers.find(h);
00659                   if (it == _headers.end()) {
00660                     it = (_headers.insert(StringMapItem(strdup(h), new StringSet()))).first;
00661                     if (verbose) fprintf(stderr, "\n");
00662                     ScanFile(h, ext, true, verbose);
00663                   }
00664                   StringMap::iterator curfile;
00665                   if (header) {
00666                     curfile = _headers.find(filename);
00667                   } else {
00668                     /* Replace the extension with the provided extension of '.o'. */
00669                     char path[PATH_MAX];
00670                     strcpy(path, filename);
00671                     *(strrchr(path, '.')) = '\0';
00672                     strcat(path, ext != NULL ? ext : ".o");
00673                     curfile = _files.find(path);
00674                     if (curfile == _files.end()) {
00675                       curfile = (_files.insert(StringMapItem(strdup(path), new StringSet()))).first;
00676                     }
00677                   }
00678                   if (it != _headers.end()) {
00679                     for (StringSet::iterator header = it->second->begin(); header != it->second->end(); header++) {
00680                       if (curfile->second->find(*header) == curfile->second->end()) curfile->second->insert(strdup(*header));
00681                     }
00682                   }
00683                   if (curfile->second->find(h) == curfile->second->end()) curfile->second->insert(strdup(h));
00684                   free(h);
00685                 }
00686               }
00687               /* FALL THROUGH */
00688               default: break;
00689             }
00690             break;
00691 
00692           case TOKEN_DEFINE:
00693             if (verbose) fprintf(stderr, "%s #define ", filename);
00694             lexer.Lex();
00695             if (lexer.GetToken() == TOKEN_IDENTIFIER) {
00696               if (verbose) fprintf(stderr, "%s", lexer.GetString());
00697               if (!ignore.empty() && ignore.top() != NOT_IGNORE) {
00698                 if (verbose) fprintf(stderr, " (ignored)");
00699                 break;
00700               }
00701               if (defines.find(lexer.GetString()) == defines.end()) defines.insert(strdup(lexer.GetString()));
00702               lexer.Lex();
00703             }
00704             break;
00705 
00706           case TOKEN_UNDEF:
00707             if (verbose) fprintf(stderr, "%s #undef ", filename);
00708             lexer.Lex();
00709             if (lexer.GetToken() == TOKEN_IDENTIFIER) {
00710               if (verbose) fprintf(stderr, "%s", lexer.GetString());
00711               if (!ignore.empty() && ignore.top() != NOT_IGNORE) {
00712                 if (verbose) fprintf(stderr, " (ignored)");
00713                 break;
00714               }
00715               StringSet::iterator it = defines.find(lexer.GetString());
00716               if (it != defines.end()) {
00717                 free(*it);
00718                 defines.erase(it);
00719               }
00720               lexer.Lex();
00721             }
00722             break;
00723 
00724           case TOKEN_ENDIF:
00725             if (verbose) fprintf(stderr, "%s #endif", filename);
00726             lexer.Lex();
00727             if (!ignore.empty()) ignore.pop();
00728             if (verbose) fprintf(stderr, " -> %signore", (!ignore.empty() && ignore.top() != NOT_IGNORE) ? "" : "not ");
00729             break;
00730 
00731           case TOKEN_ELSE: {
00732             if (verbose) fprintf(stderr, "%s #else", filename);
00733             lexer.Lex();
00734             Ignore last = ignore.empty() ? NOT_IGNORE : ignore.top();
00735             if (!ignore.empty()) ignore.pop();
00736             if (ignore.empty() || ignore.top() == NOT_IGNORE) {
00737               ignore.push(last == IGNORE_UNTIL_ELSE ? NOT_IGNORE : IGNORE_UNTIL_ENDIF);
00738             } else {
00739               ignore.push(IGNORE_UNTIL_ENDIF);
00740             }
00741             if (verbose) fprintf(stderr, " -> %signore", (!ignore.empty() && ignore.top() != NOT_IGNORE) ? "" : "not ");
00742             break;
00743           }
00744 
00745           case TOKEN_ELIF: {
00746             if (verbose) fprintf(stderr, "%s #elif ", filename);
00747             lexer.Lex();
00748             Ignore last = ignore.empty() ? NOT_IGNORE : ignore.top();
00749             if (!ignore.empty()) ignore.pop();
00750             if (ignore.empty() || ignore.top() == NOT_IGNORE) {
00751               bool value = ExpressionOr(&lexer, &defines, verbose);
00752               ignore.push(last == IGNORE_UNTIL_ELSE ? (value ? NOT_IGNORE : IGNORE_UNTIL_ELSE) : IGNORE_UNTIL_ENDIF);
00753             } else {
00754               ignore.push(IGNORE_UNTIL_ENDIF);
00755             }
00756             if (verbose) fprintf(stderr, " -> %signore", (!ignore.empty() && ignore.top() != NOT_IGNORE) ? "" : "not ");
00757             break;
00758           }
00759 
00760           case TOKEN_IF: {
00761             if (verbose) fprintf(stderr, "%s #if ", filename);
00762             lexer.Lex();
00763             if (ignore.empty() || ignore.top() == NOT_IGNORE) {
00764               bool value = ExpressionOr(&lexer, &defines, verbose);
00765               ignore.push(value ? NOT_IGNORE : IGNORE_UNTIL_ELSE);
00766             } else {
00767               ignore.push(IGNORE_UNTIL_ENDIF);
00768             }
00769             if (verbose) fprintf(stderr, " -> %signore", (!ignore.empty() && ignore.top() != NOT_IGNORE) ? "" : "not ");
00770             break;
00771           }
00772 
00773           case TOKEN_IFDEF:
00774             if (verbose) fprintf(stderr, "%s #ifdef ", filename);
00775             lexer.Lex();
00776             if (lexer.GetToken() == TOKEN_IDENTIFIER) {
00777               bool value = defines.find(lexer.GetString()) != defines.end();
00778               if (verbose) fprintf(stderr, "%s[%d]", lexer.GetString(), value);
00779               if (ignore.empty() || ignore.top() == NOT_IGNORE) {
00780                 ignore.push(value ? NOT_IGNORE : IGNORE_UNTIL_ELSE);
00781               } else {
00782                 ignore.push(IGNORE_UNTIL_ENDIF);
00783               }
00784             }
00785             if (verbose) fprintf(stderr, " -> %signore", (!ignore.empty() && ignore.top() != NOT_IGNORE) ? "" : "not ");
00786             break;
00787 
00788           case TOKEN_IFNDEF:
00789             if (verbose) fprintf(stderr, "%s #ifndef ", filename);
00790             lexer.Lex();
00791             if (lexer.GetToken() == TOKEN_IDENTIFIER) {
00792               bool value = defines.find(lexer.GetString()) != defines.end();
00793               if (verbose) fprintf(stderr, "%s[%d]", lexer.GetString(), value);
00794               if (ignore.empty() || ignore.top() == NOT_IGNORE) {
00795                 ignore.push(!value ? NOT_IGNORE : IGNORE_UNTIL_ELSE);
00796               } else {
00797                 ignore.push(IGNORE_UNTIL_ENDIF);
00798               }
00799             }
00800             if (verbose) fprintf(stderr, " -> %signore", (!ignore.empty() && ignore.top() != NOT_IGNORE) ? "" : "not ");
00801             break;
00802 
00803           default:
00804             if (verbose) fprintf(stderr, "%s #<unknown>", filename);
00805             lexer.Lex();
00806             break;
00807         }
00808         if (verbose) fprintf(stderr, "\n");
00809         /* FALL THROUGH */
00810       default:
00811         /* Ignore the rest of the garbage on this line */
00812         while (lexer.GetToken() != TOKEN_EOL && lexer.GetToken() != TOKEN_END) lexer.Lex();
00813         lexer.Lex();
00814         break;
00815     }
00816   }
00817 
00818   if (!header) {
00819     for (StringSet::iterator it = defines.begin(); it != defines.end(); it++) {
00820       free(*it);
00821     }
00822     defines.clear();
00823     while (!ignore.empty()) ignore.pop();
00824   }
00825 }
00826 
00833 int main(int argc, char *argv[])
00834 {
00835   bool ignorenext = true;
00836   char *filename = NULL;
00837   char *ext = NULL;
00838   char *delimiter = NULL;
00839   bool append = false;
00840   bool verbose = false;
00841 
00842   for (int i = 0; i < argc; i++) {
00843     if (ignorenext) {
00844       ignorenext = false;
00845       continue;
00846     }
00847     if (argv[i][0] == '-') {
00848       /* Append */
00849       if (strncmp(argv[i], "-a", 2) == 0) append = true;
00850       /* Include dir */
00851       if (strncmp(argv[i], "-I", 2) == 0) {
00852         if (argv[i][2] == '\0') {
00853           i++;
00854           _include_dirs.insert(strdup(argv[i]));
00855         } else {
00856           _include_dirs.insert(strdup(&argv[i][2]));
00857         }
00858         continue;
00859       }
00860       /* Define */
00861       if (strncmp(argv[i], "-D", 2) == 0) {
00862         char *p = strchr(argv[i], '=');
00863         if (p != NULL) *p = '\0';
00864         _defines.insert(strdup(&argv[i][2]));
00865         continue;
00866       }
00867       /* Output file */
00868       if (strncmp(argv[i], "-f", 2) == 0) {
00869         if (filename != NULL) continue;
00870         filename = strdup(&argv[i][2]);
00871         continue;
00872       }
00873       /* Object file extension */
00874       if (strncmp(argv[i], "-o", 2) == 0) {
00875         if (ext != NULL) continue;
00876         ext = strdup(&argv[i][2]);
00877         continue;
00878       }
00879       /* Starting string delimiter */
00880       if (strncmp(argv[i], "-s", 2) == 0) {
00881         if (delimiter != NULL) continue;
00882         delimiter = strdup(&argv[i][2]);
00883         continue;
00884       }
00885       /* Verbose */
00886       if (strncmp(argv[i], "-v", 2) == 0) verbose = true;
00887       continue;
00888     }
00889     ScanFile(argv[i], ext, false, verbose);
00890   }
00891 
00892   /* Default output file is Makefile */
00893   if (filename == NULL) filename = strdup("Makefile");
00894 
00895   /* Default delimiter string */
00896   if (delimiter == NULL) delimiter = strdup("# DO NOT DELETE");
00897 
00898   char backup[PATH_MAX];
00899   strcpy(backup, filename);
00900   strcat(backup, ".bak");
00901 
00902   char *content = NULL;
00903   long size = 0;
00904 
00905   /* Read in the current file; so we can overwrite everything from the
00906    * end of non-depend data marker down till the end. */
00907   FILE *src = fopen(filename, "rb");
00908   if (src != NULL) {
00909     fseek(src, 0, SEEK_END);
00910     size = ftell(src);
00911     rewind(src);
00912     content = (char*)malloc(size * sizeof(*content));
00913     if (fread(content, 1, size, src) != (size_t)size) {
00914       fprintf(stderr, "Could not read %s\n", filename);
00915       exit(-2);
00916     }
00917     fclose(src);
00918   }
00919 
00920   FILE *dst = fopen(filename, "w");
00921   bool found_delimiter = false;
00922 
00923   if (size != 0) {
00924     src = fopen(backup, "wb");
00925     if (fwrite(content, 1, size, src) != (size_t)size) {
00926       fprintf(stderr, "Could not write %s\n", backup);
00927       exit(-2);
00928     }
00929     fclose(src);
00930 
00931     /* Then append it to the real file. */
00932     src = fopen(backup, "rb");
00933     while (fgets(content, size, src) != NULL) {
00934       fputs(content, dst);
00935       if (!strncmp(content, delimiter, strlen(delimiter))) found_delimiter = true;
00936       if (!append && found_delimiter) break;
00937     }
00938     fclose(src);
00939   }
00940   if (!found_delimiter) fprintf(dst, "\n%s\n", delimiter);
00941 
00942   for (StringMap::iterator it = _files.begin(); it != _files.end(); it++) {
00943     for (StringSet::iterator h = it->second->begin(); h != it->second->end(); h++) {
00944       fprintf(dst, "%s: %s\n", it->first, *h);
00945     }
00946   }
00947 
00948   /* Clean up our mess. */
00949   fclose(dst);
00950 
00951   free(delimiter);
00952   free(filename);
00953   free(ext);
00954   free(content);
00955 
00956   for (StringMap::iterator it = _files.begin(); it != _files.end(); it++) {
00957     for (StringSet::iterator h = it->second->begin(); h != it->second->end(); h++) {
00958       free(*h);
00959     }
00960     it->second->clear();
00961     delete it->second;
00962     free(it->first);
00963   }
00964   _files.clear();
00965 
00966   for (StringMap::iterator it = _headers.begin(); it != _headers.end(); it++) {
00967     for (StringSet::iterator h = it->second->begin(); h != it->second->end(); h++) {
00968       free(*h);
00969     }
00970     it->second->clear();
00971     delete it->second;
00972     free(it->first);
00973   }
00974   _headers.clear();
00975 
00976   for (StringSet::iterator it = _defines.begin(); it != _defines.end(); it++) {
00977     free(*it);
00978   }
00979   _defines.clear();
00980 
00981   for (StringSet::iterator it = _include_dirs.begin(); it != _include_dirs.end(); it++) {
00982     free(*it);
00983   }
00984   _include_dirs.clear();
00985 
00986   return 0;
00987 }
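/* Illustrative usage of the resulting tool (the binary name "depend" and the
 * paths are assumptions; the flags are the ones parsed in main() above):
 *   depend -Isrc -DWITH_ZLIB -fMakefile.dep -o.o -s"# DO NOT DELETE" src/*.cpp
 * Without -f the output file defaults to "Makefile"; without -s the delimiter
 * defaults to "# DO NOT DELETE". The previous output file is saved as
 * "<output>.bak"; everything after the first delimiter line is replaced by
 * the freshly computed "object: header" lines, unless -a is given, in which
 * case the old contents are kept and the dependencies are appended. -v dumps
 * the preprocessor decisions to stderr. */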