Make SourceLocation::getFileLoc private to reduce the API exposure of
SourceLocation. This requires making some cleanups to token pasting
and _Pragma expansion.

llvm-svn: 62490
This commit is contained in:
Chris Lattner 2009-01-19 06:46:35 +00:00
parent 3b15170bd9
commit 29a2a191f2
6 changed files with 29 additions and 16 deletions

View File

@@ -64,6 +64,7 @@ private:
/// an input translation unit.
class SourceLocation {
unsigned ID;
friend class SourceManager;
public:
enum {
// FileID Layout:
@@ -100,6 +101,7 @@ public:
bool isValid() const { return ID != 0; }
bool isInvalid() const { return ID == 0; }
private:
static SourceLocation getFileLoc(unsigned ChunkID, unsigned FilePos) {
SourceLocation L;
// If a FilePos is larger than (1<<FilePosBits), the SourceManager makes
@@ -116,6 +118,7 @@ public:
L.ID = (ChunkID << FilePosBits) | FilePos;
return L;
}
public:
static bool isValidMacroSpellingOffs(int Val) {
if (Val >= 0)

View File

@@ -324,7 +324,6 @@ public:
return SourceLocation::getFileLoc(FID.ID, 0);
}
/// getInstantiationLoc - Return a new SourceLocation that encodes the fact
/// that a token at Loc should actually be referenced from InstantiationLoc.
SourceLocation getInstantiationLoc(SourceLocation Loc,

View File

@@ -93,8 +93,9 @@ public:
/// Create_PragmaLexer: Lexer constructor - Create a new lexer object for
/// _Pragma expansion. This has a variety of magic semantics that this method
/// sets up. It returns a new'd Lexer that must be delete'd when done.
static Lexer *Create_PragmaLexer(SourceLocation TokStartLoc, unsigned TokLen,
Preprocessor &PP);
static Lexer *Create_PragmaLexer(SourceLocation SpellingLoc,
SourceLocation InstantiationLoc,
unsigned TokLen, Preprocessor &PP);
/// getFeatures - Return the language features currently enabled. NOTE: this

View File

@@ -150,13 +150,14 @@ Lexer::Lexer(FileID FID, const SourceManager &SM, const LangOptions &features)
/// interface that could handle this stuff. This would pull GetMappedTokenLoc
/// out of the critical path of the lexer!
///
Lexer *Lexer::Create_PragmaLexer(SourceLocation TokStartLoc, unsigned TokLen,
Preprocessor &PP) {
Lexer *Lexer::Create_PragmaLexer(SourceLocation SpellingLoc,
SourceLocation InstantiationLoc,
unsigned TokLen, Preprocessor &PP) {
SourceManager &SM = PP.getSourceManager();
SourceLocation SpellingLoc = SM.getSpellingLoc(TokStartLoc);
// Create the lexer as if we were going to lex the file normally.
Lexer *L = new Lexer(SM.getCanonicalFileID(SpellingLoc), PP);
FileID SpellingFID = SM.getCanonicalFileID(SpellingLoc);
Lexer *L = new Lexer(SpellingFID, PP);
// Now that the lexer is created, change the start/end locations so that we
// just lex the subsection of the file that we want. This is lexing from a
@@ -168,7 +169,8 @@ Lexer *Lexer::Create_PragmaLexer(SourceLocation TokStartLoc, unsigned TokLen,
// Set the SourceLocation with the remapping information. This ensures that
// GetMappedTokenLoc will remap the tokens as they are lexed.
L->FileLoc = TokStartLoc;
L->FileLoc = SM.getInstantiationLoc(SM.getLocForStartOfFile(SpellingFID),
InstantiationLoc);
// Ensure that the lexer thinks it is inside a directive, so that end \n will
// return an EOM token.
@@ -321,7 +323,7 @@ static SourceLocation GetMappedTokenLoc(Preprocessor &PP,
// characters come from spelling(FileLoc)+Offset.
SourceLocation InstLoc = SourceMgr.getInstantiationLoc(FileLoc);
SourceLocation SpellingLoc = SourceMgr.getSpellingLoc(FileLoc);
SpellingLoc = SourceLocation::getFileLoc(SpellingLoc.getChunkID(), CharNo);
SpellingLoc = SpellingLoc.getFileLocWithOffset(CharNo);
return SourceMgr.getInstantiationLoc(SpellingLoc, InstLoc);
}
@@ -335,7 +337,7 @@ SourceLocation Lexer::getSourceLocation(const char *Loc) const {
// the file id from FileLoc with the offset specified.
unsigned CharNo = Loc-BufferStart;
if (FileLoc.isFileID())
return SourceLocation::getFileLoc(FileLoc.getChunkID(), CharNo);
return FileLoc.getFileLocWithOffset(CharNo);
// Otherwise, this is the _Pragma lexer case, which pretends that all of the
// tokens are lexed from where the _Pragma was defined.

View File

@@ -156,11 +156,11 @@ void Preprocessor::Handle_Pragma(Token &Tok) {
// Plop the string (including the newline and trailing null) into a buffer
// where we can lex it.
SourceLocation TokLoc = CreateString(&StrVal[0], StrVal.size(), StrLoc);
SourceLocation TokLoc = CreateString(&StrVal[0], StrVal.size());
// Make and enter a lexer object so that we lex and expand the tokens just
// like any others.
Lexer *TL = Lexer::Create_PragmaLexer(TokLoc,
Lexer *TL = Lexer::Create_PragmaLexer(TokLoc, StrLoc,
// do not include the null in the count.
StrVal.size()-1, *this);

View File

@@ -388,13 +388,21 @@ bool TokenLexer::PasteTokens(Token &Tok) {
} else {
PP.IncrementPasteCounter(false);
// Make a lexer to lex this string from.
assert(ResultTokLoc.isFileID() &&
"Should be a raw location into scratch buffer");
SourceManager &SourceMgr = PP.getSourceManager();
const char *ResultStrData = SourceMgr.getCharacterData(ResultTokLoc);
std::pair<FileID, unsigned> LocInfo =
SourceMgr.getDecomposedFileLoc(ResultTokLoc);
const char *ScratchBufStart =SourceMgr.getBufferData(LocInfo.first).first;
// Make a lexer to lex this string from. Lex just this one token.
const char *ResultStrData = ScratchBufStart+LocInfo.second;
// Make a lexer object so that we lex and expand the paste result.
Lexer TL(ResultTokLoc, PP.getLangOptions(),
SourceMgr.getBufferData(ResultTokLoc).first,
Lexer TL(SourceMgr.getLocForStartOfFile(LocInfo.first),
PP.getLangOptions(),
ScratchBufStart,
ResultStrData,
ResultStrData+LHSLen+RHSLen /*don't include null*/);