Instead of deriving different tokens from a base Token class, we now use an enumerated TOKEN_TYPE to distinguish types. This is so we don't have to cast all the time when parsing the resulting token stream.
Also, removed start/end stream tokens.
parent: b6a0ef207b
commit: 852e5b63e5
6 changed files with 201 additions and 214 deletions
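For context, a minimal sketch (not part of this commit) of what the enum-based design buys: every token is a plain Token tagged with a TOKEN_TYPE, so consumers can switch on token.type instead of downcasting to per-token subclasses. The Token fields mirror the new token.h below, trimmed to three types; HandleToken and main are purely illustrative.

#include <iostream>
#include <string>
#include <vector>

// Trimmed stand-in for the new Token in token.h (three types only).
enum TOKEN_TYPE { TT_KEY, TT_VALUE, TT_SCALAR };

struct Token {
	Token(TOKEN_TYPE type_): type(type_) {}
	TOKEN_TYPE type;
	std::string value;
	std::vector<std::string> params;
};

// One switch on the enum replaces per-subclass casts/typeid checks.
void HandleToken(const Token& token)
{
	switch(token.type) {
		case TT_KEY:    std::cout << "key\n"; break;
		case TT_VALUE:  std::cout << "value\n"; break;
		case TT_SCALAR: std::cout << "scalar: " << token.value << "\n"; break;
	}
}

int main()
{
	Token token(TT_SCALAR);
	token.value = "hello world";
	HandleToken(token);
	return 0;
}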
@@ -8,9 +8,6 @@ namespace YAML
 {
 	Parser::Parser(std::istream& in): m_scanner(in)
 	{
-		// eat the stream start token
-		// TODO: check?
-		Token *pToken = m_scanner.GetNextToken();
 	}
 
 	Parser::~Parser()
@@ -25,7 +22,7 @@ namespace YAML
 			if(!pToken)
 				break;
 
-			std::cout << typeid(*pToken).name() << ": " << *pToken << std::endl;
+			std::cout << *pToken << std::endl;
 			delete pToken;
 		}
 		getchar();
scanner.cpp (86 changed lines)

@@ -16,10 +16,6 @@ namespace YAML
 			delete m_tokens.front();
 			m_tokens.pop();
 		}
-
-		// delete limbo tokens (they're here for RAII)
-		for(std::set <Token *>::const_iterator it=m_limboTokens.begin();it!=m_limboTokens.end();++it)
-			delete *it;
 	}
 
 	// GetNextToken
@@ -78,7 +74,7 @@ namespace YAML
 			return;
 
 		if(!m_startedStream)
-			return ScanAndEnqueue(new StreamStartToken);
+			return StartStream();
 
 		// get rid of whitespace, etc. (in between tokens it should be irrelevent)
 		ScanToNextToken();
@@ -95,62 +91,56 @@ namespace YAML
 
 		// end of stream
 		if(INPUT.peek() == EOF)
-			return ScanAndEnqueue(new StreamEndToken);
+			return EndStream();
 
 		if(INPUT.column == 0 && INPUT.peek() == Keys::Directive)
-			return ScanAndEnqueue(new DirectiveToken);
+			return ScanDirective();
 
 		// document token
 		if(INPUT.column == 0 && Exp::DocStart.Matches(INPUT))
-			return ScanAndEnqueue(new DocumentStartToken);
+			return ScanDocStart();
 
 		if(INPUT.column == 0 && Exp::DocEnd.Matches(INPUT))
-			return ScanAndEnqueue(new DocumentEndToken);
+			return ScanDocEnd();
 
 		// flow start/end/entry
-		if(INPUT.peek() == Keys::FlowSeqStart)
-			return ScanAndEnqueue(new FlowSeqStartToken);
-
-		if(INPUT.peek() == Keys::FlowSeqEnd)
-			return ScanAndEnqueue(new FlowSeqEndToken);
-
-		if(INPUT.peek() == Keys::FlowMapStart)
-			return ScanAndEnqueue(new FlowMapStartToken);
-
-		if(INPUT.peek() == Keys::FlowMapEnd)
-			return ScanAndEnqueue(new FlowMapEndToken);
+		if(INPUT.peek() == Keys::FlowSeqStart || INPUT.peek() == Keys::FlowMapStart)
+			return ScanFlowStart();
+
+		if(INPUT.peek() == Keys::FlowSeqEnd || INPUT.peek() == Keys::FlowMapEnd)
+			return ScanFlowEnd();
 
 		if(INPUT.peek() == Keys::FlowEntry)
-			return ScanAndEnqueue(new FlowEntryToken);
+			return ScanFlowEntry();
 
 		// block/map stuff
 		if(Exp::BlockEntry.Matches(INPUT))
-			return ScanAndEnqueue(new BlockEntryToken);
+			return ScanBlockEntry();
 
 		if((m_flowLevel == 0 ? Exp::Key : Exp::KeyInFlow).Matches(INPUT))
-			return ScanAndEnqueue(new KeyToken);
+			return ScanKey();
 
 		if((m_flowLevel == 0 ? Exp::Value : Exp::ValueInFlow).Matches(INPUT))
-			return ScanAndEnqueue(new ValueToken);
+			return ScanValue();
 
 		// alias/anchor
 		if(INPUT.peek() == Keys::Alias || INPUT.peek() == Keys::Anchor)
-			return ScanAndEnqueue(new AnchorToken);
+			return ScanAnchorOrAlias();
 
 		// tag
 		if(INPUT.peek() == Keys::Tag)
-			return ScanAndEnqueue(new TagToken);
+			return ScanTag();
 
 		// special scalars
 		if(m_flowLevel == 0 && (INPUT.peek() == Keys::LiteralScalar || INPUT.peek() == Keys::FoldedScalar))
-			return ScanAndEnqueue(new BlockScalarToken);
+			return ScanBlockScalar();
 
 		if(INPUT.peek() == '\'' || INPUT.peek() == '\"')
-			return ScanAndEnqueue(new QuotedScalarToken);
+			return ScanQuotedScalar();
 
 		// plain scalars
 		if((m_flowLevel == 0 ? Exp::PlainScalar : Exp::PlainScalarInFlow).Matches(INPUT))
-			return ScanAndEnqueue(new PlainScalarToken);
+			return ScanPlainScalar();
 
 		// don't know what it is!
 		throw UnknownToken();
@@ -210,18 +200,28 @@ namespace YAML
 		return false;
 	}
 
-	// ScanAndEnqueue
-	// . Scans the token, then pushes it in the queue.
-	// . Note: we also use a set of "limbo tokens", i.e., tokens
-	// that haven't yet been pushed. This way, if ScanToken()
-	// throws an exception, we'll be keeping track of 'pToken'
-	// somewhere, and it will be automatically cleaned up when
-	// the Scanner destructs.
-	template <typename T> void Scanner::ScanAndEnqueue(T *pToken)
+	// StartStream
+	// . Set the initial conditions for starting a stream.
+	void Scanner::StartStream()
 	{
-		m_limboTokens.insert(pToken);
-		m_tokens.push(ScanToken(pToken));
-		m_limboTokens.erase(pToken);
+		m_startedStream = true;
+		m_simpleKeyAllowed = true;
+		m_indents.push(-1);
+	}
+
+	// EndStream
+	// . Close out the stream, finish up, etc.
+	void Scanner::EndStream()
+	{
+		// force newline
+		if(INPUT.column > 0)
+			INPUT.column = 0;
+
+		PopIndentTo(-1);
+		VerifyAllSimpleKeys();
+
+		m_simpleKeyAllowed = false;
+		m_endedStream = true;
 	}
 
 	// PushIndentTo
@@ -241,9 +241,9 @@ namespace YAML
 		// now push
 		m_indents.push(column);
 		if(sequence)
-			m_tokens.push(new BlockSeqStartToken);
+			m_tokens.push(new Token(TT_BLOCK_SEQ_START));
 		else
-			m_tokens.push(new BlockMapStartToken);
+			m_tokens.push(new Token(TT_BLOCK_MAP_START));
 
 		return m_tokens.front();
 	}
@@ -260,7 +260,7 @@ namespace YAML
 		// now pop away
 		while(!m_indents.empty() && m_indents.top() > column) {
 			m_indents.pop();
-			m_tokens.push(new BlockEndToken);
+			m_tokens.push(new Token(TT_BLOCK_END));
 		}
 	}
 }
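As an aside, a self-contained sketch (not from this repository) of the scanning pattern the new code settles on: ScanNextToken peeks at the input and dispatches to a per-token Scan*() helper, and each helper pushes a fully built, enum-typed Token onto the queue, which is why the ScanAndEnqueue/limbo-token machinery above can go away. MiniScanner, its character checks, and main are illustrative stand-ins only, not the real Scanner API.

#include <iostream>
#include <queue>
#include <sstream>
#include <string>

// Trimmed stand-in for the real TOKEN_TYPE/Token in token.h.
enum TOKEN_TYPE { TT_FLOW_SEQ_START, TT_FLOW_MAP_START, TT_SCALAR };

struct Token {
	Token(TOKEN_TYPE type_): type(type_) {}
	TOKEN_TYPE type;
	std::string value;
};

// Peek, dispatch to a per-token helper, and let the helper push a fully built
// Token onto the queue. The Token is only allocated after scanning succeeds,
// so no separate "limbo token" bookkeeping is needed in this sketch.
class MiniScanner {
public:
	MiniScanner(std::istream& in): m_in(in) {}
	~MiniScanner() {
		while(!m_tokens.empty()) { delete m_tokens.front(); m_tokens.pop(); }
	}

	void ScanNextToken() {
		int c = m_in.peek();
		if(c == '[' || c == '{')
			return ScanFlowStart();
		return ScanPlainScalar();
	}

	std::queue<Token*> m_tokens;

private:
	void ScanFlowStart() {
		char ch = static_cast<char>(m_in.get());
		TOKEN_TYPE type = (ch == '[' ? TT_FLOW_SEQ_START : TT_FLOW_MAP_START);
		m_tokens.push(new Token(type));
	}

	void ScanPlainScalar() {
		std::string line;
		std::getline(m_in, line);
		Token *pToken = new Token(TT_SCALAR);
		pToken->value = line;
		m_tokens.push(pToken);
	}

	std::istream& m_in;
};

int main()
{
	std::istringstream in("[hello");
	MiniScanner scanner(in);
	scanner.ScanNextToken();  // pushes TT_FLOW_SEQ_START
	scanner.ScanNextToken();  // pushes TT_SCALAR ("hello")
	std::cout << scanner.m_tokens.size() << " tokens\n";
	return 0;
}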
scanner.h (23 changed lines)

@@ -23,6 +23,8 @@ namespace YAML
 
 	private:
 		// scanning
+		void StartStream();
+		void EndStream();
 		void ScanNextToken();
 		void ScanToNextToken();
 		Token *PushIndentTo(int column, bool sequence);
@@ -46,8 +48,24 @@ namespace YAML
 			Token *pMapStart, *pKey;
 		};
 
-		template <typename T> void ScanAndEnqueue(T *pToken);
-		template <typename T> T *ScanToken(T *pToken);
+		// and the tokens
+		void ScanDirective();
+		void ScanDocStart();
+		void ScanDocEnd();
+		void ScanBlockSeqStart();
+		void ScanBlockMapSTart();
+		void ScanBlockEnd();
+		void ScanBlockEntry();
+		void ScanFlowStart();
+		void ScanFlowEnd();
+		void ScanFlowEntry();
+		void ScanKey();
+		void ScanValue();
+		void ScanAnchorOrAlias();
+		void ScanTag();
+		void ScanPlainScalar();
+		void ScanQuotedScalar();
+		void ScanBlockScalar();
 
 	private:
 		// the stream
@@ -55,7 +73,6 @@ namespace YAML
 
 		// the output (tokens)
 		std::queue <Token *> m_tokens;
-		std::set <Token *> m_limboTokens;
 
 		// state info
 		bool m_startedStream, m_endedStream;
scantoken.cpp (194 changed lines)

@@ -9,36 +9,13 @@ namespace YAML
 	///////////////////////////////////////////////////////////////////////
 	// Specialization for scanning specific tokens
 
-	// StreamStartToken
-	template <> StreamStartToken *Scanner::ScanToken(StreamStartToken *pToken)
-	{
-		m_startedStream = true;
-		m_simpleKeyAllowed = true;
-		m_indents.push(-1);
-
-		return pToken;
-	}
-
-	// StreamEndToken
-	template <> StreamEndToken *Scanner::ScanToken(StreamEndToken *pToken)
-	{
-		// force newline
-		if(INPUT.column > 0)
-			INPUT.column = 0;
-
-		PopIndentTo(-1);
-		VerifyAllSimpleKeys();
-
-		m_simpleKeyAllowed = false;
-		m_endedStream = true;
-
-		return pToken;
-	}
-
-	// DirectiveToken
+	// Directive
 	// . Note: no semantic checking is done here (that's for the parser to do)
-	template <> DirectiveToken *Scanner::ScanToken(DirectiveToken *pToken)
+	void Scanner::ScanDirective()
 	{
+		std::string name;
+		std::vector <std::string> params;
+
 		// pop indents and simple keys
 		PopIndentTo(-1);
 		VerifyAllSimpleKeys();
@@ -50,7 +27,7 @@ namespace YAML
 
 		// read name
 		while(INPUT.peek() != EOF && !Exp::BlankOrBreak.Matches(INPUT))
-			pToken->name += INPUT.GetChar();
+			name += INPUT.GetChar();
 
 		// read parameters
 		while(1) {
@@ -67,14 +44,17 @@ namespace YAML
 			while(INPUT.peek() != EOF && !Exp::BlankOrBreak.Matches(INPUT))
 				param += INPUT.GetChar();
 
-			pToken->params.push_back(param);
+			params.push_back(param);
 		}
 
-		return pToken;
+		Token *pToken = new Token(TT_DIRECTIVE);
+		pToken->value = name;
+		pToken->params = params;
+		m_tokens.push(pToken);
 	}
 
-	// DocumentStartToken
-	template <> DocumentStartToken *Scanner::ScanToken(DocumentStartToken *pToken)
+	// DocStart
+	void Scanner::ScanDocStart()
 	{
 		PopIndentTo(INPUT.column);
 		VerifyAllSimpleKeys();
@@ -82,11 +62,11 @@ namespace YAML
 
 		// eat
 		INPUT.Eat(3);
-		return pToken;
+		m_tokens.push(new Token(TT_DOC_START));
 	}
 
-	// DocumentEndToken
-	template <> DocumentEndToken *Scanner::ScanToken(DocumentEndToken *pToken)
+	// DocEnd
+	void Scanner::ScanDocEnd()
 	{
 		PopIndentTo(-1);
 		VerifyAllSimpleKeys();
@@ -94,37 +74,25 @@ namespace YAML
 
 		// eat
 		INPUT.Eat(3);
-		return pToken;
+		m_tokens.push(new Token(TT_DOC_END));
 	}
 
-	// FlowSeqStartToken
-	template <> FlowSeqStartToken *Scanner::ScanToken(FlowSeqStartToken *pToken)
+	// FlowStart
+	void Scanner::ScanFlowStart()
 	{
-		// flow sequences can be simple keys
+		// flows can be simple keys
 		InsertSimpleKey();
 		m_flowLevel++;
 		m_simpleKeyAllowed = true;
 
 		// eat
-		INPUT.Eat(1);
-		return pToken;
-	}
-
-	// FlowMapStartToken
-	template <> FlowMapStartToken *Scanner::ScanToken(FlowMapStartToken *pToken)
-	{
-		// flow maps can be simple keys
-		InsertSimpleKey();
-		m_flowLevel++;
-		m_simpleKeyAllowed = true;
-
-		// eat
-		INPUT.Eat(1);
-		return pToken;
-	}
-
-	// FlowSeqEndToken
-	template <> FlowSeqEndToken *Scanner::ScanToken(FlowSeqEndToken *pToken)
+		char ch = INPUT.GetChar();
+		TOKEN_TYPE type = (ch == Keys::FlowSeqStart ? TT_FLOW_SEQ_START : TT_FLOW_MAP_START);
+		m_tokens.push(new Token(type));
+	}
+
+	// FlowEnd
+	void Scanner::ScanFlowEnd()
 	{
 		if(m_flowLevel == 0)
 			throw IllegalFlowEnd();
@@ -133,36 +101,23 @@ namespace YAML
 		m_simpleKeyAllowed = false;
 
 		// eat
-		INPUT.Eat(1);
-		return pToken;
+		char ch = INPUT.GetChar();
+		TOKEN_TYPE type = (ch == Keys::FlowSeqEnd ? TT_FLOW_SEQ_END : TT_FLOW_MAP_END);
+		m_tokens.push(new Token(type));
 	}
 
-	// FlowMapEndToken
-	template <> FlowMapEndToken *Scanner::ScanToken(FlowMapEndToken *pToken)
-	{
-		if(m_flowLevel == 0)
-			throw IllegalFlowEnd();
-
-		m_flowLevel--;
-		m_simpleKeyAllowed = false;
-
-		// eat
-		INPUT.Eat(1);
-		return pToken;
-	}
-
-	// FlowEntryToken
-	template <> FlowEntryToken *Scanner::ScanToken(FlowEntryToken *pToken)
+	// FlowEntry
+	void Scanner::ScanFlowEntry()
 	{
 		m_simpleKeyAllowed = true;
 
 		// eat
 		INPUT.Eat(1);
-		return pToken;
+		m_tokens.push(new Token(TT_FLOW_ENTRY));
 	}
 
-	// BlockEntryToken
-	template <> BlockEntryToken *Scanner::ScanToken(BlockEntryToken *pToken)
+	// BlockEntry
+	void Scanner::ScanBlockEntry()
 	{
 		// we better be in the block context!
 		if(m_flowLevel > 0)
@@ -177,11 +132,11 @@ namespace YAML
 
 		// eat
 		INPUT.Eat(1);
-		return pToken;
+		m_tokens.push(new Token(TT_BLOCK_ENTRY));
 	}
 
-	// KeyToken
-	template <> KeyToken *Scanner::ScanToken(KeyToken *pToken)
+	// Key
+	void Scanner::ScanKey()
 	{
 		// handle keys diffently in the block context (and manage indents)
 		if(m_flowLevel == 0) {
@@ -199,11 +154,11 @@ namespace YAML
 
 		// eat
 		INPUT.Eat(1);
-		return pToken;
+		m_tokens.push(new Token(TT_KEY));
 	}
 
-	// ValueToken
-	template <> ValueToken *Scanner::ScanToken(ValueToken *pToken)
+	// Value
+	void Scanner::ScanValue()
 	{
 		// does this follow a simple key?
 		if(m_isLastKeyValid) {
@@ -227,12 +182,15 @@ namespace YAML
 
 		// eat
 		INPUT.Eat(1);
-		return pToken;
+		m_tokens.push(new Token(TT_VALUE));
 	}
 
-	// AnchorToken
-	template <> AnchorToken *Scanner::ScanToken(AnchorToken *pToken)
+	// AnchorOrAlias
+	void Scanner::ScanAnchorOrAlias()
 	{
+		bool alias;
+		std::string tag;
+
 		// insert a potential simple key
 		if(m_simpleKeyAllowed)
 			InsertSimpleKey();
@@ -240,10 +198,9 @@ namespace YAML
 
 		// eat the indicator
 		char indicator = INPUT.GetChar();
-		pToken->alias = (indicator == Keys::Alias);
+		alias = (indicator == Keys::Alias);
 
 		// now eat the content
-		std::string tag;
 		while(Exp::AlphaNumeric.Matches(INPUT))
 			tag += INPUT.GetChar();
 
@@ -256,13 +213,16 @@ namespace YAML
 			throw IllegalCharacterInAnchor();
 
 		// and we're done
+		Token *pToken = new Token(alias ? TT_ALIAS : TT_ANCHOR);
 		pToken->value = tag;
-		return pToken;
+		m_tokens.push(pToken);
 	}
 
-	// TagToken
-	template <> TagToken *Scanner::ScanToken(TagToken *pToken)
+	// Tag
+	void Scanner::ScanTag()
 	{
+		std::string handle, suffix;
+
 		// insert a potential simple key
 		if(m_simpleKeyAllowed)
 			InsertSimpleKey();
@@ -273,7 +233,7 @@ namespace YAML
 
 		// read the handle
 		while(INPUT.peek() != EOF && INPUT.peek() != Keys::Tag && !Exp::BlankOrBreak.Matches(INPUT))
-			pToken->handle += INPUT.GetChar();
+			handle += INPUT.GetChar();
 
 		// is there a suffix?
 		if(INPUT.peek() == Keys::Tag) {
@@ -282,15 +242,20 @@ namespace YAML
 
 			// then read it
 			while(INPUT.peek() != EOF && !Exp::BlankOrBreak.Matches(INPUT))
-				pToken->suffix += INPUT.GetChar();
+				suffix += INPUT.GetChar();
 		}
 
-		return pToken;
+		Token *pToken = new Token(TT_TAG);
+		pToken->value = handle;
+		pToken->params.push_back(suffix);
+		m_tokens.push(pToken);
 	}
 
-	// PlainScalarToken
-	template <> PlainScalarToken *Scanner::ScanToken(PlainScalarToken *pToken)
+	// PlainScalar
+	void Scanner::ScanPlainScalar()
 	{
+		std::string scalar;
+
 		// set up the scanning parameters
 		ScanScalarParams params;
 		params.end = (m_flowLevel > 0 ? Exp::EndScalarInFlow : Exp::EndScalar) || (RegEx(' ') + Exp::Comment);
@@ -307,7 +272,7 @@ namespace YAML
 		if(m_simpleKeyAllowed)
 			InsertSimpleKey();
 
-		pToken->value = ScanScalar(INPUT, params);
+		scalar = ScanScalar(INPUT, params);
 
 		// can have a simple key only if we ended the scalar by starting a new line
 		m_simpleKeyAllowed = params.leadingSpaces;
@@ -317,21 +282,25 @@ namespace YAML
 		if(Exp::IllegalColonInScalar.Matches(INPUT))
 			throw IllegalScalar();
 
-		return pToken;
+		Token *pToken = new Token(TT_SCALAR);
+		pToken->value = scalar;
+		m_tokens.push(pToken);
 	}
 
-	// QuotedScalarToken
-	template <> QuotedScalarToken *Scanner::ScanToken(QuotedScalarToken *pToken)
+	// QuotedScalar
+	void Scanner::ScanQuotedScalar()
 	{
+		std::string scalar;
+
 		// eat single or double quote
 		char quote = INPUT.GetChar();
-		pToken->single = (quote == '\'');
+		bool single = (quote == '\'');
 
 		// setup the scanning parameters
 		ScanScalarParams params;
-		params.end = (pToken->single ? RegEx(quote) && !Exp::EscSingleQuote : RegEx(quote));
+		params.end = (single ? RegEx(quote) && !Exp::EscSingleQuote : RegEx(quote));
 		params.eatEnd = true;
-		params.escape = (pToken->single ? '\'' : '\\');
+		params.escape = (single ? '\'' : '\\');
 		params.indent = 0;
 		params.fold = true;
 		params.eatLeadingWhitespace = true;
@@ -343,18 +312,22 @@ namespace YAML
 		if(m_simpleKeyAllowed)
 			InsertSimpleKey();
 
-		pToken->value = ScanScalar(INPUT, params);
+		scalar = ScanScalar(INPUT, params);
 		m_simpleKeyAllowed = false;
 
-		return pToken;
+		Token *pToken = new Token(TT_SCALAR);
+		pToken->value = scalar;
+		m_tokens.push(pToken);
 	}
 
 	// BlockScalarToken
 	// . These need a little extra processing beforehand.
 	// . We need to scan the line where the indicator is (this doesn't count as part of the scalar),
 	// and then we need to figure out what level of indentation we'll be using.
-	template <> BlockScalarToken *Scanner::ScanToken(BlockScalarToken *pToken)
+	void Scanner::ScanBlockScalar()
 	{
+		std::string scalar;
+
 		ScanScalarParams params;
 		params.indent = 1;
 		params.detectIndent = true;
@@ -401,10 +374,13 @@ namespace YAML
 		params.trimTrailingSpaces = false;
 		params.onTabInIndentation = THROW;
 
-		pToken->value = ScanScalar(INPUT, params);
+		scalar = ScanScalar(INPUT, params);
 
 		// simple keys always ok after block scalars (since we're gonna start a new line anyways)
 		m_simpleKeyAllowed = true;
-		return pToken;
+
+		Token *pToken = new Token(TT_SCALAR);
+		pToken->value = scalar;
+		m_tokens.push(pToken);
 	}
 }
@@ -44,7 +44,7 @@ namespace YAML
 		// key.required = true; // TODO: is this correct?
 
 		// then add the (now unverified) key
-		key.pKey = new KeyToken;
+		key.pKey = new Token(TT_KEY);
 		key.pKey->status = TS_UNVERIFIED;
 		m_tokens.push(key.pKey);
token.h (105 changed lines)

@@ -7,64 +7,61 @@
 namespace YAML
 {
 	enum TOKEN_STATUS { TS_VALID, TS_INVALID, TS_UNVERIFIED };
+	enum TOKEN_TYPE {
+		TT_DIRECTIVE,
+		TT_DOC_START,
+		TT_DOC_END,
+		TT_BLOCK_SEQ_START,
+		TT_BLOCK_MAP_START,
+		TT_BLOCK_END,
+		TT_BLOCK_ENTRY,
+		TT_FLOW_SEQ_START,
+		TT_FLOW_MAP_START,
+		TT_FLOW_SEQ_END,
+		TT_FLOW_MAP_END,
+		TT_FLOW_ENTRY,
+		TT_KEY,
+		TT_VALUE,
+		TT_ANCHOR,
+		TT_ALIAS,
+		TT_TAG,
+		TT_SCALAR,
+	};
+
+	const std::string TokenNames[] = {
+		"DIRECTIVE",
+		"DOC_START",
+		"DOC_END",
+		"BLOCK_SEQ_START",
+		"BLOCK_MAP_START",
+		"BLOCK_END",
+		"BLOCK_ENTRY",
+		"FLOW_SEQ_START",
+		"FLOW_MAP_START",
+		"FLOW_SEQ_END",
+		"FLOW_MAP_END",
+		"FLOW_ENTRY",
+		"KEY",
+		"VALUE",
+		"ANCHOR",
+		"ALIAS",
+		"TAG",
+		"SCALAR",
+	};
+
 	struct Token {
-		Token(): status(TS_VALID) {}
-		virtual ~Token() {}
-		virtual void Write(std::ostream& out) const {}
+		Token(TOKEN_TYPE type_): status(TS_VALID), type(type_) {}
+		friend std::ostream& operator << (std::ostream& out, const Token& token) {
+			out << TokenNames[token.type] << ": " << token.value;
+			for(unsigned i=0;i<token.params.size();i++)
+				out << " " << token.params[i];
+			return out;
+		}
 
-		friend std::ostream& operator << (std::ostream& out, const Token& token) { token.Write(out); return out; }
 		TOKEN_STATUS status;
-	};
-
-	struct StreamStartToken: public Token {};
-	struct StreamEndToken: public Token {};
-	struct DirectiveToken: public Token {
-		std::string name;
+		TOKEN_TYPE type;
+		std::string value;
 		std::vector <std::string> params;
-
-		virtual void Write(std::ostream& out) const { out << name; for(unsigned i=0;i<params.size();i++) out << " " << params[i]; }
 	};
-
-	struct DocumentStartToken: public Token {};
-	struct DocumentEndToken: public Token {};
-
-	struct BlockSeqStartToken: public Token {};
-	struct BlockMapStartToken: public Token {};
-	struct BlockEndToken: public Token {};
-	struct BlockEntryToken: public Token {};
-
-	struct FlowSeqStartToken: public Token {};
-	struct FlowMapStartToken: public Token {};
-	struct FlowSeqEndToken: public Token {};
-	struct FlowMapEndToken: public Token {};
-	struct FlowEntryToken: public Token {};
-
-	struct KeyToken: public Token {};
-	struct ValueToken: public Token {};
-	struct AnchorToken: public Token {
-		bool alias;
-		std::string value;
-
-		virtual void Write(std::ostream& out) const { out << (alias ? '*' : '&') << value; }
-	};
-
-	struct TagToken: public Token {
-		std::string handle, suffix;
-
-		virtual void Write(std::ostream& out) const { out << "!" << handle << "!" << suffix; }
-	};
-
-	struct ScalarToken: public Token {
-		std::string value;
-		virtual void Write(std::ostream& out) const { out << value; }
-	};
-
-	struct PlainScalarToken: public ScalarToken {};
-	struct QuotedScalarToken: public ScalarToken {
-		bool single;
-		virtual void Write(std::ostream& out) const { out << (single ? '\'' : '\"') << value << (single ? '\'' : '\"'); }
-	};
-
-	struct BlockScalarToken: public ScalarToken {};
 }
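For completeness, a usage sketch (not part of the commit) of the new printing path that the `std::cout << *pToken` call in the parser hunk relies on: the friend operator<< looks up the token's name in TokenNames and appends its value and params. The include of token.h assumes the header is on the include path and that <iostream>, <string>, and <vector> are pulled in first, since the diff doesn't show the header's own includes.

#include <iostream>
#include <string>
#include <vector>
#include "token.h"   // the new header from this commit (path assumed)

int main()
{
	YAML::Token token(YAML::TT_DIRECTIVE);
	token.value = "YAML";
	token.params.push_back("1.1");

	// Prints "DIRECTIVE: YAML 1.1" via the new friend operator<<.
	std::cout << token << std::endl;
	return 0;
}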