vrshoot

view libs/assimp/FBXTokenizer.cpp @ 2:334d17aed7de

changeset description: visual studio project files
author John Tsiombikas <nuclear@member.fsf.org>
date Sun, 02 Feb 2014 18:36:38 +0200
/*
Open Asset Import Library (assimp)
----------------------------------------------------------------------

Copyright (c) 2006-2012, assimp team
All rights reserved.

Redistribution and use of this software in source and binary forms,
with or without modification, are permitted provided that the
following conditions are met:

* Redistributions of source code must retain the above
  copyright notice, this list of conditions and the
  following disclaimer.

* Redistributions in binary form must reproduce the above
  copyright notice, this list of conditions and the
  following disclaimer in the documentation and/or other
  materials provided with the distribution.

* Neither the name of the assimp team, nor the names of its
  contributors may be used to endorse or promote products
  derived from this software without specific prior
  written permission of the assimp team.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

----------------------------------------------------------------------
*/
/** @file  FBXTokenizer.cpp
 *  @brief Implementation of the FBX broadphase lexer
 */
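// The "broadphase" pass splits raw ASCII FBX input into a flat stream of
// KEY, DATA, bracket and comma tokens; interpreting the meaning of those
// tokens is left to the FBX parser built on top of this token list.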
#include "AssimpPCH.h"

#ifndef ASSIMP_BUILD_NO_FBX_IMPORTER

// tab width for logging columns
#define ASSIMP_FBX_TAB_WIDTH 4

#include "ParsingUtils.h"

#include "FBXTokenizer.h"
#include "FBXUtil.h"
namespace Assimp {
namespace FBX {

// ------------------------------------------------------------------------------------------------
Token::Token(const char* sbegin, const char* send, TokenType type, unsigned int line, unsigned int column)
    : sbegin(sbegin)
    , send(send)
    , type(type)
    , line(line)
    , column(column)
#ifdef DEBUG
    , contents(sbegin, static_cast<size_t>(send-sbegin))
#endif
{
    ai_assert(sbegin);
    ai_assert(send);

    // tokens must be of non-zero length
    ai_assert(static_cast<size_t>(send-sbegin) > 0);
}
// ------------------------------------------------------------------------------------------------
Token::~Token()
{
}
namespace {

// ------------------------------------------------------------------------------------------------
// signal tokenization error, this is always unrecoverable. Throws DeadlyImportError.
void TokenizeError(const std::string& message, unsigned int line, unsigned int column)
{
    throw DeadlyImportError(Util::AddLineAndColumn("FBX-Tokenize",message,line,column));
}
// process a potential data token up to 'cur', adding it to 'output_tokens'.
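// 'start' and 'end' delimit the raw characters collected for the token so far
// (the range is inclusive of 'end'); both pointers are reset to NULL once the
// token has been emitted. Before emitting, the range is checked for stray
// whitespace outside quoted text and for unterminated double quotes.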
// ------------------------------------------------------------------------------------------------
void ProcessDataToken( TokenList& output_tokens, const char*& start, const char*& end,
                       unsigned int line,
                       unsigned int column,
                       TokenType type = TokenType_DATA,
                       bool must_have_token = false)
{
    if (start && end) {
        // sanity check:
        // tokens should have no whitespace outside quoted text and [start,end] should
        // properly delimit the valid range.
        bool in_double_quotes = false;
        for (const char* c = start; c != end + 1; ++c) {
            if (*c == '\"') {
                in_double_quotes = !in_double_quotes;
            }

            if (!in_double_quotes && IsSpaceOrNewLine(*c)) {
                TokenizeError("unexpected whitespace in token", line, column);
            }
        }

        if (in_double_quotes) {
            TokenizeError("non-terminated double quotes", line, column);
        }

        output_tokens.push_back(new_Token(start,end + 1,type,line,column));
    }
    else if (must_have_token) {
        TokenizeError("unexpected character, expected data token", line, column);
    }

    start = end = NULL;
}
}

// ------------------------------------------------------------------------------------------------
void Tokenize(TokenList& output_tokens, const char* input)
{
    ai_assert(input);

    // line and column numbers are one-based
    unsigned int line = 1;
    unsigned int column = 1;

    bool comment = false;
    bool in_double_quotes = false;
    bool pending_data_token = false;

    const char* token_begin = NULL, *token_end = NULL;
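    // token_begin/token_end bracket the raw characters of the token currently
    // being assembled; pending_data_token is set while such a token is open.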
    for (const char* cur = input;*cur;column += (*cur == '\t' ? ASSIMP_FBX_TAB_WIDTH : 1), ++cur) {
        const char c = *cur;

        if (IsLineEnd(c)) {
            comment = false;

            column = 0;
            ++line;
        }

        if(comment) {
            continue;
        }

        if(in_double_quotes) {
            if (c == '\"') {
                in_double_quotes = false;
                token_end = cur;

                ProcessDataToken(output_tokens,token_begin,token_end,line,column);
                pending_data_token = false;
            }
            continue;
        }
        switch(c)
        {
        case '\"':
            if (token_begin) {
                TokenizeError("unexpected double-quote", line, column);
            }
            token_begin = cur;
            in_double_quotes = true;
            continue;

        case ';':
            ProcessDataToken(output_tokens,token_begin,token_end,line,column);
            comment = true;
            continue;

        case '{':
            ProcessDataToken(output_tokens,token_begin,token_end, line, column);
            output_tokens.push_back(new_Token(cur,cur+1,TokenType_OPEN_BRACKET,line,column));
            continue;

        case '}':
            ProcessDataToken(output_tokens,token_begin,token_end,line,column);
            output_tokens.push_back(new_Token(cur,cur+1,TokenType_CLOSE_BRACKET,line,column));
            continue;

        case ',':
            if (pending_data_token) {
                ProcessDataToken(output_tokens,token_begin,token_end,line,column,TokenType_DATA,true);
            }
            output_tokens.push_back(new_Token(cur,cur+1,TokenType_COMMA,line,column));
            continue;

        case ':':
            if (pending_data_token) {
                ProcessDataToken(output_tokens,token_begin,token_end,line,column,TokenType_KEY,true);
            }
            else {
                TokenizeError("unexpected colon", line, column);
            }
            continue;
        }
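        // not a structural character: it either terminates the current token
        // (whitespace) or extends it (anything else).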
        if (IsSpaceOrNewLine(c)) {

            if (token_begin) {
                // peek ahead and check if the next token is a colon in which
                // case this counts as KEY token.
                TokenType type = TokenType_DATA;
                for (const char* peek = cur; *peek && IsSpaceOrNewLine(*peek); ++peek) {
                    if (*peek == ':') {
                        type = TokenType_KEY;
                        cur = peek;
                        break;
                    }
                }

                ProcessDataToken(output_tokens,token_begin,token_end,line,column,type);
            }

            pending_data_token = false;
        }
        else {
            token_end = cur;
            if (!token_begin) {
                token_begin = cur;
            }

            pending_data_token = true;
        }
    }
}
} // !FBX
} // !Assimp

#endif
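
For context, a minimal usage sketch (not part of the file above) showing how the tokenizer might be driven. It assumes that TokenList and Tokenize are declared in FBXTokenizer.h as used in this translation unit; the driver function below is hypothetical and for illustration only.

    #include "FBXTokenizer.h"

    // hypothetical driver, for illustration only
    void RunTokenizerExample()
    {
        Assimp::FBX::TokenList tokens;

        // a tiny ASCII FBX fragment; malformed input makes the tokenizer
        // throw DeadlyImportError carrying the offending line and column
        Assimp::FBX::Tokenize(tokens, "Model: \"Cube\" {\n}\n");

        // 'tokens' now holds the flat KEY/DATA/bracket/comma stream that the
        // FBX parser consumes
    }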