View Javadoc

1   /*
2    * ModeShape (http://www.modeshape.org)
3    * See the COPYRIGHT.txt file distributed with this work for information
4    * regarding copyright ownership.  Some portions may be licensed
5    * to Red Hat, Inc. under one or more contributor license agreements.
6    * See the AUTHORS.txt file in the distribution for a full listing of 
7    * individual contributors.
8    *
9    * ModeShape is free software. Unless otherwise indicated, all code in ModeShape
10   * is licensed to you under the terms of the GNU Lesser General Public License as
11   * published by the Free Software Foundation; either version 2.1 of
12   * the License, or (at your option) any later version.
13   * 
14   * ModeShape is distributed in the hope that it will be useful,
15   * but WITHOUT ANY WARRANTY; without even the implied warranty of
16   * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
17   * Lesser General Public License for more details.
18   *
19   * You should have received a copy of the GNU Lesser General Public
20   * License along with this software; if not, write to the Free
21   * Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
22   * 02110-1301 USA, or see the FSF site: http://www.fsf.org.
23   */
24  
25  /**
26   * This class provides basic parsing of SQL-92 based DDL files.  The initial implementation does NOT handle generic SQL query
27   * statements, but rather database schema manipulation (i.e. CREATE, DROP, ALTER, etc...)
28   * 
29   */
30  package org.modeshape.sequencer.ddl;
31  
32  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.ALL_PRIVILEGES;
33  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.CHECK_SEARCH_CONDITION;
34  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.COLLATION_NAME;
35  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.CONSTRAINT_ATTRIBUTE_TYPE;
36  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.CONSTRAINT_TYPE;
37  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.CREATE_VIEW_QUERY_EXPRESSION;
38  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.DDL_EXPRESSION;
39  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.DDL_START_CHAR_INDEX;
40  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.DDL_START_COLUMN_NUMBER;
41  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.DDL_START_LINE_NUMBER;
42  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.DEFAULT_OPTION;
43  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.DEFAULT_PRECISION;
44  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.DEFAULT_VALUE;
45  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.DROP_BEHAVIOR;
46  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.GRANTEE;
47  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.GRANT_PRIVILEGE;
48  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.MESSAGE;
49  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.NAME;
50  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.NULLABLE;
51  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.PROBLEM_LEVEL;
52  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.PROPERTY_VALUE;
53  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TEMPORARY;
54  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE;
55  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_ADD_TABLE_CONSTRAINT_DEFINITION;
56  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_ALTER_COLUMN_DEFINITION;
57  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_ALTER_DOMAIN_STATEMENT;
58  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_ALTER_TABLE_STATEMENT;
59  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_COLUMN_DEFINITION;
60  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_COLUMN_REFERENCE;
61  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_CREATE_ASSERTION_STATEMENT;
62  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_CREATE_CHARACTER_SET_STATEMENT;
63  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_CREATE_COLLATION_STATEMENT;
64  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_CREATE_DOMAIN_STATEMENT;
65  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_CREATE_SCHEMA_STATEMENT;
66  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_CREATE_TABLE_STATEMENT;
67  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_CREATE_TRANSLATION_STATEMENT;
68  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_CREATE_VIEW_STATEMENT;
69  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_DROP_ASSERTION_STATEMENT;
70  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_DROP_CHARACTER_SET_STATEMENT;
71  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_DROP_COLLATION_STATEMENT;
72  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_DROP_COLUMN_DEFINITION;
73  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_DROP_DOMAIN_STATEMENT;
74  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_DROP_SCHEMA_STATEMENT;
75  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_DROP_TABLE_CONSTRAINT_DEFINITION;
76  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_DROP_TABLE_STATEMENT;
77  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_DROP_TRANSLATION_STATEMENT;
78  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_DROP_VIEW_STATEMENT;
79  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_FK_COLUMN_REFERENCE;
80  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_GRANT_ON_CHARACTER_SET_STATEMENT;
81  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_GRANT_ON_COLLATION_STATEMENT;
82  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_GRANT_ON_DOMAIN_STATEMENT;
83  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_GRANT_ON_TABLE_STATEMENT;
84  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_GRANT_ON_TRANSLATION_STATEMENT;
85  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_INSERT_STATEMENT;
86  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_MISSING_TERMINATOR;
87  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_PROBLEM;
88  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_REVOKE_ON_CHARACTER_SET_STATEMENT;
89  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_REVOKE_ON_COLLATION_STATEMENT;
90  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_REVOKE_ON_DOMAIN_STATEMENT;
91  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_REVOKE_ON_TABLE_STATEMENT;
92  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_REVOKE_ON_TRANSLATION_STATEMENT;
93  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_SET_STATEMENT;
94  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_STATEMENT;
95  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_STATEMENT_OPTION;
96  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_TABLE_CONSTRAINT;
97  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_TABLE_REFERENCE;
98  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.VALUE;
99  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.WITH_GRANT_OPTION;
100 import java.math.BigInteger;
101 import java.util.ArrayList;
102 import java.util.Collections;
103 import java.util.LinkedList;
104 import java.util.List;
105 import net.jcip.annotations.NotThreadSafe;
106 import org.modeshape.common.text.ParsingException;
107 import org.modeshape.common.text.Position;
108 import org.modeshape.common.util.CheckArg;
109 import org.modeshape.graph.JcrLexicon;
110 import org.modeshape.graph.property.Name;
111 import org.modeshape.sequencer.ddl.DdlTokenStream.DdlTokenizer;
112 import org.modeshape.sequencer.ddl.datatype.DataType;
113 import org.modeshape.sequencer.ddl.datatype.DataTypeParser;
114 import org.modeshape.sequencer.ddl.node.AstNode;
115 import org.modeshape.sequencer.ddl.node.AstNodeFactory;
116 
117 /**
118  * Standard SQL 92 DDL file content parser.
119  */
120 @NotThreadSafe
121 public class StandardDdlParser implements DdlParser, DdlConstants, DdlConstants.StatementStartPhrases {
122 
123     private final String parserId = "SQL92";
124     private boolean testMode = false;
125     private final List<DdlParserProblem> problems;
126     private final AstNodeFactory nodeFactory;
127     private AstNode rootNode;
128     private List<String> allDataTypeStartWords = null;
129     private DataTypeParser datatypeParser = null;
130     private String terminator = DEFAULT_TERMINATOR;
131     private boolean useTerminator = false;
132     private Position currentMarkedPosition;
133 
134     public StandardDdlParser() {
135         super();
136         setDoUseTerminator(true);
137         setDatatypeParser(new DataTypeParser());
138         nodeFactory = new AstNodeFactory();
139         problems = new ArrayList<DdlParserProblem>();
140     }
141 
142     /**
143      * Returns the data type parser instance.
144      * 
145      * @return the {@link DataTypeParser}
146      */
147     public DataTypeParser getDatatypeParser() {
148         return datatypeParser;
149     }
150 
    /**
     * Sets the parser used to handle data type tokens.
     * 
     * @param datatypeParser the {@link DataTypeParser} to use; the constructor installs a default instance
     */
    public void setDatatypeParser( DataTypeParser datatypeParser ) {
        this.datatypeParser = datatypeParser;
    }
157 
158     /**
159      * Method to access the node utility class.
160      * 
161      * @return the instance of the {@link AstNodeFactory} node utility class
162      */
163     public AstNodeFactory nodeFactory() {
164         return this.nodeFactory;
165     }
166 
167     /**
168      * @return rootNode
169      */
170     public AstNode getRootNode() {
171         return rootNode;
172     }
173 
    /**
     * Sets the root node under which all parsed statement nodes are created.
     * 
     * @param rootNode Sets rootNode to the specified value.
     */
    public void setRootNode( AstNode rootNode ) {
        this.rootNode = rootNode;
    }
180 
181     /**
182      * {@inheritDoc}
183      * 
184      * @see org.modeshape.sequencer.ddl.DdlParser#score(java.lang.String, java.lang.String,
185      *      org.modeshape.sequencer.ddl.DdlParserScorer)
186      */
187     public Object score( String ddl,
188                          String fileName,
189                          DdlParserScorer scorer ) throws ParsingException {
190         CheckArg.isNotNull(ddl, "ddl");
191         CheckArg.isNotNull(scorer, "scorer");
192 
193         if (fileName != null) {
194             // Score the filename using the identifier only ...
195             scorer.scoreText(fileName, 2, getIdentifyingKeywords());
196         }
197 
198         // Create the state of this parser ...
199         problems.clear();
200         boolean includeComments = true;
201         DdlTokenStream tokens = new DdlTokenStream(ddl, DdlTokenStream.ddlTokenizer(includeComments), false);
202         initializeTokenStream(tokens);
203         tokens.start();
204 
205         testPrint("\n== >> StandardDdlParser.parse() PARSING STARTED: ");
206 
207         // Consume the first block of comments ...
208         while (tokens.matches(DdlTokenizer.COMMENT)) {
209             // Consume the comment ...
210             String comment = tokens.consume();
211             scorer.scoreText(comment, 2, getIdentifyingKeywords());
212         }
213 
214         // Compute the score for the rest of this content ...
215         computeScore(tokens, scorer);
216 
217         // Return the tokens so parse(...) won't have to re-tokenize ...
218         return tokens;
219     }
220 
221     protected void computeScore( DdlTokenStream tokens,
222                                  DdlParserScorer scorer ) {
223         while (tokens.hasNext()) {
224             if (tokens.isNextKeyWord()) {
225                 scorer.scoreStatements(1);
226             }
227             tokens.consume();
228         }
229     }
230 
231     public String[] getIdentifyingKeywords() {
232         return new String[] {getId()};
233     }
234 
    /**
     * {@inheritDoc}
     * 
     * @see org.modeshape.sequencer.ddl.DdlParser#parse(java.lang.String, org.modeshape.sequencer.ddl.node.AstNode,
     *      java.lang.Object)
     */
    public void parse( String ddl,
                       AstNode rootNode,
                       Object scoreReturnObject ) throws ParsingException {
        CheckArg.isNotNull(ddl, "ddl");
        CheckArg.isNotNull(rootNode, "rootNode");
        problems.clear();
        setRootNode(rootNode);

        // Reuse the token stream produced by score(...) when available; otherwise tokenize now.
        DdlTokenStream tokens = null;
        if (scoreReturnObject instanceof DdlTokenStream) {
            tokens = (DdlTokenStream)scoreReturnObject;
            tokens.rewind();
        } else {
            // Need to create the token stream ...
            boolean includeComments = false;
            tokens = new DdlTokenStream(ddl, DdlTokenStream.ddlTokenizer(includeComments), false);
            initializeTokenStream(tokens);
            tokens.start();
        }

        testPrint("\n== >> StandardDdlParser.parse() PARSING STARTED: ");

        // Simply move to the next statement start (registered prior to tokenizing).
        while (moveToNextStatementStart(tokens)) {

            // It is assumed that if a statement is registered, the registering dialect will handle the parsing of that object
            // and successfully create a statement {@link AstNode}
            AstNode stmtNode = parseNextStatement(tokens, rootNode);
            if (stmtNode == null) {
                // No dialect recognized the statement: record it as an ignorable statement node instead.
                markStartOfStatement(tokens);
                String stmtName = tokens.consume();
                stmtNode = parseIgnorableStatement(tokens, stmtName, rootNode);
                markEndOfStatement(tokens, stmtNode);
            }
            // testPrint("== >> Found Statement" + "(" + (++count) + "):\n" + stmtNode);
        }

        // Give subclasses a chance to post-process the tree (default removes missing-terminator nodes).
        rewrite(tokens, rootNode);

        // Attach every accumulated problem as a child node of the root.
        for (DdlParserProblem problem : problems) {
            attachNewProblem(problem, rootNode);
        }

        // // Compute the score based upon the number of AST nodes ...
        // // System.out.println("\n\n " + getId() + " (score=" + (getScore(rootNode) - 1 - (problems.size() * 2)) + ")\n" +
        // // rootNode);
        // int score = getScore(rootNode) - 1; // exclude the root, since we didn't create it
        // score -= (problems.size() * 2); // remove double the # of problems
        // scorer.scoreStatements(score);

        if (testMode) {
            // testPrint("== >> StandardDdlParser.parse() PARSING COMPLETE: " + statements.size() + " statements parsed.\n\n");
            int count = 0;
            for (AstNode child : rootNode.getChildren()) {
                testPrint("== >> Found Statement" + "(" + (++count) + "):\n" + child);
            }
        }
    }
299 
    /**
     * Method called by {@link #score(String, String, DdlParserScorer)} and {@link #parse(String, AstNode, Object)} to initialize
     * the {@link DdlTokenStream token stream}, giving subclasses a chance to {@link DdlTokenStream#registeredKeyWords register
     * key words} and {@link DdlTokenStream#registerStatementStartPhrase(String[]) statement start phrases}.
     * 
     * @param tokens the stream of tokens
     */
    protected void initializeTokenStream( DdlTokenStream tokens ) {
        // Default registration covers the SQL 92 reserved words and statement-start phrases;
        // dialect subclasses typically call super and then register their own additions.
        tokens.registerKeyWords(SQL_92_RESERVED_WORDS);
        tokens.registerStatementStartPhrase(SQL_92_ALL_PHRASES);
    }
311 
312     /**
313      * Performs token match checks for initial statement type and delegates to specific parser methods. If no specific statement
314      * is found, then a call is made to parse a custom statement type. Subclasses may override this method, but the
315      * {@link StandardDdlParser}.parseCustomStatement() method is designed to allow for parsing db-specific statement types.
316      * 
317      * @param tokens the tokenized {@link DdlTokenStream} of the DDL input content; may not be null
318      * @param node the top level {@link AstNode}; may not be null
319      * @return node the new statement node
320      */
321     protected AstNode parseNextStatement( DdlTokenStream tokens,
322                                           AstNode node ) {
323         assert tokens != null;
324         assert node != null;
325 
326         AstNode stmtNode = null;
327 
328         if (tokens.matches(CREATE)) {
329             stmtNode = parseCreateStatement(tokens, node);
330         } else if (tokens.matches(ALTER)) {
331             stmtNode = parseAlterStatement(tokens, node);
332         } else if (tokens.matches(DROP)) {
333             stmtNode = parseDropStatement(tokens, node);
334         } else if (tokens.matches(INSERT)) {
335             stmtNode = parseInsertStatement(tokens, node);
336         } else if (tokens.matches(SET)) {
337             stmtNode = parseSetStatement(tokens, node);
338         } else if (tokens.matches(GRANT)) {
339             stmtNode = parseGrantStatement(tokens, node);
340         } else if (tokens.matches(REVOKE)) {
341             stmtNode = parseRevokeStatement(tokens, node);
342         }
343 
344         if (stmtNode == null) {
345             stmtNode = parseCustomStatement(tokens, node);
346         }
347 
348         return stmtNode;
349     }
350 
    /**
     * Advances the token stream to the next registered statement-start phrase, collecting any unrecognized tokens
     * along the way into a WARNING-level problem (unless a subclass claims them via
     * {@link #handleUnknownToken(DdlTokenStream, String)} or they form a terminator-delimited unknown statement).
     * 
     * @param tokens the stream to advance; may not be null
     * @return true if positioned at a statement start, or false when the stream is exhausted
     * @throws ParsingException
     */
    private boolean moveToNextStatementStart( DdlTokenStream tokens ) throws ParsingException {
        assert tokens != null;

        StringBuffer sb = new StringBuffer();
        DdlParserProblem problem = null;

        // Check to see if any more tokens exists
        if (tokens.hasNext()) {
            while (tokens.hasNext()) {
                if (tokens.canConsume(DdlTokenizer.COMMENT)) continue;

                // If the next token is a STATEMENT_KEY, then stop
                if (!tokens.matches(DdlTokenizer.STATEMENT_KEY)) {
                    // If the next token is NOT a statement, create a problem statement in case it can't be fully recognized as
                    // a statement.
                    if (problem == null) {
                        markStartOfStatement(tokens);

                        String msg = DdlSequencerI18n.unusedTokensDiscovered.text(tokens.nextPosition().getLine(),
                                                                                  tokens.nextPosition().getColumn());
                        problem = new DdlParserProblem(DdlConstants.Problems.WARNING, tokens.nextPosition(), msg);
                    }

                    String nextTokenValue = null;

                    // For known, parsed statements, the terminator is consumed in the markEndOfStatement() method. So if we get
                    // here, we then we know we've got an unknown statement.
                    if (tokens.matches(getTerminator()) && sb.length() > 0) {
                        nextTokenValue = getTerminator();
                        // Let's call this a statement up until now
                        AstNode unknownNode = unknownTerminatedNode(getRootNode());
                        markEndOfStatement(tokens, unknownNode);
                        // We've determined that it's just an unknown node, which we determine is not a problem node.
                        problem = null;
                    } else {
                        // Just keep consuming, but check each token value and allow sub-classes to handle the token if they wish.
                        // ORACLE, for instance, can terminate a complex statement with a slash, '/'. Calling
                        // handleUnknownToken() allows that dialect to create its own statement node that can be assessed and
                        // used during the rewrite() call at the end of parsing.
                        nextTokenValue = tokens.consume();
                        AstNode unknownNode = handleUnknownToken(tokens, nextTokenValue);
                        if (unknownNode != null) {
                            markEndOfStatement(tokens, unknownNode);
                            // We've determined that it's just an unknown node, which we determine is not a problem node.
                            problem = null;
                        }
                    }
                    sb.append(SPACE).append(nextTokenValue);

                } else {
                    // If we have a problem, add it.
                    if (problem != null && sb.length() > 0) {
                        problem.setUnusedSource(sb.toString());
                        addProblem(problem);
                    }
                    return true;
                }
            }

            // If we still have a problem, add it.
            if (problem != null && sb.length() > 0) {
                problem.setUnusedSource(sb.toString());
                addProblem(problem);
            }
        }
        return false;
    }
418 
419     public final void addProblem( DdlParserProblem problem,
420                                   AstNode node ) {
421         addProblem(problem);
422         attachNewProblem(problem, node);
423     }
424 
    /**
     * Records a parsing problem; accumulated problems are attached to the root node at the end of parsing.
     * 
     * @param problem the problem to record
     */
    public final void addProblem( DdlParserProblem problem ) {
        problems.add(problem);
    }
428 
    /**
     * Returns the problems recorded so far.
     * 
     * @return the live (mutable) list of problems; never null
     */
    public final List<DdlParserProblem> getProblems() {
        return this.problems;
    }
432 
433     public final void attachNewProblem( DdlParserProblem problem,
434                                         AstNode parentNode ) {
435         assert problem != null;
436         assert parentNode != null;
437 
438         AstNode problemNode = nodeFactory().node("DDL PROBLEM", parentNode, TYPE_PROBLEM);
439         problemNode.setProperty(PROBLEM_LEVEL, problem.getLevel());
440         problemNode.setProperty(MESSAGE, problem.toString() + "[" + problem.getUnusedSource() + "]");
441 
442         testPrint(problem.toString());
443     }
444 
    /**
     * Post-processing hook invoked after all statements have been parsed; the default implementation prunes
     * missing-terminator nodes from the tree. Subclasses may override to perform dialect-specific rewrites.
     * 
     * @param tokens the token stream used during parsing; may not be null
     * @param rootNode the root of the parsed tree; may not be null
     */
    protected void rewrite( DdlTokenStream tokens,
                            AstNode rootNode ) {
        assert tokens != null;
        assert rootNode != null;
        // Walk the tree and remove any missing terminator nodes

        removeMissingTerminatorNodes(rootNode);
    }
453 
454     protected void removeMissingTerminatorNodes( AstNode parentNode ) {
455         assert parentNode != null;
456         // Walk the tree and remove any missing missing terminator nodes
457         List<AstNode> copyOfNodes = new ArrayList<AstNode>(parentNode.getChildren());
458 
459         for (AstNode child : copyOfNodes) {
460             if (nodeFactory().hasMixinType(child, TYPE_MISSING_TERMINATOR)) {
461                 parentNode.removeChild(child);
462             } else {
463                 removeMissingTerminatorNodes(child);
464             }
465         }
466     }
467 
468     /**
469      * Merges second node into first node by re-setting expression source and length.
470      * 
471      * @param tokens the {@link DdlTokenStream} representing the tokenized DDL content; may not be null
472      * @param firstNode the node to merge into; may not be null
473      * @param secondNode the node to merge into first node; may not be null
474      */
475     public void mergeNodes( DdlTokenStream tokens,
476                             AstNode firstNode,
477                             AstNode secondNode ) {
478         assert tokens != null;
479         assert firstNode != null;
480         assert secondNode != null;
481 
482         int firstStartIndex = (Integer)firstNode.getProperty(DDL_START_CHAR_INDEX).getFirstValue();
483         int secondStartIndex = (Integer)secondNode.getProperty(DDL_START_CHAR_INDEX).getFirstValue();
484         int deltaLength = ((String)secondNode.getProperty(DDL_EXPRESSION).getFirstValue()).length();
485         Position startPosition = new Position(firstStartIndex, 1, 0);
486         Position endPosition = new Position((secondStartIndex + deltaLength), 1, 0);
487         String source = tokens.getContentBetween(startPosition, endPosition);
488         firstNode.setProperty(DDL_EXPRESSION, source);
489     }
490 
    /**
     * Utility method subclasses can override to check unknown tokens and perform additional node manipulation. Example would be
     * in Oracle dialect for CREATE FUNCTION statements that can end with an '/' character because statement can contain multiple
     * statements.
     * 
     * @param tokens the {@link DdlTokenStream} representing the tokenized DDL content; may not be null
     * @param tokenValue the string value of the unknown token; never null
     * @return the new node, or null when the token is not claimed (the default)
     * @throws ParsingException
     */
    public AstNode handleUnknownToken( DdlTokenStream tokens,
                                       String tokenValue ) throws ParsingException {
        assert tokens != null;
        assert tokenValue != null;
        // DEFAULT IMPLEMENTATION DOES NOTHING
        return null;
    }
508 
    /**
     * Parses DDL CREATE statement based on SQL 92 specifications.
     * 
     * @param tokens the {@link DdlTokenStream} representing the tokenized DDL content; may not be null
     * @param parentNode the parent {@link AstNode} node; may not be null
     * @return the parsed CREATE {@link AstNode}
     * @throws ParsingException
     */
    protected AstNode parseCreateStatement( DdlTokenStream tokens,
                                            AstNode parentNode ) throws ParsingException {
        assert tokens != null;
        assert parentNode != null;

        AstNode stmtNode = null;

        // DEFAULT DOES NOTHING
        // Subclasses can implement additional parsing
        // System.out.println(" >>> FOUND [CREATE] STATEMENT: TOKEN = " + tokens.consume() + " " + tokens.consume() + " " +
        // tokens.consume());
        // SQL 92 CREATE OPTIONS:
        // CREATE SCHEMA
        // CREATE DOMAIN
        // CREATE [ { GLOBAL | LOCAL } TEMPORARY ] TABLE
        // CREATE VIEW
        // CREATE ASSERTION
        // CREATE CHARACTER SET
        // CREATE COLLATION
        // CREATE TRANSLATION

        if (tokens.matches(STMT_CREATE_SCHEMA)) {
            stmtNode = parseCreateSchemaStatement(tokens, parentNode);
        } else if (tokens.matches(STMT_CREATE_TABLE) || tokens.matches(STMT_CREATE_GLOBAL_TEMPORARY_TABLE)
                   || tokens.matches(STMT_CREATE_LOCAL_TEMPORARY_TABLE)) {
            stmtNode = parseCreateTableStatement(tokens, parentNode);
        } else if (tokens.matches(STMT_CREATE_VIEW) || tokens.matches(STMT_CREATE_OR_REPLACE_VIEW)) {
            stmtNode = parseCreateViewStatement(tokens, parentNode);
        } else if (tokens.matches(STMT_CREATE_ASSERTION)) {
            stmtNode = parseCreateAssertionStatement(tokens, parentNode);
        } else if (tokens.matches(STMT_CREATE_CHARACTER_SET)) {
            stmtNode = parseCreateCharacterSetStatement(tokens, parentNode);
        } else if (tokens.matches(STMT_CREATE_COLLATION)) {
            stmtNode = parseCreateCollationStatement(tokens, parentNode);
        } else if (tokens.matches(STMT_CREATE_TRANSLATION)) {
            stmtNode = parseCreateTranslationStatement(tokens, parentNode);
        } else if (tokens.matches(STMT_CREATE_DOMAIN)) {
            stmtNode = parseCreateDomainStatement(tokens, parentNode);
        } else {
            // Unrecognized CREATE form: record it as an ignorable statement tagged with a warning.
            markStartOfStatement(tokens);

            stmtNode = parseIgnorableStatement(tokens, "CREATE UNKNOWN", parentNode);
            Position position = getCurrentMarkedPosition();
            String msg = DdlSequencerI18n.unknownCreateStatement.text(position.getLine(), position.getColumn());
            DdlParserProblem problem = new DdlParserProblem(DdlConstants.Problems.WARNING, position, msg);

            // NOTE(review): TYPE_PROBLEM (a mixin type name) is used here as a property key, unlike
            // attachNewProblem() which creates a dedicated problem node — confirm this is intentional.
            stmtNode.setProperty(TYPE_PROBLEM, problem.toString());

            markEndOfStatement(tokens, stmtNode);
        }

        return stmtNode;
    }
570 
571     /**
572      * Parses DDL ALTER statement based on SQL 92 specifications.
573      * 
574      * @param tokens the {@link DdlTokenStream} representing the tokenized DDL content; may not be null
575      * @param parentNode the parent {@link AstNode} node; may not be null
576      * @return the parsed ALTER {@link AstNode}
577      * @throws ParsingException
578      */
579     protected AstNode parseAlterStatement( DdlTokenStream tokens,
580                                            AstNode parentNode ) throws ParsingException {
581         assert tokens != null;
582         assert parentNode != null;
583 
584         if (tokens.matches(ALTER, TABLE)) {
585             return parseAlterTableStatement(tokens, parentNode);
586         } else if (tokens.matches("ALTER", "DOMAIN")) {
587             markStartOfStatement(tokens);
588             tokens.consume("ALTER", "DOMAIN");
589             String domainName = parseName(tokens);
590             AstNode alterNode = nodeFactory().node(domainName, parentNode, TYPE_ALTER_DOMAIN_STATEMENT);
591             parseUntilTerminator(tokens);
592             markEndOfStatement(tokens, alterNode);
593             return alterNode;
594         }
595         return null;
596     }
597 
598     /**
599      * Parses DDL ALTER TABLE {@link AstNode} based on SQL 92 specifications.
600      * 
601      * @param tokens the {@link DdlTokenStream} representing the tokenized DDL content; may not be null
602      * @param parentNode the parent {@link AstNode} node; may not be null
603      * @return the parsed ALTER TABLE {@link AstNode}
604      * @throws ParsingException
605      */
606     protected AstNode parseAlterTableStatement( DdlTokenStream tokens,
607                                                 AstNode parentNode ) throws ParsingException {
608         assert tokens != null;
609         assert parentNode != null;
610 
611         markStartOfStatement(tokens);
612 
613         // <alter table statement> ::=
614         // ALTER TABLE <table name> <alter table action>
615         //
616         // <alter table action> ::=
617         // <add column definition>
618         // | <alter column definition>
619         // | <drop column definition>
620         // | <add table constraint definition>
621         // | <drop table constraint definition>
622 
623         tokens.consume("ALTER", "TABLE"); // consumes 'ALTER'
624         String tableName = parseName(tokens);
625 
626         AstNode alterTableNode = nodeFactory().node(tableName, parentNode, TYPE_ALTER_TABLE_STATEMENT);
627 
628         if (tokens.canConsume("ADD")) {
629             if (isTableConstraint(tokens)) {
630                 parseTableConstraint(tokens, alterTableNode, true);
631             } else {
632                 parseSingleTerminatedColumnDefinition(tokens, alterTableNode, true);
633             }
634         } else if (tokens.canConsume("DROP")) {
635             if (tokens.canConsume("CONSTRAINT")) {
636                 String constraintName = parseName(tokens); // constraint name
637                 AstNode constraintNode = nodeFactory().node(constraintName, alterTableNode, TYPE_DROP_TABLE_CONSTRAINT_DEFINITION);
638                 if (tokens.canConsume(DropBehavior.CASCADE)) {
639                     constraintNode.setProperty(DROP_BEHAVIOR, DropBehavior.CASCADE);
640                 } else if (tokens.canConsume(DropBehavior.RESTRICT)) {
641                     constraintNode.setProperty(DROP_BEHAVIOR, DropBehavior.RESTRICT);
642                 }
643             } else {
644                 // ALTER TABLE supplier
645                 // DROP COLUMN supplier_name;
646 
647                 // DROP [ COLUMN ] <column name> <drop behavior>
648                 tokens.canConsume("COLUMN"); // "COLUMN" is optional
649                 String columnName = parseName(tokens);
650                 AstNode columnNode = nodeFactory().node(columnName, alterTableNode, TYPE_DROP_COLUMN_DEFINITION);
651                 if (tokens.canConsume(DropBehavior.CASCADE)) {
652                     columnNode.setProperty(DROP_BEHAVIOR, DropBehavior.CASCADE);
653                 } else if (tokens.canConsume(DropBehavior.RESTRICT)) {
654                     columnNode.setProperty(DROP_BEHAVIOR, DropBehavior.RESTRICT);
655                 }
656             }
657         } else if (tokens.canConsume("ALTER")) {
658             // EXAMPLE: ALTER TABLE table_name [ ALTER column_name SET DEFAULT (0) ]
659             //
660             // ALTER [ COLUMN ] <column name> {SET <default clause> | DROP DEFAULT}
661 
662             tokens.canConsume("COLUMN");
663             String alterColumnName = parseName(tokens);
664             AstNode columnNode = nodeFactory().node(alterColumnName, alterTableNode, TYPE_ALTER_COLUMN_DEFINITION);
665             if (tokens.canConsume("SET")) {
666                 parseDefaultClause(tokens, columnNode);
667             } else if (tokens.canConsume("DROP", "DEFAULT")) {
668                 columnNode.setProperty(DROP_BEHAVIOR, "DROP DEFAULT");
669             }
670         } else {
671             parseUntilTerminator(tokens); // COULD BE "NESTED TABLE xxxxxxxx" option clause
672         }
673 
674         markEndOfStatement(tokens, alterTableNode);
675         return alterTableNode;
676     }
677 
678     /**
679      * Parses DDL DROP {@link AstNode} based on SQL 92 specifications.
680      * 
681      * @param tokens the {@link DdlTokenStream} representing the tokenized DDL content; may not be null
682      * @param parentNode the parent {@link AstNode} node; may not be null
683      * @return the parsed DROP {@link AstNode}
684      * @throws ParsingException
685      */
686     protected AstNode parseDropStatement( DdlTokenStream tokens,
687                                           AstNode parentNode ) throws ParsingException {
688         assert tokens != null;
689         assert parentNode != null;
690 
691         if (tokens.matches(STMT_DROP_TABLE)) {
692             // <drop table statement> ::=
693             // DROP TABLE <table name> <drop behavior>
694             //
695             // <drop behavior> ::= CASCADE | RESTRICT
696             return parseSimpleDropStatement(tokens, STMT_DROP_TABLE, parentNode, TYPE_DROP_TABLE_STATEMENT);
697         } else if (tokens.matches(STMT_DROP_VIEW)) {
698             return parseSimpleDropStatement(tokens, STMT_DROP_VIEW, parentNode, TYPE_DROP_VIEW_STATEMENT);
699         } else if (tokens.matches(STMT_DROP_SCHEMA)) {
700             return parseSimpleDropStatement(tokens, STMT_DROP_SCHEMA, parentNode, TYPE_DROP_SCHEMA_STATEMENT);
701         } else if (tokens.matches(STMT_DROP_DOMAIN)) {
702             return parseSimpleDropStatement(tokens, STMT_DROP_DOMAIN, parentNode, TYPE_DROP_DOMAIN_STATEMENT);
703         } else if (tokens.matches(STMT_DROP_TRANSLATION)) {
704             return parseSimpleDropStatement(tokens, STMT_DROP_TRANSLATION, parentNode, TYPE_DROP_TRANSLATION_STATEMENT);
705         } else if (tokens.matches(STMT_DROP_CHARACTER_SET)) {
706             return parseSimpleDropStatement(tokens, STMT_DROP_CHARACTER_SET, parentNode, TYPE_DROP_CHARACTER_SET_STATEMENT);
707         } else if (tokens.matches(STMT_DROP_ASSERTION)) {
708             return parseSimpleDropStatement(tokens, STMT_DROP_ASSERTION, parentNode, TYPE_DROP_ASSERTION_STATEMENT);
709         } else if (tokens.matches(STMT_DROP_COLLATION)) {
710             return parseSimpleDropStatement(tokens, STMT_DROP_COLLATION, parentNode, TYPE_DROP_COLLATION_STATEMENT);
711         }
712 
713         return null;
714     }
715 
716     private AstNode parseSimpleDropStatement( DdlTokenStream tokens,
717                                               String[] startPhrase,
718                                               AstNode parentNode,
719                                               Name stmtType ) throws ParsingException {
720         assert tokens != null;
721         assert startPhrase != null && startPhrase.length > 0;
722         assert parentNode != null;
723 
724         markStartOfStatement(tokens);
725         String behavior = null;
726         tokens.consume(startPhrase);
727         List<String> nameList = new ArrayList<String>();
728         nameList.add(parseName(tokens));
729         while (tokens.matches(COMMA)) {
730             tokens.consume(COMMA);
731             nameList.add(parseName(tokens));
732         }
733 
734         if (tokens.canConsume("CASCADE")) {
735             behavior = "CASCADE";
736         } else if (tokens.canConsume("RESTRICT")) {
737             behavior = "RESTRICT";
738         }
739 
740         AstNode dropNode = nodeFactory().node(nameList.get(0), parentNode, stmtType);
741         if (behavior != null) {
742             dropNode.setProperty(DROP_BEHAVIOR, behavior);
743         }
744         markEndOfStatement(tokens, dropNode);
745 
746         return dropNode;
747     }
748 
749     /**
750      * Parses DDL INSERT {@link AstNode} based on SQL 92 specifications.
751      * 
752      * @param tokens the {@link DdlTokenStream} representing the tokenized DDL content; may not be null
753      * @param parentNode the parent {@link AstNode} node; may not be null
754      * @return the {@link AstNode}
755      * @throws ParsingException
756      */
757     protected AstNode parseInsertStatement( DdlTokenStream tokens,
758                                             AstNode parentNode ) throws ParsingException {
759         assert tokens != null;
760         assert parentNode != null;
761 
762         // Original implementation does NOT parse Insert statement, but just returns a generic TypedStatement
763         if (tokens.matches(STMT_INSERT_INTO)) {
764             markStartOfStatement(tokens);
765             tokens.consume(STMT_INSERT_INTO);
766             String prefix = getStatementTypeName(STMT_INSERT_INTO);
767             AstNode node = nodeFactory().node(prefix, parentNode, TYPE_INSERT_STATEMENT);
768             parseUntilTerminator(tokens);
769             markEndOfStatement(tokens, node);
770             return node;
771         }
772         return null;
773     }
774 
775     /**
776      * Parses DDL SET {@link AstNode} based on SQL 92 specifications.
777      * 
778      * @param tokens the {@link DdlTokenStream} representing the tokenized DDL content; may not be null
779      * @param parentNode the parent {@link AstNode} node; may not be null
780      * @return the {@link AstNode}
781      * @throws ParsingException
782      */
783     protected AstNode parseSetStatement( DdlTokenStream tokens,
784                                          AstNode parentNode ) throws ParsingException {
785         assert tokens != null;
786         assert parentNode != null;
787 
788         // Original implementation does NOT parse Insert statement, but just returns a generic TypedStatement
789         if (tokens.matches(SET)) {
790             markStartOfStatement(tokens);
791             tokens.consume(SET);
792             AstNode node = nodeFactory().node("SET", parentNode, TYPE_SET_STATEMENT);
793             parseUntilTerminator(tokens);
794             markEndOfStatement(tokens, node);
795             return node;
796         }
797         return null;
798     }
799 
800     /**
801      * Parses DDL GRANT statement {@link AstNode} based on SQL 92 specifications.
802      * 
803      * @param tokens the {@link DdlTokenStream} representing the tokenized DDL content; may not be null
804      * @param parentNode the parent {@link AstNode} node; may not be null
805      * @return the {@link AstNode}
806      * @throws ParsingException
807      */
808     protected AstNode parseGrantStatement( DdlTokenStream tokens,
809                                            AstNode parentNode ) throws ParsingException {
810         assert tokens != null;
811         assert parentNode != null;
812         assert tokens.matches(GRANT);
813 
814         markStartOfStatement(tokens);
815 
816         // Syntax for tables
817         //
818         // GRANT <privileges> ON <object name>
819         // TO <grantee> [ { <comma> <grantee> }... ]
820         // [ WITH GRANT OPTION ]
821         //
822         // <object name> ::=
823         // [ TABLE ] <table name>
824         // | DOMAIN <domain name>
825         // | COLLATION <collation name>
826         // | CHARACTER SET <character set name>
827         // | TRANSLATION <translation name>
828         //
829         // Syntax for roles
830         //
831         // GRANT roleName [ {, roleName }* ] TO grantees
832 
833         // privilege-types
834         //
835         // ALL PRIVILEGES | privilege-list
836         //
837         AstNode grantNode = null;
838         boolean allPrivileges = false;
839 
840         List<AstNode> privileges = new ArrayList<AstNode>();
841 
842         tokens.consume("GRANT");
843 
844         if (tokens.canConsume("ALL", "PRIVILEGES")) {
845             allPrivileges = true;
846         } else {
847             parseGrantPrivileges(tokens, privileges);
848         }
849         tokens.consume("ON");
850 
851         if (tokens.canConsume("DOMAIN")) {
852             String name = parseName(tokens);
853             grantNode = nodeFactory().node(name, parentNode, TYPE_GRANT_ON_DOMAIN_STATEMENT);
854         } else if (tokens.canConsume("COLLATION")) {
855             String name = parseName(tokens);
856             grantNode = nodeFactory().node(name, parentNode, TYPE_GRANT_ON_COLLATION_STATEMENT);
857         } else if (tokens.canConsume("CHARACTER", "SET")) {
858             String name = parseName(tokens);
859             grantNode = nodeFactory().node(name, parentNode, TYPE_GRANT_ON_CHARACTER_SET_STATEMENT);
860         } else if (tokens.canConsume("TRANSLATION")) {
861             String name = parseName(tokens);
862             grantNode = nodeFactory().node(name, parentNode, TYPE_GRANT_ON_TRANSLATION_STATEMENT);
863         } else {
864             tokens.canConsume(TABLE); // OPTIONAL
865             String name = parseName(tokens);
866             grantNode = nodeFactory().node(name, parentNode, TYPE_GRANT_ON_TABLE_STATEMENT);
867         }
868 
869         // Attach privileges to grant node
870         for (AstNode node : privileges) {
871             node.setParent(grantNode);
872         }
873         if (allPrivileges) {
874             grantNode.setProperty(ALL_PRIVILEGES, allPrivileges);
875         }
876 
877         tokens.consume("TO");
878 
879         do {
880             String grantee = parseName(tokens);
881             nodeFactory().node(grantee, grantNode, GRANTEE);
882         } while (tokens.canConsume(COMMA));
883 
884         if (tokens.canConsume("WITH", "GRANT", "OPTION")) {
885             grantNode.setProperty(WITH_GRANT_OPTION, "WITH GRANT OPTION");
886         }
887 
888         markEndOfStatement(tokens, grantNode);
889 
890         return grantNode;
891     }
892 
893     protected void parseGrantPrivileges( DdlTokenStream tokens,
894                                          List<AstNode> privileges ) throws ParsingException {
895         // privilege-types
896         //
897         // ALL PRIVILEGES | privilege-list
898         //
899         // privilege-list
900         //
901         // table-privilege {, table-privilege }*
902         //
903         // table-privilege
904         // SELECT
905         // | DELETE
906         // | INSERT [ <left paren> <privilege column list> <right paren> ]
907         // | UPDATE [ <left paren> <privilege column list> <right paren> ]
908         // | REFERENCES [ <left paren> <privilege column list> <right paren> ]
909         // | USAGE
910 
911         do {
912             AstNode node = null;
913 
914             if (tokens.canConsume(DELETE)) {
915                 node = nodeFactory().node("privilege");
916                 node.setProperty(TYPE, DELETE);
917             } else if (tokens.canConsume(INSERT)) {
918                 node = nodeFactory().node("privilege");
919                 node.setProperty(TYPE, INSERT);
920                 parseColumnNameList(tokens, node, TYPE_COLUMN_REFERENCE);
921             } else if (tokens.canConsume("REFERENCES")) {
922                 node = nodeFactory().node("privilege");
923                 node.setProperty(TYPE, "REFERENCES");
924                 parseColumnNameList(tokens, node, TYPE_COLUMN_REFERENCE);
925             } else if (tokens.canConsume(SELECT)) {
926                 node = nodeFactory().node("privilege");
927                 node.setProperty(TYPE, SELECT);
928             } else if (tokens.canConsume("USAGE")) {
929                 node = nodeFactory().node("privilege");
930                 node.setProperty(TYPE, "USAGE");
931             } else if (tokens.canConsume(UPDATE)) {
932                 node = nodeFactory().node("privilege");
933                 node.setProperty(TYPE, UPDATE);
934                 parseColumnNameList(tokens, node, TYPE_COLUMN_REFERENCE);
935             }
936             if (node == null) {
937                 break;
938             }
939             nodeFactory().setType(node, GRANT_PRIVILEGE);
940             privileges.add(node);
941 
942         } while (tokens.canConsume(COMMA));
943 
944     }
945 
    /**
     * Parses DDL REVOKE statement {@link AstNode} based on SQL 92 specifications. The resulting node's type depends on
     * the revoke target (TABLE, DOMAIN, COLLATION, CHARACTER SET or TRANSLATION); privilege and grantee child nodes
     * are attached to it.
     * <p>
     * NOTE(review): SQL-92 spells the optional clause as {@code REVOKE [ GRANT OPTION FOR ] <privileges> ...}, while
     * this implementation consumes {@code WITH GRANT OPTION} after REVOKE — confirm against the supported dialects.
     * 
     * @param tokens the {@link DdlTokenStream} representing the tokenized DDL content; may not be null
     * @param parentNode the parent {@link AstNode} node; may not be null
     * @return the {@link AstNode}
     * @throws ParsingException
     */
    protected AstNode parseRevokeStatement( DdlTokenStream tokens,
                                            AstNode parentNode ) throws ParsingException {
        assert tokens != null;
        assert parentNode != null;
        assert tokens.matches(REVOKE);

        markStartOfStatement(tokens);

        // <revoke statement> ::=
        // REVOKE [ GRANT OPTION FOR ]
        // <privileges>
        // ON <object name>
        // FROM <grantee> [ { <comma> <grantee> }... ] <drop behavior>

        AstNode revokeNode = null;
        boolean allPrivileges = false;
        boolean withGrantOption = false;

        // Privilege nodes are created unparented and attached once the revoke node exists (its type depends
        // on the ON clause, which is parsed after the privileges).
        List<AstNode> privileges = new ArrayList<AstNode>();

        tokens.consume("REVOKE");

        withGrantOption = tokens.canConsume("WITH", "GRANT", "OPTION");

        // ALL PRIVILEGES | privilege-list
        if (tokens.canConsume("ALL", "PRIVILEGES")) {
            allPrivileges = true;
        } else {
            parseGrantPrivileges(tokens, privileges);
        }
        tokens.consume("ON");

        // The ON target determines the node type of the revoke statement.
        if (tokens.canConsume("DOMAIN")) {
            String name = parseName(tokens);
            revokeNode = nodeFactory().node(name, parentNode, TYPE_REVOKE_ON_DOMAIN_STATEMENT);
        } else if (tokens.canConsume("COLLATION")) {
            String name = parseName(tokens);
            revokeNode = nodeFactory().node(name, parentNode, TYPE_REVOKE_ON_COLLATION_STATEMENT);
        } else if (tokens.canConsume("CHARACTER", "SET")) {
            String name = parseName(tokens);
            revokeNode = nodeFactory().node(name, parentNode, TYPE_REVOKE_ON_CHARACTER_SET_STATEMENT);
        } else if (tokens.canConsume("TRANSLATION")) {
            String name = parseName(tokens);
            revokeNode = nodeFactory().node(name, parentNode, TYPE_REVOKE_ON_TRANSLATION_STATEMENT);
        } else {
            tokens.canConsume(TABLE); // OPTIONAL "TABLE" keyword
            String name = parseName(tokens);
            revokeNode = nodeFactory().node(name, parentNode, TYPE_REVOKE_ON_TABLE_STATEMENT);
        }

        // Attach privileges to the revoke node
        for (AstNode node : privileges) {
            node.setParent(revokeNode);
        }

        if (allPrivileges) {
            revokeNode.setProperty(ALL_PRIVILEGES, allPrivileges);
        }

        tokens.consume("FROM");

        // One grantee child node per comma-separated grantee name.
        do {
            String grantee = parseName(tokens);
            nodeFactory().node(grantee, revokeNode, GRANTEE);
        } while (tokens.canConsume(COMMA));

        // Optional <drop behavior>.
        String behavior = null;

        if (tokens.canConsume("CASCADE")) {
            behavior = "CASCADE";
        } else if (tokens.canConsume("RESTRICT")) {
            behavior = "RESTRICT";
        }

        if (behavior != null) {
            revokeNode.setProperty(DROP_BEHAVIOR, behavior);
        }

        if (withGrantOption) {
            revokeNode.setProperty(WITH_GRANT_OPTION, "WITH GRANT OPTION");
        }

        markEndOfStatement(tokens, revokeNode);

        return revokeNode;
    }
1031 
1032     /**
1033      * Parses DDL CREATE DOMAIN {@link AstNode} based on SQL 92 specifications.
1034      * 
1035      * @param tokens the {@link DdlTokenStream} representing the tokenized DDL content; may not be null
1036      * @param parentNode the parent {@link AstNode} node; may not be null
1037      * @return the parsed statement node {@link AstNode}
1038      * @throws ParsingException
1039      */
1040     protected AstNode parseCreateDomainStatement( DdlTokenStream tokens,
1041                                                   AstNode parentNode ) throws ParsingException {
1042         assert tokens != null;
1043         assert parentNode != null;
1044 
1045         // <domain definition> ::=
1046         // CREATE DOMAIN <domain name>
1047         // [ AS ] <data type>
1048         // [ <default clause> ]
1049         // [ <domain constraint>... ]
1050         // [ <collate clause> ]
1051 
1052         markStartOfStatement(tokens);
1053 
1054         tokens.consume(STMT_CREATE_DOMAIN);
1055 
1056         String name = parseName(tokens);
1057 
1058         AstNode node = nodeFactory().node(name, parentNode, TYPE_CREATE_DOMAIN_STATEMENT);
1059 
1060         parseUntilTerminator(tokens);
1061 
1062         markEndOfStatement(tokens, node);
1063 
1064         return node;
1065     }
1066 
1067     /**
1068      * Parses DDL CREATE COLLATION {@link AstNode} based on SQL 92 specifications.
1069      * 
1070      * @param tokens the {@link DdlTokenStream} representing the tokenized DDL content; may not be null
1071      * @param parentNode the parent {@link AstNode} node; may not be null
1072      * @return the parsed statement node {@link AstNode}
1073      * @throws ParsingException
1074      */
1075     protected AstNode parseCreateCollationStatement( DdlTokenStream tokens,
1076                                                      AstNode parentNode ) throws ParsingException {
1077         assert tokens != null;
1078         assert parentNode != null;
1079 
1080         markStartOfStatement(tokens);
1081 
1082         tokens.consume(STMT_CREATE_COLLATION);
1083 
1084         String name = parseName(tokens);
1085 
1086         AstNode node = nodeFactory().node(name, parentNode, TYPE_CREATE_COLLATION_STATEMENT);
1087 
1088         parseUntilTerminator(tokens);
1089 
1090         markEndOfStatement(tokens, node);
1091 
1092         return node;
1093     }
1094 
1095     /**
1096      * Parses DDL CREATE TRANSLATION {@link AstNode} based on SQL 92 specifications.
1097      * 
1098      * @param tokens the {@link DdlTokenStream} representing the tokenized DDL content; may not be null
1099      * @param parentNode the parent {@link AstNode} node; may not be null
1100      * @return the parsed statement node {@link AstNode}
1101      * @throws ParsingException
1102      */
1103     protected AstNode parseCreateTranslationStatement( DdlTokenStream tokens,
1104                                                        AstNode parentNode ) throws ParsingException {
1105         assert tokens != null;
1106         assert parentNode != null;
1107 
1108         markStartOfStatement(tokens);
1109 
1110         tokens.consume(STMT_CREATE_TRANSLATION);
1111 
1112         String name = parseName(tokens);
1113 
1114         AstNode node = nodeFactory().node(name, parentNode, TYPE_CREATE_TRANSLATION_STATEMENT);
1115 
1116         parseUntilTerminator(tokens);
1117 
1118         markEndOfStatement(tokens, node);
1119 
1120         return node;
1121     }
1122 
1123     /**
1124      * Parses DDL CREATE CHARACTER SET {@link AstNode} based on SQL 92 specifications.
1125      * 
1126      * @param tokens the {@link DdlTokenStream} representing the tokenized DDL content; may not be null
1127      * @param parentNode the parent {@link AstNode} node; may not be null
1128      * @return the parsed statement node {@link AstNode}
1129      * @throws ParsingException
1130      */
1131     protected AstNode parseCreateCharacterSetStatement( DdlTokenStream tokens,
1132                                                         AstNode parentNode ) throws ParsingException {
1133         assert tokens != null;
1134         assert parentNode != null;
1135 
1136         markStartOfStatement(tokens);
1137 
1138         tokens.consume(STMT_CREATE_CHARACTER_SET);
1139 
1140         String name = parseName(tokens);
1141 
1142         AstNode node = nodeFactory().node(name, parentNode, TYPE_CREATE_CHARACTER_SET_STATEMENT);
1143 
1144         parseUntilTerminator(tokens);
1145 
1146         markEndOfStatement(tokens, node);
1147 
1148         return node;
1149     }
1150 
1151     /**
1152      * Catch-all method to parse unknown (not registered or handled by sub-classes) statements.
1153      * 
1154      * @param tokens the {@link DdlTokenStream} representing the tokenized DDL content; may not be null
1155      * @param parentNode the parent {@link AstNode} node; may not be null
1156      * @return the {@link AstNode}
1157      * @throws ParsingException
1158      */
1159     protected AstNode parseCustomStatement( DdlTokenStream tokens,
1160                                             AstNode parentNode ) throws ParsingException {
1161         assert tokens != null;
1162         assert parentNode != null;
1163 
1164         // DEFAULT DOES NOTHING
1165         // Subclasses can implement additional parsing
1166 
1167         return null;
1168     }
1169 
1170     // ===========================================================================================================================
1171     // PARSING CREATE TABLE
1172     // ===========================================================================================================================
1173 
1174     /**
1175      * Parses DDL CREATE TABLE {@link AstNode} based on SQL 92 specifications.
1176      * 
1177      * @param tokens the {@link DdlTokenStream} representing the tokenized DDL content; may not be null
1178      * @param parentNode the parent {@link AstNode} node; may not be null
1179      * @return the parsed CREATE TABLE {@link AstNode}
1180      * @throws ParsingException
1181      */
1182     protected AstNode parseCreateTableStatement( DdlTokenStream tokens,
1183                                                  AstNode parentNode ) throws ParsingException {
1184         assert tokens != null;
1185         assert parentNode != null;
1186 
1187         markStartOfStatement(tokens);
1188 
1189         tokens.consume(CREATE); // CREATE
1190         String temporaryValue = null;
1191         if (tokens.canConsume("LOCAL")) {
1192             tokens.consume("TEMPORARY");
1193             temporaryValue = "LOCAL";
1194         } else if (tokens.canConsume("GLOBAL")) {
1195             tokens.consume("TEMPORARY");
1196             temporaryValue = "GLOBAL";
1197         }
1198 
1199         tokens.consume(TABLE);
1200 
1201         String tableName = parseName(tokens);
1202 
1203         AstNode tableNode = nodeFactory().node(tableName, parentNode, TYPE_CREATE_TABLE_STATEMENT);
1204 
1205         if (temporaryValue != null) {
1206             tableNode.setProperty(TEMPORARY, temporaryValue);
1207         }
1208 
1209         // System.out.println("  >> PARSING CREATE TABLE >>  Name = " + tableName);
1210         parseColumnsAndConstraints(tokens, tableNode);
1211 
1212         parseCreateTableOptions(tokens, tableNode);
1213 
1214         markEndOfStatement(tokens, tableNode);
1215 
1216         return tableNode;
1217     }
1218 
1219     protected void parseCreateTableOptions( DdlTokenStream tokens,
1220                                             AstNode tableNode ) throws ParsingException {
1221         assert tokens != null;
1222         assert tableNode != null;
1223 
1224         // [ ON COMMIT { PRESERVE ROWS | DELETE ROWS | DROP } ]
1225         while (areNextTokensCreateTableOptions(tokens)) {
1226             parseNextCreateTableOption(tokens, tableNode);
1227         }
1228 
1229     }
1230 
1231     protected void parseNextCreateTableOption( DdlTokenStream tokens,
1232                                                AstNode tableNode ) throws ParsingException {
1233         assert tokens != null;
1234         assert tableNode != null;
1235 
1236         if (tokens.canConsume("ON", "COMMIT")) {
1237             String option = "";
1238             // PRESERVE ROWS | DELETE ROWS | DROP
1239             if (tokens.canConsume("PRESERVE", "ROWS")) {
1240                 option = option + "ON COMMIT PRESERVE ROWS";
1241             } else if (tokens.canConsume("DELETE", "ROWS")) {
1242                 option = option + "ON COMMIT DELETE ROWS";
1243             } else if (tokens.canConsume("DROP")) {
1244                 option = option + "ON COMMIT DROP";
1245             }
1246 
1247             if (option.length() > 0) {
1248                 AstNode tableOption = nodeFactory().node("option", tableNode, TYPE_STATEMENT_OPTION);
1249                 tableOption.setProperty(VALUE, option);
1250             }
1251         }
1252     }
1253 
1254     protected boolean areNextTokensCreateTableOptions( DdlTokenStream tokens ) throws ParsingException {
1255         assert tokens != null;
1256 
1257         boolean result = false;
1258 
1259         // [ ON COMMIT { PRESERVE ROWS | DELETE ROWS | DROP } ]
1260         if (tokens.matches("ON", "COMMIT")) {
1261             result = true;
1262         }
1263 
1264         return result;
1265     }
1266 
1267     /**
1268      * Utility method to parse columns and table constraints within either a CREATE TABLE statement. Method first parses and
1269      * copies the text enclosed within the bracketed "( xxxx  )" statement. Then the individual column definition or table
1270      * constraint definition sub-statements are parsed assuming they are comma delimited.
1271      * 
1272      * @param tokens the {@link DdlTokenStream} representing the tokenized DDL content; may not be null
1273      * @param tableNode
1274      * @throws ParsingException
1275      */
1276     protected void parseColumnsAndConstraints( DdlTokenStream tokens,
1277                                                AstNode tableNode ) throws ParsingException {
1278         assert tokens != null;
1279         assert tableNode != null;
1280 
1281         if (!tokens.matches(L_PAREN)) {
1282             return;
1283         }
1284 
1285         String tableElementString = getTableElementsString(tokens, false);
1286 
1287         DdlTokenStream localTokens = new DdlTokenStream(tableElementString, DdlTokenStream.ddlTokenizer(false), false);
1288 
1289         localTokens.start();
1290 
1291         StringBuffer unusedTokensSB = new StringBuffer();
1292         do {
1293             if (isTableConstraint(localTokens)) {
1294                 parseTableConstraint(localTokens, tableNode, false);
1295             } else if (isColumnDefinitionStart(localTokens)) {
1296                 parseColumnDefinition(localTokens, tableNode, false);
1297             } else {
1298                 unusedTokensSB.append(SPACE).append(localTokens.consume());
1299             }
1300         } while (localTokens.canConsume(COMMA));
1301 
1302         if (unusedTokensSB.length() > 0) {
1303             String msg = DdlSequencerI18n.unusedTokensParsingColumnsAndConstraints.text(tableNode.getProperty(NAME));
1304             DdlParserProblem problem = new DdlParserProblem(DdlConstants.Problems.WARNING, Position.EMPTY_CONTENT_POSITION, msg);
1305             problem.setUnusedSource(unusedTokensSB.toString());
1306             addProblem(problem, tableNode);
1307         }
1308 
1309     }
1310 
1311     /**
1312      * Utility method to parse the actual column definition. SQL-92 Structural Specification <column definition> ::= <column name>
1313      * { <data type> | <domain name> } [ <default clause> ] [ <column constraint definition>... ] [ <collate clause> ]
1314      * 
1315      * @param tokens the {@link DdlTokenStream} representing the tokenized DDL content; may not be null
1316      * @param tableNode
1317      * @param isAlterTable true if in-line constraint is part of add column in alter table statement
1318      * @throws ParsingException
1319      */
1320     protected void parseColumnDefinition( DdlTokenStream tokens,
1321                                           AstNode tableNode,
1322                                           boolean isAlterTable ) throws ParsingException {
1323         assert tokens != null;
1324         assert tableNode != null;
1325 
1326         tokens.canConsume("COLUMN");
1327         String columnName = parseName(tokens);
1328         DataType datatype = getDatatypeParser().parse(tokens);
1329 
1330         AstNode columnNode = nodeFactory().node(columnName, tableNode, TYPE_COLUMN_DEFINITION);
1331 
1332         getDatatypeParser().setPropertiesOnNode(columnNode, datatype);
1333 
1334         // Now clauses and constraints can be defined in any order, so we need to keep parsing until we get to a comma
1335         StringBuffer unusedTokensSB = new StringBuffer();
1336 
1337         while (tokens.hasNext() && !tokens.matches(COMMA)) {
1338             boolean parsedDefaultClause = parseDefaultClause(tokens, columnNode);
1339             if (!parsedDefaultClause) {
1340                 boolean parsedCollate = parseCollateClause(tokens, columnNode);
1341                 boolean parsedConstraint = parseColumnConstraint(tokens, columnNode, isAlterTable);
1342                 if (!parsedCollate && !parsedConstraint) {
1343                     // THIS IS AN ERROR. NOTHING FOUND.
1344                     // NEED TO absorb tokens
1345                     unusedTokensSB.append(SPACE).append(tokens.consume());
1346                 }
1347             }
1348             tokens.canConsume(DdlTokenizer.COMMENT);
1349         }
1350 
1351         if (unusedTokensSB.length() > 0) {
1352             String msg = DdlSequencerI18n.unusedTokensParsingColumnDefinition.text(tableNode.getName());
1353             DdlParserProblem problem = new DdlParserProblem(Problems.WARNING, Position.EMPTY_CONTENT_POSITION, msg);
1354             problem.setUnusedSource(unusedTokensSB.toString());
1355             addProblem(problem, tableNode);
1356         }
1357     }
1358 
1359     /**
1360      * Utility method to parse the actual column definition. SQL-92 Structural Specification <column definition> ::= <column name>
1361      * { <data type> | <domain name> } [ <default clause> ] [ <column constraint definition>... ] [ <collate clause> ]
1362      * 
1363      * @param tokens the {@link DdlTokenStream} representing the tokenized DDL content; may not be null
1364      * @param tableNode the alter or create table statement node; may not be null
1365      * @param isAlterTable true if in-line constraint is part of add column in alter table statement
1366      * @throws ParsingException
1367      */
1368     protected void parseSingleTerminatedColumnDefinition( DdlTokenStream tokens,
1369                                                           AstNode tableNode,
1370                                                           boolean isAlterTable ) throws ParsingException {
1371         assert tokens != null;
1372         assert tableNode != null;
1373 
1374         tokens.canConsume("COLUMN");
1375         String columnName = parseName(tokens);
1376         DataType datatype = getDatatypeParser().parse(tokens);
1377 
1378         AstNode columnNode = nodeFactory().node(columnName, tableNode, TYPE_COLUMN_DEFINITION);
1379 
1380         getDatatypeParser().setPropertiesOnNode(columnNode, datatype);
1381         // Now clauses and constraints can be defined in any order, so we need to keep parsing until we get to a comma, a
1382         // terminator
1383         // or a new statement
1384 
1385         while (tokens.hasNext() && !tokens.matches(getTerminator()) && !tokens.matches(DdlTokenizer.STATEMENT_KEY)) {
1386             boolean parsedDefaultClause = parseDefaultClause(tokens, columnNode);
1387             boolean foundSomething = parsedDefaultClause;
1388             if (!parsedDefaultClause) {
1389                 foundSomething |= parseCollateClause(tokens, columnNode);
1390                 foundSomething |= parseColumnConstraint(tokens, columnNode, isAlterTable);
1391             }
1392             foundSomething |= consumeComment(tokens);
1393             if (tokens.canConsume(COMMA) || !foundSomething) break;
1394         }
1395     }
1396 
1397     /**
1398      * Method which extracts the table element string from a CREATE TABLE statement.
1399      * 
1400      * @param tokens the {@link DdlTokenStream} representing the tokenized DDL content; may not be null
1401      * @param useTerminator
1402      * @return the parsed table elements String.
1403      * @throws ParsingException
1404      */
1405     protected String getTableElementsString( DdlTokenStream tokens,
1406                                              boolean useTerminator ) throws ParsingException {
1407         assert tokens != null;
1408 
1409         StringBuffer sb = new StringBuffer(100);
1410 
1411         if (useTerminator) {
1412             while (!isTerminator(tokens)) {
1413                 sb.append(SPACE).append(tokens.consume());
1414             }
1415         } else {
1416             // Assume we start with open parenthesis '(', then we can count on walking through ALL tokens until we find the close
1417             // parenthesis ')'. If there are intermediate parenthesis, we can count on them being pairs.
1418             tokens.consume(L_PAREN); // EXPECTED
1419 
1420             int iParen = 0;
1421             while (tokens.hasNext()) {
1422                 if (tokens.matches(L_PAREN)) {
1423                     iParen++;
1424                 } else if (tokens.matches(R_PAREN)) {
1425                     if (iParen == 0) {
1426                         tokens.consume(R_PAREN);
1427                         break;
1428                     }
1429                     iParen--;
1430                 }
1431                 if (isComment(tokens)) {
1432                     tokens.consume();
1433                 } else {
1434                     sb.append(SPACE).append(tokens.consume());
1435                 }
1436             }
1437         }
1438 
1439         return sb.toString();
1440 
1441     }
1442 
1443     /**
1444      * Simple method which parses, consumes and returns a string representing text found between parenthesis (i.e. '()') If
1445      * parents don't exist, method returns NULL;
1446      * 
1447      * @param tokens the {@link DdlTokenStream} representing the tokenized DDL content; may not be null
1448      * @param includeParens
1449      * @return the parenthesis bounded text or null if no parens.
1450      * @throws ParsingException
1451      */
1452     protected String consumeParenBoundedTokens( DdlTokenStream tokens,
1453                                                 boolean includeParens ) throws ParsingException {
1454         assert tokens != null;
1455 
1456         // Assume we start with open parenthesis '(', then we can count on walking through ALL tokens until we find the close
1457         // parenthesis ')'. If there are intermediate parenthesis, we can count on them being pairs.
1458         if (tokens.canConsume(L_PAREN)) { // EXPECTED
1459             StringBuffer sb = new StringBuffer(100);
1460             if (includeParens) {
1461                 sb.append(L_PAREN);
1462             }
1463             int iParen = 0;
1464             while (tokens.hasNext()) {
1465                 if (tokens.matches(L_PAREN)) {
1466                     iParen++;
1467                 } else if (tokens.matches(R_PAREN)) {
1468                     if (iParen == 0) {
1469                         tokens.consume(R_PAREN);
1470                         if (includeParens) {
1471                             sb.append(SPACE).append(R_PAREN);
1472                         }
1473                         break;
1474                     }
1475                     iParen--;
1476                 }
1477                 if (isComment(tokens)) {
1478                     tokens.consume();
1479                 } else {
1480                     sb.append(SPACE).append(tokens.consume());
1481                 }
1482             }
1483             return sb.toString();
1484         }
1485 
1486         return null;
1487     }
1488 
1489     /**
1490      * Parses an in-line column constraint including NULLABLE value, UNIQUE, PRIMARY KEY and REFERENCES to a Foreign Key. The
1491      * values for the constraint are set as properties on the input columnNode.
1492      * 
1493      * @param tokens the {@link DdlTokenStream} representing the tokenized DDL content; may not be null
1494      * @param columnNode the column definition being created; may not be null
1495      * @param isAlterTable true if in-line constraint is part of add column in alter table statement
1496      * @return true if parsed a constraint, else false.
1497      * @throws ParsingException
1498      */
1499     protected boolean parseColumnConstraint( DdlTokenStream tokens,
1500                                              AstNode columnNode,
1501                                              boolean isAlterTable ) throws ParsingException {
1502         assert tokens != null;
1503         assert columnNode != null;
1504 
1505         Name mixinType = isAlterTable ? TYPE_ADD_TABLE_CONSTRAINT_DEFINITION : TYPE_TABLE_CONSTRAINT;
1506 
1507         boolean result = false;
1508 
1509         // : [ CONSTRAINT <constraint name> ] <column constraint> [ <constraint attributes> ]
1510         // <column constraint> ::= NOT NULL | <unique specification> | <references specification> | <check constraint definition>
1511         // <unique specification> ::= UNIQUE | PRIMARY KEY
1512         // <references specification> ::= REFERENCES <referenced table and columns> [ MATCH <match type> ] [ <referential
1513         // triggered action> ]
1514         // <check constraint definition> ::= CHECK <left paren> <search condition> <right paren>
1515         String colName = columnNode.getName().getString();
1516 
1517         if (tokens.canConsume("NULL")) {
1518             columnNode.setProperty(NULLABLE, "NULL");
1519             result = true;
1520         } else if (tokens.canConsume("NOT", "NULL")) {
1521             columnNode.setProperty(NULLABLE, "NOT NULL");
1522             result = true;
1523         } else if (tokens.matches("CONSTRAINT")) {
1524             result = true;
1525             tokens.consume("CONSTRAINT");
1526             String constraintName = parseName(tokens);
1527             AstNode constraintNode = nodeFactory().node(constraintName, columnNode.getParent(), mixinType);
1528 
1529             if (tokens.matches("UNIQUE")) {
1530                 // CONSTRAINT P_KEY_2a UNIQUE (PERMISSIONUID)
1531                 tokens.consume("UNIQUE"); // UNIQUE
1532 
1533                 constraintNode.setProperty(CONSTRAINT_TYPE, UNIQUE);
1534 
1535                 // CONSUME COLUMNS
1536                 boolean columnsAdded = parseColumnNameList(tokens, constraintNode, TYPE_COLUMN_REFERENCE);
1537 
1538                 if (!columnsAdded) {
1539                     nodeFactory().node(colName, constraintNode, TYPE_COLUMN_REFERENCE);
1540                 }
1541 
1542                 parseConstraintAttributes(tokens, constraintNode);
1543             } else if (tokens.matches("PRIMARY", "KEY")) {
1544                 // CONSTRAINT U_KEY_2a PRIMARY KEY (PERMISSIONUID)
1545                 tokens.consume("PRIMARY"); // PRIMARY
1546                 tokens.consume("KEY"); // KEY
1547 
1548                 constraintNode.setProperty(CONSTRAINT_TYPE, PRIMARY_KEY);
1549 
1550                 // CONSUME COLUMNS
1551                 parseColumnNameList(tokens, constraintNode, TYPE_COLUMN_REFERENCE);
1552 
1553                 parseConstraintAttributes(tokens, constraintNode);
1554             } else if (tokens.matches("REFERENCES")) {
1555                 // References in an in-line constraint is really a foreign key definition
1556                 // EXAMPLE:
1557                 // COLUMN_NAME DATATYPE NOT NULL DEFAULT (0) CONSTRAINT SOME_FK_NAME REFERENCES SOME_TABLE_NAME (SOME_COLUMN_NAME,
1558                 // ...)
1559 
1560                 constraintNode.setProperty(CONSTRAINT_TYPE, FOREIGN_KEY);
1561 
1562                 nodeFactory().node(colName, constraintNode, TYPE_COLUMN_REFERENCE);
1563 
1564                 parseReferences(tokens, constraintNode);
1565 
1566                 parseConstraintAttributes(tokens, constraintNode);
1567             }
1568         } else if (tokens.matches("UNIQUE")) {
1569             result = true;
1570             tokens.consume("UNIQUE");
1571             // Unique constraint for this particular column
1572             String uc_name = "UC_1"; // UNIQUE CONSTRAINT NAME
1573 
1574             AstNode constraintNode = nodeFactory().node(uc_name, columnNode.getParent(), mixinType);
1575 
1576             constraintNode.setProperty(CONSTRAINT_TYPE, UNIQUE);
1577 
1578             nodeFactory().node(colName, constraintNode, TYPE_COLUMN_REFERENCE);
1579 
1580         } else if (tokens.matches("PRIMARY", "KEY")) {
1581             result = true;
1582             tokens.consume("PRIMARY", "KEY");
1583             // PRIMARY KEY for this particular column
1584             String pk_name = "PK_1"; // PRIMARY KEY NAME
1585 
1586             AstNode constraintNode = nodeFactory().node(pk_name, columnNode.getParent(), mixinType);
1587 
1588             constraintNode.setProperty(CONSTRAINT_TYPE, PRIMARY_KEY);
1589 
1590             nodeFactory().node(colName, constraintNode, TYPE_COLUMN_REFERENCE);
1591 
1592         } else if (tokens.matches("FOREIGN", "KEY")) {
1593             result = true;
1594             tokens.consume("FOREIGN", "KEY");
1595             // This is an auto-named FK
1596             // References in an in-line constraint is really a foreign key definition
1597             // EXAMPLE:
1598             // COLUMN_NAME DATATYPE NOT NULL DEFAULT (0) FOREIGN KEY MY_FK_NAME REFERENCES SOME_TABLE_NAME (SOME_COLUMN_NAME, ...)
1599 
1600             String constraintName = parseName(tokens);
1601 
1602             AstNode constraintNode = nodeFactory().node(constraintName, columnNode.getParent(), mixinType);
1603 
1604             constraintNode.setProperty(CONSTRAINT_TYPE, FOREIGN_KEY);
1605 
1606             nodeFactory().node(colName, constraintNode, TYPE_COLUMN_REFERENCE);
1607 
1608             parseReferences(tokens, constraintNode);
1609             parseConstraintAttributes(tokens, constraintNode);
1610         } else if (tokens.matches("REFERENCES")) {
1611             result = true;
1612             // This is an auto-named FK
1613             // References in an in-line constraint is really a foreign key definition
1614             // EXAMPLE:
1615             // COLUMN_NAME DATATYPE NOT NULL DEFAULT (0) REFERENCES SOME_TABLE_NAME (SOME_COLUMN_NAME, ...)
1616 
1617             String constraintName = "FK_1";
1618 
1619             AstNode constraintNode = nodeFactory().node(constraintName, columnNode.getParent(), mixinType);
1620 
1621             constraintNode.setProperty(CONSTRAINT_TYPE, FOREIGN_KEY);
1622 
1623             nodeFactory().node(colName, constraintNode, TYPE_COLUMN_REFERENCE);
1624 
1625             parseReferences(tokens, constraintNode);
1626             parseConstraintAttributes(tokens, constraintNode);
1627         } else if (tokens.matches("CHECK")) {
1628             result = true;
1629             tokens.consume("CHECK"); // CHECK
1630 
1631             String ck_name = "CHECK_1";
1632 
1633             AstNode constraintNode = nodeFactory().node(ck_name, columnNode.getParent(), mixinType);
1634             constraintNode.setProperty(NAME, ck_name);
1635             constraintNode.setProperty(CONSTRAINT_TYPE, CHECK);
1636 
1637             String clause = consumeParenBoundedTokens(tokens, true);
1638             constraintNode.setProperty(CHECK_SEARCH_CONDITION, clause);
1639         }
1640 
1641         return result;
1642     }
1643 
1644     /**
1645      * Parses full table constraint definition including the "CONSTRAINT" token Examples: CONSTRAINT P_KEY_2a UNIQUE
1646      * (PERMISSIONUID)
1647      * 
1648      * @param tokens the {@link DdlTokenStream} representing the tokenized DDL content; may not be null
1649      * @param tableNode
1650      * @param isAlterTable true if in-line constraint is part of add column in alter table statement
1651      * @throws ParsingException
1652      */
1653     protected void parseTableConstraint( DdlTokenStream tokens,
1654                                          AstNode tableNode,
1655                                          boolean isAlterTable ) throws ParsingException {
1656         assert tokens != null;
1657         assert tableNode != null;
1658 
1659         Name mixinType = isAlterTable ? TYPE_ADD_TABLE_CONSTRAINT_DEFINITION : TYPE_TABLE_CONSTRAINT;
1660 
1661         /*
1662         <table constraint definition> ::=
1663             [ <constraint name definition> ]
1664             <table constraint> [ <constraint attributes> ]
1665         
1666         <table constraint> ::=
1667               <unique constraint definition>
1668             | <referential constraint definition>
1669             | <check constraint definition>
1670             
1671         <constraint attributes> ::=
1672               <constraint check time> [ [ NOT ] DEFERRABLE ]
1673             | [ NOT ] DEFERRABLE [ <constraint check time> ]
1674         
1675         <unique constraint definition> ::=
1676                     <unique specification> even in SQL3)
1677             <unique specification>
1678               <left paren> <unique column list> <right paren>
1679         
1680         <unique column list> ::= <column name list>
1681         
1682         <referential constraint definition> ::=
1683             FOREIGN KEY
1684                 <left paren> <referencing columns> <right paren>
1685               <references specification>
1686         
1687         <referencing columns> ::=
1688             <reference column list>
1689             
1690         <constraint attributes> ::=
1691               <constraint check time> [ [ NOT ] DEFERRABLE ]
1692             | [ NOT ] DEFERRABLE [ <constraint check time> ]
1693         
1694         <constraint check time> ::=
1695               INITIALLY DEFERRED
1696             | INITIALLY IMMEDIATE
1697             
1698         <check constraint definition> ::=
1699         	CHECK
1700         		<left paren> <search condition> <right paren>
1701          */
1702         consumeComment(tokens);
1703 
1704         if ((tokens.matches("PRIMARY", "KEY")) || (tokens.matches("FOREIGN", "KEY")) || (tokens.matches("UNIQUE"))) {
1705 
1706             // This is the case where the PK/FK/UK is NOT NAMED
1707             if (tokens.matches("UNIQUE")) {
1708                 String uc_name = "UC_1"; // UNIQUE CONSTRAINT NAME
1709                 tokens.consume(); // UNIQUE
1710 
1711                 AstNode constraintNode = nodeFactory().node(uc_name, tableNode, mixinType);
1712                 constraintNode.setProperty(CONSTRAINT_TYPE, UNIQUE);
1713 
1714                 // CONSUME COLUMNS
1715                 parseColumnNameList(tokens, constraintNode, TYPE_COLUMN_REFERENCE);
1716 
1717                 parseConstraintAttributes(tokens, constraintNode);
1718 
1719                 consumeComment(tokens);
1720             } else if (tokens.matches("PRIMARY", "KEY")) {
1721                 String pk_name = "PK_1"; // PRIMARY KEY NAME
1722                 tokens.consume("PRIMARY", "KEY"); // PRIMARY KEY
1723 
1724                 AstNode constraintNode = nodeFactory().node(pk_name, tableNode, mixinType);
1725                 constraintNode.setProperty(CONSTRAINT_TYPE, PRIMARY_KEY);
1726 
1727                 // CONSUME COLUMNS
1728                 parseColumnNameList(tokens, constraintNode, TYPE_COLUMN_REFERENCE);
1729 
1730                 parseConstraintAttributes(tokens, constraintNode);
1731 
1732                 consumeComment(tokens);
1733             } else if (tokens.matches("FOREIGN", "KEY")) {
1734                 String fk_name = "FK_1"; // FOREIGN KEY NAME
1735                 tokens.consume("FOREIGN", "KEY"); // FOREIGN KEY
1736 
1737                 if (!tokens.matches(L_PAREN)) {
1738                     // Assume the FK is Named here
1739                     fk_name = tokens.consume();
1740                 }
1741 
1742                 AstNode constraintNode = nodeFactory().node(fk_name, tableNode, mixinType);
1743                 constraintNode.setProperty(CONSTRAINT_TYPE, FOREIGN_KEY);
1744 
1745                 // CONSUME COLUMNS
1746                 parseColumnNameList(tokens, constraintNode, TYPE_COLUMN_REFERENCE);
1747 
1748                 // Parse the references to table and columns
1749                 parseReferences(tokens, constraintNode);
1750 
1751                 parseConstraintAttributes(tokens, constraintNode);
1752 
1753                 consumeComment(tokens);
1754             }
1755         } else if (tokens.matches("CONSTRAINT", DdlTokenStream.ANY_VALUE, "UNIQUE")) {
1756             // CONSTRAINT P_KEY_2a UNIQUE (PERMISSIONUID)
1757             tokens.consume(); // CONSTRAINT
1758             String uc_name = parseName(tokens); // UNIQUE CONSTRAINT NAME
1759             tokens.consume("UNIQUE"); // UNIQUE
1760 
1761             AstNode constraintNode = nodeFactory().node(uc_name, tableNode, mixinType);
1762             constraintNode.setProperty(CONSTRAINT_TYPE, UNIQUE);
1763 
1764             // CONSUME COLUMNS
1765             parseColumnNameList(tokens, constraintNode, TYPE_COLUMN_REFERENCE);
1766 
1767             parseConstraintAttributes(tokens, constraintNode);
1768 
1769             consumeComment(tokens);
1770         } else if (tokens.matches("CONSTRAINT", DdlTokenStream.ANY_VALUE, "PRIMARY", "KEY")) {
1771             // CONSTRAINT U_KEY_2a PRIMARY KEY (PERMISSIONUID)
1772             tokens.consume("CONSTRAINT"); // CONSTRAINT
1773             String pk_name = parseName(tokens); // PRIMARY KEY NAME
1774             tokens.consume("PRIMARY", "KEY"); // PRIMARY KEY
1775 
1776             AstNode constraintNode = nodeFactory().node(pk_name, tableNode, mixinType);
1777             constraintNode.setProperty(CONSTRAINT_TYPE, PRIMARY_KEY);
1778 
1779             // CONSUME COLUMNS
1780             parseColumnNameList(tokens, constraintNode, TYPE_COLUMN_REFERENCE);
1781 
1782             parseConstraintAttributes(tokens, constraintNode);
1783 
1784             consumeComment(tokens);
1785 
1786         } else if (tokens.matches("CONSTRAINT", DdlTokenStream.ANY_VALUE, "FOREIGN", "KEY")) {
1787             // CONSTRAINT F_KEY_2a FOREIGN KEY (PERMISSIONUID)
1788             tokens.consume("CONSTRAINT"); // CONSTRAINT
1789             String fk_name = parseName(tokens); // FOREIGN KEY NAME
1790             tokens.consume("FOREIGN", "KEY"); // FOREIGN KEY
1791 
1792             AstNode constraintNode = nodeFactory().node(fk_name, tableNode, mixinType);
1793 
1794             constraintNode.setProperty(CONSTRAINT_TYPE, FOREIGN_KEY);
1795 
1796             // CONSUME COLUMNS
1797             parseColumnNameList(tokens, constraintNode, TYPE_COLUMN_REFERENCE);
1798 
1799             // Parse the references to table and columns
1800             parseReferences(tokens, constraintNode);
1801 
1802             parseConstraintAttributes(tokens, constraintNode);
1803 
1804             consumeComment(tokens);
1805 
1806         } else if (tokens.matches("CONSTRAINT", DdlTokenStream.ANY_VALUE, "CHECK")) {
1807             // CONSTRAINT zipchk CHECK (char_length(zipcode) = 5);
1808             tokens.consume("CONSTRAINT"); // CONSTRAINT
1809             String ck_name = parseName(tokens); // NAME
1810             tokens.consume("CHECK"); // CHECK
1811 
1812             AstNode constraintNode = nodeFactory().node(ck_name, tableNode, mixinType);
1813             constraintNode.setProperty(CONSTRAINT_TYPE, CHECK);
1814 
1815             String clause = consumeParenBoundedTokens(tokens, true);
1816             constraintNode.setProperty(CHECK_SEARCH_CONDITION, clause);
1817         }
1818 
1819     }
1820 
1821     /**
1822      * Parses the attributes associated with any in-line column constraint definition or a table constrain definition.
1823      * 
1824      * @param tokens the {@link DdlTokenStream} representing the tokenized DDL content; may not be null
1825      * @param constraintNode
1826      * @throws ParsingException
1827      */
1828     protected void parseConstraintAttributes( DdlTokenStream tokens,
1829                                               AstNode constraintNode ) throws ParsingException {
1830         assert tokens != null;
1831         assert constraintNode != null;
1832 
1833         // Now we need to check for constraint attributes:
1834 
1835         // <constraint attributes> ::=
1836         // <constraint check time> [ [ NOT ] DEFERRABLE ]
1837         // | [ NOT ] DEFERRABLE [ <constraint check time> ]
1838         //
1839         // <constraint check time> ::=
1840         // INITIALLY DEFERRED
1841         // | INITIALLY IMMEDIATE
1842 
1843         // EXAMPLE : foreign key (contact_id) references contact (contact_id) on delete cascade INITIALLY DEFERRED,
1844         if (tokens.canConsume("INITIALLY", "DEFERRED")) {
1845             AstNode attrNode = nodeFactory().node("CONSTRAINT_ATTRIBUTE", constraintNode, CONSTRAINT_ATTRIBUTE_TYPE);
1846             attrNode.setProperty(PROPERTY_VALUE, "INITIALLY DEFERRED");
1847         }
1848         if (tokens.canConsume("INITIALLY", "IMMEDIATE")) {
1849             AstNode attrNode = nodeFactory().node("CONSTRAINT_ATTRIBUTE", constraintNode, CONSTRAINT_ATTRIBUTE_TYPE);
1850             attrNode.setProperty(PROPERTY_VALUE, "INITIALLY IMMEDIATE");
1851         }
1852         if (tokens.canConsume("NOT", "DEFERRABLE")) {
1853             AstNode attrNode = nodeFactory().node("CONSTRAINT_ATTRIBUTE", constraintNode, CONSTRAINT_ATTRIBUTE_TYPE);
1854             attrNode.setProperty(PROPERTY_VALUE, "NOT DEFERRABLE");
1855         }
1856         if (tokens.canConsume("DEFERRABLE")) {
1857             AstNode attrNode = nodeFactory().node("CONSTRAINT_ATTRIBUTE", constraintNode, CONSTRAINT_ATTRIBUTE_TYPE);
1858             attrNode.setProperty(PROPERTY_VALUE, "DEFERRABLE");
1859         }
1860         if (tokens.canConsume("INITIALLY", "DEFERRED")) {
1861             AstNode attrNode = nodeFactory().node("CONSTRAINT_ATTRIBUTE", constraintNode, CONSTRAINT_ATTRIBUTE_TYPE);
1862             attrNode.setProperty(PROPERTY_VALUE, "INITIALLY DEFERRED");
1863         }
1864         if (tokens.canConsume("INITIALLY", "IMMEDIATE")) {
1865             AstNode attrNode = nodeFactory().node("CONSTRAINT_ATTRIBUTE", constraintNode, CONSTRAINT_ATTRIBUTE_TYPE);
1866             attrNode.setProperty(PROPERTY_VALUE, "INITIALLY IMMEDIATE");
1867         }
1868     }
1869 
1870     protected void parseReferences( DdlTokenStream tokens,
1871                                     AstNode constraintNode ) throws ParsingException {
1872         assert tokens != null;
1873         assert constraintNode != null;
1874 
1875         if (tokens.matches("REFERENCES")) {
1876             tokens.consume("REFERENCES");
1877             // 'REFERENCES' referencedTableAndColumns matchType? referentialTriggeredAction?;
1878             String tableName = parseName(tokens);
1879 
1880             nodeFactory().node(tableName, constraintNode, TYPE_TABLE_REFERENCE);
1881 
1882             parseColumnNameList(tokens, constraintNode, TYPE_FK_COLUMN_REFERENCE);
1883 
1884             tokens.canConsume("MATCH", "FULL");
1885             tokens.canConsume("MATCH", "PARTIAL");
1886 
1887             //	
1888             // referentialTriggeredAction : (updateRule deleteRule?) | (deleteRule updateRule?);
1889             //
1890             // deleteRule : 'ON' 'DELETE' referencialAction;
1891             //	
1892             // updateRule : 'ON' 'UPDATE' referencialAction;
1893             //
1894             // referencialAction
1895             // : cascadeOption | setNullOption | setDefaultOption | noActionOption
1896             // ;
1897             //    		
1898             // cascadeOption : 'CASCADE';
1899             // setNullOption : 'SET' 'NULL';
1900             // setDefaultOption : 'SET' 'DEFAULT';
1901             // noActionOption : 'NO' 'ACTION';
1902             // nowOption : 'NOW' '(' ')' ;
1903 
1904             // Could be one or both, so check more than once.
1905             while (tokens.canConsume("ON", "UPDATE") || tokens.canConsume("ON", "DELETE")) {
1906 
1907                 if (tokens.matches("CASCADE") || tokens.matches("NOW()")) {
1908                     tokens.consume();
1909                 } else if (tokens.matches("SET", "NULL")) {
1910                     tokens.consume("SET", "NULL");
1911                 } else if (tokens.matches("SET", "DEFAULT")) {
1912                     tokens.consume("SET", "DEFAULT");
1913                 } else if (tokens.matches("NO", "ACTION")) {
1914                     tokens.consume("NO", "ACTION");
1915                 } else {
1916                     System.out.println(" ERROR:   ColumnDefinition REFERENCES has NO REFERENCIAL ACTION.");
1917                 }
1918             }
1919         }
1920     }
1921 
1922     // ===========================================================================================================================
1923     // PARSING CREATE VIEW
1924     // ===========================================================================================================================
1925 
1926     /**
1927      * Parses DDL CREATE VIEW {@link AstNode} basedregisterStatementStartPhrase on SQL 92 specifications. Initial implementation
1928      * here does not parse the statement in detail.
1929      * 
1930      * @param tokens the {@link DdlTokenStream} representing the tokenized DDL content; may not be null
1931      * @param parentNode the parent {@link AstNode} node; may not be null
1932      * @return the Create View node
1933      * @throws ParsingException
1934      */
1935     protected AstNode parseCreateViewStatement( DdlTokenStream tokens,
1936                                                 AstNode parentNode ) throws ParsingException {
1937         assert tokens != null;
1938         assert parentNode != null;
1939 
1940         markStartOfStatement(tokens);
1941         // <view definition> ::=
1942         // CREATE VIEW <table name> [ <left paren> <view column list><right paren> ]
1943         // AS <query expression>
1944         // [ WITH [ <levels clause> ] CHECK OPTION ]
1945         // <levels clause> ::=
1946         // CASCADED | LOCAL
1947 
1948         // NOTE: the query expression along with the CHECK OPTION clause require no SQL statement terminator.
1949         // So the CHECK OPTION clause will NOT
1950 
1951         String stmtType = "CREATE";
1952         tokens.consume("CREATE");
1953         if (tokens.canConsume("OR", "REPLACE")) {
1954             stmtType = stmtType + SPACE + "OR REPLACE";
1955         }
1956         tokens.consume("VIEW");
1957         stmtType = stmtType + SPACE + "VIEW";
1958 
1959         String name = parseName(tokens);
1960 
1961         AstNode createViewNode = nodeFactory().node(name, parentNode, TYPE_CREATE_VIEW_STATEMENT);
1962 
1963         // CONSUME COLUMNS
1964         parseColumnNameList(tokens, createViewNode, TYPE_COLUMN_REFERENCE);
1965 
1966         tokens.consume("AS");
1967 
1968         String queryExpression = parseUntilTerminator(tokens);
1969 
1970         createViewNode.setProperty(CREATE_VIEW_QUERY_EXPRESSION, queryExpression);
1971 
1972         markEndOfStatement(tokens, createViewNode);
1973 
1974         return createViewNode;
1975     }
1976 
1977     // ===========================================================================================================================
1978     // PARSING CREATE SCHEMA
1979     // ===========================================================================================================================
1980 
1981     /**
1982      * Parses DDL CREATE SCHEMA {@link AstNode} based on SQL 92 specifications. Initial implementation here does not parse the
1983      * statement in detail.
1984      * 
1985      * @param tokens the {@link DdlTokenStream} representing the tokenized DDL content; may not be null
1986      * @param parentNode the parent {@link AstNode} node; may not be null
1987      * @return the parsed schema node
1988      * @throws ParsingException
1989      */
1990     protected AstNode parseCreateSchemaStatement( DdlTokenStream tokens,
1991                                                   AstNode parentNode ) throws ParsingException {
1992         markStartOfStatement(tokens);
1993 
1994         AstNode schemaNode = null;
1995 
1996         String authorizationIdentifier = null;
1997         String schemaName = null;
1998 
1999         tokens.consume("CREATE", "SCHEMA");
2000 
2001         if (tokens.canConsume("AUTHORIZATION")) {
2002             authorizationIdentifier = tokens.consume();
2003         } else {
2004             schemaName = parseName(tokens);
2005             if (tokens.canConsume("AUTHORIZATION")) {
2006                 authorizationIdentifier = parseName(tokens);
2007             }
2008         }
2009         // Must have one or the other or both
2010         assert authorizationIdentifier != null || schemaName != null;
2011 
2012         if (schemaName != null) {
2013             schemaNode = nodeFactory().node(schemaName, parentNode, TYPE_CREATE_SCHEMA_STATEMENT);
2014         } else {
2015             schemaNode = nodeFactory().node(authorizationIdentifier, parentNode, TYPE_CREATE_SCHEMA_STATEMENT);
2016         }
2017 
2018         if (tokens.canConsume("DEFAULT", "CHARACTER", "SET")) {
2019             // consume name
2020             parseName(tokens);
2021         }
2022 
2023         markEndOfStatement(tokens, schemaNode);
2024 
2025         return schemaNode;
2026     }
2027 
2028     /**
2029      * Parses DDL CREATE ASSERTION {@link AstNode} based on SQL 92 specifications. Initial implementation here does not parse the
2030      * statement's search condition in detail.
2031      * 
2032      * @param tokens the {@link DdlTokenStream} representing the tokenized DDL content; may not be null
2033      * @param parentNode the parent {@link AstNode} node; may not be null
2034      * @return the parsed schema node
2035      * @throws ParsingException
2036      */
2037     protected AstNode parseCreateAssertionStatement( DdlTokenStream tokens,
2038                                                      AstNode parentNode ) throws ParsingException {
2039         markStartOfStatement(tokens);
2040 
2041         // <assertion definition> ::=
2042         // CREATE ASSERTION <constraint name> CHECK <left paren> <search condition> <right paren> [ <constraint attributes> ]
2043 
2044         AstNode node = null;
2045 
2046         tokens.consume("CREATE", "ASSERTION");
2047 
2048         String name = parseName(tokens);
2049 
2050         // Must have one or the other or both
2051 
2052         node = nodeFactory().node(name, parentNode, TYPE_CREATE_ASSERTION_STATEMENT);
2053 
2054         tokens.consume("CHECK");
2055 
2056         String searchCondition = consumeParenBoundedTokens(tokens, false);
2057 
2058         node.setProperty(CHECK_SEARCH_CONDITION, searchCondition);
2059 
2060         parseConstraintAttributes(tokens, node);
2061 
2062         markEndOfStatement(tokens, node);
2063 
2064         return node;
2065     }
2066 
2067     // ===========================================================================================================================
2068     // PARSING CREATE XXXXX (Typed Statements)
2069     // ===========================================================================================================================
2070 
2071     /**
2072      * Utility method to parse a statement that can be ignored. The value returned in the generic {@link AstNode} will contain all
2073      * text between starting token and either the terminator (if defined) or the next statement start token. NOTE: This method
2074      * does NOT mark and add consumed fragment to parent node.
2075      * 
2076      * @param tokens the {@link DdlTokenStream} representing the tokenized DDL content; may not be null
2077      * @param name
2078      * @param parentNode the parent {@link AstNode} node; may not be null
2079      * @return the parsed generic {@link AstNode}
2080      * @throws ParsingException
2081      */
2082     protected AstNode parseIgnorableStatement( DdlTokenStream tokens,
2083                                                String name,
2084                                                AstNode parentNode ) {
2085 
2086         AstNode node = nodeFactory().node(name, parentNode, TYPE_STATEMENT);
2087 
2088         parseUntilTerminator(tokens);
2089 
2090         // System.out.println(" >>> FOUND [" + stmt.getType() +"] STATEMENT TOKEN. IGNORING");
2091         return node;
2092     }
2093 
2094     /**
2095      * Utility method to parse a statement that can be ignored. The value returned in the generic {@link AstNode} will contain all
2096      * text between starting token and either the terminator (if defined) or the next statement start token. NOTE: This method
2097      * does NOT mark and add consumed fragment to parent node.
2098      * 
2099      * @param tokens the {@link DdlTokenStream} representing the tokenized DDL content; may not be null
2100      * @param name
2101      * @param parentNode the parent {@link AstNode} node; may not be null
2102      * @param mixinType
2103      * @return the parsed generic {@link AstNode}
2104      * @throws ParsingException
2105      */
2106     protected AstNode parseIgnorableStatement( DdlTokenStream tokens,
2107                                                String name,
2108                                                AstNode parentNode,
2109                                                Name mixinType ) {
2110         CheckArg.isNotNull(tokens, "tokens");
2111         CheckArg.isNotNull(name, "name");
2112         CheckArg.isNotNull(parentNode, "parentNode");
2113         CheckArg.isNotNull(mixinType, "mixinType");
2114 
2115         AstNode node = nodeFactory().node(name, parentNode, mixinType);
2116 
2117         parseUntilTerminator(tokens);
2118 
2119         return node;
2120     }
2121 
2122     /**
2123      * Utility method to parse a generic statement given a start phrase and statement mixin type.
2124      * 
2125      * @param tokens the {@link DdlTokenStream} representing the tokenized DDL content; may not be null
2126      * @param stmt_start_phrase the string array statement start phrase
2127      * @param parentNode the parent of the newly created node.
2128      * @param mixinType the mixin type of the newly created statement node
2129      * @return the new node
2130      */
2131     protected AstNode parseStatement( DdlTokenStream tokens,
2132                                       String[] stmt_start_phrase,
2133                                       AstNode parentNode,
2134                                       Name mixinType ) {
2135         CheckArg.isNotNull(tokens, "tokens");
2136         CheckArg.isNotNull(stmt_start_phrase, "stmt_start_phrase");
2137         CheckArg.isNotNull(parentNode, "parentNode");
2138         CheckArg.isNotNull(mixinType, "mixinType");
2139 
2140         markStartOfStatement(tokens);
2141         tokens.consume(stmt_start_phrase);
2142         AstNode result = parseIgnorableStatement(tokens, getStatementTypeName(stmt_start_phrase), parentNode, mixinType);
2143         markEndOfStatement(tokens, result);
2144 
2145         return result;
2146     }
2147 
2148     /**
2149      * Constructs a terminator AstNode as child of root node
2150      * 
2151      * @param parentNode the parent {@link AstNode} node; may not be null
2152      * @return terminator node
2153      */
2154     public final AstNode unknownTerminatedNode( AstNode parentNode ) {
2155         return nodeFactory.node("unknownStatement", parentNode, StandardDdlLexicon.TYPE_UNKNOWN_STATEMENT);
2156     }
2157 
2158     /**
2159      * Constructs a terminator AstNode as child of root node
2160      * 
2161      * @param parentNode the parent {@link AstNode} node; may not be null
2162      * @return terminator node
2163      */
2164     public final AstNode missingTerminatorNode( AstNode parentNode ) {
2165         return nodeFactory.node("missingTerminator", parentNode, StandardDdlLexicon.TYPE_MISSING_TERMINATOR);
2166     }
2167 
    /**
     * Determines whether the supplied node is a missing-terminator node, i.e. it has the expected node name and the
     * missing-terminator mixin type.
     * 
     * @param node the {@link AstNode} to check; may not be null
     * @return true if the node is a missing-terminator node
     */
    public final boolean isMissingTerminatorNode( AstNode node ) {
        return node.getName().getString().equals(MISSING_TERMINATOR_NODE_LITERAL)
               && nodeFactory().hasMixinType(node, TYPE_MISSING_TERMINATOR);
    }
2172 
2173     public final boolean isValidSchemaChild( AstNode node ) {
2174         Name[] schemaChildMixins = getValidSchemaChildTypes();
2175         for (Object mixin : node.getProperty(JcrLexicon.MIXIN_TYPES).getValuesAsArray()) {
2176             if (mixin instanceof Name) {
2177                 for (Name nextType : schemaChildMixins) {
2178                     if (nextType.equals(mixin)) {
2179                         return true;
2180                     }
2181                 }
2182             }
2183         }
2184 
2185         return false;
2186     }
2187 
    /**
     * Re-parents the given statement node under the most recently parsed CREATE SCHEMA node when the statement is a valid schema
     * child and the root's last few children indicate we are still inside that schema's scope.
     * 
     * @param statementNode the statement node to possibly re-parent; may not be null
     * @param stmtIsMissingTerminator true if the statement itself was not terminated, in which case a missing-terminator node is
     *        appended to the schema node
     * @return true if the statement node was made a child of a schema node, or false otherwise
     */
    public final boolean setAsSchemaChildNode( AstNode statementNode,
                                               boolean stmtIsMissingTerminator ) {

        if (!isValidSchemaChild(statementNode)) {
            return false;
        }

        // Because we are setting the schema children on the fly we can assume that if we are under a schema with children, then
        // the schema should be followed by a missing terminator node. So we just check the previous 2 nodes.

        List<AstNode> children = getRootNode().getChildren();

        if (children.size() > 2) {
            // children.size() - 1 is the statement node itself; look at the two nodes before it
            AstNode previousNode = children.get(children.size() - 2);
            if (nodeFactory().hasMixinType(previousNode, TYPE_MISSING_TERMINATOR)) {
                AstNode theSchemaNode = children.get(children.size() - 3);

                // If the last child of a schema is missing terminator, then the schema isn't complete.
                // If it is NOT a missing terminator, we aren't under a schema node anymore.
                if (theSchemaNode.getChildCount() == 0
                    || nodeFactory().hasMixinType(theSchemaNode.getLastChild(), TYPE_MISSING_TERMINATOR)) {
                    if (nodeFactory().hasMixinType(theSchemaNode, TYPE_CREATE_SCHEMA_STATEMENT)) {
                        statementNode.setParent(theSchemaNode);
                        if (stmtIsMissingTerminator) {
                            missingTerminatorNode(theSchemaNode);
                        }
                        return true;
                    }
                }
            }
        }

        return false;
    }
2222 
2223     /**
2224      * Returns current terminator
2225      * 
2226      * @return terminator string value
2227      */
2228     protected String getTerminator() {
2229         return this.terminator;
2230     }
2231 
2232     /**
2233      * @param terminator the string value used as the statement terminator for the ddl dialect
2234      * @return if terminator was changed or not
2235      */
2236     protected boolean setTerminator( String terminator ) {
2237         CheckArg.isNotNull(terminator, "terminator");
2238         if (this.terminator.equalsIgnoreCase(terminator)) {
2239             return false;
2240         }
2241         this.terminator = terminator;
2242         return true;
2243     }
2244 
    /**
     * Returns the mixin types that are valid as children of a CREATE SCHEMA statement node. Sub-classes may override this method
     * to contribute dialect-specific child types.
     * 
     * @return the array of valid schema child mixin types
     */
    protected Name[] getValidSchemaChildTypes() {
        return VALID_SCHEMA_CHILD_TYPES;
    }
2248 
2249     /**
2250      * Checks if next token is of type comment.
2251      * 
2252      * @param tokens the {@link DdlTokenStream} representing the tokenized DDL content; may not be null
2253      * @return true if next token is a comment.
2254      * @throws ParsingException
2255      */
2256     protected boolean isComment( DdlTokenStream tokens ) throws ParsingException {
2257         return tokens.matches(DdlTokenizer.COMMENT);
2258     }
2259 
2260     /**
2261      * Consumes an an end-of-line comment or in-line comment
2262      * 
2263      * @param tokens the {@link DdlTokenStream} representing the tokenized DDL content; may not be null
2264      * @return true if a comment was found and consumed
2265      * @throws ParsingException
2266      */
2267     protected boolean consumeComment( DdlTokenStream tokens ) throws ParsingException {
2268         return tokens.canConsume(DdlTokenizer.COMMENT);
2269     }
2270 
2271     /**
2272      * This utility method provides this parser the ability to distinguish between a CreateTable Constraint and a ColumnDefinition
2273      * Definition which are the only two statement segment types allowed within the CREATE TABLE parenthesis ( xxxxxx );
2274      * 
2275      * @param tokens the {@link DdlTokenStream} representing the tokenized DDL content; may not be null
2276      * @return is table constraint
2277      * @throws ParsingException
2278      */
2279     protected boolean isTableConstraint( DdlTokenStream tokens ) throws ParsingException {
2280         boolean result = false;
2281 
2282         if ((tokens.matches("PRIMARY", "KEY")) || (tokens.matches("FOREIGN", "KEY")) || (tokens.matches("UNIQUE"))) {
2283             result = true;
2284         } else if (tokens.matches("CONSTRAINT")) {
2285             if (tokens.matches("CONSTRAINT", DdlTokenStream.ANY_VALUE, "UNIQUE")
2286                 || tokens.matches("CONSTRAINT", DdlTokenStream.ANY_VALUE, "PRIMARY", "KEY")
2287                 || tokens.matches("CONSTRAINT", DdlTokenStream.ANY_VALUE, "FOREIGN", "KEY")
2288                 || tokens.matches("CONSTRAINT", DdlTokenStream.ANY_VALUE, "CHECK")) {
2289                 result = true;
2290             }
2291         }
2292 
2293         return result;
2294     }
2295 
2296     /**
2297      * This utility method provides this parser the ability to distinguish between a CreateTable Constrain and a ColumnDefinition
2298      * Definition which are the only two statement segment types allowed within the CREATE TABLE parenthesis ( xxxxxx );
2299      * 
2300      * @param tokens the {@link DdlTokenStream} representing the tokenized DDL content; may not be null
2301      * @return is column definition start phrase
2302      * @throws ParsingException
2303      */
2304     protected boolean isColumnDefinitionStart( DdlTokenStream tokens ) throws ParsingException {
2305         boolean result = false;
2306 
2307         if (isTableConstraint(tokens)) {
2308             result = false;
2309         } else {
2310             for (String dTypeStartWord : getDataTypeStartWords()) {
2311                 result = (tokens.matches(DdlTokenStream.ANY_VALUE, dTypeStartWord) || tokens.matches("COLUMN",
2312                                                                                                      DdlTokenStream.ANY_VALUE,
2313                                                                                                      dTypeStartWord));
2314                 if (result) {
2315                     break;
2316                 }
2317             }
2318 
2319         }
2320 
2321         return result;
2322     }
2323 
2324     /**
2325      * Returns a list of data type start words which can be used to help identify a column definition sub-statement.
2326      * 
2327      * @return list of data type start words
2328      */
2329     protected List<String> getDataTypeStartWords() {
2330         if (allDataTypeStartWords == null) {
2331             allDataTypeStartWords = new ArrayList<String>();
2332             allDataTypeStartWords.addAll(DataTypes.DATATYPE_START_WORDS);
2333             allDataTypeStartWords.addAll(getCustomDataTypeStartWords());
2334         }
2335         return allDataTypeStartWords;
2336     }
2337 
2338     /**
2339      * Returns a list of custom data type start words which can be used to help identify a column definition sub-statement.
2340      * Sub-classes should override this method to contribute DB-specific data types.
2341      * 
2342      * @return list of data type start words
2343      */
2344     protected List<String> getCustomDataTypeStartWords() {
2345         return Collections.emptyList();
2346     }
2347 
2348     /**
2349      * Method to parse fully qualified schema, table and column names that are defined with '.' separator and optionally bracketed
2350      * with square brackets Example: partsSchema.supplier Example: [partsSchema].[supplier]
2351      * 
2352      * @param tokens the {@link DdlTokenStream} representing the tokenized DDL content; may not be null
2353      * @return the parsed name
2354      */
2355     protected String parseName( DdlTokenStream tokens ) {
2356         // Basically we want to construct a name that could have the form:
2357         // [schemaName].[tableName].[columnName]
2358         // NOTE: "[]" brackets are optional
2359         StringBuffer sb = new StringBuffer();
2360 
2361         if (tokens.matches('[')) {
2362             // We have the bracketed case, so assume all brackets
2363             while (true) {
2364 
2365                 tokens.consume('['); // [ bracket
2366                 sb.append(consumeIdentifier(tokens)); // name
2367                 tokens.consume(']'); // ] bracket
2368                 if (tokens.matches('.')) {
2369                     sb.append(tokens.consume()); // '.'
2370                 } else {
2371                     break;
2372                 }
2373             }
2374         } else {
2375 
2376             // We have the NON-bracketed case, so assume all brackets
2377             while (true) {
2378 
2379                 sb.append(consumeIdentifier(tokens)); // name
2380 
2381                 if (tokens.matches('.')) {
2382                     sb.append(tokens.consume()); // '.'
2383                 } else {
2384                     break;
2385                 }
2386 
2387             }
2388         }
2389 
2390         return sb.toString();
2391     }
2392 
2393     /**
2394      * Consumes an token identifier which can be of the form of a simple string or a double-quoted string.
2395      * 
2396      * @param tokens the {@link DdlTokenStream} representing the tokenized DDL content; may not be null
2397      * @return the identifier
2398      * @throws ParsingException
2399      */
2400     protected String consumeIdentifier( DdlTokenStream tokens ) throws ParsingException {
2401         String value = tokens.consume();
2402         // This may surrounded by quotes, so remove them ...
2403         if (value.charAt(0) == '"') {
2404             int length = value.length();
2405             // Check for the end quote ...
2406             value = value.substring(1, length - 1); // not complete!!
2407         }
2408         // TODO: Handle warnings elegantly
2409         // else {
2410         // // Not quoted, so check for reserved words ...
2411         // if (tokens.isKeyWord(value)) {
2412         // // Record warning ...
2413         // System.out.println("  WARNING:  Identifier [" + value + "] is a SQL 92 Reserved Word");
2414         // }
2415         // }
2416         return value;
2417     }
2418 
2419     /**
2420      * Utility method to determine if next token is a terminator.
2421      * 
2422      * @param tokens the {@link DdlTokenStream} representing the tokenized DDL content; may not be null
2423      * @return is terminator token
2424      * @throws ParsingException
2425      */
2426     protected boolean isTerminator( DdlTokenStream tokens ) throws ParsingException {
2427         boolean result = tokens.matches(getTerminator());
2428 
2429         return result;
2430     }
2431 
2432     /**
2433      * Adds column reference nodes to a parent node. Returns true if column references added, false if not.
2434      * 
2435      * @param tokens the {@link DdlTokenStream} representing the tokenized DDL content; may not be null
2436      * @param parentNode the parent node
2437      * @param referenceType the type of the reference node to create
2438      * @return true if the column references were found and added to the node, or false if there were no column references found
2439      *         in the stream
2440      */
2441     protected boolean parseColumnNameList( DdlTokenStream tokens,
2442                                            AstNode parentNode,
2443                                            Name referenceType ) {
2444         boolean parsedColumns = false;
2445         // CONSUME COLUMNS
2446         List<String> columnNameList = new ArrayList<String>();
2447         if (tokens.matches(L_PAREN)) {
2448             tokens.consume(L_PAREN);
2449             columnNameList = parseColumnNameList(tokens);
2450             tokens.consume(R_PAREN);
2451         }
2452 
2453         for (String columnName : columnNameList) {
2454             nodeFactory().node(columnName, parentNode, referenceType);
2455             parsedColumns = true;
2456         }
2457 
2458         return parsedColumns;
2459     }
2460 
2461     /**
2462      * Parses a comma separated list of column names.
2463      * 
2464      * @param tokens the {@link DdlTokenStream} representing the tokenized DDL content; may not be null
2465      * @return list of column names.
2466      * @throws ParsingException
2467      */
2468     protected List<String> parseColumnNameList( DdlTokenStream tokens ) throws ParsingException {
2469         List<String> columnNames = new LinkedList<String>();
2470 
2471         while (true) {
2472             columnNames.add(parseName(tokens));
2473             if (!tokens.canConsume(COMMA)) {
2474                 break;
2475             }
2476         }
2477 
2478         return columnNames;
2479     }
2480 
2481     /**
2482      * Utility method which parses tokens until a terminator is found, another statement is identified or there are no more
2483      * tokens.
2484      * 
2485      * @param tokens the {@link DdlTokenStream} representing the tokenized DDL content; may not be null
2486      * @return the parsed string
2487      * @throws ParsingException
2488      */
2489     protected String parseUntilTerminator( DdlTokenStream tokens ) throws ParsingException {
2490         StringBuffer sb = new StringBuffer();
2491         if (doUseTerminator()) {
2492             boolean lastTokenWasPeriod = false;
2493             while (tokens.hasNext() && !tokens.matches(DdlTokenizer.STATEMENT_KEY) && !isTerminator(tokens)) {
2494                 String thisToken = tokens.consume();
2495                 boolean thisTokenIsPeriod = thisToken.equals(PERIOD);
2496                 boolean thisTokenIsComma = thisToken.equals(COMMA);
2497                 if (lastTokenWasPeriod || thisTokenIsPeriod || thisTokenIsComma) {
2498                     sb.append(thisToken);
2499                 } else {
2500                     sb.append(SPACE).append(thisToken);
2501                 }
2502                 if (thisTokenIsPeriod) {
2503                     lastTokenWasPeriod = true;
2504                 } else {
2505                     lastTokenWasPeriod = false;
2506                 }
2507             }
2508         } else {
2509             // parse until next statement
2510             boolean lastTokenWasPeriod = false;
2511             while (tokens.hasNext() && !tokens.matches(DdlTokenizer.STATEMENT_KEY)) {
2512                 String thisToken = tokens.consume();
2513                 boolean thisTokenIsPeriod = thisToken.equals(PERIOD);
2514                 boolean thisTokenIsComma = thisToken.equals(COMMA);
2515                 if (lastTokenWasPeriod || thisTokenIsPeriod || thisTokenIsComma) {
2516                     sb.append(thisToken);
2517                 } else {
2518                     sb.append(SPACE).append(thisToken);
2519                 }
2520                 if (thisTokenIsPeriod) {
2521                     lastTokenWasPeriod = true;
2522                 } else {
2523                     lastTokenWasPeriod = false;
2524                 }
2525             }
2526         }
2527 
2528         return sb.toString();
2529     }
2530 
2531     /**
2532      * Utility method which parses tokens until a terminator is found or there are no more tokens. This method differs from
2533      * parseUntilTermintor() in that it ignores embedded statements. This method can be used for parsers that have statements
2534      * which can embed statements that should not be parsed.
2535      * 
2536      * @param tokens the {@link DdlTokenStream} representing the tokenized DDL content; may not be null
2537      * @return the parsed string
2538      * @throws ParsingException
2539      */
2540     protected String parseUntilTerminatorIgnoreEmbeddedStatements( DdlTokenStream tokens ) throws ParsingException {
2541         StringBuffer sb = new StringBuffer();
2542 
2543         boolean lastTokenWasPeriod = false;
2544         while (tokens.hasNext() && !isTerminator(tokens)) {
2545             String thisToken = tokens.consume();
2546             boolean thisTokenIsPeriod = thisToken.equals(PERIOD);
2547             boolean thisTokenIsComma = thisToken.equals(COMMA);
2548             if (lastTokenWasPeriod || thisTokenIsPeriod || thisTokenIsComma) {
2549                 sb.append(thisToken);
2550             } else {
2551                 sb.append(SPACE).append(thisToken);
2552             }
2553             if (thisTokenIsPeriod) {
2554                 lastTokenWasPeriod = true;
2555             } else {
2556                 lastTokenWasPeriod = false;
2557             }
2558         }
2559 
2560         return sb.toString();
2561     }
2562 
2563     /**
2564      * Utility method which parses tokens until a semicolon is found or there are no more tokens.
2565      * 
2566      * @param tokens the {@link DdlTokenStream} representing the tokenized DDL content; may not be null
2567      * @return the parsed string
2568      * @throws ParsingException
2569      */
2570     protected String parseUntilSemiColon( DdlTokenStream tokens ) throws ParsingException {
2571         StringBuffer sb = new StringBuffer();
2572 
2573         boolean lastTokenWasPeriod = false;
2574         while (tokens.hasNext() && !tokens.matches(SEMICOLON)) {
2575             String thisToken = tokens.consume();
2576             boolean thisTokenIsPeriod = thisToken.equals(PERIOD);
2577             boolean thisTokenIsComma = thisToken.equals(COMMA);
2578             if (lastTokenWasPeriod || thisTokenIsPeriod || thisTokenIsComma) {
2579                 sb.append(thisToken);
2580             } else {
2581                 sb.append(SPACE).append(thisToken);
2582             }
2583             if (thisTokenIsPeriod) {
2584                 lastTokenWasPeriod = true;
2585             } else {
2586                 lastTokenWasPeriod = false;
2587             }
2588         }
2589 
2590         return sb.toString();
2591     }
2592 
2593     protected String parseUntilCommaOrTerminator( DdlTokenStream tokens ) throws ParsingException {
2594         StringBuffer sb = new StringBuffer();
2595         if (doUseTerminator()) {
2596             while (tokens.hasNext() && !tokens.matches(DdlTokenizer.STATEMENT_KEY) && !isTerminator(tokens)
2597                    && !tokens.matches(COMMA)) {
2598                 sb.append(SPACE).append(tokens.consume());
2599             }
2600         } else {
2601             // parse until next statement
2602             while (tokens.hasNext() && !tokens.matches(DdlTokenizer.STATEMENT_KEY) && !tokens.matches(COMMA)) {
2603                 sb.append(SPACE).append(tokens.consume());
2604             }
2605         }
2606 
2607         return sb.toString();
2608     }
2609 
2610     /**
2611      * Returns if parser is using statement terminator or not.
2612      * 
2613      * @return value of useTerminator flag.
2614      */
2615     public boolean doUseTerminator() {
2616         return useTerminator;
2617     }
2618 
2619     /**
2620      * Sets the value of the use terminator flag for the parser. If TRUE, then all statements are expected to be terminated by a
2621      * terminator. The default terminator ";" can be overridden by setting the value using setTerminator() method.
2622      * 
2623      * @param useTerminator
2624      */
2625     public void setDoUseTerminator( boolean useTerminator ) {
2626         this.useTerminator = useTerminator;
2627     }
2628 
2629     public String getStatementTypeName( String[] stmtPhrase ) {
2630         StringBuffer sb = new StringBuffer(100);
2631         for (int i = 0; i < stmtPhrase.length; i++) {
2632             if (i == 0) {
2633                 sb.append(stmtPhrase[0]);
2634             } else {
2635                 sb.append(SPACE).append(stmtPhrase[i]);
2636             }
2637         }
2638 
2639         return sb.toString();
2640     }
2641 
2642     /**
2643      * Parses the default clause for a column and sets appropriate properties on the column node.
2644      * 
2645      * @param tokens the {@link DdlTokenStream} representing the tokenized DDL content; may not be null
2646      * @param columnNode the column node which may contain a default clause; may not be null
2647      * @return true if default clause was found and parsed, otherwise false
2648      * @throws ParsingException
2649      */
2650     protected boolean parseDefaultClause( DdlTokenStream tokens,
2651                                           AstNode columnNode ) throws ParsingException {
2652 
2653         assert tokens != null;
2654         assert columnNode != null;
2655 
2656         // defaultClause
2657         // : defaultOption
2658         // ;
2659         // defaultOption : <literal> | datetimeValueFunction
2660         // | USER | CURRENT_USER | SESSION_USER | SYSTEM_USER | NULL;
2661         //
2662         // <datetime value function> ::=
2663         // <current date value function>
2664         // | <current time value function>
2665         // | <current timestamp value function>
2666         //
2667         // <current date value function> ::= CURRENT_DATE
2668         //
2669         // <current time value function> ::=
2670         // CURRENT_TIME [ <left paren> <time precision> <right paren> ]
2671         //
2672         // <current timestamp value function> ::=
2673         // CURRENT_TIMESTAMP [ <left paren> <timestamp precision> <right paren> ]
2674 
2675         String defaultValue = "";
2676 
2677         if (tokens.canConsume("DEFAULT")) {
2678 
2679             int optionID = -1;
2680             int precision = -1;
2681 
2682             if (tokens.canConsume("CURRENT_DATE")) {
2683 
2684                 optionID = DEFAULT_ID_DATETIME;
2685                 defaultValue = "CURRENT_DATE";
2686             } else if (tokens.canConsume("CURRENT_TIME")) {
2687                 optionID = DEFAULT_ID_DATETIME;
2688                 defaultValue = "CURRENT_TIME";
2689                 if (tokens.canConsume(L_PAREN)) {
2690                     // EXPECT INTEGER
2691                     precision = integer(tokens.consume());
2692                     tokens.canConsume(R_PAREN);
2693                 }
2694             } else if (tokens.canConsume("CURRENT_TIMESTAMP")) {
2695                 optionID = DEFAULT_ID_DATETIME;
2696                 defaultValue = "CURRENT_TIMESTAMP";
2697                 if (tokens.canConsume(L_PAREN)) {
2698                     // EXPECT INTEGER
2699                     precision = integer(tokens.consume());
2700                     tokens.canConsume(R_PAREN);
2701                 }
2702             } else if (tokens.canConsume("USER")) {
2703                 optionID = DEFAULT_ID_USER;
2704                 defaultValue = "USER";
2705             } else if (tokens.canConsume("CURRENT_USER")) {
2706                 optionID = DEFAULT_ID_CURRENT_USER;
2707                 defaultValue = "CURRENT_USER";
2708             } else if (tokens.canConsume("SESSION_USER")) {
2709                 optionID = DEFAULT_ID_SESSION_USER;
2710                 defaultValue = "SESSION_USER";
2711             } else if (tokens.canConsume("SYSTEM_USER")) {
2712                 optionID = DEFAULT_ID_SYSTEM_USER;
2713                 defaultValue = "SYSTEM_USER";
2714             } else if (tokens.canConsume("NULL")) {
2715                 optionID = DEFAULT_ID_NULL;
2716                 defaultValue = "NULL";
2717             } else if (tokens.canConsume(L_PAREN)) {
2718                 optionID = DEFAULT_ID_LITERAL;
2719                 while (!tokens.canConsume(R_PAREN)) {
2720                     defaultValue = defaultValue + tokens.consume();
2721                 }
2722             } else {
2723                 optionID = DEFAULT_ID_LITERAL;
2724                 // Assume default was EMPTY or ''
2725                 defaultValue = tokens.consume();
2726                 // NOTE: default value could be a Real number as well as an integer, so
2727                 // 1000.00 is valid
2728                 if (tokens.canConsume(".")) {
2729                     defaultValue = defaultValue + '.' + tokens.consume();
2730                 }
2731             }
2732 
2733             columnNode.setProperty(DEFAULT_OPTION, optionID);
2734             columnNode.setProperty(DEFAULT_VALUE, defaultValue);
2735             if (precision > -1) {
2736                 columnNode.setProperty(DEFAULT_PRECISION, precision);
2737             }
2738             return true;
2739         }
2740 
2741         return false;
2742     }
2743 
2744     /**
2745      * Parses the default clause for a column and sets appropriate properties on the column node.
2746      * 
2747      * @param tokens the {@link DdlTokenStream} representing the tokenized DDL content; may not be null
2748      * @param columnNode the column node which may contain a collate clause; may not be null
2749      * @return true if collate clause was found and parsed else return false.
2750      * @throws ParsingException
2751      */
2752     protected boolean parseCollateClause( DdlTokenStream tokens,
2753                                           AstNode columnNode ) throws ParsingException {
2754         assert tokens != null;
2755         assert columnNode != null;
2756 
2757         // an option in the CREATE DOMAIN definition
2758         //
2759         // <collate clause> ::= COLLATE <collation name>
2760 
2761         if (tokens.matches("COLLATE")) {
2762             tokens.consume("COLLATE");
2763             String collationName = parseName(tokens);
2764             columnNode.setProperty(COLLATION_NAME, collationName);
2765             return true;
2766         }
2767 
2768         return false;
2769     }
2770 
2771     /**
2772      * Returns the integer value of the input string. Handles both straight integer string or complex KMG (CLOB or BLOB) value.
2773      * Throws {@link NumberFormatException} if a valid integer is not found.
2774      * 
2775      * @param value the string to be parsed; may not be null and length must be > 0;
2776      * @return integer value
2777      */
2778     protected int integer( String value ) {
2779         assert value != null;
2780         assert value.length() > 0;
2781 
2782         return new BigInteger(value).intValue();
2783     }
2784 
    /**
     * Returns the position recorded by the most recent call to {@link #markStartOfStatement(DdlTokenStream)}.
     * 
     * @return the marked starting position of the statement currently being parsed; may be null if no
     *         statement has been marked yet -- TODO confirm field initialization elsewhere in this class
     */
    public final Position getCurrentMarkedPosition() {
        return currentMarkedPosition;
    }
2788 
    /**
     * Marks the token stream with the current position to help track statement scope within the original input string.
     * 
     * @param tokens the {@link DdlTokenStream} representing the tokenized DDL content; may not be null
     */
    public final void markStartOfStatement( DdlTokenStream tokens ) {
        // Mark the stream, then remember the next token's position so that markEndOfStatement(...)
        // can attach the source expression and start line/column/index metadata to the statement node.
        // Order matters: mark() must precede nextPosition().
        tokens.mark();
        currentMarkedPosition = tokens.nextPosition();
    }
2798 
2799     /**
2800      * Marks the end of a statement by consuming the terminator (if exists). If it does not exist, a missing terminator node may
2801      * be added. If the resulting statement node is a valid child node type for a schema, the child node may be re-parented to the
2802      * schema if the schema is still parentable. Each resulting statement node is tagged with the enclosing source expression,
2803      * starting line number and column number from the file content as well as a starting character index from that same content.
2804      * 
2805      * @param tokens the {@link DdlTokenStream} representing the tokenized DDL content; may not be null
2806      * @param statementNode
2807      */
2808     public final void markEndOfStatement( DdlTokenStream tokens,
2809                                           AstNode statementNode ) {
2810         if (!tokens.canConsume(getTerminator())) {
2811             // System.out.println("  WARNING:  Terminator NOT FOUND");
2812 
2813             // Check previous until
2814             // 1) find two sequential nodes that are not missing terminator nodes
2815             // 2) the node before the missing terminator is a valid schema child and
2816             // 3) we find a schema node that is ALSO missing a terminator BEFORE we find an invalid schema child OR a terminated
2817             // node.
2818 
2819             if (!setAsSchemaChildNode(statementNode, true)) {
2820                 missingTerminatorNode(getRootNode()); // Construct missing terminator node
2821             }
2822         } else {
2823             setAsSchemaChildNode(statementNode, false);
2824         }
2825 
2826         String source = tokens.getMarkedContent().trim();
2827         statementNode.setProperty(DDL_EXPRESSION, source);
2828         statementNode.setProperty(DDL_START_LINE_NUMBER, currentMarkedPosition.getLine());
2829         statementNode.setProperty(DDL_START_CHAR_INDEX, currentMarkedPosition.getIndexInContent());
2830         statementNode.setProperty(DDL_START_COLUMN_NUMBER, currentMarkedPosition.getColumn());
2831 
2832         testPrint("== >> SOURCE:\n" + source + "\n");
2833     }
2834 
2835     protected void testPrint( String str ) {
2836         if (isTestMode()) {
2837             System.out.println(str);
2838         }
2839     }
2840 
    /**
     * Indicates whether this parser is in test mode, which enables diagnostic output via {@link #testPrint(String)}.
     * 
     * @return testMode
     */
    public boolean isTestMode() {
        return testMode;
    }
2847 
    /**
     * Enables or disables test mode, which controls whether {@link #testPrint(String)} emits diagnostic output.
     * 
     * @param testMode Sets testMode to the specified value.
     */
    public void setTestMode( boolean testMode ) {
        this.testMode = testMode;
    }
2854 
    /**
     * {@inheritDoc}
     * 
     * @see org.modeshape.sequencer.ddl.DdlParser#getId()
     */
    public String getId() {
        // NOTE(review): the @see tag suggests this implements DdlParser#getId(); if so, adding @Override
        // would let the compiler enforce the contract -- confirm against the DdlParser declaration.
        return parserId;
    }
2863 
    /**
     * {@inheritDoc}
     * <p>
     * Based solely on the parser id, keeping it consistent with {@link #equals(Object)}, which compares
     * parsers by {@link #getId()} (which returns this same id).
     * 
     * @see java.lang.Object#hashCode()
     */
    @Override
    public int hashCode() {
        return this.parserId.hashCode();
    }
2873 
2874     /**
2875      * {@inheritDoc}
2876      * 
2877      * @see java.lang.Object#equals(java.lang.Object)
2878      */
2879     @Override
2880     public boolean equals( Object obj ) {
2881         if (obj == this) return true;
2882         if (obj instanceof DdlParser) {
2883             return ((DdlParser)obj).getId().equals(this.getId());
2884         }
2885         return false;
2886     }
2887 }