View Javadoc

1   /*
2    * ModeShape (http://www.modeshape.org)
3    * See the COPYRIGHT.txt file distributed with this work for information
4    * regarding copyright ownership.  Some portions may be licensed
5    * to Red Hat, Inc. under one or more contributor license agreements.
6    * See the AUTHORS.txt file in the distribution for a full listing of 
7    * individual contributors.
8    *
9    * ModeShape is free software. Unless otherwise indicated, all code in ModeShape
10   * is licensed to you under the terms of the GNU Lesser General Public License as
11   * published by the Free Software Foundation; either version 2.1 of
12   * the License, or (at your option) any later version.
13   * 
14   * ModeShape is distributed in the hope that it will be useful,
15   * but WITHOUT ANY WARRANTY; without even the implied warranty of
16   * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
17   * Lesser General Public License for more details.
18   *
19   * You should have received a copy of the GNU Lesser General Public
20   * License along with this software; if not, write to the Free
21   * Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
22   * 02110-1301 USA, or see the FSF site: http://www.fsf.org.
23   */
24  
25  /**
26   * This class provides basic parsing of SQL-92 based DDL files.  The initial implementation does NOT handle generic SQL query
27   * statements, but rather database schema manipulation (i.e. CREATE, DROP, ALTER, etc...)
28   * 
29   */
30  package org.modeshape.sequencer.ddl;
31  
32  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.ALL_PRIVILEGES;
33  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.CHECK_SEARCH_CONDITION;
34  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.COLLATION_NAME;
35  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.CONSTRAINT_ATTRIBUTE_TYPE;
36  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.CONSTRAINT_TYPE;
37  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.CREATE_VIEW_QUERY_EXPRESSION;
38  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.DDL_EXPRESSION;
39  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.DDL_START_CHAR_INDEX;
40  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.DDL_START_COLUMN_NUMBER;
41  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.DDL_START_LINE_NUMBER;
42  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.DEFAULT_OPTION;
43  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.DEFAULT_PRECISION;
44  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.DEFAULT_VALUE;
45  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.DROP_BEHAVIOR;
46  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.GRANTEE;
47  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.GRANT_PRIVILEGE;
48  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.MESSAGE;
49  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.NAME;
50  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.NULLABLE;
51  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.PROBLEM_LEVEL;
52  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.PROPERTY_VALUE;
53  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TEMPORARY;
54  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE;
55  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_ADD_TABLE_CONSTRAINT_DEFINITION;
56  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_ALTER_COLUMN_DEFINITION;
57  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_ALTER_DOMAIN_STATEMENT;
58  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_ALTER_TABLE_STATEMENT;
59  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_COLUMN_DEFINITION;
60  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_COLUMN_REFERENCE;
61  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_CREATE_ASSERTION_STATEMENT;
62  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_CREATE_CHARACTER_SET_STATEMENT;
63  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_CREATE_COLLATION_STATEMENT;
64  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_CREATE_DOMAIN_STATEMENT;
65  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_CREATE_SCHEMA_STATEMENT;
66  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_CREATE_TABLE_STATEMENT;
67  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_CREATE_TRANSLATION_STATEMENT;
68  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_CREATE_VIEW_STATEMENT;
69  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_DROP_ASSERTION_STATEMENT;
70  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_DROP_CHARACTER_SET_STATEMENT;
71  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_DROP_COLLATION_STATEMENT;
72  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_DROP_COLUMN_DEFINITION;
73  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_DROP_DOMAIN_STATEMENT;
74  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_DROP_SCHEMA_STATEMENT;
75  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_DROP_TABLE_CONSTRAINT_DEFINITION;
76  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_DROP_TABLE_STATEMENT;
77  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_DROP_TRANSLATION_STATEMENT;
78  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_DROP_VIEW_STATEMENT;
79  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_FK_COLUMN_REFERENCE;
80  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_GRANT_ON_CHARACTER_SET_STATEMENT;
81  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_GRANT_ON_COLLATION_STATEMENT;
82  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_GRANT_ON_DOMAIN_STATEMENT;
83  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_GRANT_ON_TABLE_STATEMENT;
84  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_GRANT_ON_TRANSLATION_STATEMENT;
85  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_INSERT_STATEMENT;
86  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_MISSING_TERMINATOR;
87  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_PROBLEM;
88  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_REVOKE_ON_CHARACTER_SET_STATEMENT;
89  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_REVOKE_ON_COLLATION_STATEMENT;
90  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_REVOKE_ON_DOMAIN_STATEMENT;
91  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_REVOKE_ON_TABLE_STATEMENT;
92  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_REVOKE_ON_TRANSLATION_STATEMENT;
93  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_SET_STATEMENT;
94  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_STATEMENT;
95  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_STATEMENT_OPTION;
96  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_TABLE_CONSTRAINT;
97  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_TABLE_REFERENCE;
98  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.VALUE;
99  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.WITH_GRANT_OPTION;
100 import java.math.BigInteger;
101 import java.util.ArrayList;
102 import java.util.Collections;
103 import java.util.LinkedList;
104 import java.util.List;
105 import net.jcip.annotations.NotThreadSafe;
106 import org.modeshape.common.text.ParsingException;
107 import org.modeshape.common.text.Position;
108 import org.modeshape.common.util.CheckArg;
109 import org.modeshape.graph.JcrLexicon;
110 import org.modeshape.graph.property.Name;
111 import org.modeshape.sequencer.ddl.DdlTokenStream.DdlTokenizer;
112 import org.modeshape.sequencer.ddl.datatype.DataType;
113 import org.modeshape.sequencer.ddl.datatype.DataTypeParser;
114 import org.modeshape.sequencer.ddl.node.AstNode;
115 import org.modeshape.sequencer.ddl.node.AstNodeFactory;
116 
117 /**
118  * Standard SQL 92 DDL file content parser.
119  */
120 @NotThreadSafe
121 public class StandardDdlParser implements DdlParser, DdlConstants, DdlConstants.StatementStartPhrases {
122 
    // Identifier for this parser/dialect; also used as an identifying keyword for scoring.
    private final String parserId = "SQL92";
    // When true, parse() prints each parsed statement via testPrint() for debugging.
    private boolean testMode = false;
    // Problems accumulated during score()/parse(); attached under the root node at the end of parse().
    private final List<DdlParserProblem> problems;
    // Factory used to create and type-check AstNode instances.
    private final AstNodeFactory nodeFactory;
    // Root of the AST currently being populated by parse(); see getRootNode()/setRootNode().
    private AstNode rootNode;
    private List<String> allDataTypeStartWords = null;
    // Delegate responsible for parsing data-type tokens; replaceable via setDatatypeParser().
    private DataTypeParser datatypeParser = null;
    // Statement terminator string; defaults to DEFAULT_TERMINATOR.
    private String terminator = DEFAULT_TERMINATOR;
    private boolean useTerminator = false;
    // Position recorded by markStartOfStatement(); read back via getCurrentMarkedPosition().
    private Position currentMarkedPosition;
133 
134     public StandardDdlParser() {
135         super();
136         setDoUseTerminator(true);
137         setDatatypeParser(new DataTypeParser());
138         nodeFactory = new AstNodeFactory();
139         problems = new ArrayList<DdlParserProblem>();
140     }
141 
142     /**
143      * Returns the data type parser instance.
144      * 
145      * @return the {@link DataTypeParser}
146      */
147     public DataTypeParser getDatatypeParser() {
148         return datatypeParser;
149     }
150 
151     /**
152      * @param datatypeParser
153      */
154     public void setDatatypeParser( DataTypeParser datatypeParser ) {
155         this.datatypeParser = datatypeParser;
156     }
157 
158     /**
159      * Method to access the node utility class.
160      * 
161      * @return the instance of the {@link AstNodeFactory} node utility class
162      */
163     public AstNodeFactory nodeFactory() {
164         return this.nodeFactory;
165     }
166 
167     /**
168      * @return rootNode
169      */
170     public AstNode getRootNode() {
171         return rootNode;
172     }
173 
174     /**
175      * @param rootNode Sets rootNode to the specified value.
176      */
177     public void setRootNode( AstNode rootNode ) {
178         this.rootNode = rootNode;
179     }
180 
181     /**
182      * {@inheritDoc}
183      * 
184      * @see org.modeshape.sequencer.ddl.DdlParser#score(java.lang.String, java.lang.String,
185      *      org.modeshape.sequencer.ddl.DdlParserScorer)
186      */
187     public Object score( String ddl,
188                          String fileName,
189                          DdlParserScorer scorer ) throws ParsingException {
190         CheckArg.isNotNull(ddl, "ddl");
191         CheckArg.isNotNull(scorer, "scorer");
192 
193         if (fileName != null) {
194             // Score the filename using the identifier only ...
195             scorer.scoreText(fileName, 2, getIdentifyingKeywords());
196         }
197 
198         // Create the state of this parser ...
199         problems.clear();
200         boolean includeComments = true;
201         DdlTokenStream tokens = new DdlTokenStream(ddl, DdlTokenStream.ddlTokenizer(includeComments), false);
202         initializeTokenStream(tokens);
203         tokens.start();
204 
205         testPrint("\n== >> StandardDdlParser.parse() PARSING STARTED: ");
206 
207         // Consume the first block of comments ...
208         while (tokens.matches(DdlTokenizer.COMMENT)) {
209             // Consume the comment ...
210             String comment = tokens.consume();
211             scorer.scoreText(comment, 2, getIdentifyingKeywords());
212         }
213 
214         // Compute the score for the rest of this content ...
215         computeScore(tokens, scorer);
216 
217         // Return the tokens so parse(...) won't have to re-tokenize ...
218         return tokens;
219     }
220 
221     protected void computeScore( DdlTokenStream tokens,
222                                  DdlParserScorer scorer ) {
223         while (tokens.hasNext()) {
224             if (tokens.isNextKeyWord()) {
225                 scorer.scoreStatements(1);
226             }
227             tokens.consume();
228         }
229     }
230 
231     public String[] getIdentifyingKeywords() {
232         return new String[] {getId()};
233     }
234 
    /**
     * {@inheritDoc}
     * 
     * @see org.modeshape.sequencer.ddl.DdlParser#parse(java.lang.String, org.modeshape.sequencer.ddl.node.AstNode,
     *      java.lang.Object)
     */
    public void parse( String ddl,
                       AstNode rootNode,
                       Object scoreReturnObject ) throws ParsingException {
        CheckArg.isNotNull(ddl, "ddl");
        CheckArg.isNotNull(rootNode, "rootNode");
        problems.clear();
        setRootNode(rootNode);

        // Reuse the token stream produced by score(...) when available; otherwise tokenize now (without comments).
        DdlTokenStream tokens = null;
        if (scoreReturnObject instanceof DdlTokenStream) {
            tokens = (DdlTokenStream)scoreReturnObject;
            tokens.rewind();
        } else {
            // Need to create the token stream ...
            boolean includeComments = false;
            tokens = new DdlTokenStream(ddl, DdlTokenStream.ddlTokenizer(includeComments), false);
            initializeTokenStream(tokens);
            tokens.start();
        }

        testPrint("\n== >> StandardDdlParser.parse() PARSING STARTED: ");

        // Simply move to the next statement start (registered prior to tokenizing).
        while (moveToNextStatementStart(tokens)) {

            // It is assumed that if a statement is registered, the registering dialect will handle the parsing of that object
            // and successfully create a statement {@link AstNode}
            AstNode stmtNode = parseNextStatement(tokens, rootNode);
            if (stmtNode == null) {
                // Nothing recognized the statement; capture it as an ignorable statement so parsing can continue.
                markStartOfStatement(tokens);
                String stmtName = tokens.consume();
                stmtNode = parseIgnorableStatement(tokens, stmtName, rootNode);
                markEndOfStatement(tokens, stmtNode);
            }
        }

        // Let subclasses post-process the tree (the standard implementation prunes missing-terminator nodes).
        rewrite(tokens, rootNode);

        // Attach every accumulated problem under the root node.
        for (DdlParserProblem problem : problems) {
            attachNewProblem(problem, rootNode);
        }

        // Debug aid: dump each parsed statement when test mode is enabled.
        if (testMode) {
            int count = 0;
            for (AstNode child : rootNode.getChildren()) {
                testPrint("== >> Found Statement" + "(" + (++count) + "):\n" + child);
            }
        }
    }
299 
300     /**
301      * Method called by {@link #score(String, String, DdlParserScorer)} and {@link #parse(String, AstNode, Object)} to initialize
302      * the {@link DdlTokenStream token stream}, giving subclasses a chance to {@link DdlTokenStream#registeredKeyWords register
303      * key words} and {@link DdlTokenStream#registerStatementStartPhrase(String[]) statement start phrases}.
304      * 
305      * @param tokens the stream of tokens
306      */
307     protected void initializeTokenStream( DdlTokenStream tokens ) {
308         tokens.registerKeyWords(SQL_92_RESERVED_WORDS);
309         tokens.registerStatementStartPhrase(SQL_92_ALL_PHRASES);
310     }
311 
312     /**
313      * Performs token match checks for initial statement type and delegates to specific parser methods. If no specific statement
314      * is found, then a call is made to parse a custom statement type. Subclasses may override this method, but the
315      * {@link StandardDdlParser}.parseCustomStatement() method is designed to allow for parsing db-specific statement types.
316      * 
317      * @param tokens the tokenized {@link DdlTokenStream} of the DDL input content; may not be null
318      * @param node the top level {@link AstNode}; may not be null
319      * @return node the new statement node
320      */
321     protected AstNode parseNextStatement( DdlTokenStream tokens,
322                                           AstNode node ) {
323         assert tokens != null;
324         assert node != null;
325 
326         AstNode stmtNode = null;
327 
328         if (tokens.matches(CREATE)) {
329             stmtNode = parseCreateStatement(tokens, node);
330         } else if (tokens.matches(ALTER)) {
331             stmtNode = parseAlterStatement(tokens, node);
332         } else if (tokens.matches(DROP)) {
333             stmtNode = parseDropStatement(tokens, node);
334         } else if (tokens.matches(INSERT)) {
335             stmtNode = parseInsertStatement(tokens, node);
336         } else if (tokens.matches(SET)) {
337             stmtNode = parseSetStatement(tokens, node);
338         } else if (tokens.matches(GRANT)) {
339             stmtNode = parseGrantStatement(tokens, node);
340         } else if (tokens.matches(REVOKE)) {
341             stmtNode = parseRevokeStatement(tokens, node);
342         }
343 
344         if (stmtNode == null) {
345             stmtNode = parseCustomStatement(tokens, node);
346         }
347 
348         return stmtNode;
349     }
350 
351     private boolean moveToNextStatementStart( DdlTokenStream tokens ) throws ParsingException {
352         assert tokens != null;
353 
354         StringBuffer sb = new StringBuffer();
355         DdlParserProblem problem = null;
356 
357         // Check to see if any more tokens exists
358         if (tokens.hasNext()) {
359             while (tokens.hasNext()) {
360                 if (tokens.canConsume(DdlTokenizer.COMMENT)) continue;
361 
362                 // If the next toke is a STATEMENT_KEY, then stop
363                 if (!tokens.matches(DdlTokenizer.STATEMENT_KEY)) {
364                     // If the next toke is NOT a statement, create a problem statement in case it can't be fully recognized as
365                     // a statement.
366                     if (problem == null) {
367                         markStartOfStatement(tokens);
368 
369                         String msg = DdlSequencerI18n.unusedTokensDiscovered.text(tokens.nextPosition().getLine(),
370                                                                                   tokens.nextPosition().getColumn());
371                         problem = new DdlParserProblem(DdlConstants.Problems.WARNING, tokens.nextPosition(), msg);
372                     }
373 
374                     String nextTokenValue = null;
375 
376                     // For known, parsed statements, the terminator is consumed in the markEndOfStatement() method. So if we get
377                     // here, we then we know we've got an unknown statement.
378                     if (tokens.matches(getTerminator()) && sb.length() > 0) {
379                         nextTokenValue = getTerminator();
380                         // Let's call this a statement up until now
381                         AstNode unknownNode = unknownTerminatedNode(getRootNode());
382                         markEndOfStatement(tokens, unknownNode);
383                         // We've determined that it's just an unknown node, which we determine is not a problem node.
384                         problem = null;
385                     } else {
386                         // Just keep consuming, but check each token value and allow sub-classes to handle the token if they wish.
387                         // ORACLE, for instance can terminator a complex statement with a backslash, '/'. Calling
388                         // handleUnknownToken() allows that dialect to create it's own statement node that can be assessed and
389                         // used during the rewrite() call at the end of parsing.
390                         nextTokenValue = tokens.consume();
391                         AstNode unknownNode = handleUnknownToken(tokens, nextTokenValue);
392                         if (unknownNode != null) {
393                             markEndOfStatement(tokens, unknownNode);
394                             // We've determined that it's just an unknown node, which we determine is not a problem node.
395                             problem = null;
396                         }
397                     }
398                     sb.append(SPACE).append(nextTokenValue);
399 
400                 } else {
401                     // If we have a problem, add it.
402                     if (problem != null && sb.length() > 0) {
403                         problem.setUnusedSource(sb.toString());
404                         addProblem(problem);
405                     }
406                     return true;
407                 }
408             }
409 
410             // If we still have a problem, add it.
411             if (problem != null && sb.length() > 0) {
412                 problem.setUnusedSource(sb.toString());
413                 addProblem(problem);
414             }
415         }
416         return false;
417     }
418 
419     public final void addProblem( DdlParserProblem problem,
420                                   AstNode node ) {
421         addProblem(problem);
422         attachNewProblem(problem, node);
423     }
424 
425     public final void addProblem( DdlParserProblem problem ) {
426         problems.add(problem);
427     }
428 
429     public final List<DdlParserProblem> getProblems() {
430         return this.problems;
431     }
432 
433     public final void attachNewProblem( DdlParserProblem problem,
434                                         AstNode parentNode ) {
435         assert problem != null;
436         assert parentNode != null;
437 
438         AstNode problemNode = nodeFactory().node("DDL PROBLEM", parentNode, TYPE_PROBLEM);
439         problemNode.setProperty(PROBLEM_LEVEL, problem.getLevel());
440         problemNode.setProperty(MESSAGE, problem.toString() + "[" + problem.getUnusedSource() + "]");
441 
442         testPrint(problem.toString());
443     }
444 
445     protected void rewrite( DdlTokenStream tokens,
446                             AstNode rootNode ) {
447         assert tokens != null;
448         assert rootNode != null;
449         // Walk the tree and remove any missing missing terminator nodes
450 
451         removeMissingTerminatorNodes(rootNode);
452     }
453 
454     protected void removeMissingTerminatorNodes( AstNode parentNode ) {
455         assert parentNode != null;
456         // Walk the tree and remove any missing missing terminator nodes
457         List<AstNode> copyOfNodes = new ArrayList<AstNode>(parentNode.getChildren());
458 
459         for (AstNode child : copyOfNodes) {
460             if (nodeFactory().hasMixinType(child, TYPE_MISSING_TERMINATOR)) {
461                 parentNode.removeChild(child);
462             } else {
463                 removeMissingTerminatorNodes(child);
464             }
465         }
466     }
467 
468     /**
469      * Merges second node into first node by re-setting expression source and length.
470      * 
471      * @param tokens the {@link DdlTokenStream} representing the tokenized DDL content; may not be null
472      * @param firstNode the node to merge into; may not be null
473      * @param secondNode the node to merge into first node; may not be null
474      */
475     public void mergeNodes( DdlTokenStream tokens,
476                             AstNode firstNode,
477                             AstNode secondNode ) {
478         assert tokens != null;
479         assert firstNode != null;
480         assert secondNode != null;
481 
482         int firstStartIndex = (Integer)firstNode.getProperty(DDL_START_CHAR_INDEX).getFirstValue();
483         int secondStartIndex = (Integer)secondNode.getProperty(DDL_START_CHAR_INDEX).getFirstValue();
484         int deltaLength = ((String)secondNode.getProperty(DDL_EXPRESSION).getFirstValue()).length();
485         Position startPosition = new Position(firstStartIndex, 1, 0);
486         Position endPosition = new Position((secondStartIndex + deltaLength), 1, 0);
487         String source = tokens.getContentBetween(startPosition, endPosition);
488         firstNode.setProperty(DDL_EXPRESSION, source);
489     }
490 
491     /**
492      * Utility method subclasses can override to check unknown tokens and perform additional node manipulation. Example would be
493      * in Oracle dialect for CREATE FUNCTION statements that can end with an '/' character because statement can contain multiple
494      * statements.
495      * 
496      * @param tokens the {@link DdlTokenStream} representing the tokenized DDL content; may not be null
497      * @param tokenValue the string value of the unknown token; never null
498      * @return the new node
499      * @throws ParsingException
500      */
501     public AstNode handleUnknownToken( DdlTokenStream tokens,
502                                        String tokenValue ) throws ParsingException {
503         assert tokens != null;
504         assert tokenValue != null;
505         // DEFAULT IMPLEMENTATION DOES NOTHING
506         return null;
507     }
508 
    /**
     * Parses DDL CREATE statement based on SQL 92 specifications. Each recognized CREATE variant is delegated to its
     * statement-specific parse method; anything else is consumed as an ignorable "CREATE UNKNOWN" statement with a warning.
     * 
     * @param tokens the {@link DdlTokenStream} representing the tokenized DDL content; may not be null
     * @param parentNode the parent {@link AstNode} node; may not be null
     * @return the parsed CREATE {@link AstNode}
     * @throws ParsingException
     */
    protected AstNode parseCreateStatement( DdlTokenStream tokens,
                                            AstNode parentNode ) throws ParsingException {
        assert tokens != null;
        assert parentNode != null;

        AstNode stmtNode = null;

        // SQL 92 CREATE OPTIONS:
        // CREATE SCHEMA
        // CREATE DOMAIN
        // CREATE [ { GLOBAL | LOCAL } TEMPORARY ] TABLE
        // CREATE VIEW
        // CREATE ASSERTION
        // CREATE CHARACTER SET
        // CREATE COLLATION
        // CREATE TRANSLATION

        if (tokens.matches(STMT_CREATE_SCHEMA)) {
            stmtNode = parseCreateSchemaStatement(tokens, parentNode);
        } else if (tokens.matches(STMT_CREATE_TABLE) || tokens.matches(STMT_CREATE_GLOBAL_TEMPORARY_TABLE)
                   || tokens.matches(STMT_CREATE_LOCAL_TEMPORARY_TABLE)) {
            stmtNode = parseCreateTableStatement(tokens, parentNode);
        } else if (tokens.matches(STMT_CREATE_VIEW) || tokens.matches(STMT_CREATE_OR_REPLACE_VIEW)) {
            stmtNode = parseCreateViewStatement(tokens, parentNode);
        } else if (tokens.matches(STMT_CREATE_ASSERTION)) {
            stmtNode = parseCreateAssertionStatement(tokens, parentNode);
        } else if (tokens.matches(STMT_CREATE_CHARACTER_SET)) {
            stmtNode = parseCreateCharacterSetStatement(tokens, parentNode);
        } else if (tokens.matches(STMT_CREATE_COLLATION)) {
            stmtNode = parseCreateCollationStatement(tokens, parentNode);
        } else if (tokens.matches(STMT_CREATE_TRANSLATION)) {
            stmtNode = parseCreateTranslationStatement(tokens, parentNode);
        } else if (tokens.matches(STMT_CREATE_DOMAIN)) {
            stmtNode = parseCreateDomainStatement(tokens, parentNode);
        } else {
            // Unrecognized CREATE variant: consume it as an ignorable statement and record a warning on the node.
            markStartOfStatement(tokens);

            stmtNode = parseIgnorableStatement(tokens, "CREATE UNKNOWN", parentNode);
            Position position = getCurrentMarkedPosition();
            String msg = DdlSequencerI18n.unknownCreateStatement.text(position.getLine(), position.getColumn());
            DdlParserProblem problem = new DdlParserProblem(DdlConstants.Problems.WARNING, position, msg);

            // NOTE(review): this stores the problem text under the TYPE_PROBLEM name as a property, rather than attaching a
            // problem node via attachNewProblem() — confirm that using TYPE_PROBLEM as a property key is intentional.
            stmtNode.setProperty(TYPE_PROBLEM, problem.toString());

            markEndOfStatement(tokens, stmtNode);
        }

        return stmtNode;
    }
570 
571     /**
572      * Parses DDL ALTER statement based on SQL 92 specifications.
573      * 
574      * @param tokens the {@link DdlTokenStream} representing the tokenized DDL content; may not be null
575      * @param parentNode the parent {@link AstNode} node; may not be null
576      * @return the parsed ALTER {@link AstNode}
577      * @throws ParsingException
578      */
579     protected AstNode parseAlterStatement( DdlTokenStream tokens,
580                                            AstNode parentNode ) throws ParsingException {
581         assert tokens != null;
582         assert parentNode != null;
583 
584         if (tokens.matches(ALTER, TABLE)) {
585             return parseAlterTableStatement(tokens, parentNode);
586         } else if (tokens.matches("ALTER", "DOMAIN")) {
587             markStartOfStatement(tokens);
588             tokens.consume("ALTER", "DOMAIN");
589             String domainName = parseName(tokens);
590             AstNode alterNode = nodeFactory().node(domainName, parentNode, TYPE_ALTER_DOMAIN_STATEMENT);
591             parseUntilTerminator(tokens);
592             markEndOfStatement(tokens, alterNode);
593             return alterNode;
594         }
595         return null;
596     }
597 
    /**
     * Parses a DDL ALTER TABLE statement based on the SQL 92 specification.
     * <p>
     * Supports the ADD (column or table constraint), DROP (column or constraint with optional
     * CASCADE/RESTRICT drop behavior) and ALTER (column SET DEFAULT / DROP DEFAULT) actions. Any
     * other trailing clause is consumed up to the statement terminator without further parsing.
     * 
     * @param tokens the {@link DdlTokenStream} representing the tokenized DDL content; may not be null
     * @param parentNode the parent {@link AstNode} node; may not be null
     * @return the parsed ALTER TABLE {@link AstNode}
     * @throws ParsingException if the statement tokens cannot be consumed as expected
     */
    protected AstNode parseAlterTableStatement( DdlTokenStream tokens,
                                                AstNode parentNode ) throws ParsingException {
        assert tokens != null;
        assert parentNode != null;

        markStartOfStatement(tokens);

        // <alter table statement> ::=
        // ALTER TABLE <table name> <alter table action>
        //
        // <alter table action> ::=
        // <add column definition>
        // | <alter column definition>
        // | <drop column definition>
        // | <add table constraint definition>
        // | <drop table constraint definition>

        tokens.consume("ALTER", "TABLE"); // consumes both 'ALTER' and 'TABLE'
        String tableName = parseName(tokens);

        AstNode alterTableNode = nodeFactory().node(tableName, parentNode, TYPE_ALTER_TABLE_STATEMENT);

        if (tokens.canConsume("ADD")) {
            // ADD <table constraint definition> | ADD [ COLUMN ] <column definition>
            if (isTableConstraint(tokens)) {
                parseTableConstraint(tokens, alterTableNode, true);
            } else {
                parseSingleTerminatedColumnDefinition(tokens, alterTableNode, true);
            }
        } else if (tokens.canConsume("DROP")) {
            if (tokens.canConsume("CONSTRAINT")) {
                // DROP CONSTRAINT <constraint name> [ CASCADE | RESTRICT ]
                String constraintName = parseName(tokens); // constraint name
                AstNode constraintNode = nodeFactory().node(constraintName, alterTableNode, TYPE_DROP_TABLE_CONSTRAINT_DEFINITION);
                if (tokens.canConsume(DropBehavior.CASCADE)) {
                    constraintNode.setProperty(DROP_BEHAVIOR, DropBehavior.CASCADE);
                } else if (tokens.canConsume(DropBehavior.RESTRICT)) {
                    constraintNode.setProperty(DROP_BEHAVIOR, DropBehavior.RESTRICT);
                }
            } else {
                // EXAMPLE: ALTER TABLE supplier DROP COLUMN supplier_name;
                //
                // DROP [ COLUMN ] <column name> <drop behavior>
                tokens.canConsume("COLUMN"); // "COLUMN" is optional
                String columnName = parseName(tokens);
                AstNode columnNode = nodeFactory().node(columnName, alterTableNode, TYPE_DROP_COLUMN_DEFINITION);
                if (tokens.canConsume(DropBehavior.CASCADE)) {
                    columnNode.setProperty(DROP_BEHAVIOR, DropBehavior.CASCADE);
                } else if (tokens.canConsume(DropBehavior.RESTRICT)) {
                    columnNode.setProperty(DROP_BEHAVIOR, DropBehavior.RESTRICT);
                }
            }
        } else if (tokens.canConsume("ALTER")) {
            // EXAMPLE: ALTER TABLE table_name [ ALTER column_name SET DEFAULT (0) ]
            //
            // ALTER [ COLUMN ] <column name> {SET <default clause> | DROP DEFAULT}

            tokens.canConsume("COLUMN"); // "COLUMN" is optional
            String alterColumnName = parseName(tokens);
            AstNode columnNode = nodeFactory().node(alterColumnName, alterTableNode, TYPE_ALTER_COLUMN_DEFINITION);
            if (tokens.canConsume("SET")) {
                parseDefaultClause(tokens, columnNode);
            } else if (tokens.canConsume("DROP", "DEFAULT")) {
                columnNode.setProperty(DROP_BEHAVIOR, "DROP DEFAULT");
            }
        } else {
            parseUntilTerminator(tokens); // COULD BE "NESTED TABLE xxxxxxxx" option clause
        }

        markEndOfStatement(tokens, alterTableNode);
        return alterTableNode;
    }
677 
678     /**
679      * Parses DDL DROP {@link AstNode} based on SQL 92 specifications.
680      * 
681      * @param tokens the {@link DdlTokenStream} representing the tokenized DDL content; may not be null
682      * @param parentNode the parent {@link AstNode} node; may not be null
683      * @return the parsed DROP {@link AstNode}
684      * @throws ParsingException
685      */
686     protected AstNode parseDropStatement( DdlTokenStream tokens,
687                                           AstNode parentNode ) throws ParsingException {
688         assert tokens != null;
689         assert parentNode != null;
690 
691         if (tokens.matches(STMT_DROP_TABLE)) {
692             // <drop table statement> ::=
693             // DROP TABLE <table name> <drop behavior>
694             //
695             // <drop behavior> ::= CASCADE | RESTRICT
696             return parseSimpleDropStatement(tokens, STMT_DROP_TABLE, parentNode, TYPE_DROP_TABLE_STATEMENT);
697         } else if (tokens.matches(STMT_DROP_VIEW)) {
698             return parseSimpleDropStatement(tokens, STMT_DROP_VIEW, parentNode, TYPE_DROP_VIEW_STATEMENT);
699         } else if (tokens.matches(STMT_DROP_SCHEMA)) {
700             return parseSimpleDropStatement(tokens, STMT_DROP_SCHEMA, parentNode, TYPE_DROP_SCHEMA_STATEMENT);
701         } else if (tokens.matches(STMT_DROP_DOMAIN)) {
702             return parseSimpleDropStatement(tokens, STMT_DROP_DOMAIN, parentNode, TYPE_DROP_DOMAIN_STATEMENT);
703         } else if (tokens.matches(STMT_DROP_TRANSLATION)) {
704             return parseSimpleDropStatement(tokens, STMT_DROP_TRANSLATION, parentNode, TYPE_DROP_TRANSLATION_STATEMENT);
705         } else if (tokens.matches(STMT_DROP_CHARACTER_SET)) {
706             return parseSimpleDropStatement(tokens, STMT_DROP_CHARACTER_SET, parentNode, TYPE_DROP_CHARACTER_SET_STATEMENT);
707         } else if (tokens.matches(STMT_DROP_ASSERTION)) {
708             return parseSimpleDropStatement(tokens, STMT_DROP_ASSERTION, parentNode, TYPE_DROP_ASSERTION_STATEMENT);
709         } else if (tokens.matches(STMT_DROP_COLLATION)) {
710             return parseSimpleDropStatement(tokens, STMT_DROP_COLLATION, parentNode, TYPE_DROP_COLLATION_STATEMENT);
711         }
712 
713         return null;
714     }
715 
716     private AstNode parseSimpleDropStatement( DdlTokenStream tokens,
717                                               String[] startPhrase,
718                                               AstNode parentNode,
719                                               Name stmtType ) throws ParsingException {
720         assert tokens != null;
721         assert startPhrase != null && startPhrase.length > 0;
722         assert parentNode != null;
723 
724         markStartOfStatement(tokens);
725         String behavior = null;
726         tokens.consume(startPhrase);
727         List<String> nameList = new ArrayList<String>();
728         nameList.add(parseName(tokens));
729         while (tokens.matches(COMMA)) {
730             tokens.consume(COMMA);
731             nameList.add(parseName(tokens));
732         }
733 
734         if (tokens.canConsume("CASCADE")) {
735             behavior = "CASCADE";
736         } else if (tokens.canConsume("RESTRICT")) {
737             behavior = "RESTRICT";
738         }
739 
740         AstNode dropNode = nodeFactory().node(nameList.get(0), parentNode, stmtType);
741         if (behavior != null) {
742             dropNode.setProperty(DROP_BEHAVIOR, behavior);
743         }
744         markEndOfStatement(tokens, dropNode);
745 
746         return dropNode;
747     }
748 
749     /**
750      * Parses DDL INSERT {@link AstNode} based on SQL 92 specifications.
751      * 
752      * @param tokens the {@link DdlTokenStream} representing the tokenized DDL content; may not be null
753      * @param parentNode the parent {@link AstNode} node; may not be null
754      * @return the {@link AstNode}
755      * @throws ParsingException
756      */
757     protected AstNode parseInsertStatement( DdlTokenStream tokens,
758                                             AstNode parentNode ) throws ParsingException {
759         assert tokens != null;
760         assert parentNode != null;
761 
762         // Original implementation does NOT parse Insert statement, but just returns a generic TypedStatement
763         if (tokens.matches(STMT_INSERT_INTO)) {
764             markStartOfStatement(tokens);
765             tokens.consume(STMT_INSERT_INTO);
766             String prefix = getStatementTypeName(STMT_INSERT_INTO);
767             AstNode node = nodeFactory().node(prefix, parentNode, TYPE_INSERT_STATEMENT);
768             parseUntilTerminator(tokens);
769             markEndOfStatement(tokens, node);
770             return node;
771         }
772         return null;
773     }
774 
775     /**
776      * Parses DDL SET {@link AstNode} based on SQL 92 specifications.
777      * 
778      * @param tokens the {@link DdlTokenStream} representing the tokenized DDL content; may not be null
779      * @param parentNode the parent {@link AstNode} node; may not be null
780      * @return the {@link AstNode}
781      * @throws ParsingException
782      */
783     protected AstNode parseSetStatement( DdlTokenStream tokens,
784                                          AstNode parentNode ) throws ParsingException {
785         assert tokens != null;
786         assert parentNode != null;
787 
788         // Original implementation does NOT parse Insert statement, but just returns a generic TypedStatement
789         if (tokens.matches(SET)) {
790             markStartOfStatement(tokens);
791             tokens.consume(SET);
792             AstNode node = nodeFactory().node("SET", parentNode, TYPE_SET_STATEMENT);
793             parseUntilTerminator(tokens);
794             markEndOfStatement(tokens, node);
795             return node;
796         }
797         return null;
798     }
799 
    /**
     * Parses a DDL GRANT statement based on the SQL 92 specification.
     * <p>
     * Builds a GRANT node typed by the grant target (TABLE, DOMAIN, COLLATION, CHARACTER SET or
     * TRANSLATION), attaches one child node per parsed privilege (or the ALL PRIVILEGES property),
     * one GRANTEE child per grantee, and records the optional WITH GRANT OPTION clause.
     * 
     * @param tokens the {@link DdlTokenStream} representing the tokenized DDL content; may not be null
     * @param parentNode the parent {@link AstNode} node; may not be null
     * @return the parsed GRANT {@link AstNode}
     * @throws ParsingException
     */
    protected AstNode parseGrantStatement( DdlTokenStream tokens,
                                           AstNode parentNode ) throws ParsingException {
        assert tokens != null;
        assert parentNode != null;
        assert tokens.matches(GRANT);

        markStartOfStatement(tokens);

        // Syntax for tables
        //
        // GRANT <privileges> ON <object name>
        // TO <grantee> [ { <comma> <grantee> }... ]
        // [ WITH GRANT OPTION ]
        //
        // <object name> ::=
        // [ TABLE ] <table name>
        // | DOMAIN <domain name>
        // | COLLATION <collation name>
        // | CHARACTER SET <character set name>
        // | TRANSLATION <translation name>
        //
        // Syntax for roles
        //
        // GRANT roleName [ {, roleName }* ] TO grantees

        // privilege-types
        //
        // ALL PRIVILEGES | privilege-list
        //
        AstNode grantNode = null;
        boolean allPrivileges = false;

        List<AstNode> privileges = new ArrayList<AstNode>();

        tokens.consume("GRANT");

        // ALL PRIVILEGES or an explicit privilege list.
        if (tokens.canConsume("ALL", "PRIVILEGES")) {
            allPrivileges = true;
        } else {
            parseGrantPrivileges(tokens, privileges);
        }
        tokens.consume("ON");

        // The grant target keyword determines the statement node type; a missing keyword
        // defaults to the (optionally explicit) TABLE form.
        if (tokens.canConsume("DOMAIN")) {
            String name = parseName(tokens);
            grantNode = nodeFactory().node(name, parentNode, TYPE_GRANT_ON_DOMAIN_STATEMENT);
        } else if (tokens.canConsume("COLLATION")) {
            String name = parseName(tokens);
            grantNode = nodeFactory().node(name, parentNode, TYPE_GRANT_ON_COLLATION_STATEMENT);
        } else if (tokens.canConsume("CHARACTER", "SET")) {
            String name = parseName(tokens);
            grantNode = nodeFactory().node(name, parentNode, TYPE_GRANT_ON_CHARACTER_SET_STATEMENT);
        } else if (tokens.canConsume("TRANSLATION")) {
            String name = parseName(tokens);
            grantNode = nodeFactory().node(name, parentNode, TYPE_GRANT_ON_TRANSLATION_STATEMENT);
        } else {
            tokens.canConsume(TABLE); // OPTIONAL
            String name = parseName(tokens);
            grantNode = nodeFactory().node(name, parentNode, TYPE_GRANT_ON_TABLE_STATEMENT);
        }

        // Attach privileges to grant node (they were created parentless by parseGrantPrivileges).
        for (AstNode node : privileges) {
            node.setParent(grantNode);
        }
        if (allPrivileges) {
            grantNode.setProperty(ALL_PRIVILEGES, allPrivileges);
        }

        tokens.consume("TO");

        // One GRANTEE child per comma-separated grantee name.
        do {
            String grantee = parseName(tokens);
            nodeFactory().node(grantee, grantNode, GRANTEE);
        } while (tokens.canConsume(COMMA));

        if (tokens.canConsume("WITH", "GRANT", "OPTION")) {
            grantNode.setProperty(WITH_GRANT_OPTION, "WITH GRANT OPTION");
        }

        markEndOfStatement(tokens, grantNode);

        return grantNode;
    }
892 
893     protected void parseGrantPrivileges( DdlTokenStream tokens,
894                                          List<AstNode> privileges ) throws ParsingException {
895         // privilege-types
896         //
897         // ALL PRIVILEGES | privilege-list
898         //
899         // privilege-list
900         //
901         // table-privilege {, table-privilege }*
902         //
903         // table-privilege
904         // SELECT
905         // | DELETE
906         // | INSERT [ <left paren> <privilege column list> <right paren> ]
907         // | UPDATE [ <left paren> <privilege column list> <right paren> ]
908         // | REFERENCES [ <left paren> <privilege column list> <right paren> ]
909         // | USAGE
910 
911         do {
912             AstNode node = null;
913 
914             if (tokens.canConsume(DELETE)) {
915                 node = nodeFactory().node("privilege");
916                 node.setProperty(TYPE, DELETE);
917             } else if (tokens.canConsume(INSERT)) {
918                 node = nodeFactory().node("privilege");
919                 node.setProperty(TYPE, INSERT);
920                 parseColumnNameList(tokens, node, TYPE_COLUMN_REFERENCE);
921             } else if (tokens.canConsume("REFERENCES")) {
922                 node = nodeFactory().node("privilege");
923                 node.setProperty(TYPE, "REFERENCES");
924                 parseColumnNameList(tokens, node, TYPE_COLUMN_REFERENCE);
925             } else if (tokens.canConsume(SELECT)) {
926                 node = nodeFactory().node("privilege");
927                 node.setProperty(TYPE, SELECT);
928             } else if (tokens.canConsume("USAGE")) {
929                 node = nodeFactory().node("privilege");
930                 node.setProperty(TYPE, "USAGE");
931             } else if (tokens.canConsume(UPDATE)) {
932                 node = nodeFactory().node("privilege");
933                 node.setProperty(TYPE, UPDATE);
934                 parseColumnNameList(tokens, node, TYPE_COLUMN_REFERENCE);
935             }
936             if (node == null) {
937                 break;
938             }
939             nodeFactory().setType(node, GRANT_PRIVILEGE);
940             privileges.add(node);
941 
942         } while (tokens.canConsume(COMMA));
943 
944     }
945 
946     protected AstNode parseRevokeStatement( DdlTokenStream tokens,
947                                             AstNode parentNode ) throws ParsingException {
948         assert tokens != null;
949         assert parentNode != null;
950         assert tokens.matches(REVOKE);
951 
952         markStartOfStatement(tokens);
953 
954         // <revoke statement> ::=
955         // REVOKE [ GRANT OPTION FOR ]
956         // <privileges>
957         // ON <object name>
958         // FROM <grantee> [ { <comma> <grantee> }... ] <drop behavior>
959 
960         AstNode revokeNode = null;
961         boolean allPrivileges = false;
962         boolean withGrantOption = false;
963 
964         List<AstNode> privileges = new ArrayList<AstNode>();
965 
966         tokens.consume("REVOKE");
967 
968         withGrantOption = tokens.canConsume("WITH", "GRANT", "OPTION");
969 
970         if (tokens.canConsume("ALL", "PRIVILEGES")) {
971             allPrivileges = true;
972         } else {
973             parseGrantPrivileges(tokens, privileges);
974         }
975         tokens.consume("ON");
976 
977         if (tokens.canConsume("DOMAIN")) {
978             String name = parseName(tokens);
979             revokeNode = nodeFactory().node(name, parentNode, TYPE_REVOKE_ON_DOMAIN_STATEMENT);
980         } else if (tokens.canConsume("COLLATION")) {
981             String name = parseName(tokens);
982             revokeNode = nodeFactory().node(name, parentNode, TYPE_REVOKE_ON_COLLATION_STATEMENT);
983         } else if (tokens.canConsume("CHARACTER", "SET")) {
984             String name = parseName(tokens);
985             revokeNode = nodeFactory().node(name, parentNode, TYPE_REVOKE_ON_CHARACTER_SET_STATEMENT);
986         } else if (tokens.canConsume("TRANSLATION")) {
987             String name = parseName(tokens);
988             revokeNode = nodeFactory().node(name, parentNode, TYPE_REVOKE_ON_TRANSLATION_STATEMENT);
989         } else {
990             tokens.canConsume(TABLE); // OPTIONAL
991             String name = parseName(tokens);
992             revokeNode = nodeFactory().node(name, parentNode, TYPE_REVOKE_ON_TABLE_STATEMENT);
993         }
994 
995         // Attach privileges to grant node
996         for (AstNode node : privileges) {
997             node.setParent(revokeNode);
998         }
999 
1000         if (allPrivileges) {
1001             revokeNode.setProperty(ALL_PRIVILEGES, allPrivileges);
1002         }
1003 
1004         tokens.consume("FROM");
1005 
1006         do {
1007             String grantee = parseName(tokens);
1008             nodeFactory().node(grantee, revokeNode, GRANTEE);
1009         } while (tokens.canConsume(COMMA));
1010 
1011         String behavior = null;
1012 
1013         if (tokens.canConsume("CASCADE")) {
1014             behavior = "CASCADE";
1015         } else if (tokens.canConsume("RESTRICT")) {
1016             behavior = "RESTRICT";
1017         }
1018 
1019         if (behavior != null) {
1020             revokeNode.setProperty(DROP_BEHAVIOR, behavior);
1021         }
1022 
1023         if (withGrantOption) {
1024             revokeNode.setProperty(WITH_GRANT_OPTION, "WITH GRANT OPTION");
1025         }
1026 
1027         markEndOfStatement(tokens, revokeNode);
1028 
1029         return revokeNode;
1030     }
1031 
1032     /**
1033      * Parses DDL CREATE DOMAIN {@link AstNode} based on SQL 92 specifications.
1034      * 
1035      * @param tokens the {@link DdlTokenStream} representing the tokenized DDL content; may not be null
1036      * @param parentNode the parent {@link AstNode} node; may not be null
1037      * @return the parsed statement node {@link AstNode}
1038      * @throws ParsingException
1039      */
1040     protected AstNode parseCreateDomainStatement( DdlTokenStream tokens,
1041                                                   AstNode parentNode ) throws ParsingException {
1042         assert tokens != null;
1043         assert parentNode != null;
1044 
1045         // <domain definition> ::=
1046         // CREATE DOMAIN <domain name>
1047         // [ AS ] <data type>
1048         // [ <default clause> ]
1049         // [ <domain constraint>... ]
1050         // [ <collate clause> ]
1051 
1052         markStartOfStatement(tokens);
1053 
1054         tokens.consume(STMT_CREATE_DOMAIN);
1055 
1056         String name = parseName(tokens);
1057 
1058         AstNode node = nodeFactory().node(name, parentNode, TYPE_CREATE_DOMAIN_STATEMENT);
1059 
1060         parseUntilTerminator(tokens);
1061 
1062         markEndOfStatement(tokens, node);
1063 
1064         return node;
1065     }
1066 
1067     /**
1068      * Parses DDL CREATE COLLATION {@link AstNode} based on SQL 92 specifications.
1069      * 
1070      * @param tokens the {@link DdlTokenStream} representing the tokenized DDL content; may not be null
1071      * @param parentNode the parent {@link AstNode} node; may not be null
1072      * @return the parsed statement node {@link AstNode}
1073      * @throws ParsingException
1074      */
1075     protected AstNode parseCreateCollationStatement( DdlTokenStream tokens,
1076                                                      AstNode parentNode ) throws ParsingException {
1077         assert tokens != null;
1078         assert parentNode != null;
1079 
1080         markStartOfStatement(tokens);
1081 
1082         tokens.consume(STMT_CREATE_COLLATION);
1083 
1084         String name = parseName(tokens);
1085 
1086         AstNode node = nodeFactory().node(name, parentNode, TYPE_CREATE_COLLATION_STATEMENT);
1087 
1088         parseUntilTerminator(tokens);
1089 
1090         markEndOfStatement(tokens, node);
1091 
1092         return node;
1093     }
1094 
1095     /**
1096      * Parses DDL CREATE TRANSLATION {@link AstNode} based on SQL 92 specifications.
1097      * 
1098      * @param tokens the {@link DdlTokenStream} representing the tokenized DDL content; may not be null
1099      * @param parentNode the parent {@link AstNode} node; may not be null
1100      * @return the parsed statement node {@link AstNode}
1101      * @throws ParsingException
1102      */
1103     protected AstNode parseCreateTranslationStatement( DdlTokenStream tokens,
1104                                                        AstNode parentNode ) throws ParsingException {
1105         assert tokens != null;
1106         assert parentNode != null;
1107 
1108         markStartOfStatement(tokens);
1109 
1110         tokens.consume(STMT_CREATE_TRANSLATION);
1111 
1112         String name = parseName(tokens);
1113 
1114         AstNode node = nodeFactory().node(name, parentNode, TYPE_CREATE_TRANSLATION_STATEMENT);
1115 
1116         parseUntilTerminator(tokens);
1117 
1118         markEndOfStatement(tokens, node);
1119 
1120         return node;
1121     }
1122 
1123     /**
1124      * Parses DDL CREATE CHARACTER SET {@link AstNode} based on SQL 92 specifications.
1125      * 
1126      * @param tokens the {@link DdlTokenStream} representing the tokenized DDL content; may not be null
1127      * @param parentNode the parent {@link AstNode} node; may not be null
1128      * @return the parsed statement node {@link AstNode}
1129      * @throws ParsingException
1130      */
1131     protected AstNode parseCreateCharacterSetStatement( DdlTokenStream tokens,
1132                                                         AstNode parentNode ) throws ParsingException {
1133         assert tokens != null;
1134         assert parentNode != null;
1135 
1136         markStartOfStatement(tokens);
1137 
1138         tokens.consume(STMT_CREATE_CHARACTER_SET);
1139 
1140         String name = parseName(tokens);
1141 
1142         AstNode node = nodeFactory().node(name, parentNode, TYPE_CREATE_CHARACTER_SET_STATEMENT);
1143 
1144         parseUntilTerminator(tokens);
1145 
1146         markEndOfStatement(tokens, node);
1147 
1148         return node;
1149     }
1150 
    /**
     * Catch-all method to parse unknown (not registered or handled by sub-classes) statements.
     * <p>
     * The default implementation consumes nothing and returns null; subclasses may override it to
     * recognize dialect-specific statements.
     * 
     * @param tokens the {@link DdlTokenStream} representing the tokenized DDL content; may not be null
     * @param parentNode the parent {@link AstNode} node; may not be null
     * @return the parsed {@link AstNode}, or null if no custom statement was recognized
     * @throws ParsingException
     */
    protected AstNode parseCustomStatement( DdlTokenStream tokens,
                                            AstNode parentNode ) throws ParsingException {
        assert tokens != null;
        assert parentNode != null;

        // DEFAULT DOES NOTHING
        // Subclasses can implement additional parsing

        return null;
    }
1169 
1170     // ===========================================================================================================================
1171     // PARSING CREATE TABLE
1172     // ===========================================================================================================================
1173 
1174     /**
1175      * Parses DDL CREATE TABLE {@link AstNode} based on SQL 92 specifications.
1176      * 
1177      * @param tokens the {@link DdlTokenStream} representing the tokenized DDL content; may not be null
1178      * @param parentNode the parent {@link AstNode} node; may not be null
1179      * @return the parsed CREATE TABLE {@link AstNode}
1180      * @throws ParsingException
1181      */
1182     protected AstNode parseCreateTableStatement( DdlTokenStream tokens,
1183                                                  AstNode parentNode ) throws ParsingException {
1184         assert tokens != null;
1185         assert parentNode != null;
1186 
1187         markStartOfStatement(tokens);
1188 
1189         tokens.consume(CREATE); // CREATE
1190         String temporaryValue = null;
1191         if (tokens.canConsume("LOCAL")) {
1192             tokens.consume("TEMPORARY");
1193             temporaryValue = "LOCAL";
1194         } else if (tokens.canConsume("GLOBAL")) {
1195             tokens.consume("TEMPORARY");
1196             temporaryValue = "GLOBAL";
1197         }
1198 
1199         tokens.consume(TABLE);
1200 
1201         String tableName = parseName(tokens);
1202 
1203         AstNode tableNode = nodeFactory().node(tableName, parentNode, TYPE_CREATE_TABLE_STATEMENT);
1204 
1205         if (temporaryValue != null) {
1206             tableNode.setProperty(TEMPORARY, temporaryValue);
1207         }
1208 
1209         // System.out.println("  >> PARSING CREATE TABLE >>  Name = " + tableName);
1210         parseColumnsAndConstraints(tokens, tableNode);
1211 
1212         parseCreateTableOptions(tokens, tableNode);
1213 
1214         markEndOfStatement(tokens, tableNode);
1215 
1216         return tableNode;
1217     }
1218 
    /**
     * Parses any table options trailing the column/constraint list of a CREATE TABLE statement,
     * delegating each option to {@link #parseNextCreateTableOption} until none remain.
     * 
     * @param tokens the {@link DdlTokenStream} representing the tokenized DDL content; may not be null
     * @param tableNode the CREATE TABLE statement node the options are attached to; may not be null
     * @throws ParsingException
     */
    protected void parseCreateTableOptions( DdlTokenStream tokens,
                                            AstNode tableNode ) throws ParsingException {
        assert tokens != null;
        assert tableNode != null;

        // [ ON COMMIT { PRESERVE ROWS | DELETE ROWS | DROP } ]
        while (areNextTokensCreateTableOptions(tokens)) {
            parseNextCreateTableOption(tokens, tableNode);
        }

    }
1230 
1231     protected void parseNextCreateTableOption( DdlTokenStream tokens,
1232                                                AstNode tableNode ) throws ParsingException {
1233         assert tokens != null;
1234         assert tableNode != null;
1235 
1236         if (tokens.canConsume("ON", "COMMIT")) {
1237             String option = "";
1238             // PRESERVE ROWS | DELETE ROWS | DROP
1239             if (tokens.canConsume("PRESERVE", "ROWS")) {
1240                 option = option + "ON COMMIT PRESERVE ROWS";
1241             } else if (tokens.canConsume("DELETE", "ROWS")) {
1242                 option = option + "ON COMMIT DELETE ROWS";
1243             } else if (tokens.canConsume("DROP")) {
1244                 option = option + "ON COMMIT DROP";
1245             }
1246 
1247             if (option.length() > 0) {
1248                 AstNode tableOption = nodeFactory().node("option", tableNode, TYPE_STATEMENT_OPTION);
1249                 tableOption.setProperty(VALUE, option);
1250             }
1251         }
1252     }
1253 
1254     protected boolean areNextTokensCreateTableOptions( DdlTokenStream tokens ) throws ParsingException {
1255         assert tokens != null;
1256 
1257         boolean result = false;
1258 
1259         // [ ON COMMIT { PRESERVE ROWS | DELETE ROWS | DROP } ]
1260         if (tokens.matches("ON", "COMMIT")) {
1261             result = true;
1262         }
1263 
1264         return result;
1265     }
1266 
    /**
     * Utility method to parse columns and table constraints within a CREATE TABLE statement. Method first parses and
     * copies the text enclosed within the bracketed "( xxxx  )" statement. Then the individual column definition or table
     * constraint definition sub-statements are parsed assuming they are comma delimited.
     * 
     * @param tokens the {@link DdlTokenStream} representing the tokenized DDL content; may not be null
     * @param tableNode the CREATE TABLE statement node receiving column and constraint children; may not be null
     * @throws ParsingException
     */
    protected void parseColumnsAndConstraints( DdlTokenStream tokens,
                                               AstNode tableNode ) throws ParsingException {
        assert tokens != null;
        assert tableNode != null;

        // No parenthesized element list => nothing to parse.
        if (!tokens.matches(L_PAREN)) {
            return;
        }

        // Copy the text between the parentheses and re-tokenize it so the element list can be
        // parsed independently of the outer statement's token stream.
        String tableElementString = getTableElementsString(tokens, false);

        DdlTokenStream localTokens = new DdlTokenStream(tableElementString, DdlTokenStream.ddlTokenizer(false), false);

        localTokens.start();

        // Tokens that start neither a table constraint nor a column definition are collected and
        // reported as a WARNING problem below, rather than failing the parse.
        StringBuffer unusedTokensSB = new StringBuffer();
        do {
            if (isTableConstraint(localTokens)) {
                parseTableConstraint(localTokens, tableNode, false);
            } else if (isColumnDefinitionStart(localTokens)) {
                parseColumnDefinition(localTokens, tableNode, false);
            } else {
                unusedTokensSB.append(SPACE).append(localTokens.consume());
            }
        } while (localTokens.canConsume(COMMA));

        if (unusedTokensSB.length() > 0) {
            String msg = DdlSequencerI18n.unusedTokensParsingColumnsAndConstraints.text(tableNode.getProperty(NAME));
            DdlParserProblem problem = new DdlParserProblem(DdlConstants.Problems.WARNING, Position.EMPTY_CONTENT_POSITION, msg);
            problem.setUnusedSource(unusedTokensSB.toString());
            addProblem(problem, tableNode);
        }

    }
1310 
    /**
     * Utility method to parse the actual column definition. SQL-92 Structural Specification: <column definition> ::= <column name>
     * { <data type> | <domain name> } [ <default clause> ] [ <column constraint definition>... ] [ <collate clause> ]
     * <p>
     * Parsing of this column's clauses stops at the next comma (used by callers that parse a
     * comma-delimited element list).
     * 
     * @param tokens the {@link DdlTokenStream} representing the tokenized DDL content; may not be null
     * @param tableNode the table statement node the column definition is added to; may not be null
     * @param isAlterTable true if in-line constraint is part of add column in alter table statement
     * @throws ParsingException
     */
    protected void parseColumnDefinition( DdlTokenStream tokens,
                                          AstNode tableNode,
                                          boolean isAlterTable ) throws ParsingException {
        assert tokens != null;
        assert tableNode != null;

        tokens.canConsume("COLUMN"); // "COLUMN" keyword is optional
        String columnName = parseName(tokens);
        DataType datatype = getDatatypeParser().parse(tokens);

        AstNode columnNode = nodeFactory().node(columnName, tableNode, TYPE_COLUMN_DEFINITION);

        getDatatypeParser().setPropertiesOnNode(columnNode, datatype);

        // Now clauses and constraints can be defined in any order, so we need to keep parsing until we get to a comma.
        // Tokens matching none of the recognized clauses are collected and reported as a WARNING below.
        StringBuffer unusedTokensSB = new StringBuffer();

        while (tokens.hasNext() && !tokens.matches(COMMA)) {
            boolean parsedDefaultClause = parseDefaultClause(tokens, columnNode);
            if (!parsedDefaultClause) {
                boolean parsedCollate = parseCollateClause(tokens, columnNode);
                boolean parsedConstraint = parseColumnConstraint(tokens, columnNode, isAlterTable);
                if (!parsedCollate && !parsedConstraint) {
                    // THIS IS AN ERROR. NOTHING FOUND.
                    // NEED TO absorb tokens
                    unusedTokensSB.append(SPACE).append(tokens.consume());
                }
            }
            tokens.canConsume(DdlTokenizer.COMMENT); // swallow any trailing comment token
        }

        if (unusedTokensSB.length() > 0) {
            String msg = DdlSequencerI18n.unusedTokensParsingColumnDefinition.text(tableNode.getName());
            DdlParserProblem problem = new DdlParserProblem(Problems.WARNING, Position.EMPTY_CONTENT_POSITION, msg);
            problem.setUnusedSource(unusedTokensSB.toString());
            addProblem(problem, tableNode);
        }
    }
1358 
1359     /**
1360      * Utility method to parse the actual column definition. SQL-92 Structural Specification <column definition> ::= <column name>
1361      * { <data type> | <domain name> } [ <default clause> ] [ <column constraint definition>... ] [ <collate clause> ]
1362      * 
1363      * @param tokens the {@link DdlTokenStream} representing the tokenized DDL content; may not be null
1364      * @param tableNode the alter or create table statement node; may not be null
1365      * @param isAlterTable true if in-line constraint is part of add column in alter table statement
1366      * @throws ParsingException
1367      */
1368     protected void parseSingleTerminatedColumnDefinition( DdlTokenStream tokens,
1369                                                           AstNode tableNode,
1370                                                           boolean isAlterTable ) throws ParsingException {
1371         assert tokens != null;
1372         assert tableNode != null;
1373 
1374         tokens.canConsume("COLUMN");
1375         String columnName = parseName(tokens);
1376         DataType datatype = getDatatypeParser().parse(tokens);
1377 
1378         AstNode columnNode = nodeFactory().node(columnName, tableNode, TYPE_COLUMN_DEFINITION);
1379 
1380         getDatatypeParser().setPropertiesOnNode(columnNode, datatype);
1381         // Now clauses and constraints can be defined in any order, so we need to keep parsing until we get to a comma, a
1382         // terminator
1383         // or a new statement
1384 
1385         while (tokens.hasNext() && !tokens.matches(getTerminator()) && !tokens.matches(DdlTokenizer.STATEMENT_KEY)) {
1386             boolean parsedDefaultClause = parseDefaultClause(tokens, columnNode);
1387             if (!parsedDefaultClause) {
1388                 parseCollateClause(tokens, columnNode);
1389                 parseColumnConstraint(tokens, columnNode, isAlterTable);
1390             }
1391             consumeComment(tokens);
1392             if (tokens.canConsume(COMMA)) break;
1393         }
1394     }
1395 
1396     /**
1397      * Method which extracts the table element string from a CREATE TABLE statement.
1398      * 
1399      * @param tokens the {@link DdlTokenStream} representing the tokenized DDL content; may not be null
1400      * @param useTerminator
1401      * @return the parsed table elements String.
1402      * @throws ParsingException
1403      */
1404     protected String getTableElementsString( DdlTokenStream tokens,
1405                                              boolean useTerminator ) throws ParsingException {
1406         assert tokens != null;
1407 
1408         StringBuffer sb = new StringBuffer(100);
1409 
1410         if (useTerminator) {
1411             while (!isTerminator(tokens)) {
1412                 sb.append(SPACE).append(tokens.consume());
1413             }
1414         } else {
1415             // Assume we start with open parenthesis '(', then we can count on walking through ALL tokens until we find the close
1416             // parenthesis ')'. If there are intermediate parenthesis, we can count on them being pairs.
1417             tokens.consume(L_PAREN); // EXPECTED
1418 
1419             int iParen = 0;
1420             while (tokens.hasNext()) {
1421                 if (tokens.matches(L_PAREN)) {
1422                     iParen++;
1423                 } else if (tokens.matches(R_PAREN)) {
1424                     if (iParen == 0) {
1425                         tokens.consume(R_PAREN);
1426                         break;
1427                     }
1428                     iParen--;
1429                 }
1430                 if (isComment(tokens)) {
1431                     tokens.consume();
1432                 } else {
1433                     sb.append(SPACE).append(tokens.consume());
1434                 }
1435             }
1436         }
1437 
1438         return sb.toString();
1439 
1440     }
1441 
1442     /**
1443      * Simple method which parses, consumes and returns a string representing text found between parenthesis (i.e. '()') If
1444      * parents don't exist, method returns NULL;
1445      * 
1446      * @param tokens the {@link DdlTokenStream} representing the tokenized DDL content; may not be null
1447      * @param includeParens
1448      * @return the parenthesis bounded text or null if no parens.
1449      * @throws ParsingException
1450      */
1451     protected String consumeParenBoundedTokens( DdlTokenStream tokens,
1452                                                 boolean includeParens ) throws ParsingException {
1453         assert tokens != null;
1454 
1455         // Assume we start with open parenthesis '(', then we can count on walking through ALL tokens until we find the close
1456         // parenthesis ')'. If there are intermediate parenthesis, we can count on them being pairs.
1457         if (tokens.canConsume(L_PAREN)) { // EXPECTED
1458             StringBuffer sb = new StringBuffer(100);
1459             if (includeParens) {
1460                 sb.append(L_PAREN);
1461             }
1462             int iParen = 0;
1463             while (tokens.hasNext()) {
1464                 if (tokens.matches(L_PAREN)) {
1465                     iParen++;
1466                 } else if (tokens.matches(R_PAREN)) {
1467                     if (iParen == 0) {
1468                         tokens.consume(R_PAREN);
1469                         if (includeParens) {
1470                             sb.append(SPACE).append(R_PAREN);
1471                         }
1472                         break;
1473                     }
1474                     iParen--;
1475                 }
1476                 if (isComment(tokens)) {
1477                     tokens.consume();
1478                 } else {
1479                     sb.append(SPACE).append(tokens.consume());
1480                 }
1481             }
1482             return sb.toString();
1483         }
1484 
1485         return null;
1486     }
1487 
1488     /**
1489      * Parses an in-line column constraint including NULLABLE value, UNIQUE, PRIMARY KEY and REFERENCES to a Foreign Key. The
1490      * values for the constraint are set as properties on the input columnNode.
1491      * 
1492      * @param tokens the {@link DdlTokenStream} representing the tokenized DDL content; may not be null
1493      * @param columnNode the column definition being created; may not be null
1494      * @param isAlterTable true if in-line constraint is part of add column in alter table statement
1495      * @return true if parsed a constraint, else false.
1496      * @throws ParsingException
1497      */
1498     protected boolean parseColumnConstraint( DdlTokenStream tokens,
1499                                              AstNode columnNode,
1500                                              boolean isAlterTable ) throws ParsingException {
1501         assert tokens != null;
1502         assert columnNode != null;
1503 
1504         Name mixinType = isAlterTable ? TYPE_ADD_TABLE_CONSTRAINT_DEFINITION : TYPE_TABLE_CONSTRAINT;
1505 
1506         boolean result = false;
1507 
1508         // : [ CONSTRAINT <constraint name> ] <column constraint> [ <constraint attributes> ]
1509         // <column constraint> ::= NOT NULL | <unique specification> | <references specification> | <check constraint definition>
1510         // <unique specification> ::= UNIQUE | PRIMARY KEY
1511         // <references specification> ::= REFERENCES <referenced table and columns> [ MATCH <match type> ] [ <referential
1512         // triggered action> ]
1513         // <check constraint definition> ::= CHECK <left paren> <search condition> <right paren>
1514         String colName = columnNode.getName().getString();
1515 
1516         if (tokens.canConsume("NULL")) {
1517             columnNode.setProperty(NULLABLE, "NULL");
1518             result = true;
1519         } else if (tokens.canConsume("NOT", "NULL")) {
1520             columnNode.setProperty(NULLABLE, "NOT NULL");
1521             result = true;
1522         } else if (tokens.matches("CONSTRAINT")) {
1523             result = true;
1524             tokens.consume("CONSTRAINT");
1525             String constraintName = parseName(tokens);
1526             AstNode constraintNode = nodeFactory().node(constraintName, columnNode.getParent(), mixinType);
1527 
1528             if (tokens.matches("UNIQUE")) {
1529                 // CONSTRAINT P_KEY_2a UNIQUE (PERMISSIONUID)
1530                 tokens.consume("UNIQUE"); // UNIQUE
1531 
1532                 constraintNode.setProperty(CONSTRAINT_TYPE, UNIQUE);
1533 
1534                 // CONSUME COLUMNS
1535                 parseColumnNameList(tokens, constraintNode, TYPE_COLUMN_REFERENCE);
1536 
1537                 parseConstraintAttributes(tokens, constraintNode);
1538             } else if (tokens.matches("PRIMARY", "KEY")) {
1539                 // CONSTRAINT U_KEY_2a PRIMARY KEY (PERMISSIONUID)
1540                 tokens.consume("PRIMARY"); // PRIMARY
1541                 tokens.consume("KEY"); // KEY
1542 
1543                 constraintNode.setProperty(CONSTRAINT_TYPE, PRIMARY_KEY);
1544 
1545                 // CONSUME COLUMNS
1546                 parseColumnNameList(tokens, constraintNode, TYPE_COLUMN_REFERENCE);
1547 
1548                 parseConstraintAttributes(tokens, constraintNode);
1549             } else if (tokens.matches("REFERENCES")) {
1550                 // References in an in-line constraint is really a foreign key definition
1551                 // EXAMPLE:
1552                 // COLUMN_NAME DATATYPE NOT NULL DEFAULT (0) CONSTRAINT SOME_FK_NAME REFERENCES SOME_TABLE_NAME (SOME_COLUMN_NAME,
1553                 // ...)
1554 
1555                 constraintNode.setProperty(CONSTRAINT_TYPE, FOREIGN_KEY);
1556 
1557                 nodeFactory().node(colName, constraintNode, TYPE_COLUMN_REFERENCE);
1558 
1559                 parseReferences(tokens, constraintNode);
1560 
1561                 parseConstraintAttributes(tokens, constraintNode);
1562             }
1563         } else if (tokens.matches("UNIQUE")) {
1564             result = true;
1565             tokens.consume("UNIQUE");
1566             // Unique constraint for this particular column
1567             String uc_name = "UC_1"; // UNIQUE CONSTRAINT NAME
1568 
1569             AstNode constraintNode = nodeFactory().node(uc_name, columnNode.getParent(), mixinType);
1570 
1571             constraintNode.setProperty(CONSTRAINT_TYPE, UNIQUE);
1572 
1573             nodeFactory().node(colName, constraintNode, TYPE_COLUMN_REFERENCE);
1574 
1575         } else if (tokens.matches("PRIMARY", "KEY")) {
1576             result = true;
1577             tokens.consume("PRIMARY", "KEY");
1578             // PRIMARY KEY for this particular column
1579             String pk_name = "PK_1"; // PRIMARY KEY NAME
1580 
1581             AstNode constraintNode = nodeFactory().node(pk_name, columnNode.getParent(), mixinType);
1582 
1583             constraintNode.setProperty(CONSTRAINT_TYPE, PRIMARY_KEY);
1584 
1585             nodeFactory().node(colName, constraintNode, TYPE_COLUMN_REFERENCE);
1586 
1587         } else if (tokens.matches("FOREIGN", "KEY")) {
1588             result = true;
1589             tokens.consume("FOREIGN", "KEY");
1590             // This is an auto-named FK
1591             // References in an in-line constraint is really a foreign key definition
1592             // EXAMPLE:
1593             // COLUMN_NAME DATATYPE NOT NULL DEFAULT (0) FOREIGN KEY MY_FK_NAME REFERENCES SOME_TABLE_NAME (SOME_COLUMN_NAME, ...)
1594 
1595             String constraintName = parseName(tokens);
1596 
1597             AstNode constraintNode = nodeFactory().node(constraintName, columnNode.getParent(), mixinType);
1598 
1599             constraintNode.setProperty(CONSTRAINT_TYPE, FOREIGN_KEY);
1600 
1601             nodeFactory().node(colName, constraintNode, TYPE_COLUMN_REFERENCE);
1602 
1603             parseReferences(tokens, constraintNode);
1604             parseConstraintAttributes(tokens, constraintNode);
1605         } else if (tokens.matches("REFERENCES")) {
1606             result = true;
1607             // This is an auto-named FK
1608             // References in an in-line constraint is really a foreign key definition
1609             // EXAMPLE:
1610             // COLUMN_NAME DATATYPE NOT NULL DEFAULT (0) REFERENCES SOME_TABLE_NAME (SOME_COLUMN_NAME, ...)
1611 
1612             String constraintName = "FK_1";
1613 
1614             AstNode constraintNode = nodeFactory().node(constraintName, columnNode.getParent(), mixinType);
1615 
1616             constraintNode.setProperty(CONSTRAINT_TYPE, FOREIGN_KEY);
1617 
1618             nodeFactory().node(colName, constraintNode, TYPE_COLUMN_REFERENCE);
1619 
1620             parseReferences(tokens, constraintNode);
1621             parseConstraintAttributes(tokens, constraintNode);
1622         } else if (tokens.matches("CHECK")) {
1623             result = true;
1624             tokens.consume("CHECK"); // CHECK
1625 
1626             String ck_name = "CHECK_1";
1627 
1628             AstNode constraintNode = nodeFactory().node(ck_name, columnNode.getParent(), mixinType);
1629             constraintNode.setProperty(NAME, ck_name);
1630             constraintNode.setProperty(CONSTRAINT_TYPE, CHECK);
1631 
1632             String clause = consumeParenBoundedTokens(tokens, true);
1633             constraintNode.setProperty(CHECK_SEARCH_CONDITION, clause);
1634         }
1635 
1636         return result;
1637     }
1638 
    /**
     * Parses a full table constraint definition including the "CONSTRAINT" token. Examples: CONSTRAINT P_KEY_2a UNIQUE
     * (PERMISSIONUID)
     * <p>
     * Handles both unnamed constraints (PRIMARY KEY / FOREIGN KEY / UNIQUE with placeholder names) and named constraints
     * (CONSTRAINT name followed by UNIQUE, PRIMARY KEY, FOREIGN KEY or CHECK). Each recognized constraint becomes a child node
     * of tableNode.
     * 
     * @param tokens the {@link DdlTokenStream} representing the tokenized DDL content; may not be null
     * @param tableNode the create or alter table statement node receiving the constraint node; may not be null
     * @param isAlterTable true if in-line constraint is part of add column in alter table statement
     * @throws ParsingException
     */
    protected void parseTableConstraint( DdlTokenStream tokens,
                                         AstNode tableNode,
                                         boolean isAlterTable ) throws ParsingException {
        assert tokens != null;
        assert tableNode != null;

        Name mixinType = isAlterTable ? TYPE_ADD_TABLE_CONSTRAINT_DEFINITION : TYPE_TABLE_CONSTRAINT;

        /*
        <table constraint definition> ::=
            [ <constraint name definition> ]
            <table constraint> [ <constraint attributes> ]
        
        <table constraint> ::=
              <unique constraint definition>
            | <referential constraint definition>
            | <check constraint definition>
            
        <constraint attributes> ::=
              <constraint check time> [ [ NOT ] DEFERRABLE ]
            | [ NOT ] DEFERRABLE [ <constraint check time> ]
        
        <unique constraint definition> ::=
            <unique specification>
              <left paren> <unique column list> <right paren>
        
        <unique column list> ::= <column name list>
        
        <referential constraint definition> ::=
            FOREIGN KEY
                <left paren> <referencing columns> <right paren>
              <references specification>
        
        <referencing columns> ::=
            <reference column list>
            
        <constraint attributes> ::=
              <constraint check time> [ [ NOT ] DEFERRABLE ]
            | [ NOT ] DEFERRABLE [ <constraint check time> ]
        
        <constraint check time> ::=
              INITIALLY DEFERRED
            | INITIALLY IMMEDIATE
            
        <check constraint definition> ::=
            CHECK
                <left paren> <search condition> <right paren>
         */
        consumeComment(tokens);

        if ((tokens.matches("PRIMARY", "KEY")) || (tokens.matches("FOREIGN", "KEY")) || (tokens.matches("UNIQUE"))) {

            // This is the case where the PK/FK/UK is NOT NAMED; placeholder names (UC_1, PK_1, FK_1) are used instead
            if (tokens.matches("UNIQUE")) {
                String uc_name = "UC_1"; // UNIQUE CONSTRAINT NAME
                tokens.consume(); // UNIQUE

                AstNode constraintNode = nodeFactory().node(uc_name, tableNode, mixinType);
                constraintNode.setProperty(CONSTRAINT_TYPE, UNIQUE);

                // CONSUME COLUMNS
                parseColumnNameList(tokens, constraintNode, TYPE_COLUMN_REFERENCE);

                parseConstraintAttributes(tokens, constraintNode);

                consumeComment(tokens);
            } else if (tokens.matches("PRIMARY", "KEY")) {
                String pk_name = "PK_1"; // PRIMARY KEY NAME
                tokens.consume("PRIMARY", "KEY"); // PRIMARY KEY

                AstNode constraintNode = nodeFactory().node(pk_name, tableNode, mixinType);
                constraintNode.setProperty(CONSTRAINT_TYPE, PRIMARY_KEY);

                // CONSUME COLUMNS
                parseColumnNameList(tokens, constraintNode, TYPE_COLUMN_REFERENCE);

                parseConstraintAttributes(tokens, constraintNode);

                consumeComment(tokens);
            } else if (tokens.matches("FOREIGN", "KEY")) {
                String fk_name = "FK_1"; // FOREIGN KEY NAME
                tokens.consume("FOREIGN", "KEY"); // FOREIGN KEY

                if (!tokens.matches(L_PAREN)) {
                    // Assume the FK is Named here (non-standard "FOREIGN KEY name (...)" form)
                    fk_name = tokens.consume();
                }

                AstNode constraintNode = nodeFactory().node(fk_name, tableNode, mixinType);
                constraintNode.setProperty(CONSTRAINT_TYPE, FOREIGN_KEY);

                // CONSUME COLUMNS
                parseColumnNameList(tokens, constraintNode, TYPE_COLUMN_REFERENCE);

                // Parse the references to table and columns
                parseReferences(tokens, constraintNode);

                parseConstraintAttributes(tokens, constraintNode);

                consumeComment(tokens);
            }
        } else if (tokens.matches("CONSTRAINT", DdlTokenStream.ANY_VALUE, "UNIQUE")) {
            // CONSTRAINT P_KEY_2a UNIQUE (PERMISSIONUID)
            tokens.consume(); // CONSTRAINT
            String uc_name = parseName(tokens); // UNIQUE CONSTRAINT NAME
            tokens.consume("UNIQUE"); // UNIQUE

            AstNode constraintNode = nodeFactory().node(uc_name, tableNode, mixinType);
            constraintNode.setProperty(CONSTRAINT_TYPE, UNIQUE);

            // CONSUME COLUMNS
            parseColumnNameList(tokens, constraintNode, TYPE_COLUMN_REFERENCE);

            parseConstraintAttributes(tokens, constraintNode);

            consumeComment(tokens);
        } else if (tokens.matches("CONSTRAINT", DdlTokenStream.ANY_VALUE, "PRIMARY", "KEY")) {
            // CONSTRAINT U_KEY_2a PRIMARY KEY (PERMISSIONUID)
            tokens.consume("CONSTRAINT"); // CONSTRAINT
            String pk_name = parseName(tokens); // PRIMARY KEY NAME
            tokens.consume("PRIMARY", "KEY"); // PRIMARY KEY

            AstNode constraintNode = nodeFactory().node(pk_name, tableNode, mixinType);
            constraintNode.setProperty(CONSTRAINT_TYPE, PRIMARY_KEY);

            // CONSUME COLUMNS
            parseColumnNameList(tokens, constraintNode, TYPE_COLUMN_REFERENCE);

            parseConstraintAttributes(tokens, constraintNode);

            consumeComment(tokens);

        } else if (tokens.matches("CONSTRAINT", DdlTokenStream.ANY_VALUE, "FOREIGN", "KEY")) {
            // CONSTRAINT F_KEY_2a FOREIGN KEY (PERMISSIONUID)
            tokens.consume("CONSTRAINT"); // CONSTRAINT
            String fk_name = parseName(tokens); // FOREIGN KEY NAME
            tokens.consume("FOREIGN", "KEY"); // FOREIGN KEY

            AstNode constraintNode = nodeFactory().node(fk_name, tableNode, mixinType);

            constraintNode.setProperty(CONSTRAINT_TYPE, FOREIGN_KEY);

            // CONSUME COLUMNS
            parseColumnNameList(tokens, constraintNode, TYPE_COLUMN_REFERENCE);

            // Parse the references to table and columns
            parseReferences(tokens, constraintNode);

            parseConstraintAttributes(tokens, constraintNode);

            consumeComment(tokens);

        } else if (tokens.matches("CONSTRAINT", DdlTokenStream.ANY_VALUE, "CHECK")) {
            // CONSTRAINT zipchk CHECK (char_length(zipcode) = 5);
            tokens.consume("CONSTRAINT"); // CONSTRAINT
            String ck_name = parseName(tokens); // NAME
            tokens.consume("CHECK"); // CHECK

            AstNode constraintNode = nodeFactory().node(ck_name, tableNode, mixinType);
            constraintNode.setProperty(CONSTRAINT_TYPE, CHECK);

            // The raw search-condition text (including parens) is stored as a property; it is not parsed further
            String clause = consumeParenBoundedTokens(tokens, true);
            constraintNode.setProperty(CHECK_SEARCH_CONDITION, clause);
        }

    }
1815 
    /**
     * Parses the attributes associated with any in-line column constraint definition or a table constraint definition.
     * <p>
     * SQL-92 allows the constraint check time (INITIALLY DEFERRED | INITIALLY IMMEDIATE) to appear either before or after the
     * [ NOT ] DEFERRABLE clause, which is why the INITIALLY checks are deliberately repeated after the DEFERRABLE checks below —
     * this is NOT accidental duplication. Each recognized attribute is recorded as a CONSTRAINT_ATTRIBUTE child node of
     * constraintNode with its text in PROPERTY_VALUE.
     * 
     * @param tokens the {@link DdlTokenStream} representing the tokenized DDL content; may not be null
     * @param constraintNode the constraint node receiving the attribute nodes; may not be null
     * @throws ParsingException
     */
    protected void parseConstraintAttributes( DdlTokenStream tokens,
                                              AstNode constraintNode ) throws ParsingException {
        assert tokens != null;
        assert constraintNode != null;

        // Now we need to check for constraint attributes:

        // <constraint attributes> ::=
        // <constraint check time> [ [ NOT ] DEFERRABLE ]
        // | [ NOT ] DEFERRABLE [ <constraint check time> ]
        //
        // <constraint check time> ::=
        // INITIALLY DEFERRED
        // | INITIALLY IMMEDIATE

        // EXAMPLE : foreign key (contact_id) references contact (contact_id) on delete cascade INITIALLY DEFERRED,

        // Check-time BEFORE the DEFERRABLE clause:
        if (tokens.canConsume("INITIALLY", "DEFERRED")) {
            AstNode attrNode = nodeFactory().node("CONSTRAINT_ATTRIBUTE", constraintNode, CONSTRAINT_ATTRIBUTE_TYPE);
            attrNode.setProperty(PROPERTY_VALUE, "INITIALLY DEFERRED");
        }
        if (tokens.canConsume("INITIALLY", "IMMEDIATE")) {
            AstNode attrNode = nodeFactory().node("CONSTRAINT_ATTRIBUTE", constraintNode, CONSTRAINT_ATTRIBUTE_TYPE);
            attrNode.setProperty(PROPERTY_VALUE, "INITIALLY IMMEDIATE");
        }
        // NOT DEFERRABLE must be checked before bare DEFERRABLE or the NOT would be left unconsumed
        if (tokens.canConsume("NOT", "DEFERRABLE")) {
            AstNode attrNode = nodeFactory().node("CONSTRAINT_ATTRIBUTE", constraintNode, CONSTRAINT_ATTRIBUTE_TYPE);
            attrNode.setProperty(PROPERTY_VALUE, "NOT DEFERRABLE");
        }
        if (tokens.canConsume("DEFERRABLE")) {
            AstNode attrNode = nodeFactory().node("CONSTRAINT_ATTRIBUTE", constraintNode, CONSTRAINT_ATTRIBUTE_TYPE);
            attrNode.setProperty(PROPERTY_VALUE, "DEFERRABLE");
        }
        // Check-time AFTER the DEFERRABLE clause (grammar permits either ordering):
        if (tokens.canConsume("INITIALLY", "DEFERRED")) {
            AstNode attrNode = nodeFactory().node("CONSTRAINT_ATTRIBUTE", constraintNode, CONSTRAINT_ATTRIBUTE_TYPE);
            attrNode.setProperty(PROPERTY_VALUE, "INITIALLY DEFERRED");
        }
        if (tokens.canConsume("INITIALLY", "IMMEDIATE")) {
            AstNode attrNode = nodeFactory().node("CONSTRAINT_ATTRIBUTE", constraintNode, CONSTRAINT_ATTRIBUTE_TYPE);
            attrNode.setProperty(PROPERTY_VALUE, "INITIALLY IMMEDIATE");
        }
    }
1864 
1865     protected void parseReferences( DdlTokenStream tokens,
1866                                     AstNode constraintNode ) throws ParsingException {
1867         assert tokens != null;
1868         assert constraintNode != null;
1869 
1870         if (tokens.matches("REFERENCES")) {
1871             tokens.consume("REFERENCES");
1872             // 'REFERENCES' referencedTableAndColumns matchType? referentialTriggeredAction?;
1873             String tableName = parseName(tokens);
1874 
1875             nodeFactory().node(tableName, constraintNode, TYPE_TABLE_REFERENCE);
1876 
1877             parseColumnNameList(tokens, constraintNode, TYPE_FK_COLUMN_REFERENCE);
1878 
1879             tokens.canConsume("MATCH", "FULL");
1880             tokens.canConsume("MATCH", "PARTIAL");
1881 
1882             //	
1883             // referentialTriggeredAction : (updateRule deleteRule?) | (deleteRule updateRule?);
1884             //
1885             // deleteRule : 'ON' 'DELETE' referencialAction;
1886             //	
1887             // updateRule : 'ON' 'UPDATE' referencialAction;
1888             //
1889             // referencialAction
1890             // : cascadeOption | setNullOption | setDefaultOption | noActionOption
1891             // ;
1892             //    		
1893             // cascadeOption : 'CASCADE';
1894             // setNullOption : 'SET' 'NULL';
1895             // setDefaultOption : 'SET' 'DEFAULT';
1896             // noActionOption : 'NO' 'ACTION';
1897             // nowOption : 'NOW' '(' ')' ;
1898 
1899             // Could be one or both, so check more than once.
1900             while (tokens.canConsume("ON", "UPDATE") || tokens.canConsume("ON", "DELETE")) {
1901 
1902                 if (tokens.matches("CASCADE") || tokens.matches("NOW()")) {
1903                     tokens.consume();
1904                 } else if (tokens.matches("SET", "NULL")) {
1905                     tokens.consume("SET", "NULL");
1906                 } else if (tokens.matches("SET", "DEFAULT")) {
1907                     tokens.consume("SET", "DEFAULT");
1908                 } else if (tokens.matches("NO", "ACTION")) {
1909                     tokens.consume("NO", "ACTION");
1910                 } else {
1911                     System.out.println(" ERROR:   ColumnDefinition REFERENCES has NO REFERENCIAL ACTION.");
1912                 }
1913             }
1914         }
1915     }
1916 
1917     // ===========================================================================================================================
1918     // PARSING CREATE VIEW
1919     // ===========================================================================================================================
1920 
    /**
     * Parses DDL CREATE VIEW {@link AstNode} based on SQL 92 specifications. Initial implementation here does not parse the
     * statement in detail: the query expression is captured as raw text in the CREATE_VIEW_QUERY_EXPRESSION property.
     * 
     * @param tokens the {@link DdlTokenStream} representing the tokenized DDL content; may not be null
     * @param parentNode the parent {@link AstNode} node; may not be null
     * @return the Create View node
     * @throws ParsingException
     */
    protected AstNode parseCreateViewStatement( DdlTokenStream tokens,
                                                AstNode parentNode ) throws ParsingException {
        assert tokens != null;
        assert parentNode != null;

        markStartOfStatement(tokens);
        // <view definition> ::=
        // CREATE VIEW <table name> [ <left paren> <view column list><right paren> ]
        // AS <query expression>
        // [ WITH [ <levels clause> ] CHECK OPTION ]
        // <levels clause> ::=
        // CASCADED | LOCAL

        // NOTE: the query expression along with the CHECK OPTION clause require no SQL statement terminator.
        // So the CHECK OPTION clause will NOT

        String stmtType = "CREATE";
        tokens.consume("CREATE");
        if (tokens.canConsume("OR", "REPLACE")) {
            // Non-SQL-92 but common vendor extension; fold it into the statement type
            stmtType = stmtType + SPACE + "OR REPLACE";
        }
        tokens.consume("VIEW");
        stmtType = stmtType + SPACE + "VIEW";

        String name = parseName(tokens);

        AstNode createViewNode = nodeFactory().node(name, parentNode, TYPE_CREATE_VIEW_STATEMENT);

        // CONSUME COLUMNS (the optional parenthesized view column list)
        parseColumnNameList(tokens, createViewNode, TYPE_COLUMN_REFERENCE);

        tokens.consume("AS");

        // Everything up to the terminator is treated as the raw query expression text
        String queryExpression = parseUntilTerminator(tokens);

        createViewNode.setProperty(CREATE_VIEW_QUERY_EXPRESSION, queryExpression);

        markEndOfStatement(tokens, createViewNode);

        return createViewNode;
    }
1971 
1972     // ===========================================================================================================================
1973     // PARSING CREATE SCHEMA
1974     // ===========================================================================================================================
1975 
1976     /**
1977      * Parses DDL CREATE SCHEMA {@link AstNode} based on SQL 92 specifications. Initial implementation here does not parse the
1978      * statement in detail.
1979      * 
1980      * @param tokens the {@link DdlTokenStream} representing the tokenized DDL content; may not be null
1981      * @param parentNode the parent {@link AstNode} node; may not be null
1982      * @return the parsed schema node
1983      * @throws ParsingException
1984      */
1985     protected AstNode parseCreateSchemaStatement( DdlTokenStream tokens,
1986                                                   AstNode parentNode ) throws ParsingException {
1987         markStartOfStatement(tokens);
1988 
1989         AstNode schemaNode = null;
1990 
1991         String authorizationIdentifier = null;
1992         String schemaName = null;
1993 
1994         tokens.consume("CREATE", "SCHEMA");
1995 
1996         if (tokens.canConsume("AUTHORIZATION")) {
1997             authorizationIdentifier = tokens.consume();
1998         } else {
1999             schemaName = parseName(tokens);
2000             if (tokens.canConsume("AUTHORIZATION")) {
2001                 authorizationIdentifier = parseName(tokens);
2002             }
2003         }
2004         // Must have one or the other or both
2005         assert authorizationIdentifier != null || schemaName != null;
2006 
2007         if (schemaName != null) {
2008             schemaNode = nodeFactory().node(schemaName, parentNode, TYPE_CREATE_SCHEMA_STATEMENT);
2009         } else {
2010             schemaNode = nodeFactory().node(authorizationIdentifier, parentNode, TYPE_CREATE_SCHEMA_STATEMENT);
2011         }
2012 
2013         if (tokens.canConsume("DEFAULT", "CHARACTER", "SET")) {
2014             // consume name
2015             parseName(tokens);
2016         }
2017 
2018         markEndOfStatement(tokens, schemaNode);
2019 
2020         return schemaNode;
2021     }
2022 
2023     /**
2024      * Parses DDL CREATE ASSERTION {@link AstNode} based on SQL 92 specifications. Initial implementation here does not parse the
2025      * statement's search condition in detail.
2026      * 
2027      * @param tokens the {@link DdlTokenStream} representing the tokenized DDL content; may not be null
2028      * @param parentNode the parent {@link AstNode} node; may not be null
2029      * @return the parsed schema node
2030      * @throws ParsingException
2031      */
2032     protected AstNode parseCreateAssertionStatement( DdlTokenStream tokens,
2033                                                      AstNode parentNode ) throws ParsingException {
2034         markStartOfStatement(tokens);
2035 
2036         // <assertion definition> ::=
2037         // CREATE ASSERTION <constraint name> CHECK <left paren> <search condition> <right paren> [ <constraint attributes> ]
2038 
2039         AstNode node = null;
2040 
2041         tokens.consume("CREATE", "ASSERTION");
2042 
2043         String name = parseName(tokens);
2044 
2045         // Must have one or the other or both
2046 
2047         node = nodeFactory().node(name, parentNode, TYPE_CREATE_ASSERTION_STATEMENT);
2048 
2049         tokens.consume("CHECK");
2050 
2051         String searchCondition = consumeParenBoundedTokens(tokens, false);
2052 
2053         node.setProperty(CHECK_SEARCH_CONDITION, searchCondition);
2054 
2055         parseConstraintAttributes(tokens, node);
2056 
2057         markEndOfStatement(tokens, node);
2058 
2059         return node;
2060     }
2061 
2062     // ===========================================================================================================================
2063     // PARSING CREATE XXXXX (Typed Statements)
2064     // ===========================================================================================================================
2065 
2066     /**
2067      * Utility method to parse a statement that can be ignored. The value returned in the generic {@link AstNode} will contain all
2068      * text between starting token and either the terminator (if defined) or the next statement start token. NOTE: This method
2069      * does NOT mark and add consumed fragment to parent node.
2070      * 
2071      * @param tokens the {@link DdlTokenStream} representing the tokenized DDL content; may not be null
2072      * @param name
2073      * @param parentNode the parent {@link AstNode} node; may not be null
2074      * @return the parsed generic {@link AstNode}
2075      * @throws ParsingException
2076      */
2077     protected AstNode parseIgnorableStatement( DdlTokenStream tokens,
2078                                                String name,
2079                                                AstNode parentNode ) {
2080 
2081         AstNode node = nodeFactory().node(name, parentNode, TYPE_STATEMENT);
2082 
2083         parseUntilTerminator(tokens);
2084 
2085         // System.out.println(" >>> FOUND [" + stmt.getType() +"] STATEMENT TOKEN. IGNORING");
2086         return node;
2087     }
2088 
2089     /**
2090      * Utility method to parse a statement that can be ignored. The value returned in the generic {@link AstNode} will contain all
2091      * text between starting token and either the terminator (if defined) or the next statement start token. NOTE: This method
2092      * does NOT mark and add consumed fragment to parent node.
2093      * 
2094      * @param tokens the {@link DdlTokenStream} representing the tokenized DDL content; may not be null
2095      * @param name
2096      * @param parentNode the parent {@link AstNode} node; may not be null
2097      * @param mixinType
2098      * @return the parsed generic {@link AstNode}
2099      * @throws ParsingException
2100      */
2101     protected AstNode parseIgnorableStatement( DdlTokenStream tokens,
2102                                                String name,
2103                                                AstNode parentNode,
2104                                                Name mixinType ) {
2105         CheckArg.isNotNull(tokens, "tokens");
2106         CheckArg.isNotNull(name, "name");
2107         CheckArg.isNotNull(parentNode, "parentNode");
2108         CheckArg.isNotNull(mixinType, "mixinType");
2109 
2110         AstNode node = nodeFactory().node(name, parentNode, mixinType);
2111 
2112         parseUntilTerminator(tokens);
2113 
2114         return node;
2115     }
2116 
2117     /**
2118      * Utility method to parse a generic statement given a start phrase and statement mixin type.
2119      * 
2120      * @param tokens the {@link DdlTokenStream} representing the tokenized DDL content; may not be null
2121      * @param stmt_start_phrase the string array statement start phrase
2122      * @param parentNode the parent of the newly created node.
2123      * @param mixinType the mixin type of the newly created statement node
2124      * @return the new node
2125      */
2126     protected AstNode parseStatement( DdlTokenStream tokens,
2127                                       String[] stmt_start_phrase,
2128                                       AstNode parentNode,
2129                                       Name mixinType ) {
2130         CheckArg.isNotNull(tokens, "tokens");
2131         CheckArg.isNotNull(stmt_start_phrase, "stmt_start_phrase");
2132         CheckArg.isNotNull(parentNode, "parentNode");
2133         CheckArg.isNotNull(mixinType, "mixinType");
2134 
2135         markStartOfStatement(tokens);
2136         tokens.consume(stmt_start_phrase);
2137         AstNode result = parseIgnorableStatement(tokens, getStatementTypeName(stmt_start_phrase), parentNode, mixinType);
2138         markEndOfStatement(tokens, result);
2139 
2140         return result;
2141     }
2142 
2143     /**
2144      * Constructs a terminator AstNode as child of root node
2145      * 
2146      * @param parentNode the parent {@link AstNode} node; may not be null
2147      * @return terminator node
2148      */
2149     public final AstNode unknownTerminatedNode( AstNode parentNode ) {
2150         return nodeFactory.node("unknownStatement", parentNode, StandardDdlLexicon.TYPE_UNKNOWN_STATEMENT);
2151     }
2152 
2153     /**
2154      * Constructs a terminator AstNode as child of root node
2155      * 
2156      * @param parentNode the parent {@link AstNode} node; may not be null
2157      * @return terminator node
2158      */
2159     public final AstNode missingTerminatorNode( AstNode parentNode ) {
2160         return nodeFactory.node("missingTerminator", parentNode, StandardDdlLexicon.TYPE_MISSING_TERMINATOR);
2161     }
2162 
2163     public final boolean isMissingTerminatorNode( AstNode node ) {
2164         return node.getName().getString().equals(MISSING_TERMINATOR_NODE_LITERAL)
2165                && nodeFactory().hasMixinType(node, TYPE_MISSING_TERMINATOR);
2166     }
2167 
2168     public final boolean isValidSchemaChild( AstNode node ) {
2169         Name[] schemaChildMixins = getValidSchemaChildTypes();
2170         for (Object mixin : node.getProperty(JcrLexicon.MIXIN_TYPES).getValuesAsArray()) {
2171             if (mixin instanceof Name) {
2172                 for (Name nextType : schemaChildMixins) {
2173                     if (nextType.equals(mixin)) {
2174                         return true;
2175                     }
2176                 }
2177             }
2178         }
2179 
2180         return false;
2181     }
2182 
    /**
     * Attempts to re-parent the given statement node under an in-progress CREATE SCHEMA statement node. Succeeds only when the
     * statement is a valid schema child AND the root's recent children indicate we are still "inside" an open schema.
     * 
     * @param statementNode the statement node that may become a schema child; may not be null
     * @param stmtIsMissingTerminator true if the statement itself was not terminated, in which case a missing-terminator node is
     *        appended to the schema to keep it "open" for subsequent statements
     * @return true if the node was re-parented under a schema node, otherwise false
     */
    public final boolean setAsSchemaChildNode( AstNode statementNode,
                                               boolean stmtIsMissingTerminator ) {

        if (!isValidSchemaChild(statementNode)) {
            return false;
        }

        // Because we are setting the schema children on the fly we can assume that if we are under a schema with children, then
        // the schema should be followed by a missing terminator node. So we just check the previous 2 nodes.

        List<AstNode> children = getRootNode().getChildren();

        // Need at least 3 children: [... schema, missingTerminator, statementNode]
        if (children.size() > 2) {
            // The node just before the current statement should be a missing terminator ...
            AstNode previousNode = children.get(children.size() - 2);
            if (nodeFactory().hasMixinType(previousNode, TYPE_MISSING_TERMINATOR)) {
                // ... and the node before that would be the candidate schema node.
                AstNode theSchemaNode = children.get(children.size() - 3);

                // If the last child of a schema is missing terminator, then the schema isn't complete.
                // If it is NOT a missing terminator, we aren't under a schema node anymore.
                if (theSchemaNode.getChildCount() == 0
                    || nodeFactory().hasMixinType(theSchemaNode.getLastChild(), TYPE_MISSING_TERMINATOR)) {
                    if (nodeFactory().hasMixinType(theSchemaNode, TYPE_CREATE_SCHEMA_STATEMENT)) {
                        statementNode.setParent(theSchemaNode);
                        if (stmtIsMissingTerminator) {
                            // Keep the schema "open" so following statements can also be re-parented.
                            missingTerminatorNode(theSchemaNode);
                        }
                        return true;
                    }
                }
            }
        }

        return false;
    }
2217 
2218     /**
2219      * Returns current terminator
2220      * 
2221      * @return terminator string value
2222      */
2223     protected String getTerminator() {
2224         return this.terminator;
2225     }
2226 
2227     /**
2228      * @param terminator the string value used as the statement terminator for the ddl dialect
2229      * @return if terminator was changed or not
2230      */
2231     protected boolean setTerminator( String terminator ) {
2232         CheckArg.isNotNull(terminator, "terminator");
2233         if (this.terminator.equalsIgnoreCase(terminator)) {
2234             return false;
2235         }
2236         this.terminator = terminator;
2237         return true;
2238     }
2239 
    /**
     * Returns the mixin types that are valid children of a CREATE SCHEMA statement node. Sub-classes may override this to
     * contribute dialect-specific child types.
     * 
     * @return the array of valid schema child mixin types
     */
    protected Name[] getValidSchemaChildTypes() {
        return VALID_SCHEMA_CHILD_TYPES;
    }
2243 
2244     /**
2245      * Checks if next token is of type comment.
2246      * 
2247      * @param tokens the {@link DdlTokenStream} representing the tokenized DDL content; may not be null
2248      * @return true if next token is a comment.
2249      * @throws ParsingException
2250      */
2251     protected boolean isComment( DdlTokenStream tokens ) throws ParsingException {
2252         return tokens.matches(DdlTokenizer.COMMENT);
2253     }
2254 
2255     /**
2256      * Consumes an an end-of-line comment or in-line comment
2257      * 
2258      * @param tokens the {@link DdlTokenStream} representing the tokenized DDL content; may not be null
2259      * @throws ParsingException
2260      */
2261     protected void consumeComment( DdlTokenStream tokens ) throws ParsingException {
2262         tokens.canConsume(DdlTokenizer.COMMENT);
2263     }
2264 
2265     /**
2266      * This utility method provides this parser the ability to distinguish between a CreateTable Constraint and a ColumnDefinition
2267      * Definition which are the only two statement segment types allowed within the CREATE TABLE parenthesis ( xxxxxx );
2268      * 
2269      * @param tokens the {@link DdlTokenStream} representing the tokenized DDL content; may not be null
2270      * @return is table constraint
2271      * @throws ParsingException
2272      */
2273     protected boolean isTableConstraint( DdlTokenStream tokens ) throws ParsingException {
2274         boolean result = false;
2275 
2276         if ((tokens.matches("PRIMARY", "KEY")) || (tokens.matches("FOREIGN", "KEY")) || (tokens.matches("UNIQUE"))) {
2277             result = true;
2278         } else if (tokens.matches("CONSTRAINT")) {
2279             if (tokens.matches("CONSTRAINT", DdlTokenStream.ANY_VALUE, "UNIQUE")
2280                 || tokens.matches("CONSTRAINT", DdlTokenStream.ANY_VALUE, "PRIMARY", "KEY")
2281                 || tokens.matches("CONSTRAINT", DdlTokenStream.ANY_VALUE, "FOREIGN", "KEY")
2282                 || tokens.matches("CONSTRAINT", DdlTokenStream.ANY_VALUE, "CHECK")) {
2283                 result = true;
2284             }
2285         }
2286 
2287         return result;
2288     }
2289 
2290     /**
2291      * This utility method provides this parser the ability to distinguish between a CreateTable Constrain and a ColumnDefinition
2292      * Definition which are the only two statement segment types allowed within the CREATE TABLE parenthesis ( xxxxxx );
2293      * 
2294      * @param tokens the {@link DdlTokenStream} representing the tokenized DDL content; may not be null
2295      * @return is column definition start phrase
2296      * @throws ParsingException
2297      */
2298     protected boolean isColumnDefinitionStart( DdlTokenStream tokens ) throws ParsingException {
2299         boolean result = false;
2300 
2301         if (isTableConstraint(tokens)) {
2302             result = false;
2303         } else {
2304             for (String dTypeStartWord : getDataTypeStartWords()) {
2305                 result = (tokens.matches(DdlTokenStream.ANY_VALUE, dTypeStartWord) || tokens.matches("COLUMN",
2306                                                                                                      DdlTokenStream.ANY_VALUE,
2307                                                                                                      dTypeStartWord));
2308                 if (result) {
2309                     break;
2310                 }
2311             }
2312 
2313         }
2314 
2315         return result;
2316     }
2317 
2318     /**
2319      * Returns a list of data type start words which can be used to help identify a column definition sub-statement.
2320      * 
2321      * @return list of data type start words
2322      */
2323     protected List<String> getDataTypeStartWords() {
2324         if (allDataTypeStartWords == null) {
2325             allDataTypeStartWords = new ArrayList<String>();
2326             allDataTypeStartWords.addAll(DataTypes.DATATYPE_START_WORDS);
2327             allDataTypeStartWords.addAll(getCustomDataTypeStartWords());
2328         }
2329         return allDataTypeStartWords;
2330     }
2331 
2332     /**
2333      * Returns a list of custom data type start words which can be used to help identify a column definition sub-statement.
2334      * Sub-classes should override this method to contribute DB-specific data types.
2335      * 
2336      * @return list of data type start words
2337      */
2338     protected List<String> getCustomDataTypeStartWords() {
2339         return Collections.emptyList();
2340     }
2341 
2342     /**
2343      * Method to parse fully qualified schema, table and column names that are defined with '.' separator and optionally bracketed
2344      * with square brackets Example: partsSchema.supplier Example: [partsSchema].[supplier]
2345      * 
2346      * @param tokens the {@link DdlTokenStream} representing the tokenized DDL content; may not be null
2347      * @return the parsed name
2348      */
2349     protected String parseName( DdlTokenStream tokens ) {
2350         // Basically we want to construct a name that could have the form:
2351         // [schemaName].[tableName].[columnName]
2352         // NOTE: "[]" brackets are optional
2353         StringBuffer sb = new StringBuffer();
2354 
2355         if (tokens.matches('[')) {
2356             // We have the bracketed case, so assume all brackets
2357             while (true) {
2358 
2359                 tokens.consume('['); // [ bracket
2360                 sb.append(consumeIdentifier(tokens)); // name
2361                 tokens.consume(']'); // ] bracket
2362                 if (tokens.matches('.')) {
2363                     sb.append(tokens.consume()); // '.'
2364                 } else {
2365                     break;
2366                 }
2367             }
2368         } else {
2369 
2370             // We have the NON-bracketed case, so assume all brackets
2371             while (true) {
2372 
2373                 sb.append(consumeIdentifier(tokens)); // name
2374 
2375                 if (tokens.matches('.')) {
2376                     sb.append(tokens.consume()); // '.'
2377                 } else {
2378                     break;
2379                 }
2380 
2381             }
2382         }
2383 
2384         return sb.toString();
2385     }
2386 
2387     /**
2388      * Consumes an token identifier which can be of the form of a simple string or a double-quoted string.
2389      * 
2390      * @param tokens the {@link DdlTokenStream} representing the tokenized DDL content; may not be null
2391      * @return the identifier
2392      * @throws ParsingException
2393      */
2394     protected String consumeIdentifier( DdlTokenStream tokens ) throws ParsingException {
2395         String value = tokens.consume();
2396         // This may surrounded by quotes, so remove them ...
2397         if (value.charAt(0) == '"') {
2398             int length = value.length();
2399             // Check for the end quote ...
2400             value = value.substring(1, length - 1); // not complete!!
2401         }
2402         // TODO: Handle warnings elegantly
2403         // else {
2404         // // Not quoted, so check for reserved words ...
2405         // if (tokens.isKeyWord(value)) {
2406         // // Record warning ...
2407         // System.out.println("  WARNING:  Identifier [" + value + "] is a SQL 92 Reserved Word");
2408         // }
2409         // }
2410         return value;
2411     }
2412 
2413     /**
2414      * Utility method to determine if next token is a terminator.
2415      * 
2416      * @param tokens the {@link DdlTokenStream} representing the tokenized DDL content; may not be null
2417      * @return is terminator token
2418      * @throws ParsingException
2419      */
2420     protected boolean isTerminator( DdlTokenStream tokens ) throws ParsingException {
2421         boolean result = tokens.matches(getTerminator());
2422 
2423         return result;
2424     }
2425 
2426     protected void parseColumnNameList( DdlTokenStream tokens,
2427                                         AstNode parentNode,
2428                                         Name referenceType ) {
2429         // CONSUME COLUMNS
2430         List<String> columnNameList = new ArrayList<String>();
2431         if (tokens.matches(L_PAREN)) {
2432             tokens.consume(L_PAREN);
2433             columnNameList = parseColumnNameList(tokens);
2434             tokens.consume(R_PAREN);
2435         }
2436 
2437         for (String columnName : columnNameList) {
2438             nodeFactory().node(columnName, parentNode, referenceType);
2439         }
2440     }
2441 
2442     /**
2443      * Parses a comma separated list of column names.
2444      * 
2445      * @param tokens the {@link DdlTokenStream} representing the tokenized DDL content; may not be null
2446      * @return list of column names.
2447      * @throws ParsingException
2448      */
2449     protected List<String> parseColumnNameList( DdlTokenStream tokens ) throws ParsingException {
2450         List<String> columnNames = new LinkedList<String>();
2451 
2452         while (true) {
2453             columnNames.add(parseName(tokens));
2454             if (!tokens.canConsume(COMMA)) {
2455                 break;
2456             }
2457         }
2458 
2459         return columnNames;
2460     }
2461 
2462     /**
2463      * Utility method which parses tokens until a terminator is found, another statement is identified or there are no more
2464      * tokens.
2465      * 
2466      * @param tokens the {@link DdlTokenStream} representing the tokenized DDL content; may not be null
2467      * @return the parsed string
2468      * @throws ParsingException
2469      */
2470     protected String parseUntilTerminator( DdlTokenStream tokens ) throws ParsingException {
2471         StringBuffer sb = new StringBuffer();
2472         if (doUseTerminator()) {
2473             boolean lastTokenWasPeriod = false;
2474             while (tokens.hasNext() && !tokens.matches(DdlTokenizer.STATEMENT_KEY) && !isTerminator(tokens)) {
2475                 String thisToken = tokens.consume();
2476                 boolean thisTokenIsPeriod = thisToken.equals(PERIOD);
2477                 boolean thisTokenIsComma = thisToken.equals(COMMA);
2478                 if (lastTokenWasPeriod || thisTokenIsPeriod || thisTokenIsComma) {
2479                     sb.append(thisToken);
2480                 } else {
2481                     sb.append(SPACE).append(thisToken);
2482                 }
2483                 if (thisTokenIsPeriod) {
2484                     lastTokenWasPeriod = true;
2485                 } else {
2486                     lastTokenWasPeriod = false;
2487                 }
2488             }
2489         } else {
2490             // parse until next statement
2491             boolean lastTokenWasPeriod = false;
2492             while (tokens.hasNext() && !tokens.matches(DdlTokenizer.STATEMENT_KEY)) {
2493                 String thisToken = tokens.consume();
2494                 boolean thisTokenIsPeriod = thisToken.equals(PERIOD);
2495                 boolean thisTokenIsComma = thisToken.equals(COMMA);
2496                 if (lastTokenWasPeriod || thisTokenIsPeriod || thisTokenIsComma) {
2497                     sb.append(thisToken);
2498                 } else {
2499                     sb.append(SPACE).append(thisToken);
2500                 }
2501                 if (thisTokenIsPeriod) {
2502                     lastTokenWasPeriod = true;
2503                 } else {
2504                     lastTokenWasPeriod = false;
2505                 }
2506             }
2507         }
2508 
2509         return sb.toString();
2510     }
2511 
2512     /**
2513      * Utility method which parses tokens until a terminator is found or there are no more tokens. This method differs from
2514      * parseUntilTermintor() in that it ignores embedded statements. This method can be used for parsers that have statements
2515      * which can embed statements that should not be parsed.
2516      * 
2517      * @param tokens the {@link DdlTokenStream} representing the tokenized DDL content; may not be null
2518      * @return the parsed string
2519      * @throws ParsingException
2520      */
2521     protected String parseUntilTerminatorIgnoreEmbeddedStatements( DdlTokenStream tokens ) throws ParsingException {
2522         StringBuffer sb = new StringBuffer();
2523 
2524         boolean lastTokenWasPeriod = false;
2525         while (tokens.hasNext() && !isTerminator(tokens)) {
2526             String thisToken = tokens.consume();
2527             boolean thisTokenIsPeriod = thisToken.equals(PERIOD);
2528             boolean thisTokenIsComma = thisToken.equals(COMMA);
2529             if (lastTokenWasPeriod || thisTokenIsPeriod || thisTokenIsComma) {
2530                 sb.append(thisToken);
2531             } else {
2532                 sb.append(SPACE).append(thisToken);
2533             }
2534             if (thisTokenIsPeriod) {
2535                 lastTokenWasPeriod = true;
2536             } else {
2537                 lastTokenWasPeriod = false;
2538             }
2539         }
2540 
2541         return sb.toString();
2542     }
2543 
2544     /**
2545      * Utility method which parses tokens until a semicolon is found or there are no more tokens.
2546      * 
2547      * @param tokens the {@link DdlTokenStream} representing the tokenized DDL content; may not be null
2548      * @return the parsed string
2549      * @throws ParsingException
2550      */
2551     protected String parseUntilSemiColon( DdlTokenStream tokens ) throws ParsingException {
2552         StringBuffer sb = new StringBuffer();
2553 
2554         boolean lastTokenWasPeriod = false;
2555         while (tokens.hasNext() && !tokens.matches(SEMICOLON)) {
2556             String thisToken = tokens.consume();
2557             boolean thisTokenIsPeriod = thisToken.equals(PERIOD);
2558             boolean thisTokenIsComma = thisToken.equals(COMMA);
2559             if (lastTokenWasPeriod || thisTokenIsPeriod || thisTokenIsComma) {
2560                 sb.append(thisToken);
2561             } else {
2562                 sb.append(SPACE).append(thisToken);
2563             }
2564             if (thisTokenIsPeriod) {
2565                 lastTokenWasPeriod = true;
2566             } else {
2567                 lastTokenWasPeriod = false;
2568             }
2569         }
2570 
2571         return sb.toString();
2572     }
2573 
2574     protected String parseUntilCommaOrTerminator( DdlTokenStream tokens ) throws ParsingException {
2575         StringBuffer sb = new StringBuffer();
2576         if (doUseTerminator()) {
2577             while (tokens.hasNext() && !tokens.matches(DdlTokenizer.STATEMENT_KEY) && !isTerminator(tokens)
2578                    && !tokens.matches(COMMA)) {
2579                 sb.append(SPACE).append(tokens.consume());
2580             }
2581         } else {
2582             // parse until next statement
2583             while (tokens.hasNext() && !tokens.matches(DdlTokenizer.STATEMENT_KEY) && !tokens.matches(COMMA)) {
2584                 sb.append(SPACE).append(tokens.consume());
2585             }
2586         }
2587 
2588         return sb.toString();
2589     }
2590 
2591     /**
2592      * Returns if parser is using statement terminator or not.
2593      * 
2594      * @return value of useTerminator flag.
2595      */
2596     public boolean doUseTerminator() {
2597         return useTerminator;
2598     }
2599 
2600     /**
2601      * Sets the value of the use terminator flag for the parser. If TRUE, then all statements are expected to be terminated by a
2602      * terminator. The default terminator ";" can be overridden by setting the value using setTerminator() method.
2603      * 
2604      * @param useTerminator
2605      */
2606     public void setDoUseTerminator( boolean useTerminator ) {
2607         this.useTerminator = useTerminator;
2608     }
2609 
2610     public String getStatementTypeName( String[] stmtPhrase ) {
2611         StringBuffer sb = new StringBuffer(100);
2612         for (int i = 0; i < stmtPhrase.length; i++) {
2613             if (i == 0) {
2614                 sb.append(stmtPhrase[0]);
2615             } else {
2616                 sb.append(SPACE).append(stmtPhrase[i]);
2617             }
2618         }
2619 
2620         return sb.toString();
2621     }
2622 
2623     /**
2624      * Parses the default clause for a column and sets appropriate properties on the column node.
2625      * 
2626      * @param tokens the {@link DdlTokenStream} representing the tokenized DDL content; may not be null
2627      * @param columnNode the column node which may contain a default clause; may not be null
2628      * @return true if default clause was found and parsed, otherwise false
2629      * @throws ParsingException
2630      */
2631     protected boolean parseDefaultClause( DdlTokenStream tokens,
2632                                           AstNode columnNode ) throws ParsingException {
2633 
2634         assert tokens != null;
2635         assert columnNode != null;
2636 
2637         // defaultClause
2638         // : defaultOption
2639         // ;
2640         // defaultOption : <literal> | datetimeValueFunction
2641         // | USER | CURRENT_USER | SESSION_USER | SYSTEM_USER | NULL;
2642         //
2643         // <datetime value function> ::=
2644         // <current date value function>
2645         // | <current time value function>
2646         // | <current timestamp value function>
2647         //
2648         // <current date value function> ::= CURRENT_DATE
2649         //
2650         // <current time value function> ::=
2651         // CURRENT_TIME [ <left paren> <time precision> <right paren> ]
2652         //
2653         // <current timestamp value function> ::=
2654         // CURRENT_TIMESTAMP [ <left paren> <timestamp precision> <right paren> ]
2655 
2656         String defaultValue = "";
2657 
2658         if (tokens.canConsume("DEFAULT")) {
2659 
2660             int optionID = -1;
2661             int precision = -1;
2662 
2663             if (tokens.canConsume("CURRENT_DATE")) {
2664 
2665                 optionID = DEFAULT_ID_DATETIME;
2666                 defaultValue = "CURRENT_DATE";
2667             } else if (tokens.canConsume("CURRENT_TIME")) {
2668                 optionID = DEFAULT_ID_DATETIME;
2669                 defaultValue = "CURRENT_TIME";
2670                 if (tokens.canConsume(L_PAREN)) {
2671                     // EXPECT INTEGER
2672                     precision = integer(tokens.consume());
2673                     tokens.canConsume(R_PAREN);
2674                 }
2675             } else if (tokens.canConsume("CURRENT_TIMESTAMP")) {
2676                 optionID = DEFAULT_ID_DATETIME;
2677                 defaultValue = "CURRENT_TIMESTAMP";
2678                 if (tokens.canConsume(L_PAREN)) {
2679                     // EXPECT INTEGER
2680                     precision = integer(tokens.consume());
2681                     tokens.canConsume(R_PAREN);
2682                 }
2683             } else if (tokens.canConsume("USER")) {
2684                 optionID = DEFAULT_ID_USER;
2685                 defaultValue = "USER";
2686             } else if (tokens.canConsume("CURRENT_USER")) {
2687                 optionID = DEFAULT_ID_CURRENT_USER;
2688                 defaultValue = "CURRENT_USER";
2689             } else if (tokens.canConsume("SESSION_USER")) {
2690                 optionID = DEFAULT_ID_SESSION_USER;
2691                 defaultValue = "SESSION_USER";
2692             } else if (tokens.canConsume("SYSTEM_USER")) {
2693                 optionID = DEFAULT_ID_SYSTEM_USER;
2694                 defaultValue = "SYSTEM_USER";
2695             } else if (tokens.canConsume("NULL")) {
2696                 optionID = DEFAULT_ID_NULL;
2697                 defaultValue = "NULL";
2698             } else if (tokens.canConsume(L_PAREN)) {
2699                 optionID = DEFAULT_ID_LITERAL;
2700                 while (!tokens.canConsume(R_PAREN)) {
2701                     defaultValue = defaultValue + tokens.consume();
2702                 }
2703             } else {
2704                 optionID = DEFAULT_ID_LITERAL;
2705                 // Assume default was EMPTY or ''
2706                 defaultValue = tokens.consume();
2707                 // NOTE: default value could be a Real number as well as an integer, so
2708                 // 1000.00 is valid
2709                 if (tokens.canConsume(".")) {
2710                     defaultValue = defaultValue + '.' + tokens.consume();
2711                 }
2712             }
2713 
2714             columnNode.setProperty(DEFAULT_OPTION, optionID);
2715             columnNode.setProperty(DEFAULT_VALUE, defaultValue);
2716             if (precision > -1) {
2717                 columnNode.setProperty(DEFAULT_PRECISION, precision);
2718             }
2719             return true;
2720         }
2721 
2722         return false;
2723     }
2724 
2725     /**
2726      * Parses the default clause for a column and sets appropriate properties on the column node.
2727      * 
2728      * @param tokens the {@link DdlTokenStream} representing the tokenized DDL content; may not be null
2729      * @param columnNode the column node which may contain a collate clause; may not be null
2730      * @return true if collate clause was found and parsed else return false.
2731      * @throws ParsingException
2732      */
2733     protected boolean parseCollateClause( DdlTokenStream tokens,
2734                                           AstNode columnNode ) throws ParsingException {
2735         assert tokens != null;
2736         assert columnNode != null;
2737 
2738         // an option in the CREATE DOMAIN definition
2739         //
2740         // <collate clause> ::= COLLATE <collation name>
2741 
2742         if (tokens.matches("COLLATE")) {
2743             tokens.consume("COLLATE");
2744             String collationName = parseName(tokens);
2745             columnNode.setProperty(COLLATION_NAME, collationName);
2746             return true;
2747         }
2748 
2749         return false;
2750     }
2751 
2752     /**
2753      * Returns the integer value of the input string. Handles both straight integer string or complex KMG (CLOB or BLOB) value.
2754      * Throws {@link NumberFormatException} if a valid integer is not found.
2755      * 
2756      * @param value the string to be parsed; may not be null and length must be > 0;
2757      * @return integer value
2758      */
2759     protected int integer( String value ) {
2760         assert value != null;
2761         assert value.length() > 0;
2762 
2763         return new BigInteger(value).intValue();
2764     }
2765 
2766     public final Position getCurrentMarkedPosition() {
2767         return currentMarkedPosition;
2768     }
2769 
2770     /**
2771      * Marks the token stream with the current position to help track statement scope within the original input string.
2772      * 
2773      * @param tokens the {@link DdlTokenStream} representing the tokenized DDL content; may not be null
2774      */
2775     public final void markStartOfStatement( DdlTokenStream tokens ) {
2776         tokens.mark();
2777         currentMarkedPosition = tokens.nextPosition();
2778     }
2779 
2780     /**
2781      * Marks the end of a statement by consuming the terminator (if exists). If it does not exist, a missing terminator node may
2782      * be added. If the resulting statement node is a valid child node type for a schema, the child node may be re-parented to the
2783      * schema if the schema is still parentable. Each resulting statement node is tagged with the enclosing source expression,
2784      * starting line number and column number from the file content as well as a starting character index from that same content.
2785      * 
2786      * @param tokens the {@link DdlTokenStream} representing the tokenized DDL content; may not be null
2787      * @param statementNode
2788      */
2789     public final void markEndOfStatement( DdlTokenStream tokens,
2790                                           AstNode statementNode ) {
2791         if (!tokens.canConsume(getTerminator())) {
2792             // System.out.println("  WARNING:  Terminator NOT FOUND");
2793 
2794             // Check previous until
2795             // 1) find two sequential nodes that are not missing terminator nodes
2796             // 2) the node before the missing terminator is a valid schema child and
2797             // 3) we find a schema node that is ALSO missing a terminator BEFORE we find an invalid schema child OR a terminated
2798             // node.
2799 
2800             if (!setAsSchemaChildNode(statementNode, true)) {
2801                 missingTerminatorNode(getRootNode()); // Construct missing terminator node
2802             }
2803         } else {
2804             setAsSchemaChildNode(statementNode, false);
2805         }
2806 
2807         String source = tokens.getMarkedContent().trim();
2808         statementNode.setProperty(DDL_EXPRESSION, source);
2809         statementNode.setProperty(DDL_START_LINE_NUMBER, currentMarkedPosition.getLine());
2810         statementNode.setProperty(DDL_START_CHAR_INDEX, currentMarkedPosition.getIndexInContent());
2811         statementNode.setProperty(DDL_START_COLUMN_NUMBER, currentMarkedPosition.getColumn());
2812 
2813         testPrint("== >> SOURCE:\n" + source + "\n");
2814     }
2815 
2816     protected void testPrint( String str ) {
2817         if (isTestMode()) {
2818             System.out.println(str);
2819         }
2820     }
2821 
2822     /**
2823      * @return testMode
2824      */
2825     public boolean isTestMode() {
2826         return testMode;
2827     }
2828 
2829     /**
2830      * @param testMode Sets testMode to the specified value.
2831      */
2832     public void setTestMode( boolean testMode ) {
2833         this.testMode = testMode;
2834     }
2835 
2836     /**
2837      * {@inheritDoc}
2838      * 
2839      * @see org.modeshape.sequencer.ddl.DdlParser#getId()
2840      */
2841     public String getId() {
2842         return parserId;
2843     }
2844 
2845     /**
2846      * {@inheritDoc}
2847      * 
2848      * @see java.lang.Object#hashCode()
2849      */
2850     @Override
2851     public int hashCode() {
2852         return this.parserId.hashCode();
2853     }
2854 
2855     /**
2856      * {@inheritDoc}
2857      * 
2858      * @see java.lang.Object#equals(java.lang.Object)
2859      */
2860     @Override
2861     public boolean equals( Object obj ) {
2862         if (obj == this) return true;
2863         if (obj instanceof DdlParser) {
2864             return ((DdlParser)obj).getId().equals(this.getId());
2865         }
2866         return false;
2867     }
2868 }