View Javadoc

1   package org.modeshape.sequencer.ddl.dialect.derby;
2   
3   import static org.modeshape.sequencer.ddl.StandardDdlLexicon.ALL_PRIVILEGES;
4   import static org.modeshape.sequencer.ddl.StandardDdlLexicon.COLUMN_ATTRIBUTE_TYPE;
5   import static org.modeshape.sequencer.ddl.StandardDdlLexicon.GRANTEE;
6   import static org.modeshape.sequencer.ddl.StandardDdlLexicon.GRANT_PRIVILEGE;
7   import static org.modeshape.sequencer.ddl.StandardDdlLexicon.NEW_NAME;
8   import static org.modeshape.sequencer.ddl.StandardDdlLexicon.PROPERTY_VALUE;
9   import static org.modeshape.sequencer.ddl.StandardDdlLexicon.SQL;
10  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE;
11  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_ALTER_COLUMN_DEFINITION;
12  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_ALTER_TABLE_STATEMENT;
13  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_COLUMN_DEFINITION;
14  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_COLUMN_REFERENCE;
15  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_CREATE_TABLE_STATEMENT;
16  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_DROP_COLUMN_DEFINITION;
17  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_DROP_TABLE_CONSTRAINT_DEFINITION;
18  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_GRANT_ON_TABLE_STATEMENT;
19  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_STATEMENT_OPTION;
20  import static org.modeshape.sequencer.ddl.StandardDdlLexicon.VALUE;
21  import static org.modeshape.sequencer.ddl.dialect.derby.DerbyDdlLexicon.IS_TABLE_TYPE;
22  import static org.modeshape.sequencer.ddl.dialect.derby.DerbyDdlLexicon.ORDER;
23  import static org.modeshape.sequencer.ddl.dialect.derby.DerbyDdlLexicon.ROLE_NAME;
24  import static org.modeshape.sequencer.ddl.dialect.derby.DerbyDdlLexicon.TABLE_NAME;
25  import static org.modeshape.sequencer.ddl.dialect.derby.DerbyDdlLexicon.TYPE_CREATE_FUNCTION_STATEMENT;
26  import static org.modeshape.sequencer.ddl.dialect.derby.DerbyDdlLexicon.TYPE_CREATE_INDEX_STATEMENT;
27  import static org.modeshape.sequencer.ddl.dialect.derby.DerbyDdlLexicon.TYPE_CREATE_PROCEDURE_STATEMENT;
28  import static org.modeshape.sequencer.ddl.dialect.derby.DerbyDdlLexicon.TYPE_CREATE_ROLE_STATEMENT;
29  import static org.modeshape.sequencer.ddl.dialect.derby.DerbyDdlLexicon.TYPE_CREATE_SYNONYM_STATEMENT;
30  import static org.modeshape.sequencer.ddl.dialect.derby.DerbyDdlLexicon.TYPE_CREATE_TRIGGER_STATEMENT;
31  import static org.modeshape.sequencer.ddl.dialect.derby.DerbyDdlLexicon.TYPE_DECLARE_GLOBAL_TEMPORARY_TABLE_STATEMENT;
32  import static org.modeshape.sequencer.ddl.dialect.derby.DerbyDdlLexicon.TYPE_DROP_FUNCTION_STATEMENT;
33  import static org.modeshape.sequencer.ddl.dialect.derby.DerbyDdlLexicon.TYPE_DROP_INDEX_STATEMENT;
34  import static org.modeshape.sequencer.ddl.dialect.derby.DerbyDdlLexicon.TYPE_DROP_PROCEDURE_STATEMENT;
35  import static org.modeshape.sequencer.ddl.dialect.derby.DerbyDdlLexicon.TYPE_DROP_ROLE_STATEMENT;
36  import static org.modeshape.sequencer.ddl.dialect.derby.DerbyDdlLexicon.TYPE_DROP_SYNONYM_STATEMENT;
37  import static org.modeshape.sequencer.ddl.dialect.derby.DerbyDdlLexicon.TYPE_DROP_TRIGGER_STATEMENT;
38  import static org.modeshape.sequencer.ddl.dialect.derby.DerbyDdlLexicon.TYPE_FUNCTION_PARAMETER;
39  import static org.modeshape.sequencer.ddl.dialect.derby.DerbyDdlLexicon.TYPE_GRANT_ON_FUNCTION_STATEMENT;
40  import static org.modeshape.sequencer.ddl.dialect.derby.DerbyDdlLexicon.TYPE_GRANT_ON_PROCEDURE_STATEMENT;
41  import static org.modeshape.sequencer.ddl.dialect.derby.DerbyDdlLexicon.TYPE_GRANT_ROLES_STATEMENT;
42  import static org.modeshape.sequencer.ddl.dialect.derby.DerbyDdlLexicon.TYPE_INDEX_COLUMN_REFERENCE;
43  import static org.modeshape.sequencer.ddl.dialect.derby.DerbyDdlLexicon.TYPE_LOCK_TABLE_STATEMENT;
44  import static org.modeshape.sequencer.ddl.dialect.derby.DerbyDdlLexicon.TYPE_RENAME_INDEX_STATEMENT;
45  import static org.modeshape.sequencer.ddl.dialect.derby.DerbyDdlLexicon.TYPE_RENAME_TABLE_STATEMENT;
46  import static org.modeshape.sequencer.ddl.dialect.derby.DerbyDdlLexicon.UNIQUE_INDEX;
47  import java.util.ArrayList;
48  import java.util.List;
49  import org.modeshape.common.text.ParsingException;
50  import org.modeshape.graph.property.Name;
51  import org.modeshape.sequencer.ddl.DdlParserProblem;
52  import org.modeshape.sequencer.ddl.DdlSequencerI18n;
53  import org.modeshape.sequencer.ddl.DdlTokenStream;
54  import org.modeshape.sequencer.ddl.StandardDdlLexicon;
55  import org.modeshape.sequencer.ddl.StandardDdlParser;
56  import org.modeshape.sequencer.ddl.DdlTokenStream.DdlTokenizer;
57  import org.modeshape.sequencer.ddl.datatype.DataType;
58  import org.modeshape.sequencer.ddl.datatype.DataTypeParser;
59  import org.modeshape.sequencer.ddl.node.AstNode;
60  
61  /**
62   * Derby-specific DDL Parser. Includes custom data types as well as custom DDL statements.
63   */
64  public class DerbyDdlParser extends StandardDdlParser implements DerbyDdlConstants, DerbyDdlConstants.DerbyStatementStartPhrases {
65      private final String parserId = "DERBY";
66  
67      protected static final List<String[]> derbyDataTypeStrings = new ArrayList<String[]>(
68                                                                                           DerbyDataTypes.CUSTOM_DATATYPE_START_PHRASES);
69  
70      private static final String TERMINATOR = DEFAULT_TERMINATOR;
71  
72      public DerbyDdlParser() {
73          setDatatypeParser(new DerbyDataTypeParser());
74          setDoUseTerminator(true);
75          setTerminator(TERMINATOR);
76      }
77  
78      /**
79       * {@inheritDoc}
80       * 
81       * @see org.modeshape.sequencer.ddl.StandardDdlParser#initializeTokenStream(org.modeshape.sequencer.ddl.DdlTokenStream)
82       */
83      @Override
84      protected void initializeTokenStream( DdlTokenStream tokens ) {
85          super.initializeTokenStream(tokens);
86          tokens.registerKeyWords(CUSTOM_KEYWORDS);
87          tokens.registerStatementStartPhrase(ALTER_PHRASES);
88          tokens.registerStatementStartPhrase(CREATE_PHRASES);
89          tokens.registerStatementStartPhrase(DROP_PHRASES);
90          tokens.registerStatementStartPhrase(SET_PHRASES);
91          tokens.registerStatementStartPhrase(MISC_PHRASES);
92      }
93  
94      /**
95       * {@inheritDoc}
96       * 
97       * @see org.modeshape.sequencer.ddl.StandardDdlParser#getId()
98       */
99      @Override
100     public String getId() {
101         return this.parserId;
102     }
103 
104     /**
105      * {@inheritDoc}
106      * 
107      * @see org.modeshape.sequencer.ddl.StandardDdlParser#getValidSchemaChildTypes()
108      */
109     @Override
110     protected Name[] getValidSchemaChildTypes() {
111         return VALID_SCHEMA_CHILD_STMTS;
112     }
113 
114     /**
115      * {@inheritDoc}
116      * 
117      * @see org.modeshape.sequencer.ddl.StandardDdlParser#parseCustomStatement(org.modeshape.sequencer.ddl.DdlTokenStream,
118      *      org.modeshape.sequencer.ddl.node.AstNode)
119      */
120     @Override
121     protected AstNode parseCustomStatement( DdlTokenStream tokens,
122                                             AstNode parentNode ) throws ParsingException {
123         assert tokens != null;
124         assert parentNode != null;
125 
126         AstNode result = super.parseCustomStatement(tokens, parentNode);
127         if (result == null) {
128             if (tokens.matches(STMT_LOCK_TABLE)) {
129                 result = parseLockTable(tokens, parentNode);
130             } else if (tokens.matches(STMT_RENAME_TABLE)) {
131                 result = parseRenameTable(tokens, parentNode);
132             } else if (tokens.matches(STMT_RENAME_INDEX)) {
133                 result = parseRenameIndex(tokens, parentNode);
134             } else if (tokens.matches(STMT_DECLARE_GLOBAL_TEMP_TABLE)) {
135                 result = parseDeclareGlobalTempTable(tokens, parentNode);
136             }
137         }
138         return result;
139     }
140 
141     /**
142      * {@inheritDoc}
143      * 
144      * @see org.modeshape.sequencer.ddl.StandardDdlParser#parseCreateStatement(org.modeshape.sequencer.ddl.DdlTokenStream,
145      *      org.modeshape.sequencer.ddl.node.AstNode)
146      */
147     @Override
148     protected AstNode parseCreateStatement( DdlTokenStream tokens,
149                                             AstNode parentNode ) throws ParsingException {
150         assert tokens != null;
151         assert parentNode != null;
152 
153         if (tokens.matches(STMT_CREATE_INDEX) || tokens.matches(STMT_CREATE_UNIQUE_INDEX)) {
154             return parseCreateIndex(tokens, parentNode);
155         } else if (tokens.matches(STMT_CREATE_FUNCTION)) {
156             return parseCreateFunction(tokens, parentNode);
157         } else if (tokens.matches(STMT_CREATE_PROCEDURE)) {
158             return parseStatement(tokens, STMT_CREATE_PROCEDURE, parentNode, TYPE_CREATE_PROCEDURE_STATEMENT);
159         } else if (tokens.matches(STMT_CREATE_ROLE)) {
160             return parseStatement(tokens, STMT_CREATE_ROLE, parentNode, TYPE_CREATE_ROLE_STATEMENT);
161         } else if (tokens.matches(STMT_CREATE_SYNONYM)) {
162             return parseCreateSynonym(tokens, parentNode);
163         } else if (tokens.matches(STMT_CREATE_TRIGGER)) {
164             return parseCreateTrigger(tokens, parentNode);
165         }
166 
167         return super.parseCreateStatement(tokens, parentNode);
168 
169     }
170 
171     /**
172      * Parses DDL CREATE INDEX
173      * 
174      * @param tokens the tokenized {@link DdlTokenStream} of the DDL input content; may not be null
175      * @param parentNode the parent {@link AstNode} node; may not be null
176      * @return the parsed CREATE INDEX
177      * @throws ParsingException
178      */
179     protected AstNode parseCreateIndex( DdlTokenStream tokens,
180                                         AstNode parentNode ) throws ParsingException {
181         assert tokens != null;
182         assert parentNode != null;
183 
184         markStartOfStatement(tokens);
185         // CREATE [UNIQUE] INDEX index-Name
186         // ON table-Name ( Simple-column-Name [ ASC | DESC ] [ , Simple-column-Name [ ASC | DESC ]] * )
187         tokens.consume(CREATE); // CREATE
188 
189         boolean isUnique = tokens.canConsume("UNIQUE");
190 
191         tokens.consume("INDEX");
192         String indexName = parseName(tokens);
193         tokens.consume("ON");
194         String tableName = parseName(tokens);
195 
196         AstNode indexNode = nodeFactory().node(indexName, parentNode, TYPE_CREATE_INDEX_STATEMENT);
197 
198         indexNode.setProperty(UNIQUE_INDEX, isUnique);
199         indexNode.setProperty(TABLE_NAME, tableName);
200 
201         parseIndexTableColumns(tokens, indexNode);
202 
203         parseUntilTerminator(tokens);
204 
205         markEndOfStatement(tokens, indexNode);
206 
207         return indexNode;
208     }
209 
210     private void parseIndexTableColumns( DdlTokenStream tokens,
211                                          AstNode indexNode ) throws ParsingException {
212         assert tokens != null;
213         assert indexNode != null;
214 
215         // Assume we start with open parenthesis '(', then we parse comma separated list of column names followed by optional
216         // ASC or DESC
217 
218         tokens.consume(L_PAREN); // EXPECTED
219 
220         while (!tokens.canConsume(R_PAREN)) {
221             String colName = parseName(tokens);
222             AstNode colRefNode = nodeFactory().node(colName, indexNode, TYPE_INDEX_COLUMN_REFERENCE);
223             if (tokens.canConsume("ASC")) {
224                 colRefNode.setProperty(ORDER, "ASC");
225             } else if (tokens.canConsume("DESC")) {
226                 colRefNode.setProperty(ORDER, "DESC");
227             }
228             tokens.canConsume(COMMA);
229         }
230     }
231 
    /**
     * Parses DDL CREATE FUNCTION statement
     * 
     * @param tokens the tokenized {@link DdlTokenStream} of the DDL input content; may not be null
     * @param parentNode the parent {@link AstNode} node; may not be null
     * @return the parsed CREATE FUNCTION statement node
     * @throws ParsingException
     */
    protected AstNode parseCreateFunction( DdlTokenStream tokens,
                                           AstNode parentNode ) throws ParsingException {
        assert tokens != null;
        assert parentNode != null;

        markStartOfStatement(tokens);
        // CREATE FUNCTION function-name ( [ FunctionParameter [, FunctionParameter] ] * )
        // RETURNS ReturnDataType [ FunctionElement ] *

        // FunctionElement
        // {
        // | LANGUAGE { JAVA }
        // | {DETERMINISTIC | NOT DETERMINISTIC}
        // | EXTERNAL NAME string
        // | PARAMETER STYLE {JAVA | DERBY_JDBC_RESULT_SET}
        // | { NO SQL | CONTAINS SQL | READS SQL DATA }
        // | { RETURNS NULL ON NULL INPUT | CALLED ON NULL INPUT }
        // }
        tokens.consume(CREATE, "FUNCTION");

        String functionName = parseName(tokens);

        AstNode functionNode = nodeFactory().node(functionName, parentNode, TYPE_CREATE_FUNCTION_STATEMENT);

        // Parenthesized (possibly empty) parameter list.
        parseFunctionParameters(tokens, functionNode);

        tokens.consume("RETURNS");

        if (tokens.canConsume("TABLE")) {
            // Table function: the return "type" is a column list, modeled as a nested table node
            // flagged with IS_TABLE_TYPE.
            AstNode tableNode = nodeFactory().node("TABLE", functionNode, TYPE_CREATE_TABLE_STATEMENT);
            parseColumnsAndConstraints(tokens, tableNode);
            tableNode.setProperty(IS_TABLE_TYPE, true);
        } else {
            // Assume DataType
            DataType datatype = getDatatypeParser().parse(tokens);
            if (datatype != null) {
                getDatatypeParser().setPropertiesOnNode(functionNode, datatype);
            } else {
                // A function must declare a return type; record a warning problem on the node
                // rather than aborting the parse.
                String msg = DdlSequencerI18n.missingReturnTypeForFunction.text(functionName);
                DdlParserProblem problem = new DdlParserProblem(Problems.WARNING, getCurrentMarkedPosition(), msg);
                addProblem(problem, functionNode);
            }
        }

        // Consume the FunctionElement clauses, one per loop iteration, until the statement
        // terminator. Each recognized clause becomes a TYPE_STATEMENT_OPTION child whose VALUE
        // holds the clause text.
        while (!isTerminator(tokens)) {
            if (tokens.matches("LANGUAGE")) {
                AstNode optionNode = nodeFactory().node("language", functionNode, TYPE_STATEMENT_OPTION);
                if (tokens.canConsume("LANGUAGE", "JAVA")) {
                    optionNode.setProperty(VALUE, "LANGUAGE JAVA");
                } else {
                    // LANGUAGE without JAVA: consume the keyword alone; the following token will
                    // be handled (or flagged as an error) by the next loop iteration.
                    tokens.consume("LANGUAGE");
                    optionNode.setProperty(VALUE, "LANGUAGE");
                }
            } else if (tokens.canConsume("DETERMINISTIC")) {
                AstNode optionNode = nodeFactory().node("deterministic", functionNode, TYPE_STATEMENT_OPTION);
                optionNode.setProperty(VALUE, "DETERMINISTIC");
            } else if (tokens.canConsume("NOT", "DETERMINISTIC")) {
                AstNode optionNode = nodeFactory().node("deterministic", functionNode, TYPE_STATEMENT_OPTION);
                optionNode.setProperty(VALUE, "NOT DETERMINISTIC");
            } else if (tokens.canConsume("EXTERNAL", "NAME")) {
                String extName = parseName(tokens);
                AstNode optionNode = nodeFactory().node("externalName", functionNode, TYPE_STATEMENT_OPTION);
                optionNode.setProperty(VALUE, "EXTERNAL NAME" + SPACE + extName);
            } else if (tokens.canConsume("PARAMETER", "STYLE")) {
                AstNode optionNode = nodeFactory().node("parameterStyle", functionNode, TYPE_STATEMENT_OPTION);
                if (tokens.canConsume("JAVA")) {
                    optionNode.setProperty(VALUE, "PARAMETER STYLE" + SPACE + "JAVA");
                } else {
                    // Only JAVA or DERBY_JDBC_RESULT_SET are valid; anything else is a parse error.
                    tokens.consume("DERBY_JDBC_RESULT_SET");
                    optionNode.setProperty(VALUE, "PARAMETER STYLE" + SPACE + "DERBY_JDBC_RESULT_SET");
                }
            } else if (tokens.canConsume("NO", "SQL")) {
                AstNode optionNode = nodeFactory().node("sqlStatus", functionNode, TYPE_STATEMENT_OPTION);
                optionNode.setProperty(VALUE, "NO SQL");
            } else if (tokens.canConsume("CONTAINS", "SQL")) {
                AstNode optionNode = nodeFactory().node("sqlStatus", functionNode, TYPE_STATEMENT_OPTION);
                optionNode.setProperty(VALUE, "CONTAINS SQL");
            } else if (tokens.canConsume("READS", "SQL", "DATA")) {
                AstNode optionNode = nodeFactory().node("sqlStatus", functionNode, TYPE_STATEMENT_OPTION);
                optionNode.setProperty(VALUE, "READS SQL DATA");
            } else if (tokens.canConsume("RETURNS", "NULL", "ON", "NULL", "INPUT")) {
                AstNode optionNode = nodeFactory().node("nullInput", functionNode, TYPE_STATEMENT_OPTION);
                optionNode.setProperty(VALUE, "RETURNS NULL ON NULL INPUT");
            } else if (tokens.canConsume("CALLED", "ON", "NULL", "INPUT")) {
                AstNode optionNode = nodeFactory().node("nullInput", functionNode, TYPE_STATEMENT_OPTION);
                optionNode.setProperty(VALUE, "CALLED ON NULL INPUT");
            } else {
                // Unrecognized token before the terminator: record an error and stop consuming
                // so the parse can continue with the next statement.
                String msg = DdlSequencerI18n.errorParsingDdlContent.text(functionName);
                DdlParserProblem problem = new DdlParserProblem(Problems.ERROR, getCurrentMarkedPosition(), msg);
                addProblem(problem, functionNode);
                break;
            }
        }

        markEndOfStatement(tokens, functionNode);

        return functionNode;
    }
338 
339     private void parseFunctionParameters( DdlTokenStream tokens,
340                                           AstNode functionNode ) throws ParsingException {
341         assert tokens != null;
342         assert functionNode != null;
343 
344         // Assume we start with open parenthesis '(', then we parse comma separated list of function parameters
345         // which have the form: [ parameter-Name ] DataType
346         // So, try getting datatype, if datatype == NULL, then parseName() & parse datatype, then repeat as long as next token is
347         // ","
348 
349         tokens.consume(L_PAREN); // EXPECTED
350 
351         while (!tokens.canConsume(R_PAREN)) {
352             DataType datatype = getDatatypeParser().parse(tokens);
353             if (datatype == null) {
354                 String paramName = parseName(tokens);
355                 datatype = getDatatypeParser().parse(tokens);
356                 AstNode paramNode = nodeFactory().node(paramName, functionNode, TYPE_FUNCTION_PARAMETER);
357                 getDatatypeParser().setPropertiesOnNode(paramNode, datatype);
358             } else {
359                 AstNode paramNode = nodeFactory().node("functionParameter", functionNode, TYPE_FUNCTION_PARAMETER);
360                 getDatatypeParser().setPropertiesOnNode(paramNode, datatype);
361             }
362             tokens.canConsume(COMMA);
363         }
364     }
365 
366     /**
367      * Parses DDL CREATE PROCEDURE statement
368      * 
369      * @param tokens the tokenized {@link DdlTokenStream} of the DDL input content; may not be null
370      * @param parentNode the parent {@link AstNode} node; may not be null
371      * @return the parsed CREATE PROCEDURE statement node
372      * @throws ParsingException
373      */
374     protected AstNode parseCreateProcedure( DdlTokenStream tokens,
375                                             AstNode parentNode ) throws ParsingException {
376         assert tokens != null;
377         assert parentNode != null;
378 
379         markStartOfStatement(tokens);
380 
381         tokens.consume(CREATE, "PROCEDURE");
382 
383         String functionName = parseName(tokens);
384 
385         AstNode functionNode = nodeFactory().node(functionName, parentNode, TYPE_CREATE_FUNCTION_STATEMENT);
386 
387         markEndOfStatement(tokens, functionNode);
388 
389         return functionNode;
390     }
391 
392     /**
393      * {@inheritDoc}
394      * 
395      * @see org.modeshape.sequencer.ddl.StandardDdlParser#parseDropStatement(org.modeshape.sequencer.ddl.DdlTokenStream,
396      *      org.modeshape.sequencer.ddl.node.AstNode)
397      */
398     @Override
399     protected AstNode parseDropStatement( DdlTokenStream tokens,
400                                           AstNode parentNode ) throws ParsingException {
401         assert tokens != null;
402         assert parentNode != null;
403 
404         AstNode dropNode = null;
405 
406         String name = null;
407 
408         if (tokens.matches(STMT_DROP_FUNCTION)) {
409             markStartOfStatement(tokens);
410             tokens.consume(STMT_DROP_FUNCTION);
411             name = parseName(tokens);
412             dropNode = nodeFactory().node(name, parentNode, TYPE_DROP_FUNCTION_STATEMENT);
413         } else if (tokens.matches(STMT_DROP_INDEX)) {
414             markStartOfStatement(tokens);
415             tokens.consume(STMT_DROP_INDEX);
416             name = parseName(tokens);
417             dropNode = nodeFactory().node(name, parentNode, TYPE_DROP_INDEX_STATEMENT);
418         } else if (tokens.matches(STMT_DROP_PROCEDURE)) {
419             markStartOfStatement(tokens);
420             tokens.consume(STMT_DROP_PROCEDURE);
421             name = parseName(tokens);
422             dropNode = nodeFactory().node(name, parentNode, TYPE_DROP_PROCEDURE_STATEMENT);
423         } else if (tokens.matches(STMT_DROP_ROLE)) {
424             markStartOfStatement(tokens);
425             tokens.consume(STMT_DROP_ROLE);
426             name = parseName(tokens);
427             dropNode = nodeFactory().node(name, parentNode, TYPE_DROP_ROLE_STATEMENT);
428         } else if (tokens.matches(STMT_DROP_SYNONYM)) {
429             markStartOfStatement(tokens);
430             tokens.consume(STMT_DROP_SYNONYM);
431             name = parseName(tokens);
432             dropNode = nodeFactory().node(name, parentNode, TYPE_DROP_SYNONYM_STATEMENT);
433         } else if (tokens.matches(STMT_DROP_TRIGGER)) {
434             markStartOfStatement(tokens);
435             tokens.consume(STMT_DROP_TRIGGER);
436             name = parseName(tokens);
437             dropNode = nodeFactory().node(name, parentNode, TYPE_DROP_TRIGGER_STATEMENT);
438         }
439 
440         if (dropNode != null) {
441             markEndOfStatement(tokens, dropNode);
442         }
443 
444         if (dropNode == null) {
445             dropNode = super.parseDropStatement(tokens, parentNode);
446         }
447 
448         return dropNode;
449     }
450 
451     /**
452      * {@inheritDoc} Syntax for tables GRANT privilege-type ON [TABLE] { table-Name | view-Name } TO grantees Syntax for routines
453      * GRANT EXECUTE ON { FUNCTION | PROCEDURE } routine-designator TO grantees Syntax for roles GRANT roleName [ {, roleName }* ]
454      * TO grantees privilege-types ALL PRIVILEGES | privilege-list privilege-list table-privilege {, table-privilege }*
455      * table-privilege DELETE | INSERT | REFERENCES [column list] | SELECT [column list] | TRIGGER | UPDATE [column list] column
456      * list ( column-identifier {, column-identifier}* ) GRANT
457      * 
458      * @see org.modeshape.sequencer.ddl.StandardDdlParser#parseGrantStatement(org.modeshape.sequencer.ddl.DdlTokenStream,
459      *      org.modeshape.sequencer.ddl.node.AstNode)
460      */
461     @Override
462     protected AstNode parseGrantStatement( DdlTokenStream tokens,
463                                            AstNode parentNode ) throws ParsingException {
464         assert tokens != null;
465         assert parentNode != null;
466         assert tokens.matches(GRANT);
467 
468         markStartOfStatement(tokens);
469 
470         // Syntax for tables
471         //
472         // GRANT privilege-type ON [TABLE] { table-Name | view-Name } TO grantees
473         //
474         // Syntax for routines
475         //
476         // GRANT EXECUTE ON { FUNCTION | PROCEDURE } {function-name | procedure-name} TO grantees
477         //
478         // Syntax for roles
479         //
480         // GRANT roleName [ {, roleName }* ] TO grantees
481 
482         // privilege-types
483         //
484         // ALL PRIVILEGES | privilege-list
485         //
486         AstNode grantNode = null;
487         boolean allPrivileges = false;
488 
489         List<AstNode> privileges = new ArrayList<AstNode>();
490 
491         tokens.consume("GRANT");
492         if (tokens.canConsume("EXECUTE", "ON")) {
493             AstNode node = nodeFactory().node("privilege");
494             nodeFactory().setType(node, GRANT_PRIVILEGE);
495             node.setProperty(TYPE, "EXECUTE");
496             privileges = new ArrayList<AstNode>();
497             privileges.add(node);
498             if (tokens.canConsume("FUNCTION")) {
499                 String name = parseName(tokens);
500                 grantNode = nodeFactory().node(name, parentNode, TYPE_GRANT_ON_FUNCTION_STATEMENT);
501             } else {
502                 tokens.consume("PROCEDURE");
503                 String name = parseName(tokens);
504                 grantNode = nodeFactory().node(name, parentNode, TYPE_GRANT_ON_PROCEDURE_STATEMENT);
505             }
506         } else {
507 
508             if (tokens.canConsume("ALL", "PRIVILEGES")) {
509                 allPrivileges = true;
510             } else {
511                 parseGrantPrivileges(tokens, privileges);
512 
513                 if (privileges.isEmpty()) {
514                     // ASSUME: GRANT roleName [ {, roleName }* ] TO grantees
515                     grantNode = nodeFactory().node("grantRoles", parentNode, TYPE_GRANT_ROLES_STATEMENT);
516                     do {
517                         String roleName = parseName(tokens);
518                         nodeFactory().node(roleName, grantNode, ROLE_NAME);
519                     } while (tokens.canConsume(COMMA));
520                 }
521             }
522             if (grantNode == null) {
523                 tokens.consume("ON");
524                 tokens.canConsume(TABLE); // OPTIONAL
525                 String name = parseName(tokens);
526                 grantNode = nodeFactory().node(name, parentNode, TYPE_GRANT_ON_TABLE_STATEMENT);
527                 // Attach privileges to grant node
528                 for (AstNode node : privileges) {
529                     node.setParent(grantNode);
530                 }
531                 if (allPrivileges) {
532                     grantNode.setProperty(ALL_PRIVILEGES, allPrivileges);
533                 }
534             }
535 
536         }
537 
538         tokens.consume("TO");
539 
540         do {
541             String grantee = parseName(tokens);
542             nodeFactory().node(grantee, grantNode, GRANTEE);
543         } while (tokens.canConsume(COMMA));
544 
545         markEndOfStatement(tokens, grantNode);
546 
547         return grantNode;
548     }
549 
550     /**
551      * {@inheritDoc}
552      * 
553      * @see org.modeshape.sequencer.ddl.StandardDdlParser#parseGrantPrivileges(org.modeshape.sequencer.ddl.DdlTokenStream,
554      *      java.util.List)
555      */
556     @Override
557     protected void parseGrantPrivileges( DdlTokenStream tokens,
558                                          List<AstNode> privileges ) throws ParsingException {
559         // privilege-types
560         //
561         // ALL PRIVILEGES | privilege-list
562         //
563         // privilege-list
564         //
565         // table-privilege {, table-privilege }*
566         //
567         // table-privilege
568         // DELETE |
569         // INSERT |
570         // REFERENCES [column list] |
571         // SELECT [column list] |
572         // TRIGGER |
573         // UPDATE [column list]
574         // column list
575         // ( column-identifier {, column-identifier}* )
576 
577         do {
578             AstNode node = null;
579 
580             if (tokens.canConsume(DELETE)) {
581                 node = nodeFactory().node("privilege");
582                 node.setProperty(TYPE, DELETE);
583             } else if (tokens.canConsume(INSERT)) {
584                 node = nodeFactory().node("privilege");
585                 node.setProperty(TYPE, INSERT);
586             } else if (tokens.canConsume("REFERENCES")) {
587                 node = nodeFactory().node("privilege");
588                 node.setProperty(TYPE, "REFERENCES");
589                 parseColumnNameList(tokens, node, TYPE_COLUMN_REFERENCE);
590             } else if (tokens.canConsume(SELECT)) {
591                 node = nodeFactory().node("privilege");
592                 node.setProperty(TYPE, SELECT);
593                 parseColumnNameList(tokens, node, TYPE_COLUMN_REFERENCE);
594             } else if (tokens.canConsume("TRIGGER")) {
595                 node = nodeFactory().node("privilege");
596                 node.setProperty(TYPE, "TRIGGER");
597             } else if (tokens.canConsume(UPDATE)) {
598                 node = nodeFactory().node("privilege");
599                 node.setProperty(TYPE, UPDATE);
600                 parseColumnNameList(tokens, node, TYPE_COLUMN_REFERENCE);
601             }
602             if (node == null) {
603                 break;
604             }
605             nodeFactory().setType(node, GRANT_PRIVILEGE);
606             privileges.add(node);
607 
608         } while (tokens.canConsume(COMMA));
609 
610     }
611 
612     /**
613      * {@inheritDoc}
614      * 
615      * @see org.modeshape.sequencer.ddl.StandardDdlParser#parseAlterTableStatement(org.modeshape.sequencer.ddl.DdlTokenStream,
616      *      org.modeshape.sequencer.ddl.node.AstNode)
617      */
618     @Override
619     protected AstNode parseAlterTableStatement( DdlTokenStream tokens,
620                                                 AstNode parentNode ) throws ParsingException {
621         assert tokens != null;
622         assert parentNode != null;
623 
624         markStartOfStatement(tokens);
625 
626         // ALTER TABLE table-Name
627         // {
628         // ADD COLUMN column-definition |
629         // ADD CONSTRAINT clause |
630         // DROP [ COLUMN ] column-name [ CASCADE | RESTRICT ] |
631         // DROP { PRIMARY KEY | FOREIGN KEY constraint-name | UNIQUE constraint-name | CHECK constraint-name | CONSTRAINT
632         // constraint-name } |
633         // ALTER [ COLUMN ] column-alteration |
634         // LOCKSIZE { ROW | TABLE }
635         // }
636 
637         tokens.consume(ALTER, TABLE); // consumes 'ALTER TABLE'
638         String tableName = parseName(tokens);
639 
640         AstNode alterTableNode = nodeFactory().node(tableName, parentNode, TYPE_ALTER_TABLE_STATEMENT);
641 
642         // System.out.println("  >> PARSIN ALTER STATEMENT >>  TABLE Name = " + tableName);
643 
644         if (tokens.canConsume("ADD")) {
645             if (isTableConstraint(tokens)) {
646                 parseTableConstraint(tokens, alterTableNode, true);
647             } else {
648                 // This segment can also be enclosed in "()" brackets to handle multiple ColumnDefinition ADDs
649                 if (tokens.matches(L_PAREN)) {
650                     parseColumns(tokens, alterTableNode, true);
651                 } else {
652                     parseSingleTerminatedColumnDefinition(tokens, alterTableNode, true);
653                 }
654             }
655 
656         } else if (tokens.canConsume("DROP")) {
657             // DROP { PRIMARY KEY | FOREIGN KEY constraint-name | UNIQUE constraint-name | CHECK constraint-name | CONSTRAINT
658             // constraint-name }
659             if (tokens.canConsume("PRIMARY", "KEY")) {
660                 String name = parseName(tokens); // constraint name
661                 nodeFactory().node(name, alterTableNode, TYPE_DROP_TABLE_CONSTRAINT_DEFINITION);
662             } else if (tokens.canConsume("FOREIGN", "KEY")) {
663                 String name = parseName(tokens); // constraint name
664                 nodeFactory().node(name, alterTableNode, TYPE_DROP_TABLE_CONSTRAINT_DEFINITION);
665             } else if (tokens.canConsume("UNIQUE")) {
666                 String name = parseName(tokens); // constraint name
667                 nodeFactory().node(name, alterTableNode, TYPE_DROP_TABLE_CONSTRAINT_DEFINITION);
668             } else if (tokens.canConsume("CHECK")) {
669                 String name = parseName(tokens); // constraint name
670                 nodeFactory().node(name, alterTableNode, TYPE_DROP_TABLE_CONSTRAINT_DEFINITION);
671             } else if (tokens.canConsume("CONSTRAINT")) {
672                 String name = parseName(tokens); // constraint name
673                 nodeFactory().node(name, alterTableNode, TYPE_DROP_TABLE_CONSTRAINT_DEFINITION);
674             } else {
675                 // DROP [ COLUMN ] column-name [ CASCADE | RESTRICT ]
676                 tokens.canConsume("COLUMN"); // "COLUMN" is optional
677 
678                 String columnName = parseName(tokens);
679 
680                 AstNode columnNode = nodeFactory().node(columnName, alterTableNode, TYPE_DROP_COLUMN_DEFINITION);
681                 columnNode.setProperty(StandardDdlLexicon.NAME, columnName);
682 
683                 if (tokens.canConsume(DropBehavior.CASCADE)) {
684                     columnNode.setProperty(StandardDdlLexicon.DROP_BEHAVIOR, DropBehavior.CASCADE);
685                 } else if (tokens.canConsume(DropBehavior.RESTRICT)) {
686                     columnNode.setProperty(StandardDdlLexicon.DROP_BEHAVIOR, DropBehavior.RESTRICT);
687                 }
688             }
689         } else if (tokens.canConsume("ALTER")) {
690             // column-alteration
691             //
692             // ALTER [ COLUMN ] column-Name SET DATA TYPE VARCHAR(integer) |
693             // ALTER [ COLUMN ] column-Name SET DATA TYPE VARCHAR FOR BIT DATA(integer) |
694             // ALTER [ COLUMN ] column-name SET INCREMENT BY integer-constant |
695             // ALTER [ COLUMN ] column-name RESTART WITH integer-constant |
696             // ALTER [ COLUMN ] column-name [ NOT ] NULL |
697             // ALTER [ COLUMN ] column-name [ WITH | SET ] DEFAULT default-value |
698             // ALTER [ COLUMN ] column-name DROP DEFAULT
699 
700             tokens.canConsume("COLUMN");
701             String alterColumnName = parseName(tokens);
702 
703             AstNode columnNode = nodeFactory().node(alterColumnName, alterTableNode, TYPE_ALTER_COLUMN_DEFINITION);
704 
705             if (tokens.matches("DEFAULT")) {
706                 parseDefaultClause(tokens, columnNode);
707             } else if (tokens.canConsume("SET")) {
708                 if (tokens.canConsume("DATA", "TYPE")) {
709                     DataType datatype = getDatatypeParser().parse(tokens);
710 
711                     columnNode.setProperty(StandardDdlLexicon.DATATYPE_NAME, datatype.getName());
712                     if (datatype.getLength() >= 0) {
713                         columnNode.setProperty(StandardDdlLexicon.DATATYPE_LENGTH, datatype.getLength());
714                     }
715                     if (datatype.getPrecision() >= 0) {
716                         columnNode.setProperty(StandardDdlLexicon.DATATYPE_PRECISION, datatype.getPrecision());
717                     }
718                     if (datatype.getScale() >= 0) {
719                         columnNode.setProperty(StandardDdlLexicon.DATATYPE_SCALE, datatype.getScale());
720                     }
721 
722                 } else if (tokens.canConsume("INCREMENT")) {
723                     tokens.consume("BY", DdlTokenStream.ANY_VALUE);
724                 }
725                 if (tokens.matches("DEFAULT")) {
726                     parseDefaultClause(tokens, columnNode);
727                 }
728             } else if (tokens.canConsume("WITH")) {
729                 parseDefaultClause(tokens, columnNode);
730             } else {
731                 tokens.canConsume("RESTART", "WITH", DdlTokenStream.ANY_VALUE);
732                 tokens.canConsume("DROP", "DEFAULT");
733 
734                 if (tokens.canConsume("NOT", "NULL")) {
735                     columnNode.setProperty(StandardDdlLexicon.NULLABLE, "NOT NULL");
736                 } else if (tokens.canConsume("NULL")) {
737                     columnNode.setProperty(StandardDdlLexicon.NULLABLE, "NULL");
738                 }
739             }
740 
741         } else if (tokens.canConsume("LOCKSIZE")) {
742             tokens.canConsume("ROWS");
743             tokens.canConsume("TABLE");
744         }
745 
746         markEndOfStatement(tokens, alterTableNode);
747 
748         return alterTableNode;
749     }
750 
751     /**
752      * {@inheritDoc}
753      * 
754      * @see org.modeshape.sequencer.ddl.StandardDdlParser#parseColumnDefinition(org.modeshape.sequencer.ddl.DdlTokenStream,
755      *      org.modeshape.sequencer.ddl.node.AstNode, boolean)
756      */
757     @Override
758     protected void parseColumnDefinition( DdlTokenStream tokens,
759                                           AstNode tableNode,
760                                           boolean isAlterTable ) throws ParsingException {
761         // column-definition
762         //
763         // Simple-column-Name DataType
764         // [ ColumnDefinition-level-constraint ]*
765         // [ [ WITH ] DEFAULT { ConstantExpression | NULL } |generated-column-spec ]
766         // [ ColumnDefinition-level-constraint ]*
767 
768         // generated-column-spec
769         //
770         // [ GENERATED { ALWAYS | BY DEFAULT } AS IDENTITY [ ( START WITH IntegerConstant [ ,INCREMENT BY IntegerConstant] ) ] ] ]
771 
772         // EXAMPLE COLUMNS
773         // (i INT GENERATED BY DEFAULT AS IDENTITY (START WITH 2, INCREMENT BY 1),
774         // ch CHAR(50));
775 
776         tokens.canConsume("COLUMN"); // FOR ALTER TABLE ADD [COLUMN] case
777         String columnName = parseName(tokens);
778         DataType datatype = getDatatypeParser().parse(tokens);
779 
780         AstNode columnNode = nodeFactory().node(columnName, tableNode, TYPE_COLUMN_DEFINITION);
781 
782         columnNode.setProperty(StandardDdlLexicon.DATATYPE_NAME, datatype.getName());
783         if (datatype.getLength() >= 0) {
784             columnNode.setProperty(StandardDdlLexicon.DATATYPE_LENGTH, datatype.getLength());
785         }
786         if (datatype.getPrecision() >= 0) {
787             columnNode.setProperty(StandardDdlLexicon.DATATYPE_PRECISION, datatype.getPrecision());
788         }
789         if (datatype.getScale() >= 0) {
790             columnNode.setProperty(StandardDdlLexicon.DATATYPE_SCALE, datatype.getScale());
791         }
792 
793         // Now clauses and constraints can be defined in any order, so we need to keep parsing until we get to a comma
794         // Now clauses and constraints can be defined in any order, so we need to keep parsing until we get to a comma
795         StringBuffer unusedTokensSB = new StringBuffer();
796 
797         while (tokens.hasNext() && !tokens.matches(COMMA)) {
798             boolean parsedDefaultClause = parseDefaultClause(tokens, columnNode);
799             if (!parsedDefaultClause) {
800                 boolean parsedCollate = parseCollateClause(tokens, columnNode);
801                 boolean parsedConstraint = parseColumnConstraint(tokens, columnNode, isAlterTable);
802                 boolean parsedGeneratedColumn = parseGeneratedColumnSpecClause(tokens, columnNode);
803                 if (!parsedCollate && !parsedConstraint && !parsedGeneratedColumn) {
804                     // THIS IS AN ERROR. NOTHING FOUND.
805                     // NEED TO absorb tokens
806                     unusedTokensSB.append(SPACE).append(tokens.consume());
807                 }
808             }
809             tokens.canConsume(DdlTokenizer.COMMENT);
810         }
811 
812         if (unusedTokensSB.length() > 0) {
813             String msg = DdlSequencerI18n.unusedTokensParsingColumnDefinition.text(tableNode.getProperty(StandardDdlLexicon.NAME));
814             DdlParserProblem problem = new DdlParserProblem(Problems.WARNING, getCurrentMarkedPosition(), msg);
815             problem.setUnusedSource(unusedTokensSB.toString());
816             addProblem(problem, tableNode);
817         }
818 
819     }
820 
821     /**
822      * Utility method designed to parse columns within an ALTER TABLE ADD statement.
823      * 
824      * @param tokens the tokenized {@link DdlTokenStream} of the DDL input content; may not be null
825      * @param tableNode
826      * @param isAlterTable
827      * @throws ParsingException
828      */
829     protected void parseColumns( DdlTokenStream tokens,
830                                  AstNode tableNode,
831                                  boolean isAlterTable ) throws ParsingException {
832         String tableElementString = getTableElementsString(tokens, false);
833 
834         DdlTokenStream localTokens = new DdlTokenStream(tableElementString, DdlTokenStream.ddlTokenizer(false), false);
835 
836         localTokens.start();
837 
838         StringBuffer unusedTokensSB = new StringBuffer();
839 
840         do {
841             if (isColumnDefinitionStart(localTokens)) {
842                 parseColumnDefinition(localTokens, tableNode, isAlterTable);
843             } else {
844                 // THIS IS AN ERROR. NOTHING FOUND.
845                 // NEED TO absorb tokens
846                 unusedTokensSB.append(SPACE).append(localTokens.consume());
847             }
848         } while (localTokens.canConsume(COMMA));
849 
850         if (unusedTokensSB.length() > 0) {
851             String msg = DdlSequencerI18n.unusedTokensParsingColumnDefinition.text(tableNode.getProperty(StandardDdlLexicon.NAME));
852             DdlParserProblem problem = new DdlParserProblem(Problems.WARNING, getCurrentMarkedPosition(), msg);
853             problem.setUnusedSource(unusedTokensSB.toString());
854             addProblem(problem, tableNode);
855         }
856     }
857 
858     private boolean parseGeneratedColumnSpecClause( DdlTokenStream tokens,
859                                                     AstNode columnNode ) throws ParsingException {
860         assert tokens != null;
861         assert columnNode != null;
862         // generated-column-spec
863         //
864         // [ GENERATED { ALWAYS | BY DEFAULT } AS IDENTITY [ ( START WITH IntegerConstant [ ,INCREMENT BY IntegerConstant] ) ] ] ]
865         if (tokens.canConsume("GENERATED")) {
866             StringBuffer sb = new StringBuffer("GENERATED");
867 
868             if (tokens.canConsume("ALWAYS")) {
869                 sb.append(SPACE).append("ALWAYS");
870             } else {
871                 tokens.consume("BY", "DEFAULT");
872                 sb.append(SPACE).append("BY DEFAULT");
873             }
874 
875             tokens.consume("AS", "IDENTITY");
876             sb.append(SPACE).append("AS IDENTITY");
877 
878             if (tokens.canConsume(L_PAREN, "START", "WITH")) {
879                 String value = tokens.consume(); // integer constant
880                 sb.append(SPACE).append(L_PAREN).append(SPACE).append("START WITH").append(SPACE).append(value);
881                 if (tokens.canConsume(COMMA, "INCREMENT", "BY")) {
882                     value = tokens.consume();// integer constant
883                     sb.append(COMMA).append("INCREMENT BY").append(SPACE).append(value);
884                 }
885                 tokens.consume(R_PAREN);
886                 sb.append(SPACE).append(R_PAREN);
887             }
888             AstNode propNode = nodeFactory().node("GENERATED_COLUMN_SPEC", columnNode, COLUMN_ATTRIBUTE_TYPE);
889             propNode.setProperty(PROPERTY_VALUE, sb.toString());
890 
891             return true;
892         }
893 
894         return false;
895     }
896 
897     private AstNode parseDeclareGlobalTempTable( DdlTokenStream tokens,
898                                                  AstNode parentNode ) throws ParsingException {
899         assert tokens != null;
900         assert parentNode != null;
901 
902         markStartOfStatement(tokens);
903 
904         // DECLARE GLOBAL TEMPORARY TABLE table-Name
905         // { column-definition [ , column-definition ] * }
906         // [ ON COMMIT {DELETE | PRESERVE} ROWS ]
907         // NOT LOGGED [ON ROLLBACK DELETE ROWS]
908 
909         tokens.consume(STMT_DECLARE_GLOBAL_TEMP_TABLE);
910         String name = parseName(tokens);
911 
912         AstNode node = nodeFactory().node(name, parentNode, TYPE_DECLARE_GLOBAL_TEMPORARY_TABLE_STATEMENT);
913 
914         parseColumnsAndConstraints(tokens, node);
915 
916         if (tokens.canConsume("ON", "COMMIT")) {
917             AstNode optionNode = nodeFactory().node("onCommit", node, TYPE_STATEMENT_OPTION);
918             if (tokens.canConsume("DELETE", "ROWS")) {
919                 optionNode.setProperty(VALUE, "ON COMMIT DELETE ROWS");
920             } else {
921                 tokens.consume("PRESERVE", "ROWS");
922                 optionNode.setProperty(VALUE, "ON COMMIT PRESERVE ROWS");
923             }
924         }
925         tokens.consume("NOT", "LOGGED");
926 
927         if (tokens.canConsume("ON", "ROLLBACK", "DELETE", "ROWS")) {
928             AstNode optionNode = nodeFactory().node("onRollback", node, TYPE_STATEMENT_OPTION);
929             optionNode.setProperty(VALUE, "ON ROLLBACK DELETE ROWS");
930         }
931 
932         markEndOfStatement(tokens, node);
933 
934         return node;
935     }
936 
937     private AstNode parseLockTable( DdlTokenStream tokens,
938                                     AstNode parentNode ) throws ParsingException {
939         assert tokens != null;
940         assert parentNode != null;
941 
942         markStartOfStatement(tokens);
943 
944         // LOCK TABLE table-Name IN { SHARE | EXCLUSIVE } MODE;
945 
946         tokens.consume(STMT_LOCK_TABLE);
947 
948         String name = parseName(tokens);
949 
950         AstNode node = nodeFactory().node(name, parentNode, TYPE_LOCK_TABLE_STATEMENT);
951 
952         tokens.consume("IN");
953 
954         if (tokens.canConsume("SHARE")) {
955             AstNode propNode = nodeFactory().node("lockMode", node, TYPE_STATEMENT_OPTION);
956             propNode.setProperty(VALUE, "SHARE");
957         } else {
958             tokens.consume("EXCLUSIVE");
959             AstNode propNode = nodeFactory().node("lockMode", node, TYPE_STATEMENT_OPTION);
960             propNode.setProperty(VALUE, "EXCLUSIVE");
961         }
962         tokens.consume("MODE");
963 
964         markEndOfStatement(tokens, node);
965 
966         return node;
967     }
968 
969     private AstNode parseRenameTable( DdlTokenStream tokens,
970                                       AstNode parentNode ) throws ParsingException {
971         assert tokens != null;
972         assert parentNode != null;
973 
974         markStartOfStatement(tokens);
975 
976         // RENAME TABLE SAMP.EMP_ACT TO EMPLOYEE_ACT;
977 
978         tokens.consume(STMT_RENAME_TABLE);
979 
980         String oldName = parseName(tokens);
981 
982         AstNode node = nodeFactory().node(oldName, parentNode, TYPE_RENAME_TABLE_STATEMENT);
983 
984         tokens.consume("TO");
985 
986         String newName = parseName(tokens);
987 
988         node.setProperty(NEW_NAME, newName);
989 
990         markEndOfStatement(tokens, node);
991 
992         return node;
993     }
994 
995     private AstNode parseRenameIndex( DdlTokenStream tokens,
996                                       AstNode parentNode ) throws ParsingException {
997         assert tokens != null;
998         assert parentNode != null;
999 
1000         markStartOfStatement(tokens);
1001 
1002         // RENAME TABLE SAMP.EMP_ACT TO EMPLOYEE_ACT;
1003 
1004         tokens.consume(STMT_RENAME_INDEX);
1005 
1006         String oldName = parseName(tokens);
1007 
1008         AstNode node = nodeFactory().node(oldName, parentNode, TYPE_RENAME_INDEX_STATEMENT);
1009 
1010         tokens.consume("TO");
1011 
1012         String newName = parseName(tokens);
1013 
1014         node.setProperty(NEW_NAME, newName);
1015 
1016         markEndOfStatement(tokens, node);
1017 
1018         return node;
1019     }
1020 
1021     private AstNode parseCreateSynonym( DdlTokenStream tokens,
1022                                         AstNode parentNode ) throws ParsingException {
1023         assert tokens != null;
1024         assert parentNode != null;
1025 
1026         markStartOfStatement(tokens);
1027         // CREATE SYNONYM synonym-Name FOR { view-Name | table-Name }
1028 
1029         tokens.consume(STMT_CREATE_SYNONYM);
1030 
1031         String name = parseName(tokens);
1032 
1033         AstNode node = nodeFactory().node(name, parentNode, TYPE_CREATE_SYNONYM_STATEMENT);
1034 
1035         tokens.consume("FOR");
1036 
1037         String tableOrViewName = parseName(tokens);
1038 
1039         node.setProperty(TABLE_NAME, tableOrViewName);
1040 
1041         markEndOfStatement(tokens, node);
1042 
1043         return node;
1044     }
1045 
    /**
     * Parses a Derby CREATE TRIGGER statement into an AST node. The before/after timing, event type,
     * FOR EACH granularity and MODE are captured as statement options; the triggered SQL is stored
     * under the SQL property.
     * 
     * @param tokens the tokenized {@link DdlTokenStream} of the DDL input content; may not be null
     * @param parentNode the parent {@link AstNode}; may not be null
     * @return the statement node
     * @throws ParsingException
     */
    private AstNode parseCreateTrigger( DdlTokenStream tokens,
                                        AstNode parentNode ) throws ParsingException {
        assert tokens != null;
        assert parentNode != null;

        markStartOfStatement(tokens);
        // CREATE TRIGGER TriggerName
        // { AFTER | NO CASCADE BEFORE }
        // { INSERT | DELETE | UPDATE [ OF column-Name [, column-Name]* ] }
        // ON table-Name
        // [ ReferencingClause ]
        // [ FOR EACH { ROW | STATEMENT } ] [ MODE DB2SQL ]
        // Triggered-SQL-statement

        // ReferencingClause
        // REFERENCING
        // {
        // { OLD | NEW } [ ROW ] [ AS ] correlation-Name [ { OLD | NEW } [ ROW ] [ AS ] correlation-Name ] |
        // { OLD TABLE | NEW TABLE } [ AS ] Identifier [ { OLD TABLE | NEW TABLE } [ AS ] Identifier ] |
        // { OLD_TABLE | NEW_TABLE } [ AS ] Identifier [ { OLD_TABLE | NEW_TABLE } [AS] Identifier ]
        // }

        // EXAMPLE:
        // CREATE TRIGGER t1 NO CASCADE BEFORE UPDATE ON x
        // FOR EACH ROW MODE DB2SQL
        // values app.notifyEmail('Jerry', 'Table x is about to be updated');

        tokens.consume(STMT_CREATE_TRIGGER);

        String name = parseName(tokens);

        AstNode node = nodeFactory().node(name, parentNode, TYPE_CREATE_TRIGGER_STATEMENT);

        // Remembers the event keyword (INSERT/DELETE/UPDATE) so the referencing clause below can tell
        // where a correlation name ends and the triggered statement begins.
        String type = null;

        // { AFTER | NO CASCADE BEFORE } — recorded as the "beforeOrAfter" statement option.
        if (tokens.canConsume("AFTER")) {
            AstNode optionNode = nodeFactory().node("beforeOrAfter", node, TYPE_STATEMENT_OPTION);
            optionNode.setProperty(VALUE, "AFTER");
        } else {
            tokens.consume("NO", "CASCADE", "BEFORE");
            AstNode optionNode = nodeFactory().node("beforeOrAfter", node, TYPE_STATEMENT_OPTION);
            optionNode.setProperty(VALUE, "NO CASCADE BEFORE");
        }

        // { INSERT | DELETE | UPDATE } — recorded as the "eventType" statement option.
        if (tokens.canConsume(INSERT)) {
            AstNode optionNode = nodeFactory().node("eventType", node, TYPE_STATEMENT_OPTION);
            optionNode.setProperty(VALUE, INSERT);
            type = INSERT;
        } else if (tokens.canConsume(DELETE)) {
            AstNode optionNode = nodeFactory().node("eventType", node, TYPE_STATEMENT_OPTION);
            optionNode.setProperty(VALUE, DELETE);
            type = DELETE;
        } else {
            tokens.consume(UPDATE);
            AstNode optionNode = nodeFactory().node("eventType", node, TYPE_STATEMENT_OPTION);
            optionNode.setProperty(VALUE, UPDATE);
            type = UPDATE;
        }

        // Optional "OF column-Name [, column-Name]*" — each column becomes a column-reference child.
        if (tokens.canConsume("OF")) {
            // Parse comma separated column names
            String colName = parseName(tokens);
            nodeFactory().node(colName, node, TYPE_COLUMN_REFERENCE);

            while (tokens.canConsume(COMMA)) {
                colName = parseName(tokens);
                nodeFactory().node(colName, node, TYPE_COLUMN_REFERENCE);
            }
        }
        tokens.consume("ON");

        String tableName = parseName(tokens);

        node.setProperty(TABLE_NAME, tableName);

        if (tokens.canConsume("REFERENCING")) {
            // ReferencingClause
            // REFERENCING
            // {
            // { OLD | NEW } [ ROW ] [ AS ] correlation-Name [ { OLD | NEW } [ ROW ] [ AS ] correlation-Name ] |
            // { OLD TABLE | NEW TABLE } [ AS ] Identifier [ { OLD TABLE | NEW TABLE } [ AS ] Identifier ] |
            // { OLD_TABLE | NEW_TABLE } [ AS ] Identifier [ { OLD_TABLE | NEW_TABLE } [AS] Identifier ]
            // }
            //
            // NOTE(review): the clause text is accumulated into 'sb' below but 'sb' is never attached to
            // the AST node — the referencing clause is consumed and then discarded. Presumably intentional
            // (tolerated but not modeled); verify before relying on it being absent from the AST.
            //
            // The matchesAnyOf("FOR", "MODE", type) guards keep a following FOR EACH / MODE keyword or the
            // repeated event keyword from being mistaken for a correlation name.

            StringBuffer sb = new StringBuffer();
            if (tokens.matchesAnyOf("OLD", "NEW")) {
                if (tokens.canConsume("OLD")) {
                    sb.append("OLD");
                } else {
                    tokens.consume("NEW");
                    sb.append("NEW");
                }
                if (tokens.canConsume("ROW")) {
                    sb.append(SPACE).append("ROW");
                }
                if (tokens.canConsume("AS")) {
                    sb.append(SPACE).append("AS");
                }
                if (tokens.matchesAnyOf("OLD", "NEW")) {
                    // Second { OLD | NEW } [ ROW ] [ AS ] correlation-Name (no first correlation name present).
                    if (tokens.canConsume("OLD")) {
                        sb.append(SPACE).append("OLD");
                    } else {
                        tokens.consume("NEW");
                        sb.append(SPACE).append("NEW");
                    }

                    if (tokens.canConsume("ROW")) {
                        sb.append(SPACE).append("ROW");
                    }
                    if (tokens.canConsume("AS")) {
                        sb.append(SPACE).append("AS");
                    }
                    if (!tokens.matchesAnyOf("FOR", "MODE", type)) {
                        String corrName = parseName(tokens);
                        sb.append(SPACE).append(corrName);
                    }
                } else {
                    // First correlation name, then an optional second { OLD | NEW } [ ROW ] [ AS ] correlation-Name.
                    String corrName = parseName(tokens);
                    sb.append(SPACE).append(corrName);

                    if (tokens.matchesAnyOf("OLD", "NEW")) {
                        if (tokens.canConsume("OLD")) {
                            sb.append(SPACE).append("OLD");
                        } else {
                            tokens.consume("NEW");
                            sb.append(SPACE).append("NEW");
                        }

                        if (tokens.canConsume("ROW")) {
                            sb.append(SPACE).append("ROW");
                        }
                        if (tokens.canConsume("AS")) {
                            sb.append(SPACE).append("AS");
                        }
                        if (!tokens.matchesAnyOf("FOR", "MODE", type)) {
                            corrName = parseName(tokens);
                            sb.append(SPACE).append(corrName);
                        }
                    }
                }
            }
        }
        // [ FOR EACH { ROW | STATEMENT } ] [ MODE DB2SQL ]
        if (tokens.canConsume("FOR", "EACH")) {
            if (tokens.canConsume("ROW")) {
                AstNode optionNode = nodeFactory().node("forEach", node, TYPE_STATEMENT_OPTION);
                optionNode.setProperty(VALUE, "FOR EACH ROW");
            } else {
                tokens.consume("STATEMENT");
                AstNode optionNode = nodeFactory().node("forEach", node, TYPE_STATEMENT_OPTION);
                optionNode.setProperty(VALUE, "FOR EACH STATEMENT");
            }
        }
        if (tokens.canConsume("MODE")) {
            tokens.consume("DB2SQL");
            AstNode optionNode = nodeFactory().node("mode", node, TYPE_STATEMENT_OPTION);
            optionNode.setProperty(VALUE, "MODE DB2SQL");
        }

        // Everything up to the statement terminator is the triggered SQL; embedded statement
        // terminators inside it are ignored.
        String sql = parseUntilTerminatorIgnoreEmbeddedStatements(tokens);
        node.setProperty(SQL, sql);

        markEndOfStatement(tokens, node);

        return node;
    }
1212 
1213     /**
1214      * {@inheritDoc}
1215      * 
1216      * @see org.modeshape.sequencer.ddl.StandardDdlParser#getDataTypeStartWords()
1217      */
1218     @Override
1219     protected List<String> getCustomDataTypeStartWords() {
1220         return DerbyDataTypes.CUSTOM_DATATYPE_START_WORDS;
1221     }
1222 
1223     class DerbyDataTypeParser extends DataTypeParser {
1224 
1225         /**
1226          * {@inheritDoc}
1227          * 
1228          * @see org.modeshape.sequencer.ddl.datatype.DataTypeParser#isCustomDataType(org.modeshape.sequencer.ddl.DdlTokenStream)
1229          */
1230         @Override
1231         protected boolean isCustomDataType( DdlTokenStream tokens ) throws ParsingException {
1232             // Loop through the registered statement start string arrays and look for exact matches.
1233 
1234             for (String[] stmts : derbyDataTypeStrings) {
1235                 if (tokens.matches(stmts)) return true;
1236             }
1237             return super.isCustomDataType(tokens);
1238         }
1239 
1240         /**
1241          * {@inheritDoc}
1242          * 
1243          * @see org.modeshape.sequencer.ddl.datatype.DataTypeParser#parseApproxNumericType(org.modeshape.sequencer.ddl.DdlTokenStream)
1244          */
1245         @Override
1246         protected DataType parseApproxNumericType( DdlTokenStream tokens ) throws ParsingException {
1247             return super.parseApproxNumericType(tokens);
1248         }
1249 
1250         /**
1251          * {@inheritDoc}
1252          * 
1253          * @see org.modeshape.sequencer.ddl.datatype.DataTypeParser#parseBitStringType(org.modeshape.sequencer.ddl.DdlTokenStream)
1254          */
1255         @Override
1256         protected DataType parseBitStringType( DdlTokenStream tokens ) throws ParsingException {
1257             return super.parseBitStringType(tokens);
1258         }
1259 
1260         /**
1261          * {@inheritDoc}
1262          * 
1263          * @see org.modeshape.sequencer.ddl.datatype.DataTypeParser#parseBracketedInteger(org.modeshape.sequencer.ddl.DdlTokenStream,
1264          *      org.modeshape.sequencer.ddl.datatype.DataType)
1265          */
1266         @Override
1267         protected int parseBracketedInteger( DdlTokenStream tokens,
1268                                              DataType dataType ) {
1269             return super.parseBracketedInteger(tokens, dataType);
1270         }
1271 
1272         /**
1273          * {@inheritDoc}
1274          * 
1275          * @see org.modeshape.sequencer.ddl.datatype.DataTypeParser#parseCharStringType(org.modeshape.sequencer.ddl.DdlTokenStream)
1276          */
1277         @Override
1278         protected DataType parseCharStringType( DdlTokenStream tokens ) throws ParsingException {
1279             DataType result = super.parseCharStringType(tokens);
1280 
1281             canConsume(tokens, result, true, "FOR", "BIT", "DATA");
1282 
1283             return result;
1284         }
1285 
1286         /**
1287          * {@inheritDoc}
1288          * 
1289          * @see org.modeshape.sequencer.ddl.datatype.DataTypeParser#parseCustomType(org.modeshape.sequencer.ddl.DdlTokenStream)
1290          */
1291         @Override
1292         protected DataType parseCustomType( DdlTokenStream tokens ) throws ParsingException {
1293             DataType dataType = null;
1294             String typeName = null;
1295             int length = 0;
1296 
1297             if (tokens.matches(DerbyDataTypes.DTYPE_BINARY_LARGE_OBJECT)
1298                 || tokens.matches(DerbyDataTypes.DTYPE_CHARACTER_LARGE_OBJECT)) {
1299                 dataType = new DataType();
1300                 typeName = consume(tokens, dataType, true) + SPACE + consume(tokens, dataType, true) + SPACE
1301                            + consume(tokens, dataType, true);
1302                 boolean isKMGLength = false;
1303                 String kmgValue = null;
1304                 if (canConsume(tokens, dataType, true, L_PAREN)) {
1305                     String lengthValue = consume(tokens, dataType, false);
1306                     kmgValue = getKMG(lengthValue);
1307 
1308                     isKMGLength = isKMGInteger(lengthValue);
1309 
1310                     length = parseInteger(lengthValue);
1311 
1312                     consume(tokens, dataType, true, R_PAREN);
1313                 }
1314 
1315                 dataType.setName(typeName);
1316                 dataType.setLength(length);
1317                 dataType.setKMGLength(isKMGLength);
1318                 dataType.setKMGValue(kmgValue);
1319             } else if (tokens.matches(DerbyDataTypes.DTYPE_CLOB) || tokens.matches(DerbyDataTypes.DTYPE_BLOB)) {
1320                 dataType = new DataType();
1321                 typeName = consume(tokens, dataType, true);
1322                 boolean isKMGLength = false;
1323                 String kmgValue = null;
1324                 if (canConsume(tokens, dataType, true, L_PAREN)) {
1325                     String lengthValue = consume(tokens, dataType, false);
1326                     kmgValue = getKMG(lengthValue);
1327 
1328                     isKMGLength = isKMGInteger(lengthValue);
1329 
1330                     length = parseInteger(lengthValue);
1331 
1332                     consume(tokens, dataType, true, R_PAREN);
1333                 }
1334 
1335                 dataType.setName(typeName);
1336                 dataType.setLength(length);
1337                 dataType.setKMGLength(isKMGLength);
1338                 dataType.setKMGValue(kmgValue);
1339             } else if (tokens.matches(DerbyDataTypes.DTYPE_BIGINT)) {
1340                 dataType = new DataType();
1341                 typeName = consume(tokens, dataType, true);
1342                 dataType.setName(typeName);
1343             } else if (tokens.matches(DerbyDataTypes.DTYPE_LONG_VARCHAR_FBD)) {
1344                 dataType = new DataType();
1345                 typeName = consume(tokens, dataType, true) + SPACE + consume(tokens, dataType, true) + SPACE
1346                            + consume(tokens, dataType, true) + SPACE + consume(tokens, dataType, true) + SPACE
1347                            + consume(tokens, dataType, true);
1348                 dataType.setName(typeName);
1349             } else if (tokens.matches(DerbyDataTypes.DTYPE_LONG_VARCHAR)) {
1350                 dataType = new DataType();
1351                 typeName = consume(tokens, dataType, true) + SPACE + consume(tokens, dataType, true);
1352                 typeName = consume(tokens, dataType, true);
1353                 dataType.setName(typeName);
1354             } else if (tokens.matches(DerbyDataTypes.DTYPE_DOUBLE)) {
1355                 dataType = new DataType();
1356                 typeName = consume(tokens, dataType, true);
1357                 dataType.setName(typeName);
1358             } else if (tokens.matches(DerbyDataTypes.DTYPE_XML)) {
1359                 dataType = new DataType();
1360                 typeName = consume(tokens, dataType, true);
1361                 dataType.setName(typeName);
1362             }
1363 
1364             if (dataType == null) {
1365                 super.parseCustomType(tokens);
1366             }
1367             return dataType;
1368         }
1369 
1370         /**
1371          * {@inheritDoc}
1372          * 
1373          * @see org.modeshape.sequencer.ddl.datatype.DataTypeParser#parseDateTimeType(org.modeshape.sequencer.ddl.DdlTokenStream)
1374          */
1375         @Override
1376         protected DataType parseDateTimeType( DdlTokenStream tokens ) throws ParsingException {
1377             return super.parseDateTimeType(tokens);
1378         }
1379 
1380         /**
1381          * {@inheritDoc}
1382          * 
1383          * @see org.modeshape.sequencer.ddl.datatype.DataTypeParser#parseExactNumericType(org.modeshape.sequencer.ddl.DdlTokenStream)
1384          */
1385         @Override
1386         protected DataType parseExactNumericType( DdlTokenStream tokens ) throws ParsingException {
1387             return super.parseExactNumericType(tokens);
1388         }
1389 
1390     }
1391 
1392 }