1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30 package org.modeshape.sequencer.ddl;
31
32 import static org.modeshape.sequencer.ddl.StandardDdlLexicon.ALL_PRIVILEGES;
33 import static org.modeshape.sequencer.ddl.StandardDdlLexicon.CHECK_SEARCH_CONDITION;
34 import static org.modeshape.sequencer.ddl.StandardDdlLexicon.COLLATION_NAME;
35 import static org.modeshape.sequencer.ddl.StandardDdlLexicon.CONSTRAINT_ATTRIBUTE_TYPE;
36 import static org.modeshape.sequencer.ddl.StandardDdlLexicon.CONSTRAINT_TYPE;
37 import static org.modeshape.sequencer.ddl.StandardDdlLexicon.CREATE_VIEW_QUERY_EXPRESSION;
38 import static org.modeshape.sequencer.ddl.StandardDdlLexicon.DDL_EXPRESSION;
39 import static org.modeshape.sequencer.ddl.StandardDdlLexicon.DDL_START_CHAR_INDEX;
40 import static org.modeshape.sequencer.ddl.StandardDdlLexicon.DDL_START_COLUMN_NUMBER;
41 import static org.modeshape.sequencer.ddl.StandardDdlLexicon.DDL_START_LINE_NUMBER;
42 import static org.modeshape.sequencer.ddl.StandardDdlLexicon.DEFAULT_OPTION;
43 import static org.modeshape.sequencer.ddl.StandardDdlLexicon.DEFAULT_PRECISION;
44 import static org.modeshape.sequencer.ddl.StandardDdlLexicon.DEFAULT_VALUE;
45 import static org.modeshape.sequencer.ddl.StandardDdlLexicon.DROP_BEHAVIOR;
46 import static org.modeshape.sequencer.ddl.StandardDdlLexicon.GRANTEE;
47 import static org.modeshape.sequencer.ddl.StandardDdlLexicon.GRANT_PRIVILEGE;
48 import static org.modeshape.sequencer.ddl.StandardDdlLexicon.MESSAGE;
49 import static org.modeshape.sequencer.ddl.StandardDdlLexicon.NAME;
50 import static org.modeshape.sequencer.ddl.StandardDdlLexicon.NULLABLE;
51 import static org.modeshape.sequencer.ddl.StandardDdlLexicon.PROBLEM_LEVEL;
52 import static org.modeshape.sequencer.ddl.StandardDdlLexicon.PROPERTY_VALUE;
53 import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TEMPORARY;
54 import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE;
55 import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_ADD_TABLE_CONSTRAINT_DEFINITION;
56 import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_ALTER_COLUMN_DEFINITION;
57 import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_ALTER_DOMAIN_STATEMENT;
58 import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_ALTER_TABLE_STATEMENT;
59 import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_COLUMN_DEFINITION;
60 import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_COLUMN_REFERENCE;
61 import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_CREATE_ASSERTION_STATEMENT;
62 import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_CREATE_CHARACTER_SET_STATEMENT;
63 import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_CREATE_COLLATION_STATEMENT;
64 import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_CREATE_DOMAIN_STATEMENT;
65 import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_CREATE_SCHEMA_STATEMENT;
66 import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_CREATE_TABLE_STATEMENT;
67 import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_CREATE_TRANSLATION_STATEMENT;
68 import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_CREATE_VIEW_STATEMENT;
69 import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_DROP_ASSERTION_STATEMENT;
70 import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_DROP_CHARACTER_SET_STATEMENT;
71 import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_DROP_COLLATION_STATEMENT;
72 import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_DROP_COLUMN_DEFINITION;
73 import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_DROP_DOMAIN_STATEMENT;
74 import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_DROP_SCHEMA_STATEMENT;
75 import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_DROP_TABLE_CONSTRAINT_DEFINITION;
76 import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_DROP_TABLE_STATEMENT;
77 import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_DROP_TRANSLATION_STATEMENT;
78 import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_DROP_VIEW_STATEMENT;
79 import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_FK_COLUMN_REFERENCE;
80 import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_GRANT_ON_CHARACTER_SET_STATEMENT;
81 import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_GRANT_ON_COLLATION_STATEMENT;
82 import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_GRANT_ON_DOMAIN_STATEMENT;
83 import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_GRANT_ON_TABLE_STATEMENT;
84 import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_GRANT_ON_TRANSLATION_STATEMENT;
85 import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_INSERT_STATEMENT;
86 import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_MISSING_TERMINATOR;
87 import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_PROBLEM;
88 import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_REVOKE_ON_CHARACTER_SET_STATEMENT;
89 import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_REVOKE_ON_COLLATION_STATEMENT;
90 import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_REVOKE_ON_DOMAIN_STATEMENT;
91 import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_REVOKE_ON_TABLE_STATEMENT;
92 import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_REVOKE_ON_TRANSLATION_STATEMENT;
93 import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_SET_STATEMENT;
94 import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_STATEMENT;
95 import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_STATEMENT_OPTION;
96 import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_TABLE_CONSTRAINT;
97 import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_TABLE_REFERENCE;
98 import static org.modeshape.sequencer.ddl.StandardDdlLexicon.VALUE;
99 import static org.modeshape.sequencer.ddl.StandardDdlLexicon.WITH_GRANT_OPTION;
100 import java.math.BigInteger;
101 import java.util.ArrayList;
102 import java.util.Collections;
103 import java.util.LinkedList;
104 import java.util.List;
105 import net.jcip.annotations.NotThreadSafe;
106 import org.modeshape.common.text.ParsingException;
107 import org.modeshape.common.text.Position;
108 import org.modeshape.common.util.CheckArg;
109 import org.modeshape.graph.JcrLexicon;
110 import org.modeshape.graph.property.Name;
111 import org.modeshape.sequencer.ddl.DdlTokenStream.DdlTokenizer;
112 import org.modeshape.sequencer.ddl.datatype.DataType;
113 import org.modeshape.sequencer.ddl.datatype.DataTypeParser;
114 import org.modeshape.sequencer.ddl.node.AstNode;
115 import org.modeshape.sequencer.ddl.node.AstNodeFactory;
116
117
118
119
120 @NotThreadSafe
121 public class StandardDdlParser implements DdlParser, DdlConstants, DdlConstants.StatementStartPhrases {
122
    // Identifier for this parser's dialect.
    private final String parserId = "SQL92";
    // When true, parse(...) prints each resulting statement node via testPrint(...).
    private boolean testMode = false;
    // Problems accumulated while scoring/parsing; attached under the root node at the end of parse().
    private final List<DdlParserProblem> problems;
    // Factory used to create and type AstNode instances.
    private final AstNodeFactory nodeFactory;
    // Root of the AST currently being built; set at the start of parse().
    private AstNode rootNode;
    // Cached data-type start words -- presumably lazily initialized later in the class; confirm.
    private List<String> allDataTypeStartWords = null;
    // Delegate used to parse data-type expressions; set in the constructor.
    private DataTypeParser datatypeParser = null;
    // Statement terminator string; defaults to DEFAULT_TERMINATOR.
    private String terminator = DEFAULT_TERMINATOR;
    // Whether statement parsing honors the terminator (see setDoUseTerminator in the constructor).
    private boolean useTerminator = false;
    // Position recorded by markStartOfStatement(...) for the statement currently being parsed.
    private Position currentMarkedPosition;
133
    /**
     * Creates a standard SQL-92 DDL parser with terminator handling enabled and a
     * default {@link DataTypeParser}.
     */
    public StandardDdlParser() {
        super();
        // NOTE(review): the constructor invokes overridable methods (setDoUseTerminator,
        // setDatatypeParser); a subclass override would run before the subclass's own
        // state is initialized -- confirm this is intended before restructuring.
        setDoUseTerminator(true);
        setDatatypeParser(new DataTypeParser());
        nodeFactory = new AstNodeFactory();
        problems = new ArrayList<DdlParserProblem>();
    }
141
142
143
144
145
146
147 public DataTypeParser getDatatypeParser() {
148 return datatypeParser;
149 }
150
151
152
153
154 public void setDatatypeParser( DataTypeParser datatypeParser ) {
155 this.datatypeParser = datatypeParser;
156 }
157
158
159
160
161
162
163 public AstNodeFactory nodeFactory() {
164 return this.nodeFactory;
165 }
166
167
168
169
170 public AstNode getRootNode() {
171 return rootNode;
172 }
173
174
175
176
177 public void setRootNode( AstNode rootNode ) {
178 this.rootNode = rootNode;
179 }
180
181
182
183
184
185
186
    /**
     * Scores the supplied DDL content for this dialect and returns the token stream so
     * {@link #parse} can reuse it without re-tokenizing.
     *
     * @param ddl the DDL text; may not be null
     * @param fileName the name of the file being scored; may be null
     * @param scorer the scorer to record matches on; may not be null
     * @return the initialized {@link DdlTokenStream} (passed back to parse as the score object)
     * @throws ParsingException if tokenization fails
     */
    public Object score( String ddl,
                         String fileName,
                         DdlParserScorer scorer ) throws ParsingException {
        CheckArg.isNotNull(ddl, "ddl");
        CheckArg.isNotNull(scorer, "scorer");

        if (fileName != null) {
            // The file name itself may contain identifying keywords (weight 2).
            scorer.scoreText(fileName, 2, getIdentifyingKeywords());
        }

        problems.clear();
        // Comments are kept here (unlike parse's fallback path) so they can be scored below.
        boolean includeComments = true;
        DdlTokenStream tokens = new DdlTokenStream(ddl, DdlTokenStream.ddlTokenizer(includeComments), false);
        initializeTokenStream(tokens);
        tokens.start();

        testPrint("\n== >> StandardDdlParser.parse() PARSING STARTED: ");

        // Score any leading comments for identifying keywords.
        while (tokens.matches(DdlTokenizer.COMMENT)) {
            String comment = tokens.consume();
            scorer.scoreText(comment, 2, getIdentifyingKeywords());
        }

        // Count recognized statement keywords in the remaining tokens.
        computeScore(tokens, scorer);

        // Returned so parse(...) can rewind and reuse this stream.
        return tokens;
    }
220
221 protected void computeScore( DdlTokenStream tokens,
222 DdlParserScorer scorer ) {
223 while (tokens.hasNext()) {
224 if (tokens.isNextKeyWord()) {
225 scorer.scoreStatements(1);
226 }
227 tokens.consume();
228 }
229 }
230
231 public String[] getIdentifyingKeywords() {
232 return new String[] {getId()};
233 }
234
235
236
237
238
239
240
    /**
     * Parses the supplied DDL content, creating one child node per statement under the
     * supplied root node and attaching any accumulated problems at the end.
     *
     * @param ddl the DDL text; may not be null
     * @param rootNode the node to attach parsed statements to; may not be null
     * @param scoreReturnObject the object returned by {@link #score}; when it is a
     *        {@link DdlTokenStream} it is rewound and reused, otherwise the DDL is re-tokenized
     * @throws ParsingException if tokenization or statement parsing fails
     */
    public void parse( String ddl,
                       AstNode rootNode,
                       Object scoreReturnObject ) throws ParsingException {
        CheckArg.isNotNull(ddl, "ddl");
        CheckArg.isNotNull(rootNode, "rootNode");
        problems.clear();
        setRootNode(rootNode);

        DdlTokenStream tokens = null;
        if (scoreReturnObject instanceof DdlTokenStream) {
            // Reuse the stream built during scoring rather than tokenizing twice.
            tokens = (DdlTokenStream)scoreReturnObject;
            tokens.rewind();
        } else {
            // No stream from scoring; tokenize from scratch, excluding comments this time.
            boolean includeComments = false;
            tokens = new DdlTokenStream(ddl, DdlTokenStream.ddlTokenizer(includeComments), false);
            initializeTokenStream(tokens);
            tokens.start();
        }

        testPrint("\n== >> StandardDdlParser.parse() PARSING STARTED: ");

        // Main driver loop: advance to each recognized statement start, then parse it.
        while (moveToNextStatementStart(tokens)) {
            AstNode stmtNode = parseNextStatement(tokens, rootNode);
            if (stmtNode == null) {
                // No handler recognized the statement; capture it as an ignorable statement.
                markStartOfStatement(tokens);
                String stmtName = tokens.consume();
                stmtNode = parseIgnorableStatement(tokens, stmtName, rootNode);
                markEndOfStatement(tokens, stmtNode);
            }
        }

        // Post-process the tree (e.g. prune MISSING TERMINATOR nodes).
        rewrite(tokens, rootNode);

        // Surface all recorded problems as child nodes of the root.
        for (DdlParserProblem problem : problems) {
            attachNewProblem(problem, rootNode);
        }

        if (testMode) {
            // Debug aid: dump every parsed statement node.
            int count = 0;
            for (AstNode child : rootNode.getChildren()) {
                testPrint("== >> Found Statement" + "(" + (++count) + "):\n" + child);
            }
        }
    }
299
300
301
302
303
304
305
306
    /**
     * Registers the SQL-92 reserved words and statement-start phrases with the token
     * stream so keyword and statement-boundary matching work during scoring and parsing.
     *
     * @param tokens the token stream to configure; assumed non-null
     */
    protected void initializeTokenStream( DdlTokenStream tokens ) {
        tokens.registerKeyWords(SQL_92_RESERVED_WORDS);
        tokens.registerStatementStartPhrase(SQL_92_ALL_PHRASES);
    }
311
312
313
314
315
316
317
318
319
320
321 protected AstNode parseNextStatement( DdlTokenStream tokens,
322 AstNode node ) {
323 assert tokens != null;
324 assert node != null;
325
326 AstNode stmtNode = null;
327
328 if (tokens.matches(CREATE)) {
329 stmtNode = parseCreateStatement(tokens, node);
330 } else if (tokens.matches(ALTER)) {
331 stmtNode = parseAlterStatement(tokens, node);
332 } else if (tokens.matches(DROP)) {
333 stmtNode = parseDropStatement(tokens, node);
334 } else if (tokens.matches(INSERT)) {
335 stmtNode = parseInsertStatement(tokens, node);
336 } else if (tokens.matches(SET)) {
337 stmtNode = parseSetStatement(tokens, node);
338 } else if (tokens.matches(GRANT)) {
339 stmtNode = parseGrantStatement(tokens, node);
340 } else if (tokens.matches(REVOKE)) {
341 stmtNode = parseRevokeStatement(tokens, node);
342 }
343
344 if (stmtNode == null) {
345 stmtNode = parseCustomStatement(tokens, node);
346 }
347
348 return stmtNode;
349 }
350
    /**
     * Advances the token stream to the next registered statement-start phrase. Any
     * unexpected tokens encountered along the way are consumed, concatenated, and
     * recorded as a WARNING {@link DdlParserProblem} (unless a terminator or a
     * subclass's {@link #handleUnknownToken} turns them into an "unknown" statement
     * node, which clears the pending problem).
     *
     * @param tokens the token stream; assumed non-null
     * @return true if the stream is now positioned at a statement start, or false if
     *         the end of the stream was reached
     * @throws ParsingException
     */
    private boolean moveToNextStatementStart( DdlTokenStream tokens ) throws ParsingException {
        assert tokens != null;

        StringBuffer sb = new StringBuffer();
        DdlParserProblem problem = null;

        if (tokens.hasNext()) {
            while (tokens.hasNext()) {
                // Comments are skipped outright.
                if (tokens.canConsume(DdlTokenizer.COMMENT)) continue;

                if (!tokens.matches(DdlTokenizer.STATEMENT_KEY)) {
                    // Unexpected token: begin (or continue) collecting unused-token text.
                    if (problem == null) {
                        markStartOfStatement(tokens);

                        String msg = DdlSequencerI18n.unusedTokensDiscovered.text(tokens.nextPosition().getLine(),
                                                                                  tokens.nextPosition().getColumn());
                        problem = new DdlParserProblem(DdlConstants.Problems.WARNING, tokens.nextPosition(), msg);
                    }

                    String nextTokenValue = null;

                    // A terminator ends the run of unknown tokens: record them as an
                    // "unknown" statement node and drop the pending problem.
                    if (tokens.matches(getTerminator()) && sb.length() > 0) {
                        nextTokenValue = getTerminator();

                        AstNode unknownNode = unknownTerminatedNode(getRootNode());
                        markEndOfStatement(tokens, unknownNode);

                        problem = null;
                    } else {
                        // Otherwise consume the token and let a subclass hook try to
                        // convert it into a statement node.
                        nextTokenValue = tokens.consume();
                        AstNode unknownNode = handleUnknownToken(tokens, nextTokenValue);
                        if (unknownNode != null) {
                            markEndOfStatement(tokens, unknownNode);

                            problem = null;
                        }
                    }
                    sb.append(SPACE).append(nextTokenValue);

                } else {
                    // Found a statement start; flush any pending unused-token problem first.
                    if (problem != null && sb.length() > 0) {
                        problem.setUnusedSource(sb.toString());
                        addProblem(problem);
                    }
                    return true;
                }
            }

            // Ran off the end of the stream; flush any pending problem.
            if (problem != null && sb.length() > 0) {
                problem.setUnusedSource(sb.toString());
                addProblem(problem);
            }
        }
        return false;
    }
418
    /**
     * Records the problem in this parser's problem list and also attaches it
     * immediately to the given node as a DDL PROBLEM child.
     *
     * @param problem the problem to record; assumed non-null
     * @param node the node to attach the problem to; assumed non-null
     */
    public final void addProblem( DdlParserProblem problem,
                                  AstNode node ) {
        addProblem(problem);
        attachNewProblem(problem, node);
    }
424
425 public final void addProblem( DdlParserProblem problem ) {
426 problems.add(problem);
427 }
428
429 public final List<DdlParserProblem> getProblems() {
430 return this.problems;
431 }
432
433 public final void attachNewProblem( DdlParserProblem problem,
434 AstNode parentNode ) {
435 assert problem != null;
436 assert parentNode != null;
437
438 AstNode problemNode = nodeFactory().node("DDL PROBLEM", parentNode, TYPE_PROBLEM);
439 problemNode.setProperty(PROBLEM_LEVEL, problem.getLevel());
440 problemNode.setProperty(MESSAGE, problem.toString() + "[" + problem.getUnusedSource() + "]");
441
442 testPrint(problem.toString());
443 }
444
    /**
     * Post-processing hook invoked after all statements have been parsed. The standard
     * implementation only prunes MISSING TERMINATOR nodes from the tree; dialects may
     * override to perform additional tree rewrites.
     *
     * @param tokens the token stream used during parsing; assumed non-null
     * @param rootNode the root of the parsed tree; assumed non-null
     */
    protected void rewrite( DdlTokenStream tokens,
                            AstNode rootNode ) {
        assert tokens != null;
        assert rootNode != null;

        removeMissingTerminatorNodes(rootNode);
    }
453
454 protected void removeMissingTerminatorNodes( AstNode parentNode ) {
455 assert parentNode != null;
456
457 List<AstNode> copyOfNodes = new ArrayList<AstNode>(parentNode.getChildren());
458
459 for (AstNode child : copyOfNodes) {
460 if (nodeFactory().hasMixinType(child, TYPE_MISSING_TERMINATOR)) {
461 parentNode.removeChild(child);
462 } else {
463 removeMissingTerminatorNodes(child);
464 }
465 }
466 }
467
468
469
470
471
472
473
474
475 public void mergeNodes( DdlTokenStream tokens,
476 AstNode firstNode,
477 AstNode secondNode ) {
478 assert tokens != null;
479 assert firstNode != null;
480 assert secondNode != null;
481
482 int firstStartIndex = (Integer)firstNode.getProperty(DDL_START_CHAR_INDEX).getFirstValue();
483 int secondStartIndex = (Integer)secondNode.getProperty(DDL_START_CHAR_INDEX).getFirstValue();
484 int deltaLength = ((String)secondNode.getProperty(DDL_EXPRESSION).getFirstValue()).length();
485 Position startPosition = new Position(firstStartIndex, 1, 0);
486 Position endPosition = new Position((secondStartIndex + deltaLength), 1, 0);
487 String source = tokens.getContentBetween(startPosition, endPosition);
488 firstNode.setProperty(DDL_EXPRESSION, source);
489 }
490
491
492
493
494
495
496
497
498
499
500
    /**
     * Extension point: subclasses may convert a token that is not part of any
     * recognized statement into a statement node. This default implementation handles
     * nothing and returns null.
     *
     * @param tokens the token stream (positioned after the consumed token); assumed non-null
     * @param tokenValue the value of the token just consumed; assumed non-null
     * @return the node created for the token, or null if the token was not handled
     * @throws ParsingException
     */
    public AstNode handleUnknownToken( DdlTokenStream tokens,
                                       String tokenValue ) throws ParsingException {
        assert tokens != null;
        assert tokenValue != null;

        return null;
    }
508
509
510
511
512
513
514
515
516
    /**
     * Dispatches a CREATE statement to the appropriate parse method based on the
     * object being created (SCHEMA, TABLE, VIEW, ASSERTION, CHARACTER SET, COLLATION,
     * TRANSLATION, DOMAIN). Unrecognized CREATE statements are captured as ignorable
     * statements with an attached warning.
     *
     * @param tokens the token stream positioned at the CREATE keyword; may not be null
     * @param parentNode the parent for the new statement node; may not be null
     * @return the parsed statement node; never null
     * @throws ParsingException
     */
    protected AstNode parseCreateStatement( DdlTokenStream tokens,
                                            AstNode parentNode ) throws ParsingException {
        assert tokens != null;
        assert parentNode != null;

        AstNode stmtNode = null;

        if (tokens.matches(STMT_CREATE_SCHEMA)) {
            stmtNode = parseCreateSchemaStatement(tokens, parentNode);
        } else if (tokens.matches(STMT_CREATE_TABLE) || tokens.matches(STMT_CREATE_GLOBAL_TEMPORARY_TABLE)
                   || tokens.matches(STMT_CREATE_LOCAL_TEMPORARY_TABLE)) {
            stmtNode = parseCreateTableStatement(tokens, parentNode);
        } else if (tokens.matches(STMT_CREATE_VIEW) || tokens.matches(STMT_CREATE_OR_REPLACE_VIEW)) {
            stmtNode = parseCreateViewStatement(tokens, parentNode);
        } else if (tokens.matches(STMT_CREATE_ASSERTION)) {
            stmtNode = parseCreateAssertionStatement(tokens, parentNode);
        } else if (tokens.matches(STMT_CREATE_CHARACTER_SET)) {
            stmtNode = parseCreateCharacterSetStatement(tokens, parentNode);
        } else if (tokens.matches(STMT_CREATE_COLLATION)) {
            stmtNode = parseCreateCollationStatement(tokens, parentNode);
        } else if (tokens.matches(STMT_CREATE_TRANSLATION)) {
            stmtNode = parseCreateTranslationStatement(tokens, parentNode);
        } else if (tokens.matches(STMT_CREATE_DOMAIN)) {
            stmtNode = parseCreateDomainStatement(tokens, parentNode);
        } else {
            // Unknown CREATE variant: record it as ignorable and attach a warning.
            markStartOfStatement(tokens);

            stmtNode = parseIgnorableStatement(tokens, "CREATE UNKNOWN", parentNode);
            Position position = getCurrentMarkedPosition();
            String msg = DdlSequencerI18n.unknownCreateStatement.text(position.getLine(), position.getColumn());
            DdlParserProblem problem = new DdlParserProblem(DdlConstants.Problems.WARNING, position, msg);

            // NOTE(review): TYPE_PROBLEM (a node mixin type elsewhere in this class) is
            // used here as a property name -- confirm this is intentional and not a
            // mix-up with a problem-message property.
            stmtNode.setProperty(TYPE_PROBLEM, problem.toString());

            markEndOfStatement(tokens, stmtNode);
        }

        return stmtNode;
    }
570
571
572
573
574
575
576
577
578
    /**
     * Parses an ALTER statement. ALTER TABLE is fully handled by
     * {@link #parseAlterTableStatement}; ALTER DOMAIN is captured as a named node with
     * the remainder of the statement skipped to the terminator.
     *
     * @param tokens the token stream positioned at the ALTER keyword; may not be null
     * @param parentNode the parent for the new statement node; may not be null
     * @return the parsed statement node, or null if the next tokens are not a
     *         recognized ALTER statement
     * @throws ParsingException
     */
    protected AstNode parseAlterStatement( DdlTokenStream tokens,
                                           AstNode parentNode ) throws ParsingException {
        assert tokens != null;
        assert parentNode != null;

        if (tokens.matches(ALTER, TABLE)) {
            return parseAlterTableStatement(tokens, parentNode);
        } else if (tokens.matches("ALTER", "DOMAIN")) {
            // Mark before consuming so the statement's start position is recorded.
            markStartOfStatement(tokens);
            tokens.consume("ALTER", "DOMAIN");
            String domainName = parseName(tokens);
            AstNode alterNode = nodeFactory().node(domainName, parentNode, TYPE_ALTER_DOMAIN_STATEMENT);
            // Domain alterations are not modeled in detail; skip to the terminator.
            parseUntilTerminator(tokens);
            markEndOfStatement(tokens, alterNode);
            return alterNode;
        }
        return null;
    }
597
598
599
600
601
602
603
604
605
    /**
     * Parses an ALTER TABLE statement, handling ADD (table constraint or column
     * definition), DROP (CONSTRAINT or [COLUMN] with optional CASCADE/RESTRICT), and
     * ALTER [COLUMN] (SET default or DROP DEFAULT). Any other clause is skipped to the
     * statement terminator.
     *
     * @param tokens the token stream positioned at "ALTER TABLE"; may not be null
     * @param parentNode the parent for the new statement node; may not be null
     * @return the ALTER TABLE statement node; never null
     * @throws ParsingException
     */
    protected AstNode parseAlterTableStatement( DdlTokenStream tokens,
                                                AstNode parentNode ) throws ParsingException {
        assert tokens != null;
        assert parentNode != null;

        markStartOfStatement(tokens);

        tokens.consume("ALTER", "TABLE");
        String tableName = parseName(tokens);

        AstNode alterTableNode = nodeFactory().node(tableName, parentNode, TYPE_ALTER_TABLE_STATEMENT);

        if (tokens.canConsume("ADD")) {
            // ADD either a table-level constraint or a single column definition.
            if (isTableConstraint(tokens)) {
                parseTableConstraint(tokens, alterTableNode, true);
            } else {
                parseSingleTerminatedColumnDefinition(tokens, alterTableNode, true);
            }
        } else if (tokens.canConsume("DROP")) {
            if (tokens.canConsume("CONSTRAINT")) {
                // DROP CONSTRAINT <name> [CASCADE | RESTRICT]
                String constraintName = parseName(tokens);
                AstNode constraintNode = nodeFactory().node(constraintName, alterTableNode, TYPE_DROP_TABLE_CONSTRAINT_DEFINITION);
                if (tokens.canConsume(DropBehavior.CASCADE)) {
                    constraintNode.setProperty(DROP_BEHAVIOR, DropBehavior.CASCADE);
                } else if (tokens.canConsume(DropBehavior.RESTRICT)) {
                    constraintNode.setProperty(DROP_BEHAVIOR, DropBehavior.RESTRICT);
                }
            } else {
                // DROP [COLUMN] <name> [CASCADE | RESTRICT] -- COLUMN keyword optional.
                tokens.canConsume("COLUMN");
                String columnName = parseName(tokens);
                AstNode columnNode = nodeFactory().node(columnName, alterTableNode, TYPE_DROP_COLUMN_DEFINITION);
                if (tokens.canConsume(DropBehavior.CASCADE)) {
                    columnNode.setProperty(DROP_BEHAVIOR, DropBehavior.CASCADE);
                } else if (tokens.canConsume(DropBehavior.RESTRICT)) {
                    columnNode.setProperty(DROP_BEHAVIOR, DropBehavior.RESTRICT);
                }
            }
        } else if (tokens.canConsume("ALTER")) {
            // ALTER [COLUMN] <name> {SET <default clause> | DROP DEFAULT}
            tokens.canConsume("COLUMN");
            String alterColumnName = parseName(tokens);
            AstNode columnNode = nodeFactory().node(alterColumnName, alterTableNode, TYPE_ALTER_COLUMN_DEFINITION);
            if (tokens.canConsume("SET")) {
                parseDefaultClause(tokens, columnNode);
            } else if (tokens.canConsume("DROP", "DEFAULT")) {
                columnNode.setProperty(DROP_BEHAVIOR, "DROP DEFAULT");
            }
        } else {
            // Unrecognized clause: skip the rest of the statement.
            parseUntilTerminator(tokens);
        }

        markEndOfStatement(tokens, alterTableNode);
        return alterTableNode;
    }
677
678
679
680
681
682
683
684
685
    /**
     * Parses a DROP statement by matching the leading phrase (TABLE, VIEW, SCHEMA,
     * DOMAIN, TRANSLATION, CHARACTER SET, ASSERTION, COLLATION) and delegating to
     * {@link #parseSimpleDropStatement} with the corresponding node type.
     *
     * @param tokens the token stream positioned at the DROP keyword; may not be null
     * @param parentNode the parent for the new statement node; may not be null
     * @return the parsed statement node, or null if the phrase is not a recognized DROP
     * @throws ParsingException
     */
    protected AstNode parseDropStatement( DdlTokenStream tokens,
                                          AstNode parentNode ) throws ParsingException {
        assert tokens != null;
        assert parentNode != null;

        if (tokens.matches(STMT_DROP_TABLE)) {
            return parseSimpleDropStatement(tokens, STMT_DROP_TABLE, parentNode, TYPE_DROP_TABLE_STATEMENT);
        } else if (tokens.matches(STMT_DROP_VIEW)) {
            return parseSimpleDropStatement(tokens, STMT_DROP_VIEW, parentNode, TYPE_DROP_VIEW_STATEMENT);
        } else if (tokens.matches(STMT_DROP_SCHEMA)) {
            return parseSimpleDropStatement(tokens, STMT_DROP_SCHEMA, parentNode, TYPE_DROP_SCHEMA_STATEMENT);
        } else if (tokens.matches(STMT_DROP_DOMAIN)) {
            return parseSimpleDropStatement(tokens, STMT_DROP_DOMAIN, parentNode, TYPE_DROP_DOMAIN_STATEMENT);
        } else if (tokens.matches(STMT_DROP_TRANSLATION)) {
            return parseSimpleDropStatement(tokens, STMT_DROP_TRANSLATION, parentNode, TYPE_DROP_TRANSLATION_STATEMENT);
        } else if (tokens.matches(STMT_DROP_CHARACTER_SET)) {
            return parseSimpleDropStatement(tokens, STMT_DROP_CHARACTER_SET, parentNode, TYPE_DROP_CHARACTER_SET_STATEMENT);
        } else if (tokens.matches(STMT_DROP_ASSERTION)) {
            return parseSimpleDropStatement(tokens, STMT_DROP_ASSERTION, parentNode, TYPE_DROP_ASSERTION_STATEMENT);
        } else if (tokens.matches(STMT_DROP_COLLATION)) {
            return parseSimpleDropStatement(tokens, STMT_DROP_COLLATION, parentNode, TYPE_DROP_COLLATION_STATEMENT);
        }

        return null;
    }
715
716 private AstNode parseSimpleDropStatement( DdlTokenStream tokens,
717 String[] startPhrase,
718 AstNode parentNode,
719 Name stmtType ) throws ParsingException {
720 assert tokens != null;
721 assert startPhrase != null && startPhrase.length > 0;
722 assert parentNode != null;
723
724 markStartOfStatement(tokens);
725 String behavior = null;
726 tokens.consume(startPhrase);
727 List<String> nameList = new ArrayList<String>();
728 nameList.add(parseName(tokens));
729 while (tokens.matches(COMMA)) {
730 tokens.consume(COMMA);
731 nameList.add(parseName(tokens));
732 }
733
734 if (tokens.canConsume("CASCADE")) {
735 behavior = "CASCADE";
736 } else if (tokens.canConsume("RESTRICT")) {
737 behavior = "RESTRICT";
738 }
739
740 AstNode dropNode = nodeFactory().node(nameList.get(0), parentNode, stmtType);
741 if (behavior != null) {
742 dropNode.setProperty(DROP_BEHAVIOR, behavior);
743 }
744 markEndOfStatement(tokens, dropNode);
745
746 return dropNode;
747 }
748
749
750
751
752
753
754
755
756
757 protected AstNode parseInsertStatement( DdlTokenStream tokens,
758 AstNode parentNode ) throws ParsingException {
759 assert tokens != null;
760 assert parentNode != null;
761
762
763 if (tokens.matches(STMT_INSERT_INTO)) {
764 markStartOfStatement(tokens);
765 tokens.consume(STMT_INSERT_INTO);
766 String prefix = getStatementTypeName(STMT_INSERT_INTO);
767 AstNode node = nodeFactory().node(prefix, parentNode, TYPE_INSERT_STATEMENT);
768 parseUntilTerminator(tokens);
769 markEndOfStatement(tokens, node);
770 return node;
771 }
772 return null;
773 }
774
775
776
777
778
779
780
781
782
783 protected AstNode parseSetStatement( DdlTokenStream tokens,
784 AstNode parentNode ) throws ParsingException {
785 assert tokens != null;
786 assert parentNode != null;
787
788
789 if (tokens.matches(SET)) {
790 markStartOfStatement(tokens);
791 tokens.consume(SET);
792 AstNode node = nodeFactory().node("SET", parentNode, TYPE_SET_STATEMENT);
793 parseUntilTerminator(tokens);
794 markEndOfStatement(tokens, node);
795 return node;
796 }
797 return null;
798 }
799
800
801
802
803
804
805
806
807
    /**
     * Parses a GRANT statement of the general form:
     * GRANT {ALL PRIVILEGES | privilege-list} ON {DOMAIN | COLLATION | CHARACTER SET |
     * TRANSLATION | [TABLE]} name TO grantee[, ...] [WITH GRANT OPTION].
     * The statement node type depends on the object class; privilege nodes and grantee
     * nodes are attached as children.
     *
     * @param tokens the token stream positioned at GRANT; may not be null
     * @param parentNode the parent for the new statement node; may not be null
     * @return the GRANT statement node; never null
     * @throws ParsingException
     */
    protected AstNode parseGrantStatement( DdlTokenStream tokens,
                                           AstNode parentNode ) throws ParsingException {
        assert tokens != null;
        assert parentNode != null;
        assert tokens.matches(GRANT);

        markStartOfStatement(tokens);

        AstNode grantNode = null;
        boolean allPrivileges = false;

        List<AstNode> privileges = new ArrayList<AstNode>();

        tokens.consume("GRANT");

        if (tokens.canConsume("ALL", "PRIVILEGES")) {
            allPrivileges = true;
        } else {
            // Parses the comma-separated privilege list into detached nodes.
            parseGrantPrivileges(tokens, privileges);
        }
        tokens.consume("ON");

        // The node type reflects the class of object being granted on; TABLE is the
        // default (its keyword is optional).
        if (tokens.canConsume("DOMAIN")) {
            String name = parseName(tokens);
            grantNode = nodeFactory().node(name, parentNode, TYPE_GRANT_ON_DOMAIN_STATEMENT);
        } else if (tokens.canConsume("COLLATION")) {
            String name = parseName(tokens);
            grantNode = nodeFactory().node(name, parentNode, TYPE_GRANT_ON_COLLATION_STATEMENT);
        } else if (tokens.canConsume("CHARACTER", "SET")) {
            String name = parseName(tokens);
            grantNode = nodeFactory().node(name, parentNode, TYPE_GRANT_ON_CHARACTER_SET_STATEMENT);
        } else if (tokens.canConsume("TRANSLATION")) {
            String name = parseName(tokens);
            grantNode = nodeFactory().node(name, parentNode, TYPE_GRANT_ON_TRANSLATION_STATEMENT);
        } else {
            tokens.canConsume(TABLE);
            String name = parseName(tokens);
            grantNode = nodeFactory().node(name, parentNode, TYPE_GRANT_ON_TABLE_STATEMENT);
        }

        // Re-parent the detached privilege nodes under the statement node.
        for (AstNode node : privileges) {
            node.setParent(grantNode);
        }
        if (allPrivileges) {
            grantNode.setProperty(ALL_PRIVILEGES, allPrivileges);
        }

        tokens.consume("TO");

        // One GRANTEE child per comma-separated grantee name.
        do {
            String grantee = parseName(tokens);
            nodeFactory().node(grantee, grantNode, GRANTEE);
        } while (tokens.canConsume(COMMA));

        if (tokens.canConsume("WITH", "GRANT", "OPTION")) {
            grantNode.setProperty(WITH_GRANT_OPTION, "WITH GRANT OPTION");
        }

        markEndOfStatement(tokens, grantNode);

        return grantNode;
    }
892
893 protected void parseGrantPrivileges( DdlTokenStream tokens,
894 List<AstNode> privileges ) throws ParsingException {
895
896
897
898
899
900
901
902
903
904
905
906
907
908
909
910
911 do {
912 AstNode node = null;
913
914 if (tokens.canConsume(DELETE)) {
915 node = nodeFactory().node("privilege");
916 node.setProperty(TYPE, DELETE);
917 } else if (tokens.canConsume(INSERT)) {
918 node = nodeFactory().node("privilege");
919 node.setProperty(TYPE, INSERT);
920 parseColumnNameList(tokens, node, TYPE_COLUMN_REFERENCE);
921 } else if (tokens.canConsume("REFERENCES")) {
922 node = nodeFactory().node("privilege");
923 node.setProperty(TYPE, "REFERENCES");
924 parseColumnNameList(tokens, node, TYPE_COLUMN_REFERENCE);
925 } else if (tokens.canConsume(SELECT)) {
926 node = nodeFactory().node("privilege");
927 node.setProperty(TYPE, SELECT);
928 } else if (tokens.canConsume("USAGE")) {
929 node = nodeFactory().node("privilege");
930 node.setProperty(TYPE, "USAGE");
931 } else if (tokens.canConsume(UPDATE)) {
932 node = nodeFactory().node("privilege");
933 node.setProperty(TYPE, UPDATE);
934 parseColumnNameList(tokens, node, TYPE_COLUMN_REFERENCE);
935 }
936 if (node == null) {
937 break;
938 }
939 nodeFactory().setType(node, GRANT_PRIVILEGE);
940 privileges.add(node);
941
942 } while (tokens.canConsume(COMMA));
943
944 }
945
    /**
     * Parses a REVOKE statement: privileges (or ALL PRIVILEGES), the object class and
     * name (DOMAIN, COLLATION, CHARACTER SET, TRANSLATION, or [TABLE]), the FROM
     * grantee list, and an optional CASCADE/RESTRICT behavior.
     *
     * @param tokens the token stream positioned at REVOKE; may not be null
     * @param parentNode the parent for the new statement node; may not be null
     * @return the REVOKE statement node; never null
     * @throws ParsingException
     */
    protected AstNode parseRevokeStatement( DdlTokenStream tokens,
                                            AstNode parentNode ) throws ParsingException {
        assert tokens != null;
        assert parentNode != null;
        assert tokens.matches(REVOKE);

        markStartOfStatement(tokens);

        AstNode revokeNode = null;
        boolean allPrivileges = false;
        boolean withGrantOption = false;

        List<AstNode> privileges = new ArrayList<AstNode>();

        tokens.consume("REVOKE");

        // NOTE(review): this consumes "WITH GRANT OPTION" immediately after REVOKE;
        // SQL-92's REVOKE grammar uses "GRANT OPTION FOR" in this position -- confirm
        // which phrasing is intended here.
        withGrantOption = tokens.canConsume("WITH", "GRANT", "OPTION");

        if (tokens.canConsume("ALL", "PRIVILEGES")) {
            allPrivileges = true;
        } else {
            parseGrantPrivileges(tokens, privileges);
        }
        tokens.consume("ON");

        // Node type depends on the object class; TABLE is the default (keyword optional).
        if (tokens.canConsume("DOMAIN")) {
            String name = parseName(tokens);
            revokeNode = nodeFactory().node(name, parentNode, TYPE_REVOKE_ON_DOMAIN_STATEMENT);
        } else if (tokens.canConsume("COLLATION")) {
            String name = parseName(tokens);
            revokeNode = nodeFactory().node(name, parentNode, TYPE_REVOKE_ON_COLLATION_STATEMENT);
        } else if (tokens.canConsume("CHARACTER", "SET")) {
            String name = parseName(tokens);
            revokeNode = nodeFactory().node(name, parentNode, TYPE_REVOKE_ON_CHARACTER_SET_STATEMENT);
        } else if (tokens.canConsume("TRANSLATION")) {
            String name = parseName(tokens);
            revokeNode = nodeFactory().node(name, parentNode, TYPE_REVOKE_ON_TRANSLATION_STATEMENT);
        } else {
            tokens.canConsume(TABLE);
            String name = parseName(tokens);
            revokeNode = nodeFactory().node(name, parentNode, TYPE_REVOKE_ON_TABLE_STATEMENT);
        }

        // Re-parent the detached privilege nodes under the statement node.
        for (AstNode node : privileges) {
            node.setParent(revokeNode);
        }

        if (allPrivileges) {
            revokeNode.setProperty(ALL_PRIVILEGES, allPrivileges);
        }

        tokens.consume("FROM");

        // One GRANTEE child per comma-separated grantee name.
        do {
            String grantee = parseName(tokens);
            nodeFactory().node(grantee, revokeNode, GRANTEE);
        } while (tokens.canConsume(COMMA));

        String behavior = null;

        if (tokens.canConsume("CASCADE")) {
            behavior = "CASCADE";
        } else if (tokens.canConsume("RESTRICT")) {
            behavior = "RESTRICT";
        }

        if (behavior != null) {
            revokeNode.setProperty(DROP_BEHAVIOR, behavior);
        }

        if (withGrantOption) {
            revokeNode.setProperty(WITH_GRANT_OPTION, "WITH GRANT OPTION");
        }

        markEndOfStatement(tokens, revokeNode);

        return revokeNode;
    }
1031
1032
1033
1034
1035
1036
1037
1038
1039
1040 protected AstNode parseCreateDomainStatement( DdlTokenStream tokens,
1041 AstNode parentNode ) throws ParsingException {
1042 assert tokens != null;
1043 assert parentNode != null;
1044
1045
1046
1047
1048
1049
1050
1051
1052 markStartOfStatement(tokens);
1053
1054 tokens.consume(STMT_CREATE_DOMAIN);
1055
1056 String name = parseName(tokens);
1057
1058 AstNode node = nodeFactory().node(name, parentNode, TYPE_CREATE_DOMAIN_STATEMENT);
1059
1060 parseUntilTerminator(tokens);
1061
1062 markEndOfStatement(tokens, node);
1063
1064 return node;
1065 }
1066
1067
1068
1069
1070
1071
1072
1073
1074
1075 protected AstNode parseCreateCollationStatement( DdlTokenStream tokens,
1076 AstNode parentNode ) throws ParsingException {
1077 assert tokens != null;
1078 assert parentNode != null;
1079
1080 markStartOfStatement(tokens);
1081
1082 tokens.consume(STMT_CREATE_COLLATION);
1083
1084 String name = parseName(tokens);
1085
1086 AstNode node = nodeFactory().node(name, parentNode, TYPE_CREATE_COLLATION_STATEMENT);
1087
1088 parseUntilTerminator(tokens);
1089
1090 markEndOfStatement(tokens, node);
1091
1092 return node;
1093 }
1094
1095
1096
1097
1098
1099
1100
1101
1102
1103 protected AstNode parseCreateTranslationStatement( DdlTokenStream tokens,
1104 AstNode parentNode ) throws ParsingException {
1105 assert tokens != null;
1106 assert parentNode != null;
1107
1108 markStartOfStatement(tokens);
1109
1110 tokens.consume(STMT_CREATE_TRANSLATION);
1111
1112 String name = parseName(tokens);
1113
1114 AstNode node = nodeFactory().node(name, parentNode, TYPE_CREATE_TRANSLATION_STATEMENT);
1115
1116 parseUntilTerminator(tokens);
1117
1118 markEndOfStatement(tokens, node);
1119
1120 return node;
1121 }
1122
1123
1124
1125
1126
1127
1128
1129
1130
1131 protected AstNode parseCreateCharacterSetStatement( DdlTokenStream tokens,
1132 AstNode parentNode ) throws ParsingException {
1133 assert tokens != null;
1134 assert parentNode != null;
1135
1136 markStartOfStatement(tokens);
1137
1138 tokens.consume(STMT_CREATE_CHARACTER_SET);
1139
1140 String name = parseName(tokens);
1141
1142 AstNode node = nodeFactory().node(name, parentNode, TYPE_CREATE_CHARACTER_SET_STATEMENT);
1143
1144 parseUntilTerminator(tokens);
1145
1146 markEndOfStatement(tokens, node);
1147
1148 return node;
1149 }
1150
1151
1152
1153
1154
1155
1156
1157
1158
    /**
     * Hook for dialect-specific subclasses to handle statements not covered by the standard grammar.
     * This base implementation recognizes nothing and always returns null.
     *
     * @param tokens the token stream; may not be null
     * @param parentNode the parent AST node; may not be null
     * @return the parsed statement node, or null if no custom statement was recognized (always null here)
     * @throws ParsingException if a parsing error occurs
     */
    protected AstNode parseCustomStatement( DdlTokenStream tokens,
                                            AstNode parentNode ) throws ParsingException {
        assert tokens != null;
        assert parentNode != null;

        // Subclasses override this to recognize dialect-specific statements.
        return null;
    }
1169
1170
1171
1172
1173
1174
1175
1176
1177
1178
1179
1180
1181
1182 protected AstNode parseCreateTableStatement( DdlTokenStream tokens,
1183 AstNode parentNode ) throws ParsingException {
1184 assert tokens != null;
1185 assert parentNode != null;
1186
1187 markStartOfStatement(tokens);
1188
1189 tokens.consume(CREATE);
1190 String temporaryValue = null;
1191 if (tokens.canConsume("LOCAL")) {
1192 tokens.consume("TEMPORARY");
1193 temporaryValue = "LOCAL";
1194 } else if (tokens.canConsume("GLOBAL")) {
1195 tokens.consume("TEMPORARY");
1196 temporaryValue = "GLOBAL";
1197 }
1198
1199 tokens.consume(TABLE);
1200
1201 String tableName = parseName(tokens);
1202
1203 AstNode tableNode = nodeFactory().node(tableName, parentNode, TYPE_CREATE_TABLE_STATEMENT);
1204
1205 if (temporaryValue != null) {
1206 tableNode.setProperty(TEMPORARY, temporaryValue);
1207 }
1208
1209
1210 parseColumnsAndConstraints(tokens, tableNode);
1211
1212 parseCreateTableOptions(tokens, tableNode);
1213
1214 markEndOfStatement(tokens, tableNode);
1215
1216 return tableNode;
1217 }
1218
1219 protected void parseCreateTableOptions( DdlTokenStream tokens,
1220 AstNode tableNode ) throws ParsingException {
1221 assert tokens != null;
1222 assert tableNode != null;
1223
1224
1225 while (areNextTokensCreateTableOptions(tokens)) {
1226 parseNextCreateTableOption(tokens, tableNode);
1227 }
1228
1229 }
1230
1231 protected void parseNextCreateTableOption( DdlTokenStream tokens,
1232 AstNode tableNode ) throws ParsingException {
1233 assert tokens != null;
1234 assert tableNode != null;
1235
1236 if (tokens.canConsume("ON", "COMMIT")) {
1237 String option = "";
1238
1239 if (tokens.canConsume("PRESERVE", "ROWS")) {
1240 option = option + "ON COMMIT PRESERVE ROWS";
1241 } else if (tokens.canConsume("DELETE", "ROWS")) {
1242 option = option + "ON COMMIT DELETE ROWS";
1243 } else if (tokens.canConsume("DROP")) {
1244 option = option + "ON COMMIT DROP";
1245 }
1246
1247 if (option.length() > 0) {
1248 AstNode tableOption = nodeFactory().node("option", tableNode, TYPE_STATEMENT_OPTION);
1249 tableOption.setProperty(VALUE, option);
1250 }
1251 }
1252 }
1253
1254 protected boolean areNextTokensCreateTableOptions( DdlTokenStream tokens ) throws ParsingException {
1255 assert tokens != null;
1256
1257 boolean result = false;
1258
1259
1260 if (tokens.matches("ON", "COMMIT")) {
1261 result = true;
1262 }
1263
1264 return result;
1265 }
1266
1267
1268
1269
1270
1271
1272
1273
1274
1275
1276 protected void parseColumnsAndConstraints( DdlTokenStream tokens,
1277 AstNode tableNode ) throws ParsingException {
1278 assert tokens != null;
1279 assert tableNode != null;
1280
1281 if (!tokens.matches(L_PAREN)) {
1282 return;
1283 }
1284
1285 String tableElementString = getTableElementsString(tokens, false);
1286
1287 DdlTokenStream localTokens = new DdlTokenStream(tableElementString, DdlTokenStream.ddlTokenizer(false), false);
1288
1289 localTokens.start();
1290
1291 StringBuffer unusedTokensSB = new StringBuffer();
1292 do {
1293 if (isTableConstraint(localTokens)) {
1294 parseTableConstraint(localTokens, tableNode, false);
1295 } else if (isColumnDefinitionStart(localTokens)) {
1296 parseColumnDefinition(localTokens, tableNode, false);
1297 } else {
1298 unusedTokensSB.append(SPACE).append(localTokens.consume());
1299 }
1300 } while (localTokens.canConsume(COMMA));
1301
1302 if (unusedTokensSB.length() > 0) {
1303 String msg = DdlSequencerI18n.unusedTokensParsingColumnsAndConstraints.text(tableNode.getProperty(NAME));
1304 DdlParserProblem problem = new DdlParserProblem(DdlConstants.Problems.WARNING, Position.EMPTY_CONTENT_POSITION, msg);
1305 problem.setUnusedSource(unusedTokensSB.toString());
1306 addProblem(problem, tableNode);
1307 }
1308
1309 }
1310
1311
1312
1313
1314
1315
1316
1317
1318
1319
1320 protected void parseColumnDefinition( DdlTokenStream tokens,
1321 AstNode tableNode,
1322 boolean isAlterTable ) throws ParsingException {
1323 assert tokens != null;
1324 assert tableNode != null;
1325
1326 tokens.canConsume("COLUMN");
1327 String columnName = parseName(tokens);
1328 DataType datatype = getDatatypeParser().parse(tokens);
1329
1330 AstNode columnNode = nodeFactory().node(columnName, tableNode, TYPE_COLUMN_DEFINITION);
1331
1332 getDatatypeParser().setPropertiesOnNode(columnNode, datatype);
1333
1334
1335 StringBuffer unusedTokensSB = new StringBuffer();
1336
1337 while (tokens.hasNext() && !tokens.matches(COMMA)) {
1338 boolean parsedDefaultClause = parseDefaultClause(tokens, columnNode);
1339 if (!parsedDefaultClause) {
1340 boolean parsedCollate = parseCollateClause(tokens, columnNode);
1341 boolean parsedConstraint = parseColumnConstraint(tokens, columnNode, isAlterTable);
1342 if (!parsedCollate && !parsedConstraint) {
1343
1344
1345 unusedTokensSB.append(SPACE).append(tokens.consume());
1346 }
1347 }
1348 tokens.canConsume(DdlTokenizer.COMMENT);
1349 }
1350
1351 if (unusedTokensSB.length() > 0) {
1352 String msg = DdlSequencerI18n.unusedTokensParsingColumnDefinition.text(tableNode.getName());
1353 DdlParserProblem problem = new DdlParserProblem(Problems.WARNING, Position.EMPTY_CONTENT_POSITION, msg);
1354 problem.setUnusedSource(unusedTokensSB.toString());
1355 addProblem(problem, tableNode);
1356 }
1357 }
1358
1359
1360
1361
1362
1363
1364
1365
1366
1367
    /**
     * Parses a single column definition that ends at the statement terminator (or at the start of
     * the next statement) rather than inside a parenthesized element list — used, for example, for
     * ALTER TABLE ... ADD [COLUMN] clauses. Unlike {@link #parseColumnDefinition}, unrecognized
     * tokens are not collected or reported here.
     *
     * @param tokens the token stream positioned at the column definition; may not be null
     * @param tableNode the owning table (or ALTER TABLE statement) node; may not be null
     * @param isAlterTable true if parsing within an ALTER TABLE statement (affects constraint mixin type)
     * @throws ParsingException if a parsing error occurs
     */
    protected void parseSingleTerminatedColumnDefinition( DdlTokenStream tokens,
                                                          AstNode tableNode,
                                                          boolean isAlterTable ) throws ParsingException {
        assert tokens != null;
        assert tableNode != null;

        tokens.canConsume("COLUMN"); // the COLUMN keyword is optional
        String columnName = parseName(tokens);
        DataType datatype = getDatatypeParser().parse(tokens);

        AstNode columnNode = nodeFactory().node(columnName, tableNode, TYPE_COLUMN_DEFINITION);

        getDatatypeParser().setPropertiesOnNode(columnNode, datatype);

        // Consume DEFAULT, COLLATE, and constraint clauses until the statement ends.
        // NOTE(review): if none of the clause parsers consume a token, this loop does not
        // advance — confirm callers guarantee only parseable clauses reach this point.
        while (tokens.hasNext() && !tokens.matches(getTerminator()) && !tokens.matches(DdlTokenizer.STATEMENT_KEY)) {
            boolean parsedDefaultClause = parseDefaultClause(tokens, columnNode);
            if (!parsedDefaultClause) {
                parseCollateClause(tokens, columnNode);
                parseColumnConstraint(tokens, columnNode, isAlterTable);
            }
            consumeComment(tokens);
            if (tokens.canConsume(COMMA)) break; // a comma ends this column's definition
        }
    }
1395
1396
1397
1398
1399
1400
1401
1402
1403
1404 protected String getTableElementsString( DdlTokenStream tokens,
1405 boolean useTerminator ) throws ParsingException {
1406 assert tokens != null;
1407
1408 StringBuffer sb = new StringBuffer(100);
1409
1410 if (useTerminator) {
1411 while (!isTerminator(tokens)) {
1412 sb.append(SPACE).append(tokens.consume());
1413 }
1414 } else {
1415
1416
1417 tokens.consume(L_PAREN);
1418
1419 int iParen = 0;
1420 while (tokens.hasNext()) {
1421 if (tokens.matches(L_PAREN)) {
1422 iParen++;
1423 } else if (tokens.matches(R_PAREN)) {
1424 if (iParen == 0) {
1425 tokens.consume(R_PAREN);
1426 break;
1427 }
1428 iParen--;
1429 }
1430 if (isComment(tokens)) {
1431 tokens.consume();
1432 } else {
1433 sb.append(SPACE).append(tokens.consume());
1434 }
1435 }
1436 }
1437
1438 return sb.toString();
1439
1440 }
1441
1442
1443
1444
1445
1446
1447
1448
1449
1450
1451 protected String consumeParenBoundedTokens( DdlTokenStream tokens,
1452 boolean includeParens ) throws ParsingException {
1453 assert tokens != null;
1454
1455
1456
1457 if (tokens.canConsume(L_PAREN)) {
1458 StringBuffer sb = new StringBuffer(100);
1459 if (includeParens) {
1460 sb.append(L_PAREN);
1461 }
1462 int iParen = 0;
1463 while (tokens.hasNext()) {
1464 if (tokens.matches(L_PAREN)) {
1465 iParen++;
1466 } else if (tokens.matches(R_PAREN)) {
1467 if (iParen == 0) {
1468 tokens.consume(R_PAREN);
1469 if (includeParens) {
1470 sb.append(SPACE).append(R_PAREN);
1471 }
1472 break;
1473 }
1474 iParen--;
1475 }
1476 if (isComment(tokens)) {
1477 tokens.consume();
1478 } else {
1479 sb.append(SPACE).append(tokens.consume());
1480 }
1481 }
1482 return sb.toString();
1483 }
1484
1485 return null;
1486 }
1487
1488
1489
1490
1491
1492
1493
1494
1495
1496
1497
    /**
     * Parses a column-level constraint clause (NULL / NOT NULL, a named CONSTRAINT, UNIQUE,
     * PRIMARY KEY, FOREIGN KEY, REFERENCES, or CHECK) that follows a column's data type.
     * Nullability is recorded directly on the column node; every other constraint is created as a
     * sibling node under the column's parent (the table), with a column-reference child pointing
     * back at this column. Unnamed constraints get default names ("UC_1", "PK_1", "FK_1", "CHECK_1").
     *
     * @param tokens the token stream; may not be null
     * @param columnNode the column definition node the constraint applies to; may not be null
     * @param isAlterTable true if parsing within an ALTER TABLE statement (affects constraint mixin type)
     * @return true if a constraint clause was recognized and consumed
     * @throws ParsingException if a parsing error occurs
     */
    protected boolean parseColumnConstraint( DdlTokenStream tokens,
                                             AstNode columnNode,
                                             boolean isAlterTable ) throws ParsingException {
        assert tokens != null;
        assert columnNode != null;

        // ALTER TABLE constraints use a different mixin so they can be distinguished later.
        Name mixinType = isAlterTable ? TYPE_ADD_TABLE_CONSTRAINT_DEFINITION : TYPE_TABLE_CONSTRAINT;

        boolean result = false;

        // Constraint nodes carry a reference back to this column by name.
        String colName = columnNode.getName().getString();

        if (tokens.canConsume("NULL")) {
            columnNode.setProperty(NULLABLE, "NULL");
            result = true;
        } else if (tokens.canConsume("NOT", "NULL")) {
            columnNode.setProperty(NULLABLE, "NOT NULL");
            result = true;
        } else if (tokens.matches("CONSTRAINT")) {
            // Named form: CONSTRAINT <name> { UNIQUE | PRIMARY KEY | REFERENCES ... }
            result = true;
            tokens.consume("CONSTRAINT");
            String constraintName = parseName(tokens);
            AstNode constraintNode = nodeFactory().node(constraintName, columnNode.getParent(), mixinType);

            if (tokens.matches("UNIQUE")) {
                tokens.consume("UNIQUE");

                constraintNode.setProperty(CONSTRAINT_TYPE, UNIQUE);

                // Optional explicit column list.
                parseColumnNameList(tokens, constraintNode, TYPE_COLUMN_REFERENCE);

                parseConstraintAttributes(tokens, constraintNode);
            } else if (tokens.matches("PRIMARY", "KEY")) {
                tokens.consume("PRIMARY");
                tokens.consume("KEY");

                constraintNode.setProperty(CONSTRAINT_TYPE, PRIMARY_KEY);

                // Optional explicit column list.
                parseColumnNameList(tokens, constraintNode, TYPE_COLUMN_REFERENCE);

                parseConstraintAttributes(tokens, constraintNode);
            } else if (tokens.matches("REFERENCES")) {
                // Named single-column foreign key.
                constraintNode.setProperty(CONSTRAINT_TYPE, FOREIGN_KEY);

                nodeFactory().node(colName, constraintNode, TYPE_COLUMN_REFERENCE);

                parseReferences(tokens, constraintNode);

                parseConstraintAttributes(tokens, constraintNode);
            }
        } else if (tokens.matches("UNIQUE")) {
            result = true;
            tokens.consume("UNIQUE");

            String uc_name = "UC_1"; // default name for an unnamed unique constraint

            AstNode constraintNode = nodeFactory().node(uc_name, columnNode.getParent(), mixinType);

            constraintNode.setProperty(CONSTRAINT_TYPE, UNIQUE);

            nodeFactory().node(colName, constraintNode, TYPE_COLUMN_REFERENCE);

        } else if (tokens.matches("PRIMARY", "KEY")) {
            result = true;
            tokens.consume("PRIMARY", "KEY");

            String pk_name = "PK_1"; // default name for an unnamed primary key

            AstNode constraintNode = nodeFactory().node(pk_name, columnNode.getParent(), mixinType);

            constraintNode.setProperty(CONSTRAINT_TYPE, PRIMARY_KEY);

            nodeFactory().node(colName, constraintNode, TYPE_COLUMN_REFERENCE);

        } else if (tokens.matches("FOREIGN", "KEY")) {
            result = true;
            tokens.consume("FOREIGN", "KEY");

            // The token following FOREIGN KEY is taken as the constraint name.
            String constraintName = parseName(tokens);

            AstNode constraintNode = nodeFactory().node(constraintName, columnNode.getParent(), mixinType);

            constraintNode.setProperty(CONSTRAINT_TYPE, FOREIGN_KEY);

            nodeFactory().node(colName, constraintNode, TYPE_COLUMN_REFERENCE);

            parseReferences(tokens, constraintNode);
            parseConstraintAttributes(tokens, constraintNode);
        } else if (tokens.matches("REFERENCES")) {
            result = true;

            String constraintName = "FK_1"; // default name for an unnamed foreign key

            AstNode constraintNode = nodeFactory().node(constraintName, columnNode.getParent(), mixinType);

            constraintNode.setProperty(CONSTRAINT_TYPE, FOREIGN_KEY);

            nodeFactory().node(colName, constraintNode, TYPE_COLUMN_REFERENCE);

            parseReferences(tokens, constraintNode);
            parseConstraintAttributes(tokens, constraintNode);
        } else if (tokens.matches("CHECK")) {
            result = true;
            tokens.consume("CHECK");

            String ck_name = "CHECK_1"; // default name for an unnamed check constraint

            AstNode constraintNode = nodeFactory().node(ck_name, columnNode.getParent(), mixinType);
            constraintNode.setProperty(NAME, ck_name);
            constraintNode.setProperty(CONSTRAINT_TYPE, CHECK);

            // Capture the parenthesized search condition verbatim, including the parentheses.
            String clause = consumeParenBoundedTokens(tokens, true);
            constraintNode.setProperty(CHECK_SEARCH_CONDITION, clause);
        }

        return result;
    }
1638
1639
1640
1641
1642
1643
1644
1645
1646
1647
    /**
     * Parses a table-level constraint definition — UNIQUE, PRIMARY KEY, FOREIGN KEY, or CHECK,
     * optionally introduced by CONSTRAINT &lt;name&gt; — and adds a constraint node to the supplied
     * table node. Unnamed constraints get default names ("UC_1", "PK_1", "FK_1"); for an unnamed
     * FOREIGN KEY, a name token following FOREIGN KEY (before the column list) is used if present.
     *
     * @param tokens the token stream; may not be null
     * @param tableNode the owning table (or ALTER TABLE statement) node; may not be null
     * @param isAlterTable true if parsing within an ALTER TABLE statement (affects constraint mixin type)
     * @throws ParsingException if a parsing error occurs
     */
    protected void parseTableConstraint( DdlTokenStream tokens,
                                         AstNode tableNode,
                                         boolean isAlterTable ) throws ParsingException {
        assert tokens != null;
        assert tableNode != null;

        // ALTER TABLE constraints use a different mixin so they can be distinguished later.
        Name mixinType = isAlterTable ? TYPE_ADD_TABLE_CONSTRAINT_DEFINITION : TYPE_TABLE_CONSTRAINT;

        consumeComment(tokens);

        if ((tokens.matches("PRIMARY", "KEY")) || (tokens.matches("FOREIGN", "KEY")) || (tokens.matches("UNIQUE"))) {
            // Unnamed constraint forms.
            if (tokens.matches("UNIQUE")) {
                String uc_name = "UC_1"; // default name for an unnamed unique constraint
                tokens.consume();

                AstNode constraintNode = nodeFactory().node(uc_name, tableNode, mixinType);
                constraintNode.setProperty(CONSTRAINT_TYPE, UNIQUE);

                // Parenthesized list of constrained columns.
                parseColumnNameList(tokens, constraintNode, TYPE_COLUMN_REFERENCE);

                parseConstraintAttributes(tokens, constraintNode);

                consumeComment(tokens);
            } else if (tokens.matches("PRIMARY", "KEY")) {
                String pk_name = "PK_1"; // default name for an unnamed primary key
                tokens.consume("PRIMARY", "KEY");

                AstNode constraintNode = nodeFactory().node(pk_name, tableNode, mixinType);
                constraintNode.setProperty(CONSTRAINT_TYPE, PRIMARY_KEY);

                // Parenthesized list of constrained columns.
                parseColumnNameList(tokens, constraintNode, TYPE_COLUMN_REFERENCE);

                parseConstraintAttributes(tokens, constraintNode);

                consumeComment(tokens);
            } else if (tokens.matches("FOREIGN", "KEY")) {
                String fk_name = "FK_1"; // default name for an unnamed foreign key
                tokens.consume("FOREIGN", "KEY");

                if (!tokens.matches(L_PAREN)) {
                    // A name token precedes the column list; use it as the constraint name.
                    fk_name = tokens.consume();
                }

                AstNode constraintNode = nodeFactory().node(fk_name, tableNode, mixinType);
                constraintNode.setProperty(CONSTRAINT_TYPE, FOREIGN_KEY);

                // Parenthesized list of referencing columns.
                parseColumnNameList(tokens, constraintNode, TYPE_COLUMN_REFERENCE);

                // REFERENCES <table> [(columns)] plus referential actions.
                parseReferences(tokens, constraintNode);

                parseConstraintAttributes(tokens, constraintNode);

                consumeComment(tokens);
            }
        } else if (tokens.matches("CONSTRAINT", DdlTokenStream.ANY_VALUE, "UNIQUE")) {
            // Named form: CONSTRAINT <name> UNIQUE (columns)
            tokens.consume();
            String uc_name = parseName(tokens);
            tokens.consume("UNIQUE");

            AstNode constraintNode = nodeFactory().node(uc_name, tableNode, mixinType);
            constraintNode.setProperty(CONSTRAINT_TYPE, UNIQUE);

            parseColumnNameList(tokens, constraintNode, TYPE_COLUMN_REFERENCE);

            parseConstraintAttributes(tokens, constraintNode);

            consumeComment(tokens);
        } else if (tokens.matches("CONSTRAINT", DdlTokenStream.ANY_VALUE, "PRIMARY", "KEY")) {
            // Named form: CONSTRAINT <name> PRIMARY KEY (columns)
            tokens.consume("CONSTRAINT");
            String pk_name = parseName(tokens);
            tokens.consume("PRIMARY", "KEY");

            AstNode constraintNode = nodeFactory().node(pk_name, tableNode, mixinType);
            constraintNode.setProperty(CONSTRAINT_TYPE, PRIMARY_KEY);

            parseColumnNameList(tokens, constraintNode, TYPE_COLUMN_REFERENCE);

            parseConstraintAttributes(tokens, constraintNode);

            consumeComment(tokens);

        } else if (tokens.matches("CONSTRAINT", DdlTokenStream.ANY_VALUE, "FOREIGN", "KEY")) {
            // Named form: CONSTRAINT <name> FOREIGN KEY (columns) REFERENCES ...
            tokens.consume("CONSTRAINT");
            String fk_name = parseName(tokens);
            tokens.consume("FOREIGN", "KEY");

            AstNode constraintNode = nodeFactory().node(fk_name, tableNode, mixinType);

            constraintNode.setProperty(CONSTRAINT_TYPE, FOREIGN_KEY);

            parseColumnNameList(tokens, constraintNode, TYPE_COLUMN_REFERENCE);

            parseReferences(tokens, constraintNode);

            parseConstraintAttributes(tokens, constraintNode);

            consumeComment(tokens);

        } else if (tokens.matches("CONSTRAINT", DdlTokenStream.ANY_VALUE, "CHECK")) {
            // Named form: CONSTRAINT <name> CHECK ( <search condition> )
            tokens.consume("CONSTRAINT");
            String ck_name = parseName(tokens);
            tokens.consume("CHECK");

            AstNode constraintNode = nodeFactory().node(ck_name, tableNode, mixinType);
            constraintNode.setProperty(CONSTRAINT_TYPE, CHECK);

            // Capture the parenthesized search condition verbatim, including the parentheses.
            String clause = consumeParenBoundedTokens(tokens, true);
            constraintNode.setProperty(CHECK_SEARCH_CONDITION, clause);
        }

    }
1815
1816
1817
1818
1819
1820
1821
1822
1823 protected void parseConstraintAttributes( DdlTokenStream tokens,
1824 AstNode constraintNode ) throws ParsingException {
1825 assert tokens != null;
1826 assert constraintNode != null;
1827
1828
1829
1830
1831
1832
1833
1834
1835
1836
1837
1838
1839 if (tokens.canConsume("INITIALLY", "DEFERRED")) {
1840 AstNode attrNode = nodeFactory().node("CONSTRAINT_ATTRIBUTE", constraintNode, CONSTRAINT_ATTRIBUTE_TYPE);
1841 attrNode.setProperty(PROPERTY_VALUE, "INITIALLY DEFERRED");
1842 }
1843 if (tokens.canConsume("INITIALLY", "IMMEDIATE")) {
1844 AstNode attrNode = nodeFactory().node("CONSTRAINT_ATTRIBUTE", constraintNode, CONSTRAINT_ATTRIBUTE_TYPE);
1845 attrNode.setProperty(PROPERTY_VALUE, "INITIALLY IMMEDIATE");
1846 }
1847 if (tokens.canConsume("NOT", "DEFERRABLE")) {
1848 AstNode attrNode = nodeFactory().node("CONSTRAINT_ATTRIBUTE", constraintNode, CONSTRAINT_ATTRIBUTE_TYPE);
1849 attrNode.setProperty(PROPERTY_VALUE, "NOT DEFERRABLE");
1850 }
1851 if (tokens.canConsume("DEFERRABLE")) {
1852 AstNode attrNode = nodeFactory().node("CONSTRAINT_ATTRIBUTE", constraintNode, CONSTRAINT_ATTRIBUTE_TYPE);
1853 attrNode.setProperty(PROPERTY_VALUE, "DEFERRABLE");
1854 }
1855 if (tokens.canConsume("INITIALLY", "DEFERRED")) {
1856 AstNode attrNode = nodeFactory().node("CONSTRAINT_ATTRIBUTE", constraintNode, CONSTRAINT_ATTRIBUTE_TYPE);
1857 attrNode.setProperty(PROPERTY_VALUE, "INITIALLY DEFERRED");
1858 }
1859 if (tokens.canConsume("INITIALLY", "IMMEDIATE")) {
1860 AstNode attrNode = nodeFactory().node("CONSTRAINT_ATTRIBUTE", constraintNode, CONSTRAINT_ATTRIBUTE_TYPE);
1861 attrNode.setProperty(PROPERTY_VALUE, "INITIALLY IMMEDIATE");
1862 }
1863 }
1864
    /**
     * Parses the REFERENCES clause of a foreign key: the referenced table name, an optional
     * referenced-column list, optional MATCH clauses, and any number of ON UPDATE / ON DELETE
     * referential actions. A table-reference child and FK column-reference children are added to
     * the constraint node; MATCH clauses and referential actions are consumed but not recorded.
     *
     * @param tokens the token stream; may not be null
     * @param constraintNode the foreign key constraint node; may not be null
     * @throws ParsingException if a parsing error occurs
     */
    protected void parseReferences( DdlTokenStream tokens,
                                    AstNode constraintNode ) throws ParsingException {
        assert tokens != null;
        assert constraintNode != null;

        if (tokens.matches("REFERENCES")) {
            tokens.consume("REFERENCES");

            String tableName = parseName(tokens);

            nodeFactory().node(tableName, constraintNode, TYPE_TABLE_REFERENCE);

            // Optional parenthesized list of referenced columns.
            parseColumnNameList(tokens, constraintNode, TYPE_FK_COLUMN_REFERENCE);

            // MATCH clauses are accepted but not recorded on the node.
            tokens.canConsume("MATCH", "FULL");
            tokens.canConsume("MATCH", "PARTIAL");

            while (tokens.canConsume("ON", "UPDATE") || tokens.canConsume("ON", "DELETE")) {
                // NOTE(review): matching "NOW()" as a referential action alongside CASCADE looks
                // suspect — confirm intent.
                if (tokens.matches("CASCADE") || tokens.matches("NOW()")) {
                    tokens.consume();
                } else if (tokens.matches("SET", "NULL")) {
                    tokens.consume("SET", "NULL");
                } else if (tokens.matches("SET", "DEFAULT")) {
                    tokens.consume("SET", "DEFAULT");
                } else if (tokens.matches("NO", "ACTION")) {
                    tokens.consume("NO", "ACTION");
                } else {
                    // NOTE(review): error goes to stdout instead of a DdlParserProblem — consider
                    // routing through addProblem(...) like other parse warnings.
                    System.out.println(" ERROR: ColumnDefinition REFERENCES has NO REFERENCIAL ACTION.");
                }
            }
        }
    }
1916
1917
1918
1919
1920
1921
1922
1923
1924
1925
1926
1927
1928
1929
1930 protected AstNode parseCreateViewStatement( DdlTokenStream tokens,
1931 AstNode parentNode ) throws ParsingException {
1932 assert tokens != null;
1933 assert parentNode != null;
1934
1935 markStartOfStatement(tokens);
1936
1937
1938
1939
1940
1941
1942
1943
1944
1945
1946 String stmtType = "CREATE";
1947 tokens.consume("CREATE");
1948 if (tokens.canConsume("OR", "REPLACE")) {
1949 stmtType = stmtType + SPACE + "OR REPLACE";
1950 }
1951 tokens.consume("VIEW");
1952 stmtType = stmtType + SPACE + "VIEW";
1953
1954 String name = parseName(tokens);
1955
1956 AstNode createViewNode = nodeFactory().node(name, parentNode, TYPE_CREATE_VIEW_STATEMENT);
1957
1958
1959 parseColumnNameList(tokens, createViewNode, TYPE_COLUMN_REFERENCE);
1960
1961 tokens.consume("AS");
1962
1963 String queryExpression = parseUntilTerminator(tokens);
1964
1965 createViewNode.setProperty(CREATE_VIEW_QUERY_EXPRESSION, queryExpression);
1966
1967 markEndOfStatement(tokens, createViewNode);
1968
1969 return createViewNode;
1970 }
1971
1972
1973
1974
1975
1976
1977
1978
1979
1980
1981
1982
1983
1984
1985 protected AstNode parseCreateSchemaStatement( DdlTokenStream tokens,
1986 AstNode parentNode ) throws ParsingException {
1987 markStartOfStatement(tokens);
1988
1989 AstNode schemaNode = null;
1990
1991 String authorizationIdentifier = null;
1992 String schemaName = null;
1993
1994 tokens.consume("CREATE", "SCHEMA");
1995
1996 if (tokens.canConsume("AUTHORIZATION")) {
1997 authorizationIdentifier = tokens.consume();
1998 } else {
1999 schemaName = parseName(tokens);
2000 if (tokens.canConsume("AUTHORIZATION")) {
2001 authorizationIdentifier = parseName(tokens);
2002 }
2003 }
2004
2005 assert authorizationIdentifier != null || schemaName != null;
2006
2007 if (schemaName != null) {
2008 schemaNode = nodeFactory().node(schemaName, parentNode, TYPE_CREATE_SCHEMA_STATEMENT);
2009 } else {
2010 schemaNode = nodeFactory().node(authorizationIdentifier, parentNode, TYPE_CREATE_SCHEMA_STATEMENT);
2011 }
2012
2013 if (tokens.canConsume("DEFAULT", "CHARACTER", "SET")) {
2014
2015 parseName(tokens);
2016 }
2017
2018 markEndOfStatement(tokens, schemaNode);
2019
2020 return schemaNode;
2021 }
2022
2023
2024
2025
2026
2027
2028
2029
2030
2031
2032 protected AstNode parseCreateAssertionStatement( DdlTokenStream tokens,
2033 AstNode parentNode ) throws ParsingException {
2034 markStartOfStatement(tokens);
2035
2036
2037
2038
2039 AstNode node = null;
2040
2041 tokens.consume("CREATE", "ASSERTION");
2042
2043 String name = parseName(tokens);
2044
2045
2046
2047 node = nodeFactory().node(name, parentNode, TYPE_CREATE_ASSERTION_STATEMENT);
2048
2049 tokens.consume("CHECK");
2050
2051 String searchCondition = consumeParenBoundedTokens(tokens, false);
2052
2053 node.setProperty(CHECK_SEARCH_CONDITION, searchCondition);
2054
2055 parseConstraintAttributes(tokens, node);
2056
2057 markEndOfStatement(tokens, node);
2058
2059 return node;
2060 }
2061
2062
2063
2064
2065
2066
2067
2068
2069
2070
2071
2072
2073
2074
2075
2076
2077 protected AstNode parseIgnorableStatement( DdlTokenStream tokens,
2078 String name,
2079 AstNode parentNode ) {
2080
2081 AstNode node = nodeFactory().node(name, parentNode, TYPE_STATEMENT);
2082
2083 parseUntilTerminator(tokens);
2084
2085
2086 return node;
2087 }
2088
2089
2090
2091
2092
2093
2094
2095
2096
2097
2098
2099
2100
2101 protected AstNode parseIgnorableStatement( DdlTokenStream tokens,
2102 String name,
2103 AstNode parentNode,
2104 Name mixinType ) {
2105 CheckArg.isNotNull(tokens, "tokens");
2106 CheckArg.isNotNull(name, "name");
2107 CheckArg.isNotNull(parentNode, "parentNode");
2108 CheckArg.isNotNull(mixinType, "mixinType");
2109
2110 AstNode node = nodeFactory().node(name, parentNode, mixinType);
2111
2112 parseUntilTerminator(tokens);
2113
2114 return node;
2115 }
2116
2117
2118
2119
2120
2121
2122
2123
2124
2125
    /**
     * Parses a statement that begins with the given keyword phrase: the phrase is consumed, the
     * remainder of the statement is skipped up to the terminator, and a node named after the
     * phrase is created under the parent with the given mixin.
     *
     * @param tokens the token stream; may not be null
     * @param stmt_start_phrase the keywords that introduce the statement; may not be null
     * @param parentNode the parent AST node; may not be null
     * @param mixinType the mixin type for the new node; may not be null
     * @return the new statement node
     */
    protected AstNode parseStatement( DdlTokenStream tokens,
                                      String[] stmt_start_phrase,
                                      AstNode parentNode,
                                      Name mixinType ) {
        CheckArg.isNotNull(tokens, "tokens");
        CheckArg.isNotNull(stmt_start_phrase, "stmt_start_phrase");
        CheckArg.isNotNull(parentNode, "parentNode");
        CheckArg.isNotNull(mixinType, "mixinType");

        markStartOfStatement(tokens);
        tokens.consume(stmt_start_phrase);
        AstNode result = parseIgnorableStatement(tokens, getStatementTypeName(stmt_start_phrase), parentNode, mixinType);
        markEndOfStatement(tokens, result);

        return result;
    }
2142
2143
2144
2145
2146
2147
2148
    /**
     * Creates an unknown-statement placeholder node under the given parent.
     *
     * @param parentNode the parent node; may not be null
     * @return the new unknown-statement node
     */
    public final AstNode unknownTerminatedNode( AstNode parentNode ) {
        return nodeFactory.node("unknownStatement", parentNode, StandardDdlLexicon.TYPE_UNKNOWN_STATEMENT);
    }
2152
2153
2154
2155
2156
2157
2158
    /**
     * Creates a missing-terminator marker node under the given parent, recording that the
     * preceding statement lacked a terminator.
     *
     * @param parentNode the parent node; may not be null
     * @return the new missing-terminator node
     */
    public final AstNode missingTerminatorNode( AstNode parentNode ) {
        return nodeFactory.node("missingTerminator", parentNode, StandardDdlLexicon.TYPE_MISSING_TERMINATOR);
    }
2162
    /**
     * Determines whether the given node is a missing-terminator marker (by both its name and its
     * mixin type).
     *
     * @param node the node to check; may not be null
     * @return true if the node is a missing-terminator marker
     */
    public final boolean isMissingTerminatorNode( AstNode node ) {
        return node.getName().getString().equals(MISSING_TERMINATOR_NODE_LITERAL)
               && nodeFactory().hasMixinType(node, TYPE_MISSING_TERMINATOR);
    }
2167
2168 public final boolean isValidSchemaChild( AstNode node ) {
2169 Name[] schemaChildMixins = getValidSchemaChildTypes();
2170 for (Object mixin : node.getProperty(JcrLexicon.MIXIN_TYPES).getValuesAsArray()) {
2171 if (mixin instanceof Name) {
2172 for (Name nextType : schemaChildMixins) {
2173 if (nextType.equals(mixin)) {
2174 return true;
2175 }
2176 }
2177 }
2178 }
2179
2180 return false;
2181 }
2182
    /**
     * Attempts to re-parent the given statement node under a preceding CREATE SCHEMA statement at
     * the root. This supports schemas whose child statements appear inline, where a
     * missing-terminator marker separates the schema node from each inline child. Returns false
     * when the node is not a valid schema child or the expected root-level pattern (schema node,
     * missing-terminator marker, this statement) is not present.
     *
     * @param statementNode the statement node just parsed
     * @param stmtIsMissingTerminator true if the statement itself lacked a terminator
     * @return true if the node was moved under the schema node
     */
    public final boolean setAsSchemaChildNode( AstNode statementNode,
                                               boolean stmtIsMissingTerminator ) {

        if (!isValidSchemaChild(statementNode)) {
            return false;
        }

        List<AstNode> children = getRootNode().getChildren();

        if (children.size() > 2) {
            // The node two back must be a missing-terminator marker, and the one before it the schema.
            AstNode previousNode = children.get(children.size() - 2);
            if (nodeFactory().hasMixinType(previousNode, TYPE_MISSING_TERMINATOR)) {
                AstNode theSchemaNode = children.get(children.size() - 3);

                // Only re-parent when the schema is still accepting children: it is empty or its
                // last child also lacked a terminator.
                if (theSchemaNode.getChildCount() == 0
                    || nodeFactory().hasMixinType(theSchemaNode.getLastChild(), TYPE_MISSING_TERMINATOR)) {
                    if (nodeFactory().hasMixinType(theSchemaNode, TYPE_CREATE_SCHEMA_STATEMENT)) {
                        statementNode.setParent(theSchemaNode);
                        if (stmtIsMissingTerminator) {
                            // Keep the schema "open" for the next inline child.
                            missingTerminatorNode(theSchemaNode);
                        }
                        return true;
                    }
                }
            }
        }

        return false;
    }
2217
2218
2219
2220
2221
2222
    /**
     * @return the current statement terminator string
     */
    protected String getTerminator() {
        return this.terminator;
    }
2226
2227
2228
2229
2230
2231 protected boolean setTerminator( String terminator ) {
2232 CheckArg.isNotNull(terminator, "terminator");
2233 if (this.terminator.equalsIgnoreCase(terminator)) {
2234 return false;
2235 }
2236 this.terminator = terminator;
2237 return true;
2238 }
2239
    /**
     * @return the mixin types of statements that may be re-parented under a CREATE SCHEMA node;
     *         subclasses may override to extend the set
     */
    protected Name[] getValidSchemaChildTypes() {
        return VALID_SCHEMA_CHILD_TYPES;
    }
2243
2244
2245
2246
2247
2248
2249
2250
    /**
     * Determines whether the next token is a comment, without consuming it.
     *
     * @param tokens the token stream; may not be null
     * @return true if the next token is a comment
     * @throws ParsingException if a parsing error occurs
     */
    protected boolean isComment( DdlTokenStream tokens ) throws ParsingException {
        return tokens.matches(DdlTokenizer.COMMENT);
    }
2254
2255
2256
2257
2258
2259
2260
    /**
     * Consumes at most one comment token if one is next; otherwise does nothing.
     *
     * @param tokens the token stream; may not be null
     * @throws ParsingException if a parsing error occurs
     */
    protected void consumeComment( DdlTokenStream tokens ) throws ParsingException {
        tokens.canConsume(DdlTokenizer.COMMENT);
    }
2264
2265
2266
2267
2268
2269
2270
2271
2272
2273 protected boolean isTableConstraint( DdlTokenStream tokens ) throws ParsingException {
2274 boolean result = false;
2275
2276 if ((tokens.matches("PRIMARY", "KEY")) || (tokens.matches("FOREIGN", "KEY")) || (tokens.matches("UNIQUE"))) {
2277 result = true;
2278 } else if (tokens.matches("CONSTRAINT")) {
2279 if (tokens.matches("CONSTRAINT", DdlTokenStream.ANY_VALUE, "UNIQUE")
2280 || tokens.matches("CONSTRAINT", DdlTokenStream.ANY_VALUE, "PRIMARY", "KEY")
2281 || tokens.matches("CONSTRAINT", DdlTokenStream.ANY_VALUE, "FOREIGN", "KEY")
2282 || tokens.matches("CONSTRAINT", DdlTokenStream.ANY_VALUE, "CHECK")) {
2283 result = true;
2284 }
2285 }
2286
2287 return result;
2288 }
2289
2290
2291
2292
2293
2294
2295
2296
2297
2298 protected boolean isColumnDefinitionStart( DdlTokenStream tokens ) throws ParsingException {
2299 boolean result = false;
2300
2301 if (isTableConstraint(tokens)) {
2302 result = false;
2303 } else {
2304 for (String dTypeStartWord : getDataTypeStartWords()) {
2305 result = (tokens.matches(DdlTokenStream.ANY_VALUE, dTypeStartWord) || tokens.matches("COLUMN",
2306 DdlTokenStream.ANY_VALUE,
2307 dTypeStartWord));
2308 if (result) {
2309 break;
2310 }
2311 }
2312
2313 }
2314
2315 return result;
2316 }
2317
2318
2319
2320
2321
2322
    /**
     * Returns the lazily-built list of words that can start a data type: the standard set plus any
     * dialect-specific additions from {@link #getCustomDataTypeStartWords()}.
     * NOTE(review): the lazy initialization is not synchronized — acceptable only if each parser
     * instance is confined to a single thread; confirm.
     *
     * @return the list of data type start words
     */
    protected List<String> getDataTypeStartWords() {
        if (allDataTypeStartWords == null) {
            allDataTypeStartWords = new ArrayList<String>();
            allDataTypeStartWords.addAll(DataTypes.DATATYPE_START_WORDS);
            allDataTypeStartWords.addAll(getCustomDataTypeStartWords());
        }
        return allDataTypeStartWords;
    }
2331
2332
2333
2334
2335
2336
2337
    /**
     * Hook for dialect-specific subclasses to contribute additional data type start words.
     * This base implementation contributes none.
     *
     * @return the (possibly empty) list of additional data type start words; never null
     */
    protected List<String> getCustomDataTypeStartWords() {
        return Collections.emptyList();
    }
2341
2342
2343
2344
2345
2346
2347
2348
2349 protected String parseName( DdlTokenStream tokens ) {
2350
2351
2352
2353 StringBuffer sb = new StringBuffer();
2354
2355 if (tokens.matches('[')) {
2356
2357 while (true) {
2358
2359 tokens.consume('[');
2360 sb.append(consumeIdentifier(tokens));
2361 tokens.consume(']');
2362 if (tokens.matches('.')) {
2363 sb.append(tokens.consume());
2364 } else {
2365 break;
2366 }
2367 }
2368 } else {
2369
2370
2371 while (true) {
2372
2373 sb.append(consumeIdentifier(tokens));
2374
2375 if (tokens.matches('.')) {
2376 sb.append(tokens.consume());
2377 } else {
2378 break;
2379 }
2380
2381 }
2382 }
2383
2384 return sb.toString();
2385 }
2386
2387
2388
2389
2390
2391
2392
2393
2394 protected String consumeIdentifier( DdlTokenStream tokens ) throws ParsingException {
2395 String value = tokens.consume();
2396
2397 if (value.charAt(0) == '"') {
2398 int length = value.length();
2399
2400 value = value.substring(1, length - 1);
2401 }
2402
2403
2404
2405
2406
2407
2408
2409
2410 return value;
2411 }
2412
2413
2414
2415
2416
2417
2418
2419
    /**
     * Determines whether the next token is the statement terminator, without consuming it.
     *
     * @param tokens the token stream; may not be null
     * @return true if the next token is the terminator
     * @throws ParsingException if a parsing error occurs
     */
    protected boolean isTerminator( DdlTokenStream tokens ) throws ParsingException {
        boolean result = tokens.matches(getTerminator());

        return result;
    }
2425
2426 protected void parseColumnNameList( DdlTokenStream tokens,
2427 AstNode parentNode,
2428 Name referenceType ) {
2429
2430 List<String> columnNameList = new ArrayList<String>();
2431 if (tokens.matches(L_PAREN)) {
2432 tokens.consume(L_PAREN);
2433 columnNameList = parseColumnNameList(tokens);
2434 tokens.consume(R_PAREN);
2435 }
2436
2437 for (String columnName : columnNameList) {
2438 nodeFactory().node(columnName, parentNode, referenceType);
2439 }
2440 }
2441
2442
2443
2444
2445
2446
2447
2448
2449 protected List<String> parseColumnNameList( DdlTokenStream tokens ) throws ParsingException {
2450 List<String> columnNames = new LinkedList<String>();
2451
2452 while (true) {
2453 columnNames.add(parseName(tokens));
2454 if (!tokens.canConsume(COMMA)) {
2455 break;
2456 }
2457 }
2458
2459 return columnNames;
2460 }
2461
2462
2463
2464
2465
2466
2467
2468
2469
2470 protected String parseUntilTerminator( DdlTokenStream tokens ) throws ParsingException {
2471 StringBuffer sb = new StringBuffer();
2472 if (doUseTerminator()) {
2473 boolean lastTokenWasPeriod = false;
2474 while (tokens.hasNext() && !tokens.matches(DdlTokenizer.STATEMENT_KEY) && !isTerminator(tokens)) {
2475 String thisToken = tokens.consume();
2476 boolean thisTokenIsPeriod = thisToken.equals(PERIOD);
2477 boolean thisTokenIsComma = thisToken.equals(COMMA);
2478 if (lastTokenWasPeriod || thisTokenIsPeriod || thisTokenIsComma) {
2479 sb.append(thisToken);
2480 } else {
2481 sb.append(SPACE).append(thisToken);
2482 }
2483 if (thisTokenIsPeriod) {
2484 lastTokenWasPeriod = true;
2485 } else {
2486 lastTokenWasPeriod = false;
2487 }
2488 }
2489 } else {
2490
2491 boolean lastTokenWasPeriod = false;
2492 while (tokens.hasNext() && !tokens.matches(DdlTokenizer.STATEMENT_KEY)) {
2493 String thisToken = tokens.consume();
2494 boolean thisTokenIsPeriod = thisToken.equals(PERIOD);
2495 boolean thisTokenIsComma = thisToken.equals(COMMA);
2496 if (lastTokenWasPeriod || thisTokenIsPeriod || thisTokenIsComma) {
2497 sb.append(thisToken);
2498 } else {
2499 sb.append(SPACE).append(thisToken);
2500 }
2501 if (thisTokenIsPeriod) {
2502 lastTokenWasPeriod = true;
2503 } else {
2504 lastTokenWasPeriod = false;
2505 }
2506 }
2507 }
2508
2509 return sb.toString();
2510 }
2511
2512
2513
2514
2515
2516
2517
2518
2519
2520
2521 protected String parseUntilTerminatorIgnoreEmbeddedStatements( DdlTokenStream tokens ) throws ParsingException {
2522 StringBuffer sb = new StringBuffer();
2523
2524 boolean lastTokenWasPeriod = false;
2525 while (tokens.hasNext() && !isTerminator(tokens)) {
2526 String thisToken = tokens.consume();
2527 boolean thisTokenIsPeriod = thisToken.equals(PERIOD);
2528 boolean thisTokenIsComma = thisToken.equals(COMMA);
2529 if (lastTokenWasPeriod || thisTokenIsPeriod || thisTokenIsComma) {
2530 sb.append(thisToken);
2531 } else {
2532 sb.append(SPACE).append(thisToken);
2533 }
2534 if (thisTokenIsPeriod) {
2535 lastTokenWasPeriod = true;
2536 } else {
2537 lastTokenWasPeriod = false;
2538 }
2539 }
2540
2541 return sb.toString();
2542 }
2543
2544
2545
2546
2547
2548
2549
2550
2551 protected String parseUntilSemiColon( DdlTokenStream tokens ) throws ParsingException {
2552 StringBuffer sb = new StringBuffer();
2553
2554 boolean lastTokenWasPeriod = false;
2555 while (tokens.hasNext() && !tokens.matches(SEMICOLON)) {
2556 String thisToken = tokens.consume();
2557 boolean thisTokenIsPeriod = thisToken.equals(PERIOD);
2558 boolean thisTokenIsComma = thisToken.equals(COMMA);
2559 if (lastTokenWasPeriod || thisTokenIsPeriod || thisTokenIsComma) {
2560 sb.append(thisToken);
2561 } else {
2562 sb.append(SPACE).append(thisToken);
2563 }
2564 if (thisTokenIsPeriod) {
2565 lastTokenWasPeriod = true;
2566 } else {
2567 lastTokenWasPeriod = false;
2568 }
2569 }
2570
2571 return sb.toString();
2572 }
2573
2574 protected String parseUntilCommaOrTerminator( DdlTokenStream tokens ) throws ParsingException {
2575 StringBuffer sb = new StringBuffer();
2576 if (doUseTerminator()) {
2577 while (tokens.hasNext() && !tokens.matches(DdlTokenizer.STATEMENT_KEY) && !isTerminator(tokens)
2578 && !tokens.matches(COMMA)) {
2579 sb.append(SPACE).append(tokens.consume());
2580 }
2581 } else {
2582
2583 while (tokens.hasNext() && !tokens.matches(DdlTokenizer.STATEMENT_KEY) && !tokens.matches(COMMA)) {
2584 sb.append(SPACE).append(tokens.consume());
2585 }
2586 }
2587
2588 return sb.toString();
2589 }
2590
2591
2592
2593
2594
2595
/**
 * Indicates whether parsing should stop at the registered statement terminator
 * (see {@link #setDoUseTerminator(boolean)}).
 *
 * @return true if the terminator-sensitive parse methods should honor the terminator
 */
public boolean doUseTerminator() {
    return useTerminator;
}
2599
2600
2601
2602
2603
2604
2605
/**
 * Sets whether parsing should stop at the registered statement terminator
 * (see {@link #doUseTerminator()}).
 *
 * @param useTerminator true to make terminator-sensitive parse methods honor the terminator
 */
public void setDoUseTerminator( boolean useTerminator ) {
    this.useTerminator = useTerminator;
}
2609
2610 public String getStatementTypeName( String[] stmtPhrase ) {
2611 StringBuffer sb = new StringBuffer(100);
2612 for (int i = 0; i < stmtPhrase.length; i++) {
2613 if (i == 0) {
2614 sb.append(stmtPhrase[0]);
2615 } else {
2616 sb.append(SPACE).append(stmtPhrase[i]);
2617 }
2618 }
2619
2620 return sb.toString();
2621 }
2622
2623
2624
2625
2626
2627
2628
2629
2630
/**
 * Parses an optional SQL {@code DEFAULT} clause for a column definition. When a clause is
 * present, the default option id, the textual default value, and (for {@code CURRENT_TIME}
 * / {@code CURRENT_TIMESTAMP}) an optional precision are stored on the column node.
 * <p>
 * Recognized forms: datetime value functions ({@code CURRENT_DATE}, {@code CURRENT_TIME[(p)]},
 * {@code CURRENT_TIMESTAMP[(p)]}), user value functions ({@code USER}, {@code CURRENT_USER},
 * {@code SESSION_USER}, {@code SYSTEM_USER}), {@code NULL}, a parenthesized expression, and a
 * plain literal (optionally split across a decimal point).
 *
 * @param tokens the token stream; may not be null
 * @param columnNode the column node on which the default properties are set; may not be null
 * @return true if a DEFAULT clause was found and consumed, false otherwise
 * @throws ParsingException if the token stream cannot be read
 */
protected boolean parseDefaultClause( DdlTokenStream tokens,
                                      AstNode columnNode ) throws ParsingException {

    assert tokens != null;
    assert columnNode != null;

    String defaultValue = "";

    if (tokens.canConsume("DEFAULT")) {
        // DEFAULT_ID_* constants (declared elsewhere in this class) identify which kind of
        // default was found; -1 means "not yet determined".
        int optionID = -1;
        // Precision is only set for CURRENT_TIME / CURRENT_TIMESTAMP with an explicit "(p)".
        int precision = -1;

        if (tokens.canConsume("CURRENT_DATE")) {

            optionID = DEFAULT_ID_DATETIME;
            defaultValue = "CURRENT_DATE";
        } else if (tokens.canConsume("CURRENT_TIME")) {
            optionID = DEFAULT_ID_DATETIME;
            defaultValue = "CURRENT_TIME";
            if (tokens.canConsume(L_PAREN)) {
                // Optional time precision, e.g. CURRENT_TIME(6). NOTE(review): the closing
                // paren is consumed leniently via canConsume, so a missing ")" is tolerated.
                precision = integer(tokens.consume());
                tokens.canConsume(R_PAREN);
            }
        } else if (tokens.canConsume("CURRENT_TIMESTAMP")) {
            optionID = DEFAULT_ID_DATETIME;
            defaultValue = "CURRENT_TIMESTAMP";
            if (tokens.canConsume(L_PAREN)) {
                // Optional timestamp precision; same lenient ")" handling as above.
                precision = integer(tokens.consume());
                tokens.canConsume(R_PAREN);
            }
        } else if (tokens.canConsume("USER")) {
            optionID = DEFAULT_ID_USER;
            defaultValue = "USER";
        } else if (tokens.canConsume("CURRENT_USER")) {
            optionID = DEFAULT_ID_CURRENT_USER;
            defaultValue = "CURRENT_USER";
        } else if (tokens.canConsume("SESSION_USER")) {
            optionID = DEFAULT_ID_SESSION_USER;
            defaultValue = "SESSION_USER";
        } else if (tokens.canConsume("SYSTEM_USER")) {
            optionID = DEFAULT_ID_SYSTEM_USER;
            defaultValue = "SYSTEM_USER";
        } else if (tokens.canConsume("NULL")) {
            optionID = DEFAULT_ID_NULL;
            defaultValue = "NULL";
        } else if (tokens.canConsume(L_PAREN)) {
            // Parenthesized default expression: concatenate every token (with no
            // separators) up to and including the matching ")".
            optionID = DEFAULT_ID_LITERAL;
            while (!tokens.canConsume(R_PAREN)) {
                defaultValue = defaultValue + tokens.consume();
            }
        } else {
            optionID = DEFAULT_ID_LITERAL;
            // Plain literal default; a following "." indicates a decimal number whose
            // integer and fractional parts were tokenized separately.
            defaultValue = tokens.consume();

            if (tokens.canConsume(".")) {
                defaultValue = defaultValue + '.' + tokens.consume();
            }
        }

        columnNode.setProperty(DEFAULT_OPTION, optionID);
        columnNode.setProperty(DEFAULT_VALUE, defaultValue);
        if (precision > -1) {
            columnNode.setProperty(DEFAULT_PRECISION, precision);
        }
        return true;
    }

    return false;
}
2724
2725
2726
2727
2728
2729
2730
2731
2732
2733 protected boolean parseCollateClause( DdlTokenStream tokens,
2734 AstNode columnNode ) throws ParsingException {
2735 assert tokens != null;
2736 assert columnNode != null;
2737
2738
2739
2740
2741
2742 if (tokens.matches("COLLATE")) {
2743 tokens.consume("COLLATE");
2744 String collationName = parseName(tokens);
2745 columnNode.setProperty(COLLATION_NAME, collationName);
2746 return true;
2747 }
2748
2749 return false;
2750 }
2751
2752
2753
2754
2755
2756
2757
2758
2759 protected int integer( String value ) {
2760 assert value != null;
2761 assert value.length() > 0;
2762
2763 return new BigInteger(value).intValue();
2764 }
2765
/**
 * Returns the stream position recorded by the most recent call to
 * {@link #markStartOfStatement(DdlTokenStream)}.
 *
 * @return the marked position; may be null if no statement start has been marked yet
 */
public final Position getCurrentMarkedPosition() {
    return currentMarkedPosition;
}
2769
2770
2771
2772
2773
2774
/**
 * Marks the token stream at the start of a statement and records the current position so
 * that {@link #markEndOfStatement(DdlTokenStream, AstNode)} can later capture the
 * statement's raw source text and starting location.
 *
 * @param tokens the token stream being parsed; may not be null
 */
public final void markStartOfStatement( DdlTokenStream tokens ) {
    // The mark must be set before reading the position so both refer to the same token.
    tokens.mark();
    currentMarkedPosition = tokens.nextPosition();
}
2779
2780
2781
2782
2783
2784
2785
2786
2787
2788
/**
 * Marks the end of the current statement: consumes the statement terminator if present,
 * attempts to attach the statement node under the current schema node, and records the
 * statement's raw source text and starting line/column/character index (captured by
 * {@link #markStartOfStatement(DdlTokenStream)}) as properties on the node.
 *
 * @param tokens the token stream being parsed; may not be null
 * @param statementNode the node representing the statement just parsed; may not be null
 */
public final void markEndOfStatement( DdlTokenStream tokens,
                                      AstNode statementNode ) {
    if (!tokens.canConsume(getTerminator())) {
        // No terminator was found. If the statement could not be attached as a schema
        // child (see setAsSchemaChildNode), record a "missing terminator" problem node
        // under the root so the omission is reported rather than silently dropped.
        if (!setAsSchemaChildNode(statementNode, true)) {
            missingTerminatorNode(getRootNode());
        }
    } else {
        setAsSchemaChildNode(statementNode, false);
    }

    // Capture the raw DDL text between the mark set in markStartOfStatement and the
    // current stream position, and store it with the statement's starting coordinates.
    String source = tokens.getMarkedContent().trim();
    statementNode.setProperty(DDL_EXPRESSION, source);
    statementNode.setProperty(DDL_START_LINE_NUMBER, currentMarkedPosition.getLine());
    statementNode.setProperty(DDL_START_CHAR_INDEX, currentMarkedPosition.getIndexInContent());
    statementNode.setProperty(DDL_START_COLUMN_NUMBER, currentMarkedPosition.getColumn());

    // Debug aid: only prints when test mode is enabled (see testPrint/isTestMode).
    testPrint("== >> SOURCE:\n" + source + "\n");
}
2815
2816 protected void testPrint( String str ) {
2817 if (isTestMode()) {
2818 System.out.println(str);
2819 }
2820 }
2821
2822
2823
2824
/**
 * Indicates whether test mode is enabled, which controls whether {@code testPrint}
 * writes debug output to standard out.
 *
 * @return true if test mode is enabled
 */
public boolean isTestMode() {
    return testMode;
}
2828
2829
2830
2831
/**
 * Enables or disables test mode (see {@link #isTestMode()}).
 *
 * @param testMode true to enable debug printing via {@code testPrint}
 */
public void setTestMode( boolean testMode ) {
    this.testMode = testMode;
}
2835
2836
2837
2838
2839
2840
/**
 * Returns the identifier of this parser, which is also the basis for
 * {@link #hashCode()} and {@link #equals(Object)}.
 *
 * @return the parser identifier
 */
public String getId() {
    return parserId;
}
2844
2845
2846
2847
2848
2849
/**
 * {@inheritDoc}
 * <p>
 * The hash code is derived solely from the parser identifier, keeping it consistent
 * with {@link #equals(Object)}, which also compares only the identifier.
 */
@Override
public int hashCode() {
    return this.parserId.hashCode();
}
2854
2855
2856
2857
2858
2859
2860 @Override
2861 public boolean equals( Object obj ) {
2862 if (obj == this) return true;
2863 if (obj instanceof DdlParser) {
2864 return ((DdlParser)obj).getId().equals(this.getId());
2865 }
2866 return false;
2867 }
2868 }