1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30 package org.modeshape.sequencer.ddl;
31
32 import static org.modeshape.sequencer.ddl.StandardDdlLexicon.ALL_PRIVILEGES;
33 import static org.modeshape.sequencer.ddl.StandardDdlLexicon.CHECK_SEARCH_CONDITION;
34 import static org.modeshape.sequencer.ddl.StandardDdlLexicon.COLLATION_NAME;
35 import static org.modeshape.sequencer.ddl.StandardDdlLexicon.CONSTRAINT_ATTRIBUTE_TYPE;
36 import static org.modeshape.sequencer.ddl.StandardDdlLexicon.CONSTRAINT_TYPE;
37 import static org.modeshape.sequencer.ddl.StandardDdlLexicon.CREATE_VIEW_QUERY_EXPRESSION;
38 import static org.modeshape.sequencer.ddl.StandardDdlLexicon.DDL_EXPRESSION;
39 import static org.modeshape.sequencer.ddl.StandardDdlLexicon.DDL_START_CHAR_INDEX;
40 import static org.modeshape.sequencer.ddl.StandardDdlLexicon.DDL_START_COLUMN_NUMBER;
41 import static org.modeshape.sequencer.ddl.StandardDdlLexicon.DDL_START_LINE_NUMBER;
42 import static org.modeshape.sequencer.ddl.StandardDdlLexicon.DEFAULT_OPTION;
43 import static org.modeshape.sequencer.ddl.StandardDdlLexicon.DEFAULT_PRECISION;
44 import static org.modeshape.sequencer.ddl.StandardDdlLexicon.DEFAULT_VALUE;
45 import static org.modeshape.sequencer.ddl.StandardDdlLexicon.DROP_BEHAVIOR;
46 import static org.modeshape.sequencer.ddl.StandardDdlLexicon.GRANTEE;
47 import static org.modeshape.sequencer.ddl.StandardDdlLexicon.GRANT_PRIVILEGE;
48 import static org.modeshape.sequencer.ddl.StandardDdlLexicon.MESSAGE;
49 import static org.modeshape.sequencer.ddl.StandardDdlLexicon.NAME;
50 import static org.modeshape.sequencer.ddl.StandardDdlLexicon.NULLABLE;
51 import static org.modeshape.sequencer.ddl.StandardDdlLexicon.PROBLEM_LEVEL;
52 import static org.modeshape.sequencer.ddl.StandardDdlLexicon.PROPERTY_VALUE;
53 import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TEMPORARY;
54 import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE;
55 import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_ADD_TABLE_CONSTRAINT_DEFINITION;
56 import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_ALTER_COLUMN_DEFINITION;
57 import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_ALTER_DOMAIN_STATEMENT;
58 import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_ALTER_TABLE_STATEMENT;
59 import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_COLUMN_DEFINITION;
60 import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_COLUMN_REFERENCE;
61 import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_CREATE_ASSERTION_STATEMENT;
62 import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_CREATE_CHARACTER_SET_STATEMENT;
63 import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_CREATE_COLLATION_STATEMENT;
64 import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_CREATE_DOMAIN_STATEMENT;
65 import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_CREATE_SCHEMA_STATEMENT;
66 import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_CREATE_TABLE_STATEMENT;
67 import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_CREATE_TRANSLATION_STATEMENT;
68 import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_CREATE_VIEW_STATEMENT;
69 import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_DROP_ASSERTION_STATEMENT;
70 import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_DROP_CHARACTER_SET_STATEMENT;
71 import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_DROP_COLLATION_STATEMENT;
72 import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_DROP_COLUMN_DEFINITION;
73 import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_DROP_DOMAIN_STATEMENT;
74 import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_DROP_SCHEMA_STATEMENT;
75 import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_DROP_TABLE_CONSTRAINT_DEFINITION;
76 import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_DROP_TABLE_STATEMENT;
77 import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_DROP_TRANSLATION_STATEMENT;
78 import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_DROP_VIEW_STATEMENT;
79 import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_FK_COLUMN_REFERENCE;
80 import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_GRANT_ON_CHARACTER_SET_STATEMENT;
81 import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_GRANT_ON_COLLATION_STATEMENT;
82 import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_GRANT_ON_DOMAIN_STATEMENT;
83 import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_GRANT_ON_TABLE_STATEMENT;
84 import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_GRANT_ON_TRANSLATION_STATEMENT;
85 import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_INSERT_STATEMENT;
86 import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_MISSING_TERMINATOR;
87 import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_PROBLEM;
88 import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_REVOKE_ON_CHARACTER_SET_STATEMENT;
89 import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_REVOKE_ON_COLLATION_STATEMENT;
90 import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_REVOKE_ON_DOMAIN_STATEMENT;
91 import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_REVOKE_ON_TABLE_STATEMENT;
92 import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_REVOKE_ON_TRANSLATION_STATEMENT;
93 import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_SET_STATEMENT;
94 import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_STATEMENT;
95 import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_STATEMENT_OPTION;
96 import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_TABLE_CONSTRAINT;
97 import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_TABLE_REFERENCE;
98 import static org.modeshape.sequencer.ddl.StandardDdlLexicon.VALUE;
99 import static org.modeshape.sequencer.ddl.StandardDdlLexicon.WITH_GRANT_OPTION;
100 import java.math.BigInteger;
101 import java.util.ArrayList;
102 import java.util.Collections;
103 import java.util.LinkedList;
104 import java.util.List;
105 import net.jcip.annotations.NotThreadSafe;
106 import org.modeshape.common.text.ParsingException;
107 import org.modeshape.common.text.Position;
108 import org.modeshape.common.util.CheckArg;
109 import org.modeshape.graph.JcrLexicon;
110 import org.modeshape.graph.property.Name;
111 import org.modeshape.sequencer.ddl.DdlTokenStream.DdlTokenizer;
112 import org.modeshape.sequencer.ddl.datatype.DataType;
113 import org.modeshape.sequencer.ddl.datatype.DataTypeParser;
114 import org.modeshape.sequencer.ddl.node.AstNode;
115 import org.modeshape.sequencer.ddl.node.AstNodeFactory;
116
117
118
119
120 @NotThreadSafe
121 public class StandardDdlParser implements DdlParser, DdlConstants, DdlConstants.StatementStartPhrases {
122
    // Identifier for this parser's SQL dialect; returned via getIdentifyingKeywords().
    private final String parserId = "SQL92";
    // When true, testPrint() emits diagnostic output during scoring/parsing.
    private boolean testMode = false;
    // Problems accumulated during score()/parse(); cleared at the start of each run
    // and attached to the root node at the end of parse().
    private final List<DdlParserProblem> problems;
    // Factory used to create all AstNode instances for parsed statements.
    private final AstNodeFactory nodeFactory;
    // Root of the AST currently being built; set by parse().
    private AstNode rootNode;
    // Lazily-built list of words that can start a data type (populated elsewhere).
    private List<String> allDataTypeStartWords = null;
    // Pluggable parser for SQL data types; defaults to DataTypeParser (see constructor).
    private DataTypeParser datatypeParser = null;
    // Statement terminator string; defaults to DEFAULT_TERMINATOR.
    private String terminator = DEFAULT_TERMINATOR;
    // Whether statements are expected to end with the terminator (see constructor).
    private boolean useTerminator = false;
    // Position recorded by markStartOfStatement() for the statement in progress.
    private Position currentMarkedPosition;
133
134 public StandardDdlParser() {
135 super();
136 setDoUseTerminator(true);
137 setDatatypeParser(new DataTypeParser());
138 nodeFactory = new AstNodeFactory();
139 problems = new ArrayList<DdlParserProblem>();
140 }
141
142
143
144
145
146
    /**
     * Returns the data type parser used by this DDL parser.
     *
     * @return the current {@link DataTypeParser}; may be null only if explicitly set so
     */
    public DataTypeParser getDatatypeParser() {
        return datatypeParser;
    }
150
151
152
153
    /**
     * Sets the data type parser used by this DDL parser.
     *
     * @param datatypeParser the parser to use; no null check is performed here
     */
    public void setDatatypeParser( DataTypeParser datatypeParser ) {
        this.datatypeParser = datatypeParser;
    }
157
158
159
160
161
162
    /**
     * Returns the factory used to create {@link AstNode}s for parsed statements.
     *
     * @return the node factory; never null (initialized in the constructor)
     */
    public AstNodeFactory nodeFactory() {
        return this.nodeFactory;
    }
166
167
168
169
    /**
     * Returns the root node of the AST currently being built.
     *
     * @return the root node; may be null before {@code parse(...)} is invoked
     */
    public AstNode getRootNode() {
        return rootNode;
    }
173
174
175
176
    /**
     * Sets the root node under which parsed statement nodes are created.
     *
     * @param rootNode the new root node; no null check is performed here
     */
    public void setRootNode( AstNode rootNode ) {
        this.rootNode = rootNode;
    }
180
181
182
183
184
185
186
187 public Object score( String ddl,
188 String fileName,
189 DdlParserScorer scorer ) throws ParsingException {
190 CheckArg.isNotNull(ddl, "ddl");
191 CheckArg.isNotNull(scorer, "scorer");
192
193 if (fileName != null) {
194
195 scorer.scoreText(fileName, 2, getIdentifyingKeywords());
196 }
197
198
199 problems.clear();
200 boolean includeComments = true;
201 DdlTokenStream tokens = new DdlTokenStream(ddl, DdlTokenStream.ddlTokenizer(includeComments), false);
202 initializeTokenStream(tokens);
203 tokens.start();
204
205 testPrint("\n== >> StandardDdlParser.parse() PARSING STARTED: ");
206
207
208 while (tokens.matches(DdlTokenizer.COMMENT)) {
209
210 String comment = tokens.consume();
211 scorer.scoreText(comment, 2, getIdentifyingKeywords());
212 }
213
214
215 computeScore(tokens, scorer);
216
217
218 return tokens;
219 }
220
221 protected void computeScore( DdlTokenStream tokens,
222 DdlParserScorer scorer ) {
223 while (tokens.hasNext()) {
224 if (tokens.isNextKeyWord()) {
225 scorer.scoreStatements(1);
226 }
227 tokens.consume();
228 }
229 }
230
    /**
     * Returns the keywords that identify this parser's dialect; by default just this
     * parser's identifier (see {@code getId()}).
     *
     * @return a new array of identifying keywords; never null
     */
    public String[] getIdentifyingKeywords() {
        return new String[] {getId()};
    }
234
235
236
237
238
239
240
    /**
     * Parses the DDL content into an AST rooted at the supplied node. If {@code score(...)}
     * already produced a token stream, it is rewound and reused; otherwise the content is
     * re-tokenized (without comments). Unparsed statements are captured via
     * {@code parseIgnorableStatement}, and accumulated problems are attached to the root.
     *
     * @param ddl the DDL content; may not be null
     * @param rootNode the root node to build the AST under; may not be null
     * @param scoreReturnObject the object returned by score(); reused if it is a DdlTokenStream
     * @throws ParsingException
     */
    public void parse( String ddl,
                       AstNode rootNode,
                       Object scoreReturnObject ) throws ParsingException {
        CheckArg.isNotNull(ddl, "ddl");
        CheckArg.isNotNull(rootNode, "rootNode");
        problems.clear();
        setRootNode(rootNode);

        DdlTokenStream tokens = null;
        if (scoreReturnObject instanceof DdlTokenStream) {
            // Reuse the stream built during scoring; just rewind it to the beginning.
            tokens = (DdlTokenStream)scoreReturnObject;
            tokens.rewind();
        } else {
            // No reusable stream: tokenize afresh, this time excluding comments.
            boolean includeComments = false;
            tokens = new DdlTokenStream(ddl, DdlTokenStream.ddlTokenizer(includeComments), false);
            initializeTokenStream(tokens);
            tokens.start();
        }

        testPrint("\n== >> StandardDdlParser.parse() PARSING STARTED: ");

        // Main loop: advance to each statement start, then parse one statement.
        while (moveToNextStatementStart(tokens)) {
            AstNode stmtNode = parseNextStatement(tokens, rootNode);
            if (stmtNode == null) {
                // Nothing recognized the statement; record it as an ignorable statement
                // named after its first token.
                markStartOfStatement(tokens);
                String stmtName = tokens.consume();
                stmtNode = parseIgnorableStatement(tokens, stmtName, rootNode);
                markEndOfStatement(tokens, stmtNode);
            }
        }

        // Post-processing (e.g. removing missing-terminator nodes).
        rewrite(tokens, rootNode);

        // Surface every accumulated problem as a child node of the root.
        for (DdlParserProblem problem : problems) {
            attachNewProblem(problem, rootNode);
        }

        if (testMode) {
            // Diagnostic dump of the parsed statements.
            int count = 0;
            for (AstNode child : rootNode.getChildren()) {
                testPrint("== >> Found Statement" + "(" + (++count) + "):\n" + child);
            }
        }
    }
299
300
301
302
303
304
305
306
    /**
     * Registers this dialect's reserved words and statement-start phrases on the token
     * stream before it is started. Subclasses may override to register additional words.
     *
     * @param tokens the token stream to initialize; may not be null
     */
    protected void initializeTokenStream( DdlTokenStream tokens ) {
        tokens.registerKeyWords(SQL_92_RESERVED_WORDS);
        tokens.registerStatementStartPhrase(SQL_92_ALL_PHRASES);
    }
311
312
313
314
315
316
317
318
319
320
321 protected AstNode parseNextStatement( DdlTokenStream tokens,
322 AstNode node ) {
323 assert tokens != null;
324 assert node != null;
325
326 AstNode stmtNode = null;
327
328 if (tokens.matches(CREATE)) {
329 stmtNode = parseCreateStatement(tokens, node);
330 } else if (tokens.matches(ALTER)) {
331 stmtNode = parseAlterStatement(tokens, node);
332 } else if (tokens.matches(DROP)) {
333 stmtNode = parseDropStatement(tokens, node);
334 } else if (tokens.matches(INSERT)) {
335 stmtNode = parseInsertStatement(tokens, node);
336 } else if (tokens.matches(SET)) {
337 stmtNode = parseSetStatement(tokens, node);
338 } else if (tokens.matches(GRANT)) {
339 stmtNode = parseGrantStatement(tokens, node);
340 } else if (tokens.matches(REVOKE)) {
341 stmtNode = parseRevokeStatement(tokens, node);
342 }
343
344 if (stmtNode == null) {
345 stmtNode = parseCustomStatement(tokens, node);
346 }
347
348 return stmtNode;
349 }
350
351 private boolean moveToNextStatementStart( DdlTokenStream tokens ) throws ParsingException {
352 assert tokens != null;
353
354 StringBuffer sb = new StringBuffer();
355 DdlParserProblem problem = null;
356
357
358 if (tokens.hasNext()) {
359 while (tokens.hasNext()) {
360 if (tokens.canConsume(DdlTokenizer.COMMENT)) continue;
361
362
363 if (!tokens.matches(DdlTokenizer.STATEMENT_KEY)) {
364
365
366 if (problem == null) {
367 markStartOfStatement(tokens);
368
369 String msg = DdlSequencerI18n.unusedTokensDiscovered.text(tokens.nextPosition().getLine(),
370 tokens.nextPosition().getColumn());
371 problem = new DdlParserProblem(DdlConstants.Problems.WARNING, tokens.nextPosition(), msg);
372 }
373
374 String nextTokenValue = null;
375
376
377
378 if (tokens.matches(getTerminator()) && sb.length() > 0) {
379 nextTokenValue = getTerminator();
380
381 AstNode unknownNode = unknownTerminatedNode(getRootNode());
382 markEndOfStatement(tokens, unknownNode);
383
384 problem = null;
385 } else {
386
387
388
389
390 nextTokenValue = tokens.consume();
391 AstNode unknownNode = handleUnknownToken(tokens, nextTokenValue);
392 if (unknownNode != null) {
393 markEndOfStatement(tokens, unknownNode);
394
395 problem = null;
396 }
397 }
398 sb.append(SPACE).append(nextTokenValue);
399
400 } else {
401
402 if (problem != null && sb.length() > 0) {
403 problem.setUnusedSource(sb.toString());
404 addProblem(problem);
405 }
406 return true;
407 }
408 }
409
410
411 if (problem != null && sb.length() > 0) {
412 problem.setUnusedSource(sb.toString());
413 addProblem(problem);
414 }
415 }
416 return false;
417 }
418
    /**
     * Records the problem in this parser's problem list and also attaches it immediately
     * as a problem node under the supplied node.
     *
     * @param problem the problem to record; may not be null
     * @param node the node to attach the problem node under; may not be null
     */
    public final void addProblem( DdlParserProblem problem,
                                  AstNode node ) {
        addProblem(problem);
        attachNewProblem(problem, node);
    }
424
    /**
     * Records the problem in this parser's problem list (attached to the AST later,
     * at the end of {@code parse(...)}).
     *
     * @param problem the problem to record; no null check is performed here
     */
    public final void addProblem( DdlParserProblem problem ) {
        problems.add(problem);
    }
428
    /**
     * Returns the problems recorded during the current score()/parse() run.
     * <p>
     * NOTE(review): this exposes the internal mutable list directly — callers can modify
     * the parser's own state; consider whether an unmodifiable view would be safer.
     *
     * @return the live list of problems; never null
     */
    public final List<DdlParserProblem> getProblems() {
        return this.problems;
    }
432
433 public final void attachNewProblem( DdlParserProblem problem,
434 AstNode parentNode ) {
435 assert problem != null;
436 assert parentNode != null;
437
438 AstNode problemNode = nodeFactory().node("DDL PROBLEM", parentNode, TYPE_PROBLEM);
439 problemNode.setProperty(PROBLEM_LEVEL, problem.getLevel());
440 problemNode.setProperty(MESSAGE, problem.toString() + "[" + problem.getUnusedSource() + "]");
441
442 testPrint(problem.toString());
443 }
444
    /**
     * Post-processing hook run after all statements are parsed. The default implementation
     * removes all missing-terminator nodes from the tree; subclasses may override to do
     * additional rewriting.
     *
     * @param tokens the token stream used during parsing; assumed non-null
     * @param rootNode the root of the parsed AST; assumed non-null
     */
    protected void rewrite( DdlTokenStream tokens,
                            AstNode rootNode ) {
        assert tokens != null;
        assert rootNode != null;

        removeMissingTerminatorNodes(rootNode);
    }
453
454 protected void removeMissingTerminatorNodes( AstNode parentNode ) {
455 assert parentNode != null;
456
457 List<AstNode> copyOfNodes = new ArrayList<AstNode>(parentNode.getChildren());
458
459 for (AstNode child : copyOfNodes) {
460 if (nodeFactory().hasMixinType(child, TYPE_MISSING_TERMINATOR)) {
461 parentNode.removeChild(child);
462 } else {
463 removeMissingTerminatorNodes(child);
464 }
465 }
466 }
467
468
469
470
471
472
473
474
    /**
     * Merges two statement nodes by replacing the first node's DDL expression with the raw
     * source content spanning from the first node's start through the end of the second
     * node's expression.
     * <p>
     * NOTE(review): the Positions are built with dummy line/column values (1, 0) — this
     * presumably relies on getContentBetween() using only the character index; confirm.
     *
     * @param tokens the token stream providing access to the raw content; may not be null
     * @param firstNode the node whose expression is extended; may not be null
     * @param secondNode the node supplying the end of the merged span; may not be null
     */
    public void mergeNodes( DdlTokenStream tokens,
                            AstNode firstNode,
                            AstNode secondNode ) {
        assert tokens != null;
        assert firstNode != null;
        assert secondNode != null;

        int firstStartIndex = (Integer)firstNode.getProperty(DDL_START_CHAR_INDEX).getFirstValue();
        int secondStartIndex = (Integer)secondNode.getProperty(DDL_START_CHAR_INDEX).getFirstValue();
        // End of the merged span = second node's start + the length of its expression.
        int deltaLength = ((String)secondNode.getProperty(DDL_EXPRESSION).getFirstValue()).length();
        Position startPosition = new Position(firstStartIndex, 1, 0);
        Position endPosition = new Position((secondStartIndex + deltaLength), 1, 0);
        String source = tokens.getContentBetween(startPosition, endPosition);
        firstNode.setProperty(DDL_EXPRESSION, source);
    }
490
491
492
493
494
495
496
497
498
499
500
    /**
     * Extension hook invoked by {@code moveToNextStatementStart} for each token that is not
     * a registered statement start. Subclasses may consume further tokens and return a node
     * representing what they recognized; the default implementation recognizes nothing.
     *
     * @param tokens the token stream, positioned after the unknown token; may not be null
     * @param tokenValue the token that was not recognized; may not be null
     * @return a node for the handled content, or null if the token was not handled
     * @throws ParsingException
     */
    public AstNode handleUnknownToken( DdlTokenStream tokens,
                                       String tokenValue ) throws ParsingException {
        assert tokens != null;
        assert tokenValue != null;

        return null;
    }
508
509
510
511
512
513
514
515
516
    /**
     * Parses a CREATE statement by dispatching on the specific CREATE phrase (SCHEMA,
     * TABLE, VIEW, ASSERTION, CHARACTER SET, COLLATION, TRANSLATION, DOMAIN). Any other
     * CREATE statement is captured as an ignorable statement with an attached warning.
     *
     * @param tokens the token stream, positioned at CREATE; may not be null
     * @param parentNode the parent for the new statement node; may not be null
     * @return the parsed statement node; never null
     * @throws ParsingException
     */
    protected AstNode parseCreateStatement( DdlTokenStream tokens,
                                            AstNode parentNode ) throws ParsingException {
        assert tokens != null;
        assert parentNode != null;

        AstNode stmtNode = null;

        if (tokens.matches(STMT_CREATE_SCHEMA)) {
            stmtNode = parseCreateSchemaStatement(tokens, parentNode);
        } else if (tokens.matches(STMT_CREATE_TABLE) || tokens.matches(STMT_CREATE_GLOBAL_TEMPORARY_TABLE)
                   || tokens.matches(STMT_CREATE_LOCAL_TEMPORARY_TABLE)) {
            stmtNode = parseCreateTableStatement(tokens, parentNode);
        } else if (tokens.matches(STMT_CREATE_VIEW) || tokens.matches(STMT_CREATE_OR_REPLACE_VIEW)) {
            stmtNode = parseCreateViewStatement(tokens, parentNode);
        } else if (tokens.matches(STMT_CREATE_ASSERTION)) {
            stmtNode = parseCreateAssertionStatement(tokens, parentNode);
        } else if (tokens.matches(STMT_CREATE_CHARACTER_SET)) {
            stmtNode = parseCreateCharacterSetStatement(tokens, parentNode);
        } else if (tokens.matches(STMT_CREATE_COLLATION)) {
            stmtNode = parseCreateCollationStatement(tokens, parentNode);
        } else if (tokens.matches(STMT_CREATE_TRANSLATION)) {
            stmtNode = parseCreateTranslationStatement(tokens, parentNode);
        } else if (tokens.matches(STMT_CREATE_DOMAIN)) {
            stmtNode = parseCreateDomainStatement(tokens, parentNode);
        } else {
            // Unrecognized CREATE statement: consume it as ignorable and record a warning.
            markStartOfStatement(tokens);

            stmtNode = parseIgnorableStatement(tokens, "CREATE UNKNOWN", parentNode);
            Position position = getCurrentMarkedPosition();
            String msg = DdlSequencerI18n.unknownCreateStatement.text(position.getLine(), position.getColumn());
            DdlParserProblem problem = new DdlParserProblem(DdlConstants.Problems.WARNING, position, msg);

            // NOTE(review): this stores the problem text as a property keyed by the
            // TYPE_PROBLEM name rather than attaching a problem node via
            // attachNewProblem()/addProblem() — confirm this asymmetry is intentional.
            stmtNode.setProperty(TYPE_PROBLEM, problem.toString());

            markEndOfStatement(tokens, stmtNode);
        }

        return stmtNode;
    }
570
571
572
573
574
575
576
577
578
579 protected AstNode parseAlterStatement( DdlTokenStream tokens,
580 AstNode parentNode ) throws ParsingException {
581 assert tokens != null;
582 assert parentNode != null;
583
584 if (tokens.matches(ALTER, TABLE)) {
585 return parseAlterTableStatement(tokens, parentNode);
586 } else if (tokens.matches("ALTER", "DOMAIN")) {
587 markStartOfStatement(tokens);
588 tokens.consume("ALTER", "DOMAIN");
589 String domainName = parseName(tokens);
590 AstNode alterNode = nodeFactory().node(domainName, parentNode, TYPE_ALTER_DOMAIN_STATEMENT);
591 parseUntilTerminator(tokens);
592 markEndOfStatement(tokens, alterNode);
593 return alterNode;
594 }
595 return null;
596 }
597
598
599
600
601
602
603
604
605
    /**
     * Parses an ALTER TABLE statement of the general form:
     * ALTER TABLE table-name { ADD column-or-constraint | DROP [COLUMN|CONSTRAINT] name
     * [CASCADE|RESTRICT] | ALTER [COLUMN] name {SET default-clause | DROP DEFAULT} }.
     * Unrecognized clauses are skipped up to the statement terminator.
     *
     * @param tokens the token stream, positioned at ALTER TABLE; may not be null
     * @param parentNode the parent for the new statement node; may not be null
     * @return the alter-table statement node; never null
     * @throws ParsingException
     */
    protected AstNode parseAlterTableStatement( DdlTokenStream tokens,
                                                AstNode parentNode ) throws ParsingException {
        assert tokens != null;
        assert parentNode != null;

        markStartOfStatement(tokens);

        tokens.consume("ALTER", "TABLE");
        String tableName = parseName(tokens);

        AstNode alterTableNode = nodeFactory().node(tableName, parentNode, TYPE_ALTER_TABLE_STATEMENT);

        if (tokens.canConsume("ADD")) {
            // ADD either a table constraint or a single column definition.
            if (isTableConstraint(tokens)) {
                parseTableConstraint(tokens, alterTableNode, true);
            } else {
                parseSingleTerminatedColumnDefinition(tokens, alterTableNode, true);
            }
        } else if (tokens.canConsume("DROP")) {
            if (tokens.canConsume("CONSTRAINT")) {
                // DROP CONSTRAINT <name> [CASCADE|RESTRICT]
                String constraintName = parseName(tokens);
                AstNode constraintNode = nodeFactory().node(constraintName, alterTableNode, TYPE_DROP_TABLE_CONSTRAINT_DEFINITION);
                if (tokens.canConsume(DropBehavior.CASCADE)) {
                    constraintNode.setProperty(DROP_BEHAVIOR, DropBehavior.CASCADE);
                } else if (tokens.canConsume(DropBehavior.RESTRICT)) {
                    constraintNode.setProperty(DROP_BEHAVIOR, DropBehavior.RESTRICT);
                }
            } else {
                // DROP [COLUMN] <name> [CASCADE|RESTRICT]; the COLUMN keyword is optional.
                tokens.canConsume("COLUMN");
                String columnName = parseName(tokens);
                AstNode columnNode = nodeFactory().node(columnName, alterTableNode, TYPE_DROP_COLUMN_DEFINITION);
                if (tokens.canConsume(DropBehavior.CASCADE)) {
                    columnNode.setProperty(DROP_BEHAVIOR, DropBehavior.CASCADE);
                } else if (tokens.canConsume(DropBehavior.RESTRICT)) {
                    columnNode.setProperty(DROP_BEHAVIOR, DropBehavior.RESTRICT);
                }
            }
        } else if (tokens.canConsume("ALTER")) {
            // ALTER [COLUMN] <name> {SET <default clause> | DROP DEFAULT}
            tokens.canConsume("COLUMN");
            String alterColumnName = parseName(tokens);
            AstNode columnNode = nodeFactory().node(alterColumnName, alterTableNode, TYPE_ALTER_COLUMN_DEFINITION);
            if (tokens.canConsume("SET")) {
                parseDefaultClause(tokens, columnNode);
            } else if (tokens.canConsume("DROP", "DEFAULT")) {
                columnNode.setProperty(DROP_BEHAVIOR, "DROP DEFAULT");
            }
        } else {
            // Unrecognized ALTER TABLE clause; skip to the statement terminator.
            parseUntilTerminator(tokens);
        }

        markEndOfStatement(tokens, alterTableNode);
        return alterTableNode;
    }
677
678
679
680
681
682
683
684
685
686 protected AstNode parseDropStatement( DdlTokenStream tokens,
687 AstNode parentNode ) throws ParsingException {
688 assert tokens != null;
689 assert parentNode != null;
690
691 if (tokens.matches(STMT_DROP_TABLE)) {
692
693
694
695
696 return parseSimpleDropStatement(tokens, STMT_DROP_TABLE, parentNode, TYPE_DROP_TABLE_STATEMENT);
697 } else if (tokens.matches(STMT_DROP_VIEW)) {
698 return parseSimpleDropStatement(tokens, STMT_DROP_VIEW, parentNode, TYPE_DROP_VIEW_STATEMENT);
699 } else if (tokens.matches(STMT_DROP_SCHEMA)) {
700 return parseSimpleDropStatement(tokens, STMT_DROP_SCHEMA, parentNode, TYPE_DROP_SCHEMA_STATEMENT);
701 } else if (tokens.matches(STMT_DROP_DOMAIN)) {
702 return parseSimpleDropStatement(tokens, STMT_DROP_DOMAIN, parentNode, TYPE_DROP_DOMAIN_STATEMENT);
703 } else if (tokens.matches(STMT_DROP_TRANSLATION)) {
704 return parseSimpleDropStatement(tokens, STMT_DROP_TRANSLATION, parentNode, TYPE_DROP_TRANSLATION_STATEMENT);
705 } else if (tokens.matches(STMT_DROP_CHARACTER_SET)) {
706 return parseSimpleDropStatement(tokens, STMT_DROP_CHARACTER_SET, parentNode, TYPE_DROP_CHARACTER_SET_STATEMENT);
707 } else if (tokens.matches(STMT_DROP_ASSERTION)) {
708 return parseSimpleDropStatement(tokens, STMT_DROP_ASSERTION, parentNode, TYPE_DROP_ASSERTION_STATEMENT);
709 } else if (tokens.matches(STMT_DROP_COLLATION)) {
710 return parseSimpleDropStatement(tokens, STMT_DROP_COLLATION, parentNode, TYPE_DROP_COLLATION_STATEMENT);
711 }
712
713 return null;
714 }
715
    /**
     * Parses a simple DROP statement: the start phrase, a comma-separated list of names,
     * and an optional CASCADE or RESTRICT behavior.
     * <p>
     * NOTE(review): although a list of names is parsed, only the FIRST name is used for the
     * resulting node — any additional names are consumed but not modeled; confirm this is
     * the intended behavior.
     *
     * @param tokens the token stream; may not be null
     * @param startPhrase the phrase that begins the statement (e.g. DROP TABLE); may not be null or empty
     * @param parentNode the parent for the new statement node; may not be null
     * @param stmtType the mixin type for the new statement node
     * @return the drop statement node; never null
     * @throws ParsingException
     */
    private AstNode parseSimpleDropStatement( DdlTokenStream tokens,
                                              String[] startPhrase,
                                              AstNode parentNode,
                                              Name stmtType ) throws ParsingException {
        assert tokens != null;
        assert startPhrase != null && startPhrase.length > 0;
        assert parentNode != null;

        markStartOfStatement(tokens);
        String behavior = null;
        tokens.consume(startPhrase);
        List<String> nameList = new ArrayList<String>();
        nameList.add(parseName(tokens));
        // Consume any additional comma-separated names.
        while (tokens.matches(COMMA)) {
            tokens.consume(COMMA);
            nameList.add(parseName(tokens));
        }

        if (tokens.canConsume("CASCADE")) {
            behavior = "CASCADE";
        } else if (tokens.canConsume("RESTRICT")) {
            behavior = "RESTRICT";
        }

        AstNode dropNode = nodeFactory().node(nameList.get(0), parentNode, stmtType);
        if (behavior != null) {
            dropNode.setProperty(DROP_BEHAVIOR, behavior);
        }
        markEndOfStatement(tokens, dropNode);

        return dropNode;
    }
748
749
750
751
752
753
754
755
756
757 protected AstNode parseInsertStatement( DdlTokenStream tokens,
758 AstNode parentNode ) throws ParsingException {
759 assert tokens != null;
760 assert parentNode != null;
761
762
763 if (tokens.matches(STMT_INSERT_INTO)) {
764 markStartOfStatement(tokens);
765 tokens.consume(STMT_INSERT_INTO);
766 String prefix = getStatementTypeName(STMT_INSERT_INTO);
767 AstNode node = nodeFactory().node(prefix, parentNode, TYPE_INSERT_STATEMENT);
768 parseUntilTerminator(tokens);
769 markEndOfStatement(tokens, node);
770 return node;
771 }
772 return null;
773 }
774
775
776
777
778
779
780
781
782
783 protected AstNode parseSetStatement( DdlTokenStream tokens,
784 AstNode parentNode ) throws ParsingException {
785 assert tokens != null;
786 assert parentNode != null;
787
788
789 if (tokens.matches(SET)) {
790 markStartOfStatement(tokens);
791 tokens.consume(SET);
792 AstNode node = nodeFactory().node("SET", parentNode, TYPE_SET_STATEMENT);
793 parseUntilTerminator(tokens);
794 markEndOfStatement(tokens, node);
795 return node;
796 }
797 return null;
798 }
799
800
801
802
803
804
805
806
807
    /**
     * Parses a SQL-92 GRANT statement of the general form:
     * GRANT {ALL PRIVILEGES | privilege-list} ON object TO grantee[, ...] [WITH GRANT OPTION]
     * where the object may be a DOMAIN, COLLATION, CHARACTER SET, TRANSLATION or (by
     * default) a TABLE. The object's name becomes the statement node's name; privileges and
     * grantees become child nodes.
     *
     * @param tokens the token stream, positioned at GRANT; may not be null
     * @param parentNode the parent for the new statement node; may not be null
     * @return the grant statement node; never null
     * @throws ParsingException
     */
    protected AstNode parseGrantStatement( DdlTokenStream tokens,
                                           AstNode parentNode ) throws ParsingException {
        assert tokens != null;
        assert parentNode != null;
        assert tokens.matches(GRANT);

        markStartOfStatement(tokens);

        AstNode grantNode = null;
        boolean allPrivileges = false;

        // Privileges are parsed before the grant node exists, so they are collected here
        // and re-parented onto the grant node once it is created.
        List<AstNode> privileges = new ArrayList<AstNode>();

        tokens.consume("GRANT");

        if (tokens.canConsume("ALL", "PRIVILEGES")) {
            allPrivileges = true;
        } else {
            parseGrantPrivileges(tokens, privileges);
        }
        tokens.consume("ON");

        // The grant target determines the node's mixin type; TABLE is the default
        // (the TABLE keyword itself is optional).
        if (tokens.canConsume("DOMAIN")) {
            String name = parseName(tokens);
            grantNode = nodeFactory().node(name, parentNode, TYPE_GRANT_ON_DOMAIN_STATEMENT);
        } else if (tokens.canConsume("COLLATION")) {
            String name = parseName(tokens);
            grantNode = nodeFactory().node(name, parentNode, TYPE_GRANT_ON_COLLATION_STATEMENT);
        } else if (tokens.canConsume("CHARACTER", "SET")) {
            String name = parseName(tokens);
            grantNode = nodeFactory().node(name, parentNode, TYPE_GRANT_ON_CHARACTER_SET_STATEMENT);
        } else if (tokens.canConsume("TRANSLATION")) {
            String name = parseName(tokens);
            grantNode = nodeFactory().node(name, parentNode, TYPE_GRANT_ON_TRANSLATION_STATEMENT);
        } else {
            tokens.canConsume(TABLE);
            String name = parseName(tokens);
            grantNode = nodeFactory().node(name, parentNode, TYPE_GRANT_ON_TABLE_STATEMENT);
        }

        // Re-parent the previously-collected privilege nodes under the grant node.
        for (AstNode node : privileges) {
            node.setParent(grantNode);
        }
        if (allPrivileges) {
            grantNode.setProperty(ALL_PRIVILEGES, allPrivileges);
        }

        tokens.consume("TO");

        // One GRANTEE child node per comma-separated grantee.
        do {
            String grantee = parseName(tokens);
            nodeFactory().node(grantee, grantNode, GRANTEE);
        } while (tokens.canConsume(COMMA));

        if (tokens.canConsume("WITH", "GRANT", "OPTION")) {
            grantNode.setProperty(WITH_GRANT_OPTION, "WITH GRANT OPTION");
        }

        markEndOfStatement(tokens, grantNode);

        return grantNode;
    }
892
    /**
     * Parses a comma-separated list of privileges (DELETE, INSERT, REFERENCES, SELECT,
     * USAGE, UPDATE) into GRANT_PRIVILEGE nodes appended to the supplied list. INSERT,
     * REFERENCES and UPDATE may carry an optional column-name list. Parsing stops at the
     * first token that is not a recognized privilege.
     *
     * @param tokens the token stream; assumed non-null
     * @param privileges the list to append privilege nodes to; assumed non-null
     * @throws ParsingException
     */
    protected void parseGrantPrivileges( DdlTokenStream tokens,
                                         List<AstNode> privileges ) throws ParsingException {
        do {
            AstNode node = null;

            if (tokens.canConsume(DELETE)) {
                node = nodeFactory().node("privilege");
                node.setProperty(TYPE, DELETE);
            } else if (tokens.canConsume(INSERT)) {
                node = nodeFactory().node("privilege");
                node.setProperty(TYPE, INSERT);
                // Optional "( column [, column]* )" list.
                parseColumnNameList(tokens, node, TYPE_COLUMN_REFERENCE);
            } else if (tokens.canConsume("REFERENCES")) {
                node = nodeFactory().node("privilege");
                node.setProperty(TYPE, "REFERENCES");
                parseColumnNameList(tokens, node, TYPE_COLUMN_REFERENCE);
            } else if (tokens.canConsume(SELECT)) {
                node = nodeFactory().node("privilege");
                node.setProperty(TYPE, SELECT);
            } else if (tokens.canConsume("USAGE")) {
                node = nodeFactory().node("privilege");
                node.setProperty(TYPE, "USAGE");
            } else if (tokens.canConsume(UPDATE)) {
                node = nodeFactory().node("privilege");
                node.setProperty(TYPE, UPDATE);
                parseColumnNameList(tokens, node, TYPE_COLUMN_REFERENCE);
            }
            // Not a privilege keyword: stop without consuming anything further.
            if (node == null) {
                break;
            }
            nodeFactory().setType(node, GRANT_PRIVILEGE);
            privileges.add(node);

        } while (tokens.canConsume(COMMA));

    }
945
    /**
     * Parses a REVOKE statement of the general form:
     * REVOKE {ALL PRIVILEGES | privilege-list} ON object FROM grantee[, ...] [CASCADE|RESTRICT]
     * where the object may be a DOMAIN, COLLATION, CHARACTER SET, TRANSLATION or (by
     * default) a TABLE.
     * <p>
     * NOTE(review): "WITH GRANT OPTION" is consumed immediately after REVOKE, before the
     * privileges — SQL-92 uses "GRANT OPTION FOR" in that position; confirm the intended
     * grammar here.
     *
     * @param tokens the token stream, positioned at REVOKE; may not be null
     * @param parentNode the parent for the new statement node; may not be null
     * @return the revoke statement node; never null
     * @throws ParsingException
     */
    protected AstNode parseRevokeStatement( DdlTokenStream tokens,
                                            AstNode parentNode ) throws ParsingException {
        assert tokens != null;
        assert parentNode != null;
        assert tokens.matches(REVOKE);

        markStartOfStatement(tokens);

        AstNode revokeNode = null;
        boolean allPrivileges = false;
        boolean withGrantOption = false;

        // Privileges are collected first, then re-parented once the revoke node exists.
        List<AstNode> privileges = new ArrayList<AstNode>();

        tokens.consume("REVOKE");

        withGrantOption = tokens.canConsume("WITH", "GRANT", "OPTION");

        if (tokens.canConsume("ALL", "PRIVILEGES")) {
            allPrivileges = true;
        } else {
            parseGrantPrivileges(tokens, privileges);
        }
        tokens.consume("ON");

        // The revoke target determines the node's mixin type; TABLE is the default
        // (the TABLE keyword itself is optional).
        if (tokens.canConsume("DOMAIN")) {
            String name = parseName(tokens);
            revokeNode = nodeFactory().node(name, parentNode, TYPE_REVOKE_ON_DOMAIN_STATEMENT);
        } else if (tokens.canConsume("COLLATION")) {
            String name = parseName(tokens);
            revokeNode = nodeFactory().node(name, parentNode, TYPE_REVOKE_ON_COLLATION_STATEMENT);
        } else if (tokens.canConsume("CHARACTER", "SET")) {
            String name = parseName(tokens);
            revokeNode = nodeFactory().node(name, parentNode, TYPE_REVOKE_ON_CHARACTER_SET_STATEMENT);
        } else if (tokens.canConsume("TRANSLATION")) {
            String name = parseName(tokens);
            revokeNode = nodeFactory().node(name, parentNode, TYPE_REVOKE_ON_TRANSLATION_STATEMENT);
        } else {
            tokens.canConsume(TABLE);
            String name = parseName(tokens);
            revokeNode = nodeFactory().node(name, parentNode, TYPE_REVOKE_ON_TABLE_STATEMENT);
        }

        // Re-parent the previously-collected privilege nodes under the revoke node.
        for (AstNode node : privileges) {
            node.setParent(revokeNode);
        }

        if (allPrivileges) {
            revokeNode.setProperty(ALL_PRIVILEGES, allPrivileges);
        }

        tokens.consume("FROM");

        // One GRANTEE child node per comma-separated grantee.
        do {
            String grantee = parseName(tokens);
            nodeFactory().node(grantee, revokeNode, GRANTEE);
        } while (tokens.canConsume(COMMA));

        String behavior = null;

        if (tokens.canConsume("CASCADE")) {
            behavior = "CASCADE";
        } else if (tokens.canConsume("RESTRICT")) {
            behavior = "RESTRICT";
        }

        if (behavior != null) {
            revokeNode.setProperty(DROP_BEHAVIOR, behavior);
        }

        if (withGrantOption) {
            revokeNode.setProperty(WITH_GRANT_OPTION, "WITH GRANT OPTION");
        }

        markEndOfStatement(tokens, revokeNode);

        return revokeNode;
    }
1031
1032
1033
1034
1035
1036
1037
1038
1039
1040 protected AstNode parseCreateDomainStatement( DdlTokenStream tokens,
1041 AstNode parentNode ) throws ParsingException {
1042 assert tokens != null;
1043 assert parentNode != null;
1044
1045
1046
1047
1048
1049
1050
1051
1052 markStartOfStatement(tokens);
1053
1054 tokens.consume(STMT_CREATE_DOMAIN);
1055
1056 String name = parseName(tokens);
1057
1058 AstNode node = nodeFactory().node(name, parentNode, TYPE_CREATE_DOMAIN_STATEMENT);
1059
1060 parseUntilTerminator(tokens);
1061
1062 markEndOfStatement(tokens, node);
1063
1064 return node;
1065 }
1066
1067
1068
1069
1070
1071
1072
1073
1074
1075 protected AstNode parseCreateCollationStatement( DdlTokenStream tokens,
1076 AstNode parentNode ) throws ParsingException {
1077 assert tokens != null;
1078 assert parentNode != null;
1079
1080 markStartOfStatement(tokens);
1081
1082 tokens.consume(STMT_CREATE_COLLATION);
1083
1084 String name = parseName(tokens);
1085
1086 AstNode node = nodeFactory().node(name, parentNode, TYPE_CREATE_COLLATION_STATEMENT);
1087
1088 parseUntilTerminator(tokens);
1089
1090 markEndOfStatement(tokens, node);
1091
1092 return node;
1093 }
1094
1095
1096
1097
1098
1099
1100
1101
1102
1103 protected AstNode parseCreateTranslationStatement( DdlTokenStream tokens,
1104 AstNode parentNode ) throws ParsingException {
1105 assert tokens != null;
1106 assert parentNode != null;
1107
1108 markStartOfStatement(tokens);
1109
1110 tokens.consume(STMT_CREATE_TRANSLATION);
1111
1112 String name = parseName(tokens);
1113
1114 AstNode node = nodeFactory().node(name, parentNode, TYPE_CREATE_TRANSLATION_STATEMENT);
1115
1116 parseUntilTerminator(tokens);
1117
1118 markEndOfStatement(tokens, node);
1119
1120 return node;
1121 }
1122
1123
1124
1125
1126
1127
1128
1129
1130
1131 protected AstNode parseCreateCharacterSetStatement( DdlTokenStream tokens,
1132 AstNode parentNode ) throws ParsingException {
1133 assert tokens != null;
1134 assert parentNode != null;
1135
1136 markStartOfStatement(tokens);
1137
1138 tokens.consume(STMT_CREATE_CHARACTER_SET);
1139
1140 String name = parseName(tokens);
1141
1142 AstNode node = nodeFactory().node(name, parentNode, TYPE_CREATE_CHARACTER_SET_STATEMENT);
1143
1144 parseUntilTerminator(tokens);
1145
1146 markEndOfStatement(tokens, node);
1147
1148 return node;
1149 }
1150
1151
1152
1153
1154
1155
1156
1157
1158
1159 protected AstNode parseCustomStatement( DdlTokenStream tokens,
1160 AstNode parentNode ) throws ParsingException {
1161 assert tokens != null;
1162 assert parentNode != null;
1163
1164
1165
1166
1167 return null;
1168 }
1169
1170
1171
1172
1173
1174
1175
1176
1177
1178
1179
1180
1181
1182 protected AstNode parseCreateTableStatement( DdlTokenStream tokens,
1183 AstNode parentNode ) throws ParsingException {
1184 assert tokens != null;
1185 assert parentNode != null;
1186
1187 markStartOfStatement(tokens);
1188
1189 tokens.consume(CREATE);
1190 String temporaryValue = null;
1191 if (tokens.canConsume("LOCAL")) {
1192 tokens.consume("TEMPORARY");
1193 temporaryValue = "LOCAL";
1194 } else if (tokens.canConsume("GLOBAL")) {
1195 tokens.consume("TEMPORARY");
1196 temporaryValue = "GLOBAL";
1197 }
1198
1199 tokens.consume(TABLE);
1200
1201 String tableName = parseName(tokens);
1202
1203 AstNode tableNode = nodeFactory().node(tableName, parentNode, TYPE_CREATE_TABLE_STATEMENT);
1204
1205 if (temporaryValue != null) {
1206 tableNode.setProperty(TEMPORARY, temporaryValue);
1207 }
1208
1209
1210 parseColumnsAndConstraints(tokens, tableNode);
1211
1212 parseCreateTableOptions(tokens, tableNode);
1213
1214 markEndOfStatement(tokens, tableNode);
1215
1216 return tableNode;
1217 }
1218
1219 protected void parseCreateTableOptions( DdlTokenStream tokens,
1220 AstNode tableNode ) throws ParsingException {
1221 assert tokens != null;
1222 assert tableNode != null;
1223
1224
1225 while (areNextTokensCreateTableOptions(tokens)) {
1226 parseNextCreateTableOption(tokens, tableNode);
1227 }
1228
1229 }
1230
1231 protected void parseNextCreateTableOption( DdlTokenStream tokens,
1232 AstNode tableNode ) throws ParsingException {
1233 assert tokens != null;
1234 assert tableNode != null;
1235
1236 if (tokens.canConsume("ON", "COMMIT")) {
1237 String option = "";
1238
1239 if (tokens.canConsume("PRESERVE", "ROWS")) {
1240 option = option + "ON COMMIT PRESERVE ROWS";
1241 } else if (tokens.canConsume("DELETE", "ROWS")) {
1242 option = option + "ON COMMIT DELETE ROWS";
1243 } else if (tokens.canConsume("DROP")) {
1244 option = option + "ON COMMIT DROP";
1245 }
1246
1247 if (option.length() > 0) {
1248 AstNode tableOption = nodeFactory().node("option", tableNode, TYPE_STATEMENT_OPTION);
1249 tableOption.setProperty(VALUE, option);
1250 }
1251 }
1252 }
1253
1254 protected boolean areNextTokensCreateTableOptions( DdlTokenStream tokens ) throws ParsingException {
1255 assert tokens != null;
1256
1257 boolean result = false;
1258
1259
1260 if (tokens.matches("ON", "COMMIT")) {
1261 result = true;
1262 }
1263
1264 return result;
1265 }
1266
1267
1268
1269
1270
1271
1272
1273
1274
1275
1276 protected void parseColumnsAndConstraints( DdlTokenStream tokens,
1277 AstNode tableNode ) throws ParsingException {
1278 assert tokens != null;
1279 assert tableNode != null;
1280
1281 if (!tokens.matches(L_PAREN)) {
1282 return;
1283 }
1284
1285 String tableElementString = getTableElementsString(tokens, false);
1286
1287 DdlTokenStream localTokens = new DdlTokenStream(tableElementString, DdlTokenStream.ddlTokenizer(false), false);
1288
1289 localTokens.start();
1290
1291 StringBuffer unusedTokensSB = new StringBuffer();
1292 do {
1293 if (isTableConstraint(localTokens)) {
1294 parseTableConstraint(localTokens, tableNode, false);
1295 } else if (isColumnDefinitionStart(localTokens)) {
1296 parseColumnDefinition(localTokens, tableNode, false);
1297 } else {
1298 unusedTokensSB.append(SPACE).append(localTokens.consume());
1299 }
1300 } while (localTokens.canConsume(COMMA));
1301
1302 if (unusedTokensSB.length() > 0) {
1303 String msg = DdlSequencerI18n.unusedTokensParsingColumnsAndConstraints.text(tableNode.getProperty(NAME));
1304 DdlParserProblem problem = new DdlParserProblem(DdlConstants.Problems.WARNING, Position.EMPTY_CONTENT_POSITION, msg);
1305 problem.setUnusedSource(unusedTokensSB.toString());
1306 addProblem(problem, tableNode);
1307 }
1308
1309 }
1310
1311
1312
1313
1314
1315
1316
1317
1318
1319
1320 protected void parseColumnDefinition( DdlTokenStream tokens,
1321 AstNode tableNode,
1322 boolean isAlterTable ) throws ParsingException {
1323 assert tokens != null;
1324 assert tableNode != null;
1325
1326 tokens.canConsume("COLUMN");
1327 String columnName = parseName(tokens);
1328 DataType datatype = getDatatypeParser().parse(tokens);
1329
1330 AstNode columnNode = nodeFactory().node(columnName, tableNode, TYPE_COLUMN_DEFINITION);
1331
1332 getDatatypeParser().setPropertiesOnNode(columnNode, datatype);
1333
1334
1335 StringBuffer unusedTokensSB = new StringBuffer();
1336
1337 while (tokens.hasNext() && !tokens.matches(COMMA)) {
1338 boolean parsedDefaultClause = parseDefaultClause(tokens, columnNode);
1339 if (!parsedDefaultClause) {
1340 boolean parsedCollate = parseCollateClause(tokens, columnNode);
1341 boolean parsedConstraint = parseColumnConstraint(tokens, columnNode, isAlterTable);
1342 if (!parsedCollate && !parsedConstraint) {
1343
1344
1345 unusedTokensSB.append(SPACE).append(tokens.consume());
1346 }
1347 }
1348 tokens.canConsume(DdlTokenizer.COMMENT);
1349 }
1350
1351 if (unusedTokensSB.length() > 0) {
1352 String msg = DdlSequencerI18n.unusedTokensParsingColumnDefinition.text(tableNode.getName());
1353 DdlParserProblem problem = new DdlParserProblem(Problems.WARNING, Position.EMPTY_CONTENT_POSITION, msg);
1354 problem.setUnusedSource(unusedTokensSB.toString());
1355 addProblem(problem, tableNode);
1356 }
1357 }
1358
1359
1360
1361
1362
1363
1364
1365
1366
1367
1368 protected void parseSingleTerminatedColumnDefinition( DdlTokenStream tokens,
1369 AstNode tableNode,
1370 boolean isAlterTable ) throws ParsingException {
1371 assert tokens != null;
1372 assert tableNode != null;
1373
1374 tokens.canConsume("COLUMN");
1375 String columnName = parseName(tokens);
1376 DataType datatype = getDatatypeParser().parse(tokens);
1377
1378 AstNode columnNode = nodeFactory().node(columnName, tableNode, TYPE_COLUMN_DEFINITION);
1379
1380 getDatatypeParser().setPropertiesOnNode(columnNode, datatype);
1381
1382
1383
1384
1385 while (tokens.hasNext() && !tokens.matches(getTerminator()) && !tokens.matches(DdlTokenizer.STATEMENT_KEY)) {
1386 boolean parsedDefaultClause = parseDefaultClause(tokens, columnNode);
1387 boolean foundSomething = parsedDefaultClause;
1388 if (!parsedDefaultClause) {
1389 foundSomething |= parseCollateClause(tokens, columnNode);
1390 foundSomething |= parseColumnConstraint(tokens, columnNode, isAlterTable);
1391 }
1392 foundSomething |= consumeComment(tokens);
1393 if (tokens.canConsume(COMMA) || !foundSomething) break;
1394 }
1395 }
1396
1397
1398
1399
1400
1401
1402
1403
1404
1405 protected String getTableElementsString( DdlTokenStream tokens,
1406 boolean useTerminator ) throws ParsingException {
1407 assert tokens != null;
1408
1409 StringBuffer sb = new StringBuffer(100);
1410
1411 if (useTerminator) {
1412 while (!isTerminator(tokens)) {
1413 sb.append(SPACE).append(tokens.consume());
1414 }
1415 } else {
1416
1417
1418 tokens.consume(L_PAREN);
1419
1420 int iParen = 0;
1421 while (tokens.hasNext()) {
1422 if (tokens.matches(L_PAREN)) {
1423 iParen++;
1424 } else if (tokens.matches(R_PAREN)) {
1425 if (iParen == 0) {
1426 tokens.consume(R_PAREN);
1427 break;
1428 }
1429 iParen--;
1430 }
1431 if (isComment(tokens)) {
1432 tokens.consume();
1433 } else {
1434 sb.append(SPACE).append(tokens.consume());
1435 }
1436 }
1437 }
1438
1439 return sb.toString();
1440
1441 }
1442
1443
1444
1445
1446
1447
1448
1449
1450
1451
1452 protected String consumeParenBoundedTokens( DdlTokenStream tokens,
1453 boolean includeParens ) throws ParsingException {
1454 assert tokens != null;
1455
1456
1457
1458 if (tokens.canConsume(L_PAREN)) {
1459 StringBuffer sb = new StringBuffer(100);
1460 if (includeParens) {
1461 sb.append(L_PAREN);
1462 }
1463 int iParen = 0;
1464 while (tokens.hasNext()) {
1465 if (tokens.matches(L_PAREN)) {
1466 iParen++;
1467 } else if (tokens.matches(R_PAREN)) {
1468 if (iParen == 0) {
1469 tokens.consume(R_PAREN);
1470 if (includeParens) {
1471 sb.append(SPACE).append(R_PAREN);
1472 }
1473 break;
1474 }
1475 iParen--;
1476 }
1477 if (isComment(tokens)) {
1478 tokens.consume();
1479 } else {
1480 sb.append(SPACE).append(tokens.consume());
1481 }
1482 }
1483 return sb.toString();
1484 }
1485
1486 return null;
1487 }
1488
1489
1490
1491
1492
1493
1494
1495
1496
1497
1498
    /**
     * Parses an in-line (column-level) constraint clause from the column definition currently
     * being processed. When a constraint is found, a constraint node is created as a sibling of
     * the column (a child of {@code columnNode.getParent()}), carrying a CONSTRAINT_TYPE property
     * and a column-reference child naming this column.
     *
     * Recognized forms: NULL, NOT NULL, CONSTRAINT &lt;name&gt; { UNIQUE | PRIMARY KEY | REFERENCES ... },
     * and unnamed UNIQUE, PRIMARY KEY, FOREIGN KEY, REFERENCES and CHECK clauses. Unnamed
     * constraints are given the placeholder names "UC_1", "PK_1", "FK_1" and "CHECK_1".
     *
     * @param tokens the token stream; may not be null
     * @param columnNode the column-definition node the constraint applies to; may not be null
     * @param isAlterTable true when called while parsing an ALTER TABLE statement
     * @return true when a constraint (or NULL/NOT NULL clause) was consumed
     * @throws ParsingException if the tokens cannot be consumed
     */
    protected boolean parseColumnConstraint( DdlTokenStream tokens,
                                             AstNode columnNode,
                                             boolean isAlterTable ) throws ParsingException {
        assert tokens != null;
        assert columnNode != null;

        // ALTER TABLE additions are typed differently so downstream consumers can tell them apart.
        Name mixinType = isAlterTable ? TYPE_ADD_TABLE_CONSTRAINT_DEFINITION : TYPE_TABLE_CONSTRAINT;

        boolean result = false;

        // The constraint node references this column by name when no explicit column list is given.
        String colName = columnNode.getName().getString();

        if (tokens.canConsume("NULL")) {
            columnNode.setProperty(NULLABLE, "NULL");
            result = true;
        } else if (tokens.canConsume("NOT", "NULL")) {
            columnNode.setProperty(NULLABLE, "NOT NULL");
            result = true;
        } else if (tokens.matches("CONSTRAINT")) {
            // Named constraint: CONSTRAINT <name> { UNIQUE | PRIMARY KEY | REFERENCES ... }
            result = true;
            tokens.consume("CONSTRAINT");
            String constraintName = parseName(tokens);
            AstNode constraintNode = nodeFactory().node(constraintName, columnNode.getParent(), mixinType);

            if (tokens.matches("UNIQUE")) {

                tokens.consume("UNIQUE");

                constraintNode.setProperty(CONSTRAINT_TYPE, UNIQUE);

                // An explicit column list is optional here; default to this column when absent.
                boolean columnsAdded = parseColumnNameList(tokens, constraintNode, TYPE_COLUMN_REFERENCE);

                if (!columnsAdded) {
                    nodeFactory().node(colName, constraintNode, TYPE_COLUMN_REFERENCE);
                }

                parseConstraintAttributes(tokens, constraintNode);
            } else if (tokens.matches("PRIMARY", "KEY")) {

                tokens.consume("PRIMARY");
                tokens.consume("KEY");

                constraintNode.setProperty(CONSTRAINT_TYPE, PRIMARY_KEY);

                // Optional explicit column list; note no fallback to this column here (unlike UNIQUE above).
                parseColumnNameList(tokens, constraintNode, TYPE_COLUMN_REFERENCE);

                parseConstraintAttributes(tokens, constraintNode);
            } else if (tokens.matches("REFERENCES")) {
                // Named foreign key expressed via a REFERENCES clause on the column.

                constraintNode.setProperty(CONSTRAINT_TYPE, FOREIGN_KEY);

                nodeFactory().node(colName, constraintNode, TYPE_COLUMN_REFERENCE);

                parseReferences(tokens, constraintNode);

                parseConstraintAttributes(tokens, constraintNode);
            }
        } else if (tokens.matches("UNIQUE")) {
            result = true;
            tokens.consume("UNIQUE");

            // Unnamed unique constraint: use a placeholder name.
            String uc_name = "UC_1";

            AstNode constraintNode = nodeFactory().node(uc_name, columnNode.getParent(), mixinType);

            constraintNode.setProperty(CONSTRAINT_TYPE, UNIQUE);

            nodeFactory().node(colName, constraintNode, TYPE_COLUMN_REFERENCE);

        } else if (tokens.matches("PRIMARY", "KEY")) {
            result = true;
            tokens.consume("PRIMARY", "KEY");

            // Unnamed primary key: use a placeholder name.
            String pk_name = "PK_1";

            AstNode constraintNode = nodeFactory().node(pk_name, columnNode.getParent(), mixinType);

            constraintNode.setProperty(CONSTRAINT_TYPE, PRIMARY_KEY);

            nodeFactory().node(colName, constraintNode, TYPE_COLUMN_REFERENCE);

        } else if (tokens.matches("FOREIGN", "KEY")) {
            result = true;
            tokens.consume("FOREIGN", "KEY");

            // NOTE(review): the token after FOREIGN KEY is read as the constraint name at column
            // level — confirm this matches the dialects this parser targets.
            String constraintName = parseName(tokens);

            AstNode constraintNode = nodeFactory().node(constraintName, columnNode.getParent(), mixinType);

            constraintNode.setProperty(CONSTRAINT_TYPE, FOREIGN_KEY);

            nodeFactory().node(colName, constraintNode, TYPE_COLUMN_REFERENCE);

            parseReferences(tokens, constraintNode);
            parseConstraintAttributes(tokens, constraintNode);
        } else if (tokens.matches("REFERENCES")) {
            result = true;

            // Unnamed foreign key expressed directly via REFERENCES: use a placeholder name.
            String constraintName = "FK_1";

            AstNode constraintNode = nodeFactory().node(constraintName, columnNode.getParent(), mixinType);

            constraintNode.setProperty(CONSTRAINT_TYPE, FOREIGN_KEY);

            nodeFactory().node(colName, constraintNode, TYPE_COLUMN_REFERENCE);

            parseReferences(tokens, constraintNode);
            parseConstraintAttributes(tokens, constraintNode);
        } else if (tokens.matches("CHECK")) {
            result = true;
            tokens.consume("CHECK");

            String ck_name = "CHECK_1";

            AstNode constraintNode = nodeFactory().node(ck_name, columnNode.getParent(), mixinType);
            constraintNode.setProperty(NAME, ck_name);
            constraintNode.setProperty(CONSTRAINT_TYPE, CHECK);

            // The parenthesized search condition is captured verbatim, including the parens.
            String clause = consumeParenBoundedTokens(tokens, true);
            constraintNode.setProperty(CHECK_SEARCH_CONDITION, clause);
        }

        return result;
    }
1643
1644
1645
1646
1647
1648
1649
1650
1651
1652
    /**
     * Parses a table-level constraint definition from the table element list and adds a typed
     * constraint node (with CONSTRAINT_TYPE and column-reference children) under the table node.
     *
     * Recognized forms:
     * <ul>
     * <li>unnamed: UNIQUE (...), PRIMARY KEY (...), FOREIGN KEY [name] (...) REFERENCES ...</li>
     * <li>named: CONSTRAINT &lt;name&gt; { UNIQUE | PRIMARY KEY | FOREIGN KEY | CHECK } ...</li>
     * </ul>
     * Unnamed constraints receive the placeholder names "UC_1", "PK_1" and "FK_1". Each branch
     * also consumes optional constraint attributes (DEFERRABLE / INITIALLY ...) and any trailing
     * comment token.
     *
     * @param tokens the token stream; may not be null
     * @param tableNode the table node the constraint is attached to; may not be null
     * @param isAlterTable true when called while parsing an ALTER TABLE statement
     * @throws ParsingException if the tokens cannot be consumed
     */
    protected void parseTableConstraint( DdlTokenStream tokens,
                                         AstNode tableNode,
                                         boolean isAlterTable ) throws ParsingException {
        assert tokens != null;
        assert tableNode != null;

        // ALTER TABLE additions are typed differently so downstream consumers can tell them apart.
        Name mixinType = isAlterTable ? TYPE_ADD_TABLE_CONSTRAINT_DEFINITION : TYPE_TABLE_CONSTRAINT;

        // Drop any leading comment token before inspecting the constraint keywords.
        consumeComment(tokens);

        if ((tokens.matches("PRIMARY", "KEY")) || (tokens.matches("FOREIGN", "KEY")) || (tokens.matches("UNIQUE"))) {
            // ----- Unnamed constraint forms -----

            if (tokens.matches("UNIQUE")) {
                String uc_name = "UC_1"; // placeholder name for an unnamed unique constraint
                tokens.consume();

                AstNode constraintNode = nodeFactory().node(uc_name, tableNode, mixinType);
                constraintNode.setProperty(CONSTRAINT_TYPE, UNIQUE);

                // Parenthesized list of constrained columns.
                parseColumnNameList(tokens, constraintNode, TYPE_COLUMN_REFERENCE);

                parseConstraintAttributes(tokens, constraintNode);

                consumeComment(tokens);
            } else if (tokens.matches("PRIMARY", "KEY")) {
                String pk_name = "PK_1"; // placeholder name for an unnamed primary key
                tokens.consume("PRIMARY", "KEY");

                AstNode constraintNode = nodeFactory().node(pk_name, tableNode, mixinType);
                constraintNode.setProperty(CONSTRAINT_TYPE, PRIMARY_KEY);

                // Parenthesized list of key columns.
                parseColumnNameList(tokens, constraintNode, TYPE_COLUMN_REFERENCE);

                parseConstraintAttributes(tokens, constraintNode);

                consumeComment(tokens);
            } else if (tokens.matches("FOREIGN", "KEY")) {
                String fk_name = "FK_1"; // placeholder, replaced by an explicit name when present
                tokens.consume("FOREIGN", "KEY");

                if (!tokens.matches(L_PAREN)) {
                    // Some dialects allow a name between FOREIGN KEY and the column list.
                    fk_name = tokens.consume();
                }

                AstNode constraintNode = nodeFactory().node(fk_name, tableNode, mixinType);
                constraintNode.setProperty(CONSTRAINT_TYPE, FOREIGN_KEY);

                // Referencing columns of this table.
                parseColumnNameList(tokens, constraintNode, TYPE_COLUMN_REFERENCE);

                // REFERENCES <table> [ ( columns ) ] [ MATCH ... ] [ ON UPDATE/DELETE ... ]
                parseReferences(tokens, constraintNode);

                parseConstraintAttributes(tokens, constraintNode);

                consumeComment(tokens);
            }
        } else if (tokens.matches("CONSTRAINT", DdlTokenStream.ANY_VALUE, "UNIQUE")) {
            // ----- Named constraint forms -----

            tokens.consume(); // the CONSTRAINT keyword
            String uc_name = parseName(tokens);
            tokens.consume("UNIQUE");

            AstNode constraintNode = nodeFactory().node(uc_name, tableNode, mixinType);
            constraintNode.setProperty(CONSTRAINT_TYPE, UNIQUE);

            // Parenthesized list of constrained columns.
            parseColumnNameList(tokens, constraintNode, TYPE_COLUMN_REFERENCE);

            parseConstraintAttributes(tokens, constraintNode);

            consumeComment(tokens);
        } else if (tokens.matches("CONSTRAINT", DdlTokenStream.ANY_VALUE, "PRIMARY", "KEY")) {

            tokens.consume("CONSTRAINT");
            String pk_name = parseName(tokens);
            tokens.consume("PRIMARY", "KEY");

            AstNode constraintNode = nodeFactory().node(pk_name, tableNode, mixinType);
            constraintNode.setProperty(CONSTRAINT_TYPE, PRIMARY_KEY);

            // Parenthesized list of key columns.
            parseColumnNameList(tokens, constraintNode, TYPE_COLUMN_REFERENCE);

            parseConstraintAttributes(tokens, constraintNode);

            consumeComment(tokens);

        } else if (tokens.matches("CONSTRAINT", DdlTokenStream.ANY_VALUE, "FOREIGN", "KEY")) {

            tokens.consume("CONSTRAINT");
            String fk_name = parseName(tokens);
            tokens.consume("FOREIGN", "KEY");

            AstNode constraintNode = nodeFactory().node(fk_name, tableNode, mixinType);

            constraintNode.setProperty(CONSTRAINT_TYPE, FOREIGN_KEY);

            // Referencing columns of this table.
            parseColumnNameList(tokens, constraintNode, TYPE_COLUMN_REFERENCE);

            // REFERENCES <table> [ ( columns ) ] [ MATCH ... ] [ ON UPDATE/DELETE ... ]
            parseReferences(tokens, constraintNode);

            parseConstraintAttributes(tokens, constraintNode);

            consumeComment(tokens);

        } else if (tokens.matches("CONSTRAINT", DdlTokenStream.ANY_VALUE, "CHECK")) {

            tokens.consume("CONSTRAINT");
            String ck_name = parseName(tokens);
            tokens.consume("CHECK");

            AstNode constraintNode = nodeFactory().node(ck_name, tableNode, mixinType);
            constraintNode.setProperty(CONSTRAINT_TYPE, CHECK);

            // The parenthesized search condition is captured verbatim, including the parens.
            String clause = consumeParenBoundedTokens(tokens, true);
            constraintNode.setProperty(CHECK_SEARCH_CONDITION, clause);
        }

    }
1820
1821
1822
1823
1824
1825
1826
1827
1828 protected void parseConstraintAttributes( DdlTokenStream tokens,
1829 AstNode constraintNode ) throws ParsingException {
1830 assert tokens != null;
1831 assert constraintNode != null;
1832
1833
1834
1835
1836
1837
1838
1839
1840
1841
1842
1843
1844 if (tokens.canConsume("INITIALLY", "DEFERRED")) {
1845 AstNode attrNode = nodeFactory().node("CONSTRAINT_ATTRIBUTE", constraintNode, CONSTRAINT_ATTRIBUTE_TYPE);
1846 attrNode.setProperty(PROPERTY_VALUE, "INITIALLY DEFERRED");
1847 }
1848 if (tokens.canConsume("INITIALLY", "IMMEDIATE")) {
1849 AstNode attrNode = nodeFactory().node("CONSTRAINT_ATTRIBUTE", constraintNode, CONSTRAINT_ATTRIBUTE_TYPE);
1850 attrNode.setProperty(PROPERTY_VALUE, "INITIALLY IMMEDIATE");
1851 }
1852 if (tokens.canConsume("NOT", "DEFERRABLE")) {
1853 AstNode attrNode = nodeFactory().node("CONSTRAINT_ATTRIBUTE", constraintNode, CONSTRAINT_ATTRIBUTE_TYPE);
1854 attrNode.setProperty(PROPERTY_VALUE, "NOT DEFERRABLE");
1855 }
1856 if (tokens.canConsume("DEFERRABLE")) {
1857 AstNode attrNode = nodeFactory().node("CONSTRAINT_ATTRIBUTE", constraintNode, CONSTRAINT_ATTRIBUTE_TYPE);
1858 attrNode.setProperty(PROPERTY_VALUE, "DEFERRABLE");
1859 }
1860 if (tokens.canConsume("INITIALLY", "DEFERRED")) {
1861 AstNode attrNode = nodeFactory().node("CONSTRAINT_ATTRIBUTE", constraintNode, CONSTRAINT_ATTRIBUTE_TYPE);
1862 attrNode.setProperty(PROPERTY_VALUE, "INITIALLY DEFERRED");
1863 }
1864 if (tokens.canConsume("INITIALLY", "IMMEDIATE")) {
1865 AstNode attrNode = nodeFactory().node("CONSTRAINT_ATTRIBUTE", constraintNode, CONSTRAINT_ATTRIBUTE_TYPE);
1866 attrNode.setProperty(PROPERTY_VALUE, "INITIALLY IMMEDIATE");
1867 }
1868 }
1869
1870 protected void parseReferences( DdlTokenStream tokens,
1871 AstNode constraintNode ) throws ParsingException {
1872 assert tokens != null;
1873 assert constraintNode != null;
1874
1875 if (tokens.matches("REFERENCES")) {
1876 tokens.consume("REFERENCES");
1877
1878 String tableName = parseName(tokens);
1879
1880 nodeFactory().node(tableName, constraintNode, TYPE_TABLE_REFERENCE);
1881
1882 parseColumnNameList(tokens, constraintNode, TYPE_FK_COLUMN_REFERENCE);
1883
1884 tokens.canConsume("MATCH", "FULL");
1885 tokens.canConsume("MATCH", "PARTIAL");
1886
1887
1888
1889
1890
1891
1892
1893
1894
1895
1896
1897
1898
1899
1900
1901
1902
1903
1904
1905 while (tokens.canConsume("ON", "UPDATE") || tokens.canConsume("ON", "DELETE")) {
1906
1907 if (tokens.matches("CASCADE") || tokens.matches("NOW()")) {
1908 tokens.consume();
1909 } else if (tokens.matches("SET", "NULL")) {
1910 tokens.consume("SET", "NULL");
1911 } else if (tokens.matches("SET", "DEFAULT")) {
1912 tokens.consume("SET", "DEFAULT");
1913 } else if (tokens.matches("NO", "ACTION")) {
1914 tokens.consume("NO", "ACTION");
1915 } else {
1916 System.out.println(" ERROR: ColumnDefinition REFERENCES has NO REFERENCIAL ACTION.");
1917 }
1918 }
1919 }
1920 }
1921
1922
1923
1924
1925
1926
1927
1928
1929
1930
1931
1932
1933
1934
1935 protected AstNode parseCreateViewStatement( DdlTokenStream tokens,
1936 AstNode parentNode ) throws ParsingException {
1937 assert tokens != null;
1938 assert parentNode != null;
1939
1940 markStartOfStatement(tokens);
1941
1942
1943
1944
1945
1946
1947
1948
1949
1950
1951 String stmtType = "CREATE";
1952 tokens.consume("CREATE");
1953 if (tokens.canConsume("OR", "REPLACE")) {
1954 stmtType = stmtType + SPACE + "OR REPLACE";
1955 }
1956 tokens.consume("VIEW");
1957 stmtType = stmtType + SPACE + "VIEW";
1958
1959 String name = parseName(tokens);
1960
1961 AstNode createViewNode = nodeFactory().node(name, parentNode, TYPE_CREATE_VIEW_STATEMENT);
1962
1963
1964 parseColumnNameList(tokens, createViewNode, TYPE_COLUMN_REFERENCE);
1965
1966 tokens.consume("AS");
1967
1968 String queryExpression = parseUntilTerminator(tokens);
1969
1970 createViewNode.setProperty(CREATE_VIEW_QUERY_EXPRESSION, queryExpression);
1971
1972 markEndOfStatement(tokens, createViewNode);
1973
1974 return createViewNode;
1975 }
1976
1977
1978
1979
1980
1981
1982
1983
1984
1985
1986
1987
1988
1989
1990 protected AstNode parseCreateSchemaStatement( DdlTokenStream tokens,
1991 AstNode parentNode ) throws ParsingException {
1992 markStartOfStatement(tokens);
1993
1994 AstNode schemaNode = null;
1995
1996 String authorizationIdentifier = null;
1997 String schemaName = null;
1998
1999 tokens.consume("CREATE", "SCHEMA");
2000
2001 if (tokens.canConsume("AUTHORIZATION")) {
2002 authorizationIdentifier = tokens.consume();
2003 } else {
2004 schemaName = parseName(tokens);
2005 if (tokens.canConsume("AUTHORIZATION")) {
2006 authorizationIdentifier = parseName(tokens);
2007 }
2008 }
2009
2010 assert authorizationIdentifier != null || schemaName != null;
2011
2012 if (schemaName != null) {
2013 schemaNode = nodeFactory().node(schemaName, parentNode, TYPE_CREATE_SCHEMA_STATEMENT);
2014 } else {
2015 schemaNode = nodeFactory().node(authorizationIdentifier, parentNode, TYPE_CREATE_SCHEMA_STATEMENT);
2016 }
2017
2018 if (tokens.canConsume("DEFAULT", "CHARACTER", "SET")) {
2019
2020 parseName(tokens);
2021 }
2022
2023 markEndOfStatement(tokens, schemaNode);
2024
2025 return schemaNode;
2026 }
2027
2028
2029
2030
2031
2032
2033
2034
2035
2036
2037 protected AstNode parseCreateAssertionStatement( DdlTokenStream tokens,
2038 AstNode parentNode ) throws ParsingException {
2039 markStartOfStatement(tokens);
2040
2041
2042
2043
2044 AstNode node = null;
2045
2046 tokens.consume("CREATE", "ASSERTION");
2047
2048 String name = parseName(tokens);
2049
2050
2051
2052 node = nodeFactory().node(name, parentNode, TYPE_CREATE_ASSERTION_STATEMENT);
2053
2054 tokens.consume("CHECK");
2055
2056 String searchCondition = consumeParenBoundedTokens(tokens, false);
2057
2058 node.setProperty(CHECK_SEARCH_CONDITION, searchCondition);
2059
2060 parseConstraintAttributes(tokens, node);
2061
2062 markEndOfStatement(tokens, node);
2063
2064 return node;
2065 }
2066
2067
2068
2069
2070
2071
2072
2073
2074
2075
2076
2077
2078
2079
2080
2081
2082 protected AstNode parseIgnorableStatement( DdlTokenStream tokens,
2083 String name,
2084 AstNode parentNode ) {
2085
2086 AstNode node = nodeFactory().node(name, parentNode, TYPE_STATEMENT);
2087
2088 parseUntilTerminator(tokens);
2089
2090
2091 return node;
2092 }
2093
2094
2095
2096
2097
2098
2099
2100
2101
2102
2103
2104
2105
2106 protected AstNode parseIgnorableStatement( DdlTokenStream tokens,
2107 String name,
2108 AstNode parentNode,
2109 Name mixinType ) {
2110 CheckArg.isNotNull(tokens, "tokens");
2111 CheckArg.isNotNull(name, "name");
2112 CheckArg.isNotNull(parentNode, "parentNode");
2113 CheckArg.isNotNull(mixinType, "mixinType");
2114
2115 AstNode node = nodeFactory().node(name, parentNode, mixinType);
2116
2117 parseUntilTerminator(tokens);
2118
2119 return node;
2120 }
2121
2122
2123
2124
2125
2126
2127
2128
2129
2130
2131 protected AstNode parseStatement( DdlTokenStream tokens,
2132 String[] stmt_start_phrase,
2133 AstNode parentNode,
2134 Name mixinType ) {
2135 CheckArg.isNotNull(tokens, "tokens");
2136 CheckArg.isNotNull(stmt_start_phrase, "stmt_start_phrase");
2137 CheckArg.isNotNull(parentNode, "parentNode");
2138 CheckArg.isNotNull(mixinType, "mixinType");
2139
2140 markStartOfStatement(tokens);
2141 tokens.consume(stmt_start_phrase);
2142 AstNode result = parseIgnorableStatement(tokens, getStatementTypeName(stmt_start_phrase), parentNode, mixinType);
2143 markEndOfStatement(tokens, result);
2144
2145 return result;
2146 }
2147
2148
2149
2150
2151
2152
2153
2154 public final AstNode unknownTerminatedNode( AstNode parentNode ) {
2155 return nodeFactory.node("unknownStatement", parentNode, StandardDdlLexicon.TYPE_UNKNOWN_STATEMENT);
2156 }
2157
2158
2159
2160
2161
2162
2163
2164 public final AstNode missingTerminatorNode( AstNode parentNode ) {
2165 return nodeFactory.node("missingTerminator", parentNode, StandardDdlLexicon.TYPE_MISSING_TERMINATOR);
2166 }
2167
2168 public final boolean isMissingTerminatorNode( AstNode node ) {
2169 return node.getName().getString().equals(MISSING_TERMINATOR_NODE_LITERAL)
2170 && nodeFactory().hasMixinType(node, TYPE_MISSING_TERMINATOR);
2171 }
2172
2173 public final boolean isValidSchemaChild( AstNode node ) {
2174 Name[] schemaChildMixins = getValidSchemaChildTypes();
2175 for (Object mixin : node.getProperty(JcrLexicon.MIXIN_TYPES).getValuesAsArray()) {
2176 if (mixin instanceof Name) {
2177 for (Name nextType : schemaChildMixins) {
2178 if (nextType.equals(mixin)) {
2179 return true;
2180 }
2181 }
2182 }
2183 }
2184
2185 return false;
2186 }
2187
    /**
     * Attempts to reparent the given statement node under a preceding CREATE SCHEMA node, which
     * happens when a schema's child statements appear without an intervening terminator.
     *
     * NOTE(review): this assumes the statement node was just appended as the root's last child,
     * so the root's tail looks like [ ..., schemaNode, missingTerminatorMarker, statementNode ]
     * (indices size-3, size-2, size-1) — confirm at the call sites.
     *
     * @param statementNode the statement node to possibly reparent
     * @param stmtIsMissingTerminator true when this statement itself lacked a terminator; a
     *        missing-terminator marker is then added under the schema so a following statement
     *        can also be adopted
     * @return true when the node was moved under a schema node
     */
    public final boolean setAsSchemaChildNode( AstNode statementNode,
                                               boolean stmtIsMissingTerminator ) {

        if (!isValidSchemaChild(statementNode)) {
            return false;
        }

        List<AstNode> children = getRootNode().getChildren();

        // Need at least [candidate schema, missing-terminator marker, this statement].
        if (children.size() > 2) {
            AstNode previousNode = children.get(children.size() - 2);
            if (nodeFactory().hasMixinType(previousNode, TYPE_MISSING_TERMINATOR)) {
                AstNode theSchemaNode = children.get(children.size() - 3);

                // Only adopt when the schema is still "open": it has no children yet, or its last
                // child also lacked a terminator.
                if (theSchemaNode.getChildCount() == 0
                    || nodeFactory().hasMixinType(theSchemaNode.getLastChild(), TYPE_MISSING_TERMINATOR)) {
                    if (nodeFactory().hasMixinType(theSchemaNode, TYPE_CREATE_SCHEMA_STATEMENT)) {
                        statementNode.setParent(theSchemaNode);
                        if (stmtIsMissingTerminator) {
                            // Keep the schema "open" for the next statement.
                            missingTerminatorNode(theSchemaNode);
                        }
                        return true;
                    }
                }
            }
        }

        return false;
    }
2222
2223
2224
2225
2226
2227
2228 protected String getTerminator() {
2229 return this.terminator;
2230 }
2231
2232
2233
2234
2235
2236 protected boolean setTerminator( String terminator ) {
2237 CheckArg.isNotNull(terminator, "terminator");
2238 if (this.terminator.equalsIgnoreCase(terminator)) {
2239 return false;
2240 }
2241 this.terminator = terminator;
2242 return true;
2243 }
2244
2245 protected Name[] getValidSchemaChildTypes() {
2246 return VALID_SCHEMA_CHILD_TYPES;
2247 }
2248
2249
2250
2251
2252
2253
2254
2255
2256 protected boolean isComment( DdlTokenStream tokens ) throws ParsingException {
2257 return tokens.matches(DdlTokenizer.COMMENT);
2258 }
2259
2260
2261
2262
2263
2264
2265
2266
2267 protected boolean consumeComment( DdlTokenStream tokens ) throws ParsingException {
2268 return tokens.canConsume(DdlTokenizer.COMMENT);
2269 }
2270
2271
2272
2273
2274
2275
2276
2277
2278
2279 protected boolean isTableConstraint( DdlTokenStream tokens ) throws ParsingException {
2280 boolean result = false;
2281
2282 if ((tokens.matches("PRIMARY", "KEY")) || (tokens.matches("FOREIGN", "KEY")) || (tokens.matches("UNIQUE"))) {
2283 result = true;
2284 } else if (tokens.matches("CONSTRAINT")) {
2285 if (tokens.matches("CONSTRAINT", DdlTokenStream.ANY_VALUE, "UNIQUE")
2286 || tokens.matches("CONSTRAINT", DdlTokenStream.ANY_VALUE, "PRIMARY", "KEY")
2287 || tokens.matches("CONSTRAINT", DdlTokenStream.ANY_VALUE, "FOREIGN", "KEY")
2288 || tokens.matches("CONSTRAINT", DdlTokenStream.ANY_VALUE, "CHECK")) {
2289 result = true;
2290 }
2291 }
2292
2293 return result;
2294 }
2295
2296
2297
2298
2299
2300
2301
2302
2303
2304 protected boolean isColumnDefinitionStart( DdlTokenStream tokens ) throws ParsingException {
2305 boolean result = false;
2306
2307 if (isTableConstraint(tokens)) {
2308 result = false;
2309 } else {
2310 for (String dTypeStartWord : getDataTypeStartWords()) {
2311 result = (tokens.matches(DdlTokenStream.ANY_VALUE, dTypeStartWord) || tokens.matches("COLUMN",
2312 DdlTokenStream.ANY_VALUE,
2313 dTypeStartWord));
2314 if (result) {
2315 break;
2316 }
2317 }
2318
2319 }
2320
2321 return result;
2322 }
2323
2324
2325
2326
2327
2328
2329 protected List<String> getDataTypeStartWords() {
2330 if (allDataTypeStartWords == null) {
2331 allDataTypeStartWords = new ArrayList<String>();
2332 allDataTypeStartWords.addAll(DataTypes.DATATYPE_START_WORDS);
2333 allDataTypeStartWords.addAll(getCustomDataTypeStartWords());
2334 }
2335 return allDataTypeStartWords;
2336 }
2337
2338
2339
2340
2341
2342
2343
2344 protected List<String> getCustomDataTypeStartWords() {
2345 return Collections.emptyList();
2346 }
2347
2348
2349
2350
2351
2352
2353
2354
2355 protected String parseName( DdlTokenStream tokens ) {
2356
2357
2358
2359 StringBuffer sb = new StringBuffer();
2360
2361 if (tokens.matches('[')) {
2362
2363 while (true) {
2364
2365 tokens.consume('[');
2366 sb.append(consumeIdentifier(tokens));
2367 tokens.consume(']');
2368 if (tokens.matches('.')) {
2369 sb.append(tokens.consume());
2370 } else {
2371 break;
2372 }
2373 }
2374 } else {
2375
2376
2377 while (true) {
2378
2379 sb.append(consumeIdentifier(tokens));
2380
2381 if (tokens.matches('.')) {
2382 sb.append(tokens.consume());
2383 } else {
2384 break;
2385 }
2386
2387 }
2388 }
2389
2390 return sb.toString();
2391 }
2392
2393
2394
2395
2396
2397
2398
2399
2400 protected String consumeIdentifier( DdlTokenStream tokens ) throws ParsingException {
2401 String value = tokens.consume();
2402
2403 if (value.charAt(0) == '"') {
2404 int length = value.length();
2405
2406 value = value.substring(1, length - 1);
2407 }
2408
2409
2410
2411
2412
2413
2414
2415
2416 return value;
2417 }
2418
2419
2420
2421
2422
2423
2424
2425
2426 protected boolean isTerminator( DdlTokenStream tokens ) throws ParsingException {
2427 boolean result = tokens.matches(getTerminator());
2428
2429 return result;
2430 }
2431
2432
2433
2434
2435
2436
2437
2438
2439
2440
2441 protected boolean parseColumnNameList( DdlTokenStream tokens,
2442 AstNode parentNode,
2443 Name referenceType ) {
2444 boolean parsedColumns = false;
2445
2446 List<String> columnNameList = new ArrayList<String>();
2447 if (tokens.matches(L_PAREN)) {
2448 tokens.consume(L_PAREN);
2449 columnNameList = parseColumnNameList(tokens);
2450 tokens.consume(R_PAREN);
2451 }
2452
2453 for (String columnName : columnNameList) {
2454 nodeFactory().node(columnName, parentNode, referenceType);
2455 parsedColumns = true;
2456 }
2457
2458 return parsedColumns;
2459 }
2460
2461
2462
2463
2464
2465
2466
2467
2468 protected List<String> parseColumnNameList( DdlTokenStream tokens ) throws ParsingException {
2469 List<String> columnNames = new LinkedList<String>();
2470
2471 while (true) {
2472 columnNames.add(parseName(tokens));
2473 if (!tokens.canConsume(COMMA)) {
2474 break;
2475 }
2476 }
2477
2478 return columnNames;
2479 }
2480
2481
2482
2483
2484
2485
2486
2487
2488
2489 protected String parseUntilTerminator( DdlTokenStream tokens ) throws ParsingException {
2490 StringBuffer sb = new StringBuffer();
2491 if (doUseTerminator()) {
2492 boolean lastTokenWasPeriod = false;
2493 while (tokens.hasNext() && !tokens.matches(DdlTokenizer.STATEMENT_KEY) && !isTerminator(tokens)) {
2494 String thisToken = tokens.consume();
2495 boolean thisTokenIsPeriod = thisToken.equals(PERIOD);
2496 boolean thisTokenIsComma = thisToken.equals(COMMA);
2497 if (lastTokenWasPeriod || thisTokenIsPeriod || thisTokenIsComma) {
2498 sb.append(thisToken);
2499 } else {
2500 sb.append(SPACE).append(thisToken);
2501 }
2502 if (thisTokenIsPeriod) {
2503 lastTokenWasPeriod = true;
2504 } else {
2505 lastTokenWasPeriod = false;
2506 }
2507 }
2508 } else {
2509
2510 boolean lastTokenWasPeriod = false;
2511 while (tokens.hasNext() && !tokens.matches(DdlTokenizer.STATEMENT_KEY)) {
2512 String thisToken = tokens.consume();
2513 boolean thisTokenIsPeriod = thisToken.equals(PERIOD);
2514 boolean thisTokenIsComma = thisToken.equals(COMMA);
2515 if (lastTokenWasPeriod || thisTokenIsPeriod || thisTokenIsComma) {
2516 sb.append(thisToken);
2517 } else {
2518 sb.append(SPACE).append(thisToken);
2519 }
2520 if (thisTokenIsPeriod) {
2521 lastTokenWasPeriod = true;
2522 } else {
2523 lastTokenWasPeriod = false;
2524 }
2525 }
2526 }
2527
2528 return sb.toString();
2529 }
2530
2531
2532
2533
2534
2535
2536
2537
2538
2539
2540 protected String parseUntilTerminatorIgnoreEmbeddedStatements( DdlTokenStream tokens ) throws ParsingException {
2541 StringBuffer sb = new StringBuffer();
2542
2543 boolean lastTokenWasPeriod = false;
2544 while (tokens.hasNext() && !isTerminator(tokens)) {
2545 String thisToken = tokens.consume();
2546 boolean thisTokenIsPeriod = thisToken.equals(PERIOD);
2547 boolean thisTokenIsComma = thisToken.equals(COMMA);
2548 if (lastTokenWasPeriod || thisTokenIsPeriod || thisTokenIsComma) {
2549 sb.append(thisToken);
2550 } else {
2551 sb.append(SPACE).append(thisToken);
2552 }
2553 if (thisTokenIsPeriod) {
2554 lastTokenWasPeriod = true;
2555 } else {
2556 lastTokenWasPeriod = false;
2557 }
2558 }
2559
2560 return sb.toString();
2561 }
2562
2563
2564
2565
2566
2567
2568
2569
2570 protected String parseUntilSemiColon( DdlTokenStream tokens ) throws ParsingException {
2571 StringBuffer sb = new StringBuffer();
2572
2573 boolean lastTokenWasPeriod = false;
2574 while (tokens.hasNext() && !tokens.matches(SEMICOLON)) {
2575 String thisToken = tokens.consume();
2576 boolean thisTokenIsPeriod = thisToken.equals(PERIOD);
2577 boolean thisTokenIsComma = thisToken.equals(COMMA);
2578 if (lastTokenWasPeriod || thisTokenIsPeriod || thisTokenIsComma) {
2579 sb.append(thisToken);
2580 } else {
2581 sb.append(SPACE).append(thisToken);
2582 }
2583 if (thisTokenIsPeriod) {
2584 lastTokenWasPeriod = true;
2585 } else {
2586 lastTokenWasPeriod = false;
2587 }
2588 }
2589
2590 return sb.toString();
2591 }
2592
2593 protected String parseUntilCommaOrTerminator( DdlTokenStream tokens ) throws ParsingException {
2594 StringBuffer sb = new StringBuffer();
2595 if (doUseTerminator()) {
2596 while (tokens.hasNext() && !tokens.matches(DdlTokenizer.STATEMENT_KEY) && !isTerminator(tokens)
2597 && !tokens.matches(COMMA)) {
2598 sb.append(SPACE).append(tokens.consume());
2599 }
2600 } else {
2601
2602 while (tokens.hasNext() && !tokens.matches(DdlTokenizer.STATEMENT_KEY) && !tokens.matches(COMMA)) {
2603 sb.append(SPACE).append(tokens.consume());
2604 }
2605 }
2606
2607 return sb.toString();
2608 }
2609
2610
2611
2612
2613
2614
2615 public boolean doUseTerminator() {
2616 return useTerminator;
2617 }
2618
2619
2620
2621
2622
2623
2624
2625 public void setDoUseTerminator( boolean useTerminator ) {
2626 this.useTerminator = useTerminator;
2627 }
2628
2629 public String getStatementTypeName( String[] stmtPhrase ) {
2630 StringBuffer sb = new StringBuffer(100);
2631 for (int i = 0; i < stmtPhrase.length; i++) {
2632 if (i == 0) {
2633 sb.append(stmtPhrase[0]);
2634 } else {
2635 sb.append(SPACE).append(stmtPhrase[i]);
2636 }
2637 }
2638
2639 return sb.toString();
2640 }
2641
2642
2643
2644
2645
2646
2647
2648
2649
2650 protected boolean parseDefaultClause( DdlTokenStream tokens,
2651 AstNode columnNode ) throws ParsingException {
2652
2653 assert tokens != null;
2654 assert columnNode != null;
2655
2656
2657
2658
2659
2660
2661
2662
2663
2664
2665
2666
2667
2668
2669
2670
2671
2672
2673
2674
2675 String defaultValue = "";
2676
2677 if (tokens.canConsume("DEFAULT")) {
2678
2679 int optionID = -1;
2680 int precision = -1;
2681
2682 if (tokens.canConsume("CURRENT_DATE")) {
2683
2684 optionID = DEFAULT_ID_DATETIME;
2685 defaultValue = "CURRENT_DATE";
2686 } else if (tokens.canConsume("CURRENT_TIME")) {
2687 optionID = DEFAULT_ID_DATETIME;
2688 defaultValue = "CURRENT_TIME";
2689 if (tokens.canConsume(L_PAREN)) {
2690
2691 precision = integer(tokens.consume());
2692 tokens.canConsume(R_PAREN);
2693 }
2694 } else if (tokens.canConsume("CURRENT_TIMESTAMP")) {
2695 optionID = DEFAULT_ID_DATETIME;
2696 defaultValue = "CURRENT_TIMESTAMP";
2697 if (tokens.canConsume(L_PAREN)) {
2698
2699 precision = integer(tokens.consume());
2700 tokens.canConsume(R_PAREN);
2701 }
2702 } else if (tokens.canConsume("USER")) {
2703 optionID = DEFAULT_ID_USER;
2704 defaultValue = "USER";
2705 } else if (tokens.canConsume("CURRENT_USER")) {
2706 optionID = DEFAULT_ID_CURRENT_USER;
2707 defaultValue = "CURRENT_USER";
2708 } else if (tokens.canConsume("SESSION_USER")) {
2709 optionID = DEFAULT_ID_SESSION_USER;
2710 defaultValue = "SESSION_USER";
2711 } else if (tokens.canConsume("SYSTEM_USER")) {
2712 optionID = DEFAULT_ID_SYSTEM_USER;
2713 defaultValue = "SYSTEM_USER";
2714 } else if (tokens.canConsume("NULL")) {
2715 optionID = DEFAULT_ID_NULL;
2716 defaultValue = "NULL";
2717 } else if (tokens.canConsume(L_PAREN)) {
2718 optionID = DEFAULT_ID_LITERAL;
2719 while (!tokens.canConsume(R_PAREN)) {
2720 defaultValue = defaultValue + tokens.consume();
2721 }
2722 } else {
2723 optionID = DEFAULT_ID_LITERAL;
2724
2725 defaultValue = tokens.consume();
2726
2727
2728 if (tokens.canConsume(".")) {
2729 defaultValue = defaultValue + '.' + tokens.consume();
2730 }
2731 }
2732
2733 columnNode.setProperty(DEFAULT_OPTION, optionID);
2734 columnNode.setProperty(DEFAULT_VALUE, defaultValue);
2735 if (precision > -1) {
2736 columnNode.setProperty(DEFAULT_PRECISION, precision);
2737 }
2738 return true;
2739 }
2740
2741 return false;
2742 }
2743
2744
2745
2746
2747
2748
2749
2750
2751
2752 protected boolean parseCollateClause( DdlTokenStream tokens,
2753 AstNode columnNode ) throws ParsingException {
2754 assert tokens != null;
2755 assert columnNode != null;
2756
2757
2758
2759
2760
2761 if (tokens.matches("COLLATE")) {
2762 tokens.consume("COLLATE");
2763 String collationName = parseName(tokens);
2764 columnNode.setProperty(COLLATION_NAME, collationName);
2765 return true;
2766 }
2767
2768 return false;
2769 }
2770
2771
2772
2773
2774
2775
2776
2777
2778 protected int integer( String value ) {
2779 assert value != null;
2780 assert value.length() > 0;
2781
2782 return new BigInteger(value).intValue();
2783 }
2784
2785 public final Position getCurrentMarkedPosition() {
2786 return currentMarkedPosition;
2787 }
2788
2789
2790
2791
2792
2793
/**
 * Marks the current stream position as the start of a statement, so that
 * {@link #markEndOfStatement} can later recover the statement's raw text and
 * source coordinates.
 *
 * @param tokens the token stream being parsed
 */
public final void markStartOfStatement( DdlTokenStream tokens ) {
    // Mark first so getMarkedContent() later returns the text from this point on.
    tokens.mark();
    currentMarkedPosition = tokens.nextPosition();
}
2798
2799
2800
2801
2802
2803
2804
2805
2806
2807
/**
 * Marks the end of the current statement: consumes the terminator if present,
 * attaches the statement under the appropriate parent, and records the statement's
 * raw text and start coordinates (line, character index, column) on the node.
 * Must be paired with an earlier {@link #markStartOfStatement}.
 *
 * @param tokens the token stream being parsed
 * @param statementNode the node representing the just-parsed statement
 */
public final void markEndOfStatement( DdlTokenStream tokens,
AstNode statementNode ) {
// NOTE: canConsume() both tests for AND consumes the terminator when present.
if (!tokens.canConsume(getTerminator())) {
// No terminator found. Try to re-parent under a schema node; if that fails,
// record a missing-terminator problem on the root node.
if (!setAsSchemaChildNode(statementNode, true)) {
missingTerminatorNode(getRootNode());
}
} else {
setAsSchemaChildNode(statementNode, false);
}

// Raw text between the mark set in markStartOfStatement() and the current position.
String source = tokens.getMarkedContent().trim();
statementNode.setProperty(DDL_EXPRESSION, source);
statementNode.setProperty(DDL_START_LINE_NUMBER, currentMarkedPosition.getLine());
statementNode.setProperty(DDL_START_CHAR_INDEX, currentMarkedPosition.getIndexInContent());
statementNode.setProperty(DDL_START_COLUMN_NUMBER, currentMarkedPosition.getColumn());

// Debug output only; no-op unless test mode is enabled.
testPrint("== >> SOURCE:\n" + source + "\n");
}
2834
2835 protected void testPrint( String str ) {
2836 if (isTestMode()) {
2837 System.out.println(str);
2838 }
2839 }
2840
2841
2842
2843
2844 public boolean isTestMode() {
2845 return testMode;
2846 }
2847
2848
2849
2850
2851 public void setTestMode( boolean testMode ) {
2852 this.testMode = testMode;
2853 }
2854
2855
2856
2857
2858
2859
2860 public String getId() {
2861 return parserId;
2862 }
2863
2864
2865
2866
2867
2868
2869 @Override
2870 public int hashCode() {
2871 return this.parserId.hashCode();
2872 }
2873
2874
2875
2876
2877
2878
2879 @Override
2880 public boolean equals( Object obj ) {
2881 if (obj == this) return true;
2882 if (obj instanceof DdlParser) {
2883 return ((DdlParser)obj).getId().equals(this.getId());
2884 }
2885 return false;
2886 }
2887 }