HSQLDB example source code file (DatabaseCommandInterpreter.java)
This example HSQLDB source code file (DatabaseCommandInterpreter.java) is included in the DevDaily.com "Java Source Code Warehouse" project. The intent of this project is to help you "Learn Java by Example" TM.
The HSQLDB DatabaseCommandInterpreter.java source code
/*
* For work developed by the HSQL Development Group:
*
* Copyright (c) 2001-2010, The HSQL Development Group
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* Neither the name of the HSQL Development Group nor the names of its
* contributors may be used to endorse or promote products derived from this
* software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL HSQL DEVELOPMENT GROUP, HSQLDB.ORG,
* OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
*
*
* For work originally developed by the Hypersonic SQL Group:
*
* Copyright (c) 1995-2000, The Hypersonic SQL Group.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* Neither the name of the Hypersonic SQL Group nor the names of its
* contributors may be used to endorse or promote products derived from this
* software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE HYPERSONIC SQL GROUP,
* OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
* This software consists of voluntary contributions made by many individuals
* on behalf of the Hypersonic SQL Group.
*/
package org.hsqldb;
import java.io.IOException;
import java.io.LineNumberReader;
import java.io.StringReader;
import java.util.Locale;
import org.hsqldb.HsqlNameManager.HsqlName;
import org.hsqldb.lib.ArrayUtil;
import org.hsqldb.lib.HashMappedList;
import org.hsqldb.lib.HsqlArrayList;
import org.hsqldb.lib.StringUtil;
import org.hsqldb.lib.java.JavaSystem;
import org.hsqldb.persist.HsqlDatabaseProperties;
import org.hsqldb.scriptio.ScriptWriterBase;
import org.hsqldb.scriptio.ScriptWriterText;
/**
* Provides SQL Interpreter services relative to a Session and
* its Database.
*
* The core functionality of this class was inherited from Hypersonic and
* extensively rewritten and extended in successive versions of HSQLDB.
*
* @author Thomas Mueller (Hypersonic SQL Group)
* @version 1.8.0
* @since 1.7.2
*/
// fredt@users 20020221 - patch 513005 by sqlbob@users (RMP) - various corrections
// fredt@users 20020430 - patch 549741 by velichko - ALTER TABLE RENAME
// fredt@users 20020405 - patch 1.7.0 - other ALTER TABLE statements
// tony_lai@users 20020820 - patch 595099 - use user-defined PK name
// tony_lai@users 20020820 - patch 595156 - violation of constraint name
// fredt@users 20020912 - patch 1.7.1 by fredt - log alter statements
// kloska@users 20021030 - patch 1.7.2 - ON UPDATE CASCADE | SET NULL | SET DEFAULT
// kloska@users 20021112 - patch 1.7.2 - ON DELETE SET NULL | SET DEFAULT
// boucherb@users 20020310 - disable ALTER TABLE DDL on VIEWs (avoid NPE)
// fredt@users 20030314 - patch 1.7.2 by gilead@users - drop table if exists syntax
// boucherb@users 20030425 - DDL methods are moved to DatabaseCommandInterpreter.java
// boucherb@users 20030425 - refactoring DDL methods into smaller units
// fredt@users 20030609 - support for ALTER COLUMN SET/DROP DEFAULT / RENAME TO
// wondersonic@users 20031205 - IF EXISTS support for DROP INDEX
// fredt@users 20031224 - support for CREATE SEQUENCE ...
// fredt@users 20041209 - patch by tytar@users to set default table type
class DatabaseCommandInterpreter {
private Tokenizer tokenizer = new Tokenizer();
private Database database;
private Session session;
/**
* Constructs a new DatabaseCommandInterpreter for the given Session
*
* @param s session
*/
DatabaseCommandInterpreter(Session s) {
session = s;
database = s.getDatabase();
}
/**
* Executes the SQL String. This method is always called from a block
* synchronized on the database object.
*
* @param sql query
* @return the result of executing the given SQL String
*/
Result execute(String sql) {
Result result;
String token;
int cmd;
JavaSystem.gc();
result = null;
cmd = Token.UNKNOWNTOKEN;
try {
tokenizer.reset(sql);
while (true) {
tokenizer.setPartMarker();
session.setScripting(false);
token = tokenizer.getSimpleToken();
if (token.length() == 0) {
session.endSchemaDefinition();
break;
}
cmd = Token.get(token);
if (cmd == Token.SEMICOLON) {
session.endSchemaDefinition();
continue;
}
result = executePart(cmd, token);
if (result.isError()) {
session.endSchemaDefinition();
break;
}
if (session.getScripting()) {
database.logger.writeToLog(session,
tokenizer.getLastPart());
}
}
} catch (Throwable t) {
try {
if (session.isSchemaDefintion()) {
HsqlName schemaName = session.getSchemaHsqlName(null);
database.schemaManager.dropSchema(schemaName.name, true);
database.logger.writeToLog(session,
Token.T_DROP + ' '
+ Token.T_SCHEMA + ' '
+ schemaName.statementName
+ ' ' + Token.T_CASCADE);
database.logger.synchLog();
session.endSchemaDefinition();
}
} catch (HsqlException e) {}
result = new Result(t, tokenizer.getLastPart());
}
return result == null ? Session.emptyUpdateCount
: result;
}
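/*
 * Minimal usage sketch (illustrative only; the table name "t" is hypothetical).
 * As the javadoc above notes, callers hold a lock on the database object:
 *
 *   synchronized (database) {
 *       DatabaseCommandInterpreter dci = new DatabaseCommandInterpreter(session);
 *       Result r = dci.execute("CREATE TABLE t(id INTEGER PRIMARY KEY); INSERT INTO t VALUES(1);");
 *       if (r.isError()) {
 *           // inspect the error carried by the Result
 *       }
 *   }
 */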
private Result executePart(int cmd, String token) throws Throwable {
Result result = Session.emptyUpdateCount;
int brackets = 0;
if (session.isSchemaDefintion()) {
switch (cmd) {
case Token.CREATE :
case Token.GRANT :
break;
default :
throw Trace.error(Trace.INVALID_IDENTIFIER,
Trace.IN_SCHEMA_DEFINITION,
new Object[]{ token });
}
}
switch (cmd) {
case Token.OPENBRACKET : {
Parser parser = new Parser(session, database, tokenizer);
brackets = parser.parseOpenBracketsSelect() + 1;
}
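//fall thru - a leading "(" starts a (possibly nested) parenthesized SELECT,
//which is then compiled by the SELECT case below using the bracket count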
case Token.SELECT : {
Parser parser = new Parser(session, database, tokenizer);
CompiledStatement cStatement =
parser.compileSelectStatement(brackets);
if (cStatement.parameters.length != 0) {
Trace.doAssert(
false,
Trace.getMessage(Trace.ASSERT_DIRECT_EXEC_WITH_PARAM));
}
result = session.sqlExecuteCompiledNoPreChecks(cStatement,
null);
break;
}
case Token.INSERT : {
Parser parser = new Parser(session, database, tokenizer);
CompiledStatement cStatement = parser.compileInsertStatement();
if (cStatement.parameters.length != 0) {
Trace.doAssert(
false,
Trace.getMessage(Trace.ASSERT_DIRECT_EXEC_WITH_PARAM));
}
result = session.sqlExecuteCompiledNoPreChecks(cStatement,
null);
break;
}
case Token.UPDATE : {
Parser parser = new Parser(session, database, tokenizer);
CompiledStatement cStatement = parser.compileUpdateStatement();
if (cStatement.parameters.length != 0) {
Trace.doAssert(
false,
Trace.getMessage(Trace.ASSERT_DIRECT_EXEC_WITH_PARAM));
}
result = session.sqlExecuteCompiledNoPreChecks(cStatement,
null);
break;
}
case Token.DELETE : {
Parser parser = new Parser(session, database, tokenizer);
CompiledStatement cStatement = parser.compileDeleteStatement();
if (cStatement.parameters.length != 0) {
Trace.doAssert(
false,
Trace.getMessage(Trace.ASSERT_DIRECT_EXEC_WITH_PARAM));
}
result = session.sqlExecuteCompiledNoPreChecks(cStatement,
null);
break;
}
case Token.CALL : {
Parser parser = new Parser(session, database, tokenizer);
CompiledStatement cStatement = parser.compileCallStatement();
if (cStatement.parameters.length != 0) {
Trace.doAssert(
false,
Trace.getMessage(Trace.ASSERT_DIRECT_EXEC_WITH_PARAM));
}
result = session.sqlExecuteCompiledNoPreChecks(cStatement,
null);
break;
}
case Token.SET :
processSet();
break;
case Token.COMMIT :
processCommit();
break;
case Token.ROLLBACK :
processRollback();
break;
case Token.SAVEPOINT :
processSavepoint();
break;
case Token.RELEASE :
processReleaseSavepoint();
break;
case Token.CREATE :
processCreate();
database.setMetaDirty(false);
break;
case Token.ALTER :
processAlter();
database.setMetaDirty(true);
break;
case Token.DROP :
processDrop();
database.setMetaDirty(true);
break;
case Token.GRANT :
processGrantOrRevoke(true);
database.setMetaDirty(false);
break;
case Token.REVOKE :
processGrantOrRevoke(false);
database.setMetaDirty(true);
break;
case Token.CONNECT :
processConnect();
database.setMetaDirty(false);
session.setScripting(false);
break;
case Token.DISCONNECT :
processDisconnect();
session.setScripting(true);
break;
case Token.SCRIPT :
result = processScript();
break;
case Token.SHUTDOWN :
processShutdown();
break;
case Token.CHECKPOINT :
processCheckpoint();
break;
case Token.EXPLAIN :
result = processExplainPlan();
break;
default :
throw Trace.error(Trace.UNEXPECTED_TOKEN, token);
}
return result;
}
/**
* Responsible for parsing and executing the SCRIPT SQL statement
*
* @return either an empty result or one in which each row is a DDL or DML statement

* @throws IOException
* @throws HsqlException
*/
private Result processScript() throws IOException, HsqlException {
String token = tokenizer.getString();
ScriptWriterText dsw = null;
session.checkAdmin();
try {
if (tokenizer.wasValue()) {
if (tokenizer.getType() != Types.VARCHAR) {
throw Trace.error(Trace.INVALID_IDENTIFIER);
}
dsw = new ScriptWriterText(database, token, true, true, true);
dsw.writeAll();
return new Result(ResultConstants.UPDATECOUNT);
} else {
tokenizer.back();
return DatabaseScript.getScript(database, false);
}
} finally {
if (dsw != null) {
dsw.close();
}
}
}
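/*
 * Illustrative forms handled above (the file name is hypothetical):
 *
 *   SCRIPT                    -- returns the database DDL as a result set
 *   SCRIPT 'backup.script'    -- writes the database script to the named file
 */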
/**
* Responsible for handling CREATE ...
*
* All CREATE commands require an ADMIN user except: <p>
*
* <pre>
* CREATE TEMP [MEMORY] TABLE
* </pre>
*
* @throws HsqlException
*/
private void processCreate() throws HsqlException {
boolean unique = false;
int tableType;
boolean isTempTable = false;
String token;
session.checkAdmin();
session.checkDDLWrite();
session.setScripting(true);
if (tokenizer.isGetThis(Token.T_GLOBAL)) {
tokenizer.getThis(Token.T_TEMPORARY);
isTempTable = true;
} else if (tokenizer.isGetThis(Token.T_TEMP)) {
isTempTable = true;
} else if (tokenizer.isGetThis(Token.T_TEMPORARY)) {
isTempTable = true;
}
token = tokenizer.getSimpleToken();
switch (Token.get(token)) {
// table
case Token.MEMORY :
tokenizer.getThis(Token.T_TABLE);
case Token.TABLE :
tableType = isTempTable ? Table.TEMP_TABLE
: database.getDefaultTableType();
processCreateTable(tableType);
return;
case Token.CACHED :
if (isTempTable) {
throw Trace.error(Trace.UNEXPECTED_TOKEN, token);
}
tokenizer.getThis(Token.T_TABLE);
processCreateTable(Table.CACHED_TABLE);
return;
case Token.TEXT :
if (isTempTable) {
throw Trace.error(Trace.UNEXPECTED_TOKEN, token);
}
tokenizer.getThis(Token.T_TABLE);
processCreateTable(Table.TEXT_TABLE);
return;
default :
if (isTempTable) {
throw Trace.error(Trace.UNEXPECTED_TOKEN, token);
}
}
switch (Token.get(token)) {
// other objects
case Token.ALIAS :
processCreateAlias();
break;
case Token.SEQUENCE :
processCreateSequence();
break;
case Token.SCHEMA :
session.setScripting(false);
processCreateSchema();
break;
case Token.TRIGGER :
processCreateTrigger();
break;
case Token.USER :
processCreateUser();
break;
case Token.ROLE :
database.getGranteeManager().addRole(getUserIdentifier());
break;
case Token.VIEW :
processCreateView();
break;
// index
case Token.UNIQUE :
unique = true;
tokenizer.getThis(Token.T_INDEX);
//fall thru
case Token.INDEX :
processCreateIndex(unique);
break;
default : {
throw Trace.error(Trace.UNEXPECTED_TOKEN, token);
}
}
}
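/*
 * Illustrative statements dispatched by the switches above (object names
 * are hypothetical):
 *
 *   CREATE [GLOBAL TEMPORARY | TEMP | MEMORY | CACHED | TEXT] TABLE t (...)
 *   CREATE [UNIQUE] INDEX idx ON t (col)
 *   CREATE SEQUENCE seq
 *   CREATE VIEW v AS SELECT ...
 *   CREATE TRIGGER trg AFTER INSERT ON t CALL "org.example.MyTrigger"
 *   CREATE USER ... / CREATE ROLE ... / CREATE SCHEMA ... / CREATE ALIAS ...
 */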
/**
* Process a bracketed column list as used in the declaration of SQL
* CONSTRAINTS and return an array containing the indexes of the columns
* within the table.
*
* @param t table that contains the columns
* @return column index map
* @throws HsqlException if a column is not found or is a duplicate
*/
private int[] processColumnList(Table t,
boolean acceptAscDesc)
throws HsqlException {
HashMappedList list = Parser.processColumnList(tokenizer,
acceptAscDesc);
int size = list.size();
int[] col = new int[size];
for (int i = 0; i < size; i++) {
col[i] = t.getColumnNr((String) list.getKey(i));
}
return col;
}
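/*
 * For example (illustrative, assuming zero-based column indexes): given a
 * table with columns A, B, C declared in that order, the bracketed list
 * "(C, A)" resolves to the index array {2, 0}.
 */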
/**
* Responsible for handling the execution of CREATE TRIGGER SQL
* statements. <p>
*
* Typical SQL is: CREATE TRIGGER tr1 AFTER INSERT ON tab1 CALL "pkg.cls"
*
* @throws HsqlException
*/
private void processCreateTrigger() throws HsqlException {
Table t;
boolean isForEach;
boolean isNowait;
int queueSize;
String triggerName;
boolean isQuoted;
String sWhen;
String sOper;
String tableName;
String token;
String className;
TriggerDef td;
Trigger o;
triggerName = tokenizer.getName();
String schemaname = tokenizer.getLongNameFirst();
database.schemaManager.checkTriggerExists(triggerName,
session.getSchemaNameForWrite(schemaname), false);
isQuoted = tokenizer.wasQuotedIdentifier();
isForEach = false;
isNowait = false;
queueSize = TriggerDef.getDefaultQueueSize();
sWhen = tokenizer.getSimpleToken();
sOper = tokenizer.getSimpleToken();
tokenizer.getThis(Token.T_ON);
tableName = tokenizer.getName();
if (schemaname == null) {
schemaname =
session.getSchemaNameForWrite(tokenizer.getLongNameFirst());
} else if (!schemaname.equals(
session.getSchemaNameForWrite(tokenizer.getLongNameFirst()))) {
throw Trace.error(Trace.INVALID_SCHEMA_NAME_NO_SUBCLASS);
}
t = database.schemaManager.getUserTable(session, tableName,
schemaname);
if (t.isView()) {
throw Trace.error(Trace.NOT_A_TABLE);
}
session.setScripting(true);
// "FOR EACH ROW" or "CALL"
token = tokenizer.getSimpleToken();
if (token.equals(Token.T_FOR)) {
token = tokenizer.getSimpleToken();
if (token.equals(Token.T_EACH)) {
token = tokenizer.getSimpleToken();
if (token.equals(Token.T_ROW)) {
isForEach = true;
// should be 'NOWAIT' or 'QUEUE' or 'CALL'
token = tokenizer.getSimpleToken();
} else {
throw Trace.error(Trace.UNEXPECTED_END_OF_COMMAND, token);
}
} else {
throw Trace.error(Trace.UNEXPECTED_END_OF_COMMAND, token);
}
}
if (token.equals(Token.T_NOWAIT)) {
isNowait = true;
// should be 'CALL' or 'QUEUE'
token = tokenizer.getSimpleToken();
}
if (token.equals(Token.T_QUEUE)) {
queueSize = tokenizer.getInt();
// should be 'CALL'
token = tokenizer.getSimpleToken();
}
if (!token.equals(Token.T_CALL)) {
throw Trace.error(Trace.UNEXPECTED_END_OF_COMMAND, token);
}
className = tokenizer.getSimpleName();
if (!tokenizer.wasQuotedIdentifier()) {
throw Trace.error(Trace.UNEXPECTED_END_OF_COMMAND, className);
}
HsqlName name = database.nameManager.newHsqlName(triggerName,
isQuoted);
td = new TriggerDef(name, sWhen, sOper, isForEach, t, className,
isNowait, queueSize, database.classLoader);
t.addTrigger(td);
if (td.isValid()) {
try {
// start the trigger thread
td.start();
} catch (Exception e) {
throw Trace.error(Trace.UNKNOWN_FUNCTION, e.toString());
}
}
database.schemaManager.registerTriggerName(triggerName, t.getName());
// --
}
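/*
 * Illustrative trigger DDL showing the optional clauses parsed above
 * (names are hypothetical):
 *
 *   CREATE TRIGGER trg AFTER UPDATE ON tab1
 *       FOR EACH ROW QUEUE 4 CALL "org.example.MyTrigger"
 *
 * FOR EACH ROW, NOWAIT and QUEUE <n> are optional; the class name after
 * CALL is required and must be a quoted identifier.
 */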
private Column processCreateColumn() throws HsqlException {
String token = tokenizer.getSimpleName();
boolean isQuoted = tokenizer.wasQuotedIdentifier();
HsqlName hsqlName = database.nameManager.newHsqlName(token, isQuoted);
return processCreateColumn(hsqlName);
}
/**
* Responsible for handling the creation of table columns during the
* process of executing CREATE TABLE DDL statements.
*
* @param hsqlName name of the column
* @return a Column object with indicated attributes
* @throws HsqlException
*/
private Column processCreateColumn(HsqlName hsqlName)
throws HsqlException {
boolean isIdentity = false;
long identityStart = database.firstIdentity;
long identityIncrement = 1;
boolean isPrimaryKey = false;
String typeName;
int type;
int length = 0;
int scale = 0;
boolean hasLength = false;
boolean isNullable = true;
Expression defaultExpr = null;
String token;
typeName = tokenizer.getSimpleToken();
type = Types.getTypeNr(typeName);
if (type == Types.CHAR) {
if (tokenizer.isGetThis(Token.T_VARYING)) {
type = Types.VARCHAR;
}
}
if (typeName.equals(Token.T_IDENTITY)) {
isIdentity = true;
isPrimaryKey = true;
}
// fredt - when SET IGNORECASE is in effect, all new VARCHAR columns are defined as VARCHAR_IGNORECASE
if (type == Types.DOUBLE) {
tokenizer.isGetThis(Token.T_PRECISION);
}
if (tokenizer.isGetThis(Token.T_OPENBRACKET)) {
hasLength = true;
length = tokenizer.getInt();
Trace.check(Types.acceptsPrecisionCreateParam(type),
Trace.UNEXPECTED_TOKEN);
if (type != Types.TIMESTAMP && type != Types.TIME && length == 0) {
throw Trace.error(Trace.INVALID_SIZE_PRECISION);
}
if (tokenizer.isGetThis(Token.T_COMMA)) {
Trace.check(Types.acceptsScaleCreateParam(type),
Trace.UNEXPECTED_TOKEN);
scale = tokenizer.getInt();
}
tokenizer.getThis(Token.T_CLOSEBRACKET);
} else if (type == Types.CHAR && database.sqlEnforceStrictSize) {
length = 1;
} else if (type == Types.VARCHAR && database.sqlEnforceStrictSize) {
throw Trace.error(Trace.COLUMN_SIZE_REQUIRED);
}
/**
* @todo fredt - drop support for SET IGNORECASE and replace the
* type name with a qualifier specifying the case sensitivity of VARCHAR
*/
if (type == Types.VARCHAR && database.isIgnoreCase()) {
type = Types.VARCHAR_IGNORECASE;
}
if (type == Types.FLOAT && length > 53) {
throw Trace.error(Trace.NUMERIC_VALUE_OUT_OF_RANGE);
}
if (type == Types.TIMESTAMP) {
if (!hasLength) {
length = 6;
} else if (length != 0 && length != 6) {
throw Trace.error(Trace.NUMERIC_VALUE_OUT_OF_RANGE);
}
}
if (type == Types.TIME) {
if (length != 0) {
throw Trace.error(Trace.NUMERIC_VALUE_OUT_OF_RANGE);
}
}
token = tokenizer.getSimpleToken();
if (token.equals(Token.T_DEFAULT)) {
defaultExpr = processCreateDefaultExpression(type, length, scale);
token = tokenizer.getSimpleToken();
} else if (token.equals(Token.T_GENERATED)) {
tokenizer.getThis(Token.T_BY);
tokenizer.getThis(Token.T_DEFAULT);
tokenizer.getThis(Token.T_AS);
tokenizer.getThis(Token.T_IDENTITY);
if (tokenizer.isGetThis(Token.T_OPENBRACKET)) {
tokenizer.getThis(Token.T_START);
tokenizer.getThis(Token.T_WITH);
identityStart = tokenizer.getBigint();
if (tokenizer.isGetThis(Token.T_COMMA)) {
tokenizer.getThis(Token.T_INCREMENT);
tokenizer.getThis(Token.T_BY);
identityIncrement = tokenizer.getBigint();
}
tokenizer.getThis(Token.T_CLOSEBRACKET);
}
isIdentity = true;
isPrimaryKey = true;
token = tokenizer.getSimpleToken();
}
// fredt@users - accept IDENTITY before or after NOT NULL
if (token.equals(Token.T_IDENTITY)) {
isIdentity = true;
isPrimaryKey = true;
token = tokenizer.getSimpleToken();
}
if (token.equals(Token.T_NULL)) {
token = tokenizer.getSimpleToken();
} else if (token.equals(Token.T_NOT)) {
tokenizer.getThis(Token.T_NULL);
isNullable = false;
token = tokenizer.getSimpleToken();
}
if (token.equals(Token.T_IDENTITY)) {
if (isIdentity) {
throw Trace.error(Trace.SECOND_PRIMARY_KEY, Token.T_IDENTITY);
}
isIdentity = true;
isPrimaryKey = true;
token = tokenizer.getSimpleToken();
}
if (token.equals(Token.T_PRIMARY)) {
tokenizer.getThis(Token.T_KEY);
isPrimaryKey = true;
} else {
tokenizer.back();
}
// make sure IDENTITY and DEFAULT are not used together
if (isIdentity && defaultExpr != null) {
throw Trace.error(Trace.UNEXPECTED_TOKEN, Token.T_DEFAULT);
}
Column column = new Column(hsqlName, isNullable, type, length, scale,
isPrimaryKey, defaultExpr);
column.setIdentity(isIdentity, identityStart, identityIncrement);
return column;
}
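/*
 * Illustrative column definitions accepted by the parsing above (names are
 * hypothetical):
 *
 *   id INTEGER IDENTITY
 *   id BIGINT GENERATED BY DEFAULT AS IDENTITY(START WITH 100, INCREMENT BY 1)
 *   name VARCHAR(32) DEFAULT 'unknown' NOT NULL
 *   price NUMERIC(10,2) NOT NULL PRIMARY KEY
 */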
/**
* @param type data type of column
* @param length maximum length of column
* @throws HsqlException
* @return new Expression
*/
private Expression processCreateDefaultExpression(int type, int length,
int scale) throws HsqlException {
if (type == Types.OTHER) {
throw Trace.error(Trace.WRONG_DEFAULT_CLAUSE);
}
Parser parser = new Parser(session, database, tokenizer);
Expression expr = parser.readDefaultClause(type);
expr.resolveTypes(session);
int newType = expr.getType();
if (newType == Expression.VALUE || newType == Expression.TRUE
|| newType == Expression.FALSE
|| (newType == Expression.FUNCTION
&& expr.function.isSimple)) {
Object defValTemp;
try {
defValTemp = expr.getValue(session, type);
} catch (HsqlException e) {
throw Trace.error(Trace.WRONG_DEFAULT_CLAUSE);
}
if (defValTemp != null && database.sqlEnforceStrictSize) {
try {
Column.enforceSize(defValTemp, type, length, scale, true);
} catch (HsqlException e) {
// default value is too long for fixed size column
throw Trace.error(Trace.WRONG_DEFAULT_CLAUSE);
}
}
return expr;
}
throw Trace.error(Trace.WRONG_DEFAULT_CLAUSE);
}
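/*
 * Illustrative DEFAULT clauses that pass the checks above: constant values
 * such as DEFAULT 0 or DEFAULT 'N/A', the literals TRUE / FALSE, and
 * functions the parser marks as "simple" (for instance CURRENT_TIMESTAMP,
 * assuming it is parsed as such). Other expressions are rejected with
 * WRONG_DEFAULT_CLAUSE.
 */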
public static void checkBooleanDefault(String s,
int type) throws HsqlException {
if (type != Types.BOOLEAN || s == null) {
return;
}
s = s.toUpperCase();
if (s.equals(Token.T_TRUE) || s.equals(Token.T_FALSE)) {
return;
}
if (s.equals("0") || s.equals("1")) {
return;
}
throw Trace.error(Trace.WRONG_DEFAULT_CLAUSE, s);
}
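/*
 * For example, a column declared (hypothetically) as "flag BOOLEAN DEFAULT TRUE"
 * or "flag BOOLEAN DEFAULT 1" passes this check, while "flag BOOLEAN DEFAULT 'yes'"
 * is rejected with WRONG_DEFAULT_CLAUSE.
 */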
/**
* Responsible for handling constraints section of CREATE TABLE ...
*
* @param t table
* @param constraint true if a constraint clause follows in the token stream
* @param primarykeycolumn primary columns
* @throws HsqlException
* @return list of constraints
*/
private HsqlArrayList processCreateConstraints(Table t,
boolean constraint, int[] primarykeycolumn) throws HsqlException {
String token;
HsqlArrayList tcList;
Constraint tempConst;
HsqlName pkHsqlName;
// fredt@users 20020225 - comment
// HSQLDB relies on primary index to be the first one defined
// and needs original or system added primary key before any
// non-unique index is created
tcList = new HsqlArrayList();
tempConst = new Constraint(null, primarykeycolumn, null, null,
Constraint.MAIN, Constraint.NO_ACTION,
Constraint.NO_ACTION);
// tony_lai@users 20020820 - patch 595099
pkHsqlName = null;
tcList.add(tempConst);
if (!constraint) {
return tcList;
}
while (true) {
HsqlName cname = null;
if (tokenizer.isGetThis(Token.T_CONSTRAINT)) {
token = tokenizer.getName();
String constraintSchema = tokenizer.getLongNameFirst();
if (constraintSchema != null) {
constraintSchema = session.getSchemaNameForWrite(
tokenizer.getLongNameFirst());
if (!t.getSchemaName().equals(constraintSchema)) {
throw Trace.error(
Trace.INVALID_SCHEMA_NAME_NO_SUBCLASS,
constraintSchema);
}
}
cname = database.nameManager.newHsqlName(token,
tokenizer.wasQuotedIdentifier());
}
token = tokenizer.getSimpleToken();
switch (Token.get(token)) {
case Token.PRIMARY : {
tokenizer.getThis(Token.T_KEY);
// tony_lai@users 20020820 - patch 595099
pkHsqlName = cname;
int[] cols = processColumnList(t, false);
Constraint mainConst;
mainConst = (Constraint) tcList.get(0);
if (mainConst.core.mainColArray != null) {
if (!ArrayUtil.areEqual(mainConst.core.mainColArray,
cols, cols.length, true)) {
throw Trace.error(Trace.SECOND_PRIMARY_KEY);
}
}
mainConst.core.mainColArray = cols;
mainConst.constName = pkHsqlName;
break;
}
case Token.UNIQUE : {
int[] col = processColumnList(t, false);
if (cname == null) {
cname = database.nameManager.newAutoName("CT");
}
tempConst = new Constraint(cname, col, null, null,
Constraint.UNIQUE,
Constraint.NO_ACTION,
Constraint.NO_ACTION);
tcList.add(tempConst);
break;
}
case Token.FOREIGN : {
tokenizer.getThis(Token.T_KEY);
tempConst = processCreateFK(t, cname);
if (tempConst.core.refColArray == null) {
Constraint mainConst = (Constraint) tcList.get(0);
tempConst.core.refColArray =
mainConst.core.mainColArray;
if (tempConst.core.refColArray == null) {
throw Trace.error(Trace.CONSTRAINT_NOT_FOUND,
Trace.TABLE_HAS_NO_PRIMARY_KEY);
}
}
checkFKColumnDefaults(t, tempConst);
t.checkColumnsMatch(tempConst.core.mainColArray,
tempConst.core.refTable,
tempConst.core.refColArray);
tcList.add(tempConst);
break;
}
case Token.CHECK : {
if (cname == null) {
cname = database.nameManager.newAutoName("CT");
}
tempConst = new Constraint(cname, null, null, null,
Constraint.CHECK,
Constraint.NO_ACTION,
Constraint.NO_ACTION);
processCreateCheckConstraintCondition(tempConst);
tcList.add(tempConst);
break;
}
}
token = tokenizer.getSimpleToken();
if (token.equals(Token.T_COMMA)) {
continue;
}
if (token.equals(Token.T_CLOSEBRACKET)) {
break;
}
throw Trace.error(Trace.UNEXPECTED_TOKEN, token);
}
return tcList;
}
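/*
 * Illustrative constraint clauses accepted by the loop above (names are
 * hypothetical):
 *
 *   CONSTRAINT pk_t PRIMARY KEY (id),
 *   UNIQUE (code),
 *   CONSTRAINT fk_t FOREIGN KEY (parent_id) REFERENCES parent (id),
 *   CHECK (price >= 0)
 */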
/**
* Responsible for handling check constraints section of CREATE TABLE ...
*
* @param c check constraint
* @throws HsqlException
*/
private void processCreateCheckConstraintCondition(Constraint c)
throws HsqlException {
tokenizer.getThis(Token.T_OPENBRACKET);
Parser parser = new Parser(session, database, tokenizer);
Expression condition = parser.parseExpression();
tokenizer.getThis(Token.T_CLOSEBRACKET);
c.core.check = condition;
}
/**
* Responsible for handling the execution CREATE TABLE SQL statements.
*
* @param type the table type (MEMORY, CACHED, TEXT or TEMP)
* @throws HsqlException
*/
private void processCreateTable(int type) throws HsqlException {
String token = tokenizer.getName();
HsqlName schemaname =
session.getSchemaHsqlNameForWrite(tokenizer.getLongNameFirst());
database.schemaManager.checkUserTableNotExists(session, token,
schemaname.name);
boolean isnamequoted = tokenizer.wasQuotedIdentifier();
int[] pkCols = null;
int colIndex = 0;
boolean constraint = false;
Table t = newTable(type, token, isnamequoted, schemaname);
tokenizer.getThis(Token.T_OPENBRACKET);
while (true) {
token = tokenizer.getString();
switch (Token.get(token)) {
case Token.CONSTRAINT :
case Token.PRIMARY :
case Token.FOREIGN :
case Token.UNIQUE :
case Token.CHECK :
// fredt@users : check for quoted reserved words used as column names
constraint = !tokenizer.wasQuotedIdentifier()
&& !tokenizer.wasLongName();
}
tokenizer.back();
if (constraint) {
break;
}
Column newcolumn = processCreateColumn();
t.addColumn(newcolumn);
if (newcolumn.isPrimaryKey()) {
Trace.check(pkCols == null, Trace.SECOND_PRIMARY_KEY,
newcolumn.columnName.name);
pkCols = new int[]{ colIndex };
}
token = tokenizer.getSimpleToken();
if (token.equals(Token.T_COMMA)) {
colIndex++;
continue;
}
if (token.equals(Token.T_CLOSEBRACKET)) {
break;
}
throw Trace.error(Trace.UNEXPECTED_TOKEN, token);
}
HsqlArrayList tempConstraints = processCreateConstraints(t,
constraint, pkCols);
if (tokenizer.isGetThis(Token.T_ON)) {
if (!t.isTemp) {
throw Trace.error(Trace.UNEXPECTED_TOKEN, Token.T_ON);
}
tokenizer.getThis(Token.T_COMMIT);
token = tokenizer.getSimpleToken();
if (token.equals(Token.T_DELETE)) {}
else if (token.equals(Token.T_PRESERVE)) {
t.onCommitPreserve = true;
} else {
throw Trace.error(Trace.UNEXPECTED_TOKEN, token);
}
tokenizer.getThis(Token.T_ROWS);
}
try {
session.commit();
Constraint primaryConst = (Constraint) tempConstraints.get(0);
t.createPrimaryKey(null, primaryConst.core.mainColArray, true);
if (primaryConst.core.mainColArray != null) {
if (primaryConst.constName == null) {
primaryConst.constName = t.makeSysPKName();
}
Constraint newconstraint =
new Constraint(primaryConst.constName, t,
t.getPrimaryIndex(),
Constraint.PRIMARY_KEY);
t.addConstraint(newconstraint);
database.schemaManager.registerConstraintName(
primaryConst.constName.name, t.getName());
}
for (int i = 1; i < tempConstraints.size(); i++) {
Constraint tempConst = (Constraint) tempConstraints.get(i);
if (tempConst.constType == Constraint.UNIQUE) {
TableWorks tableWorks = new TableWorks(session, t);
tableWorks.createUniqueConstraint(
tempConst.core.mainColArray, tempConst.constName);
t = tableWorks.getTable();
}
if (tempConst.constType == Constraint.FOREIGN_KEY) {
TableWorks tableWorks = new TableWorks(session, t);
tableWorks.createForeignKey(tempConst.core.mainColArray,
tempConst.core.refColArray,
tempConst.constName,
tempConst.core.refTable,
tempConst.core.deleteAction,
tempConst.core.updateAction);
t = tableWorks.getTable();
}
if (tempConst.constType == Constraint.CHECK) {
TableWorks tableWorks = new TableWorks(session, t);
tableWorks.createCheckConstraint(tempConst,
tempConst.constName);
t = tableWorks.getTable();
}
}
database.schemaManager.linkTable(t);
} catch (HsqlException e) {
// fredt@users 20020225 - comment
// if an HsqlException is thrown while creating the table, any foreign key that has
// been created leaves its modifications to the expTable in place;
// we need to undo those modifications. This should not happen in practice.
database.schemaManager.removeExportedKeys(t);
database.schemaManager.removeIndexNames(t.tableName);
database.schemaManager.removeConstraintNames(t.tableName);
throw e;
}
}
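/*
 * Illustrative DDL exercising the method above, including the ON COMMIT
 * clause that is only legal for TEMP tables (names are hypothetical):
 *
 *   CREATE TABLE invoice (
 *       id INTEGER IDENTITY,
 *       total NUMERIC(10,2) NOT NULL,
 *       CONSTRAINT chk_total CHECK (total >= 0)
 *   );
 *   CREATE GLOBAL TEMPORARY TABLE session_data (
 *       k VARCHAR(32), v VARCHAR(255)
 *   ) ON COMMIT PRESERVE ROWS;
 */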
// fredt@users 20020221 - patch 520213 by boucherb@users - self reference FK
// allows foreign keys that reference a column in the same table
/**
* @param t table
* @param cname foreign key name
* @throws HsqlException
* @return constraint
*/
private Constraint processCreateFK(Table t,
HsqlName cname) throws HsqlException {
int[] localcol;
int[] expcol;
String expTableName;
Table expTable;
String token;
localcol = processColumnList(t, false);
tokenizer.getThis(Token.T_REFERENCES);
expTableName = tokenizer.getName();
String constraintSchema = tokenizer.getLongNameFirst();
if (constraintSchema != null) {
constraintSchema =
session.getSchemaNameForWrite(tokenizer.getLongNameFirst());
if (!t.getSchemaName().equals(constraintSchema)) {
throw Trace.error(Trace.INVALID_SCHEMA_NAME_NO_SUBCLASS,
constraintSchema);
}
}
if (t.getName().name.equals(expTableName)) {
expTable = t;
} else {
expTable = database.schemaManager.getTable(session, expTableName,
t.getSchemaName());
}
expcol = null;
token = tokenizer.getSimpleToken();
tokenizer.back();
if (token.equals(Token.T_OPENBRACKET)) {
expcol = processColumnList(expTable, false);
} else {
if (expTable.getPrimaryKey() == null) {
// getPrimaryKey() == null is true while creating the table
// fredt - FK statement is part of CREATE TABLE and is self-referencing
// reference must be to same table being created
// it is resolved in the calling method
Trace.check(t == expTable, Trace.TABLE_HAS_NO_PRIMARY_KEY);
} else {
if (expTable.hasPrimaryKey()) {
expcol = expTable.getPrimaryKey();
} else {
throw Trace.error(Trace.CONSTRAINT_NOT_FOUND,
Trace.TABLE_HAS_NO_PRIMARY_KEY);
}
}
}
token = tokenizer.getSimpleToken();
// -- In a while loop we parse a maximum of two
// -- "ON" statements following the foreign key
// -- definition; each can be
// -- ON [UPDATE|DELETE] [NO ACTION|RESTRICT|CASCADE|SET [NULL|DEFAULT]]
int deleteAction = Constraint.NO_ACTION;
int updateAction = Constraint.NO_ACTION;
while (token.equals(Token.T_ON)) {
token = tokenizer.getSimpleToken();
if (deleteAction == Constraint.NO_ACTION
&& token.equals(Token.T_DELETE)) {
token = tokenizer.getSimpleToken();
if (token.equals(Token.T_SET)) {
token = tokenizer.getSimpleToken();
if (token.equals(Token.T_DEFAULT)) {
deleteAction = Constraint.SET_DEFAULT;
} else if (token.equals(Token.T_NULL)) {
deleteAction = Constraint.SET_NULL;
} else {
throw Trace.error(Trace.UNEXPECTED_TOKEN, token);
}
} else if (token.equals(Token.T_CASCADE)) {
deleteAction = Constraint.CASCADE;
} else if (token.equals(Token.T_RESTRICT)) {
// LEGACY compatibility/usability
// - same as NO ACTION or nothing at all
} else {
tokenizer.matchThis(Token.T_NO);
tokenizer.getThis(Token.T_ACTION);
}
} else if (updateAction == Constraint.NO_ACTION
&& token.equals(Token.T_UPDATE)) {
token = tokenizer.getSimpleToken();
if (token.equals(Token.T_SET)) {
token = tokenizer.getSimpleToken();
if (token.equals(Token.T_DEFAULT)) {
updateAction = Constraint.SET_DEFAULT;
} else if (token.equals(Token.T_NULL)) {
updateAction = Constraint.SET_NULL;
} else {
throw Trace.error(Trace.UNEXPECTED_TOKEN, token);
}
} else if (token.equals(Token.T_CASCADE)) {
updateAction = Constraint.CASCADE;
} else if (token.equals(Token.T_RESTRICT)) {
// LEGACY compatibility/usability
// - same as NO ACTION or nothing at all
} else {
tokenizer.matchThis(Token.T_NO);
tokenizer.getThis(Token.T_ACTION);
}
} else {
throw Trace.error(Trace.UNEXPECTED_TOKEN, token);
}
token = tokenizer.getSimpleToken();
}
tokenizer.back();
if (cname == null) {
cname = database.nameManager.newAutoName("FK");
}
return new Constraint(cname, localcol, expTable, expcol,
Constraint.FOREIGN_KEY, deleteAction,
updateAction);
}
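/*
 * Illustrative foreign key clause covering the referential actions parsed
 * above (names are hypothetical):
 *
 *   FOREIGN KEY (dept_id) REFERENCES dept (id)
 *       ON DELETE SET NULL ON UPDATE CASCADE
 *
 * Omitting the column list after the referenced table uses its primary key;
 * RESTRICT and NO ACTION are both treated as "no action".
 */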
/**
* Responsible for handling the execution CREATE VIEW SQL statements.
*
* @throws HsqlException
*/
private void processCreateView() throws HsqlException {
String name = tokenizer.getName();
HsqlName schemaname =
session.getSchemaHsqlNameForWrite(tokenizer.getLongNameFirst());
int logposition = tokenizer.getPartMarker();
database.schemaManager.checkUserViewNotExists(session, name,
schemaname.name);
HsqlName viewHsqlName = database.nameManager.newHsqlName(name,
tokenizer.wasQuotedIdentifier());
viewHsqlName.schema = schemaname;
HsqlName[] colList = null;
if (tokenizer.isGetThis(Token.T_OPENBRACKET)) {
try {
HsqlArrayList list = Parser.getColumnNames(database, null,
tokenizer, true);
colList = new HsqlName[list.size()];
colList = (HsqlName[]) list.toArray(colList);
} catch (HsqlException e) {
// fredt - a bug in 1.8.0.0 and previous versions causes view
// definitions to script without double quotes around column names
// in certain cases; the workaround here discards the column
// names
if (database.isStoredFileAccess()
&& session.isProcessingScript()) {
while (true) {
String token = tokenizer.getString();
if (token.equals(Token.T_CLOSEBRACKET)
|| token.equals("")) {
break;
}
}
} else {
throw e;
}
}
}
tokenizer.getThis(Token.T_AS);
tokenizer.setPartMarker();
Parser parser = new Parser(session, database, tokenizer);
int brackets = parser.parseOpenBracketsSelect();
Select select;
// accept ORDER BY or ORDER BY with LIMIT - accept unions
select = parser.parseSelect(brackets, true, false, true, true);
if (select.sIntoTable != null) {
throw (Trace.error(Trace.INVALID_IDENTIFIER, Token.INTO));
}
select.prepareResult(session);
View view = new View(session, database, viewHsqlName,
tokenizer.getLastPart(), colList);
session.commit();
database.schemaManager.linkTable(view);
tokenizer.setPartMarker(logposition);
}
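/*
 * Illustrative view DDL (names are hypothetical). The column list is
 * optional, and the SELECT may include unions and ORDER BY / LIMIT:
 *
 *   CREATE VIEW v_totals (cust, total) AS
 *       SELECT customer_id, SUM(amount) FROM orders GROUP BY customer_id
 */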
/**
* Responsible for handling tail of ALTER TABLE ... RENAME ...
* @param t table
* @throws HsqlException
*/
private void processAlterTableRename(Table t) throws HsqlException {
String schema = t.getSchemaName();
String newName;
boolean isquoted;
// ensures that if temp table, it also belongs to this session
/*
if (!t.equals(session, name)) {
throw Trace.error(Trace.TABLE_NOT_FOUND);
}
*/
tokenizer.getThis(Token.T_TO);
newName = tokenizer.getName();
String newSchema = tokenizer.getLongNameFirst();
isquoted = tokenizer.wasQuotedIdentifier();
newSchema = newSchema == null ? schema
: session.getSchemaNameForWrite(
newSchema);
if (!schema.equals(newSchema)) {
throw Trace.error(Trace.INVALID_SCHEMA_NAME_NO_SUBCLASS);
}
database.schemaManager.checkUserTableNotExists(session, newName,
schema);
session.commit();
session.setScripting(true);
database.schemaManager.renameTable(session, t, newName, isquoted);
}
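/*
 * For example (illustrative): ALTER TABLE old_name RENAME TO new_name.
 * A schema prefix on the new name is only accepted if it names the table's
 * current schema.
 */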
/**
* Handles ALTER TABLE statements. <p>
*
* ALTER TABLE <name> RENAME TO
* ALTER INDEX <name> RENAME TO
*
* ALTER TABLE <name> ADD CONSTRAINT FOREIGN KEY (