Fisheye: Tag 1.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/Cache.java'.
Fisheye: No comparison available. Pass `N' to diff?
Index: 3rdParty_sources/hibernate-core/org/hibernate/CacheMode.java
===================================================================
RCS file: /usr/local/cvsroot/3rdParty_sources/hibernate-core/org/hibernate/CacheMode.java,v
diff -u -r1.1 -r1.1.2.1
--- 3rdParty_sources/hibernate-core/org/hibernate/CacheMode.java 17 Aug 2012 14:36:38 -0000 1.1
+++ 3rdParty_sources/hibernate-core/org/hibernate/CacheMode.java 30 Jul 2014 15:51:00 -0000 1.1.2.1
@@ -1,10 +1,10 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
- * Copyright (c) 2008, Red Hat Middleware LLC or third-party contributors as
+ * Copyright (c) 2011, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
- * distributed under license by Red Hat Middleware LLC.
+ * distributed under license by Red Hat Inc..
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
@@ -20,82 +20,90 @@
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
- *
*/
package org.hibernate;
-import java.io.Serializable;
-import java.util.HashMap;
-import java.util.Map;
-
/**
- * Controls how the session interacts with the second-level
- * cache and query cache.
+ * Controls how the session interacts with the second-level cache and query cache.
*
- * @see Session#setCacheMode(CacheMode)
* @author Gavin King
+ * @author Strong Liu
+ * @see Session#setCacheMode(CacheMode)
*/
-public final class CacheMode implements Serializable {
- private final String name;
- private final boolean isPutEnabled;
- private final boolean isGetEnabled;
- private static final Map INSTANCES = new HashMap();
-
- private CacheMode(String name, boolean isPutEnabled, boolean isGetEnabled) {
- this.name=name;
- this.isPutEnabled = isPutEnabled;
- this.isGetEnabled = isGetEnabled;
- }
- public String toString() {
- return name;
- }
- public boolean isPutEnabled() {
- return isPutEnabled;
- }
- public boolean isGetEnabled() {
- return isGetEnabled;
- }
+public enum CacheMode {
/**
- * The session may read items from the cache, and add items to the cache
+ * The session may read items from the cache, and add items to the cache.
*/
- public static final CacheMode NORMAL = new CacheMode("NORMAL", true, true);
+ NORMAL( true, true ),
/**
* The session will never interact with the cache, except to invalidate
- * cache items when updates occur
+ * cache items when updates occur.
*/
- public static final CacheMode IGNORE = new CacheMode("IGNORE", false, false);
+ IGNORE( false, false ),
/**
- * The session may read items from the cache, but will not add items,
- * except to invalidate items when updates occur
+ * The session may read items from the cache, but will not add items,
+ * except to invalidate items when updates occur.
*/
- public static final CacheMode GET = new CacheMode("GET", false, true);
+ GET( false, true ),
/**
* The session will never read items from the cache, but will add items
* to the cache as it reads them from the database.
*/
- public static final CacheMode PUT = new CacheMode("PUT", true, false);
-
+ PUT( true, false ),
/**
* The session will never read items from the cache, but will add items
- * to the cache as it reads them from the database. In this mode, the
+ * to the cache as it reads them from the database. In this mode, the
* effect of hibernate.cache.use_minimal_puts is bypassed, in
- * order to force a cache refresh
+ * order to force a cache refresh.
*/
- public static final CacheMode REFRESH = new CacheMode("REFRESH", true, false);
-
- static {
- INSTANCES.put( NORMAL.name, NORMAL );
- INSTANCES.put( IGNORE.name, IGNORE );
- INSTANCES.put( GET.name, GET );
- INSTANCES.put( PUT.name, PUT );
- INSTANCES.put( REFRESH.name, REFRESH );
+ REFRESH( true, false );
+
+
+ private final boolean isPutEnabled;
+ private final boolean isGetEnabled;
+
+ private CacheMode( boolean isPutEnabled, boolean isGetEnabled) {
+ this.isPutEnabled = isPutEnabled;
+ this.isGetEnabled = isGetEnabled;
}
- private Object readResolve() {
- return INSTANCES.get( name );
+ /**
+ * Does this cache mode indicate that reads are allowed?
+ *
+ * @return {@code true} if cache reads are allowed; {@code false} otherwise.
+ */
+ public boolean isGetEnabled() {
+ return isGetEnabled;
}
- public static CacheMode parse(String name) {
- return ( CacheMode ) INSTANCES.get( name );
+ /**
+ * Does this cache mode indicate that writes are allowed?
+ *
+ * @return {@code true} if cache writes are allowed; {@code false} otherwise.
+ */
+ public boolean isPutEnabled() {
+ return isPutEnabled;
}
+
+ /**
+ * Used to interpret externalized forms of this enum.
+ *
+ * @param setting The externalized form.
+ *
+ * @return The matching enum value.
+ *
+ * @throws MappingException Indicates the external form was not recognized as a valid enum value.
+ */
+ public static CacheMode interpretExternalSetting(String setting) {
+ if (setting == null) {
+ return null;
+ }
+
+ try {
+ return CacheMode.valueOf( setting.toUpperCase() );
+ }
+ catch ( IllegalArgumentException e ) {
+ throw new MappingException( "Unknown Cache Mode: " + setting );
+ }
+ }
}
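
For reference, a minimal usage sketch of the enum-based CacheMode introduced above; the applyCacheMode() helper and the idea of feeding it an externalized setting string are illustrative assumptions, while interpretExternalSetting() and Session#setCacheMode(CacheMode) come from the patched source.

import org.hibernate.CacheMode;
import org.hibernate.Session;

public class CacheModeExample {
    // Resolve an externalized cache-mode name and apply it to a Session.
    public static void applyCacheMode(Session session, String externalSetting) {
        // interpretExternalSetting() returns null for null input and throws
        // MappingException (unchecked) for unrecognized values.
        CacheMode mode = CacheMode.interpretExternalSetting(externalSetting);
        if (mode == null) {
            mode = CacheMode.NORMAL;
        }
        session.setCacheMode(mode);
    }
}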
Index: 3rdParty_sources/hibernate-core/org/hibernate/ConnectionReleaseMode.java
===================================================================
RCS file: /usr/local/cvsroot/3rdParty_sources/hibernate-core/org/hibernate/ConnectionReleaseMode.java,v
diff -u -r1.1 -r1.1.2.1
--- 3rdParty_sources/hibernate-core/org/hibernate/ConnectionReleaseMode.java 17 Aug 2012 14:36:40 -0000 1.1
+++ 3rdParty_sources/hibernate-core/org/hibernate/ConnectionReleaseMode.java 30 Jul 2014 15:51:00 -0000 1.1.2.1
@@ -1,10 +1,10 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
- * Copyright (c) 2008, Red Hat Middleware LLC or third-party contributors as
+ * Copyright (c) 2008, 2013, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
- * distributed under license by Red Hat Middleware LLC.
+ * distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
@@ -20,27 +20,23 @@
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
- *
*/
package org.hibernate;
-import java.io.Serializable;
-
/**
* Defines the various policies by which Hibernate might release its underlying
* JDBC connection.
*
* @author Steve Ebersole
*/
-public class ConnectionReleaseMode implements Serializable {
-
+public enum ConnectionReleaseMode{
/**
* Indicates that JDBC connection should be aggressively released after each
* SQL statement is executed. In this mode, the application must
* explicitly close all iterators and scrollable results. This mode may
* only be used with a JTA datasource.
*/
- public static final ConnectionReleaseMode AFTER_STATEMENT = new ConnectionReleaseMode( "after_statement" );
+ AFTER_STATEMENT,
/**
* Indicates that JDBC connections should be released after each transaction
@@ -49,52 +45,22 @@
*
* This is the default mode starting in 3.1; was previously {@link #ON_CLOSE}.
*/
- public static final ConnectionReleaseMode AFTER_TRANSACTION = new ConnectionReleaseMode( "after_transaction" );
+ AFTER_TRANSACTION,
/**
* Indicates that connections should only be released when the Session is explicitly closed
* or disconnected; this is the legacy (Hibernate2 and pre-3.1) behavior.
*/
- public static final ConnectionReleaseMode ON_CLOSE = new ConnectionReleaseMode( "on_close" );
+ ON_CLOSE;
-
- private String name;
-
- private ConnectionReleaseMode(String name) {
- this.name = name;
- }
-
/**
- * Override of Object.toString(). Returns the release mode name.
+ * Alias for {@link ConnectionReleaseMode#valueOf(String)} using upper-case version of the incoming name.
*
- * @return The release mode name.
- */
- public String toString() {
- return name;
- }
-
- /**
- * Determine the correct ConnectionReleaseMode instance based on the given
- * name.
+ * @param name The name to parse
*
- * @param modeName The release mode name.
- * @return The appropriate ConnectionReleaseMode instance
- * @throws HibernateException Indicates the modeName param did not match any known modes.
+ * @return The matched enum value.
*/
- public static ConnectionReleaseMode parse(String modeName) throws HibernateException {
- if ( AFTER_STATEMENT.name.equals( modeName ) ) {
- return AFTER_STATEMENT;
- }
- else if ( AFTER_TRANSACTION.name.equals( modeName ) ) {
- return AFTER_TRANSACTION;
- }
- else if ( ON_CLOSE.name.equals( modeName ) ) {
- return ON_CLOSE;
- }
- throw new HibernateException( "could not determine appropriate connection release mode [" + modeName + "]" );
+ public static ConnectionReleaseMode parse(final String name) {
+ return ConnectionReleaseMode.valueOf( name.toUpperCase() );
}
-
- private Object readResolve() {
- return parse( name );
- }
}
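
A small sketch of the parsing behavior shown above: parse() upper-cases the incoming name and delegates to the generated valueOf(), so the legacy lower-case configuration names still resolve. The sample values are taken from the legacy names in the removed code.

import org.hibernate.ConnectionReleaseMode;

public class ConnectionReleaseModeExample {
    public static void main(String[] args) {
        ConnectionReleaseMode legacy = ConnectionReleaseMode.parse("after_transaction");
        ConnectionReleaseMode direct = ConnectionReleaseMode.valueOf("ON_CLOSE");
        // Prints: AFTER_TRANSACTION / ON_CLOSE
        System.out.println(legacy + " / " + direct);
    }
}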
Index: 3rdParty_sources/hibernate-core/org/hibernate/Criteria.java
===================================================================
RCS file: /usr/local/cvsroot/3rdParty_sources/hibernate-core/org/hibernate/Criteria.java,v
diff -u -r1.1 -r1.1.2.1
--- 3rdParty_sources/hibernate-core/org/hibernate/Criteria.java 17 Aug 2012 14:36:40 -0000 1.1
+++ 3rdParty_sources/hibernate-core/org/hibernate/Criteria.java 30 Jul 2014 15:51:00 -0000 1.1.2.1
@@ -1,10 +1,10 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
- * Copyright (c) 2008, Red Hat Middleware LLC or third-party contributors as
+ * Copyright (c) 2008, 2013, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
- * distributed under license by Red Hat Middleware LLC.
+ * distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
@@ -20,7 +20,6 @@
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
- *
*/
package org.hibernate;
@@ -30,8 +29,10 @@
import org.hibernate.criterion.Criterion;
import org.hibernate.criterion.Order;
import org.hibernate.criterion.Projection;
+import org.hibernate.sql.JoinType;
import org.hibernate.transform.ResultTransformer;
+
/**
* Criteria is a simplified API for retrieving entities
* by composing Criterion objects. This is a very
@@ -119,7 +120,7 @@
* @return this (for method chaining)
*/
public Criteria add(Criterion criterion);
-
+
/**
* Add an {@link Order ordering} to the result set.
*
@@ -135,62 +136,133 @@
*
* @param associationPath a dot seperated property path
* @param mode The fetch mode for the referenced association
+ *
* @return this (for method chaining)
+ *
+ * @throws HibernateException Indicates a problem applying the given fetch mode
*/
public Criteria setFetchMode(String associationPath, FetchMode mode) throws HibernateException;
/**
- * Set the lock mode of the current entity
+ * Set the lock mode of the current entity.
*
* @param lockMode The lock mode to be applied
+ *
* @return this (for method chaining)
*/
public Criteria setLockMode(LockMode lockMode);
/**
- * Set the lock mode of the aliased entity
+ * Set the lock mode of the aliased entity.
*
* @param alias The previously assigned alias representing the entity to
- * which the given lock mode should apply.
+ * which the given lock mode should apply.
* @param lockMode The lock mode to be applied
+ *
* @return this (for method chaining)
*/
public Criteria setLockMode(String alias, LockMode lockMode);
/**
* Join an association, assigning an alias to the joined association.
*
- * Functionally equivalent to {@link #createAlias(String, String, int)} using
- * {@link #INNER_JOIN} for the joinType.
+ * Functionally equivalent to {@link #createAlias(String, String, JoinType )} using
+ * {@link JoinType#INNER_JOIN} for the joinType.
*
* @param associationPath A dot-seperated property path
* @param alias The alias to assign to the joined association (for later reference).
+ *
* @return this (for method chaining)
+ *
+ * @throws HibernateException Indicates a problem creating the sub criteria
*/
public Criteria createAlias(String associationPath, String alias) throws HibernateException;
/**
* Join an association using the specified join-type, assigning an alias
* to the joined association.
*
+ * The joinType is expected to be one of {@link JoinType#INNER_JOIN} (the default),
+ * {@link JoinType#FULL_JOIN}, or {@link JoinType#LEFT_OUTER_JOIN}.
+ *
+ * @param associationPath A dot-seperated property path
+ * @param alias The alias to assign to the joined association (for later reference).
+ * @param joinType The type of join to use.
+ *
+ * @return this (for method chaining)
+ *
+ * @throws HibernateException Indicates a problem creating the sub criteria
+ */
+ public Criteria createAlias(String associationPath, String alias, JoinType joinType) throws HibernateException;
+
+ /**
+ * Join an association using the specified join-type, assigning an alias
+ * to the joined association.
+ *
* The joinType is expected to be one of {@link #INNER_JOIN} (the default),
* {@link #FULL_JOIN}, or {@link #LEFT_JOIN}.
*
* @param associationPath A dot-seperated property path
* @param alias The alias to assign to the joined association (for later reference).
* @param joinType The type of join to use.
+ *
* @return this (for method chaining)
+ *
+ * @throws HibernateException Indicates a problem creating the sub criteria
+ * @deprecated use {@link #createAlias(String, String, org.hibernate.sql.JoinType)}
*/
+ @Deprecated
public Criteria createAlias(String associationPath, String alias, int joinType) throws HibernateException;
/**
+ * Join an association using the specified join-type, assigning an alias
+ * to the joined association.
+ *
+ * The joinType is expected to be one of {@link JoinType#INNER_JOIN} (the default),
+ * {@link JoinType#FULL_JOIN}, or {@link JoinType#LEFT_OUTER_JOIN}.
+ *
+ * @param associationPath A dot-seperated property path
+ * @param alias The alias to assign to the joined association (for later reference).
+ * @param joinType The type of join to use.
+ * @param withClause The criteria to be added to the join condition (ON clause)
+ *
+ * @return this (for method chaining)
+ *
+ * @throws HibernateException Indicates a problem creating the sub criteria
+ */
+ public Criteria createAlias(String associationPath, String alias, JoinType joinType, Criterion withClause) throws HibernateException;
+
+ /**
+ * Join an association using the specified join-type, assigning an alias
+ * to the joined association.
+ *
+ * The joinType is expected to be one of {@link #INNER_JOIN} (the default),
+ * {@link #FULL_JOIN}, or {@link #LEFT_JOIN}.
+ *
+ * @param associationPath A dot-seperated property path
+ * @param alias The alias to assign to the joined association (for later reference).
+ * @param joinType The type of join to use.
+ * @param withClause The criteria to be added to the join condition (ON clause)
+ *
+ * @return this (for method chaining)
+ *
+ * @throws HibernateException Indicates a problem creating the sub criteria
+ * @deprecated use {@link #createAlias(String, String, JoinType, Criterion)}
+ */
+ @Deprecated
+ public Criteria createAlias(String associationPath, String alias, int joinType, Criterion withClause) throws HibernateException;
+
+ /**
* Create a new Criteria, "rooted" at the associated entity.
*
- * Functionally equivalent to {@link #createCriteria(String, int)} using
- * {@link #INNER_JOIN} for the joinType.
+ * Functionally equivalent to {@link #createCriteria(String, org.hibernate.sql.JoinType)} using
+ * {@link JoinType#INNER_JOIN} for the joinType.
*
* @param associationPath A dot-seperated property path
+ *
* @return the created "sub criteria"
+ *
+ * @throws HibernateException Indicates a problem creating the sub criteria
*/
public Criteria createCriteria(String associationPath) throws HibernateException;
@@ -200,20 +272,41 @@
*
* @param associationPath A dot-seperated property path
* @param joinType The type of join to use.
+ *
* @return the created "sub criteria"
+ *
+ * @throws HibernateException Indicates a problem creating the sub criteria
*/
+ public Criteria createCriteria(String associationPath, JoinType joinType) throws HibernateException;
+
+ /**
+ * Create a new Criteria, "rooted" at the associated entity, using the
+ * specified join type.
+ *
+ * @param associationPath A dot-seperated property path
+ * @param joinType The type of join to use.
+ *
+ * @return the created "sub criteria"
+ *
+ * @throws HibernateException Indicates a problem creating the sub criteria
+ * @deprecated use {@link #createAlias(String, String, org.hibernate.sql.JoinType)}
+ */
+ @Deprecated
public Criteria createCriteria(String associationPath, int joinType) throws HibernateException;
/**
* Create a new Criteria, "rooted" at the associated entity,
* assigning the given alias.
*
- * Functionally equivalent to {@link #createCriteria(String, String, int)} using
- * {@link #INNER_JOIN} for the joinType.
+ * Functionally equivalent to {@link #createCriteria(String, String, org.hibernate.sql.JoinType)} using
+ * {@link JoinType#INNER_JOIN} for the joinType.
*
* @param associationPath A dot-seperated property path
* @param alias The alias to assign to the joined association (for later reference).
+ *
* @return the created "sub criteria"
+ *
+ * @throws HibernateException Indicates a problem creating the sub criteria
*/
public Criteria createCriteria(String associationPath, String alias) throws HibernateException;
@@ -224,11 +317,63 @@
* @param associationPath A dot-seperated property path
* @param alias The alias to assign to the joined association (for later reference).
* @param joinType The type of join to use.
+ *
* @return the created "sub criteria"
+ *
+ * @throws HibernateException Indicates a problem creating the sub criteria
*/
+ public Criteria createCriteria(String associationPath, String alias, JoinType joinType) throws HibernateException;
+
+ /**
+ * Create a new Criteria, "rooted" at the associated entity,
+ * assigning the given alias and using the specified join type.
+ *
+ * @param associationPath A dot-seperated property path
+ * @param alias The alias to assign to the joined association (for later reference).
+ * @param joinType The type of join to use.
+ *
+ * @return the created "sub criteria"
+ *
+ * @throws HibernateException Indicates a problem creating the sub criteria
+ * @deprecated use {@link #createCriteria(String, org.hibernate.sql.JoinType)}
+ */
+ @Deprecated
public Criteria createCriteria(String associationPath, String alias, int joinType) throws HibernateException;
+
/**
+ * Create a new Criteria, "rooted" at the associated entity,
+ * assigning the given alias and using the specified join type.
+ *
+ * @param associationPath A dot-seperated property path
+ * @param alias The alias to assign to the joined association (for later reference).
+ * @param joinType The type of join to use.
+ * @param withClause The criteria to be added to the join condition (ON clause)
+ *
+ * @return the created "sub criteria"
+ *
+ * @throws HibernateException Indicates a problem creating the sub criteria
+ */
+ public Criteria createCriteria(String associationPath, String alias, JoinType joinType, Criterion withClause) throws HibernateException;
+
+ /**
+ * Create a new Criteria, "rooted" at the associated entity,
+ * assigning the given alias and using the specified join type.
+ *
+ * @param associationPath A dot-seperated property path
+ * @param alias The alias to assign to the joined association (for later reference).
+ * @param joinType The type of join to use.
+ * @param withClause The criteria to be added to the join condition (ON clause)
+ *
+ * @return the created "sub criteria"
+ *
+ * @throws HibernateException Indicates a problem creating the sub criteria
+ * @deprecated use {@link #createCriteria(String, String, org.hibernate.sql.JoinType, org.hibernate.criterion.Criterion)}
+ */
+ @Deprecated
+ public Criteria createCriteria(String associationPath, String alias, int joinType, Criterion withClause) throws HibernateException;
+
+ /**
* Set a strategy for handling the query results. This determines the
* "shape" of the query result.
*
@@ -249,16 +394,71 @@
* @return this (for method chaining)
*/
public Criteria setMaxResults(int maxResults);
-
+
/**
* Set the first result to be retrieved.
*
* @param firstResult the first result to retrieve, numbered from 0
* @return this (for method chaining)
*/
public Criteria setFirstResult(int firstResult);
-
+
/**
+ * Was the read-only/modifiable mode explicitly initialized?
+ *
+ * @return true, the read-only/modifiable mode was explicitly initialized; false, otherwise.
+ *
+ * @see Criteria#setReadOnly(boolean)
+ */
+ public boolean isReadOnlyInitialized();
+
+ /**
+ * Should entities and proxies loaded by this Criteria be put in read-only mode? If the
+ * read-only/modifiable setting was not initialized, then the default
+ * read-only/modifiable setting for the persistence context is returned instead.
+ * @see Criteria#setReadOnly(boolean)
+ * @see org.hibernate.engine.spi.PersistenceContext#isDefaultReadOnly()
+ *
+ * The read-only/modifiable setting has no impact on entities/proxies returned by the
+ * Criteria that existed in the session before the Criteria was executed.
+ *
+ * @return true, entities and proxies loaded by the criteria will be put in read-only mode
+ * false, entities and proxies loaded by the criteria will be put in modifiable mode
+	 * @throws IllegalStateException if isReadOnlyInitialized() returns false
+ * and this Criteria is not associated with a session.
+ * @see Criteria#isReadOnlyInitialized()
+ */
+ public boolean isReadOnly();
+
+ /**
+ * Set the read-only/modifiable mode for entities and proxies
+ * loaded by this Criteria. This setting overrides the default setting
+ * for the persistence context.
+ * @see org.hibernate.engine.spi.PersistenceContext#isDefaultReadOnly()
+ *
+ * To set the default read-only/modifiable setting used for
+ * entities and proxies that are loaded into the session:
+ * @see org.hibernate.engine.spi.PersistenceContext#setDefaultReadOnly(boolean)
+ * @see org.hibernate.Session#setDefaultReadOnly(boolean)
+ *
+ * Read-only entities are not dirty-checked and snapshots of persistent
+ * state are not maintained. Read-only entities can be modified, but
+ * changes are not persisted.
+ *
+ * When a proxy is initialized, the loaded entity will have the same
+ * read-only/modifiable setting as the uninitialized
+ * proxy has, regardless of the session's current setting.
+ *
+ * The read-only/modifiable setting has no impact on entities/proxies
+ * returned by the criteria that existed in the session before the criteria was executed.
+ *
+ * @param readOnly true, entities and proxies loaded by the criteria will be put in read-only mode
+ * false, entities and proxies loaded by the criteria will be put in modifiable mode
+ * @return {@code this}, for method chaining
+ */
+ public Criteria setReadOnly(boolean readOnly);
+
+ /**
* Set a fetch size for the underlying JDBC query.
*
* @param fetchSize the fetch size
@@ -306,6 +506,18 @@
* @return this (for method chaining)
*/
public Criteria setComment(String comment);
+
+
+ /**
+ * Add a DB query hint to the SQL. These differ from JPA's {@link javax.persistence.QueryHint}, which is specific
+ * to the JPA implementation and ignores DB vendor-specific hints. Instead, these are intended solely for the
+ * vendor-specific hints, such as Oracle's optimizers. Multiple query hints are supported; the Dialect will
+ * determine concatenation and placement.
+ *
+ * @param hint The database specific query hint to add.
+ * @return this (for method chaining)
+ */
+ public Criteria addQueryHint(String hint);
/**
* Override the flush mode for this particular query.
@@ -327,14 +539,20 @@
* Get the results.
*
* @return The list of matched query results.
+ *
+ * @throws HibernateException Indicates a problem either translating the criteria to SQL,
+ * exeucting the SQL or processing the SQL results.
*/
public List list() throws HibernateException;
-
+
/**
- * Get the results as an instance of {@link ScrollableResults}
+ * Get the results as an instance of {@link ScrollableResults}.
*
* @return The {@link ScrollableResults} representing the matched
* query results.
+ *
+ * @throws HibernateException Indicates a problem either translating the criteria to SQL,
+ * exeucting the SQL or processing the SQL results.
*/
public ScrollableResults scroll() throws HibernateException;
@@ -344,8 +562,12 @@
*
* @param scrollMode Indicates the type of underlying database cursor to
* request.
+ *
* @return The {@link ScrollableResults} representing the matched
* query results.
+ *
+ * @throws HibernateException Indicates a problem either translating the criteria to SQL,
+ * exeucting the SQL or processing the SQL results.
*/
public ScrollableResults scroll(ScrollMode scrollMode) throws HibernateException;
@@ -358,4 +580,4 @@
*/
public Object uniqueResult() throws HibernateException;
-}
\ No newline at end of file
+}
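
A minimal sketch combining the JoinType-based createAlias() variant with the new setReadOnly() and addQueryHint() methods documented above. The "Order" entity, its "customer" association, the "c.name" property, and the Oracle-style hint string are hypothetical mappings used only for illustration.

import java.util.List;

import org.hibernate.Criteria;
import org.hibernate.Session;
import org.hibernate.criterion.Restrictions;
import org.hibernate.sql.JoinType;

public class CriteriaJoinTypeExample {
    public static List<?> findOrders(Session session) {
        Criteria criteria = session.createCriteria("Order")
                // left outer join via the new JoinType-based overload
                .createAlias("customer", "c", JoinType.LEFT_OUTER_JOIN)
                .add(Restrictions.eq("c.name", "Acme"))
                // entities/proxies loaded by this Criteria are put in read-only mode
                .setReadOnly(true)
                // vendor-specific hint; placement is decided by the Dialect
                .addQueryHint("FIRST_ROWS(10)");
        return criteria.list();
    }
}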
Index: 3rdParty_sources/hibernate-core/org/hibernate/DuplicateMappingException.java
===================================================================
RCS file: /usr/local/cvsroot/3rdParty_sources/hibernate-core/org/hibernate/DuplicateMappingException.java,v
diff -u -r1.1 -r1.1.2.1
--- 3rdParty_sources/hibernate-core/org/hibernate/DuplicateMappingException.java 17 Aug 2012 14:36:39 -0000 1.1
+++ 3rdParty_sources/hibernate-core/org/hibernate/DuplicateMappingException.java 30 Jul 2014 15:50:59 -0000 1.1.2.1
@@ -1,10 +1,10 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
- * Copyright (c) 2008, Red Hat Middleware LLC or third-party contributors as
+ * Copyright (c) 2008, 2011, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
- * distributed under license by Red Hat Middleware LLC.
+ * distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
@@ -20,31 +20,90 @@
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
- *
*/
package org.hibernate;
/**
- * Raised whenever a duplicate for a certain type occurs.
- * Duplicate class, table, property name etc.
+ * Raised whenever a duplicate for a certain type occurs. Duplicate class, table, property name etc.
*
* @author Max Rydahl Andersen
- *
+ * @author Steve Ebersole
*/
public class DuplicateMappingException extends MappingException {
+ /**
+ * Enumeration of the types of things that can be duplicated.
+ */
+ public static enum Type {
+ /**
+ * A duplicate entity definition was encountered.
+ */
+ ENTITY,
+ /**
+ * A duplicate table definition was encountered.
+ */
+ TABLE,
+ /**
+ * A duplicate property/attribute definition was encountered.
+ */
+ PROPERTY,
+ /**
+ * A duplicate column definition was encountered.
+ */
+ COLUMN
+ }
private final String name;
private final String type;
+ /**
+ * Creates a DuplicateMappingException using the given type and name.
+ *
+ * @param type The type of the duplicated thing.
+ * @param name The name of the duplicated thing.
+ */
+ public DuplicateMappingException(Type type, String name) {
+ this( type.name(), name );
+ }
+
+ /**
+ * Creates a DuplicateMappingException using the given type and name.
+ *
+ * @param type The type of the duplicated thing.
+ * @param name The name of the duplicated thing.
+ *
+ * @deprecated Use the for taking {@link Type} instead.
+ */
+ @Deprecated
+ public DuplicateMappingException(String type, String name) {
+ this( "Duplicate " + type + " mapping " + name, type, name );
+ }
+
+ /**
+ * Creates a DuplicateMappingException using the given customMessage, type and name.
+ *
+ * @param customMessage A custom exception message explaining the exception condition
+ * @param type The type of the duplicated thing.
+ * @param name The name of the duplicated thing.
+ */
+ public DuplicateMappingException(String customMessage, Type type, String name) {
+ this( customMessage, type.name(), name );
+ }
+
+ /**
+ * Creates a DuplicateMappingException using the given customMessage, type and name.
+ *
+ * @param customMessage A custom exception message explaining the exception condition
+ * @param type The type of the duplicated thing.
+ * @param name The name of the duplicated thing.
+ *
+ * @deprecated Use the for taking {@link Type} instead.
+ */
+ @Deprecated
public DuplicateMappingException(String customMessage, String type, String name) {
- super(customMessage);
+ super( customMessage );
this.type=type;
this.name=name;
}
-
- public DuplicateMappingException(String type, String name) {
- this("Duplicate " + type + " mapping " + name, type, name);
- }
public String getType() {
return type;
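
A short sketch of the new Type-based constructor shown above; the entity name "com.example.Order" is illustrative.

import org.hibernate.DuplicateMappingException;

public class DuplicateMappingExample {
    public static void main(String[] args) {
        try {
            throw new DuplicateMappingException(
                    DuplicateMappingException.Type.ENTITY, "com.example.Order");
        }
        catch (DuplicateMappingException e) {
            // Prints: Duplicate ENTITY mapping com.example.Order [ENTITY]
            System.out.println(e.getMessage() + " [" + e.getType() + "]");
        }
    }
}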
Index: 3rdParty_sources/hibernate-core/org/hibernate/EntityMode.java
===================================================================
RCS file: /usr/local/cvsroot/3rdParty_sources/hibernate-core/org/hibernate/EntityMode.java,v
diff -u -r1.1 -r1.1.2.1
--- 3rdParty_sources/hibernate-core/org/hibernate/EntityMode.java 17 Aug 2012 14:36:40 -0000 1.1
+++ 3rdParty_sources/hibernate-core/org/hibernate/EntityMode.java 30 Jul 2014 15:51:00 -0000 1.1.2.1
@@ -1,10 +1,10 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
- * Copyright (c) 2008, Red Hat Middleware LLC or third-party contributors as
+ * Copyright (c) 2008-2011, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
- * distributed under license by Red Hat Middleware LLC.
+ * distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
@@ -20,53 +20,56 @@
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
- *
*/
package org.hibernate;
-import java.util.Map;
-import java.util.HashMap;
-import java.io.Serializable;
-
/**
* Defines the representation modes available for entities.
*
* @author Steve Ebersole
*/
-public class EntityMode implements Serializable {
+public enum EntityMode {
+ /**
+ * The {@code pojo} entity mode describes an entity model made up of entity classes (loosely) following
+ * the java bean convention.
+ */
+ POJO( "pojo" ),
- private static final Map INSTANCES = new HashMap();
+ /**
+ * The {@code dynamic-map} entity mode describes an entity model defined using {@link java.util.Map} references.
+ */
+ MAP( "dynamic-map" );
- public static final EntityMode POJO = new EntityMode( "pojo" );
- public static final EntityMode DOM4J = new EntityMode( "dom4j" );
- public static final EntityMode MAP = new EntityMode( "dynamic-map" );
-
- static {
- INSTANCES.put( POJO.name, POJO );
- INSTANCES.put( DOM4J.name, DOM4J );
- INSTANCES.put( MAP.name, MAP );
- }
-
private final String name;
- public EntityMode(String name) {
+ private EntityMode(String name) {
this.name = name;
}
+ @Override
public String toString() {
return name;
}
- private Object readResolve() {
- return INSTANCES.get( name );
- }
+ private static final String DYNAMIC_MAP_NAME = MAP.name.toUpperCase();
- public static EntityMode parse(String name) {
- EntityMode rtn = ( EntityMode ) INSTANCES.get( name );
- if ( rtn == null ) {
- // default is POJO
- rtn = POJO;
+ /**
+ * Legacy-style entity-mode name parsing. Case insensitive
+ *
+ * @param entityMode The entity mode name to evaluate
+ *
+ * @return The appropriate entity mode; {@code null} for incoming {@code entityMode} param is treated by returning
+ * {@link #POJO}.
+ */
+ public static EntityMode parse(String entityMode) {
+ if ( entityMode == null ) {
+ return POJO;
}
- return rtn;
+ entityMode = entityMode.toUpperCase();
+ if ( DYNAMIC_MAP_NAME.equals( entityMode ) ) {
+ return MAP;
+ }
+ return valueOf( entityMode );
}
+
}
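
A small sketch of the legacy-style parsing retained above: a null name defaults to POJO, and the legacy "dynamic-map" name (case-insensitive) maps to MAP; anything else falls through to valueOf().

import org.hibernate.EntityMode;

public class EntityModeExample {
    public static void main(String[] args) {
        System.out.println(EntityMode.parse(null));          // pojo
        System.out.println(EntityMode.parse("dynamic-map")); // dynamic-map
        System.out.println(EntityMode.parse("POJO"));        // pojo
    }
}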
Fisheye: Tag 1.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/EntityNameResolver.java'.
Fisheye: No comparison available. Pass `N' to diff?
Index: 3rdParty_sources/hibernate-core/org/hibernate/Filter.java
===================================================================
RCS file: /usr/local/cvsroot/3rdParty_sources/hibernate-core/org/hibernate/Filter.java,v
diff -u -r1.1 -r1.1.2.1
--- 3rdParty_sources/hibernate-core/org/hibernate/Filter.java 17 Aug 2012 14:36:40 -0000 1.1
+++ 3rdParty_sources/hibernate-core/org/hibernate/Filter.java 30 Jul 2014 15:51:00 -0000 1.1.2.1
@@ -23,11 +23,10 @@
*
*/
package org.hibernate;
-
-import org.hibernate.engine.FilterDefinition;
-
import java.util.Collection;
+import org.hibernate.engine.spi.FilterDefinition;
+
/**
* Type definition of Filter. Filter defines the user's view into enabled dynamic filters,
* allowing them to set filter parameter values.
Index: 3rdParty_sources/hibernate-core/org/hibernate/Hibernate.java
===================================================================
RCS file: /usr/local/cvsroot/3rdParty_sources/hibernate-core/org/hibernate/Hibernate.java,v
diff -u -r1.1 -r1.1.2.1
--- 3rdParty_sources/hibernate-core/org/hibernate/Hibernate.java 17 Aug 2012 14:36:40 -0000 1.1
+++ 3rdParty_sources/hibernate-core/org/hibernate/Hibernate.java 30 Jul 2014 15:51:00 -0000 1.1.2.1
@@ -1,10 +1,10 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
- * Copyright (c) 2008, Red Hat Middleware LLC or third-party contributors as
+ * Copyright (c) 2010, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
- * distributed under license by Red Hat Middleware LLC.
+ * distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
@@ -20,66 +20,19 @@
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
- *
*/
package org.hibernate;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.Reader;
-import java.io.Serializable;
-import java.sql.Blob;
-import java.sql.Clob;
import java.util.Iterator;
-import java.util.Properties;
-import org.hibernate.collection.PersistentCollection;
+import org.hibernate.bytecode.instrumentation.internal.FieldInterceptionHelper;
+import org.hibernate.bytecode.instrumentation.spi.FieldInterceptor;
+import org.hibernate.collection.spi.PersistentCollection;
import org.hibernate.engine.HibernateIterator;
-import org.hibernate.intercept.FieldInterceptionHelper;
-import org.hibernate.intercept.FieldInterceptor;
-import org.hibernate.lob.BlobImpl;
-import org.hibernate.lob.ClobImpl;
-import org.hibernate.lob.SerializableBlob;
-import org.hibernate.lob.SerializableClob;
+import org.hibernate.engine.jdbc.LobCreator;
+import org.hibernate.engine.spi.SessionImplementor;
import org.hibernate.proxy.HibernateProxy;
import org.hibernate.proxy.LazyInitializer;
-import org.hibernate.type.AnyType;
-import org.hibernate.type.BigDecimalType;
-import org.hibernate.type.BigIntegerType;
-import org.hibernate.type.BinaryType;
-import org.hibernate.type.BlobType;
-import org.hibernate.type.BooleanType;
-import org.hibernate.type.ByteType;
-import org.hibernate.type.CalendarDateType;
-import org.hibernate.type.CalendarType;
-import org.hibernate.type.CharacterType;
-import org.hibernate.type.ClassType;
-import org.hibernate.type.ClobType;
-import org.hibernate.type.CompositeCustomType;
-import org.hibernate.type.CurrencyType;
-import org.hibernate.type.CustomType;
-import org.hibernate.type.DateType;
-import org.hibernate.type.DoubleType;
-import org.hibernate.type.FloatType;
-import org.hibernate.type.IntegerType;
-import org.hibernate.type.LocaleType;
-import org.hibernate.type.LongType;
-import org.hibernate.type.ManyToOneType;
-import org.hibernate.type.NullableType;
-import org.hibernate.type.SerializableType;
-import org.hibernate.type.ShortType;
-import org.hibernate.type.StringType;
-import org.hibernate.type.TextType;
-import org.hibernate.type.TimeType;
-import org.hibernate.type.TimeZoneType;
-import org.hibernate.type.TimestampType;
-import org.hibernate.type.TrueFalseType;
-import org.hibernate.type.Type;
-import org.hibernate.type.YesNoType;
-import org.hibernate.type.CharArrayType;
-import org.hibernate.type.WrapperBinaryType;
-import org.hibernate.type.CharacterArrayType;
-import org.hibernate.usertype.CompositeUserType;
/**
*
@@ -96,223 +49,15 @@
*/
public final class Hibernate {
-
/**
- * Hibernate long type.
- */
- public static final NullableType LONG = new LongType();
- /**
- * Hibernate short type.
- */
- public static final NullableType SHORT = new ShortType();
- /**
- * Hibernate integer type.
- */
- public static final NullableType INTEGER = new IntegerType();
- /**
- * Hibernate byte type.
- */
- public static final NullableType BYTE = new ByteType();
- /**
- * Hibernate float type.
- */
- public static final NullableType FLOAT = new FloatType();
- /**
- * Hibernate double type.
- */
- public static final NullableType DOUBLE = new DoubleType();
- /**
- * Hibernate character type.
- */
- public static final NullableType CHARACTER = new CharacterType();
- /**
- * Hibernate string type.
- */
- public static final NullableType STRING = new StringType();
- /**
- * Hibernate time type.
- */
- public static final NullableType TIME = new TimeType();
- /**
- * Hibernate date type.
- */
- public static final NullableType DATE = new DateType();
- /**
- * Hibernate timestamp type.
- */
- public static final NullableType TIMESTAMP = new TimestampType();
- /**
- * Hibernate boolean type.
- */
- public static final NullableType BOOLEAN = new BooleanType();
- /**
- * Hibernate true_false type.
- */
- public static final NullableType TRUE_FALSE = new TrueFalseType();
- /**
- * Hibernate yes_no type.
- */
- public static final NullableType YES_NO = new YesNoType();
- /**
- * Hibernate big_decimal type.
- */
- public static final NullableType BIG_DECIMAL = new BigDecimalType();
- /**
- * Hibernate big_integer type.
- */
- public static final NullableType BIG_INTEGER = new BigIntegerType();
- /**
- * Hibernate binary type.
- */
- public static final NullableType BINARY = new BinaryType();
- /**
- * Hibernate wrapper-binary type.
- */
- public static final NullableType WRAPPER_BINARY = new WrapperBinaryType();
- /**
- * Hibernate char[] type.
- */
- public static final NullableType CHAR_ARRAY = new CharArrayType();
- /**
- * Hibernate Character[] type.
- */
- public static final NullableType CHARACTER_ARRAY = new CharacterArrayType();
- /**
- * Hibernate text type.
- */
- public static final NullableType TEXT = new TextType();
- /**
- * Hibernate blob type.
- */
- public static final Type BLOB = new BlobType();
- /**
- * Hibernate clob type.
- */
- public static final Type CLOB = new ClobType();
- /**
- * Hibernate calendar type.
- */
- public static final NullableType CALENDAR = new CalendarType();
- /**
- * Hibernate calendar_date type.
- */
- public static final NullableType CALENDAR_DATE = new CalendarDateType();
- /**
- * Hibernate locale type.
- */
- public static final NullableType LOCALE = new LocaleType();
- /**
- * Hibernate currency type.
- */
- public static final NullableType CURRENCY = new CurrencyType();
- /**
- * Hibernate timezone type.
- */
- public static final NullableType TIMEZONE = new TimeZoneType();
- /**
- * Hibernate class type.
- */
- public static final NullableType CLASS = new ClassType();
- /**
- * Hibernate serializable type.
- */
- public static final NullableType SERIALIZABLE = new SerializableType( Serializable.class );
- /**
- * Hibernate object type.
- */
- public static final Type OBJECT = new AnyType();
-
-
- /**
* Cannot be instantiated.
*/
private Hibernate() {
throw new UnsupportedOperationException();
}
- /**
- * A Hibernate serializable type.
- */
- public static Type serializable(Class serializableClass) {
- return new SerializableType( serializableClass );
- }
/**
- * A Hibernate any type.
- *
- * @param metaType a type mapping java.lang.Class to a single column
- * @param identifierType the entity identifier type
- * @return the Type
- */
- public static Type any(Type metaType, Type identifierType) {
- return new AnyType( metaType, identifierType );
- }
-
- /**
- * A Hibernate persistent object (entity) type.
- *
- * @param persistentClass a mapped entity class
- */
- public static Type entity(Class persistentClass) {
- // not really a many-to-one association *necessarily*
- return new ManyToOneType( persistentClass.getName() );
- }
-
- /**
- * A Hibernate persistent object (entity) type.
- *
- * @param entityName a mapped entity class
- */
- public static Type entity(String entityName) {
- // not really a many-to-one association *necessarily*
- return new ManyToOneType( entityName );
- }
-
- /**
- * A Hibernate custom type.
- *
- * @param userTypeClass a class that implements UserType
- */
- public static Type custom(Class userTypeClass) throws HibernateException {
- return custom( userTypeClass, null );
- }
-
- /**
- * A Hibernate parameterizable custom type.
- *
- * @param userTypeClass a class that implements UserType and ParameterizableType
- * @param parameterNames the names of the parameters passed to the type
- * @param parameterValues the values of the parameters passed to the type. They must match
- * up with the order and length of the parameterNames array.
- */
- public static Type custom(Class userTypeClass, String[] parameterNames, String[] parameterValues)
- throws HibernateException {
- Properties parameters = new Properties();
- for ( int i = 0; i < parameterNames.length; i++ ) {
- parameters.setProperty( parameterNames[i], parameterValues[i] );
- }
- return custom( userTypeClass, parameters );
- }
-
- /**
- * A Hibernate parameterizable custom type.
- *
- * @param userTypeClass a class that implements UserType and ParameterizableType
- * @param parameters the parameters as a collection of name/value pairs
- */
- public static Type custom(Class userTypeClass, Properties parameters)
- throws HibernateException {
- if ( CompositeUserType.class.isAssignableFrom( userTypeClass ) ) {
- CompositeCustomType type = new CompositeCustomType( userTypeClass, parameters );
- return type;
- }
- else {
- CustomType type = new CustomType( userTypeClass, parameters );
- return type;
- }
- }
-
- /**
* Force initialization of a proxy or persistent collection.
*
* Note: This only ensures intialization of a proxy object or collection;
@@ -325,11 +70,12 @@
if ( proxy == null ) {
return;
}
- else if ( proxy instanceof HibernateProxy ) {
- ( ( HibernateProxy ) proxy ).getHibernateLazyInitializer().initialize();
+
+ if ( proxy instanceof HibernateProxy ) {
+ ( (HibernateProxy) proxy ).getHibernateLazyInitializer().initialize();
}
else if ( proxy instanceof PersistentCollection ) {
- ( ( PersistentCollection ) proxy ).forceInitialization();
+ ( (PersistentCollection) proxy ).forceInitialization();
}
}
@@ -339,12 +85,13 @@
* @param proxy a persistable object, proxy, persistent collection or null
* @return true if the argument is already initialized, or is not a proxy or collection
*/
+ @SuppressWarnings("SimplifiableIfStatement")
public static boolean isInitialized(Object proxy) {
if ( proxy instanceof HibernateProxy ) {
- return !( ( HibernateProxy ) proxy ).getHibernateLazyInitializer().isUninitialized();
+ return !( (HibernateProxy) proxy ).getHibernateLazyInitializer().isUninitialized();
}
else if ( proxy instanceof PersistentCollection ) {
- return ( ( PersistentCollection ) proxy ).wasInitialized();
+ return ( (PersistentCollection) proxy ).wasInitialized();
}
else {
return true;
@@ -361,7 +108,7 @@
*/
public static Class getClass(Object proxy) {
if ( proxy instanceof HibernateProxy ) {
- return ( ( HibernateProxy ) proxy ).getHibernateLazyInitializer()
+ return ( (HibernateProxy) proxy ).getHibernateLazyInitializer()
.getImplementation()
.getClass();
}
@@ -371,68 +118,43 @@
}
/**
- * Create a new Blob. The returned object will be initially immutable.
+ * Obtain a lob creator for the given session.
*
- * @param bytes a byte array
- * @return the Blob
- */
- public static Blob createBlob(byte[] bytes) {
- return new SerializableBlob( new BlobImpl( bytes ) );
- }
-
- /**
- * Create a new Blob. The returned object will be initially immutable.
+ * @param session The session for which to obtain a lob creator
*
- * @param stream a binary stream
- * @param length the number of bytes in the stream
- * @return the Blob
+ * @return The log creator reference
*/
- public static Blob createBlob(InputStream stream, int length) {
- return new SerializableBlob( new BlobImpl( stream, length ) );
+ public static LobCreator getLobCreator(Session session) {
+ return getLobCreator( (SessionImplementor) session );
}
/**
- * Create a new Blob. The returned object will be initially immutable.
+ * Obtain a lob creator for the given session.
*
- * @param stream a binary stream
- * @return the Blob
- * @throws IOException
- */
- public static Blob createBlob(InputStream stream) throws IOException {
- return new SerializableBlob( new BlobImpl( stream, stream.available() ) );
- }
-
- /**
- * Create a new Clob. The returned object will be initially immutable.
+ * @param session The session for which to obtain a lob creator
*
- * @param string a String
+ * @return The log creator reference
*/
- public static Clob createClob(String string) {
- return new SerializableClob( new ClobImpl( string ) );
+ public static LobCreator getLobCreator(SessionImplementor session) {
+ return session.getFactory()
+ .getJdbcServices()
+ .getLobCreator( session );
}
/**
- * Create a new Clob. The returned object will be initially immutable.
- *
- * @param reader a character stream
- * @param length the number of characters in the stream
- */
- public static Clob createClob(Reader reader, int length) {
- return new SerializableClob( new ClobImpl( reader, length ) );
- }
-
- /**
- * Close an Iterator created by iterate() immediately,
+ * Close an {@link Iterator} instances obtained from {@link org.hibernate.Query#iterate()} immediately
* instead of waiting until the session is closed or disconnected.
*
- * @param iterator an Iterator created by iterate()
- * @throws HibernateException
- * @see org.hibernate.Query#iterate
+ * @param iterator an Iterator created by iterate()
+ *
+ * @throws HibernateException Indicates a problem closing the Hibernate iterator.
+ * @throws IllegalArgumentException If the Iterator is not a "Hibernate Iterator".
+ *
* @see Query#iterate()
*/
public static void close(Iterator iterator) throws HibernateException {
if ( iterator instanceof HibernateIterator ) {
- ( ( HibernateIterator ) iterator ).close();
+ ( (HibernateIterator) iterator ).close();
}
else {
throw new IllegalArgumentException( "not a Hibernate iterator" );
@@ -445,14 +167,12 @@
*
* @param proxy The potential proxy
* @param propertyName the name of a persistent attribute of the object
- * @return true if the named property of the object is not listed as uninitialized
- * @return false if the object is an uninitialized proxy, or the named property is uninitialized
+ * @return true if the named property of the object is not listed as uninitialized; false otherwise
*/
public static boolean isPropertyInitialized(Object proxy, String propertyName) {
-
- Object entity;
+ final Object entity;
if ( proxy instanceof HibernateProxy ) {
- LazyInitializer li = ( ( HibernateProxy ) proxy ).getHibernateLazyInitializer();
+ final LazyInitializer li = ( (HibernateProxy) proxy ).getHibernateLazyInitializer();
if ( li.isUninitialized() ) {
return false;
}
@@ -465,13 +185,12 @@
}
if ( FieldInterceptionHelper.isInstrumented( entity ) ) {
- FieldInterceptor interceptor = FieldInterceptionHelper.extractFieldInterceptor( entity );
+ final FieldInterceptor interceptor = FieldInterceptionHelper.extractFieldInterceptor( entity );
return interceptor == null || interceptor.isInitialized( propertyName );
}
else {
return true;
}
-
}
}
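
A minimal sketch of the LobCreator-based approach that replaces the removed Hibernate.createBlob()/createClob() helpers above; the toBlob() wrapper and the byte-array argument are illustrative.

import java.sql.Blob;

import org.hibernate.Hibernate;
import org.hibernate.Session;

public class LobCreatorExample {
    public static Blob toBlob(Session session, byte[] bytes) {
        // The LobCreator is obtained per session and creates LOBs appropriate
        // for the underlying JDBC connection.
        return Hibernate.getLobCreator(session).createBlob(bytes);
    }
}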
Index: 3rdParty_sources/hibernate-core/org/hibernate/HibernateException.java
===================================================================
RCS file: /usr/local/cvsroot/3rdParty_sources/hibernate-core/org/hibernate/HibernateException.java,v
diff -u -r1.1 -r1.1.2.1
--- 3rdParty_sources/hibernate-core/org/hibernate/HibernateException.java 17 Aug 2012 14:36:40 -0000 1.1
+++ 3rdParty_sources/hibernate-core/org/hibernate/HibernateException.java 30 Jul 2014 15:51:00 -0000 1.1.2.1
@@ -1,10 +1,10 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
- * Copyright (c) 2008, Red Hat Middleware LLC or third-party contributors as
+ * Copyright (c) 2007,2011, 2013, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
- * distributed under license by Red Hat Middleware LLC.
+ * distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
@@ -20,33 +20,46 @@
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
- *
*/
package org.hibernate;
-import org.hibernate.exception.NestableRuntimeException;
-
/**
- * Any exception that occurs inside the persistence layer
- * or JDBC driver. SQLExceptions are always wrapped
- * by instances of JDBCException.
- *
+ * The base exception type for Hibernate exceptions.
+ *
+ * Note that all {@link java.sql.SQLException SQLExceptions} will be wrapped in some form of
+ * {@link JDBCException}.
+ *
* @see JDBCException
+ *
* @author Gavin King
*/
-
-public class HibernateException extends NestableRuntimeException {
-
- public HibernateException(Throwable root) {
- super(root);
+public class HibernateException extends RuntimeException {
+ /**
+ * Constructs a HibernateException using the given exception message.
+ *
+ * @param message The message explaining the reason for the exception
+ */
+ public HibernateException(String message) {
+ super( message );
}
- public HibernateException(String string, Throwable root) {
- super(string, root);
+ /**
+ * Constructs a HibernateException using the given message and underlying cause.
+ *
+ * @param cause The underlying cause.
+ */
+ public HibernateException(Throwable cause) {
+ super( cause );
}
- public HibernateException(String s) {
- super(s);
+ /**
+ * Constructs a HibernateException using the given message and underlying cause.
+ *
+ * @param message The message explaining the reason for the exception.
+ * @param cause The underlying cause.
+ */
+ public HibernateException(String message, Throwable cause) {
+ super( message, cause );
}
}
Fisheye: Tag 1.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/IdentifierLoadAccess.java'.
Fisheye: No comparison available. Pass `N' to diff?
Index: 3rdParty_sources/hibernate-core/org/hibernate/InstantiationException.java
===================================================================
RCS file: /usr/local/cvsroot/3rdParty_sources/hibernate-core/org/hibernate/InstantiationException.java,v
diff -u -r1.1 -r1.1.2.1
--- 3rdParty_sources/hibernate-core/org/hibernate/InstantiationException.java 17 Aug 2012 14:36:40 -0000 1.1
+++ 3rdParty_sources/hibernate-core/org/hibernate/InstantiationException.java 30 Jul 2014 15:51:00 -0000 1.1.2.1
@@ -1,10 +1,10 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
- * Copyright (c) 2008, Red Hat Middleware LLC or third-party contributors as
+ * Copyright (c) 2008, 2013, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
- * distributed under license by Red Hat Middleware LLC.
+ * distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
@@ -20,48 +20,75 @@
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
- *
*/
package org.hibernate;
/**
- * Thrown if Hibernate can't instantiate an entity or component
- * class at runtime.
+ * Thrown if Hibernate can't instantiate a class at runtime.
*
* @author Gavin King
*/
-
public class InstantiationException extends HibernateException {
-
private final Class clazz;
- public InstantiationException(String s, Class clazz, Throwable root) {
- super(s, root);
+ /**
+ * Constructs a InstantiationException.
+ *
+ * @param message A message explaining the exception condition
+ * @param clazz The Class we are attempting to instantiate
+ * @param cause The underlying exception
+ */
+ public InstantiationException(String message, Class clazz, Throwable cause) {
+ super( message, cause );
this.clazz = clazz;
}
- public InstantiationException(String s, Class clazz) {
- super(s);
- this.clazz = clazz;
+ /**
+ * Constructs a InstantiationException.
+ *
+ * @param message A message explaining the exception condition
+ * @param clazz The Class we are attempting to instantiate
+ */
+ public InstantiationException(String message, Class clazz) {
+ this( message, clazz, null );
}
- public InstantiationException(String s, Class clazz, Exception e) {
- super(s, e);
+ /**
+ * Constructs a InstantiationException.
+ *
+ * @param message A message explaining the exception condition
+ * @param clazz The Class we are attempting to instantiate
+ * @param cause The underlying exception
+ */
+ public InstantiationException(String message, Class clazz, Exception cause) {
+ super( message, cause );
this.clazz = clazz;
}
+ /**
+ * Returns the Class we were attempting to instantiate.
+ *
+ * @deprecated Use {@link #getUninstantiatableClass} instead
+ *
+ * @return The class we are unable to instantiate
+ */
+ @Deprecated
public Class getPersistentClass() {
return clazz;
}
+ /**
+ * Returns the Class we were attempting to instantiate.
+ *
+ * @return The class we are unable to instantiate
+ */
+ public Class getUninstantiatableClass() {
+ return clazz;
+ }
+
+ @Override
public String getMessage() {
- return super.getMessage() + clazz.getName();
+ return super.getMessage() + " : " + clazz.getName();
}
}
-
-
-
-
-
-
Index: 3rdParty_sources/hibernate-core/org/hibernate/MappingException.java
===================================================================
RCS file: /usr/local/cvsroot/3rdParty_sources/hibernate-core/org/hibernate/MappingException.java,v
diff -u -r1.1 -r1.1.2.1
--- 3rdParty_sources/hibernate-core/org/hibernate/MappingException.java 17 Aug 2012 14:36:38 -0000 1.1
+++ 3rdParty_sources/hibernate-core/org/hibernate/MappingException.java 30 Jul 2014 15:50:59 -0000 1.1.2.1
@@ -1,10 +1,10 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
- * Copyright (c) 2008, Red Hat Middleware LLC or third-party contributors as
+ * Copyright (c) 2008, 2011, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
- * distributed under license by Red Hat Middleware LLC.
+ * distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
@@ -20,29 +20,42 @@
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
- *
*/
package org.hibernate;
/**
- * An exception that usually occurs at configuration time, rather
- * than runtime, as a result of something screwy in the O-R mappings.
+ * An exception that occurs while reading mapping sources (xml/annotations),usually as a result of something
+ * screwy in the O-R mappings.
*
* @author Gavin King
*/
-
public class MappingException extends HibernateException {
-
- public MappingException(String msg, Throwable root) {
- super( msg, root );
+ /**
+ * Constructs a MappingException using the given information.
+ *
+ * @param message A message explaining the exception condition
+ * @param cause The underlying cause
+ */
+ public MappingException(String message, Throwable cause) {
+ super( message, cause );
}
- public MappingException(Throwable root) {
- super(root);
+ /**
+ * Constructs a MappingException using the given information.
+ *
+ * @param cause The underlying cause
+ */
+ public MappingException(Throwable cause) {
+ super( cause );
}
- public MappingException(String s) {
- super(s);
+ /**
+ * Constructs a MappingException using the given information.
+ *
+ * @param message A message explaining the exception condition
+ */
+ public MappingException(String message) {
+ super( message );
}
}
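
As an illustration of where this exception typically surfaces at configuration time, a minimal sketch; the mapping resource name is hypothetical:

    import org.hibernate.MappingException;
    import org.hibernate.cfg.Configuration;

    public class MappingExceptionExample {
        public static void main(String[] args) {
            try {
                // A missing or malformed hbm.xml mapping fails while the configuration is read
                new Configuration().addResource( "com/example/Broken.hbm.xml" );
            }
            catch (MappingException e) {
                System.err.println( "Bad O-R mapping: " + e.getMessage() );
            }
        }
    }
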
Fisheye: Tag 1.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/MultiTenancyStrategy.java'.
Fisheye: Tag 1.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/NaturalIdLoadAccess.java'.
Index: 3rdParty_sources/hibernate-core/org/hibernate/NonUniqueResultException.java
===================================================================
RCS file: /usr/local/cvsroot/3rdParty_sources/hibernate-core/org/hibernate/NonUniqueResultException.java,v
diff -u -r1.1 -r1.1.2.1
--- 3rdParty_sources/hibernate-core/org/hibernate/NonUniqueResultException.java 17 Aug 2012 14:36:39 -0000 1.1
+++ 3rdParty_sources/hibernate-core/org/hibernate/NonUniqueResultException.java 30 Jul 2014 15:51:00 -0000 1.1.2.1
@@ -1,10 +1,10 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
- * Copyright (c) 2008, Red Hat Middleware LLC or third-party contributors as
+ * Copyright (c) 2008, 2013, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
- * distributed under license by Red Hat Middleware LLC.
+ * distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
@@ -20,19 +20,22 @@
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
- *
*/
package org.hibernate;
/**
* Thrown when the application calls Query.uniqueResult() and
- * the query returned more than one result. Unlike all other Hibernate
+ * the query returned more than one result. Unlike all other Hibernate
* exceptions, this one is recoverable!
*
* @author Gavin King
*/
public class NonUniqueResultException extends HibernateException {
-
+ /**
+ * Constructs a NonUniqueResultException.
+ *
+ * @param resultCount The number of actual results.
+ */
public NonUniqueResultException(int resultCount) {
super( "query did not return a unique result: " + resultCount );
}
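
Since the exception is documented as recoverable, a caller can fall back to listing the results; a minimal sketch, assuming a hypothetical open Session and a mapped Person entity:

    import java.util.List;
    import org.hibernate.NonUniqueResultException;
    import org.hibernate.Query;
    import org.hibernate.Session;

    public class NonUniqueResultExample {
        public static Object firstByName(Session session, String name) {
            Query query = session.createQuery( "from Person p where p.name = :name" )
                    .setString( "name", name );
            try {
                return query.uniqueResult();
            }
            catch (NonUniqueResultException e) {
                // Recoverable: fall back to the full result list and take the first row
                List results = query.list();
                return results.isEmpty() ? null : results.get( 0 );
            }
        }
    }
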
Fisheye: Tag 1.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/NullPrecedence.java'.
Index: 3rdParty_sources/hibernate-core/org/hibernate/PersistentObjectException.java
===================================================================
RCS file: /usr/local/cvsroot/3rdParty_sources/hibernate-core/org/hibernate/PersistentObjectException.java,v
diff -u -r1.1 -r1.1.2.1
--- 3rdParty_sources/hibernate-core/org/hibernate/PersistentObjectException.java 17 Aug 2012 14:36:40 -0000 1.1
+++ 3rdParty_sources/hibernate-core/org/hibernate/PersistentObjectException.java 30 Jul 2014 15:50:59 -0000 1.1.2.1
@@ -1,10 +1,10 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
- * Copyright (c) 2008, Red Hat Middleware LLC or third-party contributors as
+ * Copyright (c) 2008, 2013, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
- * distributed under license by Red Hat Middleware LLC.
+ * distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
@@ -20,7 +20,6 @@
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
- *
*/
package org.hibernate;
@@ -31,8 +30,12 @@
* @author Gavin King
*/
public class PersistentObjectException extends HibernateException {
-
- public PersistentObjectException(String s) {
- super(s);
+ /**
+ * Constructs a PersistentObjectException using the given message.
+ *
+ * @param message A message explaining the exception condition
+ */
+ public PersistentObjectException(String message) {
+ super( message );
}
}
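
For context, this exception commonly surfaces when an instance that is already persistent or detached is passed to an operation expecting a transient one; a hedged sketch, with a hypothetical detached entity:

    import org.hibernate.PersistentObjectException;
    import org.hibernate.Session;

    public class PersistentObjectExceptionExample {
        // Hypothetical: 'person' is a detached instance that already carries an identifier
        public static void persistOrReport(Session session, Object person) {
            try {
                session.persist( person );
            }
            catch (PersistentObjectException e) {
                System.err.println( "Expected a transient instance: " + e.getMessage() );
                throw e;
            }
        }
    }
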
Fisheye: Tag 1.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/PessimisticLockException.java'.
Index: 3rdParty_sources/hibernate-core/org/hibernate/PropertyAccessException.java
===================================================================
RCS file: /usr/local/cvsroot/3rdParty_sources/hibernate-core/org/hibernate/PropertyAccessException.java,v
diff -u -r1.1 -r1.1.2.1
--- 3rdParty_sources/hibernate-core/org/hibernate/PropertyAccessException.java 17 Aug 2012 14:36:39 -0000 1.1
+++ 3rdParty_sources/hibernate-core/org/hibernate/PropertyAccessException.java 30 Jul 2014 15:50:59 -0000 1.1.2.1
@@ -1,10 +1,10 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
- * Copyright (c) 2008, Red Hat Middleware LLC or third-party contributors as
+ * Copyright (c) 2008, 2013, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
- * distributed under license by Red Hat Middleware LLC.
+ * distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
@@ -20,11 +20,10 @@
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
- *
*/
package org.hibernate;
-import org.hibernate.util.StringHelper;
+import org.hibernate.internal.util.StringHelper;
/**
* A problem occurred accessing a property of an instance of a
@@ -39,13 +38,26 @@
* @author Gavin King
*/
public class PropertyAccessException extends HibernateException {
-
private final Class persistentClass;
private final String propertyName;
private final boolean wasSetter;
- public PropertyAccessException(Throwable root, String s, boolean wasSetter, Class persistentClass, String propertyName) {
- super(s, root);
+ /**
+ * Constructs a PropertyAccessException using the specified information.
+ *
+ * @param cause The underlying cause
+ * @param message A message explaining the exception condition
+ * @param wasSetter Was the attempt to access the setter the cause of the exception?
+ * @param persistentClass The class which is supposed to contain the property in question
+ * @param propertyName The name of the property.
+ */
+ public PropertyAccessException(
+ Throwable cause,
+ String message,
+ boolean wasSetter,
+ Class persistentClass,
+ String propertyName) {
+ super( message, cause );
this.persistentClass = persistentClass;
this.wasSetter = wasSetter;
this.propertyName = propertyName;
@@ -59,15 +71,14 @@
return propertyName;
}
+ protected String originalMessage() {
+ return super.getMessage();
+ }
+
+ @Override
public String getMessage() {
- return super.getMessage() +
- ( wasSetter ? " setter of " : " getter of ") +
- StringHelper.qualify( persistentClass.getName(), propertyName );
+ return originalMessage()
+ + ( wasSetter ? " setter of " : " getter of " )
+ + StringHelper.qualify( persistentClass.getName(), propertyName );
}
}
-
-
-
-
-
-
Index: 3rdParty_sources/hibernate-core/org/hibernate/PropertyValueException.java
===================================================================
RCS file: /usr/local/cvsroot/3rdParty_sources/hibernate-core/org/hibernate/PropertyValueException.java,v
diff -u -r1.1 -r1.1.2.1
--- 3rdParty_sources/hibernate-core/org/hibernate/PropertyValueException.java 17 Aug 2012 14:36:40 -0000 1.1
+++ 3rdParty_sources/hibernate-core/org/hibernate/PropertyValueException.java 30 Jul 2014 15:50:59 -0000 1.1.2.1
@@ -1,10 +1,10 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
- * Copyright (c) 2008, Red Hat Middleware LLC or third-party contributors as
+ * Copyright (c) 2008, 2013, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
- * distributed under license by Red Hat Middleware LLC.
+ * distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
@@ -20,11 +20,10 @@
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
- *
*/
package org.hibernate;
-import org.hibernate.util.StringHelper;
+import org.hibernate.internal.util.StringHelper;
/**
* Thrown when the (illegal) value of a property can not be persisted.
@@ -36,12 +35,18 @@
* @author Gavin King
*/
public class PropertyValueException extends HibernateException {
-
private final String entityName;
private final String propertyName;
- public PropertyValueException(String s, String entityName, String propertyName) {
- super(s);
+ /**
+ * Constructs a PropertyValueException using the specified information.
+ *
+ * @param message A message explaining the exception condition
+ * @param entityName The name of the entity, containing the property
+ * @param propertyName The name of the property being accessed.
+ */
+ public PropertyValueException(String message, String entityName, String propertyName) {
+ super( message );
this.entityName = entityName;
this.propertyName = propertyName;
}
@@ -54,26 +59,8 @@
return propertyName;
}
+ @Override
public String getMessage() {
- return super.getMessage() + ": " +
- StringHelper.qualify(entityName, propertyName);
+ return super.getMessage() + " : " + StringHelper.qualify( entityName, propertyName );
}
-
- /**
- * Return a well formed property path.
- * Basicaly, it will return parent.child
- *
- * @param parent parent in path
- * @param child child in path
- * @return parent-child path
- */
- public static String buildPropertyPath(String parent, String child) {
- return new StringBuffer(parent).append('.').append(child).toString();
- }
}
-
-
-
-
-
-
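
For illustration, the entity/property accessors make it easy to report which value was rejected; a minimal sketch, assuming a hypothetical Person entity whose name property is mapped not-null:

    import org.hibernate.PropertyValueException;
    import org.hibernate.Session;

    public class PropertyValueExceptionExample {
        public static void saveAndReport(Session session, Object person) {
            try {
                session.save( person );
                session.flush();
            }
            catch (PropertyValueException e) {
                // e.g. "not-null property references a null or transient value : Person.name"
                System.err.println( "Illegal value for " + e.getEntityName() + "." + e.getPropertyName() );
                throw e;
            }
        }
    }
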
Index: 3rdParty_sources/hibernate-core/org/hibernate/Query.java
===================================================================
RCS file: /usr/local/cvsroot/3rdParty_sources/hibernate-core/org/hibernate/Query.java,v
diff -u -r1.1 -r1.1.2.1
--- 3rdParty_sources/hibernate-core/org/hibernate/Query.java 17 Aug 2012 14:36:38 -0000 1.1
+++ 3rdParty_sources/hibernate-core/org/hibernate/Query.java 30 Jul 2014 15:51:00 -0000 1.1.2.1
@@ -23,7 +23,6 @@
*
*/
package org.hibernate;
-
import java.io.Serializable;
import java.math.BigDecimal;
import java.math.BigInteger;
@@ -75,30 +74,168 @@
*
* @see org.hibernate.Session#createQuery(java.lang.String)
* @see org.hibernate.ScrollableResults
+ *
* @author Gavin King
*/
-public interface Query {
+@SuppressWarnings("UnusedDeclaration")
+public interface Query extends BasicQueryContract {
/**
* Get the query string.
*
* @return the query string
*/
public String getQueryString();
+
/**
- * Return the Hibernate types of the query result set.
- * @return an array of types
+ * Obtains the limit set on the maximum number of rows to retrieve. No set limit means there is no limit set
+ * on the number of rows returned. Technically both {@code null} and any negative values are interpreted as no
+ * limit; however, this method should always return null in such case.
+ *
+ * @return The maximum number of rows to retrieve, or {@code null} if no limit has been set
*/
- public Type[] getReturnTypes() throws HibernateException;
+ public Integer getMaxResults();
+
/**
- * Return the HQL select clause aliases (if any)
+ * Set the maximum number of rows to retrieve.
+ *
+ * @param maxResults the maximum number of rows
+ *
+ * @return {@code this}, for method chaining
+ *
+ * @see #getMaxResults()
+ */
+ public Query setMaxResults(int maxResults);
+
+ /**
+ * Obtain the value specified (if any) for the first row to be returned from the query results; zero-based. Used,
+ * in conjunction with {@link #getMaxResults()} in "paginated queries". No value specified means the first result
+ * is returned. Zero and negative numbers are the same as no setting.
+ *
+ * @return The first result number.
+ */
+ public Integer getFirstResult();
+
+ /**
+ * Set the first row to retrieve.
+ *
+ * @param firstResult a row number, numbered from 0
+ *
+ * @return {@code this}, for method chaining
+ *
+ * @see #getFirstResult()
+ */
+ public Query setFirstResult(int firstResult);
+
+ @Override
+ public Query setFlushMode(FlushMode flushMode);
+
+ @Override
+ public Query setCacheMode(CacheMode cacheMode);
+
+ @Override
+ public Query setCacheable(boolean cacheable);
+
+ @Override
+ public Query setCacheRegion(String cacheRegion);
+
+ @Override
+ public Query setTimeout(int timeout);
+
+ @Override
+ public Query setFetchSize(int fetchSize);
+
+ @Override
+ public Query setReadOnly(boolean readOnly);
+
+ /**
+ * Obtains the LockOptions in effect for this query.
+ *
+ * @return The LockOptions
+ *
+ * @see LockOptions
+ */
+ public LockOptions getLockOptions();
+
+ /**
+ * Set the lock options for the query. Specifically only the following are taken into consideration:
+ * - {@link LockOptions#getLockMode()}
+ * - {@link LockOptions#getScope()}
+ * - {@link LockOptions#getTimeOut()}
+ *
+ * For alias-specific locking, use {@link #setLockMode(String, LockMode)}.
+ *
+ * @param lockOptions The lock options to apply to the query.
+ *
+ * @return {@code this}, for method chaining
+ *
+ * @see #getLockOptions()
+ */
+ public Query setLockOptions(LockOptions lockOptions);
+
+ /**
+ * Set the LockMode to use for specific alias (as defined in the query's FROM clause).
+ *
+ * The alias-specific lock modes specified here are added to the query's internal
+ * {@link #getLockOptions() LockOptions}.
+ *
+ * The effect of these alias-specific LockModes is somewhat dependent on the driver/database in use. Generally
+ * speaking, for maximum portability, this method should only be used to mark that the rows corresponding to
+ * the given alias should be included in pessimistic locking ({@link LockMode#PESSIMISTIC_WRITE}).
+ *
+ * @param alias a query alias, or {@code "this"} for a collection filter
+ * @param lockMode The lock mode to apply.
+ *
+ * @return {@code this}, for method chaining
+ *
+ * @see #getLockOptions()
+ */
+ public Query setLockMode(String alias, LockMode lockMode);
+
+ /**
+ * Obtain the comment currently associated with this query. Provided SQL commenting is enabled
+ * (generally by enabling the {@code hibernate.use_sql_comments} config setting), this comment will also be added
+ * to the SQL query sent to the database. Often useful for identifying the source of troublesome queries on the
+ * database side.
+ *
+ * @return The comment.
+ */
+ public String getComment();
+
+ /**
+ * Set the comment for this query.
+ *
+ * @param comment The human-readable comment
+ *
+ * @return {@code this}, for method chaining
+ *
+ * @see #getComment()
+ */
+ public Query setComment(String comment);
+
+ /**
+ * Add a DB query hint to the SQL. These differ from JPA's {@link javax.persistence.QueryHint}, which is specific
+ * to the JPA implementation and ignores DB vendor-specific hints. Instead, these are intended solely for the
+ * vendor-specific hints, such as Oracle's optimizers. Multiple query hints are supported; the Dialect will
+ * determine concatenation and placement.
+ *
+ * @param hint The database specific query hint to add.
+ */
+ public Query addQueryHint(String hint);
+
+ /**
+ * Return the HQL select clause aliases, if any.
+ *
* @return an array of aliases as strings
*/
- public String[] getReturnAliases() throws HibernateException;
+ public String[] getReturnAliases();
+
/**
* Return the names of all named parameters of the query.
+ *
* @return the parameter names, in no particular order
*/
- public String[] getNamedParameters() throws HibernateException;
+ public String[] getNamedParameters();
+
/**
* Return the query results as an Iterator. If the query
* contains multiple results pre row, the results are returned in
@@ -108,294 +245,626 @@
* SQL query returns identifiers only.
*
* @return the result iterator
- * @throws HibernateException
*/
- public Iterator iterate() throws HibernateException;
+ public Iterator iterate();
+
/**
* Return the query results as ScrollableResults. The
* scrollability of the returned results depends upon JDBC driver
* support for scrollable ResultSets.
*
* @see ScrollableResults
+ *
* @return the result iterator
- * @throws HibernateException
*/
- public ScrollableResults scroll() throws HibernateException;
+ public ScrollableResults scroll();
+
/**
- * Return the query results as ScrollableResults. The
- * scrollability of the returned results depends upon JDBC driver
- * support for scrollable ResultSets.
+ * Return the query results as ScrollableResults. The scrollability of the returned results
+ * depends upon JDBC driver support for scrollable ResultSets.
*
+ * @param scrollMode The scroll mode
+ *
+ * @return the result iterator
+ *
* @see ScrollableResults
* @see ScrollMode
- * @return the result iterator
- * @throws HibernateException
+ *
*/
- public ScrollableResults scroll(ScrollMode scrollMode) throws HibernateException;
+ public ScrollableResults scroll(ScrollMode scrollMode);
+
/**
* Return the query results as a List. If the query contains
- * multiple results pre row, the results are returned in an instance
+ * multiple results per row, the results are returned in an instance
* of Object[].
*
* @return the result list
- * @throws HibernateException
*/
- public List list() throws HibernateException;
+ public List list();
+
/**
* Convenience method to return a single instance that matches
* the query, or null if the query returns no results.
*
* @return the single result or null
+ *
* @throws NonUniqueResultException if there is more than one matching result
*/
- public Object uniqueResult() throws HibernateException;
+ public Object uniqueResult();
/**
* Execute the update or delete statement.
- *
- * The semantics are compliant with the ejb3 Query.executeUpdate()
- * method.
*
+ * The semantics are compliant with the ejb3 Query.executeUpdate() method.
+ *
* @return The number of entities updated or deleted.
- * @throws HibernateException
*/
- public int executeUpdate() throws HibernateException;
+ public int executeUpdate();
/**
- * Set the maximum number of rows to retrieve. If not set,
- * there is no limit to the number of rows retrieved.
- * @param maxResults the maximum number of rows
- */
- public Query setMaxResults(int maxResults);
- /**
- * Set the first row to retrieve. If not set, rows will be
- * retrieved beginnning from row 0.
- * @param firstResult a row number, numbered from 0
- */
- public Query setFirstResult(int firstResult);
-
- /**
- * Entities retrieved by this query will be loaded in
- * a read-only mode where Hibernate will never dirty-check
- * them or make changes persistent.
- *
- */
- public Query setReadOnly(boolean readOnly);
-
- /**
- * Enable caching of this query result set.
- * @param cacheable Should the query results be cacheable?
- */
- public Query setCacheable(boolean cacheable);
-
- /**
- * Set the name of the cache region.
- * @param cacheRegion the name of a query cache region, or null
- * for the default query cache
- */
- public Query setCacheRegion(String cacheRegion);
-
- /**
- * Set a timeout for the underlying JDBC query.
- * @param timeout the timeout in seconds
- */
- public Query setTimeout(int timeout);
- /**
- * Set a fetch size for the underlying JDBC query.
- * @param fetchSize the fetch size
- */
- public Query setFetchSize(int fetchSize);
-
- /**
- * Set the lockmode for the objects idententified by the
- * given alias that appears in the FROM clause.
- * @param alias a query alias, or this for a collection filter
- */
- public Query setLockMode(String alias, LockMode lockMode);
-
- /**
- * Add a comment to the generated SQL.
- * @param comment a human-readable string
- */
- public Query setComment(String comment);
-
- /**
- * Override the current session flush mode, just for
- * this query.
- * @see org.hibernate.FlushMode
- */
- public Query setFlushMode(FlushMode flushMode);
-
- /**
- * Override the current session cache mode, just for
- * this query.
- * @see org.hibernate.CacheMode
- */
- public Query setCacheMode(CacheMode cacheMode);
-
- /**
* Bind a value to a JDBC-style query parameter.
+ *
* @param position the position of the parameter in the query
* string, numbered from 0.
* @param val the possibly-null parameter value
* @param type the Hibernate type
+ *
+ * @return {@code this}, for method chaining
*/
public Query setParameter(int position, Object val, Type type);
+
/**
* Bind a value to a named query parameter.
+ *
* @param name the name of the parameter
* @param val the possibly-null parameter value
* @param type the Hibernate type
+ *
+ * @return {@code this}, for method chaining
*/
public Query setParameter(String name, Object val, Type type);
/**
* Bind a value to a JDBC-style query parameter. The Hibernate type of the parameter is
* first detected via the usage/position in the query and if not sufficient secondly
* guessed from the class of the given object.
+ *
* @param position the position of the parameter in the query
* string, numbered from 0.
* @param val the non-null parameter value
- * @throws org.hibernate.HibernateException if no type could be determined
+ *
+ * @return {@code this}, for method chaining
*/
- public Query setParameter(int position, Object val) throws HibernateException;
+ public Query setParameter(int position, Object val);
+
/**
* Bind a value to a named query parameter. The Hibernate type of the parameter is
* first detected via the usage/position in the query and if not sufficient secondly
* guessed from the class of the given object.
+ *
* @param name the name of the parameter
* @param val the non-null parameter value
- * @throws org.hibernate.HibernateException if no type could be determined
+ *
+ * @return {@code this}, for method chaining
*/
- public Query setParameter(String name, Object val) throws HibernateException;
+ public Query setParameter(String name, Object val);
/**
- * Bind values and types to positional parameters.
+ * Bind values and types to positional parameters. Allows binding more than one at a time; no real performance
+ * impact.
+ *
+ * The number of elements in each array should match. That is, element number-0 in types array corresponds to
+ * element-0 in the values array, etc,
+ *
+ * @param types The types
+ * @param values The values
+ *
+ * @return {@code this}, for method chaining
*/
- public Query setParameters(Object[] values, Type[] types) throws HibernateException;
+ public Query setParameters(Object[] values, Type[] types);
/**
* Bind multiple values to a named query parameter. This is useful for binding
* a list of values to an expression such as foo.bar in (:value_list).
+ *
* @param name the name of the parameter
- * @param vals a collection of values to list
+ * @param values a collection of values to list
* @param type the Hibernate type of the values
+ *
+ * @return {@code this}, for method chaining
*/
- public Query setParameterList(String name, Collection vals, Type type) throws HibernateException;
+ public Query setParameterList(String name, Collection values, Type type);
/**
* Bind multiple values to a named query parameter. The Hibernate type of the parameter is
* first detected via the usage/position in the query and if not sufficient secondly
* guessed from the class of the first object in the collection. This is useful for binding a list of values
* to an expression such as foo.bar in (:value_list).
+ *
* @param name the name of the parameter
- * @param vals a collection of values to list
+ * @param values a collection of values to list
+ *
+ * @return {@code this}, for method chaining
*/
- public Query setParameterList(String name, Collection vals) throws HibernateException;
+ public Query setParameterList(String name, Collection values);
/**
* Bind multiple values to a named query parameter. This is useful for binding
* a list of values to an expression such as foo.bar in (:value_list).
+ *
* @param name the name of the parameter
- * @param vals a collection of values to list
+ * @param values a collection of values to list
* @param type the Hibernate type of the values
+ *
+ * @return {@code this}, for method chaining
*/
- public Query setParameterList(String name, Object[] vals, Type type) throws HibernateException;
+ public Query setParameterList(String name, Object[] values, Type type);
/**
* Bind multiple values to a named query parameter. The Hibernate type of the parameter is
* first detected via the usage/position in the query and if not sufficient secondly
* guessed from the class of the first object in the array. This is useful for binding a list of values
* to an expression such as foo.bar in (:value_list).
+ *
* @param name the name of the parameter
- * @param vals a collection of values to list
+ * @param values a collection of values to list
+ *
+ * @return {@code this}, for method chaining
*/
- public Query setParameterList(String name, Object[] vals) throws HibernateException;
+ public Query setParameterList(String name, Object[] values);
/**
* Bind the property values of the given bean to named parameters of the query,
* matching property names with parameter names and mapping property types to
- * Hibernate types using hueristics.
+ * Hibernate types using heuristics.
+ *
* @param bean any JavaBean or POJO
+ *
+ * @return {@code this}, for method chaining
*/
- public Query setProperties(Object bean) throws HibernateException;
+ public Query setProperties(Object bean);
/**
* Bind the values of the given Map for each named parameters of the query,
* matching key names with parameter names and mapping value types to
- * Hibernate types using hueristics.
+ * Hibernate types using heuristics.
+ *
* @param bean a java.util.Map
+ *
+ * @return {@code this}, for method chaining
*/
- public Query setProperties(Map bean) throws HibernateException;
+ public Query setProperties(Map bean);
+ /**
+ * Bind a positional String-valued parameter.
+ *
+ * @param position The parameter position
+ * @param val The bind value
+ *
+ * @return {@code this}, for method chaining
+ */
public Query setString(int position, String val);
+
+ /**
+ * Bind a positional char-valued parameter.
+ *
+ * @param position The parameter position
+ * @param val The bind value
+ *
+ * @return {@code this}, for method chaining
+ */
public Query setCharacter(int position, char val);
+
+ /**
+ * Bind a positional boolean-valued parameter.
+ *
+ * @param position The parameter position
+ * @param val The bind value
+ *
+ * @return {@code this}, for method chaining
+ */
public Query setBoolean(int position, boolean val);
+
+ /**
+ * Bind a positional byte-valued parameter.
+ *
+ * @param position The parameter position
+ * @param val The bind value
+ *
+ * @return {@code this}, for method chaining
+ */
public Query setByte(int position, byte val);
+
+ /**
+ * Bind a positional short-valued parameter.
+ *
+ * @param position The parameter position
+ * @param val The bind value
+ *
+ * @return {@code this}, for method chaining
+ */
public Query setShort(int position, short val);
+
+ /**
+ * Bind a positional int-valued parameter.
+ *
+ * @param position The parameter position
+ * @param val The bind value
+ *
+ * @return {@code this}, for method chaining
+ */
public Query setInteger(int position, int val);
+
+ /**
+ * Bind a positional long-valued parameter.
+ *
+ * @param position The parameter position
+ * @param val The bind value
+ *
+ * @return {@code this}, for method chaining
+ */
public Query setLong(int position, long val);
+
+ /**
+ * Bind a positional float-valued parameter.
+ *
+ * @param position The parameter position
+ * @param val The bind value
+ *
+ * @return {@code this}, for method chaining
+ */
public Query setFloat(int position, float val);
+
+ /**
+ * Bind a positional double-valued parameter.
+ *
+ * @param position The parameter position
+ * @param val The bind value
+ *
+ * @return {@code this}, for method chaining
+ */
public Query setDouble(int position, double val);
+
+ /**
+ * Bind a positional binary-valued parameter.
+ *
+ * @param position The parameter position
+ * @param val The bind value
+ *
+ * @return {@code this}, for method chaining
+ */
public Query setBinary(int position, byte[] val);
+
+ /**
+ * Bind a positional String-valued parameter using streaming.
+ *
+ * @param position The parameter position
+ * @param val The bind value
+ *
+ * @return {@code this}, for method chaining
+ */
public Query setText(int position, String val);
+
+ /**
+ * Bind a positional binary-valued parameter using serialization.
+ *
+ * @param position The parameter position
+ * @param val The bind value
+ *
+ * @return {@code this}, for method chaining
+ */
public Query setSerializable(int position, Serializable val);
+
+ /**
+ * Bind a positional Locale-valued parameter.
+ *
+ * @param position The parameter position
+ * @param locale The bind value
+ *
+ * @return {@code this}, for method chaining
+ */
public Query setLocale(int position, Locale locale);
+
+ /**
+ * Bind a positional BigDecimal-valued parameter.
+ *
+ * @param position The parameter position
+ * @param number The bind value
+ *
+ * @return {@code this}, for method chaining
+ */
public Query setBigDecimal(int position, BigDecimal number);
+
+ /**
+ * Bind a positional BigDecimal-valued parameter.
+ *
+ * @param position The parameter position
+ * @param number The bind value
+ *
+ * @return {@code this}, for method chaining
+ */
public Query setBigInteger(int position, BigInteger number);
+ /**
+ * Bind a positional Date-valued parameter using just the Date portion.
+ *
+ * @param position The parameter position
+ * @param date The bind value
+ *
+ * @return {@code this}, for method chaining
+ */
public Query setDate(int position, Date date);
+
+ /**
+ * Bind a positional Date-valued parameter using just the Time portion.
+ *
+ * @param position The parameter position
+ * @param date The bind value
+ *
+ * @return {@code this}, for method chaining
+ */
public Query setTime(int position, Date date);
+
+ /**
+ * Bind a positional Date-valued parameter using the full Timestamp.
+ *
+ * @param position The parameter position
+ * @param date The bind value
+ *
+ * @return {@code this}, for method chaining
+ */
public Query setTimestamp(int position, Date date);
+ /**
+ * Bind a positional Calendar-valued parameter using the full Timestamp portion.
+ *
+ * @param position The parameter position
+ * @param calendar The bind value
+ *
+ * @return {@code this}, for method chaining
+ */
public Query setCalendar(int position, Calendar calendar);
+
+ /**
+ * Bind a positional Calendar-valued parameter using just the Date portion.
+ *
+ * @param position The parameter position
+ * @param calendar The bind value
+ *
+ * @return {@code this}, for method chaining
+ */
public Query setCalendarDate(int position, Calendar calendar);
+ /**
+ * Bind a named String-valued parameter.
+ *
+ * @param name The parameter name
+ * @param val The bind value
+ *
+ * @return {@code this}, for method chaining
+ */
public Query setString(String name, String val);
+
+ /**
+ * Bind a named char-valued parameter.
+ *
+ * @param name The parameter name
+ * @param val The bind value
+ *
+ * @return {@code this}, for method chaining
+ */
public Query setCharacter(String name, char val);
+
+ /**
+ * Bind a named boolean-valued parameter.
+ *
+ * @param name The parameter name
+ * @param val The bind value
+ *
+ * @return {@code this}, for method chaining
+ */
public Query setBoolean(String name, boolean val);
+
+ /**
+ * Bind a named byte-valued parameter.
+ *
+ * @param name The parameter name
+ * @param val The bind value
+ *
+ * @return {@code this}, for method chaining
+ */
public Query setByte(String name, byte val);
+
+ /**
+ * Bind a named short-valued parameter.
+ *
+ * @param name The parameter name
+ * @param val The bind value
+ *
+ * @return {@code this}, for method chaining
+ */
public Query setShort(String name, short val);
+
+ /**
+ * Bind a named int-valued parameter.
+ *
+ * @param name The parameter name
+ * @param val The bind value
+ *
+ * @return {@code this}, for method chaining
+ */
public Query setInteger(String name, int val);
+
+ /**
+ * Bind a named long-valued parameter.
+ *
+ * @param name The parameter name
+ * @param val The bind value
+ *
+ * @return {@code this}, for method chaining
+ */
public Query setLong(String name, long val);
+
+ /**
+ * Bind a named float-valued parameter.
+ *
+ * @param name The parameter name
+ * @param val The bind value
+ *
+ * @return {@code this}, for method chaining
+ */
public Query setFloat(String name, float val);
+
+ /**
+ * Bind a named double-valued parameter.
+ *
+ * @param name The parameter name
+ * @param val The bind value
+ *
+ * @return {@code this}, for method chaining
+ */
public Query setDouble(String name, double val);
+
+ /**
+ * Bind a named binary-valued parameter.
+ *
+ * @param name The parameter name
+ * @param val The bind value
+ *
+ * @return {@code this}, for method chaining
+ */
public Query setBinary(String name, byte[] val);
+
+ /**
+ * Bind a named String-valued parameter using streaming.
+ *
+ * @param name The parameter name
+ * @param val The bind value
+ *
+ * @return {@code this}, for method chaining
+ */
public Query setText(String name, String val);
+
+ /**
+ * Bind a named binary-valued parameter using serialization.
+ *
+ * @param name The parameter name
+ * @param val The bind value
+ *
+ * @return {@code this}, for method chaining
+ */
public Query setSerializable(String name, Serializable val);
+
+ /**
+ * Bind a named Locale-valued parameter.
+ *
+ * @param name The parameter name
+ * @param locale The bind value
+ *
+ * @return {@code this}, for method chaining
+ */
public Query setLocale(String name, Locale locale);
+
+ /**
+ * Bind a named BigDecimal-valued parameter.
+ *
+ * @param name The parameter name
+ * @param number The bind value
+ *
+ * @return {@code this}, for method chaining
+ */
public Query setBigDecimal(String name, BigDecimal number);
+
+ /**
+ * Bind a named BigInteger-valued parameter.
+ *
+ * @param name The parameter name
+ * @param number The bind value
+ *
+ * @return {@code this}, for method chaining
+ */
public Query setBigInteger(String name, BigInteger number);
+ /**
+ * Bind the date (time is truncated) of a given Date object to a named query parameter.
+ *
+ * @param name The name of the parameter
+ * @param date The date object
+ *
+ * @return {@code this}, for method chaining
+ */
public Query setDate(String name, Date date);
+
+ /**
+ * Bind the time (date is truncated) of a given Date object to a named query parameter.
+ *
+ * @param name The name of the parameter
+ * @param date The date object
+ *
+ * @return {@code this}, for method chaining
+ */
public Query setTime(String name, Date date);
+
+ /**
+ * Bind the date and the time of a given Date object to a named query parameter.
+ *
+ * @param name The name of the parameter
+ * @param date The date object
+ *
+ * @return {@code this}, for method chaining
+ */
public Query setTimestamp(String name, Date date);
+ /**
+ * Bind a named Calendar-valued parameter using the full Timestamp.
+ *
+ * @param name The parameter name
+ * @param calendar The bind value
+ *
+ * @return {@code this}, for method chaining
+ */
public Query setCalendar(String name, Calendar calendar);
+
+ /**
+ * Bind a named Calendar-valued parameter using just the Date portion.
+ *
+ * @param name The parameter name
+ * @param calendar The bind value
+ *
+ * @return {@code this}, for method chaining
+ */
public Query setCalendarDate(String name, Calendar calendar);
/**
* Bind an instance of a mapped persistent class to a JDBC-style query parameter.
+ * Use {@link #setParameter(int, Object)} for null values.
+ *
* @param position the position of the parameter in the query
* string, numbered from 0.
* @param val a non-null instance of a persistent class
+ *
+ * @return {@code this}, for method chaining
*/
- public Query setEntity(int position, Object val); // use setParameter for null values
+ public Query setEntity(int position, Object val);
/**
- * Bind an instance of a mapped persistent class to a named query parameter.
+ * Bind an instance of a mapped persistent class to a named query parameter. Use
+ * {@link #setParameter(String, Object)} for null values.
+ *
* @param name the name of the parameter
* @param val a non-null instance of a persistent class
+ *
+ * @return {@code this}, for method chaining
*/
- public Query setEntity(String name, Object val); // use setParameter for null values
+ public Query setEntity(String name, Object val);
/**
* Set a strategy for handling the query results. This can be used to change
* "shape" of the query result.
*
* @param transformer The transformer to apply
- * @return this (for method chaining)
+ * @return this (for method chaining)
*/
public Query setResultTransformer(ResultTransformer transformer);
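
Pulling several of the methods above together, a minimal usage sketch; the session, the Order entity and the parameter values are hypothetical:

    import java.util.Arrays;
    import java.util.List;
    import org.hibernate.LockMode;
    import org.hibernate.Query;
    import org.hibernate.Session;

    public class QueryUsageExample {
        public static List secondPageOfOrders(Session session) {
            Query query = session.createQuery( "from Order o where o.status in (:statuses)" )
                    // bind a collection to an IN list
                    .setParameterList( "statuses", Arrays.asList( "OPEN", "PAID" ) )
                    // paginate: rows 20-39 of the result set
                    .setFirstResult( 20 )
                    .setMaxResults( 20 )
                    // pessimistically lock the rows selected for the "o" alias
                    .setLockMode( "o", LockMode.PESSIMISTIC_WRITE )
                    // shows up in the SQL when hibernate.use_sql_comments is enabled
                    .setComment( "second page of open/paid orders" );
            return query.list();
        }
    }
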
Index: 3rdParty_sources/hibernate-core/org/hibernate/QueryParameterException.java
===================================================================
RCS file: /usr/local/cvsroot/3rdParty_sources/hibernate-core/org/hibernate/QueryParameterException.java,v
diff -u -r1.1 -r1.1.2.1
--- 3rdParty_sources/hibernate-core/org/hibernate/QueryParameterException.java 17 Aug 2012 14:36:38 -0000 1.1
+++ 3rdParty_sources/hibernate-core/org/hibernate/QueryParameterException.java 30 Jul 2014 15:51:00 -0000 1.1.2.1
@@ -1,10 +1,10 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
- * Copyright (c) 2008, Red Hat Middleware LLC or third-party contributors as
+ * Copyright (c) 2008, 2013, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
- * distributed under license by Red Hat Middleware LLC.
+ * distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
@@ -20,30 +20,37 @@
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
- *
*/
package org.hibernate;
/**
- * Parameter invalid or not found in the query
+ * Parameter invalid or not found in the query.
*
* @author Emmanuel Bernard
*/
public class QueryParameterException extends QueryException {
-
- public QueryParameterException(Exception e) {
- super( e );
- }
-
+ /**
+ * Constructs a QueryParameterException using the supplied exception message.
+ *
+ * @param message The message explaining the exception condition
+ */
public QueryParameterException(String message) {
super( message );
}
- public QueryParameterException(String message, Throwable e) {
- super( message, e );
+ /**
+ * Constructs a QueryParameterException
+ *
+ * @param message The message explaining the exception condition
+ * @param queryString The query that led to the exception
+ * @param cause The underlying cause
+ */
+ public QueryParameterException(String message, String queryString, Exception cause) {
+ super( message, queryString, cause );
}
- public QueryParameterException(String message, String queryString) {
- super( message, queryString );
+ @Override
+ protected QueryException generateQueryException(String queryString) {
+ return new QueryParameterException( super.getOriginalMessage(), queryString, this );
}
}
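
As an illustration, binding a name that is not declared in the query is one common way to hit this exception; a minimal sketch with a hypothetical Person entity:

    import org.hibernate.Query;
    import org.hibernate.QueryParameterException;
    import org.hibernate.Session;

    public class QueryParameterExceptionExample {
        public static Query bindName(Session session, String name) {
            Query query = session.createQuery( "from Person p where p.name = :name" );
            try {
                // Misspelled parameter: ":nme" does not occur in the query string
                return query.setString( "nme", name );
            }
            catch (QueryParameterException e) {
                System.err.println( "Bad parameter: " + e.getMessage() );
                return query.setString( "name", name );
            }
        }
    }
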
Fisheye: Tag 1.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/QueryTimeoutException.java'.
Fisheye: Tag 1.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/ResourceClosedException.java'.
Index: 3rdParty_sources/hibernate-core/org/hibernate/SQLQuery.java
===================================================================
RCS file: /usr/local/cvsroot/3rdParty_sources/hibernate-core/org/hibernate/SQLQuery.java,v
diff -u -r1.1 -r1.1.2.1
--- 3rdParty_sources/hibernate-core/org/hibernate/SQLQuery.java 17 Aug 2012 14:36:39 -0000 1.1
+++ 3rdParty_sources/hibernate-core/org/hibernate/SQLQuery.java 30 Jul 2014 15:51:00 -0000 1.1.2.1
@@ -1,10 +1,10 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
- * Copyright (c) 2008, Red Hat Middleware LLC or third-party contributors as
+ * Copyright (c) 2010, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
- * distributed under license by Red Hat Middleware LLC.
+ * distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
@@ -20,97 +20,327 @@
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
- *
*/
package org.hibernate;
+import java.util.List;
+import org.hibernate.engine.query.spi.sql.NativeSQLQueryReturn;
import org.hibernate.type.Type;
/**
- * Allows the user to declare the types and select list injection
- * points of all entities returned by the query. Also allows
- * declaration of the type and column alias of any scalar results
- * of the query.
+ * Represents a "native sql" query.
+ *
+ * Allows the user to define certain aspects about its execution, such as:
+ * - result-set value mapping (see below)
+ * - Tables used via {@link #addSynchronizedQuerySpace}, {@link #addSynchronizedEntityName} and
+ *   {@link #addSynchronizedEntityClass}. This allows Hibernate to know how to properly deal with
+ *   auto-flush checking as well as cached query results if the results of the query are being
+ *   cached.
+ *
+ * In terms of result-set mapping, there are 3 approaches to defining:
+ * - If this represents a named sql query, the mapping could be associated with the query as part
+ *   of its metadata
+ * - A pre-defined (defined in metadata and named) mapping can be associated with
+ *   {@link #setResultSetMapping}
+ * - Defined locally per the various {@link #addEntity}, {@link #addRoot}, {@link #addJoin},
+ *   {@link #addFetch} and {@link #addScalar} methods
+ *
*
* @author Gavin King
+ * @author Steve Ebersole
*/
-public interface SQLQuery extends Query {
+public interface SQLQuery extends Query, SynchronizeableQuery {
+ @Override
+ SQLQuery addSynchronizedQuerySpace(String querySpace);
+
+ @Override
+ SQLQuery addSynchronizedEntityName(String entityName) throws MappingException;
+
+ @Override
+ SQLQuery addSynchronizedEntityClass(Class entityClass) throws MappingException;
+
/**
- * Declare a "root" entity, without specifying an alias
+ * Use a predefined named result-set mapping. This might be defined by a {@code } element in a
+ * Hibernate hbm.xml file or through a {@link javax.persistence.SqlResultSetMapping} annotation.
+ *
+ * @param name The name of the mapping to use.
+ *
+ * @return this, for method chaining
*/
- public SQLQuery addEntity(String entityName);
+ public SQLQuery setResultSetMapping(String name);
+
/**
- * Declare a "root" entity
+ * Is this native-SQL query known to be callable?
+ *
+ * @return {@code true} if the query is known to be callable; {@code false} otherwise.
*/
- public SQLQuery addEntity(String alias, String entityName);
+ public boolean isCallable();
+
/**
- * Declare a "root" entity, specifying a lock mode
+ * Retrieve the returns associated with this query.
+ *
+ * @return The return descriptors
*/
- public SQLQuery addEntity(String alias, String entityName, LockMode lockMode);
+ public List getQueryReturns();
+
/**
- * Declare a "root" entity, without specifying an alias
+ * Declare a scalar query result. Hibernate will attempt to automatically detect the underlying type.
+ *
+ * Functions like {@code } in {@code hbm.xml} or {@link javax.persistence.ColumnResult}
+ *
+ * @param columnAlias The column alias in the result-set to be processed as a scalar result
+ *
+ * @return {@code this}, for method chaining
*/
- public SQLQuery addEntity(Class entityClass);
+ public SQLQuery addScalar(String columnAlias);
+
/**
- * Declare a "root" entity
+ * Declare a scalar query result.
+ *
+ * Functions like {@code } in {@code hbm.xml} or {@link javax.persistence.ColumnResult}
+ *
+ * @param columnAlias The column alias in the result-set to be processed as a scalar result
+ * @param type The Hibernate type as which to treat the value.
+ *
+ * @return {@code this}, for method chaining
*/
- public SQLQuery addEntity(String alias, Class entityClass);
+ public SQLQuery addScalar(String columnAlias, Type type);
+
/**
- * Declare a "root" entity, specifying a lock mode
+ * Add a new root return mapping, returning a {@link RootReturn} to allow further definition.
+ *
+ * @param tableAlias The SQL table alias to map to this entity
+ * @param entityName The name of the entity.
+ *
+ * @return The return config object for further control.
+ *
+ * @since 3.6
*/
- public SQLQuery addEntity(String alias, Class entityClass, LockMode lockMode);
+ public RootReturn addRoot(String tableAlias, String entityName);
/**
- * Declare a "joined" entity
+ * Add a new root return mapping, returning a {@link RootReturn} to allow further definition.
+ *
+ * @param tableAlias The SQL table alias to map to this entity
+ * @param entityType The java type of the entity.
+ *
+ * @return The return config object for further control.
+ *
+ * @since 3.6
*/
- public SQLQuery addJoin(String alias, String path);
+ public RootReturn addRoot(String tableAlias, Class entityType);
+
/**
- * Declare a "joined" entity, specifying a lock mode
+ * Declare a "root" entity, without specifying an alias. The expectation here is that the table alias is the
+ * same as the unqualified entity name
+ *
+ * Use {@link #addRoot} if you need further control of the mapping
+ *
+ * @param entityName The entity name that is the root return of the query.
+ *
+ * @return {@code this}, for method chaining
*/
- public SQLQuery addJoin(String alias, String path, LockMode lockMode);
-
+ public SQLQuery addEntity(String entityName);
+
/**
- * Declare a scalar query result
+ * Declare a "root" entity.
+ *
+ * @param tableAlias The SQL table alias
+ * @param entityName The entity name
+ *
+ * @return {@code this}, for method chaining
*/
- public SQLQuery addScalar(String columnAlias, Type type);
+ public SQLQuery addEntity(String tableAlias, String entityName);
/**
- * Declare a scalar query. Hibernate will attempt to automatically detect the underlying type.
+ * Declare a "root" entity, specifying a lock mode.
+ *
+ * @param tableAlias The SQL table alias
+ * @param entityName The entity name
+ * @param lockMode The lock mode for this return.
+ *
+ * @return {@code this}, for method chaining
*/
- public SQLQuery addScalar(String columnAlias);
+ public SQLQuery addEntity(String tableAlias, String entityName, LockMode lockMode);
/**
- * Use a predefined named ResultSetMapping
+ * Declare a "root" entity, without specifying an alias. The expectation here is that the table alias is the
+ * same as the unqualified entity name
+ *
+ * @param entityType The java type of the entity to add as a root
+ *
+ * @return {@code this}, for method chaining
*/
- public SQLQuery setResultSetMapping(String name);
+ public SQLQuery addEntity(Class entityType);
/**
- * Adds a query space for auto-flush synchronization.
+ * Declare a "root" entity.
*
- * @param querySpace The query space to be auto-flushed for this query.
- * @return this, for method chaning
+ * @param tableAlias The SQL table alias
+ * @param entityType The java type of the entity to add as a root
+ *
+ * @return {@code this}, for method chaining
*/
- public SQLQuery addSynchronizedQuerySpace(String querySpace);
+ public SQLQuery addEntity(String tableAlias, Class entityType);
/**
- * Adds an entity name or auto-flush synchronization.
+ * Declare a "root" entity, specifying a lock mode.
*
- * @param entityName The name of the entity upon whose defined
- * query spaces we should additionally synchronize.
- * @return this, for method chaning
- * @throws MappingException Indicates the given entity name could not be
- * resolved.
+ * @param tableAlias The SQL table alias
+ * @param entityName The entity name
+ * @param lockMode The lock mode for this return.
+ *
+ * @return {@code this}, for method chaining
*/
- public SQLQuery addSynchronizedEntityName(String entityName) throws MappingException;
+ public SQLQuery addEntity(String tableAlias, Class entityName, LockMode lockMode);
/**
- * Adds an entity name or auto-flush synchronization.
+ * Declare a join fetch result.
*
- * @param entityClass The class of the entity upon whose defined
- * query spaces we should additionally synchronize.
- * @return this, for method chaning
- * @throws MappingException Indicates the given entity class could not be
- * resolved.
+ * @param tableAlias The SQL table alias for the data to be mapped to this fetch
+ * @param ownerTableAlias Identify the table alias of the owner of this association. Should match the alias of a
+ * previously added root or fetch
+ * @param joinPropertyName The name of the property being join fetched.
+ *
+ * @return The return config object for further control.
+ *
+ * @since 3.6
*/
- public SQLQuery addSynchronizedEntityClass(Class entityClass) throws MappingException;
+ public FetchReturn addFetch(String tableAlias, String ownerTableAlias, String joinPropertyName);
+
+ /**
+ * Declare a join fetch result.
+ *
+ * @param tableAlias The SQL table alias for the data to be mapped to this fetch
+ * @param path The association path ([owner-alias].[property-name]).
+ *
+ * @return {@code this}, for method chaining
+ */
+ public SQLQuery addJoin(String tableAlias, String path);
+
+ /**
+ * Declare a join fetch result.
+ *
+ * @param tableAlias The SQL table alias for the data to be mapped to this fetch
+ * @param ownerTableAlias Identify the table alias of the owner of this association. Should match the alias of a
+ * previously added root or fetch
+ * @param joinPropertyName The name of the property being join fetched.
+ *
+ * @return {@code this}, for method chaining
+ *
+ * @since 3.6
+ */
+ public SQLQuery addJoin(String tableAlias, String ownerTableAlias, String joinPropertyName);
+
+ /**
+ * Declare a join fetch result, specifying a lock mode.
+ *
+ * @param tableAlias The SQL table alias for the data to be mapped to this fetch
+ * @param path The association path ([owner-alias].[property-name]).
+ * @param lockMode The lock mode for this return.
+ *
+ * @return {@code this}, for method chaining
+ */
+ public SQLQuery addJoin(String tableAlias, String path, LockMode lockMode);
+
+ /**
+ * Allows access to further control how properties within a root or join fetch are mapped back from the result set.
+ * Generally used in composite value scenarios.
+ */
+ public static interface ReturnProperty {
+ /**
+ * Add a column alias to this property mapping.
+ *
+ * @param columnAlias The column alias.
+ *
+ * @return {@code this}, for method chaining
+ */
+ public ReturnProperty addColumnAlias(String columnAlias);
+ }
+
+ /**
+ * Allows access to further control how root returns are mapped back from result sets.
+ */
+ public static interface RootReturn {
+ /**
+ * Set the lock mode for this return.
+ *
+ * @param lockMode The new lock mode.
+ *
+ * @return {@code this}, for method chaining
+ */
+ public RootReturn setLockMode(LockMode lockMode);
+
+ /**
+ * Name the column alias that identifies the entity's discriminator.
+ *
+ * @param columnAlias The discriminator column alias
+ *
+ * @return {@code this}, for method chaining
+ */
+ public RootReturn setDiscriminatorAlias(String columnAlias);
+
+ /**
+ * Add a simple property-to-one-column mapping.
+ *
+ * @param propertyName The name of the property.
+ * @param columnAlias The name of the column
+ *
+ * @return {@code this}, for method chaining
+ */
+ public RootReturn addProperty(String propertyName, String columnAlias);
+
+ /**
+ * Add a property, presumably with more than one column.
+ *
+ * @param propertyName The name of the property.
+ *
+ * @return The config object for further control.
+ */
+ public ReturnProperty addProperty(String propertyName);
+ }
+
+ /**
+ * Allows access to further control how join fetch returns are mapped back from result sets.
+ */
+ public static interface FetchReturn {
+ /**
+ * Set the lock mode for this return.
+ *
+ * @param lockMode The new lock mode.
+ *
+ * @return {@code this}, for method chaining
+ */
+ public FetchReturn setLockMode(LockMode lockMode);
+
+ /**
+ * Add a simple property-to-one-column mapping.
+ *
+ * @param propertyName The name of the property.
+ * @param columnAlias The name of the column
+ *
+ * @return {@code this}, for method chaining
+ */
+ public FetchReturn addProperty(String propertyName, String columnAlias);
+
+ /**
+ * Add a property, presumably with more than one column.
+ *
+ * @param propertyName The name of the property.
+ *
+ * @return The config object for further control.
+ */
+ public ReturnProperty addProperty(String propertyName);
+ }
}
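
A minimal sketch of the locally-defined result-set mapping described above; the SQL, table aliases and the Order entity are hypothetical:

    import java.util.List;
    import org.hibernate.SQLQuery;
    import org.hibernate.Session;
    import org.hibernate.type.StandardBasicTypes;

    public class SqlQueryUsageExample {
        public static List ordersWithCustomers(Session session) {
            SQLQuery query = session.createSQLQuery(
                    "select {o.*}, {c.*}, o.total as order_total "
                    + "from orders o join customers c on c.id = o.customer_id" )
                    // map the "o" columns to the Order entity and join-fetch its customer
                    .addEntity( "o", "com.example.Order" )
                    .addJoin( "c", "o.customer" )
                    // additionally expose one column as a scalar result
                    .addScalar( "order_total", StandardBasicTypes.BIG_DECIMAL );
            return query.list();
        }
    }
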
Index: 3rdParty_sources/hibernate-core/org/hibernate/ScrollableResults.java
===================================================================
RCS file: /usr/local/cvsroot/3rdParty_sources/hibernate-core/org/hibernate/ScrollableResults.java,v
diff -u -r1.1 -r1.1.2.1
--- 3rdParty_sources/hibernate-core/org/hibernate/ScrollableResults.java 17 Aug 2012 14:36:39 -0000 1.1
+++ 3rdParty_sources/hibernate-core/org/hibernate/ScrollableResults.java 30 Jul 2014 15:50:59 -0000 1.1.2.1
@@ -1,10 +1,10 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
- * Copyright (c) 2008, Red Hat Middleware LLC or third-party contributors as
+ * Copyright (c) 2013, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
- * distributed under license by Red Hat Middleware LLC.
+ * distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
@@ -20,10 +20,8 @@
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
- *
*/
package org.hibernate;
-
import java.math.BigDecimal;
import java.math.BigInteger;
import java.sql.Blob;
@@ -45,179 +43,335 @@
* Contrary to JDBC, columns of results are numbered from zero.
*
* @see Query#scroll()
+ *
* @author Gavin King
*/
public interface ScrollableResults {
/**
- * Advance to the next result
- * @return true if there is another result
+ * Advance to the next result.
+ *
+ * @return {@code true} if there is another result
*/
- public boolean next() throws HibernateException;
+ public boolean next();
+
/**
- * Retreat to the previous result
- * @return true if there is a previous result
+ * Retreat to the previous result.
+ *
+ * @return {@code true} if there is a previous result
*/
- public boolean previous() throws HibernateException;
+ public boolean previous();
+
/**
- * Scroll an arbitrary number of locations
- * @param i a positive (forward) or negative (backward) number of rows
- * @return true if there is a result at the new location
+ * Scroll the specified number of positions from the current position.
+ *
+ * @param positions a positive (forward) or negative (backward) number of rows
+ *
+ * @return {@code true} if there is a result at the new location
*/
- public boolean scroll(int i) throws HibernateException;
+ public boolean scroll(int positions);
+
/**
- * Go to the last result
- * @return true if there are any results
+ * Go to the last result.
+ *
+ * @return {@code true} if there are any results
*/
- public boolean last() throws HibernateException;
+ public boolean last();
+
/**
- * Go to the first result
- * @return true if there are any results
+ * Go to the first result.
+ *
+ * @return {@code true} if there are any results
*/
- public boolean first() throws HibernateException;
+ public boolean first();
+
/**
- * Go to a location just before first result (this is the initial location)
+	 * Go to a location just before the first result. This is the location of the cursor on a newly returned
+	 * scrollable result.
*/
- public void beforeFirst() throws HibernateException;
+ public void beforeFirst();
+
/**
- * Go to a location just after the last result
+ * Go to a location just after the last result.
*/
- public void afterLast() throws HibernateException;
+ public void afterLast();
+
/**
* Is this the first result?
*
- * @return true if this is the first row of results
- * @throws HibernateException
+ * @return {@code true} if this is the first row of results, otherwise {@code false}
*/
- public boolean isFirst() throws HibernateException;
+ public boolean isFirst();
+
/**
* Is this the last result?
*
- * @return true if this is the last row of results
- * @throws HibernateException
+ * @return {@code true} if this is the last row of results.
*/
- public boolean isLast() throws HibernateException;
+ public boolean isLast();
+
/**
+ * Get the current position in the results. The first position is number 0 (unlike JDBC).
+ *
+ * @return The current position number, numbered from 0; -1 indicates that there is no current row
+ */
+ public int getRowNumber();
+
+ /**
+ * Set the current position in the result set. Can be numbered from the first position (positive number) or
+ * the last row (negative number).
+ *
+ * @param rowNumber the row number. A positive number indicates a value numbered from the first row; a
+ * negative number indicates a value numbered from the last row.
+ *
+ * @return true if there is a row at that row number
+ */
+ public boolean setRowNumber(int rowNumber);
+
+ /**
* Release resources immediately.
*/
- public void close() throws HibernateException;
+ public void close();
+
/**
- * Get the current row of results
- * @return an object or array
+ * Get the current row of results.
+ *
+ * @return The array of results
*/
- public Object[] get() throws HibernateException;
+ public Object[] get();
+
/**
* Get the ith object in the current row of results, without
* initializing any other results in the row. This method may be used
* safely, regardless of the type of the column (ie. even for scalar
* results).
+ *
* @param i the column, numbered from zero
- * @return an object of any Hibernate type or null
+ *
+ * @return The requested result object; may return {@code null}
+ *
+ * @throws IndexOutOfBoundsException If i is an invalid index.
*/
- public Object get(int i) throws HibernateException;
+ public Object get(int i);
/**
- * Get the type of the ith column of results
+ * Get the type of the ith column of results.
+ *
* @param i the column, numbered from zero
+ *
* @return the Hibernate type
+ *
+ * @throws IndexOutOfBoundsException If i is an invalid index.
*/
public Type getType(int i);
/**
- * Convenience method to read an integer
+ * Convenience method to read an integer.
+ *
+ * @param col The column, numbered from zero
+ *
+ * @return The column value as an integer
+ *
+ * @throws IndexOutOfBoundsException If col is an invalid index.
*/
- public Integer getInteger(int col) throws HibernateException;
+ public Integer getInteger(int col);
+
/**
- * Convenience method to read a long
+ * Convenience method to read a long.
+ *
+ * @param col The column, numbered from zero
+ *
+ * @return The column value as a long
+ *
+ * @throws IndexOutOfBoundsException If col is an invalid index.
*/
- public Long getLong(int col) throws HibernateException;
+ public Long getLong(int col);
+
/**
- * Convenience method to read a float
+ * Convenience method to read a float.
+ *
+ * @param col The column, numbered from zero
+ *
+ * @return The column value as a float
+ *
+ * @throws IndexOutOfBoundsException If col is an invalid index.
*/
- public Float getFloat(int col) throws HibernateException;
+ public Float getFloat(int col);
+
/**
- * Convenience method to read a boolean
+ * Convenience method to read a boolean.
+ *
+ * @param col The column, numbered from zero
+ *
+ * @return The column value as a boolean
+ *
+ * @throws IndexOutOfBoundsException If col is an invalid index.
*/
- public Boolean getBoolean(int col) throws HibernateException;
+ public Boolean getBoolean(int col);
+
/**
- * Convenience method to read a double
+ * Convenience method to read a double.
+ *
+ * @param col The column, numbered from zero
+ *
+ * @return The column value as a double
+ *
+ * @throws IndexOutOfBoundsException If col is an invalid index.
*/
- public Double getDouble(int col) throws HibernateException;
+ public Double getDouble(int col);
+
/**
- * Convenience method to read a short
+ * Convenience method to read a short.
+ *
+ * @param col The column, numbered from zero
+ *
+ * @return The column value as a short
+ *
+ * @throws IndexOutOfBoundsException If col is an invalid index.
*/
- public Short getShort(int col) throws HibernateException;
+ public Short getShort(int col);
+
/**
- * Convenience method to read a byte
+ * Convenience method to read a byte.
+ *
+ * @param col The column, numbered from zero
+ *
+ * @return The column value as a byte
+ *
+ * @throws IndexOutOfBoundsException If col is an invalid index.
*/
- public Byte getByte(int col) throws HibernateException;
+ public Byte getByte(int col);
+
/**
- * Convenience method to read a character
+ * Convenience method to read a char.
+ *
+ * @param col The column, numbered from zero
+ *
+ * @return The column value as a char
+ *
+ * @throws IndexOutOfBoundsException If col is an invalid index.
*/
- public Character getCharacter(int col) throws HibernateException;
+ public Character getCharacter(int col);
+
/**
- * Convenience method to read a binary
+ * Convenience method to read a binary (byte[]).
+ *
+ * @param col The column, numbered from zero
+ *
+ * @return The column value as a binary (byte[])
+ *
+ * @throws IndexOutOfBoundsException If col is an invalid index.
*/
- public byte[] getBinary(int col) throws HibernateException;
+ public byte[] getBinary(int col);
+
/**
- * Convenience method to read text
+ * Convenience method to read a String using streaming.
+ *
+ * @param col The column, numbered from zero
+ *
+ * @return The column value as a String
+ *
+ * @throws IndexOutOfBoundsException If col is an invalid index.
*/
- public String getText(int col) throws HibernateException;
+ public String getText(int col);
+
/**
- * Convenience method to read a blob
+ * Convenience method to read a blob.
+ *
+ * @param col The column, numbered from zero
+ *
+ * @return The column value as a Blob
+ *
+ * @throws IndexOutOfBoundsException If col is an invalid index.
*/
- public Blob getBlob(int col) throws HibernateException;
+ public Blob getBlob(int col);
+
/**
- * Convenience method to read a clob
+ * Convenience method to read a clob.
+ *
+ * @param col The column, numbered from zero
+ *
+ * @return The column value as a Clob
+ *
+ * @throws IndexOutOfBoundsException If col is an invalid index.
*/
- public Clob getClob(int col) throws HibernateException;
+ public Clob getClob(int col);
+
/**
- * Convenience method to read a string
+ * Convenience method to read a string.
+ *
+ * @param col The column, numbered from zero
+ *
+ * @return The column value as a String
+ *
+ * @throws IndexOutOfBoundsException If col is an invalid index.
*/
- public String getString(int col) throws HibernateException;
+ public String getString(int col);
+
/**
- * Convenience method to read a big_decimal
+ * Convenience method to read a BigDecimal.
+ *
+ * @param col The column, numbered from zero
+ *
+ * @return The column value as a BigDecimal
+ *
+ * @throws IndexOutOfBoundsException If col is an invalid index.
*/
- public BigDecimal getBigDecimal(int col) throws HibernateException;
+ public BigDecimal getBigDecimal(int col);
+
/**
- * Convenience method to read a big_integer
+ * Convenience method to read a BigInteger.
+ *
+ * @param col The column, numbered from zero
+ *
+ * @return The column value as a BigInteger
+ *
+ * @throws IndexOutOfBoundsException If col is an invalid index.
*/
- public BigInteger getBigInteger(int col) throws HibernateException;
+ public BigInteger getBigInteger(int col);
+
/**
- * Convenience method to read a date, time or timestamp
+ * Convenience method to read a Date.
+ *
+ * @param col The column, numbered from zero
+ *
+ * @return The column value as a Date
+ *
+ * @throws IndexOutOfBoundsException If col is an invalid index.
*/
- public Date getDate(int col) throws HibernateException;
+ public Date getDate(int col);
+
/**
- * Convenience method to read a locale
+ * Convenience method to read a Locale.
+ *
+ * @param col The column, numbered from zero
+ *
+ * @return The column value as a Locale
+ *
+ * @throws IndexOutOfBoundsException If col is an invalid index.
*/
- public Locale getLocale(int col) throws HibernateException;
+ public Locale getLocale(int col);
+
/**
- * Convenience method to read a calendar or calendar_date
+ * Convenience method to read a Calendar.
+ *
+ * @param col The column, numbered from zero
+ *
+ * @return The column value as a Calendar
+ *
+ * @throws IndexOutOfBoundsException If col is an invalid index.
*/
- public Calendar getCalendar(int col) throws HibernateException;
+ public Calendar getCalendar(int col);
+
/**
- * Convenience method to read a currency
+ * Convenience method to read a TimeZone.
+ *
+ * @param col The column, numbered from zero
+ *
+ * @return The column value as a TimeZone
+ *
+ * @throws IndexOutOfBoundsException If col is an invalid index.
*/
- //public Currency getCurrency(int col) throws HibernateException;
- /**
- * Convenience method to read a timezone
- */
- public TimeZone getTimeZone(int col) throws HibernateException;
- /**
- * Get the current location in the result set. The first
- * row is number 0, contrary to JDBC.
- * @return the row number, numbered from 0, or -1 if
- * there is no current row
- */
- public int getRowNumber() throws HibernateException;
- /**
- * Set the current location in the result set, numbered from either the
- * first row (row number 0), or the last row (row
- * number -1).
- * @param rowNumber the row number, numbered from the last row, in the
- * case of a negative row number
- * @return true if there is a row at that row number
- */
- public boolean setRowNumber(int rowNumber) throws HibernateException;
+ public TimeZone getTimeZone(int col);
}
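
A typical use of this contract is forward-only iteration over a large result set, reading each row positionally and closing the cursor explicitly. The sketch below is illustrative only; the HQL string "from Customer" is a placeholder for any mapped entity.

import org.hibernate.ScrollMode;
import org.hibernate.ScrollableResults;
import org.hibernate.Session;

public class ScrollSketch {
    // "from Customer" is a placeholder HQL query; any mapped entity works the same way.
    public static long countRows(Session session) {
        ScrollableResults results = session.createQuery( "from Customer" )
                .scroll( ScrollMode.FORWARD_ONLY );
        long rows = 0;
        try {
            while ( results.next() ) {
                Object entity = results.get( 0 );   // column 0 holds the entity for this row
                if ( entity != null ) {
                    rows++;
                }
            }
        }
        finally {
            results.close();                        // release the underlying JDBC resources
        }
        return rows;
    }
}
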
Index: 3rdParty_sources/hibernate-core/org/hibernate/Session.java
===================================================================
RCS file: /usr/local/cvsroot/3rdParty_sources/hibernate-core/org/hibernate/Session.java,v
diff -u -r1.1 -r1.1.2.1
--- 3rdParty_sources/hibernate-core/org/hibernate/Session.java 17 Aug 2012 14:36:40 -0000 1.1
+++ 3rdParty_sources/hibernate-core/org/hibernate/Session.java 30 Jul 2014 15:51:00 -0000 1.1.2.1
@@ -1,10 +1,10 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
- * Copyright (c) 2008, Red Hat Middleware LLC or third-party contributors as
+ * Copyright (c) 2008-2011, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
- * distributed under license by Red Hat Middleware LLC.
+ * distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
@@ -20,13 +20,13 @@
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
- *
*/
package org.hibernate;
import java.io.Serializable;
import java.sql.Connection;
+import org.hibernate.jdbc.ReturningWork;
import org.hibernate.jdbc.Work;
import org.hibernate.stat.SessionStatistics;
@@ -89,30 +89,18 @@
* @see SessionFactory
* @author Gavin King
*/
-public interface Session extends Serializable {
-
+public interface Session extends SharedSessionContract {
/**
- * Retrieve the entity mode in effect for this session.
+ * Obtain a {@link Session} builder with the ability to grab certain information from this session.
*
- * @return The entity mode for this session.
+ * @return The session builder
*/
- public EntityMode getEntityMode();
+ public SharedSessionBuilder sessionWithOptions();
/**
- * Starts a new Session with the given entity mode in effect. This secondary
- * Session inherits the connection, transaction, and other context
- * information from the primary Session. It doesn't need to be flushed
- * or closed by the developer.
- *
- * @param entityMode The entity mode to use for the new session.
- * @return The new session
- */
- public Session getSession(EntityMode entityMode);
-
- /**
* Force this session to flush. Must be called at the end of a
- * unit of work, before commiting the transaction and closing the
- * session (depending on {@link #setFlushMode flush-mode},
+ * unit of work, before committing the transaction and closing the
+ * session (depending on {@link #setFlushMode(FlushMode)},
* {@link Transaction#commit()} calls this method).
*
* Flushing is the process of synchronizing the underlying persistent
@@ -168,26 +156,10 @@
*
* @return The session factory.
* @see SessionFactory
-
*/
public SessionFactory getSessionFactory();
/**
- * Get the JDBC connection of this Session.
- *
- * If the session is using aggressive collection release (as in a
- * CMT environment), it is the application's responsibility to
- * close the connection returned by this call. Otherwise, the
- * application should not close the connection.
- *
- * @return the JDBC connection in use by the Session
- * @throws HibernateException if the Session is disconnected
- * @deprecated (scheduled for removal in 4.x). Replacement depends on need; for doing direct JDBC stuff use
- * {@link #doWork}; for opening a 'temporary Session' use (TBD).
- */
- public Connection connection() throws HibernateException;
-
- /**
* End the session by releasing the JDBC connection and cleaning up. It is
* not strictly necessary to close the session but you must at least
* {@link #disconnect()} it.
@@ -232,6 +204,44 @@
public boolean isDirty() throws HibernateException;
/**
+ * Will entities and proxies that are loaded into this session be made
+ * read-only by default?
+ *
+ * To determine the read-only/modifiable setting for a particular entity
+ * or proxy:
+ * @see Session#isReadOnly(Object)
+ *
+ * @return true, loaded entities/proxies will be made read-only by default;
+ * false, loaded entities/proxies will be made modifiable by default.
+ */
+ public boolean isDefaultReadOnly();
+
+ /**
+ * Change the default for entities and proxies loaded into this session
+	 * from modifiable to read-only mode, or from read-only to modifiable mode.
+ *
+ * Read-only entities are not dirty-checked and snapshots of persistent
+ * state are not maintained. Read-only entities can be modified, but
+ * changes are not persisted.
+ *
+ * When a proxy is initialized, the loaded entity will have the same
+ * read-only/modifiable setting as the uninitialized
+ * proxy has, regardless of the session's current setting.
+ *
+ * To change the read-only/modifiable setting for a particular entity
+ * or proxy that is already in this session:
+ * @see Session#setReadOnly(Object,boolean)
+ *
+ * To override this session's read-only/modifiable setting for entities
+ * and proxies loaded by a Query:
+ * @see Query#setReadOnly(boolean)
+ *
+ * @param readOnly true, the default for loaded entities/proxies is read-only;
+ * false, the default for loaded entities/proxies is modifiable
+ */
+ public void setDefaultReadOnly(boolean readOnly);
+
+ /**
* Return the identifier value of the given entity as associated with this
* session. An exception is thrown if the given entity instance is transient
* or detached in relation to this session.
@@ -241,7 +251,7 @@
* @throws TransientObjectException if the instance is transient or associated with
* a different session
*/
- public Serializable getIdentifier(Object object) throws HibernateException;
+ public Serializable getIdentifier(Object object);
/**
* Check if this instance is associated with this Session.
@@ -256,10 +266,12 @@
* not be synchronized with the database. This operation cascades to associated
* instances if the association is mapped with cascade="evict".
*
- * @param object a persistent instance
- * @throws HibernateException
+ * @param object The entity to evict
+ *
+ * @throws NullPointerException if the passed object is {@code null}
+ * @throws IllegalArgumentException if the passed object is not defined as an entity
*/
- public void evict(Object object) throws HibernateException;
+ public void evict(Object object);
/**
* Return the persistent instance of the given entity class with the given identifier,
@@ -268,25 +280,54 @@
* @param theClass a persistent class
* @param id a valid identifier of an existing persistent instance of the class
* @param lockMode the lock level
+ *
* @return the persistent instance or proxy
- * @throws HibernateException
+ *
+ * @deprecated LockMode parameter should be replaced with LockOptions
*/
- public Object load(Class theClass, Serializable id, LockMode lockMode) throws HibernateException;
+ @Deprecated
+ public Object load(Class theClass, Serializable id, LockMode lockMode);
/**
* Return the persistent instance of the given entity class with the given identifier,
* obtaining the specified lock mode, assuming the instance exists.
*
+ * @param theClass a persistent class
+ * @param id a valid identifier of an existing persistent instance of the class
+ * @param lockOptions contains the lock level
+ * @return the persistent instance or proxy
+ */
+ public Object load(Class theClass, Serializable id, LockOptions lockOptions);
+
+ /**
+ * Return the persistent instance of the given entity class with the given identifier,
+ * obtaining the specified lock mode, assuming the instance exists.
+ *
* @param entityName a persistent class
* @param id a valid identifier of an existing persistent instance of the class
* @param lockMode the lock level
+ *
* @return the persistent instance or proxy
- * @throws HibernateException
+ *
+ * @deprecated LockMode parameter should be replaced with LockOptions
*/
- public Object load(String entityName, Serializable id, LockMode lockMode) throws HibernateException;
+ @Deprecated
+ public Object load(String entityName, Serializable id, LockMode lockMode);
/**
* Return the persistent instance of the given entity class with the given identifier,
+ * obtaining the specified lock mode, assuming the instance exists.
+ *
+ * @param entityName a persistent class
+ * @param id a valid identifier of an existing persistent instance of the class
+ * @param lockOptions contains the lock level
+ *
+ * @return the persistent instance or proxy
+ */
+ public Object load(String entityName, Serializable id, LockOptions lockOptions);
+
+ /**
+ * Return the persistent instance of the given entity class with the given identifier,
* assuming that the instance exists. This method might return a proxied instance that
* is initialized on-demand, when a non-identifier method is accessed.
*
@@ -296,10 +337,10 @@
*
* @param theClass a persistent class
* @param id a valid identifier of an existing persistent instance of the class
+ *
* @return the persistent instance or proxy
- * @throws HibernateException
*/
- public Object load(Class theClass, Serializable id) throws HibernateException;
+ public Object load(Class theClass, Serializable id);
/**
* Return the persistent instance of the given entity class with the given identifier,
@@ -312,114 +353,117 @@
*
* @param entityName a persistent class
* @param id a valid identifier of an existing persistent instance of the class
+ *
* @return the persistent instance or proxy
- * @throws HibernateException
*/
- public Object load(String entityName, Serializable id) throws HibernateException;
+ public Object load(String entityName, Serializable id);
/**
* Read the persistent state associated with the given identifier into the given transient
* instance.
*
* @param object an "empty" instance of the persistent class
* @param id a valid identifier of an existing persistent instance of the class
- * @throws HibernateException
*/
- public void load(Object object, Serializable id) throws HibernateException;
+ public void load(Object object, Serializable id);
/**
* Persist the state of the given detached instance, reusing the current
* identifier value. This operation cascades to associated instances if
- * the association is mapped with cascade="replicate".
+ * the association is mapped with {@code cascade="replicate"}
*
* @param object a detached instance of a persistent class
+ * @param replicationMode The replication mode to use
*/
- public void replicate(Object object, ReplicationMode replicationMode) throws HibernateException;
+ public void replicate(Object object, ReplicationMode replicationMode);
/**
* Persist the state of the given detached instance, reusing the current
* identifier value. This operation cascades to associated instances if
- * the association is mapped with cascade="replicate".
+ * the association is mapped with {@code cascade="replicate"}
*
+ * @param entityName The entity name
* @param object a detached instance of a persistent class
+ * @param replicationMode The replication mode to use
*/
- public void replicate(String entityName, Object object, ReplicationMode replicationMode) throws HibernateException;
+	public void replicate(String entityName, Object object, ReplicationMode replicationMode);
/**
* Persist the given transient instance, first assigning a generated identifier. (Or
* using the current value of the identifier property if the assigned
* generator is used.) This operation cascades to associated instances if the
- * association is mapped with cascade="save-update".
+ * association is mapped with {@code cascade="save-update"}
*
* @param object a transient instance of a persistent class
+ *
* @return the generated identifier
- * @throws HibernateException
*/
- public Serializable save(Object object) throws HibernateException;
+ public Serializable save(Object object);
/**
* Persist the given transient instance, first assigning a generated identifier. (Or
* using the current value of the identifier property if the assigned
* generator is used.) This operation cascades to associated instances if the
- * association is mapped with cascade="save-update".
+ * association is mapped with {@code cascade="save-update"}
*
+ * @param entityName The entity name
* @param object a transient instance of a persistent class
+ *
* @return the generated identifier
- * @throws HibernateException
*/
- public Serializable save(String entityName, Object object) throws HibernateException;
+ public Serializable save(String entityName, Object object);
/**
* Either {@link #save(Object)} or {@link #update(Object)} the given
* instance, depending upon resolution of the unsaved-value checks (see the
* manual for discussion of unsaved-value checking).
*
* This operation cascades to associated instances if the association is mapped
- * with cascade="save-update".
+ * with {@code cascade="save-update"}
*
+ * @param object a transient or detached instance containing new or updated state
+ *
* @see Session#save(java.lang.Object)
* @see Session#update(Object object)
- * @param object a transient or detached instance containing new or updated state
- * @throws HibernateException
*/
- public void saveOrUpdate(Object object) throws HibernateException;
+ public void saveOrUpdate(Object object);
/**
* Either {@link #save(String, Object)} or {@link #update(String, Object)}
* the given instance, depending upon resolution of the unsaved-value checks
* (see the manual for discussion of unsaved-value checking).
*
* This operation cascades to associated instances if the association is mapped
- * with cascade="save-update".
+ * with {@code cascade="save-update"}
*
+ * @param entityName The entity name
+ * @param object a transient or detached instance containing new or updated state
+ *
* @see Session#save(String,Object)
* @see Session#update(String,Object)
- * @param object a transient or detached instance containing new or updated state
- * @throws HibernateException
*/
- public void saveOrUpdate(String entityName, Object object) throws HibernateException;
+ public void saveOrUpdate(String entityName, Object object);
/**
* Update the persistent instance with the identifier of the given detached
* instance. If there is a persistent instance with the same identifier,
* an exception is thrown. This operation cascades to associated instances
- * if the association is mapped with cascade="save-update".
+ * if the association is mapped with {@code cascade="save-update"}
*
* @param object a detached instance containing updated state
- * @throws HibernateException
*/
- public void update(Object object) throws HibernateException;
+ public void update(Object object);
/**
* Update the persistent instance with the identifier of the given detached
* instance. If there is a persistent instance with the same identifier,
* an exception is thrown. This operation cascades to associated instances
- * if the association is mapped with cascade="save-update".
+ * if the association is mapped with {@code cascade="save-update"}
*
+ * @param entityName The entity name
* @param object a detached instance containing updated state
- * @throws HibernateException
*/
- public void update(String entityName, Object object) throws HibernateException;
+ public void update(String entityName, Object object);
/**
* Copy the state of the given object onto the persistent object with the same
@@ -428,14 +472,15 @@
* given instance is unsaved, save a copy of and return it as a newly persistent
* instance. The given instance does not become associated with the session.
* This operation cascades to associated instances if the association is mapped
- * with cascade="merge".
- *
+ * with {@code cascade="merge"}
+ *
* The semantics of this method are defined by JSR-220.
*
* @param object a detached instance with state to be copied
+ *
* @return an updated persistent instance
*/
- public Object merge(Object object) throws HibernateException;
+ public Object merge(Object object);
/**
* Copy the state of the given object onto the persistent object with the same
@@ -444,86 +489,107 @@
* given instance is unsaved, save a copy of and return it as a newly persistent
* instance. The given instance does not become associated with the session.
* This operation cascades to associated instances if the association is mapped
- * with cascade="merge".
- *
+ * with {@code cascade="merge"}
+ *
* The semantics of this method are defined by JSR-220.
*
+ * @param entityName The entity name
* @param object a detached instance with state to be copied
+ *
* @return an updated persistent instance
*/
- public Object merge(String entityName, Object object) throws HibernateException;
+ public Object merge(String entityName, Object object);
/**
* Make a transient instance persistent. This operation cascades to associated
- * instances if the association is mapped with cascade="persist".
- *
+ * instances if the association is mapped with {@code cascade="persist"}
+ *
* The semantics of this method are defined by JSR-220.
*
* @param object a transient instance to be made persistent
*/
- public void persist(Object object) throws HibernateException;
+ public void persist(Object object);
/**
* Make a transient instance persistent. This operation cascades to associated
- * instances if the association is mapped with cascade="persist".
- *
+ * instances if the association is mapped with {@code cascade="persist"}
+ *
* The semantics of this method are defined by JSR-220.
*
+ * @param entityName The entity name
* @param object a transient instance to be made persistent
*/
- public void persist(String entityName, Object object) throws HibernateException;
+ public void persist(String entityName, Object object);
/**
* Remove a persistent instance from the datastore. The argument may be
* an instance associated with the receiving Session or a transient
* instance with an identifier associated with existing persistent state.
* This operation cascades to associated instances if the association is mapped
- * with cascade="delete".
+ * with {@code cascade="delete"}
*
* @param object the instance to be removed
- * @throws HibernateException
*/
- public void delete(Object object) throws HibernateException;
+ public void delete(Object object);
/**
* Remove a persistent instance from the datastore. The object argument may be
* an instance associated with the receiving Session or a transient
* instance with an identifier associated with existing persistent state.
* This operation cascades to associated instances if the association is mapped
- * with cascade="delete".
+ * with {@code cascade="delete"}
*
* @param entityName The entity name for the instance to be removed.
* @param object the instance to be removed
- * @throws HibernateException
*/
- public void delete(String entityName, Object object) throws HibernateException;
+ public void delete(String entityName, Object object);
/**
* Obtain the specified lock level upon the given object. This may be used to
* perform a version check (LockMode.READ), to upgrade to a pessimistic
- * lock (LockMode.UPGRADE), or to simply reassociate a transient instance
+ * lock (LockMode.PESSIMISTIC_WRITE), or to simply reassociate a transient instance
* with a session (LockMode.NONE). This operation cascades to associated
* instances if the association is mapped with cascade="lock".
*
* @param object a persistent or transient instance
* @param lockMode the lock level
- * @throws HibernateException
+ *
+ * @deprecated instead call buildLockRequest(LockMode).lock(object)
*/
- public void lock(Object object, LockMode lockMode) throws HibernateException;
+ @Deprecated
+ public void lock(Object object, LockMode lockMode);
/**
* Obtain the specified lock level upon the given object. This may be used to
- * perform a version check (LockMode.READ), to upgrade to a pessimistic
- * lock (LockMode.UPGRADE), or to simply reassociate a transient instance
+ * perform a version check (LockMode.OPTIMISTIC), to upgrade to a pessimistic
+ * lock (LockMode.PESSIMISTIC_WRITE), or to simply reassociate a transient instance
* with a session (LockMode.NONE). This operation cascades to associated
* instances if the association is mapped with cascade="lock".
*
+ * @param entityName The name of the entity
* @param object a persistent or transient instance
* @param lockMode the lock level
- * @throws HibernateException
+ *
+ * @deprecated instead call buildLockRequest(LockMode).lock(entityName, object)
*/
- public void lock(String entityName, Object object, LockMode lockMode) throws HibernateException;
+ @SuppressWarnings( {"JavaDoc"})
+ @Deprecated
+ public void lock(String entityName, Object object, LockMode lockMode);
/**
+ * Build a LockRequest that specifies the LockMode, pessimistic lock timeout and lock scope.
+	 * Timeout and scope are ignored for optimistic locking. After building the LockRequest,
+ * call LockRequest.lock to perform the requested locking.
+ *
+ * Example usage:
+	 * {@code session.buildLockRequest(LockOptions.NONE).setLockMode(LockMode.PESSIMISTIC_WRITE).setTimeOut(60000).lock(entity);}
+ *
+ * @param lockOptions contains the lock level
+ *
+ * @return a lockRequest that can be used to lock the passed object.
+ */
+ public LockRequest buildLockRequest(LockOptions lockOptions);
+
+ /**
* Re-read the state of the given instance from the underlying database. It is
* inadvisable to use this to implement long-running sessions that span many
* business tasks. This method is, however, useful in certain special circumstances.
@@ -535,127 +601,82 @@
*
*
* @param object a persistent or detached instance
- * @throws HibernateException
*/
- public void refresh(Object object) throws HibernateException;
+ public void refresh(Object object);
/**
+ * Re-read the state of the given instance from the underlying database. It is
+ * inadvisable to use this to implement long-running sessions that span many
+ * business tasks. This method is, however, useful in certain special circumstances.
+ * For example
+ *
+ * - where a database trigger alters the object state upon insert or update
+ *
+	 * - after executing direct SQL (eg. a mass update) in the same session
+ *
+	 * - after inserting a Blob or Clob
+ *
+ *
+ * @param entityName a persistent class
+ * @param object a persistent or detached instance
+ */
+ public void refresh(String entityName, Object object);
+
+ /**
* Re-read the state of the given instance from the underlying database, with
* the given LockMode. It is inadvisable to use this to implement
* long-running sessions that span many business tasks. This method is, however,
* useful in certain special circumstances.
*
* @param object a persistent or detached instance
* @param lockMode the lock mode to use
- * @throws HibernateException
- */
- public void refresh(Object object, LockMode lockMode) throws HibernateException;
-
- /**
- * Determine the current lock mode of the given object.
*
- * @param object a persistent instance
- * @return the current lock mode
- * @throws HibernateException
+ * @deprecated LockMode parameter should be replaced with LockOptions
*/
- public LockMode getCurrentLockMode(Object object) throws HibernateException;
+ @Deprecated
+ public void refresh(Object object, LockMode lockMode);
/**
- * Begin a unit of work and return the associated Transaction object.
- * If a new underlying transaction is required, begin the transaction. Otherwise
- * continue the new work in the context of the existing underlying transaction.
- * The class of the returned Transaction object is determined by the
- * property hibernate.transaction_factory.
+ * Re-read the state of the given instance from the underlying database, with
+ * the given LockMode. It is inadvisable to use this to implement
+ * long-running sessions that span many business tasks. This method is, however,
+ * useful in certain special circumstances.
*
- * @return a Transaction instance
- * @throws HibernateException
- * @see Transaction
+ * @param object a persistent or detached instance
+ * @param lockOptions contains the lock mode to use
*/
- public Transaction beginTransaction() throws HibernateException;
+ public void refresh(Object object, LockOptions lockOptions);
/**
- * Get the Transaction instance associated with this session.
- * The class of the returned Transaction object is determined by the
- * property hibernate.transaction_factory.
+ * Re-read the state of the given instance from the underlying database, with
+ * the given LockMode. It is inadvisable to use this to implement
+ * long-running sessions that span many business tasks. This method is, however,
+ * useful in certain special circumstances.
*
- * @return a Transaction instance
- * @throws HibernateException
- * @see Transaction
+ * @param entityName a persistent class
+ * @param object a persistent or detached instance
+ * @param lockOptions contains the lock mode to use
*/
- public Transaction getTransaction();
+ public void refresh(String entityName, Object object, LockOptions lockOptions);
/**
- * Create a new Criteria instance, for the given entity class,
- * or a superclass of an entity class.
+ * Determine the current lock mode of the given object.
*
- * @param persistentClass a class, which is persistent, or has persistent subclasses
- * @return Criteria
- */
- public Criteria createCriteria(Class persistentClass);
-
- /**
- * Create a new Criteria instance, for the given entity class,
- * or a superclass of an entity class, with the given alias.
+ * @param object a persistent instance
*
- * @param persistentClass a class, which is persistent, or has persistent subclasses
- * @return Criteria
+ * @return the current lock mode
*/
- public Criteria createCriteria(Class persistentClass, String alias);
+ public LockMode getCurrentLockMode(Object object);
/**
- * Create a new Criteria instance, for the given entity name.
+ * Create a {@link Query} instance for the given collection and filter string. Contains an implicit {@code FROM}
+ * element named {@code this} which refers to the defined table for the collection elements, as well as an implicit
+ * {@code WHERE} restriction for this particular collection instance's key value.
*
- * @param entityName
- * @return Criteria
- */
- public Criteria createCriteria(String entityName);
-
- /**
- * Create a new Criteria instance, for the given entity name,
- * with the given alias.
- *
- * @param entityName
- * @return Criteria
- */
- public Criteria createCriteria(String entityName, String alias);
-
- /**
- * Create a new instance of Query for the given HQL query string.
- *
- * @param queryString a HQL query
- * @return Query
- * @throws HibernateException
- */
- public Query createQuery(String queryString) throws HibernateException;
-
- /**
- * Create a new instance of SQLQuery for the given SQL query string.
- *
- * @param queryString a SQL query
- * @return SQLQuery
- * @throws HibernateException
- */
- public SQLQuery createSQLQuery(String queryString) throws HibernateException;
-
- /**
- * Create a new instance of Query for the given collection and filter string.
- *
* @param collection a persistent collection
- * @param queryString a Hibernate query
- * @return Query
- * @throws HibernateException
- */
- public Query createFilter(Object collection, String queryString) throws HibernateException;
-
- /**
- * Obtain an instance of Query for a named query string defined in the
- * mapping file.
+ * @param queryString a Hibernate query fragment.
*
- * @param queryName the name of a query defined externally
- * @return Query
- * @throws HibernateException
+ * @return The query instance for manipulation and execution
*/
- public Query getNamedQuery(String queryName) throws HibernateException;
+ public Query createFilter(Object collection, String queryString);
/**
* Completely clear the session. Evict all loaded instances and cancel all pending
@@ -668,14 +689,13 @@
* Return the persistent instance of the given entity class with the given identifier,
* or null if there is no such persistent instance. (If the instance is already associated
* with the session, return that instance. This method never returns an uninitialized instance.)
- * Obtain the specified lock mode if the instance exists.
*
* @param clazz a persistent class
* @param id an identifier
+ *
* @return a persistent instance or null
- * @throws HibernateException
*/
- public Object get(Class clazz, Serializable id) throws HibernateException;
+ public Object get(Class clazz, Serializable id);
/**
* Return the persistent instance of the given entity class with the given identifier,
@@ -686,22 +706,39 @@
* @param clazz a persistent class
* @param id an identifier
* @param lockMode the lock mode
+ *
* @return a persistent instance or null
- * @throws HibernateException
+ *
+ * @deprecated LockMode parameter should be replaced with LockOptions
*/
- public Object get(Class clazz, Serializable id, LockMode lockMode) throws HibernateException;
+ @Deprecated
+ public Object get(Class clazz, Serializable id, LockMode lockMode);
/**
+ * Return the persistent instance of the given entity class with the given identifier,
+ * or null if there is no such persistent instance. (If the instance is already associated
+ * with the session, return that instance. This method never returns an uninitialized instance.)
+ * Obtain the specified lock mode if the instance exists.
+ *
+ * @param clazz a persistent class
+ * @param id an identifier
+ * @param lockOptions the lock mode
+ *
+ * @return a persistent instance or null
+ */
+ public Object get(Class clazz, Serializable id, LockOptions lockOptions);
+
+ /**
* Return the persistent instance of the given named entity with the given identifier,
* or null if there is no such persistent instance. (If the instance is already associated
* with the session, return that instance. This method never returns an uninitialized instance.)
*
* @param entityName the entity name
* @param id an identifier
+ *
* @return a persistent instance or null
- * @throws HibernateException
*/
- public Object get(String entityName, Serializable id) throws HibernateException;
+ public Object get(String entityName, Serializable id);
/**
* Return the persistent instance of the given entity class with the given identifier,
@@ -712,34 +749,126 @@
* @param entityName the entity name
* @param id an identifier
* @param lockMode the lock mode
+ *
* @return a persistent instance or null
- * @throws HibernateException
+ *
+ * @deprecated LockMode parameter should be replaced with LockOptions
*/
- public Object get(String entityName, Serializable id, LockMode lockMode) throws HibernateException;
+ @Deprecated
+ public Object get(String entityName, Serializable id, LockMode lockMode);
-
/**
- * Return the entity name for a persistent entity
+ * Return the persistent instance of the given entity class with the given identifier,
+ * or null if there is no such persistent instance. (If the instance is already associated
+ * with the session, return that instance. This method never returns an uninitialized instance.)
+ * Obtain the specified lock mode if the instance exists.
+ *
+ * @param entityName the entity name
+ * @param id an identifier
+ * @param lockOptions contains the lock mode
+ *
+ * @return a persistent instance or null
+ */
+ public Object get(String entityName, Serializable id, LockOptions lockOptions);
+
+ /**
+ * Return the entity name for a persistent entity.
*
* @param object a persistent entity
+ *
* @return the entity name
- * @throws HibernateException
*/
- public String getEntityName(Object object) throws HibernateException;
+ public String getEntityName(Object object);
+
+ /**
+ * Create an {@link IdentifierLoadAccess} instance to retrieve the specified entity type by
+ * primary key.
+ *
+ * @param entityName The entity name of the entity type to be retrieved
+ *
+ * @return load delegate for loading the specified entity type by primary key
+ *
+ * @throws HibernateException If the specified entity name cannot be resolved as an entity name
+ */
+ public IdentifierLoadAccess byId(String entityName);
/**
+ * Create an {@link IdentifierLoadAccess} instance to retrieve the specified entity by
+ * primary key.
+ *
+ * @param entityClass The entity type to be retrieved
+ *
+ * @return load delegate for loading the specified entity type by primary key
+ *
+ * @throws HibernateException If the specified Class cannot be resolved as a mapped entity
+ */
+ public IdentifierLoadAccess byId(Class entityClass);
+
+ /**
+ * Create an {@link NaturalIdLoadAccess} instance to retrieve the specified entity by
+ * its natural id.
+ *
+ * @param entityName The entity name of the entity type to be retrieved
+ *
+ * @return load delegate for loading the specified entity type by natural id
+ *
+ * @throws HibernateException If the specified entity name cannot be resolved as an entity name
+ */
+ public NaturalIdLoadAccess byNaturalId(String entityName);
+
+ /**
+ * Create an {@link NaturalIdLoadAccess} instance to retrieve the specified entity by
+ * its natural id.
+ *
+ * @param entityClass The entity type to be retrieved
+ *
+ * @return load delegate for loading the specified entity type by natural id
+ *
+ * @throws HibernateException If the specified Class cannot be resolved as a mapped entity
+ */
+ public NaturalIdLoadAccess byNaturalId(Class entityClass);
+
+ /**
+ * Create an {@link SimpleNaturalIdLoadAccess} instance to retrieve the specified entity by
+ * its natural id.
+ *
+ * @param entityName The entity name of the entity type to be retrieved
+ *
+ * @return load delegate for loading the specified entity type by natural id
+ *
+ * @throws HibernateException If the specified entityClass cannot be resolved as a mapped entity, or if the
+ * entity does not define a natural-id or if its natural-id is made up of multiple attributes.
+ */
+ public SimpleNaturalIdLoadAccess bySimpleNaturalId(String entityName);
+
+ /**
+ * Create an {@link SimpleNaturalIdLoadAccess} instance to retrieve the specified entity by
+ * its simple (single attribute) natural id.
+ *
+ * @param entityClass The entity type to be retrieved
+ *
+ * @return load delegate for loading the specified entity type by natural id
+ *
+ * @throws HibernateException If the specified entityClass cannot be resolved as a mapped entity, or if the
+ * entity does not define a natural-id or if its natural-id is made up of multiple attributes.
+ */
+ public SimpleNaturalIdLoadAccess bySimpleNaturalId(Class entityClass);
+
+ /**
* Enable the named filter for this current session.
*
* @param filterName The name of the filter to be enabled.
- * @return The Filter instance representing the enabled fiter.
+ *
+ * @return The Filter instance representing the enabled filter.
*/
public Filter enableFilter(String filterName);
/**
* Retrieve a currently enabled filter by name.
*
* @param filterName The name of the filter to be retrieved.
- * @return The Filter instance representing the enabled fiter.
+ *
+ * @return The Filter instance representing the enabled filter.
*/
public Filter getEnabledFilter(String filterName);
@@ -752,65 +881,228 @@
/**
* Get the statistics for this session.
+ *
+ * @return The session statistics being collected for this session
*/
public SessionStatistics getStatistics();
-
+
/**
- * Set an unmodified persistent object to read only mode, or a read only
- * object to modifiable mode. In read only mode, no snapshot is maintained
- * and the instance is never dirty checked.
+ * Is the specified entity or proxy read-only?
+ *
+ * To get the default read-only/modifiable setting used for
+ * entities and proxies that are loaded into the session:
+ * @see org.hibernate.Session#isDefaultReadOnly()
+ *
+ * @param entityOrProxy an entity or HibernateProxy
+ * @return {@code true} if the entity or proxy is read-only, {@code false} if the entity or proxy is modifiable.
+ */
+ public boolean isReadOnly(Object entityOrProxy);
+
+ /**
+ * Set an unmodified persistent object to read-only mode, or a read-only
+ * object to modifiable mode. In read-only mode, no snapshot is maintained,
+ * the instance is never dirty checked, and changes are not persisted.
+ *
+ * If the entity or proxy already has the specified read-only/modifiable
+ * setting, then this method does nothing.
*
+ * To set the default read-only/modifiable setting used for
+ * entities and proxies that are loaded into the session:
+ * @see org.hibernate.Session#setDefaultReadOnly(boolean)
+ *
+ * To override this session's read-only/modifiable setting for entities
+ * and proxies loaded by a Query:
* @see Query#setReadOnly(boolean)
+ *
+ * @param entityOrProxy an entity or HibernateProxy
+ * @param readOnly {@code true} if the entity or proxy should be made read-only; {@code false} if the entity or
+ * proxy should be made modifiable
*/
- public void setReadOnly(Object entity, boolean readOnly);
+ public void setReadOnly(Object entityOrProxy, boolean readOnly);
/**
- * Controller for allowing users to perform JDBC related work using the Connection
- * managed by this Session.
+ * Controller for allowing users to perform JDBC related work using the Connection managed by this Session.
*
* @param work The work to be performed.
* @throws HibernateException Generally indicates wrapped {@link java.sql.SQLException}
*/
public void doWork(Work work) throws HibernateException;
+ /**
+ * Controller for allowing users to perform JDBC related work using the Connection managed by this Session. After
+ * execution returns the result of the {@link ReturningWork#execute} call.
+ *
+ * @param work The work to be performed.
+	 * @param <T> The type of the result returned from the work
+ *
+ * @return the result from calling {@link ReturningWork#execute}.
+ *
+ * @throws HibernateException Generally indicates wrapped {@link java.sql.SQLException}
+ */
+	public <T> T doReturningWork(ReturningWork<T> work) throws HibernateException;
/**
- * Disconnect the Session from the current JDBC connection. If
- * the connection was obtained by Hibernate close it and return it to
- * the connection pool; otherwise, return it to the application.
+ * Disconnect the session from its underlying JDBC connection. This is intended for use in cases where the
+ * application has supplied the JDBC connection to the session and which require long-sessions (aka, conversations).
*
- * This is used by applications which supply JDBC connections to Hibernate
- * and which require long-sessions (or long-conversations)
+ * It is considered an error to call this method on a session which was not opened by supplying the JDBC connection
+ * and an exception will be thrown.
*
- * Note that disconnect() called on a session where the connection was
- * retrieved by Hibernate through its configured
- * {@link org.hibernate.connection.ConnectionProvider} has no effect,
- * provided {@link ConnectionReleaseMode#ON_CLOSE} is not in effect.
+ * For non-user-supplied scenarios, normal transaction management already handles disconnection and reconnection
+ * automatically.
*
- * @return the application-supplied connection or null
+ * @return the application-supplied connection or {@code null}
+ *
* @see #reconnect(Connection)
- * @see #reconnect()
*/
- Connection disconnect() throws HibernateException;
+ Connection disconnect();
/**
- * Obtain a new JDBC connection. This is used by applications which
- * require long transactions and do not supply connections to the
- * session.
+ * Reconnect to the given JDBC connection.
*
+ * @param connection a JDBC connection
+ *
* @see #disconnect()
- * @deprecated Manual reconnection is only needed in the case of
- * application-supplied connections, in which case the
- * {@link #reconnect(java.sql.Connection)} for should be used.
*/
- void reconnect() throws HibernateException;
+ void reconnect(Connection connection);
/**
- * Reconnect to the given JDBC connection. This is used by applications
- * which require long transactions and use application-supplied connections.
+ * Is a particular fetch profile enabled on this session?
*
- * @param connection a JDBC connection
- * @see #disconnect()
+ * @param name The name of the profile to be checked.
+ * @return True if fetch profile is enabled; false if not.
+ * @throws UnknownProfileException Indicates that the given name does not
+ * match any known profile names
+ *
+ * @see org.hibernate.engine.profile.FetchProfile for discussion of this feature
*/
- void reconnect(Connection connection) throws HibernateException;
+ public boolean isFetchProfileEnabled(String name) throws UnknownProfileException;
+
+ /**
+ * Enable a particular fetch profile on this session. No-op if requested
+ * profile is already enabled.
+ *
+ * @param name The name of the fetch profile to be enabled.
+ * @throws UnknownProfileException Indicates that the given name does not
+ * match any known profile names
+ *
+ * @see org.hibernate.engine.profile.FetchProfile for discussion of this feature
+ */
+ public void enableFetchProfile(String name) throws UnknownProfileException;
+
+ /**
+ * Disable a particular fetch profile on this session. No-op if requested
+ * profile is already disabled.
+ *
+ * @param name The name of the fetch profile to be disabled.
+ * @throws UnknownProfileException Indicates that the given name does not
+ * match any known profile names
+ *
+ * @see org.hibernate.engine.profile.FetchProfile for discussion of this feature
+ */
+ public void disableFetchProfile(String name) throws UnknownProfileException;
+
+ /**
+ * Convenience access to the {@link TypeHelper} associated with this session's {@link SessionFactory}.
+ *
+ * Equivalent to calling {@link #getSessionFactory()}.{@link SessionFactory#getTypeHelper getTypeHelper()}
+ *
+ * @return The {@link TypeHelper} associated with this session's {@link SessionFactory}
+ */
+ public TypeHelper getTypeHelper();
+
+ /**
+ * Retrieve this session's helper/delegate for creating LOB instances.
+ *
+ * @return This session's LOB helper
+ */
+ public LobHelper getLobHelper();
+
+ /**
+ * Contains locking details (LockMode, Timeout and Scope).
+ */
+ public interface LockRequest {
+ /**
+ * Constant usable as a time out value that indicates no wait semantics should be used in
+ * attempting to acquire locks.
+ */
+ static final int PESSIMISTIC_NO_WAIT = 0;
+ /**
+ * Constant usable as a time out value that indicates that attempting to acquire locks should be allowed to
+ * wait forever (apply no timeout).
+ */
+ static final int PESSIMISTIC_WAIT_FOREVER = -1;
+
+ /**
+ * Get the lock mode.
+ *
+ * @return the lock mode.
+ */
+ LockMode getLockMode();
+
+ /**
+		 * Specify the LockMode to be used. The default is LockMode.NONE.
+ *
+ * @param lockMode The lock mode to use for this request
+ *
+ * @return this LockRequest instance for operation chaining.
+ */
+ LockRequest setLockMode(LockMode lockMode);
+
+ /**
+ * Get the timeout setting.
+ *
+ * @return timeout in milliseconds, -1 for indefinite wait and 0 for no wait.
+ */
+ int getTimeOut();
+
+ /**
+ * Specify the pessimistic lock timeout (check if your dialect supports this option).
+ * The default pessimistic lock behavior is to wait forever for the lock.
+ *
+ * @param timeout is time in milliseconds to wait for lock. -1 means wait forever and 0 means no wait.
+ *
+ * @return this LockRequest instance for operation chaining.
+ */
+ LockRequest setTimeOut(int timeout);
+
+ /**
+ * Check if locking is cascaded to owned collections and relationships.
+ *
+ * @return true if locking will be extended to owned collections and relationships.
+ */
+ boolean getScope();
+
+ /**
+ * Specify if LockMode should be cascaded to owned collections and relationships.
+ * The association must be mapped with {@code cascade="lock"} for scope=true to work.
+ *
+ * @param scope {@code true} to cascade locks; {@code false} to not.
+ *
+ * @return {@code this}, for method chaining
+ */
+ LockRequest setScope(boolean scope);
+
+ /**
+ * Perform the requested locking.
+ *
+ * @param entityName The name of the entity to lock
+ * @param object The instance of the entity to lock
+ */
+ void lock(String entityName, Object object);
+
+ /**
+ * Perform the requested locking.
+ *
+ * @param object The instance of the entity to lock
+ */
+ void lock(Object object);
+ }
+
+ /**
+ * Add one or more listeners to the Session
+ *
+ * @param listeners The listener(s) to add
+ */
+ public void addEventListeners(SessionEventListener... listeners);
}
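
Several of the additions in this revision (byId, buildLockRequest, doReturningWork, setDefaultReadOnly) are meant to be combined roughly as sketched below. The entity name "Customer", the identifier and the timeout are placeholder values; the snippet is illustrative rather than part of the patch.

import java.sql.Connection;
import java.sql.SQLException;

import org.hibernate.LockMode;
import org.hibernate.LockOptions;
import org.hibernate.Session;
import org.hibernate.jdbc.ReturningWork;

public class SessionApiSketch {
    // "Customer" is a placeholder entity name; the id and timeout values are arbitrary.
    public static void demo(Session session, Long customerId) {
        // Entities loaded from now on are read-only by default: no snapshots, no dirty checking.
        session.setDefaultReadOnly( true );

        // Load by primary key through the IdentifierLoadAccess delegate returned by byId().
        Object customer = session.byId( "Customer" ).load( customerId );

        // Pessimistic lock with a 5 second timeout, replacing the deprecated lock(Object, LockMode).
        session.buildLockRequest( LockOptions.NONE )
                .setLockMode( LockMode.PESSIMISTIC_WRITE )
                .setTimeOut( 5000 )
                .lock( customer );

        // Run JDBC work against the session-managed connection and return a value from it.
        String product = session.doReturningWork( new ReturningWork<String>() {
            @Override
            public String execute(Connection connection) throws SQLException {
                return connection.getMetaData().getDatabaseProductName();
            }
        } );
        System.out.println( "Connected to " + product );
    }
}
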
Fisheye: Tag 1.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/SessionBuilder.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/SessionEventListener.java'.
Fisheye: No comparison available. Pass `N' to diff?
Index: 3rdParty_sources/hibernate-core/org/hibernate/SessionFactoryObserver.java
===================================================================
RCS file: /usr/local/cvsroot/3rdParty_sources/hibernate-core/org/hibernate/SessionFactoryObserver.java,v
diff -u -r1.1 -r1.1.2.1
--- 3rdParty_sources/hibernate-core/org/hibernate/SessionFactoryObserver.java 17 Aug 2012 14:36:40 -0000 1.1
+++ 3rdParty_sources/hibernate-core/org/hibernate/SessionFactoryObserver.java 30 Jul 2014 15:51:00 -0000 1.1.2.1
@@ -1,10 +1,10 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
- * Copyright (c) 2008, Red Hat Middleware LLC or third-party contributors as
+ * Copyright (c) 2008, 2013, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
- * distributed under license by Red Hat Middleware LLC.
+ * distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
@@ -20,7 +20,6 @@
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
- *
*/
package org.hibernate;
Index: 3rdParty_sources/hibernate-core/org/hibernate/StaleStateException.java
===================================================================
RCS file: /usr/local/cvsroot/3rdParty_sources/hibernate-core/org/hibernate/StaleStateException.java,v
diff -u -r1.1 -r1.1.2.1
--- 3rdParty_sources/hibernate-core/org/hibernate/StaleStateException.java 17 Aug 2012 14:36:40 -0000 1.1
+++ 3rdParty_sources/hibernate-core/org/hibernate/StaleStateException.java 30 Jul 2014 15:51:00 -0000 1.1.2.1
@@ -1,10 +1,10 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
- * Copyright (c) 2008, Red Hat Middleware LLC or third-party contributors as
+ * Copyright (c) 2008, 2013, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
- * distributed under license by Red Hat Middleware LLC.
+ * distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
@@ -20,25 +20,26 @@
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
- *
*/
package org.hibernate;
/**
- * Thrown when a version number or timestamp check failed, indicating that the
- * Session contained stale data (when using long transactions
- * with versioning). Also occurs if we try delete or update a row that does
- * not exist.
- *
- * Note that this exception often indicates that the user failed to specify the
- * correct unsaved-value strategy for a class!
+ * Thrown when a version number or timestamp check failed, indicating that the Session contained
+ * stale data (when using long transactions with versioning). Also occurs if we try to delete or update
+ * a row that does not exist.
*
- * @see StaleObjectStateException
+ * Note that this exception often indicates that the user failed to specify the correct
+ * {@code unsaved-value} strategy for an entity.
+ *
* @author Gavin King
*/
public class StaleStateException extends HibernateException {
-
- public StaleStateException(String s) {
- super(s);
+ /**
+ * Constructs a StaleStateException using the supplied message.
+ *
+ * @param message The message explaining the exception condition
+ */
+ public StaleStateException(String message) {
+ super( message );
}
}
Index: 3rdParty_sources/hibernate-core/org/hibernate/StatelessSession.java
===================================================================
RCS file: /usr/local/cvsroot/3rdParty_sources/hibernate-core/org/hibernate/StatelessSession.java,v
diff -u -r1.1 -r1.1.2.1
--- 3rdParty_sources/hibernate-core/org/hibernate/StatelessSession.java 17 Aug 2012 14:36:40 -0000 1.1
+++ 3rdParty_sources/hibernate-core/org/hibernate/StatelessSession.java 30 Jul 2014 15:51:00 -0000 1.1.2.1
@@ -1,10 +1,10 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
- * Copyright (c) 2008, Red Hat Middleware LLC or third-party contributors as
+ * Copyright (c) 2008-2011, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
- * distributed under license by Red Hat Middleware LLC.
+ * distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
@@ -20,32 +20,29 @@
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
- *
*/
package org.hibernate;
import java.io.Serializable;
import java.sql.Connection;
/**
- * A command-oriented API for performing bulk operations
- * against a database.
- *
- * A stateless session does not implement a first-level cache nor
- * interact with any second-level cache, nor does it implement
- * transactional write-behind or automatic dirty checking, nor do
- * operations cascade to associated instances. Collections are
- * ignored by a stateless session. Operations performed via a
- * stateless session bypass Hibernate's event model and
- * interceptors. Stateless sessions are vulnerable to data
- * aliasing effects, due to the lack of a first-level cache.
- *
- * For certain kinds of transactions, a stateless session may
- * perform slightly faster than a stateful session.
+ * A command-oriented API for performing bulk operations against a database.
+ *
+ * A stateless session does not implement a first-level cache nor interact
+ * with any second-level cache, nor does it implement transactional
+ * write-behind or automatic dirty checking, nor do operations cascade to
+ * associated instances. Collections are ignored by a stateless session.
+ * Operations performed via a stateless session bypass Hibernate's event model
+ * and interceptors. Stateless sessions are vulnerable to data aliasing
+ * effects, due to the lack of a first-level cache.
+ *
+ * For certain kinds of transactions, a stateless session may perform slightly
+ * faster than a stateful session.
*
* @author Gavin King
*/
-public interface StatelessSession extends Serializable {
+public interface StatelessSession extends SharedSessionContract {
/**
* Close the stateless session and release the JDBC connection.
*/
@@ -55,6 +52,8 @@
* Insert a row.
*
* @param entity a new transient instance
+ *
+ * @return The identifier of the inserted entity
*/
public Serializable insert(Object entity);
@@ -63,6 +62,7 @@
*
* @param entityName The entityName for the entity to be inserted
* @param entity a new transient instance
+ *
* @return the identifier of the instance
*/
public Serializable insert(String entityName, Object entity);
@@ -100,27 +100,41 @@
/**
* Retrieve a row.
*
+ * @param entityName The name of the entity to retrieve
+ * @param id The id of the entity to retrieve
+ *
* @return a detached entity instance
*/
public Object get(String entityName, Serializable id);
/**
* Retrieve a row.
*
+ * @param entityClass The class of the entity to retrieve
+ * @param id The id of the entity to retrieve
+ *
* @return a detached entity instance
*/
public Object get(Class entityClass, Serializable id);
/**
* Retrieve a row, obtaining the specified lock mode.
*
+ * @param entityName The name of the entity to retrieve
+ * @param id The id of the entity to retrieve
+ * @param lockMode The lock mode to apply to the entity
+ *
* @return a detached entity instance
*/
public Object get(String entityName, Serializable id, LockMode lockMode);
/**
* Retrieve a row, obtaining the specified lock mode.
*
+ * @param entityClass The class of the entity to retrieve
+ * @param id The id of the entity to retrieve
+ * @param lockMode The lock mode to apply to the entity
+ *
* @return a detached entity instance
*/
public Object get(Class entityClass, Serializable id, LockMode lockMode);
@@ -158,83 +172,18 @@
public void refresh(String entityName, Object entity, LockMode lockMode);
/**
- * Create a new instance of Query for the given HQL query string.
- * Entities returned by the query are detached.
- */
- public Query createQuery(String queryString);
-
- /**
- * Obtain an instance of Query for a named query string defined in
- * the mapping file. Entities returned by the query are detached.
- */
- public Query getNamedQuery(String queryName);
-
- /**
- * Create a new Criteria instance, for the given entity class,
- * or a superclass of an entity class. Entities returned by the query are
- * detached.
- *
- * @param persistentClass a class, which is persistent, or has persistent subclasses
- * @return Criteria
- */
- public Criteria createCriteria(Class persistentClass);
-
- /**
- * Create a new Criteria instance, for the given entity class,
- * or a superclass of an entity class, with the given alias.
- * Entities returned by the query are detached.
- *
- * @param persistentClass a class, which is persistent, or has persistent subclasses
- * @return Criteria
- */
- public Criteria createCriteria(Class persistentClass, String alias);
-
- /**
- * Create a new Criteria instance, for the given entity name.
- * Entities returned by the query are detached.
- *
- * @param entityName
- * @return Criteria
- */
- public Criteria createCriteria(String entityName);
-
- /**
- * Create a new Criteria instance, for the given entity name,
- * with the given alias. Entities returned by the query are detached.
- *
- * @param entityName
- * @return Criteria
- */
- public Criteria createCriteria(String entityName, String alias);
-
- /**
- * Create a new instance of SQLQuery for the given SQL query string.
- * Entities returned by the query are detached.
- *
- * @param queryString a SQL query
- * @return SQLQuery
- * @throws HibernateException
- */
- public SQLQuery createSQLQuery(String queryString) throws HibernateException;
-
- /**
- * Begin a Hibernate transaction.
- */
- public Transaction beginTransaction();
-
- /**
- * Get the current Hibernate transaction.
- */
- public Transaction getTransaction();
-
- /**
* Returns the current JDBC connection associated with this
* instance.
*
* If the session is using aggressive connection release (as in a
* CMT environment), it is the application's responsibility to
* close the connection returned by this call. Otherwise, the
* application should not close the connection.
+ *
+ * @deprecated just missed when deprecating same method from {@link Session}
+ *
+ * @return The connection associated with this stateless session
*/
+ @Deprecated
public Connection connection();
}
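A rough illustration of the command-oriented, bulk-operation usage described in the class comment
above (a hedged sketch: the Employee entity and the employees collection are assumed, and
beginTransaction() is now inherited from SharedSessionContract):

    // Hedged example of bulk inserts through a StatelessSession.
    StatelessSession ss = sessionFactory.openStatelessSession();
    Transaction tx = ss.beginTransaction();
    for ( Employee e : employees ) {
        ss.insert( e );   // each insert goes straight to the database; no first-level cache, no cascades
    }
    tx.commit();
    ss.close();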
Fisheye: Tag 1.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/SynchronizeableQuery.java'.
Fisheye: No comparison available. Pass `N' to diff?
Index: 3rdParty_sources/hibernate-core/org/hibernate/Transaction.java
===================================================================
RCS file: /usr/local/cvsroot/3rdParty_sources/hibernate-core/org/hibernate/Transaction.java,v
diff -u -r1.1 -r1.1.2.1
--- 3rdParty_sources/hibernate-core/org/hibernate/Transaction.java 17 Aug 2012 14:36:39 -0000 1.1
+++ 3rdParty_sources/hibernate-core/org/hibernate/Transaction.java 30 Jul 2014 15:51:00 -0000 1.1.2.1
@@ -1,10 +1,10 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
- * Copyright (c) 2008, Red Hat Middleware LLC or third-party contributors as
+ * Copyright (c) 2007-2011, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
- * distributed under license by Red Hat Middleware LLC.
+ * distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
@@ -20,109 +20,152 @@
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
- *
*/
package org.hibernate;
import javax.transaction.Synchronization;
+import org.hibernate.engine.transaction.spi.LocalStatus;
+
/**
- * Allows the application to define units of work, while
- * maintaining abstraction from the underlying transaction
- * implementation (eg. JTA, JDBC).
- *
- * A transaction is associated with a Session and is
- * usually instantiated by a call to Session.beginTransaction().
- * A single session might span multiple transactions since
- * the notion of a session (a conversation between the application
- * and the datastore) is of coarser granularity than the notion of
- * a transaction. However, it is intended that there be at most one
- * uncommitted Transaction associated with a particular
- * Session at any time.
- *
- * Implementors are not intended to be threadsafe.
+ * Defines the contract for abstracting applications from the configured underlying means of transaction management.
+ * Allows the application to define units of work, while maintaining abstraction from the underlying transaction
+ * implementation (e.g. JTA, JDBC).
+ *
+ * A transaction is associated with a {@link Session} and is usually initiated by a call to
+ * {@link org.hibernate.Session#beginTransaction()}. A single session might span multiple transactions since
+ * the notion of a session (a conversation between the application and the datastore) is of coarser granularity than
+ * the notion of a transaction. However, it is intended that there be at most one uncommitted transaction associated
+ * with a particular {@link Session} at any time.
+ *
+ * Implementers are not intended to be thread-safe.
*
- * @see Session#beginTransaction()
- * @see org.hibernate.transaction.TransactionFactory
* @author Anton van Straaten
+ * @author Steve Ebersole
*/
public interface Transaction {
-
/**
- * Begin a new transaction.
+ * Is this transaction the initiator of any underlying transaction?
+ *
+ * @return {@code true} if this transaction initiated the underlying transaction; {@code false} otherwise.
*/
- public void begin() throws HibernateException;
+ public boolean isInitiator();
/**
- * Flush the associated Session and end the unit of work (unless
- * we are in {@link FlushMode#MANUAL}.
- *
- * This method will commit the underlying transaction if and only
- * if the underlying transaction was initiated by this object.
+ * Begin this transaction. No-op if the transaction has already been begun. Note that this is not necessarily
+ * symmetrical since usually multiple calls to {@link #commit} or {@link #rollback} will error.
*
- * @throws HibernateException
+ * @throws HibernateException Indicates a problem beginning the transaction.
*/
- public void commit() throws HibernateException;
+ public void begin();
/**
- * Force the underlying transaction to roll back.
+ * Commit this transaction. This might entail a number of things depending on the context:
+	 * - If this transaction is the {@link #isInitiator initiator}, {@link Session#flush} the {@link Session}
+	 *   with which it is associated (unless the {@link Session} is in {@link FlushMode#MANUAL}).
+	 * - If this transaction is the {@link #isInitiator initiator}, commit the underlying transaction.
+	 * - Coordinate various callbacks.
*
- * @throws HibernateException
+ * @throws HibernateException Indicates a problem committing the transaction.
*/
- public void rollback() throws HibernateException;
+ public void commit();
/**
- * Was this transaction rolled back or set to rollback only?
- *
- * This only accounts for actions initiated from this local transaction.
- * If, for example, the underlying transaction is forced to rollback via
- * some other means, this method still reports false because the rollback
- * was not initiated from here.
+	 * Roll back this transaction. Either rolls back the underlying transaction or ensures it cannot later commit
+ * (depending on the actual underlying strategy).
*
- * @return boolean True if the transaction was rolled back via this
- * local transaction; false otherwise.
- * @throws HibernateException
+ * @throws HibernateException Indicates a problem rolling back the transaction.
*/
- public boolean wasRolledBack() throws HibernateException;
+ public void rollback();
/**
- * Check if this transaction was successfully committed.
+ * Get the current local status of this transaction.
*
- * This method could return false even after successful invocation
- * of {@link #commit}. As an example, JTA based strategies no-op on
- * {@link #commit} calls if they did not start the transaction; in that case,
- * they also report {@link #wasCommitted} as false.
+ * This only accounts for the local view of the transaction status. In other words it does not check the status
+ * of the actual underlying transaction.
*
- * @return boolean True if the transaction was (unequivocally) committed
- * via this local transaction; false otherwise.
- * @throws HibernateException
+ * @return The current local status.
*/
- public boolean wasCommitted() throws HibernateException;
-
+ public LocalStatus getLocalStatus();
+
/**
* Is this transaction still active?
*
- * Again, this only returns information in relation to the
- * local transaction, not the actual underlying transaction.
+ * Answers on a best effort basis. For example, in the case of JDBC based transactions we cannot know that a
+ * transaction is active when it is initiated directly through the JDBC {@link java.sql.Connection}, only when
+ * it is initiated from here.
*
- * @return boolean Treu if this local transaction is still active.
+ * @return {@code true} if the transaction is still active; {@code false} otherwise.
+ *
+ * @throws HibernateException Indicates a problem checking the transaction status.
*/
- public boolean isActive() throws HibernateException;
+ public boolean isActive();
/**
+ * Is Hibernate participating in the underlying transaction?
+ *
+ * Generally speaking this will be the same as {@link #isActive()}.
+ *
+ * @return {@code true} if Hibernate is known to be participating in the underlying transaction; {@code false}
+ * otherwise.
+ */
+ public boolean isParticipating();
+
+ /**
+ * Was this transaction committed?
+ *
+ * Answers on a best effort basis. For example, in the case of JDBC based transactions we cannot know that a
+ * transaction was committed when the commit was performed directly through the JDBC {@link java.sql.Connection},
+	 * only when the commit was done from this Transaction instance.
+	 *
+	 * @return {@code true} if the transaction was committed; {@code false} otherwise.
+ *
+ * @throws HibernateException Indicates a problem checking the transaction status.
+ */
+ @SuppressWarnings( {"UnusedDeclaration"})
+ public boolean wasCommitted();
+
+ /**
+ * Was this transaction rolled back or set to rollback only?
+ *
+ * Answers on a best effort basis. For example, in the case of JDBC based transactions we cannot know that a
+ * transaction was rolled back when rollback was performed directly through the JDBC {@link java.sql.Connection},
+ * only when it was rolled back from here.
+ *
+	 * @return {@code true} if the transaction was rolled back; {@code false} otherwise.
+ *
+ * @throws HibernateException Indicates a problem checking the transaction status.
+ */
+ @SuppressWarnings( {"UnusedDeclaration"})
+ public boolean wasRolledBack();
+
+ /**
* Register a user synchronization callback for this transaction.
*
* @param synchronization The Synchronization callback to register.
- * @throws HibernateException
+ *
+ * @throws HibernateException Indicates a problem registering the synchronization.
*/
- public void registerSynchronization(Synchronization synchronization)
- throws HibernateException;
+ public void registerSynchronization(Synchronization synchronization) throws HibernateException;
/**
- * Set the transaction timeout for any transaction started by
- * a subsequent call to begin() on this instance.
+ * Set the transaction timeout for any transaction started by a subsequent call to {@link #begin} on this instance.
*
* @param seconds The number of seconds before a timeout.
*/
public void setTimeout(int seconds);
+
+ /**
+	 * Retrieve the transaction timeout set for this transaction. A negative value indicates that no timeout has been set.
+ *
+ * @return The timeout, in seconds.
+ */
+ public int getTimeout();
}
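The unit-of-work pattern this contract describes, shown as a short hedged sketch (SomeEntity is
illustrative; error handling is reduced to the essentials):

    // Hedged example; Session#beginTransaction() begins the Transaction.
    Session session = sessionFactory.openSession();
    Transaction tx = session.beginTransaction();
    try {
        session.persist( new SomeEntity( "name" ) );
        tx.commit();            // flushes the Session and commits the underlying transaction if this tx initiated it
    }
    catch ( RuntimeException e ) {
        if ( tx.isActive() ) {
            tx.rollback();      // roll back (or mark rollback-only) on failure
        }
        throw e;
    }
    finally {
        session.close();
    }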
Index: 3rdParty_sources/hibernate-core/org/hibernate/TransactionException.java
===================================================================
RCS file: /usr/local/cvsroot/3rdParty_sources/hibernate-core/org/hibernate/TransactionException.java,v
diff -u -r1.1 -r1.1.2.1
--- 3rdParty_sources/hibernate-core/org/hibernate/TransactionException.java 17 Aug 2012 14:36:39 -0000 1.1
+++ 3rdParty_sources/hibernate-core/org/hibernate/TransactionException.java 30 Jul 2014 15:51:00 -0000 1.1.2.1
@@ -1,10 +1,10 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
- * Copyright (c) 2008, Red Hat Middleware LLC or third-party contributors as
+ * Copyright (c) 2008, 2013, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
- * distributed under license by Red Hat Middleware LLC.
+ * distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
@@ -20,26 +20,33 @@
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
- *
*/
package org.hibernate;
/**
* Indicates that a transaction could not be begun, committed
* or rolled back.
*
- * @see Transaction
* @author Anton van Straaten
*/
-
public class TransactionException extends HibernateException {
-
- public TransactionException(String message, Throwable root) {
- super(message,root);
+ /**
+ * Constructs a TransactionException using the specified information.
+ *
+ * @param message The message explaining the exception condition
+ * @param cause The underlying cause
+ */
+ public TransactionException(String message, Throwable cause) {
+ super( message, cause );
}
+ /**
+ * Constructs a TransactionException using the specified information.
+ *
+ * @param message The message explaining the exception condition
+ */
public TransactionException(String message) {
- super(message);
+ super( message );
}
}
Index: 3rdParty_sources/hibernate-core/org/hibernate/TransientObjectException.java
===================================================================
RCS file: /usr/local/cvsroot/3rdParty_sources/hibernate-core/org/hibernate/TransientObjectException.java,v
diff -u -r1.1 -r1.1.2.1
--- 3rdParty_sources/hibernate-core/org/hibernate/TransientObjectException.java 17 Aug 2012 14:36:39 -0000 1.1
+++ 3rdParty_sources/hibernate-core/org/hibernate/TransientObjectException.java 30 Jul 2014 15:50:59 -0000 1.1.2.1
@@ -1,10 +1,10 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
- * Copyright (c) 2008, Red Hat Middleware LLC or third-party contributors as
+ * Copyright (c) 2008, 2013, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
- * distributed under license by Red Hat Middleware LLC.
+ * distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
@@ -20,21 +20,22 @@
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
- *
*/
package org.hibernate;
/**
- * Thrown when the user passes a transient instance to a Session
- * method that expects a persistent instance.
+ * Thrown when the user passes a transient instance to a Session method that expects a persistent instance.
*
* @author Gavin King
*/
-
public class TransientObjectException extends HibernateException {
-
- public TransientObjectException(String s) {
- super(s);
+ /**
+ * Constructs a TransientObjectException using the supplied message.
+ *
+ * @param message The message explaining the exception condition
+ */
+ public TransientObjectException(String message) {
+ super( message );
}
}
Fisheye: Tag 1.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/TransientPropertyValueException.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/TypeHelper.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/UnknownProfileException.java'.
Fisheye: No comparison available. Pass `N' to diff?
Index: 3rdParty_sources/hibernate-core/org/hibernate/UnresolvableObjectException.java
===================================================================
RCS file: /usr/local/cvsroot/3rdParty_sources/hibernate-core/org/hibernate/UnresolvableObjectException.java,v
diff -u -r1.1 -r1.1.2.1
--- 3rdParty_sources/hibernate-core/org/hibernate/UnresolvableObjectException.java 17 Aug 2012 14:36:39 -0000 1.1
+++ 3rdParty_sources/hibernate-core/org/hibernate/UnresolvableObjectException.java 30 Jul 2014 15:50:59 -0000 1.1.2.1
@@ -1,10 +1,10 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
- * Copyright (c) 2008, Red Hat Middleware LLC or third-party contributors as
+ * Copyright (c) 2013, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
- * distributed under license by Red Hat Middleware LLC.
+ * distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
@@ -20,7 +20,6 @@
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
- *
*/
package org.hibernate;
@@ -35,34 +34,52 @@
* @author Gavin King
*/
public class UnresolvableObjectException extends HibernateException {
-
private final Serializable identifier;
private final String entityName;
- public UnresolvableObjectException(Serializable identifier, String clazz) {
- this("No row with the given identifier exists", identifier, clazz);
+ /**
+ * Constructs an UnresolvableObjectException using the specified information.
+ *
+ * @param identifier The identifier of the entity which could not be resolved
+ * @param entityName The name of the entity which could not be resolved
+ */
+ public UnresolvableObjectException(Serializable identifier, String entityName) {
+ this( "No row with the given identifier exists", identifier, entityName );
}
- UnresolvableObjectException(String message, Serializable identifier, String clazz) {
- super(message);
+
+ protected UnresolvableObjectException(String message, Serializable identifier, String clazz) {
+ super( message );
this.identifier = identifier;
this.entityName = clazz;
}
+
+ /**
+ * Factory method for building and throwing an UnresolvableObjectException if the entity is null.
+ *
+ * @param entity The entity to check for nullness
+ * @param identifier The identifier of the entity
+ * @param entityName The name of the entity
+ *
+ * @throws UnresolvableObjectException Thrown if entity is null
+ */
+ public static void throwIfNull(Object entity, Serializable identifier, String entityName)
+ throws UnresolvableObjectException {
+ if ( entity == null ) {
+ throw new UnresolvableObjectException( identifier, entityName );
+ }
+ }
+
public Serializable getIdentifier() {
return identifier;
}
- public String getMessage() {
- return super.getMessage() + ": " +
- MessageHelper.infoString(entityName, identifier);
- }
-
public String getEntityName() {
return entityName;
}
- public static void throwIfNull(Object o, Serializable id, String clazz)
- throws UnresolvableObjectException {
- if (o==null) throw new UnresolvableObjectException(id, clazz);
+ @Override
+ public String getMessage() {
+ return super.getMessage() + ": " + MessageHelper.infoString( entityName, identifier );
}
}
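The new throwIfNull factory is typically used as a guard immediately after a lookup; a minimal
hedged sketch (entityName and id are placeholders for values available at the call site):

    // Hedged example guarding a lookup result.
    Object entity = session.get( entityName, id );
    UnresolvableObjectException.throwIfNull( entity, id, entityName );
    // past this point, entity is known to be non-null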
Fisheye: Tag 1.1.2.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/checkstyle_checks.xml'.
Fisheye: No comparison available. Pass `N' to diff?
Index: 3rdParty_sources/hibernate-core/org/hibernate/package.html
===================================================================
RCS file: /usr/local/cvsroot/3rdParty_sources/hibernate-core/org/hibernate/package.html,v
diff -u -r1.1 -r1.1.2.1
--- 3rdParty_sources/hibernate-core/org/hibernate/package.html 17 Aug 2012 14:36:39 -0000 1.1
+++ 3rdParty_sources/hibernate-core/org/hibernate/package.html 30 Jul 2014 15:51:00 -0000 1.1.2.1
@@ -1,10 +1,10 @@
Fisheye: Tag 1.1.2.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/action/BulkOperationCleanupAction.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1.2.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/action/CollectionAction.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1.2.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/action/CollectionRecreateAction.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1.2.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/action/CollectionRemoveAction.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1.2.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/action/CollectionUpdateAction.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1.2.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/action/DelayedPostInsertIdentifier.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1.2.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/action/EntityAction.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1.2.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/action/EntityDeleteAction.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1.2.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/action/EntityIdentityInsertAction.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1.2.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/action/EntityInsertAction.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1.2.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/action/EntityUpdateAction.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1.2.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/action/Executable.java'.
Fisheye: No comparison available. Pass `N' to diff?
Index: 3rdParty_sources/hibernate-core/org/hibernate/action/package.html
===================================================================
RCS file: /usr/local/cvsroot/3rdParty_sources/hibernate-core/org/hibernate/action/package.html,v
diff -u -r1.1 -r1.1.2.1
--- 3rdParty_sources/hibernate-core/org/hibernate/action/package.html 17 Aug 2012 14:34:02 -0000 1.1
+++ 3rdParty_sources/hibernate-core/org/hibernate/action/package.html 30 Jul 2014 15:52:31 -0000 1.1.2.1
@@ -1,10 +1,10 @@
- This package defines "actions" that are scheduled for
- asycnchronous execution by the event listeners.
+ This package defines "actions" that are scheduled for asynchronous execution by the event listeners.
+ The {@link org.hibernate.engine.ActionQueue} is responsible for execution of these actions.
Fisheye: Tag 1.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/action/internal/AbstractEntityInsertAction.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/action/internal/BulkOperationCleanupAction.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/action/internal/CollectionAction.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/action/internal/CollectionRecreateAction.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/action/internal/CollectionRemoveAction.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/action/internal/CollectionUpdateAction.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/action/internal/DelayedPostInsertIdentifier.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/action/internal/EntityAction.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/action/internal/EntityDeleteAction.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/action/internal/EntityIdentityInsertAction.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/action/internal/EntityIncrementVersionProcess.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/action/internal/EntityInsertAction.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/action/internal/EntityUpdateAction.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/action/internal/EntityVerifyVersionProcess.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/action/internal/OrphanRemovalAction.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/action/internal/QueuedOperationCollectionAction.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/action/internal/UnresolvedEntityInsertActions.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/action/internal/package-info.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/action/spi/AfterTransactionCompletionProcess.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/action/spi/BeforeTransactionCompletionProcess.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/action/spi/Executable.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/action/spi/package-info.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/boot/registry/BootstrapServiceRegistry.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/boot/registry/BootstrapServiceRegistryBuilder.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/boot/registry/StandardServiceInitiator.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/boot/registry/StandardServiceRegistry.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/boot/registry/StandardServiceRegistryBuilder.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/boot/registry/package-info.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/boot/registry/classloading/internal/ClassLoaderServiceImpl.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/boot/registry/classloading/internal/package-info.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/boot/registry/classloading/spi/ClassLoaderService.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/boot/registry/classloading/spi/ClassLoadingException.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/boot/registry/classloading/spi/package-info.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/boot/registry/internal/BootstrapServiceRegistryImpl.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/boot/registry/internal/StandardServiceRegistryImpl.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/boot/registry/internal/package-info.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/boot/registry/selector/SimpleStrategyRegistrationImpl.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/boot/registry/selector/StrategyRegistration.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/boot/registry/selector/StrategyRegistrationProvider.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/boot/registry/selector/package-info.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/boot/registry/selector/internal/StrategySelectorBuilder.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/boot/registry/selector/internal/StrategySelectorImpl.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/boot/registry/selector/internal/package-info.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/boot/registry/selector/spi/StrategySelectionException.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/boot/registry/selector/spi/StrategySelector.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/boot/registry/selector/spi/package-info.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1.2.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/cache/AbstractJndiBoundCacheProvider.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1.2.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/cache/Cache.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1.2.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/cache/CacheConcurrencyStrategy.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1.2.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/cache/CacheDataDescription.java'.
Fisheye: No comparison available. Pass `N' to diff?
Index: 3rdParty_sources/hibernate-core/org/hibernate/cache/CacheException.java
===================================================================
RCS file: /usr/local/cvsroot/3rdParty_sources/hibernate-core/org/hibernate/cache/CacheException.java,v
diff -u -r1.1 -r1.1.2.1
--- 3rdParty_sources/hibernate-core/org/hibernate/cache/CacheException.java 17 Aug 2012 14:33:59 -0000 1.1
+++ 3rdParty_sources/hibernate-core/org/hibernate/cache/CacheException.java 30 Jul 2014 15:51:50 -0000 1.1.2.1
@@ -1,10 +1,10 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
- * Copyright (c) 2008, Red Hat Middleware LLC or third-party contributors as
+ * Copyright (c) 2008, 2013, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
- * distributed under license by Red Hat Middleware LLC.
+ * distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
@@ -20,7 +20,6 @@
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
- *
*/
package org.hibernate.cache;
@@ -30,17 +29,32 @@
* Something went wrong in the cache
*/
public class CacheException extends HibernateException {
-
- public CacheException(String s) {
- super(s);
+ /**
+ * Constructs a CacheException.
+ *
+ * @param message Message explaining the exception condition
+ */
+ public CacheException(String message) {
+ super( message );
}
- public CacheException(String s, Throwable e) {
- super(s, e);
+ /**
+ * Constructs a CacheException.
+ *
+ * @param message Message explaining the exception condition
+ * @param cause The underlying cause
+ */
+ public CacheException(String message, Throwable cause) {
+ super( message, cause );
}
-
- public CacheException(Throwable e) {
- super(e);
+
+ /**
+ * Constructs a CacheException.
+ *
+ * @param cause The underlying cause
+ */
+ public CacheException(Throwable cause) {
+ super( cause );
}
}
Fisheye: Tag 1.1.2.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/cache/CacheKey.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1.2.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/cache/CacheProvider.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1.2.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/cache/CollectionRegion.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1.2.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/cache/EntityRegion.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1.2.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/cache/FilterKey.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1.2.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/cache/GeneralDataRegion.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1.2.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/cache/HashtableCache.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1.2.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/cache/HashtableCacheProvider.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1.2.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/cache/NoCacheProvider.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/cache/NoCacheRegionFactoryAvailableException.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1.2.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/cache/NoCachingEnabledException.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1.2.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/cache/NonstrictReadWriteCache.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1.2.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/cache/OptimisticCache.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1.2.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/cache/OptimisticCacheSource.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1.2.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/cache/QueryCache.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1.2.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/cache/QueryCacheFactory.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1.2.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/cache/QueryKey.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1.2.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/cache/QueryResultsRegion.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1.2.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/cache/ReadOnlyCache.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1.2.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/cache/ReadWriteCache.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1.2.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/cache/Region.java'.
Fisheye: No comparison available. Pass `N' to diff?
Index: 3rdParty_sources/hibernate-core/org/hibernate/cache/RegionFactory.java
===================================================================
RCS file: /usr/local/cvsroot/3rdParty_sources/hibernate-core/org/hibernate/cache/RegionFactory.java,v
diff -u -r1.1 -r1.1.2.1
--- 3rdParty_sources/hibernate-core/org/hibernate/cache/RegionFactory.java 17 Aug 2012 14:33:57 -0000 1.1
+++ 3rdParty_sources/hibernate-core/org/hibernate/cache/RegionFactory.java 30 Jul 2014 15:51:49 -0000 1.1.2.1
@@ -1,10 +1,10 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
- * Copyright (c) 2008, Red Hat Middleware LLC or third-party contributors as
+ * Copyright (c) 2011, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
- * distributed under license by Red Hat Middleware LLC.
+ * distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
@@ -20,111 +20,16 @@
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
- *
*/
package org.hibernate.cache;
-import java.util.Properties;
-
-import org.hibernate.cfg.Settings;
-
/**
- * Contract for building second level cache regions.
- *
- * Implementors should define a constructor in one of two forms:
- * - MyRegionFactoryImpl({@link java.util.Properties})
- * - MyRegionFactoryImpl()
- *
- * Use the first when we need to read config properties prior to
- * {@link #start} being called. For an example, have a look at
- * {@link org.hibernate.cache.impl.bridge.RegionFactoryCacheProviderBridge}
- * where we need the properties in order to determine which legacy
- * {@link CacheProvider} to use so that we can answer the
- * {@link #isMinimalPutsEnabledByDefault()} question for the
- * {@link org.hibernate.cfg.SettingsFactory}.
+ * Legacy (deprecated) namespace for the RegionFactory contract.
*
* @author Steve Ebersole
+ *
+ * @deprecated Moved, but still need this definition for ehcache
*/
-public interface RegionFactory {
-
- /**
- * Lifecycle callback to perform any necessary initialization of the
- * underlying cache implementation(s). Called exactly once during the
- * construction of a {@link org.hibernate.impl.SessionFactoryImpl}.
- *
- * @param settings The settings in effect.
- * @param properties The defined cfg properties
- * @throws CacheException Indicates problems starting the L2 cache impl;
- * considered as a sign to stop {@link org.hibernate.SessionFactory}
- * building.
- */
- public void start(Settings settings, Properties properties) throws CacheException;
-
- /**
- * Lifecycle callback to perform any necessary cleanup of the underlying
- * cache implementation(s). Called exactly once during
- * {@link org.hibernate.SessionFactory#close}.
- */
- public void stop();
-
- /**
- * By default should we perform "minimal puts" when using this second
- * level cache implementation?
- *
- * @return True if "minimal puts" should be performed by default; false
- * otherwise.
- */
- public boolean isMinimalPutsEnabledByDefault();
-
- /**
- * Generate a timestamp.
- *
- * This is generally used for cache content locking/unlocking purposes
- * depending upon the access-strategy being used.
- *
- * @return The generated timestamp.
- */
- public long nextTimestamp();
-
- /**
- * Build a cache region specialized for storing entity data.
- *
- * @param regionName The name of the region.
- * @param properties Configuration properties.
- * @param metadata Information regarding the type of data to be cached
- * @return The built region
- * @throws CacheException Indicates problems building the region.
- */
- public EntityRegion buildEntityRegion(String regionName, Properties properties, CacheDataDescription metadata) throws CacheException;
-
- /**
- * Build a cache region specialized for storing collection data.
- *
- * @param regionName The name of the region.
- * @param properties Configuration properties.
- * @param metadata Information regarding the type of data to be cached
- * @return The built region
- * @throws CacheException Indicates problems building the region.
- */
- public CollectionRegion buildCollectionRegion(String regionName, Properties properties, CacheDataDescription metadata) throws CacheException;
-
- /**
- * Build a cache region specialized for storing query results
- *
- * @param regionName The name of the region.
- * @param properties Configuration properties.
- * @return The built region
- * @throws CacheException Indicates problems building the region.
- */
- public QueryResultsRegion buildQueryResultsRegion(String regionName, Properties properties) throws CacheException;
-
- /**
- * Build a cache region specialized for storing update-timestamps data.
- *
- * @param regionName The name of the region.
- * @param properties Configuration properties.
- * @return The built region
- * @throws CacheException Indicates problems building the region.
- */
- public TimestampsRegion buildTimestampsRegion(String regionName, Properties properties) throws CacheException;
+@Deprecated
+public interface RegionFactory extends org.hibernate.cache.spi.RegionFactory {
}
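In practice, applications now pick an SPI-based region factory through configuration rather than
implementing this deprecated marker interface. A hedged sketch follows; the ehcache factory class
name is illustrative and depends on which cache provider module is on the classpath.

    // Hedged example: selecting a RegionFactory implementation by configuration (Hibernate 4.x style).
    Configuration cfg = new Configuration()
            .setProperty( "hibernate.cache.region.factory_class",
                          "org.hibernate.cache.ehcache.EhCacheRegionFactory" )
            .setProperty( "hibernate.cache.use_second_level_cache", "true" );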
Fisheye: Tag 1.1.2.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/cache/StandardQueryCache.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1.2.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/cache/StandardQueryCacheFactory.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1.2.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/cache/Timestamper.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1.2.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/cache/TimestampsRegion.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1.2.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/cache/TransactionAwareCache.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1.2.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/cache/TransactionalCache.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1.2.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/cache/TransactionalDataRegion.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1.2.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/cache/UpdateTimestampsCache.java'.
Fisheye: No comparison available. Pass `N' to diff?
Index: 3rdParty_sources/hibernate-core/org/hibernate/cache/package.html
===================================================================
RCS file: /usr/local/cvsroot/3rdParty_sources/hibernate-core/org/hibernate/cache/package.html,v
diff -u -r1.1 -r1.1.2.1
--- 3rdParty_sources/hibernate-core/org/hibernate/cache/package.html 17 Aug 2012 14:33:57 -0000 1.1
+++ 3rdParty_sources/hibernate-core/org/hibernate/cache/package.html 30 Jul 2014 15:51:50 -0000 1.1.2.1
@@ -1,10 +1,10 @@
-
-
-
-
- This package defines APIs/SPIs and implementations for the Hibernate second-level cache.
+	  This package defines the API of the Hibernate second-level cache service. The
+ org.hibernate.cache.spi package defines the SPI used to
+ integrate with Hibernate internals.
-
- The legacy (and now deprecated) approach to caching is defined by the {@link org.hibernate.cache.CacheProvider} and
- {@link org.hibernate.cache.Cache} interfaces as well as the {@link org.hibernate.cache.CacheConcurrencyStrategy}
- interface along with the various implementations of all these interfaces. In that scheme, a
- {@link org.hibernate.cache.CacheProvider} defined how to configure and perform lifecycle operations
- in regards to a particular underlying caching library; it also defined how to build {@link org.hibernate.cache.Cache}
- instances which in turn defined how to access the "regions" of the underlying cache instance.
- For entity and collection data cache regions, {@link org.hibernate.cache.CacheConcurrencyStrategy} wrapped
- access to those cache regions to apply transactional/concurrent access semantics.
-
-
- The improved approach is based on {@link org.hibernate.cache.RegionFactory}, the various
- {@link org.hibernate.cache.Region} specializations and the two access strategies contracts
- ({@link org.hibernate.cache.access.EntityRegionAccessStrategy} and
- {@link org.hibernate.cache.access.CollectionRegionAccessStrategy}). The general approach here is that
- {@link org.hibernate.cache.RegionFactory} defined how to configure and perform lifecycle operations
- in regards to a particular underlying caching library (or libraries).
- {@link org.hibernate.cache.RegionFactory} also defines how to build specialized
- {@link org.hibernate.cache.Region} instances based on the type of data we will be storing in that given
- region. The fact that {@link org.hibernate.cache.RegionFactory} is asked to build specialized
- regions (as opposed to just general access) is the first improvement over the legacy scheme. The
- second improvement is the fact that the regions (well the ones like entity and collection regions
- that are responsible for storing {@link org.hibernate.cache.TransactionalDataRegion transactional} data) are
- asked to build their own access strategies (see {@link org.hibernate.cache.EntityRegion#buildAccessStrategy}
- and {@link org.hibernate.cache.CollectionRegion#buildAccessStrategy}).
-
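
The replacement package description above points integrations at the org.hibernate.cache.spi SPI while applications stay on the API side. As a purely illustrative sketch (not part of this patch), an application usually only touches that API through configuration properties and a cache annotation; the EhCache region factory class name and the Book entity below are assumptions, not something this diff introduces.

    // Illustrative only: enabling the second-level cache described above.
    // The region factory class and the Book entity are assumed examples.
    import javax.persistence.Entity;
    import javax.persistence.Id;

    import org.hibernate.annotations.Cache;
    import org.hibernate.annotations.CacheConcurrencyStrategy;
    import org.hibernate.cfg.Configuration;

    @Entity
    @Cache(usage = CacheConcurrencyStrategy.READ_WRITE)
    class Book {
        @Id
        Long id;
        String title;
    }

    public class CacheSetupSketch {
        public static void main(String[] args) {
            Configuration cfg = new Configuration()
                    .addAnnotatedClass( Book.class )
                    .setProperty( "hibernate.cache.use_second_level_cache", "true" )
                    .setProperty( "hibernate.cache.use_query_cache", "true" )
                    // Concrete region factories plug in behind the org.hibernate.cache.spi contracts.
                    .setProperty( "hibernate.cache.region.factory_class",
                            "org.hibernate.cache.ehcache.EhCacheRegionFactory" );
            // cfg.buildSessionFactory( ... ) would then wire the chosen RegionFactory.
        }
    }
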
Fisheye: Tag 1.1.2.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/cache/access/AccessType.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1.2.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/cache/access/CollectionRegionAccessStrategy.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1.2.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/cache/access/EntityRegionAccessStrategy.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1.2.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/cache/access/SoftLock.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1.2.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/cache/access/package.html'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1.2.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/cache/entry/CacheEntry.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1.2.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/cache/entry/CacheEntryStructure.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1.2.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/cache/entry/CollectionCacheEntry.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1.2.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/cache/entry/StructuredCacheEntry.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1.2.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/cache/entry/StructuredCollectionCacheEntry.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1.2.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/cache/entry/StructuredMapCacheEntry.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1.2.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/cache/entry/UnstructuredCacheEntry.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1.2.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/cache/entry/package.html'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1.2.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/cache/impl/CacheDataDescriptionImpl.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1.2.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/cache/impl/NoCachingRegionFactory.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1.2.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/cache/impl/bridge/BaseGeneralDataRegionAdapter.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1.2.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/cache/impl/bridge/BaseRegionAdapter.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1.2.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/cache/impl/bridge/BaseTransactionalDataRegionAdapter.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1.2.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/cache/impl/bridge/CollectionAccessStrategyAdapter.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1.2.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/cache/impl/bridge/CollectionRegionAdapter.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1.2.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/cache/impl/bridge/EntityAccessStrategyAdapter.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1.2.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/cache/impl/bridge/EntityRegionAdapter.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1.2.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/cache/impl/bridge/OptimisticCacheSourceAdapter.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1.2.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/cache/impl/bridge/QueryResultsRegionAdapter.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1.2.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/cache/impl/bridge/RegionFactoryCacheProviderBridge.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1.2.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/cache/impl/bridge/TimestampsRegionAdapter.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/cache/internal/CacheDataDescriptionImpl.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/cache/internal/CollectionCacheInvalidator.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/cache/internal/NoCachingRegionFactory.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/cache/internal/RegionFactoryInitiator.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/cache/internal/StandardQueryCache.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/cache/internal/StandardQueryCacheFactory.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/cache/internal/package-info.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/cache/spi/CacheDataDescription.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/cache/spi/CacheKey.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/cache/spi/CollectionRegion.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/cache/spi/EntityRegion.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/cache/spi/FilterKey.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/cache/spi/GeneralDataRegion.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/cache/spi/NaturalIdCacheKey.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/cache/spi/NaturalIdRegion.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/cache/spi/OptimisticCacheSource.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/cache/spi/QueryCache.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/cache/spi/QueryCacheFactory.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/cache/spi/QueryKey.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/cache/spi/QueryResultsRegion.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/cache/spi/Region.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/cache/spi/RegionFactory.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/cache/spi/TimestampsRegion.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/cache/spi/TransactionAwareCache.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/cache/spi/TransactionalDataRegion.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/cache/spi/UpdateTimestampsCache.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/cache/spi/package.html'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/cache/spi/access/AccessType.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/cache/spi/access/CollectionRegionAccessStrategy.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/cache/spi/access/EntityRegionAccessStrategy.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/cache/spi/access/NaturalIdRegionAccessStrategy.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/cache/spi/access/RegionAccessStrategy.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/cache/spi/access/SoftLock.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/cache/spi/access/UnknownAccessTypeException.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/cache/spi/access/package.html'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/cache/spi/entry/CacheEntry.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/cache/spi/entry/CacheEntryStructure.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/cache/spi/entry/CollectionCacheEntry.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/cache/spi/entry/ReferenceCacheEntryImpl.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/cache/spi/entry/StandardCacheEntryImpl.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/cache/spi/entry/StructuredCacheEntry.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/cache/spi/entry/StructuredCollectionCacheEntry.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/cache/spi/entry/StructuredMapCacheEntry.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/cache/spi/entry/UnstructuredCacheEntry.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/cache/spi/entry/package.html'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/cfg/AbstractPropertyHolder.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/cfg/AccessType.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/cfg/AnnotatedClassType.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/cfg/AnnotationBinder.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/cfg/AnnotationConfiguration.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/cfg/AttributeConversionInfo.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/cfg/AttributeConverterDefinition.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/cfg/AvailableSettings.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/cfg/BaselineSessionEventsListenerBuilder.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/cfg/BinderHelper.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/cfg/ClassPropertyHolder.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/cfg/CollectionPropertyHolder.java'.
Fisheye: No comparison available. Pass `N' to diff?
Index: 3rdParty_sources/hibernate-core/org/hibernate/cfg/CollectionSecondPass.java
===================================================================
RCS file: /usr/local/cvsroot/3rdParty_sources/hibernate-core/org/hibernate/cfg/CollectionSecondPass.java,v
diff -u -r1.1 -r1.1.2.1
--- 3rdParty_sources/hibernate-core/org/hibernate/cfg/CollectionSecondPass.java 17 Aug 2012 14:33:53 -0000 1.1
+++ 3rdParty_sources/hibernate-core/org/hibernate/cfg/CollectionSecondPass.java 30 Jul 2014 15:51:05 -0000 1.1.2.1
@@ -1,10 +1,10 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
- * Copyright (c) 2008, Red Hat Middleware LLC or third-party contributors as
+ * Copyright (c) 2010, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
- * distributed under license by Red Hat Middleware LLC.
+ * distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
@@ -20,30 +20,32 @@
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
- *
*/
package org.hibernate.cfg;
import java.util.Collections;
import java.util.Iterator;
import java.util.Map;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
import org.hibernate.MappingException;
+import org.hibernate.internal.CoreMessageLogger;
import org.hibernate.mapping.Collection;
import org.hibernate.mapping.IndexedCollection;
import org.hibernate.mapping.OneToMany;
import org.hibernate.mapping.Selectable;
import org.hibernate.mapping.Value;
+import org.jboss.logging.Logger;
+
/**
* Collection second pass
*
* @author Emmanuel Bernard
*/
public abstract class CollectionSecondPass implements SecondPass {
- private static Logger log = LoggerFactory.getLogger( CollectionSecondPass.class );
+
+ private static final CoreMessageLogger LOG = Logger.getMessageLogger(CoreMessageLogger.class, CollectionSecondPass.class.getName());
+
Mappings mappings;
Collection collection;
private Map localInheritedMetas;
@@ -60,13 +62,15 @@
public void doSecondPass(java.util.Map persistentClasses)
throws MappingException {
- if ( log.isDebugEnabled() )
- log.debug( "Second pass for collection: " + collection.getRole() );
+ final boolean debugEnabled = LOG.isDebugEnabled();
+ if ( debugEnabled ) {
+ LOG.debugf( "Second pass for collection: %s", collection.getRole() );
+ }
secondPass( persistentClasses, localInheritedMetas ); // using local since the inheritedMetas at this point is not the correct map since it is always the empty map
collection.createAllKeys();
- if ( log.isDebugEnabled() ) {
+ if ( debugEnabled ) {
String msg = "Mapped collection key: " + columns( collection.getKey() );
if ( collection.isIndexed() )
msg += ", index: " + columns( ( (IndexedCollection) collection ).getIndex() );
@@ -77,15 +81,15 @@
else {
msg += ", element: " + columns( collection.getElement() );
}
- log.debug( msg );
+ LOG.debug( msg );
}
}
abstract public void secondPass(java.util.Map persistentClasses, java.util.Map inheritedMetas)
throws MappingException;
private static String columns(Value val) {
- StringBuffer columns = new StringBuffer();
+ StringBuilder columns = new StringBuilder();
Iterator iter = val.getColumnIterator();
while ( iter.hasNext() ) {
columns.append( ( (Selectable) iter.next() ).getText() );
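
For context, the hunk above replaces the slf4j logger with Hibernate's CoreMessageLogger obtained from JBoss Logging, reads isDebugEnabled() once, and switches to the format-string debugf variant. A minimal sketch of the same pattern, using a made-up SomeSecondPass class purely for illustration:

    // CoreMessageLogger and Logger.getMessageLogger are the real types used by the patch;
    // SomeSecondPass and its role argument are invented for this sketch.
    import org.hibernate.internal.CoreMessageLogger;
    import org.jboss.logging.Logger;

    public class SomeSecondPass {

        private static final CoreMessageLogger LOG = Logger.getMessageLogger(
                CoreMessageLogger.class, SomeSecondPass.class.getName() );

        public void doSecondPass(String role) {
            // The debug-enabled check is read once and reused, as in the patched code.
            final boolean debugEnabled = LOG.isDebugEnabled();
            if ( debugEnabled ) {
                // debugf defers string assembly to the logging framework.
                LOG.debugf( "Second pass for collection: %s", role );
            }
            // ... perform the actual work here ...
        }
    }
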
Fisheye: Tag 1.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/cfg/ColumnsBuilder.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/cfg/ComponentPropertyHolder.java'.
Fisheye: No comparison available. Pass `N' to diff?
Index: 3rdParty_sources/hibernate-core/org/hibernate/cfg/Configuration.java
===================================================================
RCS file: /usr/local/cvsroot/3rdParty_sources/hibernate-core/org/hibernate/cfg/Configuration.java,v
diff -u -r1.1 -r1.1.2.1
--- 3rdParty_sources/hibernate-core/org/hibernate/cfg/Configuration.java 17 Aug 2012 14:33:53 -0000 1.1
+++ 3rdParty_sources/hibernate-core/org/hibernate/cfg/Configuration.java 30 Jul 2014 15:51:06 -0000 1.1.2.1
@@ -1,10 +1,10 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
- * Copyright (c) 2008, Red Hat Middleware LLC or third-party contributors as
+ * Copyright (c) 2010, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
- * distributed under license by Red Hat Middleware LLC.
+ * distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
@@ -20,7 +20,6 @@
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
- *
*/
package org.hibernate.cfg;
@@ -33,31 +32,34 @@
import java.io.ObjectInputStream;
import java.io.Serializable;
import java.io.StringReader;
-import java.lang.reflect.Array;
import java.net.URL;
import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
+import java.util.LinkedHashMap;
import java.util.List;
import java.util.ListIterator;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
+import java.util.StringTokenizer;
import java.util.TreeMap;
+import java.util.concurrent.ConcurrentHashMap;
import java.util.jar.JarFile;
import java.util.zip.ZipEntry;
+import javax.persistence.AttributeConverter;
+import javax.persistence.Converter;
+import javax.persistence.Embeddable;
+import javax.persistence.Entity;
+import javax.persistence.MapsId;
-import org.dom4j.Attribute;
-import org.dom4j.DocumentException;
-import org.dom4j.Element;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.w3c.dom.Document;
-import org.xml.sax.EntityResolver;
-import org.xml.sax.InputSource;
-
+import org.hibernate.AnnotationException;
+import org.hibernate.AssertionFailure;
+import org.hibernate.DuplicateMappingException;
import org.hibernate.EmptyInterceptor;
import org.hibernate.HibernateException;
import org.hibernate.Interceptor;
@@ -66,69 +68,100 @@
import org.hibernate.MappingNotFoundException;
import org.hibernate.SessionFactory;
import org.hibernate.SessionFactoryObserver;
+import org.hibernate.annotations.AnyMetaDef;
+import org.hibernate.annotations.common.reflection.MetadataProvider;
+import org.hibernate.annotations.common.reflection.MetadataProviderInjector;
+import org.hibernate.annotations.common.reflection.ReflectionManager;
+import org.hibernate.annotations.common.reflection.XClass;
+import org.hibernate.annotations.common.reflection.java.JavaReflectionManager;
+import org.hibernate.boot.registry.StandardServiceRegistryBuilder;
+import org.hibernate.boot.registry.classloading.spi.ClassLoaderService;
+import org.hibernate.boot.registry.internal.StandardServiceRegistryImpl;
+import org.hibernate.cfg.annotations.NamedEntityGraphDefinition;
+import org.hibernate.cfg.annotations.NamedProcedureCallDefinition;
+import org.hibernate.cfg.annotations.reflection.JPAMetadataProvider;
+import org.hibernate.context.spi.CurrentTenantIdentifierResolver;
import org.hibernate.dialect.Dialect;
import org.hibernate.dialect.MySQLDialect;
import org.hibernate.dialect.function.SQLFunction;
-import org.hibernate.engine.FilterDefinition;
-import org.hibernate.engine.Mapping;
-import org.hibernate.event.AutoFlushEventListener;
-import org.hibernate.event.DeleteEventListener;
-import org.hibernate.event.DirtyCheckEventListener;
-import org.hibernate.event.EventListeners;
-import org.hibernate.event.EvictEventListener;
-import org.hibernate.event.FlushEntityEventListener;
-import org.hibernate.event.FlushEventListener;
-import org.hibernate.event.InitializeCollectionEventListener;
-import org.hibernate.event.LoadEventListener;
-import org.hibernate.event.LockEventListener;
-import org.hibernate.event.MergeEventListener;
-import org.hibernate.event.PersistEventListener;
-import org.hibernate.event.PostCollectionRecreateEventListener;
-import org.hibernate.event.PostCollectionRemoveEventListener;
-import org.hibernate.event.PostCollectionUpdateEventListener;
-import org.hibernate.event.PostDeleteEventListener;
-import org.hibernate.event.PostInsertEventListener;
-import org.hibernate.event.PostLoadEventListener;
-import org.hibernate.event.PostUpdateEventListener;
-import org.hibernate.event.PreCollectionRecreateEventListener;
-import org.hibernate.event.PreCollectionRemoveEventListener;
-import org.hibernate.event.PreCollectionUpdateEventListener;
-import org.hibernate.event.PreDeleteEventListener;
-import org.hibernate.event.PreInsertEventListener;
-import org.hibernate.event.PreLoadEventListener;
-import org.hibernate.event.PreUpdateEventListener;
-import org.hibernate.event.RefreshEventListener;
-import org.hibernate.event.ReplicateEventListener;
-import org.hibernate.event.SaveOrUpdateEventListener;
+import org.hibernate.engine.ResultSetMappingDefinition;
+import org.hibernate.engine.jdbc.spi.JdbcServices;
+import org.hibernate.engine.spi.FilterDefinition;
+import org.hibernate.engine.spi.Mapping;
+import org.hibernate.engine.spi.NamedQueryDefinition;
+import org.hibernate.engine.spi.NamedSQLQueryDefinition;
import org.hibernate.id.IdentifierGenerator;
+import org.hibernate.id.IdentifierGeneratorAggregator;
import org.hibernate.id.PersistentIdentifierGenerator;
-import org.hibernate.impl.SessionFactoryImpl;
+import org.hibernate.id.factory.IdentifierGeneratorFactory;
+import org.hibernate.id.factory.internal.DefaultIdentifierGeneratorFactory;
+import org.hibernate.id.factory.spi.MutableIdentifierGeneratorFactory;
+import org.hibernate.internal.CoreMessageLogger;
+import org.hibernate.internal.SessionFactoryImpl;
+import org.hibernate.internal.util.ClassLoaderHelper;
+import org.hibernate.internal.util.ConfigHelper;
+import org.hibernate.internal.util.ReflectHelper;
+import org.hibernate.internal.util.SerializationHelper;
+import org.hibernate.internal.util.StringHelper;
+import org.hibernate.internal.util.collections.ArrayHelper;
+import org.hibernate.internal.util.collections.CollectionHelper;
+import org.hibernate.internal.util.collections.JoinedIterator;
+import org.hibernate.internal.util.config.ConfigurationHelper;
+import org.hibernate.internal.util.xml.ErrorLogger;
+import org.hibernate.internal.util.xml.MappingReader;
+import org.hibernate.internal.util.xml.Origin;
+import org.hibernate.internal.util.xml.OriginImpl;
+import org.hibernate.internal.util.xml.XMLHelper;
+import org.hibernate.internal.util.xml.XmlDocument;
+import org.hibernate.internal.util.xml.XmlDocumentImpl;
import org.hibernate.mapping.AuxiliaryDatabaseObject;
import org.hibernate.mapping.Collection;
+import org.hibernate.mapping.Column;
+import org.hibernate.mapping.Constraint;
+import org.hibernate.mapping.DenormalizedTable;
+import org.hibernate.mapping.FetchProfile;
import org.hibernate.mapping.ForeignKey;
+import org.hibernate.mapping.IdGenerator;
import org.hibernate.mapping.IdentifierCollection;
import org.hibernate.mapping.Index;
+import org.hibernate.mapping.Join;
+import org.hibernate.mapping.MappedSuperclass;
+import org.hibernate.mapping.MetadataSource;
import org.hibernate.mapping.PersistentClass;
import org.hibernate.mapping.Property;
import org.hibernate.mapping.RootClass;
import org.hibernate.mapping.SimpleValue;
import org.hibernate.mapping.Table;
+import org.hibernate.mapping.TypeDef;
import org.hibernate.mapping.UniqueKey;
+import org.hibernate.metamodel.spi.TypeContributions;
+import org.hibernate.metamodel.spi.TypeContributor;
import org.hibernate.proxy.EntityNotFoundDelegate;
-import org.hibernate.secure.JACCConfiguration;
+import org.hibernate.secure.spi.GrantedPermission;
+import org.hibernate.secure.spi.JaccPermissionDeclarations;
+import org.hibernate.service.ServiceRegistry;
import org.hibernate.tool.hbm2ddl.DatabaseMetadata;
+import org.hibernate.tool.hbm2ddl.IndexMetadata;
+import org.hibernate.tool.hbm2ddl.SchemaUpdateScript;
import org.hibernate.tool.hbm2ddl.TableMetadata;
+import org.hibernate.tool.hbm2ddl.UniqueConstraintSchemaUpdateStrategy;
+import org.hibernate.tuple.entity.EntityTuplizerFactory;
+import org.hibernate.type.BasicType;
import org.hibernate.type.SerializationException;
import org.hibernate.type.Type;
-import org.hibernate.util.ArrayHelper;
-import org.hibernate.util.CollectionHelper;
-import org.hibernate.util.ConfigHelper;
-import org.hibernate.util.PropertiesHelper;
-import org.hibernate.util.ReflectHelper;
-import org.hibernate.util.SerializationHelper;
-import org.hibernate.util.StringHelper;
-import org.hibernate.util.XMLHelper;
+import org.hibernate.type.TypeResolver;
+import org.hibernate.usertype.CompositeUserType;
+import org.hibernate.usertype.UserType;
+import org.jboss.logging.Logger;
+
+import org.dom4j.Attribute;
+import org.dom4j.Document;
+import org.dom4j.DocumentException;
+import org.dom4j.Element;
+import org.xml.sax.EntityResolver;
+import org.xml.sax.InputSource;
+
/**
* An instance of Configuration allows the application
* to specify properties and mapping documents to be used when
@@ -142,79 +175,115 @@
*
* A new Configuration will use the properties specified in
* hibernate.properties by default.
+ *
+ * NOTE : This will be replaced by use of {@link org.hibernate.boot.registry.StandardServiceRegistryBuilder} and
+ * {@link org.hibernate.metamodel.MetadataSources} instead after the 4.0 release at which point this class will become
+ * deprecated and scheduled for removal in 5.0. See
+ * HHH-6183,
+ * HHH-2578 and
+ * HHH-6586 for details
*
* @author Gavin King
* @see org.hibernate.SessionFactory
*/
+@SuppressWarnings( {"UnusedDeclaration"})
public class Configuration implements Serializable {
- private static Logger log = LoggerFactory.getLogger( Configuration.class );
+ private static final CoreMessageLogger LOG = Logger.getMessageLogger(CoreMessageLogger.class, Configuration.class.getName());
- protected Map classes;
- protected Map imports;
- protected Map collections;
- protected Map tables;
- protected List auxiliaryDatabaseObjects;
- protected Map sqlFunctions;
- protected Map namedQueries;
- protected Map namedSqlQueries;
+ public static final String DEFAULT_CACHE_CONCURRENCY_STRATEGY = AvailableSettings.DEFAULT_CACHE_CONCURRENCY_STRATEGY;
+
+ public static final String USE_NEW_ID_GENERATOR_MAPPINGS = AvailableSettings.USE_NEW_ID_GENERATOR_MAPPINGS;
+
+ public static final String ARTEFACT_PROCESSING_ORDER = "hibernate.mapping.precedence";
+
/**
- * Map result set name, result set description
+ * Class name of the class needed to enable Search.
*/
- protected Map sqlResultSetMappings;
- protected Map filterDefinitions;
- protected List secondPasses;
- protected List propertyReferences;
-// protected List extendsQueue;
- protected Map extendsQueue;
+ private static final String SEARCH_STARTUP_CLASS = "org.hibernate.search.event.EventListenerRegister";
+
+ /**
+ * Method to call to enable Search.
+ */
+ private static final String SEARCH_STARTUP_METHOD = "enableHibernateSearch";
+
+ protected MetadataSourceQueue metadataSourceQueue;
+ private transient ReflectionManager reflectionManager;
+
+ protected Map classes;
+ protected Map imports;
+ protected Map collections;
+ protected Map tables;
+ protected List auxiliaryDatabaseObjects;
+
+ protected Map namedQueries;
+ protected Map namedSqlQueries;
+ protected Map namedProcedureCallMap;
+ protected Map sqlResultSetMappings;
+ protected Map namedEntityGraphMap;
+
+ protected Map typeDefs;
+ protected Map filterDefinitions;
+ protected Map fetchProfiles;
+
protected Map tableNameBinding;
protected Map columnNameBindingPerTable;
+
+ protected List secondPasses;
+ protected List propertyReferences;
+ protected Map extendsQueue;
+
+ protected Map sqlFunctions;
+
+ private TypeResolver typeResolver = new TypeResolver();
+ private List typeContributorRegistrations = new ArrayList();
+
+ private EntityTuplizerFactory entityTuplizerFactory;
+// private ComponentTuplizerFactory componentTuplizerFactory; todo : HHH-3517 and HHH-1907
+
private Interceptor interceptor;
private Properties properties;
private EntityResolver entityResolver;
private EntityNotFoundDelegate entityNotFoundDelegate;
protected transient XMLHelper xmlHelper;
- protected transient Map typeDefs;
-
protected NamingStrategy namingStrategy;
+ private SessionFactoryObserver sessionFactoryObserver;
- private EventListeners eventListeners;
-
protected final SettingsFactory settingsFactory;
- private SessionFactoryObserver sessionFactoryObserver;
+ private transient Mapping mapping = buildMapping();
- protected void reset() {
- classes = new HashMap();
- imports = new HashMap();
- collections = new HashMap();
- tables = new TreeMap();
- namedQueries = new HashMap();
- namedSqlQueries = new HashMap();
- sqlResultSetMappings = new HashMap();
- xmlHelper = new XMLHelper();
- typeDefs = new HashMap();
- propertyReferences = new ArrayList();
- secondPasses = new ArrayList();
- interceptor = EmptyInterceptor.INSTANCE;
- properties = Environment.getProperties();
- entityResolver = XMLHelper.DEFAULT_DTD_RESOLVER;
- eventListeners = new EventListeners();
- filterDefinitions = new HashMap();
-// extendsQueue = new ArrayList();
- extendsQueue = new HashMap();
- auxiliaryDatabaseObjects = new ArrayList();
- tableNameBinding = new HashMap();
- columnNameBindingPerTable = new HashMap();
- namingStrategy = DefaultNamingStrategy.INSTANCE;
- sqlFunctions = new HashMap();
- }
+ private MutableIdentifierGeneratorFactory identifierGeneratorFactory;
- private transient Mapping mapping = buildMapping();
+ private Map<Class<?>, org.hibernate.mapping.MappedSuperclass> mappedSuperClasses;
+ private Map namedGenerators;
+ private Map<String, Map<String, Join>> joins;
+ private Map classTypes;
+ private Set defaultNamedQueryNames;
+ private Set defaultNamedNativeQueryNames;
+ private Set defaultSqlResultSetMappingNames;
+ private Set defaultNamedProcedure;
+ private Set defaultNamedGenerators;
+ private Map generatorTables;
+ private Map<Table, List<UniqueConstraintHolder>> uniqueConstraintHoldersByTable;
+ private Map<Table, List<JPAIndexHolder>> jpaIndexHoldersByTable;
+ private Map mappedByResolver;
+ private Map propertyRefResolver;
+ private Map anyMetaDefs;
+ private List caches;
+ private boolean inSecondPass = false;
+ private boolean isDefaultProcessed = false;
+ private boolean isValidatorNotPresentLogged;
+ private Map<XClass, Map<String, PropertyData>> propertiesAnnotatedWithMapsId;
+ private Map<XClass, Map<String, PropertyData>> propertiesAnnotatedWithIdAndToOne;
+ private CurrentTenantIdentifierResolver currentTenantIdentifierResolver;
+ private boolean specjProprietarySyntaxEnabled;
+ private ConcurrentHashMap attributeConverterDefinitionsByClass;
+
protected Configuration(SettingsFactory settingsFactory) {
this.settingsFactory = settingsFactory;
reset();
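
The new class-level javadoc above flags Configuration for eventual replacement by StandardServiceRegistryBuilder and MetadataSources. A hedged sketch (not taken from this patch) of the usual 4.x bootstrap that combines the two, assuming a hibernate.cfg.xml on the classpath:

    import org.hibernate.SessionFactory;
    import org.hibernate.boot.registry.StandardServiceRegistryBuilder;
    import org.hibernate.cfg.Configuration;
    import org.hibernate.service.ServiceRegistry;

    public class BootstrapSketch {
        public static void main(String[] args) {
            Configuration cfg = new Configuration().configure(); // reads hibernate.cfg.xml
            ServiceRegistry serviceRegistry = new StandardServiceRegistryBuilder()
                    .applySettings( cfg.getProperties() )
                    .build();
            SessionFactory sessionFactory = cfg.buildSessionFactory( serviceRegistry );
            // ... open sessions from sessionFactory, close it on shutdown ...
            sessionFactory.close();
        }
    }
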
@@ -224,12 +293,88 @@
this( new SettingsFactory() );
}
+ protected void reset() {
+ metadataSourceQueue = new MetadataSourceQueue();
+ createReflectionManager();
+
+ classes = new HashMap();
+ imports = new HashMap();
+ collections = new HashMap();
+ tables = new TreeMap();
+
+ namedQueries = new HashMap();
+ namedSqlQueries = new HashMap();
+ sqlResultSetMappings = new HashMap();
+ namedEntityGraphMap = new HashMap();
+ namedProcedureCallMap = new HashMap( );
+ typeDefs = new HashMap();
+ filterDefinitions = new HashMap();
+ fetchProfiles = new HashMap();
+ auxiliaryDatabaseObjects = new ArrayList();
+
+ tableNameBinding = new HashMap();
+ columnNameBindingPerTable = new HashMap();
+
+ secondPasses = new ArrayList();
+ propertyReferences = new ArrayList();
+ extendsQueue = new HashMap();
+
+ xmlHelper = new XMLHelper();
+ interceptor = EmptyInterceptor.INSTANCE;
+ properties = Environment.getProperties();
+ entityResolver = XMLHelper.DEFAULT_DTD_RESOLVER;
+
+ sqlFunctions = new HashMap();
+
+ entityTuplizerFactory = new EntityTuplizerFactory();
+// componentTuplizerFactory = new ComponentTuplizerFactory();
+
+ identifierGeneratorFactory = new DefaultIdentifierGeneratorFactory();
+
+ mappedSuperClasses = new HashMap<Class<?>, MappedSuperclass>();
+
+ metadataSourcePrecedence = Collections.emptyList();
+
+ namedGenerators = new HashMap();
+ joins = new HashMap<String, Map<String, Join>>();
+ classTypes = new HashMap();
+ generatorTables = new HashMap();
+ defaultNamedQueryNames = new HashSet();
+ defaultNamedNativeQueryNames = new HashSet();
+ defaultSqlResultSetMappingNames = new HashSet();
+ defaultNamedProcedure = new HashSet( );
+ defaultNamedGenerators = new HashSet();
+ uniqueConstraintHoldersByTable = new HashMap<Table, List<UniqueConstraintHolder>>();
+ jpaIndexHoldersByTable = new HashMap<Table, List<JPAIndexHolder>>( );
+ mappedByResolver = new HashMap();
+ propertyRefResolver = new HashMap();
+ caches = new ArrayList();
+ namingStrategy = EJB3NamingStrategy.INSTANCE;
+ setEntityResolver( new EJB3DTDEntityResolver() );
+ anyMetaDefs = new HashMap();
+ propertiesAnnotatedWithMapsId = new HashMap<XClass, Map<String, PropertyData>>();
+ propertiesAnnotatedWithIdAndToOne = new HashMap<XClass, Map<String, PropertyData>>();
+ specjProprietarySyntaxEnabled = System.getProperty( "hibernate.enable_specj_proprietary_syntax" ) != null;
+ }
+
+ public EntityTuplizerFactory getEntityTuplizerFactory() {
+ return entityTuplizerFactory;
+ }
+
+ public ReflectionManager getReflectionManager() {
+ return reflectionManager;
+ }
+
+// public ComponentTuplizerFactory getComponentTuplizerFactory() {
+// return componentTuplizerFactory;
+// }
+
/**
* Iterate the entity mappings
*
* @return Iterator of the entity mappings currently contained in the configuration.
*/
- public Iterator getClassMappings() {
+ public Iterator<PersistentClass> getClassMappings() {
return classes.values().iterator();
}
@@ -247,18 +392,39 @@
*
* @return Iterator of the table mappings currently contained in the configuration.
*/
- public Iterator getTableMappings() {
+ public Iterator<Table> getTableMappings() {
return tables.values().iterator();
}
/**
+ * Iterate the mapped super class mappings
+ * EXPERIMENTAL Consider this API as PRIVATE
+ *
+ * @return iterator over the MappedSuperclass mapping currently contained in the configuration.
+ */
+ public Iterator getMappedSuperclassMappings() {
+ return mappedSuperClasses.values().iterator();
+ }
+
+ /**
+ * Get a copy of all known MappedSuperclasses
+ *
+ * EXPERIMENTAL Consider this API as PRIVATE
+ *
+ * @return Set of all known MappedSuperclasses
+ */
+ public java.util.Set getMappedSuperclassMappingsCopy() {
+ return new HashSet( mappedSuperClasses.values() );
+ }
+
+ /**
* Get the mapping for a particular entity
*
* @param entityName An entity name.
* @return the entity mapping information
*/
public PersistentClass getClassMapping(String entityName) {
- return (PersistentClass) classes.get( entityName );
+ return classes.get( entityName );
}
/**
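
Assuming the generified accessors shown in the two hunks above, a small usage sketch (illustrative only) that walks the bound entities and tables after the mappings are built:

    import java.util.Iterator;

    import org.hibernate.cfg.Configuration;
    import org.hibernate.mapping.PersistentClass;
    import org.hibernate.mapping.Table;

    public class MappingIterationSketch {
        public static void printMappings(Configuration cfg) {
            cfg.buildMappings();
            for ( Iterator<PersistentClass> classes = cfg.getClassMappings(); classes.hasNext(); ) {
                System.out.println( "entity: " + classes.next().getEntityName() );
            }
            for ( Iterator<Table> tables = cfg.getTableMappings(); tables.hasNext(); ) {
                System.out.println( "table : " + tables.next().getName() );
            }
        }
    }
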
@@ -268,13 +434,13 @@
* @return The collection mapping information
*/
public Collection getCollectionMapping(String role) {
- return (Collection) collections.get( role );
+ return collections.get( role );
}
/**
* Set a custom entity resolver. This entity resolver must be
* set before addXXX(misc) call.
- * Default value is {@link org.hibernate.util.DTDEntityResolver}
+ * Default value is {@link org.hibernate.internal.util.xml.DTDEntityResolver}
*
* @param entityResolver entity resolver to use
*/
@@ -325,34 +491,57 @@
*
* @param xmlFile a path to a file
* @return this (for method chaining purposes)
- * @throws org.hibernate.MappingException Indicates inability to locate or parse
- * the specified mapping file.
+ * @throws MappingException Indicates inability to locate the specified mapping file. Historically this could
+ * have indicated a problem parsing the XML document, but that is now delayed until after {@link #buildMappings}
*/
- public Configuration addFile(File xmlFile) throws MappingException {
- log.info( "Reading mappings from file: " + xmlFile.getPath() );
- if ( !xmlFile.exists() ) {
- throw new MappingNotFoundException( "file", xmlFile.toString() );
- }
+ public Configuration addFile(final File xmlFile) throws MappingException {
+ LOG.readingMappingsFromFile( xmlFile.getPath() );
+ final String name = xmlFile.getAbsolutePath();
+ final InputSource inputSource;
try {
- List errors = new ArrayList();
- org.dom4j.Document doc = xmlHelper.createSAXReader( xmlFile.toString(), errors, entityResolver ).read( xmlFile );
- if ( errors.size() != 0 ) {
- throw new InvalidMappingException( "file", xmlFile.toString(), ( Throwable ) errors.get( 0 ) );
- }
- add( doc );
- return this;
+ inputSource = new InputSource( new FileInputStream( xmlFile ) );
}
- catch ( InvalidMappingException e ) {
- throw e;
+ catch ( FileNotFoundException e ) {
+ throw new MappingNotFoundException( "file", xmlFile.toString() );
}
- catch ( MappingNotFoundException e ) {
- throw e;
+ add( inputSource, "file", name );
+ return this;
+ }
+
+ private XmlDocument add(InputSource inputSource, String originType, String originName) {
+ return add( inputSource, new OriginImpl( originType, originName ) );
+ }
+
+ private XmlDocument add(InputSource inputSource, Origin origin) {
+ XmlDocument metadataXml = MappingReader.INSTANCE.readMappingDocument( entityResolver, inputSource, origin );
+ add( metadataXml );
+ return metadataXml;
+ }
+
+ public void add(XmlDocument metadataXml) {
+ if ( inSecondPass || !isOrmXml( metadataXml ) ) {
+ metadataSourceQueue.add( metadataXml );
}
- catch ( Exception e ) {
- throw new InvalidMappingException( "file", xmlFile.toString(), e );
+ else {
+ final MetadataProvider metadataProvider = ( (MetadataProviderInjector) reflectionManager ).getMetadataProvider();
+ JPAMetadataProvider jpaMetadataProvider = ( JPAMetadataProvider ) metadataProvider;
+ List classNames = jpaMetadataProvider.getXMLContext().addDocument( metadataXml.getDocumentTree() );
+ for ( String className : classNames ) {
+ try {
+ metadataSourceQueue.add( reflectionManager.classForName( className, this.getClass() ) );
+ }
+ catch ( ClassNotFoundException e ) {
+ throw new AnnotationException( "Unable to load class defined in XML: " + className, e );
+ }
+ }
+ jpaMetadataProvider.getXMLContext().applyDiscoveredAttributeConverters( this );
}
}
+ private static boolean isOrmXml(XmlDocument xmlDocument) {
+ return "entity-mappings".equals( xmlDocument.getDocumentTree().getRootElement().getName() );
+ }
+
/**
* Add a cached mapping file. A cached file is a serialized representation
* of the DOM structure of a particular mapping. It is saved from a previous
@@ -370,70 +559,73 @@
* the non-cached file.
*/
public Configuration addCacheableFile(File xmlFile) throws MappingException {
+ File cachedFile = determineCachedDomFile( xmlFile );
+
try {
- File cachedFile = new File( xmlFile.getAbsolutePath() + ".bin" );
- org.dom4j.Document doc = null;
+ return addCacheableFileStrictly( xmlFile );
+ }
+ catch ( SerializationException e ) {
+ LOG.unableToDeserializeCache( cachedFile.getPath(), e );
+ }
+ catch ( FileNotFoundException e ) {
+ LOG.cachedFileNotFound( cachedFile.getPath(), e );
+ }
- final boolean useCachedFile = xmlFile.exists() &&
- cachedFile.exists() &&
- xmlFile.lastModified() < cachedFile.lastModified();
+ final String name = xmlFile.getAbsolutePath();
+ final InputSource inputSource;
+ try {
+ inputSource = new InputSource( new FileInputStream( xmlFile ) );
+ }
+ catch ( FileNotFoundException e ) {
+ throw new MappingNotFoundException( "file", xmlFile.toString() );
+ }
- if ( useCachedFile ) {
- try {
- log.info( "Reading mappings from cache file: " + cachedFile );
- doc = ( org.dom4j.Document ) SerializationHelper.deserialize( new FileInputStream( cachedFile ) );
- }
- catch ( SerializationException e ) {
- log.warn( "Could not deserialize cache file: " + cachedFile.getPath(), e );
- }
- catch ( FileNotFoundException e ) {
- log.warn( "I/O reported cached file could not be found : " + cachedFile.getPath(), e );
- }
- }
+ LOG.readingMappingsFromFile( xmlFile.getPath() );
+ XmlDocument metadataXml = add( inputSource, "file", name );
- // if doc is null, then for whatever reason, the cached file cannot be used...
- if ( doc == null ) {
- if ( !xmlFile.exists() ) {
- throw new MappingNotFoundException( "file", xmlFile.toString() );
- }
+ try {
+ LOG.debugf( "Writing cache file for: %s to: %s", xmlFile, cachedFile );
+ SerializationHelper.serialize( ( Serializable ) metadataXml.getDocumentTree(), new FileOutputStream( cachedFile ) );
+ }
+ catch ( Exception e ) {
+ LOG.unableToWriteCachedFile( cachedFile.getPath(), e.getMessage() );
+ }
- log.info( "Reading mappings from file: " + xmlFile );
- List errors = new ArrayList();
- try {
- doc = xmlHelper.createSAXReader( xmlFile.getAbsolutePath(), errors, entityResolver ).read( xmlFile );
- if ( errors.size() != 0 ) {
- throw new MappingException( "invalid mapping", ( Throwable ) errors.get( 0 ) );
- }
- }
- catch( DocumentException e){
- throw new MappingException( "invalid mapping", e );
- }
+ return this;
+ }
- try {
- log.debug( "Writing cache file for: " + xmlFile + " to: " + cachedFile );
- SerializationHelper.serialize( ( Serializable ) doc, new FileOutputStream( cachedFile ) );
- }
- catch ( SerializationException e ) {
- log.warn( "Could not write cached file: " + cachedFile, e );
- }
- catch ( FileNotFoundException e ) {
- log.warn( "I/O reported error writing cached file : " + cachedFile.getPath(), e );
- }
- }
+ private File determineCachedDomFile(File xmlFile) {
+ return new File( xmlFile.getAbsolutePath() + ".bin" );
+ }
- add( doc );
- return this;
+ /**
+ * INTENDED FOR TESTSUITE USE ONLY!
+ *
+ * Much like {@link #addCacheableFile(File)} except that here we will fail immediately if
+ * the cache version cannot be found or used for whatever reason
+ *
+ * @param xmlFile The xml file, not the bin!
+ *
+ * @return The dom "deserialized" from the cached file.
+ *
+ * @throws SerializationException Indicates a problem deserializing the cached dom tree
+ * @throws FileNotFoundException Indicates that the cached file was not found or was not usable.
+ */
+ public Configuration addCacheableFileStrictly(File xmlFile) throws SerializationException, FileNotFoundException {
+ final File cachedFile = determineCachedDomFile( xmlFile );
+ final boolean useCachedFile = xmlFile.exists()
+ && cachedFile.exists()
+ && xmlFile.lastModified() < cachedFile.lastModified();
+
+ if ( ! useCachedFile ) {
+ throw new FileNotFoundException( "Cached file could not be found or could not be used" );
}
- catch ( InvalidMappingException e ) {
- throw e;
- }
- catch ( MappingNotFoundException e ) {
- throw e;
- }
- catch ( Exception e ) {
- throw new InvalidMappingException( "file", xmlFile.toString(), e );
- }
+
+ LOG.readingCachedMappings( cachedFile );
+ Document document = ( Document ) SerializationHelper.deserialize( new FileInputStream( cachedFile ) );
+ add( new XmlDocumentImpl( document, "file", xmlFile.getAbsolutePath() ) );
+ return this;
}
/**
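
The reworked addCacheableFile above serializes the parsed document to a .bin file next to the mapping and reuses it while it stays newer than the XML, with addCacheableFileStrictly as the fail-fast variant. A usage sketch with an example path (not from the patch):

    import java.io.File;

    import org.hibernate.cfg.Configuration;

    public class CacheableMappingSketch {
        public static void main(String[] args) {
            Configuration cfg = new Configuration();

            // Reuses mappings/Order.hbm.xml.bin when it exists and is newer than the XML,
            // otherwise parses the XML and (re)writes the .bin alongside it.
            cfg.addCacheableFile( new File( "mappings/Order.hbm.xml" ) );

            // addCacheableFileStrictly( xmlFile ) is the test-only variant shown above:
            // it throws instead of silently falling back when the .bin cannot be used.
        }
    }
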
@@ -460,21 +652,9 @@
* given XML string
*/
public Configuration addXML(String xml) throws MappingException {
- if ( log.isDebugEnabled() ) {
- log.debug( "Mapping XML:\n" + xml );
- }
- try {
- List errors = new ArrayList();
- org.dom4j.Document doc = xmlHelper.createSAXReader( "XML String", errors, entityResolver )
- .read( new StringReader( xml ) );
- if ( errors.size() != 0 ) {
- throw new MappingException( "invalid mapping", (Throwable) errors.get( 0 ) );
- }
- add( doc );
- }
- catch (DocumentException e) {
- throw new MappingException( "Could not parse mapping document in XML string", e );
- }
+ LOG.debugf( "Mapping XML:\n%s", xml );
+ final InputSource inputSource = new InputSource( new StringReader( xml ) );
+ add( inputSource, "string", "XML String" );
return this;
}
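
Both addFile and addXML above now funnel into the shared add(InputSource, type, name) helper instead of parsing with dom4j inline. An illustrative sketch of the file-based path, with an example file name:

    import java.io.File;

    import org.hibernate.cfg.Configuration;

    public class AddMappingSketch {
        public static void main(String[] args) {
            Configuration cfg = new Configuration();
            // Routed through the shared add(InputSource, "file", name) path shown above.
            // Per the new javadoc, a missing file still fails here, while XML problems
            // are reported once the mappings are actually built.
            cfg.addFile( new File( "mappings/Order.hbm.xml" ) );
            cfg.buildMappings();
        }
    }
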
@@ -487,21 +667,34 @@
* the mapping document.
*/
public Configuration addURL(URL url) throws MappingException {
- if ( log.isDebugEnabled() ) {
- log.debug( "Reading mapping document from URL:" + url.toExternalForm() );
- }
+ final String urlExternalForm = url.toExternalForm();
+
+ LOG.debugf( "Reading mapping document from URL : %s", urlExternalForm );
+
try {
- addInputStream( url.openStream() );
+ add( url.openStream(), "URL", urlExternalForm );
}
- catch ( InvalidMappingException e ) {
- throw new InvalidMappingException( "URL", url.toExternalForm(), e.getCause() );
+ catch ( IOException e ) {
+ throw new InvalidMappingException( "Unable to open url stream [" + urlExternalForm + "]", "URL", urlExternalForm, e );
}
- catch (Exception e) {
- throw new InvalidMappingException( "URL", url.toExternalForm(), e );
- }
return this;
}
+ private XmlDocument add(InputStream inputStream, final String type, final String name) {
+ final InputSource inputSource = new InputSource( inputStream );
+ try {
+ return add( inputSource, type, name );
+ }
+ finally {
+ try {
+ inputStream.close();
+ }
+ catch ( IOException ignore ) {
+ LOG.trace( "Was unable to close input stream");
+ }
+ }
+ }
+
/**
* Read mappings from a DOM Document
*
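
addURL above follows the same route, opening the stream itself and wrapping any IOException in an InvalidMappingException. A small sketch, with an assumed classpath resource:

    import java.net.URL;

    import org.hibernate.cfg.Configuration;

    public class AddUrlSketch {
        public static void main(String[] args) {
            Configuration cfg = new Configuration();
            URL url = AddUrlSketch.class.getResource( "/com/example/Order.hbm.xml" );
            if ( url != null ) {
                // The stream is opened here; failure to open it is rethrown as InvalidMappingException.
                cfg.addURL( url );
            }
        }
    }
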
@@ -510,11 +703,12 @@
* @throws MappingException Indicates problems reading the DOM or processing
* the mapping document.
*/
- public Configuration addDocument(Document doc) throws MappingException {
- if ( log.isDebugEnabled() ) {
- log.debug( "Mapping document:\n" + doc );
- }
- add( xmlHelper.createDOMReader().read( doc ) );
+ public Configuration addDocument(org.w3c.dom.Document doc) throws MappingException {
+ LOG.debugf( "Mapping Document:\n%s", doc );
+
+ final Document document = xmlHelper.createDOMReader().read( doc );
+ add( new XmlDocumentImpl( document, "unknown", null ) );
+
return this;
}
@@ -527,27 +721,8 @@
* processing the contained mapping document.
*/
public Configuration addInputStream(InputStream xmlInputStream) throws MappingException {
- try {
- List errors = new ArrayList();
- org.dom4j.Document doc = xmlHelper.createSAXReader( "XML InputStream", errors, entityResolver )
- .read( new InputSource( xmlInputStream ) );
- if ( errors.size() != 0 ) {
- throw new InvalidMappingException( "invalid mapping", null, (Throwable) errors.get( 0 ) );
- }
- add( doc );
- return this;
- }
- catch (DocumentException e) {
- throw new InvalidMappingException( "input stream", null, e );
- }
- finally {
- try {
- xmlInputStream.close();
- }
- catch (IOException ioe) {
- log.warn( "Could not close input stream", ioe );
- }
- }
+ add( xmlInputStream, "input stream", null );
+ return this;
}
/**
@@ -560,51 +735,43 @@
* processing the contained mapping document.
*/
public Configuration addResource(String resourceName, ClassLoader classLoader) throws MappingException {
- log.info( "Reading mappings from resource: " + resourceName );
- InputStream rsrc = classLoader.getResourceAsStream( resourceName );
- if ( rsrc == null ) {
+ LOG.readingMappingsFromResource( resourceName );
+ InputStream resourceInputStream = classLoader.getResourceAsStream( resourceName );
+ if ( resourceInputStream == null ) {
throw new MappingNotFoundException( "resource", resourceName );
}
- try {
- return addInputStream( rsrc );
- }
- catch (MappingException me) {
- throw new InvalidMappingException( "resource", resourceName, me );
- }
+ add( resourceInputStream, "resource", resourceName );
+ return this;
}
/**
* Read mappings as a application resourceName (i.e. classpath lookup)
- * trying different classloaders.
+ * trying different class loaders.
*
* @param resourceName The resource name
* @return this (for method chaining purposes)
* @throws MappingException Indicates problems locating the resource or
* processing the contained mapping document.
*/
public Configuration addResource(String resourceName) throws MappingException {
- log.info( "Reading mappings from resource : " + resourceName );
- ClassLoader contextClassLoader = Thread.currentThread().getContextClassLoader();
- InputStream rsrc = null;
- if (contextClassLoader!=null) {
- rsrc = contextClassLoader.getResourceAsStream( resourceName );
+ LOG.readingMappingsFromResource( resourceName );
+ ClassLoader contextClassLoader = ClassLoaderHelper.getContextClassLoader();
+ InputStream resourceInputStream = null;
+ if ( contextClassLoader != null ) {
+ resourceInputStream = contextClassLoader.getResourceAsStream( resourceName );
}
- if ( rsrc == null ) {
- rsrc = Environment.class.getClassLoader().getResourceAsStream( resourceName );
+ if ( resourceInputStream == null ) {
+ resourceInputStream = Environment.class.getClassLoader().getResourceAsStream( resourceName );
}
- if ( rsrc == null ) {
+ if ( resourceInputStream == null ) {
throw new MappingNotFoundException( "resource", resourceName );
}
- try {
- return addInputStream( rsrc );
- }
- catch (MappingException me) {
- throw new InvalidMappingException( "resource", resourceName, me );
- }
+ add( resourceInputStream, "resource", resourceName );
+ return this;
}
/**
- * Read a mapping as an application resouurce using the convention that a class
+ * Read a mapping as an application resource using the convention that a class
* named foo.bar.Foo is mapped by a file foo/bar/Foo.hbm.xml
* which can be resolved as a classpath resource.
*
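
The addResource variants above now resolve the stream via ClassLoaderHelper's context class loader and fall back to Environment's class loader, while addClass (whose javadoc is shown just above) keeps the foo.bar.Foo to foo/bar/Foo.hbm.xml convention. A usage sketch with example names:

    import org.hibernate.cfg.Configuration;

    public class AddResourceSketch {
        public static void main(String[] args) {
            Configuration cfg = new Configuration();

            // Looked up on the context class loader first, then Environment's class loader.
            cfg.addResource( "com/example/Order.hbm.xml" );

            // Convention described above: com.example.Customer would be read from
            // com/example/Customer.hbm.xml via Customer.class's own class loader.
            // cfg.addClass( com.example.Customer.class );
        }
    }
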
@@ -615,11 +782,46 @@
*/
public Configuration addClass(Class persistentClass) throws MappingException {
String mappingResourceName = persistentClass.getName().replace( '.', '/' ) + ".hbm.xml";
- log.info( "Reading mappings from resource: " + mappingResourceName );
+ LOG.readingMappingsFromResource( mappingResourceName );
return addResource( mappingResourceName, persistentClass.getClassLoader() );
}
/**
+ * Read metadata from the annotations associated with this class.
+ *
+ * @param annotatedClass The class containing annotations
+ *
+ * @return this (for method chaining)
+ */
+ @SuppressWarnings({ "unchecked" })
+ public Configuration addAnnotatedClass(Class annotatedClass) {
+ XClass xClass = reflectionManager.toXClass( annotatedClass );
+ metadataSourceQueue.add( xClass );
+ return this;
+ }
+
+ /**
+ * Read package-level metadata.
+ *
+ * @param packageName java package name
+ *
+ * @return this (for method chaining)
+ *
+ * @throws MappingException in case there is an error in the mapping data
+ */
+ public Configuration addPackage(String packageName) throws MappingException {
+ LOG.debugf( "Mapping Package %s", packageName );
+ try {
+ AnnotationBinder.bindPackage( packageName, createMappings() );
+ return this;
+ }
+ catch ( MappingException me ) {
+ LOG.unableToParseMetadata( packageName );
+ throw me;
+ }
+ }
+
+ /**
* Read all mappings from a jar file
*
* Assumes that any file named *.hbm.xml is a mapping document.
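
The annotation-driven entry points added above (addAnnotatedClass and addPackage) queue classes and packages for binding rather than parsing XML. A hedged sketch using a made-up entity and package name:

    import javax.persistence.Entity;
    import javax.persistence.Id;

    import org.hibernate.cfg.Configuration;

    public class AnnotatedSourcesSketch {

        @Entity
        public static class Customer {
            @Id
            Long id;
        }

        public static void main(String[] args) {
            Configuration cfg = new Configuration();

            // Converted to an XClass via the ReflectionManager and queued for binding.
            cfg.addAnnotatedClass( Customer.class );

            // Binds package-level metadata, e.g. generator definitions on package-info.java.
            cfg.addPackage( "com.example.model" );
        }
    }
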
@@ -630,7 +832,7 @@
* processing the contained mapping documents.
*/
public Configuration addJar(File jar) throws MappingException {
- log.info( "Searching for mapping documents in jar: " + jar.getName() );
+ LOG.searchingForMappingDocuments( jar.getName() );
JarFile jarFile = null;
try {
try {
@@ -646,7 +848,7 @@
while ( jarEntries.hasMoreElements() ) {
ZipEntry ze = (ZipEntry) jarEntries.nextElement();
if ( ze.getName().endsWith( ".hbm.xml" ) ) {
- log.info( "Found mapping document in jar: " + ze.getName() );
+ LOG.foundMappingDocument( ze.getName() );
try {
addInputStream( jarFile.getInputStream( ze ) );
}
@@ -668,7 +870,7 @@
}
}
catch (IOException ioe) {
- log.error("could not close jar", ioe);
+ LOG.unableToCloseJar( ioe.getMessage() );
}
}
@@ -687,92 +889,68 @@
*/
public Configuration addDirectory(File dir) throws MappingException {
File[] files = dir.listFiles();
- for ( int i = 0; i < files.length ; i++ ) {
- if ( files[i].isDirectory() ) {
- addDirectory( files[i] );
+ if ( files != null ) {
+ for ( File file : files ) {
+ if ( file.isDirectory() ) {
+ addDirectory( file );
+ }
+ else if ( file.getName().endsWith( ".hbm.xml" ) ) {
+ addFile( file );
+ }
}
- else if ( files[i].getName().endsWith( ".hbm.xml" ) ) {
- addFile( files[i] );
- }
}
return this;
}
- protected void add(org.dom4j.Document doc) throws MappingException {
- HbmBinder.bindRoot( doc, createMappings(), CollectionHelper.EMPTY_MAP );
- }
-
/**
- * Create a new Mappings to add class and collection
- * mappings to.
+ * Create a new Mappings to add class and collection mappings to.
+ *
+ * @return The created mappings
*/
public Mappings createMappings() {
- return new Mappings(
- classes,
- collections,
- tables,
- namedQueries,
- namedSqlQueries,
- sqlResultSetMappings,
- imports,
- secondPasses,
- propertyReferences,
- namingStrategy,
- typeDefs,
- filterDefinitions,
- extendsQueue,
- auxiliaryDatabaseObjects,
- tableNameBinding,
- columnNameBindingPerTable
- );
+ return new MappingsImpl();
}
- private Iterator iterateGenerators(Dialect dialect) throws MappingException {
+ @SuppressWarnings({ "unchecked" })
+ public Iterator iterateGenerators(Dialect dialect) throws MappingException {
TreeMap generators = new TreeMap();
String defaultCatalog = properties.getProperty( Environment.DEFAULT_CATALOG );
String defaultSchema = properties.getProperty( Environment.DEFAULT_SCHEMA );
- Iterator iter = classes.values().iterator();
- while ( iter.hasNext() ) {
- PersistentClass pc = (PersistentClass) iter.next();
-
+ for ( PersistentClass pc : classes.values() ) {
if ( !pc.isInherited() ) {
+ IdentifierGenerator ig = pc.getIdentifier().createIdentifierGenerator(
+ getIdentifierGeneratorFactory(),
+ dialect,
+ defaultCatalog,
+ defaultSchema,
+ (RootClass) pc
+ );
- IdentifierGenerator ig = pc.getIdentifier()
- .createIdentifierGenerator(
- dialect,
- defaultCatalog,
- defaultSchema,
- (RootClass) pc
- );
-
if ( ig instanceof PersistentIdentifierGenerator ) {
generators.put( ( (PersistentIdentifierGenerator) ig ).generatorKey(), ig );
}
-
+ else if ( ig instanceof IdentifierGeneratorAggregator ) {
+ ( (IdentifierGeneratorAggregator) ig ).registerPersistentGenerators( generators );
+ }
}
}
- iter = collections.values().iterator();
- while ( iter.hasNext() ) {
- Collection collection = (Collection) iter.next();
-
+ for ( Collection collection : collections.values() ) {
if ( collection.isIdentified() ) {
+ IdentifierGenerator ig = ( ( IdentifierCollection ) collection ).getIdentifier().createIdentifierGenerator(
+ getIdentifierGeneratorFactory(),
+ dialect,
+ defaultCatalog,
+ defaultSchema,
+ null
+ );
- IdentifierGenerator ig = ( (IdentifierCollection) collection ).getIdentifier()
- .createIdentifierGenerator(
- dialect,
- defaultCatalog,
- defaultSchema,
- null
- );
-
if ( ig instanceof PersistentIdentifierGenerator ) {
generators.put( ( (PersistentIdentifierGenerator) ig ).generatorKey(), ig );
}
-
}
}
@@ -782,34 +960,41 @@
/**
* Generate DDL for dropping tables
*
+ * @param dialect The dialect for which to generate the drop script
+ *
+ * @return The sequence of DDL commands to drop the schema objects
+ *
+ * @throws HibernateException Generally indicates a problem calling {@link #buildMappings()}
+ *
* @see org.hibernate.tool.hbm2ddl.SchemaExport
*/
public String[] generateDropSchemaScript(Dialect dialect) throws HibernateException {
-
secondPassCompile();
String defaultCatalog = properties.getProperty( Environment.DEFAULT_CATALOG );
String defaultSchema = properties.getProperty( Environment.DEFAULT_SCHEMA );
- ArrayList script = new ArrayList( 50 );
+ ArrayList<String> script = new ArrayList<String>( 50 );
// drop them in reverse order in case db needs it done that way...
- ListIterator itr = auxiliaryDatabaseObjects.listIterator( auxiliaryDatabaseObjects.size() );
- while ( itr.hasPrevious() ) {
- AuxiliaryDatabaseObject object = (AuxiliaryDatabaseObject) itr.previous();
- if ( object.appliesToDialect( dialect ) ) {
- script.add( object.sqlDropString( dialect, defaultCatalog, defaultSchema ) );
+ {
+ ListIterator itr = auxiliaryDatabaseObjects.listIterator( auxiliaryDatabaseObjects.size() );
+ while ( itr.hasPrevious() ) {
+ AuxiliaryDatabaseObject object = (AuxiliaryDatabaseObject) itr.previous();
+ if ( object.appliesToDialect( dialect ) ) {
+ script.add( object.sqlDropString( dialect, defaultCatalog, defaultSchema ) );
+ }
}
}
if ( dialect.dropConstraints() ) {
- Iterator iter = getTableMappings();
- while ( iter.hasNext() ) {
- Table table = (Table) iter.next();
+ Iterator itr = getTableMappings();
+ while ( itr.hasNext() ) {
+ Table table = (Table) itr.next();
if ( table.isPhysicalTable() ) {
- Iterator subIter = table.getForeignKeyIterator();
- while ( subIter.hasNext() ) {
- ForeignKey fk = (ForeignKey) subIter.next();
+ Iterator subItr = table.getForeignKeyIterator();
+ while ( subItr.hasNext() ) {
+ ForeignKey fk = (ForeignKey) subItr.next();
if ( fk.isPhysicalConstraint() ) {
script.add(
fk.sqlDropString(
@@ -825,10 +1010,10 @@
}
- Iterator iter = getTableMappings();
- while ( iter.hasNext() ) {
+ Iterator itr = getTableMappings();
+ while ( itr.hasNext() ) {
- Table table = (Table) iter.next();
+ Table table = (Table) itr.next();
if ( table.isPhysicalTable() ) {
/*Iterator subIter = table.getIndexIterator();
@@ -851,26 +1036,29 @@
}
- iter = iterateGenerators( dialect );
- while ( iter.hasNext() ) {
- String[] lines = ( (PersistentIdentifierGenerator) iter.next() ).sqlDropStrings( dialect );
- for ( int i = 0; i < lines.length ; i++ ) {
- script.add( lines[i] );
- }
+ itr = iterateGenerators( dialect );
+ while ( itr.hasNext() ) {
+ String[] lines = ( (PersistentIdentifierGenerator) itr.next() ).sqlDropStrings( dialect );
+ script.addAll( Arrays.asList( lines ) );
}
return ArrayHelper.toStringArray( script );
}
/**
- * Generate DDL for creating tables
+ * @param dialect The dialect for which to generate the creation script
*
+ * @return The sequence of DDL commands to create the schema objects
+ *
+ * @throws HibernateException Generally indicates a problem calling {@link #buildMappings()}
+ *
* @see org.hibernate.tool.hbm2ddl.SchemaExport
*/
+ @SuppressWarnings({ "unchecked" })
public String[] generateSchemaCreationScript(Dialect dialect) throws HibernateException {
secondPassCompile();
- ArrayList script = new ArrayList( 50 );
+ ArrayList<String> script = new ArrayList<String>( 50 );
String defaultCatalog = properties.getProperty( Environment.DEFAULT_CATALOG );
String defaultSchema = properties.getProperty( Environment.DEFAULT_SCHEMA );
@@ -886,7 +1074,7 @@
defaultSchema
)
);
- Iterator comments = table.sqlCommentStrings( dialect, defaultCatalog, defaultSchema );
+ Iterator<String> comments = table.sqlCommentStrings( dialect, defaultCatalog, defaultSchema );
while ( comments.hasNext() ) {
script.add( comments.next() );
}
@@ -898,17 +1086,15 @@
Table table = (Table) iter.next();
if ( table.isPhysicalTable() ) {
- if ( !dialect.supportsUniqueConstraintInCreateAlterTable() ) {
- Iterator subIter = table.getUniqueKeyIterator();
- while ( subIter.hasNext() ) {
- UniqueKey uk = (UniqueKey) subIter.next();
- String constraintString = uk.sqlCreateString( dialect, mapping, defaultCatalog, defaultSchema );
- if (constraintString != null) script.add( constraintString );
- }
+ Iterator subIter = table.getUniqueKeyIterator();
+ while ( subIter.hasNext() ) {
+ UniqueKey uk = (UniqueKey) subIter.next();
+ String constraintString = uk.sqlCreateString( dialect, mapping, defaultCatalog, defaultSchema );
+ if (constraintString != null) script.add( constraintString );
}
- Iterator subIter = table.getIndexIterator();
+ subIter = table.getIndexIterator();
while ( subIter.hasNext() ) {
Index index = (Index) subIter.next();
script.add(
@@ -920,9 +1106,17 @@
)
);
}
+ }
+ }
+ // Foreign keys must be created *after* unique keys for numerous DBs. See HHH-8390.
+ iter = getTableMappings();
+ while ( iter.hasNext() ) {
+ Table table = (Table) iter.next();
+ if ( table.isPhysicalTable() ) {
+
if ( dialect.hasAlterTable() ) {
- subIter = table.getForeignKeyIterator();
+ Iterator subIter = table.getForeignKeyIterator();
while ( subIter.hasNext() ) {
ForeignKey fk = (ForeignKey) subIter.next();
if ( fk.isPhysicalConstraint() ) {
@@ -943,74 +1137,85 @@
iter = iterateGenerators( dialect );
while ( iter.hasNext() ) {
String[] lines = ( (PersistentIdentifierGenerator) iter.next() ).sqlCreateStrings( dialect );
- for ( int i = 0; i < lines.length ; i++ ) {
- script.add( lines[i] );
- }
+ script.addAll( Arrays.asList( lines ) );
}
- Iterator itr = auxiliaryDatabaseObjects.iterator();
- while ( itr.hasNext() ) {
- AuxiliaryDatabaseObject object = (AuxiliaryDatabaseObject) itr.next();
- if ( object.appliesToDialect( dialect ) ) {
- script.add( object.sqlCreateString( dialect, mapping, defaultCatalog, defaultSchema ) );
+ for ( AuxiliaryDatabaseObject auxiliaryDatabaseObject : auxiliaryDatabaseObjects ) {
+ if ( auxiliaryDatabaseObject.appliesToDialect( dialect ) ) {
+ script.add( auxiliaryDatabaseObject.sqlCreateString( dialect, mapping, defaultCatalog, defaultSchema ) );
}
}
return ArrayHelper.toStringArray( script );
}
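
As a usage sketch of the schema-script methods above (the choice of H2Dialect is only an example):

    import org.hibernate.cfg.Configuration;
    import org.hibernate.dialect.Dialect;
    import org.hibernate.dialect.H2Dialect;

    public class DdlPreview {
        public static void print(Configuration cfg) {
            Dialect dialect = new H2Dialect();
            // creation script now emits foreign keys after unique keys (see comment above)
            for ( String ddl : cfg.generateSchemaCreationScript( dialect ) ) {
                System.out.println( ddl );
            }
            for ( String ddl : cfg.generateDropSchemaScript( dialect ) ) {
                System.out.println( ddl );
            }
        }
    }
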
/**
- * Generate DDL for altering tables
+ * @param dialect The dialect for which to generate the creation script
+ * @param databaseMetadata The database catalog information for the database to be updated; needed to work out what
+ * should be created/altered
*
+ * @return The sequence of DDL commands to apply the schema objects
+ *
+ * @throws HibernateException Generally indicates a problem calling {@link #buildMappings()}
+ *
* @see org.hibernate.tool.hbm2ddl.SchemaUpdate
+ *
+ * @deprecated Use {@link #generateSchemaUpdateScriptList(Dialect, DatabaseMetadata)} instead
*/
+ @SuppressWarnings({ "unchecked" })
+ @Deprecated
public String[] generateSchemaUpdateScript(Dialect dialect, DatabaseMetadata databaseMetadata)
throws HibernateException {
+ List<SchemaUpdateScript> scripts = generateSchemaUpdateScriptList( dialect, databaseMetadata );
+ return SchemaUpdateScript.toStringArray( scripts );
+ }
+
+ /**
+ * @param dialect The dialect for which to generate the creation script
+ * @param databaseMetadata The database catalog information for the database to be updated; needed to work out what
+ * should be created/altered
+ *
+ * @return The sequence of DDL commands to apply the schema objects
+ *
+ * @throws HibernateException Generally indicates a problem calling {@link #buildMappings()}
+ *
+ * @see org.hibernate.tool.hbm2ddl.SchemaUpdate
+ */
+ public List<SchemaUpdateScript> generateSchemaUpdateScriptList(Dialect dialect, DatabaseMetadata databaseMetadata)
+ throws HibernateException {
secondPassCompile();
String defaultCatalog = properties.getProperty( Environment.DEFAULT_CATALOG );
String defaultSchema = properties.getProperty( Environment.DEFAULT_SCHEMA );
+ UniqueConstraintSchemaUpdateStrategy constraintMethod = UniqueConstraintSchemaUpdateStrategy.interpret( properties
+ .get( Environment.UNIQUE_CONSTRAINT_SCHEMA_UPDATE_STRATEGY ) );
- ArrayList script = new ArrayList( 50 );
+ List<SchemaUpdateScript> scripts = new ArrayList<SchemaUpdateScript>();
Iterator iter = getTableMappings();
while ( iter.hasNext() ) {
Table table = (Table) iter.next();
+ String tableSchema = ( table.getSchema() == null ) ? defaultSchema : table.getSchema();
+ String tableCatalog = ( table.getCatalog() == null ) ? defaultCatalog : table.getCatalog();
if ( table.isPhysicalTable() ) {
-
- TableMetadata tableInfo = databaseMetadata.getTableMetadata(
- table.getName(),
- ( table.getSchema() == null ) ? defaultSchema : table.getSchema(),
- ( table.getCatalog() == null ) ? defaultCatalog : table.getCatalog(),
- table.isQuoted()
- );
+ TableMetadata tableInfo = databaseMetadata.getTableMetadata( table.getName(), tableSchema,
+ tableCatalog, table.isQuoted() );
if ( tableInfo == null ) {
- script.add(
- table.sqlCreateString(
- dialect,
- mapping,
- defaultCatalog,
- defaultSchema
- )
- );
+ scripts.add( new SchemaUpdateScript( table.sqlCreateString( dialect, mapping, tableCatalog,
+ tableSchema ), false ) );
}
else {
- Iterator subiter = table.sqlAlterStrings(
- dialect,
- mapping,
- tableInfo,
- defaultCatalog,
- defaultSchema
- );
+ Iterator subiter = table.sqlAlterStrings( dialect, mapping, tableInfo, tableCatalog,
+ tableSchema );
while ( subiter.hasNext() ) {
- script.add( subiter.next() );
+ scripts.add( new SchemaUpdateScript( subiter.next(), false ) );
}
}
- Iterator comments = table.sqlCommentStrings( dialect, defaultCatalog, defaultSchema );
+ Iterator<String> comments = table.sqlCommentStrings( dialect, defaultCatalog, defaultSchema );
while ( comments.hasNext() ) {
- script.add( comments.next() );
+ scripts.add( new SchemaUpdateScript( comments.next(), false ) );
}
}
@@ -1019,61 +1224,79 @@
iter = getTableMappings();
while ( iter.hasNext() ) {
Table table = (Table) iter.next();
+ String tableSchema = ( table.getSchema() == null ) ? defaultSchema : table.getSchema();
+ String tableCatalog = ( table.getCatalog() == null ) ? defaultCatalog : table.getCatalog();
if ( table.isPhysicalTable() ) {
- TableMetadata tableInfo = databaseMetadata.getTableMetadata(
- table.getName(),
- table.getSchema(),
- table.getCatalog(),
- table.isQuoted()
- );
+ TableMetadata tableInfo = databaseMetadata.getTableMetadata( table.getName(), tableSchema,
+ tableCatalog, table.isQuoted() );
+ if (! constraintMethod.equals( UniqueConstraintSchemaUpdateStrategy.SKIP )) {
+ Iterator uniqueIter = table.getUniqueKeyIterator();
+ while ( uniqueIter.hasNext() ) {
+ final UniqueKey uniqueKey = (UniqueKey) uniqueIter.next();
+ // Skip if index already exists. Most of the time, this
+ // won't work since most Dialects use Constraints. However,
+ // keep it for the few that do use Indexes.
+ if ( tableInfo != null && StringHelper.isNotEmpty( uniqueKey.getName() ) ) {
+ final IndexMetadata meta = tableInfo.getIndexMetadata( uniqueKey.getName() );
+ if ( meta != null ) {
+ continue;
+ }
+ }
+ String constraintString = uniqueKey.sqlCreateString( dialect, mapping, tableCatalog, tableSchema );
+ if ( constraintString != null && !constraintString.isEmpty() )
+ if ( constraintMethod.equals( UniqueConstraintSchemaUpdateStrategy.DROP_RECREATE_QUIETLY ) ) {
+ String constraintDropString = uniqueKey.sqlDropString( dialect, tableCatalog, tableSchema );
+ scripts.add( new SchemaUpdateScript( constraintDropString, true) );
+ }
+ scripts.add( new SchemaUpdateScript( constraintString, true) );
+ }
+ }
+
+ Iterator subIter = table.getIndexIterator();
+ while ( subIter.hasNext() ) {
+ final Index index = (Index) subIter.next();
+ // Skip if index already exists
+ if ( tableInfo != null && StringHelper.isNotEmpty( index.getName() ) ) {
+ final IndexMetadata meta = tableInfo.getIndexMetadata( index.getName() );
+ if ( meta != null ) {
+ continue;
+ }
+ }
+ scripts.add( new SchemaUpdateScript( index.sqlCreateString( dialect, mapping, tableCatalog,
+ tableSchema ), false ) );
+ }
+ }
+ }
+
+ // Foreign keys must be created *after* unique keys for numerous DBs. See HHH-8390.
+ iter = getTableMappings();
+ while ( iter.hasNext() ) {
+ Table table = (Table) iter.next();
+ String tableSchema = ( table.getSchema() == null ) ? defaultSchema : table.getSchema();
+ String tableCatalog = ( table.getCatalog() == null ) ? defaultCatalog : table.getCatalog();
+ if ( table.isPhysicalTable() ) {
+
+ TableMetadata tableInfo = databaseMetadata.getTableMetadata( table.getName(), tableSchema,
+ tableCatalog, table.isQuoted() );
+
if ( dialect.hasAlterTable() ) {
Iterator subIter = table.getForeignKeyIterator();
while ( subIter.hasNext() ) {
ForeignKey fk = (ForeignKey) subIter.next();
if ( fk.isPhysicalConstraint() ) {
- boolean create = tableInfo == null || (
- tableInfo.getForeignKeyMetadata( fk.getName() ) == null && (
- //Icky workaround for MySQL bug:
- !( dialect instanceof MySQLDialect ) ||
- tableInfo.getIndexMetadata( fk.getName() ) == null
- )
- );
+ boolean create = tableInfo == null || ( tableInfo.getForeignKeyMetadata( fk ) == null && (
+ // Icky workaround for MySQL bug:
+ !( dialect instanceof MySQLDialect ) || tableInfo.getIndexMetadata( fk.getName() ) == null ) );
if ( create ) {
- script.add(
- fk.sqlCreateString(
- dialect,
- mapping,
- defaultCatalog,
- defaultSchema
- )
- );
+ scripts.add( new SchemaUpdateScript( fk.sqlCreateString( dialect, mapping,
+ tableCatalog, tableSchema ), false ) );
}
}
}
}
-
}
-
- /*//broken, 'cos we don't generate these with names in SchemaExport
- subIter = table.getIndexIterator();
- while ( subIter.hasNext() ) {
- Index index = (Index) subIter.next();
- if ( !index.isForeignKey() || !dialect.hasImplicitIndexForForeignKey() ) {
- if ( tableInfo==null || tableInfo.getIndexMetadata( index.getFilterName() ) == null ) {
- script.add( index.sqlCreateString(dialect, mapping) );
- }
- }
- }
- //broken, 'cos we don't generate these with names in SchemaExport
- subIter = table.getUniqueKeyIterator();
- while ( subIter.hasNext() ) {
- UniqueKey uk = (UniqueKey) subIter.next();
- if ( tableInfo==null || tableInfo.getIndexMetadata( uk.getFilterName() ) == null ) {
- script.add( uk.sqlCreateString(dialect, mapping) );
- }
- }*/
}
iter = iterateGenerators( dialect );
@@ -1082,28 +1305,25 @@
Object key = generator.generatorKey();
if ( !databaseMetadata.isSequence( key ) && !databaseMetadata.isTable( key ) ) {
String[] lines = generator.sqlCreateStrings( dialect );
- for ( int i = 0; i < lines.length ; i++ ) {
- script.add( lines[i] );
- }
+ scripts.addAll( SchemaUpdateScript.fromStringArray( lines, false ) );
}
}
- return ArrayHelper.toStringArray( script );
+ return scripts;
}
- public void validateSchema(Dialect dialect, DatabaseMetadata databaseMetadata)
- throws HibernateException {
+ public void validateSchema(Dialect dialect, DatabaseMetadata databaseMetadata) throws HibernateException {
secondPassCompile();
String defaultCatalog = properties.getProperty( Environment.DEFAULT_CATALOG );
String defaultSchema = properties.getProperty( Environment.DEFAULT_SCHEMA );
-
+
Iterator iter = getTableMappings();
while ( iter.hasNext() ) {
Table table = (Table) iter.next();
if ( table.isPhysicalTable() ) {
-
+
TableMetadata tableInfo = databaseMetadata.getTableMetadata(
table.getName(),
( table.getSchema() == null ) ? defaultSchema : table.getSchema(),
@@ -1123,6 +1343,9 @@
while ( iter.hasNext() ) {
PersistentIdentifierGenerator generator = (PersistentIdentifierGenerator) iter.next();
Object key = generator.generatorKey();
+ if (key instanceof String) {
+ key = normalizer.normalizeIdentifierQuoting( (String) key );
+ }
if ( !databaseMetadata.isSequence( key ) && !databaseMetadata.isTable( key ) ) {
throw new HibernateException( "Missing sequence or table: " + key );
}
@@ -1148,37 +1371,346 @@
secondPassCompile();
}
- // This method may be called many times!!
protected void secondPassCompile() throws MappingException {
- log.debug( "processing extends queue" );
+ LOG.trace( "Starting secondPassCompile() processing" );
+
+ // TEMPORARY
+ // Ensure the correct ClassLoader is used in commons-annotations.
+ ClassLoader tccl = Thread.currentThread().getContextClassLoader();
+ Thread.currentThread().setContextClassLoader( ClassLoaderHelper.getContextClassLoader() );
- processExtendsQueue();
+ //process default values first
+ {
+ if ( !isDefaultProcessed ) {
+ //use global delimiters if orm.xml declare it
+ Map defaults = reflectionManager.getDefaults();
+ final Object isDelimited = defaults.get( "delimited-identifier" );
+ if ( isDelimited != null && isDelimited == Boolean.TRUE ) {
+ getProperties().put( Environment.GLOBALLY_QUOTED_IDENTIFIERS, "true" );
+ }
+ // Set default schema name if orm.xml declares it.
+ final String schema = (String) defaults.get( "schema" );
+ if ( StringHelper.isNotEmpty( schema ) ) {
+ getProperties().put( Environment.DEFAULT_SCHEMA, schema );
+ }
+ // Set default catalog name if orm.xml declares it.
+ final String catalog = (String) defaults.get( "catalog" );
+ if ( StringHelper.isNotEmpty( catalog ) ) {
+ getProperties().put( Environment.DEFAULT_CATALOG, catalog );
+ }
- log.debug( "processing collection mappings" );
+ AnnotationBinder.bindDefaults( createMappings() );
+ isDefaultProcessed = true;
+ }
+ }
+ // process metadata queue
+ {
+ metadataSourceQueue.syncAnnotatedClasses();
+ metadataSourceQueue.processMetadata( determineMetadataSourcePrecedence() );
+ }
+
+
+
+ try {
+ inSecondPass = true;
+ processSecondPassesOfType( PkDrivenByDefaultMapsIdSecondPass.class );
+ processSecondPassesOfType( SetSimpleValueTypeSecondPass.class );
+ processSecondPassesOfType( CopyIdentifierComponentSecondPass.class );
+ processFkSecondPassInOrder();
+ processSecondPassesOfType( CreateKeySecondPass.class );
+ processSecondPassesOfType( SecondaryTableSecondPass.class );
+
+ originalSecondPassCompile();
+
+ inSecondPass = false;
+ }
+ catch ( RecoverableException e ) {
+ //the exception was not recoverable after all
+ throw ( RuntimeException ) e.getCause();
+ }
+
+ // process cache queue
+ {
+ for ( CacheHolder holder : caches ) {
+ if ( holder.isClass ) {
+ applyCacheConcurrencyStrategy( holder );
+ }
+ else {
+ applyCollectionCacheConcurrencyStrategy( holder );
+ }
+ }
+ caches.clear();
+ }
+
+ for ( Map.Entry<Table, List<UniqueConstraintHolder>> tableListEntry : uniqueConstraintHoldersByTable.entrySet() ) {
+ final Table table = tableListEntry.getKey();
+ final List<UniqueConstraintHolder> uniqueConstraints = tableListEntry.getValue();
+ for ( UniqueConstraintHolder holder : uniqueConstraints ) {
+ buildUniqueKeyFromColumnNames( table, holder.getName(), holder.getColumns() );
+ }
+ }
+
+ for(Table table : jpaIndexHoldersByTable.keySet()){
+ final List<JPAIndexHolder> jpaIndexHolders = jpaIndexHoldersByTable.get( table );
+ for ( JPAIndexHolder holder : jpaIndexHolders ) {
+ buildUniqueKeyFromColumnNames( table, holder.getName(), holder.getColumns(), holder.getOrdering(), holder.isUnique() );
+ }
+ }
+
+ Thread.currentThread().setContextClassLoader( tccl );
+ }
+
+ private void processSecondPassesOfType(Class<? extends SecondPass> type) {
Iterator iter = secondPasses.iterator();
while ( iter.hasNext() ) {
- SecondPass sp = (SecondPass) iter.next();
- if ( ! (sp instanceof QuerySecondPass) ) {
+ SecondPass sp = ( SecondPass ) iter.next();
+ //do the second pass of simple value types first and remove them
+ if ( type.isInstance( sp ) ) {
sp.doSecondPass( classes );
iter.remove();
}
}
+ }
- log.debug( "processing native query and ResultSetMapping mappings" );
- iter = secondPasses.iterator();
- while ( iter.hasNext() ) {
- SecondPass sp = (SecondPass) iter.next();
+ /**
+ * Processes FKSecondPass instances trying to resolve any
+ * graph circularity (ie PK made of a many to one linking to
+ * an entity having a PK made of a ManyToOne ...).
+ */
+ private void processFkSecondPassInOrder() {
+ LOG.debug("Processing fk mappings (*ToOne and JoinedSubclass)");
+ List fkSecondPasses = getFKSecondPassesOnly();
+
+ if ( fkSecondPasses.size() == 0 ) {
+ return; // nothing to do here
+ }
+
+ // split FkSecondPass instances into primary key and non primary key FKs.
+ // While doing so build a map of class names to FkSecondPass instances depending on this class.
+ Map<String, Set<FkSecondPass>> isADependencyOf = new HashMap<String, Set<FkSecondPass>>();
+ List<FkSecondPass> endOfQueueFkSecondPasses = new ArrayList<FkSecondPass>( fkSecondPasses.size() );
+ for ( FkSecondPass sp : fkSecondPasses ) {
+ if ( sp.isInPrimaryKey() ) {
+ String referenceEntityName = sp.getReferencedEntityName();
+ PersistentClass classMapping = getClassMapping( referenceEntityName );
+ String dependentTable = quotedTableName(classMapping.getTable());
+ if ( !isADependencyOf.containsKey( dependentTable ) ) {
+ isADependencyOf.put( dependentTable, new HashSet<FkSecondPass>() );
+ }
+ isADependencyOf.get( dependentTable ).add( sp );
+ }
+ else {
+ endOfQueueFkSecondPasses.add( sp );
+ }
+ }
+
+ // using the isADependencyOf map we recursively order the FkSecondPass instances into the right order for processing
+ List<FkSecondPass> orderedFkSecondPasses = new ArrayList<FkSecondPass>( fkSecondPasses.size() );
+ for ( String tableName : isADependencyOf.keySet() ) {
+ buildRecursiveOrderedFkSecondPasses( orderedFkSecondPasses, isADependencyOf, tableName, tableName );
+ }
+
+ // process the ordered FkSecondPasses
+ for ( FkSecondPass sp : orderedFkSecondPasses ) {
sp.doSecondPass( classes );
- iter.remove();
}
- log.debug( "processing association property references" );
+ processEndOfQueue( endOfQueueFkSecondPasses );
+ }
- iter = propertyReferences.iterator();
+ /**
+ * @return Returns a list of all secondPasses instances which are an instance of FkSecondPass.
+ */
+ private List<FkSecondPass> getFKSecondPassesOnly() {
+ Iterator iter = secondPasses.iterator();
+ List<FkSecondPass> fkSecondPasses = new ArrayList<FkSecondPass>( secondPasses.size() );
while ( iter.hasNext() ) {
- Mappings.PropertyReference upr = (Mappings.PropertyReference) iter.next();
+ SecondPass sp = ( SecondPass ) iter.next();
+ //do the second pass of fk before the others and remove them
+ if ( sp instanceof FkSecondPass ) {
+ fkSecondPasses.add( ( FkSecondPass ) sp );
+ iter.remove();
+ }
+ }
+ return fkSecondPasses;
+ }
+ /**
+ * Recursively builds a list of FkSecondPass instances ready to be processed in this order.
+ * Checking all dependencies recursively seems quite expensive, but the original code just relied
+ * on some sort of table name sorting which failed in certain circumstances.
+ *
+ * See ANN-722 and ANN-730
+ *
+ * @param orderedFkSecondPasses The list containing the FkSecondPass instances ready
+ * for processing.
+ * @param isADependencyOf Our lookup data structure to determine dependencies between tables
+ * @param startTable Table name to start recursive algorithm.
+ * @param currentTable The current table name used to check for 'new' dependencies.
+ */
+ private void buildRecursiveOrderedFkSecondPasses(
+ List<FkSecondPass> orderedFkSecondPasses,
+ Map<String, Set<FkSecondPass>> isADependencyOf,
+ String startTable,
+ String currentTable) {
+
+ Set<FkSecondPass> dependencies = isADependencyOf.get( currentTable );
+
+ // bottom out
+ if ( dependencies == null || dependencies.size() == 0 ) {
+ return;
+ }
+
+ for ( FkSecondPass sp : dependencies ) {
+ String dependentTable = quotedTableName(sp.getValue().getTable());
+ if ( dependentTable.compareTo( startTable ) == 0 ) {
+ String sb = "Foreign key circularity dependency involving the following tables: ";
+ throw new AnnotationException( sb );
+ }
+ buildRecursiveOrderedFkSecondPasses( orderedFkSecondPasses, isADependencyOf, startTable, dependentTable );
+ if ( !orderedFkSecondPasses.contains( sp ) ) {
+ orderedFkSecondPasses.add( 0, sp );
+ }
+ }
+ }
+
+ private String quotedTableName(Table table) {
+ return Table.qualify( table.getCatalog(), table.getQuotedSchema(), table.getQuotedName() );
+ }
+
+ private void processEndOfQueue(List<FkSecondPass> endOfQueueFkSecondPasses) {
+ /*
+ * If a second pass raises a RecoverableException, queue it for the next round.
+ * Stop if no pass has to be processed, or if the number of passes to process
+ * does not diminish between two rounds.
+ * If some failing passes remain, raise the original exception.
+ */
+ boolean stopProcess = false;
+ RuntimeException originalException = null;
+ while ( !stopProcess ) {
+ List<FkSecondPass> failingSecondPasses = new ArrayList<FkSecondPass>();
+ for ( FkSecondPass pass : endOfQueueFkSecondPasses ) {
+ try {
+ pass.doSecondPass( classes );
+ }
+ catch (RecoverableException e) {
+ failingSecondPasses.add( pass );
+ if ( originalException == null ) {
+ originalException = (RuntimeException) e.getCause();
+ }
+ }
+ }
+ stopProcess = failingSecondPasses.size() == 0 || failingSecondPasses.size() == endOfQueueFkSecondPasses.size();
+ endOfQueueFkSecondPasses = failingSecondPasses;
+ }
+ if ( endOfQueueFkSecondPasses.size() > 0 ) {
+ throw originalException;
+ }
+ }
+
+ private void buildUniqueKeyFromColumnNames(Table table, String keyName, String[] columnNames){
+ buildUniqueKeyFromColumnNames( table, keyName, columnNames, null, true );
+ }
+
+ private void buildUniqueKeyFromColumnNames(Table table, String keyName, String[] columnNames, String[] orderings, boolean unique) {
+ int size = columnNames.length;
+ Column[] columns = new Column[size];
+ Set<Column> unbound = new HashSet<Column>();
+ Set<Column> unboundNoLogical = new HashSet<Column>();
+ for ( int index = 0; index < size; index++ ) {
+ String column = columnNames[index];
+ try {
+ final String columnName = createMappings().getPhysicalColumnName( column, table );
+ columns[index] = new Column( columnName );
+ unbound.add( columns[index] );
+ //column equals and hashcode is based on column name
+ }
+ catch ( MappingException e ) {
+ // If at least 1 columnName does exist, 'columns' will contain a mix of Columns and nulls. In order
+ // to exhaustively report all of the unbound columns at once, w/o an NPE in
+ // Constraint#generateName's array sorting, simply create a fake Column.
+ columns[index] = new Column( column );
+ unboundNoLogical.add( columns[index] );
+ }
+ }
+
+ if ( StringHelper.isEmpty( keyName ) ) {
+ keyName = Constraint.generateName( "UK_", table, columns );
+ }
+ keyName = normalizer.normalizeIdentifierQuoting( keyName );
+
+ if ( unique ) {
+ UniqueKey uk = table.getOrCreateUniqueKey( keyName );
+ for ( int i = 0; i < columns.length; i++ ) {
+ Column column = columns[i];
+ String order = orderings != null ? orderings[i] : null;
+ if ( table.containsColumn( column ) ) {
+ uk.addColumn( column, order );
+ unbound.remove( column );
+ }
+ }
+ }
+ else {
+ Index index = table.getOrCreateIndex( keyName );
+ for ( int i = 0; i < columns.length; i++ ) {
+ Column column = columns[i];
+ String order = orderings != null ? orderings[i] : null;
+ if ( table.containsColumn( column ) ) {
+ index.addColumn( column, order );
+ unbound.remove( column );
+ }
+ }
+ }
+
+ if ( unbound.size() > 0 || unboundNoLogical.size() > 0 ) {
+ StringBuilder sb = new StringBuilder( "Unable to create unique key constraint (" );
+ for ( String columnName : columnNames ) {
+ sb.append( columnName ).append( ", " );
+ }
+ sb.setLength( sb.length() - 2 );
+ sb.append( ") on table " ).append( table.getName() ).append( ": database column " );
+ for ( Column column : unbound ) {
+ sb.append("'").append( column.getName() ).append( "', " );
+ }
+ for ( Column column : unboundNoLogical ) {
+ sb.append("'").append( column.getName() ).append( "', " );
+ }
+ sb.setLength( sb.length() - 2 );
+ sb.append( " not found. Make sure that you use the correct column name which depends on the naming strategy in use (it may not be the same as the property name in the entity, especially for relational types)" );
+ throw new AnnotationException( sb.toString() );
+ }
+ }
+
+ private void originalSecondPassCompile() throws MappingException {
+ LOG.debug( "Processing extends queue" );
+ processExtendsQueue();
+
+ LOG.debug( "Processing collection mappings" );
+ Iterator itr = secondPasses.iterator();
+ while ( itr.hasNext() ) {
+ SecondPass sp = (SecondPass) itr.next();
+ if ( ! (sp instanceof QuerySecondPass) ) {
+ sp.doSecondPass( classes );
+ itr.remove();
+ }
+ }
+
+ LOG.debug( "Processing native query and ResultSetMapping mappings" );
+ itr = secondPasses.iterator();
+ while ( itr.hasNext() ) {
+ SecondPass sp = (SecondPass) itr.next();
+ sp.doSecondPass( classes );
+ itr.remove();
+ }
+
+ LOG.debug( "Processing association property references" );
+
+ itr = propertyReferences.iterator();
+ while ( itr.hasNext() ) {
+ Mappings.PropertyReference upr = (Mappings.PropertyReference) itr.next();
+
PersistentClass clazz = getClassMapping( upr.referencedClass );
if ( clazz == null ) {
throw new MappingException(
@@ -1192,37 +1724,35 @@
( (SimpleValue) prop.getValue() ).setAlternateUniqueKey( true );
}
}
-
+
//TODO: Somehow add the newly created foreign keys to the internal collection
- log.debug( "processing foreign key constraints" );
+ LOG.debug( "Creating tables' unique integer identifiers" );
+ LOG.debug( "Processing foreign key constraints" );
- iter = getTableMappings();
- Set done = new HashSet();
- while ( iter.hasNext() ) {
- secondPassCompileForeignKeys( (Table) iter.next(), done );
+ itr = getTableMappings();
+ int uniqueInteger = 0;
+ Set<ForeignKey> done = new HashSet<ForeignKey>();
+ while ( itr.hasNext() ) {
+ Table table = (Table) itr.next();
+ table.setUniqueInteger( uniqueInteger++ );
+ secondPassCompileForeignKeys( table, done );
}
}
- /**
- * Try to empty the extends queue.
- */
- private void processExtendsQueue() {
- // todo : would love to have this work on a notification basis
- // where the successful binding of an entity/subclass would
- // emit a notification which the extendsQueue entries could
- // react to...
- org.dom4j.Document document = findPossibleExtends();
- while ( document != null ) {
- add( document );
- document = findPossibleExtends();
+ private int processExtendsQueue() {
+ LOG.debug( "Processing extends queue" );
+ int added = 0;
+ ExtendsQueueEntry extendsQueueEntry = findPossibleExtends();
+ while ( extendsQueueEntry != null ) {
+ metadataSourceQueue.processHbmXml( extendsQueueEntry.getMetadataXml(), extendsQueueEntry.getEntityNames() );
+ extendsQueueEntry = findPossibleExtends();
}
if ( extendsQueue.size() > 0 ) {
-// Iterator iterator = extendsQueue.iterator();
Iterator iterator = extendsQueue.keySet().iterator();
- StringBuffer buf = new StringBuffer( "Following superclasses referenced in extends not found: " );
+ StringBuilder buf = new StringBuilder( "Following super classes referenced in extends not found: " );
while ( iterator.hasNext() ) {
final ExtendsQueueEntry entry = ( ExtendsQueueEntry ) iterator.next();
buf.append( entry.getExplicitName() );
@@ -1235,34 +1765,26 @@
}
throw new MappingException( buf.toString() );
}
+
+ return added;
}
- /**
- * Find the first possible element in the queue of extends.
- */
- protected org.dom4j.Document findPossibleExtends() {
-// Iterator iter = extendsQueue.iterator();
- Iterator iter = extendsQueue.keySet().iterator();
- while ( iter.hasNext() ) {
- final ExtendsQueueEntry entry = ( ExtendsQueueEntry ) iter.next();
- if ( getClassMapping( entry.getExplicitName() ) != null ) {
- // found
- iter.remove();
- return entry.getDocument();
+ protected ExtendsQueueEntry findPossibleExtends() {
+ Iterator<ExtendsQueueEntry> itr = extendsQueue.keySet().iterator();
+ while ( itr.hasNext() ) {
+ final ExtendsQueueEntry entry = itr.next();
+ boolean found = getClassMapping( entry.getExplicitName() ) != null
+ || getClassMapping( HbmBinder.getClassName( entry.getExplicitName(), entry.getMappingPackage() ) ) != null;
+ if ( found ) {
+ itr.remove();
+ return entry;
}
- else if ( getClassMapping( HbmBinder.getClassName( entry.getExplicitName(), entry.getMappingPackage() ) ) != null ) {
- // found
- iter.remove();
- return entry.getDocument();
- }
}
return null;
}
- protected void secondPassCompileForeignKeys(Table table, Set done) throws MappingException {
-
+ protected void secondPassCompileForeignKeys(Table table, Set<ForeignKey> done) throws MappingException {
table.createForeignKeys();
-
Iterator iter = table.getForeignKeyIterator();
while ( iter.hasNext() ) {
@@ -1277,10 +1799,8 @@
" does not specify the referenced entity"
);
}
- if ( log.isDebugEnabled() ) {
- log.debug( "resolving reference to class: " + referencedEntityName );
- }
- PersistentClass referencedClass = (PersistentClass) classes.get( referencedEntityName );
+ LOG.debugf( "Resolving reference to class: %s", referencedEntityName );
+ PersistentClass referencedClass = classes.get( referencedEntityName );
if ( referencedClass == null ) {
throw new MappingException(
"An association from the table " +
@@ -1298,127 +1818,221 @@
}
}
- /**
- * Get the named queries
- */
- public Map getNamedQueries() {
+ public Map<String, NamedQueryDefinition> getNamedQueries() {
return namedQueries;
}
+ public Map<String, NamedProcedureCallDefinition> getNamedProcedureCallMap() {
+ return namedProcedureCallMap;
+ }
+
/**
- * Instantiate a new SessionFactory, using the properties and
- * mappings in this configuration. The SessionFactory will be
- * immutable, so changes made to the Configuration after
- * building the SessionFactory will not affect it.
+ * Create a {@link SessionFactory} using the properties and mappings in this configuration. The
+ * {@link SessionFactory} will be immutable, so changes made to {@code this} {@link Configuration} after
+ * building the {@link SessionFactory} will not affect it.
*
- * @return a new factory for Sessions
- * @see org.hibernate.SessionFactory
+ * @param serviceRegistry The registry of services to be used in creating this session factory.
+ *
+ * @return The built {@link SessionFactory}
+ *
+ * @throws HibernateException usually indicates an invalid configuration or invalid mapping information
*/
- public SessionFactory buildSessionFactory() throws HibernateException {
- log.debug( "Preparing to build session factory with filters : " + filterDefinitions );
+ public SessionFactory buildSessionFactory(ServiceRegistry serviceRegistry) throws HibernateException {
+ LOG.debugf( "Preparing to build session factory with filters : %s", filterDefinitions );
+
+ buildTypeRegistrations( serviceRegistry );
secondPassCompile();
+ if ( !metadataSourceQueue.isEmpty() ) {
+ LOG.incompleteMappingMetadataCacheProcessing();
+ }
+
validate();
+
Environment.verifyProperties( properties );
Properties copy = new Properties();
copy.putAll( properties );
- PropertiesHelper.resolvePlaceHolders( copy );
- Settings settings = buildSettings( copy );
+ ConfigurationHelper.resolvePlaceHolders( copy );
+ Settings settings = buildSettings( copy, serviceRegistry );
return new SessionFactoryImpl(
this,
mapping,
+ serviceRegistry,
settings,
- getInitializedEventListeners(),
sessionFactoryObserver
);
}
+
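
A typical bootstrap with the new ServiceRegistry overload, mirroring what the deprecated no-arg variant further down does internally (a sketch, not part of the patch):

    import org.hibernate.SessionFactory;
    import org.hibernate.boot.registry.StandardServiceRegistryBuilder;
    import org.hibernate.cfg.Configuration;
    import org.hibernate.service.ServiceRegistry;

    public class Bootstrap {
        public static SessionFactory create() {
            Configuration cfg = new Configuration().configure(); // reads /hibernate.cfg.xml
            ServiceRegistry registry = new StandardServiceRegistryBuilder()
                    .applySettings( cfg.getProperties() )
                    .build();
            return cfg.buildSessionFactory( registry );
        }
    }
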
+ private void buildTypeRegistrations(ServiceRegistry serviceRegistry) {
+ final TypeContributions typeContributions = new TypeContributions() {
+ @Override
+ public void contributeType(BasicType type) {
+ typeResolver.registerTypeOverride( type );
+ }
- private EventListeners getInitializedEventListeners() {
- EventListeners result = (EventListeners) eventListeners.shallowCopy();
- result.initializeListeners( this );
- return result;
+ @Override
+ public void contributeType(UserType type, String[] keys) {
+ typeResolver.registerTypeOverride( type, keys );
+ }
+
+ @Override
+ public void contributeType(CompositeUserType type, String[] keys) {
+ typeResolver.registerTypeOverride( type, keys );
+ }
+ };
+
+ // add Dialect contributed types
+ final Dialect dialect = serviceRegistry.getService( JdbcServices.class ).getDialect();
+ dialect.contributeTypes( typeContributions, serviceRegistry );
+
+ // add TypeContributor contributed types.
+ ClassLoaderService classLoaderService = serviceRegistry.getService( ClassLoaderService.class );
+ for ( TypeContributor contributor : classLoaderService.loadJavaServices( TypeContributor.class ) ) {
+ contributor.contribute( typeContributions, serviceRegistry );
+ }
+ // from app registrations
+ for ( TypeContributor contributor : typeContributorRegistrations ) {
+ contributor.contribute( typeContributions, serviceRegistry );
+ }
}
/**
- * Return the configured Interceptor
+ * Create a {@link SessionFactory} using the properties and mappings in this configuration. The
+ * {@link SessionFactory} will be immutable, so changes made to {@code this} {@link Configuration} after
+ * building the {@link SessionFactory} will not affect it.
+ *
+ * @return The built {@link SessionFactory}
+ *
+ * @throws HibernateException usually indicates an invalid configuration or invalid mapping information
+ *
+ * @deprecated Use {@link #buildSessionFactory(ServiceRegistry)} instead
*/
+ public SessionFactory buildSessionFactory() throws HibernateException {
+ Environment.verifyProperties( properties );
+ ConfigurationHelper.resolvePlaceHolders( properties );
+ final ServiceRegistry serviceRegistry = new StandardServiceRegistryBuilder()
+ .applySettings( properties )
+ .build();
+ setSessionFactoryObserver(
+ new SessionFactoryObserver() {
+ @Override
+ public void sessionFactoryCreated(SessionFactory factory) {
+ }
+
+ @Override
+ public void sessionFactoryClosed(SessionFactory factory) {
+ ( (StandardServiceRegistryImpl) serviceRegistry ).destroy();
+ }
+ }
+ );
+ return buildSessionFactory( serviceRegistry );
+ }
+
+ /**
+ * Retrieve the configured {@link Interceptor}.
+ *
+ * @return The current {@link Interceptor}
+ */
public Interceptor getInterceptor() {
return interceptor;
}
/**
+ * Set the current {@link Interceptor}
+ *
+ * @param interceptor The {@link Interceptor} to use for the {@link #buildSessionFactory built}
+ * {@link SessionFactory}.
+ *
+ * @return this for method chaining
+ */
+ public Configuration setInterceptor(Interceptor interceptor) {
+ this.interceptor = interceptor;
+ return this;
+ }
+
+ /**
* Get all properties
+ *
+ * @return all properties
*/
public Properties getProperties() {
return properties;
}
/**
- * Configure an Interceptor
+ * Get a property value by name
+ *
+ * @param propertyName The name of the property
+ *
+ * @return The value currently associated with that property name; may be null.
*/
- public Configuration setInterceptor(Interceptor interceptor) {
- this.interceptor = interceptor;
- return this;
+ public String getProperty(String propertyName) {
+ return properties.getProperty( propertyName );
}
/**
* Specify a completely new set of properties
+ *
+ * @param properties The new set of properties
+ *
+ * @return this for method chaining
*/
public Configuration setProperties(Properties properties) {
this.properties = properties;
return this;
}
/**
- * Set the given properties
+ * Add the given properties to ours.
+ *
+ * @param extraProperties The properties to add.
+ *
+ * @return this for method chaining
+ *
*/
public Configuration addProperties(Properties extraProperties) {
this.properties.putAll( extraProperties );
return this;
}
/**
- * Adds the incoming properties to the internap properties structure,
- * as long as the internal structure does not already contain an
- * entry for the given key.
+ * Adds the incoming properties to the internal properties structure, as long as the internal structure does not
+ * already contain an entry for the given key.
*
- * @param properties
- * @return this
+ * @param properties The properties to merge
+ *
+ * @return this for method chaining
*/
public Configuration mergeProperties(Properties properties) {
- Iterator itr = properties.entrySet().iterator();
- while ( itr.hasNext() ) {
- final Map.Entry entry = ( Map.Entry ) itr.next();
+ for ( Map.Entry entry : properties.entrySet() ) {
if ( this.properties.containsKey( entry.getKey() ) ) {
continue;
}
- this.properties.setProperty( ( String ) entry.getKey(), ( String ) entry.getValue() );
+ this.properties.setProperty( (String) entry.getKey(), (String) entry.getValue() );
}
return this;
}
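
A small sketch of the difference between setProperty/addProperties (which overwrite) and mergeProperties (which only fills in missing keys); the property values are arbitrary examples:

    import java.util.Properties;
    import org.hibernate.cfg.Configuration;

    public class PropertyMergeExample {
        public static Configuration configure() {
            Configuration cfg = new Configuration().setProperty( "hibernate.show_sql", "true" );
            Properties defaults = new Properties();
            defaults.setProperty( "hibernate.show_sql", "false" );  // ignored: key already set
            defaults.setProperty( "hibernate.format_sql", "true" ); // merged: key was missing
            return cfg.mergeProperties( defaults );
        }
    }
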
/**
- * Set a property
+ * Set a property value by name
+ *
+ * @param propertyName The name of the property to set
+ * @param value The new property value
+ *
+ * @return this for method chaining
*/
public Configuration setProperty(String propertyName, String value) {
properties.setProperty( propertyName, value );
return this;
}
- /**
- * Get a property
- */
- public String getProperty(String propertyName) {
- return properties.getProperty( propertyName );
- }
-
private void addProperties(Element parent) {
- Iterator iter = parent.elementIterator( "property" );
- while ( iter.hasNext() ) {
- Element node = (Element) iter.next();
+ Iterator itr = parent.elementIterator( "property" );
+ while ( itr.hasNext() ) {
+ Element node = (Element) itr.next();
String name = node.attributeValue( "name" );
String value = node.getText().trim();
- log.debug( name + "=" + value );
+ LOG.debugf( "%s=%s", name, value );
properties.setProperty( name, value );
if ( !name.startsWith( "hibernate" ) ) {
properties.setProperty( "hibernate." + name, value );
@@ -1428,51 +2042,71 @@
}
/**
- * Get the configuration file as an InputStream. Might be overridden
- * by subclasses to allow the configuration to be located by some arbitrary
- * mechanism.
+ * Use the mappings and properties specified in an application resource named hibernate.cfg.xml.
+ *
+ * @return this for method chaining
+ *
+ * @throws HibernateException Generally indicates we cannot find hibernate.cfg.xml
+ *
+ * @see #configure(String)
*/
- protected InputStream getConfigurationInputStream(String resource) throws HibernateException {
-
- log.info( "Configuration resource: " + resource );
-
- return ConfigHelper.getResourceAsStream( resource );
-
- }
-
- /**
- * Use the mappings and properties specified in an application
- * resource named hibernate.cfg.xml.
- */
public Configuration configure() throws HibernateException {
configure( "/hibernate.cfg.xml" );
return this;
}
/**
- * Use the mappings and properties specified in the given application
- * resource. The format of the resource is defined in
- * hibernate-configuration-3.0.dtd.
+ * Use the mappings and properties specified in the given application resource. The format of the resource is
+ * defined in hibernate-configuration-3.0.dtd.
*
- * The resource is found via getConfigurationInputStream(resource).
+ * The resource is found via {@link #getConfigurationInputStream}
+ *
+ * @param resource The resource to use
+ *
+ * @return this for method chaining
+ *
+ * @throws HibernateException Generally indicates we cannot find the named resource
+ *
+ * @see #doConfigure(java.io.InputStream, String)
*/
public Configuration configure(String resource) throws HibernateException {
- log.info( "configuring from resource: " + resource );
+ LOG.configuringFromResource( resource );
InputStream stream = getConfigurationInputStream( resource );
return doConfigure( stream, resource );
}
/**
- * Use the mappings and properties specified in the given document.
- * The format of the document is defined in
+ * Get the configuration file as an InputStream. Might be overridden
+ * by subclasses to allow the configuration to be located by some arbitrary
+ * mechanism.
+ *
+ * By default here we use classpath resource resolution
+ *
+ * @param resource The resource to locate
+ *
+ * @return The stream
+ *
+ * @throws HibernateException Generally indicates we cannot find the named resource
+ */
+ protected InputStream getConfigurationInputStream(String resource) throws HibernateException {
+ LOG.configurationResource( resource );
+ return ConfigHelper.getResourceAsStream( resource );
+ }
+
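
Since the javadoc above notes that getConfigurationInputStream may be overridden, here is a sketch of a subclass that resolves the file from a hypothetical external directory instead of the classpath:

    import java.io.FileInputStream;
    import java.io.FileNotFoundException;
    import java.io.InputStream;
    import org.hibernate.HibernateException;
    import org.hibernate.cfg.Configuration;

    public class ExternalDirConfiguration extends Configuration {
        @Override
        protected InputStream getConfigurationInputStream(String resource) throws HibernateException {
            try {
                // /etc/myapp is a hypothetical location; resource is e.g. "/hibernate.cfg.xml"
                return new FileInputStream( "/etc/myapp" + resource );
            }
            catch (FileNotFoundException e) {
                throw new HibernateException( "Configuration not found: " + resource, e );
            }
        }
    }
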
+ /**
+ * Use the mappings and properties specified in the given document. The format of the document is defined in
* hibernate-configuration-3.0.dtd.
*
* @param url URL from which you wish to load the configuration
- * @return A configuration configured via the file
- * @throws HibernateException
+ *
+ * @return this for method chaining
+ *
+ * @throws HibernateException Generally indicates a problem accessing the url
+ *
+ * @see #doConfigure(java.io.InputStream, String)
*/
public Configuration configure(URL url) throws HibernateException {
- log.info( "configuring from url: " + url.toString() );
+ LOG.configuringFromUrl( url );
try {
return doConfigure( url.openStream(), url.toString() );
}
@@ -1482,16 +2116,19 @@
}
/**
- * Use the mappings and properties specified in the given application
- * file. The format of the file is defined in
+ * Use the mappings and properties specified in the given application file. The format of the file is defined in
* hibernate-configuration-3.0.dtd.
*
- * @param configFile File from which you wish to load the configuration
- * @return A configuration configured via the file
- * @throws HibernateException
+ * @param configFile File from which you wish to load the configuration
+ *
+ * @return this for method chaining
+ *
+ * @throws HibernateException Generally indicates a problem accessing the file
+ *
+ * @see #doConfigure(java.io.InputStream, String)
*/
public Configuration configure(File configFile) throws HibernateException {
- log.info( "configuring from file: " + configFile.getName() );
+ LOG.configuringFromFile( configFile.getName() );
try {
return doConfigure( new FileInputStream( configFile ), configFile.toString() );
}
@@ -1501,46 +2138,39 @@
}
/**
- * Use the mappings and properties specified in the given application
- * resource. The format of the resource is defined in
- * hibernate-configuration-3.0.dtd.
+ * Configure this configuration's state from the contents of the given input stream. The expectation is that
+ * the stream contents represent an XML document conforming to the Hibernate Configuration DTD. See
+ * {@link #doConfigure(Document)} for further details.
*
- * @param stream Inputstream to be read from
+ * @param stream The input stream from which to read
* @param resourceName The name to use in warning/error messages
- * @return A configuration configured via the stream
- * @throws HibernateException
+ *
+ * @return this for method chaining
+ *
+ * @throws HibernateException Indicates a problem reading the stream contents.
*/
protected Configuration doConfigure(InputStream stream, String resourceName) throws HibernateException {
-
- org.dom4j.Document doc;
try {
- List errors = new ArrayList();
- doc = xmlHelper.createSAXReader( resourceName, errors, entityResolver )
+ ErrorLogger errorLogger = new ErrorLogger( resourceName );
+ Document document = xmlHelper.createSAXReader( errorLogger, entityResolver )
.read( new InputSource( stream ) );
- if ( errors.size() != 0 ) {
- throw new MappingException(
- "invalid configuration",
- (Throwable) errors.get( 0 )
- );
+ if ( errorLogger.hasErrors() ) {
+ throw new MappingException( "invalid configuration", errorLogger.getErrors().get( 0 ) );
}
+ doConfigure( document );
}
catch (DocumentException e) {
- throw new HibernateException(
- "Could not parse configuration: " + resourceName,
- e
- );
+ throw new HibernateException( "Could not parse configuration: " + resourceName, e );
}
finally {
try {
stream.close();
}
catch (IOException ioe) {
- log.warn( "could not close input stream for: " + resourceName, ioe );
+ LOG.unableToCloseInputStreamForResource( resourceName, ioe );
}
}
-
- return doConfigure( doc );
-
+ return this;
}
/**
@@ -1552,13 +2182,22 @@
* @return A configuration configured via the Document
* @throws HibernateException if there is problem in accessing the file.
*/
- public Configuration configure(Document document) throws HibernateException {
- log.info( "configuring from XML document" );
+ public Configuration configure(org.w3c.dom.Document document) throws HibernateException {
+ LOG.configuringFromXmlDocument();
return doConfigure( xmlHelper.createDOMReader().read( document ) );
}
- protected Configuration doConfigure(org.dom4j.Document doc) throws HibernateException {
-
+ /**
+ * Parse a dom4j document conforming to the Hibernate Configuration DTD (hibernate-configuration-3.0.dtd)
+ * and use its information to configure this {@link Configuration}'s state
+ *
+ * @param doc The dom4j document
+ *
+ * @return this for method chaining
+ *
+ * @throws HibernateException Indicates a problem performing the configuration task
+ */
+ protected Configuration doConfigure(Document doc) throws HibernateException {
Element sfNode = doc.getRootElement().element( "session-factory" );
String name = sfNode.attributeValue( "name" );
if ( name != null ) {
@@ -1572,11 +2211,10 @@
parseSecurity( secNode );
}
- log.info( "Configured SessionFactory: " + name );
- log.debug( "properties: " + properties );
+ LOG.configuredSessionFactory( name );
+ LOG.debugf( "Properties: %s", properties );
return this;
-
}
@@ -1601,603 +2239,1663 @@
final String region = ( regionNode == null ) ? role : regionNode.getValue();
setCollectionCacheConcurrencyStrategy( role, subelement.attributeValue( "usage" ), region );
}
- else if ( "listener".equals( subelementName ) ) {
- parseListener( subelement );
- }
- else if ( "event".equals( subelementName ) ) {
- parseEvent( subelement );
- }
}
}
- protected void parseMappingElement(Element subelement, String name) {
- Attribute rsrc = subelement.attribute( "resource" );
- Attribute file = subelement.attribute( "file" );
- Attribute jar = subelement.attribute( "jar" );
- Attribute pkg = subelement.attribute( "package" );
- Attribute clazz = subelement.attribute( "class" );
- if ( rsrc != null ) {
- log.debug( name + "<-" + rsrc );
- addResource( rsrc.getValue() );
+ private void parseMappingElement(Element mappingElement, String name) {
+ final Attribute resourceAttribute = mappingElement.attribute( "resource" );
+ final Attribute fileAttribute = mappingElement.attribute( "file" );
+ final Attribute jarAttribute = mappingElement.attribute( "jar" );
+ final Attribute packageAttribute = mappingElement.attribute( "package" );
+ final Attribute classAttribute = mappingElement.attribute( "class" );
+
+ if ( resourceAttribute != null ) {
+ final String resourceName = resourceAttribute.getValue();
+ LOG.debugf( "Session-factory config [%s] named resource [%s] for mapping", name, resourceName );
+ addResource( resourceName );
}
- else if ( jar != null ) {
- log.debug( name + "<-" + jar );
- addJar( new File( jar.getValue() ) );
+ else if ( fileAttribute != null ) {
+ final String fileName = fileAttribute.getValue();
+ LOG.debugf( "Session-factory config [%s] named file [%s] for mapping", name, fileName );
+ addFile( fileName );
}
- else if ( pkg != null ) {
- throw new MappingException(
- "An AnnotationConfiguration instance is required to use "
- );
+ else if ( jarAttribute != null ) {
+ final String jarFileName = jarAttribute.getValue();
+ LOG.debugf( "Session-factory config [%s] named jar file [%s] for mapping", name, jarFileName );
+ addJar( new File( jarFileName ) );
}
- else if ( clazz != null ) {
- throw new MappingException(
- "An AnnotationConfiguration instance is required to use "
- );
+ else if ( packageAttribute != null ) {
+ final String packageName = packageAttribute.getValue();
+ LOG.debugf( "Session-factory config [%s] named package [%s] for mapping", name, packageName );
+ addPackage( packageName );
}
- else {
- if ( file == null ) {
+ else if ( classAttribute != null ) {
+ final String className = classAttribute.getValue();
+ LOG.debugf( "Session-factory config [%s] named class [%s] for mapping", name, className );
+ try {
+ addAnnotatedClass( ReflectHelper.classForName( className ) );
+ }
+ catch ( Exception e ) {
throw new MappingException(
- " element in configuration specifies no attributes"
- );
+ "Unable to load class [ " + className + "] declared in Hibernate configuration entry",
+ e
+ );
}
- log.debug( name + "<-" + file );
- addFile( file.getValue() );
}
+ else {
+ throw new MappingException( " element in configuration specifies no known attributes" );
+ }
}
+ private JaccPermissionDeclarations jaccPermissionDeclarations;
+
private void parseSecurity(Element secNode) {
- String contextId = secNode.attributeValue( "context" );
- setProperty(Environment.JACC_CONTEXTID, contextId);
- log.info( "JACC contextID: " + contextId );
- JACCConfiguration jcfg = new JACCConfiguration( contextId );
+ final String nodeContextId = secNode.attributeValue( "context" );
+
+ final String explicitContextId = getProperty( AvailableSettings.JACC_CONTEXT_ID );
+ if ( explicitContextId == null ) {
+ setProperty( AvailableSettings.JACC_CONTEXT_ID, nodeContextId );
+ LOG.jaccContextId( nodeContextId );
+ }
+ else {
+ // if they don't match, throw an error
+ if ( ! nodeContextId.equals( explicitContextId ) ) {
+ throw new HibernateException( "Non-matching JACC context ids" );
+ }
+ }
+ jaccPermissionDeclarations = new JaccPermissionDeclarations( nodeContextId );
+
Iterator grantElements = secNode.elementIterator();
while ( grantElements.hasNext() ) {
- Element grantElement = (Element) grantElements.next();
- String elementName = grantElement.getName();
+ final Element grantElement = (Element) grantElements.next();
+ final String elementName = grantElement.getName();
if ( "grant".equals( elementName ) ) {
- jcfg.addPermission(
- grantElement.attributeValue( "role" ),
- grantElement.attributeValue( "entity-name" ),
- grantElement.attributeValue( "actions" )
- );
+ jaccPermissionDeclarations.addPermissionDeclaration(
+ new GrantedPermission(
+ grantElement.attributeValue( "role" ),
+ grantElement.attributeValue( "entity-name" ),
+ grantElement.attributeValue( "actions" )
+ )
+ );
}
}
}
- private void parseEvent(Element element) {
- String type = element.attributeValue( "type" );
- List listeners = element.elements();
- String[] listenerClasses = new String[ listeners.size() ];
- for ( int i = 0; i < listeners.size() ; i++ ) {
- listenerClasses[i] = ( (Element) listeners.get( i ) ).attributeValue( "class" );
+ public JaccPermissionDeclarations getJaccPermissionDeclarations() {
+ return jaccPermissionDeclarations;
+ }
+
+ RootClass getRootClassMapping(String clazz) throws MappingException {
+ try {
+ return (RootClass) getClassMapping( clazz );
}
- log.debug( "Event listeners: " + type + "=" + StringHelper.toString( listenerClasses ) );
- setListeners( type, listenerClasses );
+ catch (ClassCastException cce) {
+ throw new MappingException( "You may only specify a cache for root mappings. Attempted on " + clazz );
+ }
}
- private void parseListener(Element element) {
- String type = element.attributeValue( "type" );
- if ( type == null ) {
- throw new MappingException( "No type specified for listener" );
+ /**
+ * Set up a cache for an entity class
+ *
+ * @param entityName The name of the entity to which we should associate these cache settings
+ * @param concurrencyStrategy The cache strategy to use
+ *
+ * @return this for method chaining
+ */
+ public Configuration setCacheConcurrencyStrategy(String entityName, String concurrencyStrategy) {
+ setCacheConcurrencyStrategy( entityName, concurrencyStrategy, entityName );
+ return this;
+ }
+
+ /**
+ * Set up a cache for an entity class, giving an explicit region name
+ *
+ * @param entityName The name of the entity to which we should associate these cache settings
+ * @param concurrencyStrategy The cache strategy to use
+ * @param region The name of the cache region to use
+ *
+ * @return this for method chaining
+ */
+ public Configuration setCacheConcurrencyStrategy(String entityName, String concurrencyStrategy, String region) {
+ setCacheConcurrencyStrategy( entityName, concurrencyStrategy, region, true );
+ return this;
+ }
+
+ public void setCacheConcurrencyStrategy(
+ String entityName,
+ String concurrencyStrategy,
+ String region,
+ boolean cacheLazyProperty) throws MappingException {
+ caches.add( new CacheHolder( entityName, concurrencyStrategy, region, true, cacheLazyProperty ) );
+ }
+
+ private void applyCacheConcurrencyStrategy(CacheHolder holder) {
+ RootClass rootClass = getRootClassMapping( holder.role );
+ if ( rootClass == null ) {
+ throw new MappingException( "Cannot cache an unknown entity: " + holder.role );
}
- String impl = element.attributeValue( "class" );
- log.debug( "Event listener: " + type + "=" + impl );
- setListeners( type, new String[]{impl} );
+ rootClass.setCacheConcurrencyStrategy( holder.usage );
+ rootClass.setCacheRegionName( holder.region );
+ rootClass.setLazyPropertiesCacheable( holder.cacheLazy );
}
- public void setListener(String type, String listener) {
- String[] listeners = null;
- if ( listener != null ) {
- listeners = (String[]) Array.newInstance( String.class, 1 );
- listeners[0] = listener;
+ /**
+ * Set up a cache for a collection role
+ *
+ * @param collectionRole The name of the collection to which we should associate these cache settings
+ * @param concurrencyStrategy The cache strategy to use
+ *
+ * @return this for method chaining
+ */
+ public Configuration setCollectionCacheConcurrencyStrategy(String collectionRole, String concurrencyStrategy) {
+ setCollectionCacheConcurrencyStrategy( collectionRole, concurrencyStrategy, collectionRole );
+ return this;
+ }
+
+ /**
+ * Set up a cache for a collection role, giving an explicit region name
+ *
+ * @param collectionRole The name of the collection to which we should associate these cache settings
+ * @param concurrencyStrategy The cache strategy to use
+ * @param region The name of the cache region to use
+ */
+ public void setCollectionCacheConcurrencyStrategy(String collectionRole, String concurrencyStrategy, String region) {
+ caches.add( new CacheHolder( collectionRole, concurrencyStrategy, region, false, false ) );
+ }
+
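A minimal usage sketch of the cache-configuration entry points above, assuming a hypothetical mapped entity com.example.Order with a collection role com.example.Order.lines and a second-level cache provider already configured:

    // Both two-argument overloads default the region name to the entity/collection role
    // and return this, so the calls can be chained.
    Configuration cfg = new Configuration()
            .configure()
            .setCacheConcurrencyStrategy( "com.example.Order", "read-write" )
            .setCollectionCacheConcurrencyStrategy( "com.example.Order.lines", "read-write" );

The calls only queue CacheHolder entries; the strategies are applied to the mapped RootClass and Collection later, via applyCacheConcurrencyStrategy and applyCollectionCacheConcurrencyStrategy shown above.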
+ private void applyCollectionCacheConcurrencyStrategy(CacheHolder holder) {
+ Collection collection = getCollectionMapping( holder.role );
+ if ( collection == null ) {
+ throw new MappingException( "Cannot cache an unknown collection: " + holder.role );
}
- setListeners( type, listeners );
+ collection.setCacheConcurrencyStrategy( holder.usage );
+ collection.setCacheRegionName( holder.region );
}
- public void setListeners(String type, String[] listenerClasses) {
- Object[] listeners = null;
- if ( listenerClasses != null ) {
- listeners = (Object[]) Array.newInstance( eventListeners.getListenerClassFor(type), listenerClasses.length );
- for ( int i = 0; i < listeners.length ; i++ ) {
- try {
- listeners[i] = ReflectHelper.classForName( listenerClasses[i] ).newInstance();
+ /**
+ * Get the query language imports
+ *
+ * @return a mapping from "import" names to fully qualified class names
+ */
+ public Map getImports() {
+ return imports;
+ }
+
+ /**
+ * Create an object-oriented view of the configuration properties
+ *
+ * @param serviceRegistry The registry of services to be used in building these settings.
+ *
+ * @return The build settings
+ */
+ public Settings buildSettings(ServiceRegistry serviceRegistry) {
+ Properties clone = ( Properties ) properties.clone();
+ ConfigurationHelper.resolvePlaceHolders( clone );
+ return buildSettingsInternal( clone, serviceRegistry );
+ }
+
+ public Settings buildSettings(Properties props, ServiceRegistry serviceRegistry) throws HibernateException {
+ return buildSettingsInternal( props, serviceRegistry );
+ }
+
+ private Settings buildSettingsInternal(Properties props, ServiceRegistry serviceRegistry) {
+ final Settings settings = settingsFactory.buildSettings( props, serviceRegistry );
+ settings.setEntityTuplizerFactory( this.getEntityTuplizerFactory() );
+// settings.setComponentTuplizerFactory( this.getComponentTuplizerFactory() );
+ return settings;
+ }
+
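buildSettings(ServiceRegistry) resolves placeholders against a copy of the configuration properties and delegates to the SettingsFactory. As a hedged sketch of where the ServiceRegistry parameter typically comes from in a Hibernate 4.x bootstrap, assuming hibernate.cfg.xml is on the classpath:

    Configuration cfg = new Configuration().configure();
    StandardServiceRegistry registry = new StandardServiceRegistryBuilder()
            .applySettings( cfg.getProperties() )   // reuse the Configuration's own properties
            .build();
    SessionFactory sessionFactory = cfg.buildSessionFactory( registry );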
+ public Map getNamedSQLQueries() {
+ return namedSqlQueries;
+ }
+
+ public Map getSqlResultSetMappings() {
+ return sqlResultSetMappings;
+ }
+
+ public NamingStrategy getNamingStrategy() {
+ return namingStrategy;
+ }
+
+ /**
+ * Set a custom naming strategy
+ *
+ * @param namingStrategy the NamingStrategy to set
+ *
+ * @return this for method chaining
+ */
+ public Configuration setNamingStrategy(NamingStrategy namingStrategy) {
+ this.namingStrategy = namingStrategy;
+ return this;
+ }
+
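As an illustration, one of the strategies shipped with Hibernate can be installed before mappings are processed; this sketch assumes cfg is the Configuration being set up and uses the org.hibernate.cfg.ImprovedNamingStrategy singleton:

    // Prefers embedded underscores to mixed-case table and column names.
    cfg.setNamingStrategy( ImprovedNamingStrategy.INSTANCE );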
+ /**
+ * Retrieve the IdentifierGeneratorFactory in effect for this configuration.
+ *
+ * @return This configuration's IdentifierGeneratorFactory.
+ */
+ public MutableIdentifierGeneratorFactory getIdentifierGeneratorFactory() {
+ return identifierGeneratorFactory;
+ }
+
+ public Mapping buildMapping() {
+ return new Mapping() {
+ public IdentifierGeneratorFactory getIdentifierGeneratorFactory() {
+ return identifierGeneratorFactory;
+ }
+
+ /**
+ * Returns the identifier type of a mapped class
+ */
+ public Type getIdentifierType(String entityName) throws MappingException {
+ PersistentClass pc = classes.get( entityName );
+ if ( pc == null ) {
+ throw new MappingException( "persistent class not known: " + entityName );
}
- catch (Exception e) {
+ return pc.getIdentifier().getType();
+ }
+
+ public String getIdentifierPropertyName(String entityName) throws MappingException {
+ final PersistentClass pc = classes.get( entityName );
+ if ( pc == null ) {
+ throw new MappingException( "persistent class not known: " + entityName );
+ }
+ if ( !pc.hasIdentifierProperty() ) {
+ return null;
+ }
+ return pc.getIdentifierProperty().getName();
+ }
+
+ public Type getReferencedPropertyType(String entityName, String propertyName) throws MappingException {
+ final PersistentClass pc = classes.get( entityName );
+ if ( pc == null ) {
+ throw new MappingException( "persistent class not known: " + entityName );
+ }
+ Property prop = pc.getReferencedProperty( propertyName );
+ if ( prop == null ) {
throw new MappingException(
- "Unable to instantiate specified event (" + type + ") listener class: " + listenerClasses[i],
- e
+ "property not known: " +
+ entityName + '.' + propertyName
);
}
+ return prop.getType();
}
+ };
+ }
+
+ private void readObject(ObjectInputStream ois) throws IOException, ClassNotFoundException {
+ //we need reflectionManager before reading the other components (MetadataSourceQueue in particular)
+ final MetadataProvider metadataProvider = (MetadataProvider) ois.readObject();
+ this.mapping = buildMapping();
+ xmlHelper = new XMLHelper();
+ createReflectionManager(metadataProvider);
+ ois.defaultReadObject();
+ }
+
+ private void writeObject(java.io.ObjectOutputStream out) throws IOException {
+ //We write MetadataProvider first as we need reflectionManager before reading the other components
+ final MetadataProvider metadataProvider = ( ( MetadataProviderInjector ) reflectionManager ).getMetadataProvider();
+ out.writeObject( metadataProvider );
+ out.defaultWriteObject();
+ }
+
+ private void createReflectionManager() {
+ createReflectionManager( new JPAMetadataProvider() );
+ }
+
+ private void createReflectionManager(MetadataProvider metadataProvider) {
+ reflectionManager = new JavaReflectionManager();
+ ( ( MetadataProviderInjector ) reflectionManager ).setMetadataProvider( metadataProvider );
+ }
+
+ public Map getFilterDefinitions() {
+ return filterDefinitions;
+ }
+
+ public void addFilterDefinition(FilterDefinition definition) {
+ filterDefinitions.put( definition.getFilterName(), definition );
+ }
+
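A hedged sketch of registering a filter definition programmatically, assuming cfg is the Configuration, a hypothetical filter named amountFilter with one parameter, and org.hibernate.engine.spi.FilterDefinition plus org.hibernate.type.StandardBasicTypes/Type:

    Map<String, Type> paramTypes = new HashMap<String, Type>();
    paramTypes.put( "minAmount", StandardBasicTypes.LONG );
    cfg.addFilterDefinition( new FilterDefinition( "amountFilter", "amount >= :minAmount", paramTypes ) );

A mapped @Filter/<filter> usage that references the name can then be enabled per session with session.enableFilter( "amountFilter" ).setParameter( "minAmount", 100L ).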
+ public Iterator iterateFetchProfiles() {
+ return fetchProfiles.values().iterator();
+ }
+
+ public void addFetchProfile(FetchProfile fetchProfile) {
+ fetchProfiles.put( fetchProfile.getName(), fetchProfile );
+ }
+
+ public void addAuxiliaryDatabaseObject(AuxiliaryDatabaseObject object) {
+ auxiliaryDatabaseObjects.add( object );
+ }
+
+ public Map getSqlFunctions() {
+ return sqlFunctions;
+ }
+
+ public void addSqlFunction(String functionName, SQLFunction function) {
+ // HHH-7721: SQLFunctionRegistry expects all lowercase. Enforce,
+		// just in case a user's custom dialect uses mixed cases.
+ sqlFunctions.put( functionName.toLowerCase(), function );
+ }
+
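A short sketch of registering a dialect function through this method, assuming cfg is the Configuration and using the hypothetical HQL name GROUP_CONCAT backed by org.hibernate.dialect.function.StandardSQLFunction:

    // The registered name is stored lower-cased (see the HHH-7721 note above).
    cfg.addSqlFunction( "GROUP_CONCAT", new StandardSQLFunction( "group_concat", StandardBasicTypes.STRING ) );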
+ public TypeResolver getTypeResolver() {
+ return typeResolver;
+ }
+
+ /**
+ * Allows registration of a type into the type registry. The phrase 'override' in the method name simply
+	 * reminds that registration *potentially* replaces a previously registered type.
+ *
+ * @param type The type to register.
+ */
+ public void registerTypeOverride(BasicType type) {
+ getTypeResolver().registerTypeOverride( type );
+ }
+
+
+ public void registerTypeOverride(UserType type, String[] keys) {
+ getTypeResolver().registerTypeOverride( type, keys );
+ }
+
+ public void registerTypeOverride(CompositeUserType type, String[] keys) {
+ getTypeResolver().registerTypeOverride( type, keys );
+ }
+
+ public void registerTypeContributor(TypeContributor typeContributor) {
+ typeContributorRegistrations.add( typeContributor );
+ }
+
+ public SessionFactoryObserver getSessionFactoryObserver() {
+ return sessionFactoryObserver;
+ }
+
+ public void setSessionFactoryObserver(SessionFactoryObserver sessionFactoryObserver) {
+ this.sessionFactoryObserver = sessionFactoryObserver;
+ }
+
+ public CurrentTenantIdentifierResolver getCurrentTenantIdentifierResolver() {
+ return currentTenantIdentifierResolver;
+ }
+
+ public void setCurrentTenantIdentifierResolver(CurrentTenantIdentifierResolver currentTenantIdentifierResolver) {
+ this.currentTenantIdentifierResolver = currentTenantIdentifierResolver;
+ }
+
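A hedged sketch of wiring a multi-tenant identifier resolver, assuming cfg is the Configuration and a hypothetical TenantContext thread-local holder; org.hibernate.context.spi.CurrentTenantIdentifierResolver is the interface expected here:

    cfg.setCurrentTenantIdentifierResolver( new CurrentTenantIdentifierResolver() {
        @Override
        public String resolveCurrentTenantIdentifier() {
            return TenantContext.getTenantId();   // hypothetical per-request tenant lookup
        }

        @Override
        public boolean validateExistingCurrentSessions() {
            // Require that an existing "current session" belongs to the same tenant.
            return true;
        }
    } );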
+ /**
+ * Adds the AttributeConverter Class to this Configuration.
+ *
+ * @param attributeConverterClass The AttributeConverter class.
+ * @param autoApply Should the AttributeConverter be auto applied to property types as specified
+ * by its "entity attribute" parameterized type?
+ */
+	public void addAttributeConverter(Class<? extends AttributeConverter> attributeConverterClass, boolean autoApply) {
+ addAttributeConverter(
+ instantiateAttributeConverter( attributeConverterClass ),
+ autoApply
+ );
+ }
+
+	private AttributeConverter instantiateAttributeConverter(Class<? extends AttributeConverter> attributeConverterClass) {
+ AttributeConverter attributeConverter;
+ try {
+ attributeConverter = attributeConverterClass.newInstance();
}
- setListeners( type, listeners );
+ catch (Exception e) {
+ throw new AnnotationException(
+ "Unable to instantiate AttributeConverter [" + attributeConverterClass.getName() + "]",
+ e
+ );
+ }
+ return attributeConverter;
}
- public void setListener(String type, Object listener) {
- Object[] listeners = null;
- if ( listener != null ) {
- listeners = (Object[]) Array.newInstance( eventListeners.getListenerClassFor(type), 1 );
- listeners[0] = listener;
+ /**
+ * Adds the AttributeConverter Class to this Configuration.
+ *
+ * @param attributeConverterClass The AttributeConverter class.
+ */
+	public void addAttributeConverter(Class<? extends AttributeConverter> attributeConverterClass) {
+ addAttributeConverter( instantiateAttributeConverter( attributeConverterClass ) );
+ }
+
+ /**
+ * Adds the AttributeConverter instance to this Configuration. This form is mainly intended for developers
+	 * to programmatically add their own AttributeConverter instance. HEM, instead, uses the
+ * {@link #addAttributeConverter(Class, boolean)} form
+ *
+ * @param attributeConverter The AttributeConverter instance.
+ */
+ public void addAttributeConverter(AttributeConverter attributeConverter) {
+ boolean autoApply = false;
+ Converter converterAnnotation = attributeConverter.getClass().getAnnotation( Converter.class );
+ if ( converterAnnotation != null ) {
+ autoApply = converterAnnotation.autoApply();
}
- setListeners( type, listeners );
+
+ addAttributeConverter( new AttributeConverterDefinition( attributeConverter, autoApply ) );
}
- public void setListeners(String type, Object[] listeners) {
- if ( "auto-flush".equals( type ) ) {
- if ( listeners == null ) {
- eventListeners.setAutoFlushEventListeners( new AutoFlushEventListener[]{} );
+ /**
+ * Adds the AttributeConverter instance to this Configuration. This form is mainly intended for developers
+	 * to programmatically add their own AttributeConverter instance. HEM, instead, uses the
+ * {@link #addAttributeConverter(Class, boolean)} form
+ *
+ * @param attributeConverter The AttributeConverter instance.
+ * @param autoApply Should the AttributeConverter be auto applied to property types as specified
+ * by its "entity attribute" parameterized type?
+ */
+ public void addAttributeConverter(AttributeConverter attributeConverter, boolean autoApply) {
+ addAttributeConverter( new AttributeConverterDefinition( attributeConverter, autoApply ) );
+ }
+
+ public void addAttributeConverter(AttributeConverterDefinition definition) {
+ if ( attributeConverterDefinitionsByClass == null ) {
+ attributeConverterDefinitionsByClass = new ConcurrentHashMap();
+ }
+
+ final Object old = attributeConverterDefinitionsByClass.put( definition.getAttributeConverter().getClass(), definition );
+
+ if ( old != null ) {
+ throw new AssertionFailure(
+ String.format(
+ "AttributeConverter class [%s] registered multiple times",
+ definition.getAttributeConverter().getClass()
+ )
+ );
+ }
+ }
+
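A hedged sketch of the AttributeConverter registration paths added above, using a hypothetical JPA 2.1 converter and assuming cfg is the Configuration; the Class overload instantiates the converter and, via addAttributeConverter(AttributeConverter), picks up autoApply from the @Converter annotation:

    @Converter( autoApply = true )
    public class YesNoConverter implements AttributeConverter<Boolean, String> {
        @Override
        public String convertToDatabaseColumn(Boolean attribute) {
            return Boolean.TRUE.equals( attribute ) ? "Y" : "N";
        }

        @Override
        public Boolean convertToEntityAttribute(String dbData) {
            return "Y".equals( dbData );
        }
    }

    // Registers the converter; autoApply is read from the @Converter annotation.
    // Registering the same converter class twice raises an AssertionFailure (see above).
    cfg.addAttributeConverter( YesNoConverter.class );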
+ public java.util.Collection getNamedEntityGraphs() {
+ return namedEntityGraphMap == null
+ ? Collections.emptyList()
+ : namedEntityGraphMap.values();
+ }
+
+
+ // Mappings impl ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ /**
+ * Internal implementation of the Mappings interface giving access to the Configuration's internal
+ * metadata repository state ({@link Configuration#classes}, {@link Configuration#tables}, etc).
+ */
+ @SuppressWarnings( {"deprecation", "unchecked"})
+ protected class MappingsImpl implements ExtendedMappings, Serializable {
+
+ private String schemaName;
+
+ public String getSchemaName() {
+ return schemaName;
+ }
+
+ public void setSchemaName(String schemaName) {
+ this.schemaName = schemaName;
+ }
+
+
+ private String catalogName;
+
+ public String getCatalogName() {
+ return catalogName;
+ }
+
+ public void setCatalogName(String catalogName) {
+ this.catalogName = catalogName;
+ }
+
+
+ private String defaultPackage;
+
+ public String getDefaultPackage() {
+ return defaultPackage;
+ }
+
+ public void setDefaultPackage(String defaultPackage) {
+ this.defaultPackage = defaultPackage;
+ }
+
+
+ private boolean autoImport;
+
+ public boolean isAutoImport() {
+ return autoImport;
+ }
+
+ public void setAutoImport(boolean autoImport) {
+ this.autoImport = autoImport;
+ }
+
+
+ private boolean defaultLazy;
+
+ public boolean isDefaultLazy() {
+ return defaultLazy;
+ }
+
+ public void setDefaultLazy(boolean defaultLazy) {
+ this.defaultLazy = defaultLazy;
+ }
+
+
+ private String defaultCascade;
+
+ public String getDefaultCascade() {
+ return defaultCascade;
+ }
+
+ public void setDefaultCascade(String defaultCascade) {
+ this.defaultCascade = defaultCascade;
+ }
+
+
+ private String defaultAccess;
+
+ public String getDefaultAccess() {
+ return defaultAccess;
+ }
+
+ public void setDefaultAccess(String defaultAccess) {
+ this.defaultAccess = defaultAccess;
+ }
+
+
+ public NamingStrategy getNamingStrategy() {
+ return namingStrategy;
+ }
+
+ public void setNamingStrategy(NamingStrategy namingStrategy) {
+ Configuration.this.namingStrategy = namingStrategy;
+ }
+
+ public TypeResolver getTypeResolver() {
+ return typeResolver;
+ }
+
+ public Iterator iterateClasses() {
+ return classes.values().iterator();
+ }
+
+ public PersistentClass getClass(String entityName) {
+ return classes.get( entityName );
+ }
+
+ public PersistentClass locatePersistentClassByEntityName(String entityName) {
+ PersistentClass persistentClass = classes.get( entityName );
+ if ( persistentClass == null ) {
+ String actualEntityName = imports.get( entityName );
+ if ( StringHelper.isNotEmpty( actualEntityName ) ) {
+ persistentClass = classes.get( actualEntityName );
+ }
}
- else {
- eventListeners.setAutoFlushEventListeners( (AutoFlushEventListener[]) listeners );
+ return persistentClass;
+ }
+
+ public void addClass(PersistentClass persistentClass) throws DuplicateMappingException {
+ Object old = classes.put( persistentClass.getEntityName(), persistentClass );
+ if ( old != null ) {
+ throw new DuplicateMappingException( "class/entity", persistentClass.getEntityName() );
}
}
- else if ( "merge".equals( type ) ) {
- if ( listeners == null ) {
- eventListeners.setMergeEventListeners( new MergeEventListener[]{} );
+
+ public void addImport(String entityName, String rename) throws DuplicateMappingException {
+ String existing = imports.put( rename, entityName );
+ if ( existing != null ) {
+ if (existing.equals(entityName)) LOG.duplicateImport(entityName, rename);
+ else throw new DuplicateMappingException("duplicate import: " + rename + " refers to both " + entityName + " and "
+ + existing + " (try using auto-import=\"false\")", "import", rename);
}
- else {
- eventListeners.setMergeEventListeners( (MergeEventListener[]) listeners );
+ }
+
+ public Collection getCollection(String role) {
+ return collections.get( role );
+ }
+
+ public Iterator iterateCollections() {
+ return collections.values().iterator();
+ }
+
+ public void addCollection(Collection collection) throws DuplicateMappingException {
+ Object old = collections.put( collection.getRole(), collection );
+ if ( old != null ) {
+ throw new DuplicateMappingException( "collection role", collection.getRole() );
}
}
- else if ( "create".equals( type ) ) {
- if ( listeners == null ) {
- eventListeners.setPersistEventListeners( new PersistEventListener[]{} );
+
+ public Table getTable(String schema, String catalog, String name) {
+ String key = Table.qualify(catalog, schema, name);
+ return tables.get( key );
+ }
+
+ public Iterator iterateTables() {
+ return tables.values().iterator();
+ }
+
+ public Table addTable(
+ String schema,
+ String catalog,
+ String name,
+ String subselect,
+ boolean isAbstract) {
+ name = getObjectNameNormalizer().normalizeIdentifierQuoting( name );
+ schema = getObjectNameNormalizer().normalizeIdentifierQuoting( schema );
+ catalog = getObjectNameNormalizer().normalizeIdentifierQuoting( catalog );
+
+ String key = subselect == null ? Table.qualify( catalog, schema, name ) : subselect;
+ Table table = tables.get( key );
+
+ if ( table == null ) {
+ table = new Table();
+ table.setAbstract( isAbstract );
+ table.setName( name );
+ table.setSchema( schema );
+ table.setCatalog( catalog );
+ table.setSubselect( subselect );
+ tables.put( key, table );
}
else {
- eventListeners.setPersistEventListeners( (PersistEventListener[]) listeners );
+ if ( !isAbstract ) {
+ table.setAbstract( false );
+ }
}
+
+ return table;
}
- else if ( "create-onflush".equals( type ) ) {
- if ( listeners == null ) {
- eventListeners.setPersistOnFlushEventListeners( new PersistEventListener[]{} );
+
+ public Table addDenormalizedTable(
+ String schema,
+ String catalog,
+ String name,
+ boolean isAbstract,
+ String subselect,
+ Table includedTable) throws DuplicateMappingException {
+ name = getObjectNameNormalizer().normalizeIdentifierQuoting( name );
+ schema = getObjectNameNormalizer().normalizeIdentifierQuoting( schema );
+ catalog = getObjectNameNormalizer().normalizeIdentifierQuoting( catalog );
+
+ String key = subselect == null ? Table.qualify(catalog, schema, name) : subselect;
+ if ( tables.containsKey( key ) ) {
+ throw new DuplicateMappingException( "table", name );
}
- else {
- eventListeners.setPersistOnFlushEventListeners( (PersistEventListener[]) listeners );
+
+ Table table = new DenormalizedTable( includedTable );
+ table.setAbstract( isAbstract );
+ table.setName( name );
+ table.setSchema( schema );
+ table.setCatalog( catalog );
+ table.setSubselect( subselect );
+
+ tables.put( key, table );
+ return table;
+ }
+
+ public NamedQueryDefinition getQuery(String name) {
+ return namedQueries.get( name );
+ }
+
+ public void addQuery(String name, NamedQueryDefinition query) throws DuplicateMappingException {
+ if ( !defaultNamedQueryNames.contains( name ) ) {
+ applyQuery( name, query );
}
}
- else if ( "delete".equals( type ) ) {
- if ( listeners == null ) {
- eventListeners.setDeleteEventListeners( new DeleteEventListener[]{} );
+
+ private void applyQuery(String name, NamedQueryDefinition query) {
+ checkQueryName( name );
+ namedQueries.put( name.intern(), query );
+ }
+
+ private void checkQueryName(String name) throws DuplicateMappingException {
+ if ( namedQueries.containsKey( name ) || namedSqlQueries.containsKey( name ) ) {
+ throw new DuplicateMappingException( "query", name );
}
- else {
- eventListeners.setDeleteEventListeners( (DeleteEventListener[]) listeners );
+ }
+
+ public void addDefaultQuery(String name, NamedQueryDefinition query) {
+ applyQuery( name, query );
+ defaultNamedQueryNames.add( name );
+ }
+
+ public NamedSQLQueryDefinition getSQLQuery(String name) {
+ return namedSqlQueries.get( name );
+ }
+
+ public void addSQLQuery(String name, NamedSQLQueryDefinition query) throws DuplicateMappingException {
+ if ( !defaultNamedNativeQueryNames.contains( name ) ) {
+ applySQLQuery( name, query );
}
}
- else if ( "dirty-check".equals( type ) ) {
- if ( listeners == null ) {
- eventListeners.setDirtyCheckEventListeners( new DirtyCheckEventListener[]{} );
+
+ private void applySQLQuery(String name, NamedSQLQueryDefinition query) throws DuplicateMappingException {
+ checkQueryName( name );
+ namedSqlQueries.put( name.intern(), query );
+ }
+
+ @Override
+ public void addNamedProcedureCallDefinition(NamedProcedureCallDefinition definition)
+ throws DuplicateMappingException {
+ final String name = definition.getRegisteredName();
+ if ( !defaultNamedProcedure.contains( name ) ) {
+ final NamedProcedureCallDefinition previous = namedProcedureCallMap.put( name, definition );
+ if ( previous != null ) {
+ throw new DuplicateMappingException( "named stored procedure query", name );
+ }
}
- else {
- eventListeners.setDirtyCheckEventListeners( (DirtyCheckEventListener[]) listeners );
+ }
+ @Override
+ public void addDefaultNamedProcedureCallDefinition(NamedProcedureCallDefinition definition)
+ throws DuplicateMappingException {
+ addNamedProcedureCallDefinition( definition );
+ defaultNamedProcedure.add( definition.getRegisteredName() );
+ }
+
+ @Override
+ public void addNamedEntityGraphDefintion(NamedEntityGraphDefinition definition)
+ throws DuplicateMappingException {
+ final String name = definition.getRegisteredName();
+
+ final NamedEntityGraphDefinition previous = namedEntityGraphMap.put( name, definition );
+ if ( previous != null ) {
+ throw new DuplicateMappingException( "NamedEntityGraph", name );
}
}
- else if ( "evict".equals( type ) ) {
- if ( listeners == null ) {
- eventListeners.setEvictEventListeners( new EvictEventListener[]{} );
+
+ public void addDefaultSQLQuery(String name, NamedSQLQueryDefinition query) {
+ applySQLQuery( name, query );
+ defaultNamedNativeQueryNames.add( name );
+ }
+
+ public ResultSetMappingDefinition getResultSetMapping(String name) {
+ return sqlResultSetMappings.get(name);
+ }
+
+ public void addResultSetMapping(ResultSetMappingDefinition sqlResultSetMapping) throws DuplicateMappingException {
+ if ( !defaultSqlResultSetMappingNames.contains( sqlResultSetMapping.getName() ) ) {
+ applyResultSetMapping( sqlResultSetMapping );
}
- else {
- eventListeners.setEvictEventListeners( (EvictEventListener[]) listeners );
+ }
+
+ public void applyResultSetMapping(ResultSetMappingDefinition sqlResultSetMapping) throws DuplicateMappingException {
+ Object old = sqlResultSetMappings.put( sqlResultSetMapping.getName(), sqlResultSetMapping );
+ if ( old != null ) {
+ throw new DuplicateMappingException( "resultSet", sqlResultSetMapping.getName() );
}
}
- else if ( "flush".equals( type ) ) {
- if ( listeners == null ) {
- eventListeners.setFlushEventListeners( new FlushEventListener[]{} );
+
+ public void addDefaultResultSetMapping(ResultSetMappingDefinition definition) {
+ final String name = definition.getName();
+ if ( !defaultSqlResultSetMappingNames.contains( name ) && getResultSetMapping( name ) != null ) {
+ removeResultSetMapping( name );
}
- else {
- eventListeners.setFlushEventListeners( (FlushEventListener[]) listeners );
+ applyResultSetMapping( definition );
+ defaultSqlResultSetMappingNames.add( name );
+ }
+
+ protected void removeResultSetMapping(String name) {
+ sqlResultSetMappings.remove( name );
+ }
+
+ public TypeDef getTypeDef(String typeName) {
+ return typeDefs.get( typeName );
+ }
+
+ public void addTypeDef(String typeName, String typeClass, Properties paramMap) {
+ TypeDef def = new TypeDef( typeClass, paramMap );
+ typeDefs.put( typeName, def );
+ LOG.debugf( "Added %s with class %s", typeName, typeClass );
+ }
+
+ public Map getFilterDefinitions() {
+ return filterDefinitions;
+ }
+
+ public FilterDefinition getFilterDefinition(String name) {
+ return filterDefinitions.get( name );
+ }
+
+ public void addFilterDefinition(FilterDefinition definition) {
+ filterDefinitions.put( definition.getFilterName(), definition );
+ }
+
+ public FetchProfile findOrCreateFetchProfile(String name, MetadataSource source) {
+ FetchProfile profile = fetchProfiles.get( name );
+ if ( profile == null ) {
+ profile = new FetchProfile( name, source );
+ fetchProfiles.put( name, profile );
}
+ return profile;
}
- else if ( "flush-entity".equals( type ) ) {
- if ( listeners == null ) {
- eventListeners.setFlushEntityEventListeners( new FlushEntityEventListener[]{} );
+
+ public Iterator iterateAuxliaryDatabaseObjects() {
+ return iterateAuxiliaryDatabaseObjects();
+ }
+
+ public Iterator iterateAuxiliaryDatabaseObjects() {
+ return auxiliaryDatabaseObjects.iterator();
+ }
+
+ public ListIterator iterateAuxliaryDatabaseObjectsInReverse() {
+ return iterateAuxiliaryDatabaseObjectsInReverse();
+ }
+
+ public ListIterator iterateAuxiliaryDatabaseObjectsInReverse() {
+ return auxiliaryDatabaseObjects.listIterator( auxiliaryDatabaseObjects.size() );
+ }
+
+ public void addAuxiliaryDatabaseObject(AuxiliaryDatabaseObject auxiliaryDatabaseObject) {
+ auxiliaryDatabaseObjects.add( auxiliaryDatabaseObject );
+ }
+
+ /**
+ * Internal struct used to help track physical table names to logical table names.
+ */
+ private class TableDescription implements Serializable {
+ final String logicalName;
+ final Table denormalizedSupertable;
+
+ TableDescription(String logicalName, Table denormalizedSupertable) {
+ this.logicalName = logicalName;
+ this.denormalizedSupertable = denormalizedSupertable;
}
- else {
- eventListeners.setFlushEntityEventListeners( (FlushEntityEventListener[]) listeners );
+ }
+
+ public String getLogicalTableName(Table table) throws MappingException {
+ return getLogicalTableName( table.getQuotedSchema(), table.getQuotedCatalog(), table.getQuotedName() );
+ }
+
+ private String getLogicalTableName(String schema, String catalog, String physicalName) throws MappingException {
+ String key = buildTableNameKey( schema, catalog, physicalName );
+ TableDescription descriptor = (TableDescription) tableNameBinding.get( key );
+ if (descriptor == null) {
+ throw new MappingException( "Unable to find physical table: " + physicalName);
}
+ return descriptor.logicalName;
}
- else if ( "load".equals( type ) ) {
- if ( listeners == null ) {
- eventListeners.setLoadEventListeners( new LoadEventListener[]{} );
+
+ public void addTableBinding(
+ String schema,
+ String catalog,
+ String logicalName,
+ String physicalName,
+ Table denormalizedSuperTable) throws DuplicateMappingException {
+ String key = buildTableNameKey( schema, catalog, physicalName );
+ TableDescription tableDescription = new TableDescription( logicalName, denormalizedSuperTable );
+ TableDescription oldDescriptor = ( TableDescription ) tableNameBinding.put( key, tableDescription );
+ if ( oldDescriptor != null && ! oldDescriptor.logicalName.equals( logicalName ) ) {
+ //TODO possibly relax that
+ throw new DuplicateMappingException(
+ "Same physical table name [" + physicalName + "] references several logical table names: [" +
+ oldDescriptor.logicalName + "], [" + logicalName + ']',
+ "table",
+ physicalName
+ );
}
- else {
- eventListeners.setLoadEventListeners( (LoadEventListener[]) listeners );
- }
}
- else if ( "load-collection".equals( type ) ) {
- if ( listeners == null ) {
- eventListeners.setInitializeCollectionEventListeners(
- new InitializeCollectionEventListener[]{}
- );
+
+ private String buildTableNameKey(String schema, String catalog, String finalName) {
+ StringBuilder keyBuilder = new StringBuilder();
+ if (schema != null) keyBuilder.append( schema );
+ keyBuilder.append( ".");
+ if (catalog != null) keyBuilder.append( catalog );
+ keyBuilder.append( ".");
+ keyBuilder.append( finalName );
+ return keyBuilder.toString();
+ }
+
+ /**
+ * Internal struct used to maintain xref between physical and logical column
+ * names for a table. Mainly this is used to ensure that the defined
+ * {@link NamingStrategy} is not creating duplicate column names.
+ */
+ private class TableColumnNameBinding implements Serializable {
+ private final String tableName;
+			private Map/*<String, String>*/ logicalToPhysical = new HashMap();
+			private Map/*<String, String>*/ physicalToLogical = new HashMap();
+
+ private TableColumnNameBinding(String tableName) {
+ this.tableName = tableName;
}
- else {
- eventListeners.setInitializeCollectionEventListeners(
- (InitializeCollectionEventListener[]) listeners
+
+ public void addBinding(String logicalName, Column physicalColumn) {
+ bindLogicalToPhysical( logicalName, physicalColumn );
+ bindPhysicalToLogical( logicalName, physicalColumn );
+ }
+
+ private void bindLogicalToPhysical(String logicalName, Column physicalColumn) throws DuplicateMappingException {
+ final String logicalKey = logicalName.toLowerCase();
+ final String physicalName = physicalColumn.getQuotedName();
+ final String existingPhysicalName = ( String ) logicalToPhysical.put( logicalKey, physicalName );
+ if ( existingPhysicalName != null ) {
+ boolean areSamePhysicalColumn = physicalColumn.isQuoted()
+ ? existingPhysicalName.equals( physicalName )
+ : existingPhysicalName.equalsIgnoreCase( physicalName );
+ if ( ! areSamePhysicalColumn ) {
+ throw new DuplicateMappingException(
+ " Table [" + tableName + "] contains logical column name [" + logicalName
+ + "] referenced by multiple physical column names: [" + existingPhysicalName
+ + "], [" + physicalName + "]",
+ "column-binding",
+ tableName + "." + logicalName
+ );
+ }
+ }
+ }
+
+ private void bindPhysicalToLogical(String logicalName, Column physicalColumn) throws DuplicateMappingException {
+ final String physicalName = physicalColumn.getQuotedName();
+ final String existingLogicalName = ( String ) physicalToLogical.put( physicalName, logicalName );
+ if ( existingLogicalName != null && ! existingLogicalName.equals( logicalName ) ) {
+ throw new DuplicateMappingException(
+ " Table [" + tableName + "] contains physical column name [" + physicalName
+ + "] represented by different logical column names: [" + existingLogicalName
+ + "], [" + logicalName + "]",
+ "column-binding",
+ tableName + "." + physicalName
);
+ }
}
}
- else if ( "lock".equals( type ) ) {
- if ( listeners == null ) {
- eventListeners.setLockEventListeners( new LockEventListener[]{} );
+
+ public void addColumnBinding(String logicalName, Column physicalColumn, Table table) throws DuplicateMappingException {
+ TableColumnNameBinding binding = ( TableColumnNameBinding ) columnNameBindingPerTable.get( table );
+ if ( binding == null ) {
+ binding = new TableColumnNameBinding( table.getName() );
+ columnNameBindingPerTable.put( table, binding );
}
- else {
- eventListeners.setLockEventListeners( (LockEventListener[]) listeners );
+ binding.addBinding( logicalName, physicalColumn );
+ }
+
+ public String getPhysicalColumnName(String logicalName, Table table) throws MappingException {
+ logicalName = logicalName.toLowerCase();
+ String finalName = null;
+ Table currentTable = table;
+ do {
+ TableColumnNameBinding binding = ( TableColumnNameBinding ) columnNameBindingPerTable.get( currentTable );
+ if ( binding != null ) {
+ finalName = ( String ) binding.logicalToPhysical.get( logicalName );
+ }
+ String key = buildTableNameKey(
+ currentTable.getQuotedSchema(), currentTable.getQuotedCatalog(), currentTable.getQuotedName()
+ );
+ TableDescription description = ( TableDescription ) tableNameBinding.get( key );
+ if ( description != null ) {
+ currentTable = description.denormalizedSupertable;
+ }
+ else {
+ currentTable = null;
+ }
+ } while ( finalName == null && currentTable != null );
+
+ if ( finalName == null ) {
+ throw new MappingException(
+ "Unable to find column with logical name " + logicalName + " in table " + table.getName()
+ );
}
+ return finalName;
}
- else if ( "refresh".equals( type ) ) {
- if ( listeners == null ) {
- eventListeners.setRefreshEventListeners( new RefreshEventListener[]{} );
+ @Override
+ public String getLogicalColumnName(String physicalName, Table table) throws MappingException {
+ String logical = null;
+ Table currentTable = table;
+ TableDescription description = null;
+ do {
+ TableColumnNameBinding binding = ( TableColumnNameBinding ) columnNameBindingPerTable.get( currentTable );
+ if ( binding != null ) {
+ logical = ( String ) binding.physicalToLogical.get( physicalName );
+ }
+ String key = buildTableNameKey(
+ currentTable.getQuotedSchema(), currentTable.getQuotedCatalog(), currentTable.getQuotedName()
+ );
+ description = ( TableDescription ) tableNameBinding.get( key );
+ if ( description != null ) {
+ currentTable = description.denormalizedSupertable;
+ }
+ else {
+ currentTable = null;
+ }
}
- else {
- eventListeners.setRefreshEventListeners( (RefreshEventListener[]) listeners );
+ while ( logical == null && currentTable != null );
+ if ( logical == null ) {
+ throw new MappingException(
+ "Unable to find logical column name from physical name "
+ + physicalName + " in table " + table.getName()
+ );
}
+ return logical;
}
- else if ( "replicate".equals( type ) ) {
- if ( listeners == null ) {
- eventListeners.setReplicateEventListeners( new ReplicateEventListener[]{} );
+
+ public void addSecondPass(SecondPass sp) {
+ addSecondPass( sp, false );
+ }
+
+ public void addSecondPass(SecondPass sp, boolean onTopOfTheQueue) {
+ if ( onTopOfTheQueue ) {
+ secondPasses.add( 0, sp );
}
else {
- eventListeners.setReplicateEventListeners( (ReplicateEventListener[]) listeners );
+ secondPasses.add( sp );
}
}
- else if ( "save-update".equals( type ) ) {
- if ( listeners == null ) {
- eventListeners.setSaveOrUpdateEventListeners( new SaveOrUpdateEventListener[]{} );
+
+ @Override
+ public AttributeConverterDefinition locateAttributeConverter(Class converterClass) {
+ if ( attributeConverterDefinitionsByClass == null ) {
+ return null;
}
- else {
- eventListeners.setSaveOrUpdateEventListeners( (SaveOrUpdateEventListener[]) listeners );
+ return attributeConverterDefinitionsByClass.get( converterClass );
+ }
+
+ @Override
+ public java.util.Collection getAttributeConverters() {
+ if ( attributeConverterDefinitionsByClass == null ) {
+ return Collections.emptyList();
}
+ return attributeConverterDefinitionsByClass.values();
}
- else if ( "save".equals( type ) ) {
- if ( listeners == null ) {
- eventListeners.setSaveEventListeners( new SaveOrUpdateEventListener[]{} );
+
+ public void addPropertyReference(String referencedClass, String propertyName) {
+ propertyReferences.add( new PropertyReference( referencedClass, propertyName, false ) );
+ }
+
+ public void addUniquePropertyReference(String referencedClass, String propertyName) {
+ propertyReferences.add( new PropertyReference( referencedClass, propertyName, true ) );
+ }
+
+ public void addToExtendsQueue(ExtendsQueueEntry entry) {
+ extendsQueue.put( entry, null );
+ }
+
+ public MutableIdentifierGeneratorFactory getIdentifierGeneratorFactory() {
+ return identifierGeneratorFactory;
+ }
+
+ public void addMappedSuperclass(Class type, MappedSuperclass mappedSuperclass) {
+ mappedSuperClasses.put( type, mappedSuperclass );
+ }
+
+ public MappedSuperclass getMappedSuperclass(Class type) {
+ return mappedSuperClasses.get( type );
+ }
+
+ public ObjectNameNormalizer getObjectNameNormalizer() {
+ return normalizer;
+ }
+
+ public Properties getConfigurationProperties() {
+ return properties;
+ }
+
+ public void addDefaultGenerator(IdGenerator generator) {
+ this.addGenerator( generator );
+ defaultNamedGenerators.add( generator.getName() );
+ }
+
+ public boolean isInSecondPass() {
+ return inSecondPass;
+ }
+
+ public PropertyData getPropertyAnnotatedWithMapsId(XClass entityType, String propertyName) {
+			final Map<String, PropertyData> map = propertiesAnnotatedWithMapsId.get( entityType );
+ return map == null ? null : map.get( propertyName );
+ }
+
+ public void addPropertyAnnotatedWithMapsId(XClass entityType, PropertyData property) {
+ Map map = propertiesAnnotatedWithMapsId.get( entityType );
+ if ( map == null ) {
+ map = new HashMap();
+ propertiesAnnotatedWithMapsId.put( entityType, map );
}
- else {
- eventListeners.setSaveEventListeners( (SaveOrUpdateEventListener[]) listeners );
+ map.put( property.getProperty().getAnnotation( MapsId.class ).value(), property );
+ }
+
+ public boolean isSpecjProprietarySyntaxEnabled() {
+ return specjProprietarySyntaxEnabled;
+ }
+
+ public void addPropertyAnnotatedWithMapsIdSpecj(XClass entityType, PropertyData property, String mapsIdValue) {
+ Map map = propertiesAnnotatedWithMapsId.get( entityType );
+ if ( map == null ) {
+ map = new HashMap();
+ propertiesAnnotatedWithMapsId.put( entityType, map );
}
+ map.put( mapsIdValue, property );
}
- else if ( "update".equals( type ) ) {
- if ( listeners == null ) {
- eventListeners.setUpdateEventListeners( new SaveOrUpdateEventListener[]{} );
+
+ public PropertyData getPropertyAnnotatedWithIdAndToOne(XClass entityType, String propertyName) {
+			final Map<String, PropertyData> map = propertiesAnnotatedWithIdAndToOne.get( entityType );
+ return map == null ? null : map.get( propertyName );
+ }
+
+ public void addToOneAndIdProperty(XClass entityType, PropertyData property) {
+ Map map = propertiesAnnotatedWithIdAndToOne.get( entityType );
+ if ( map == null ) {
+ map = new HashMap();
+ propertiesAnnotatedWithIdAndToOne.put( entityType, map );
}
- else {
- eventListeners.setUpdateEventListeners( (SaveOrUpdateEventListener[]) listeners );
+ map.put( property.getPropertyName(), property );
+ }
+
+ private Boolean useNewGeneratorMappings;
+
+ @Override
+ public boolean useNewGeneratorMappings() {
+ if ( useNewGeneratorMappings == null ) {
+ final String booleanName = getConfigurationProperties()
+ .getProperty( AvailableSettings.USE_NEW_ID_GENERATOR_MAPPINGS );
+ useNewGeneratorMappings = Boolean.valueOf( booleanName );
}
+ return useNewGeneratorMappings;
}
- else if ( "pre-load".equals( type ) ) {
- if ( listeners == null ) {
- eventListeners.setPreLoadEventListeners( new PreLoadEventListener[]{} );
+
+
+ private Boolean implicitDiscriminatorColumnForJoinedInheritance;
+
+ @Override
+ public boolean useImplicitDiscriminatorColumnForJoinedInheritance() {
+ if ( implicitDiscriminatorColumnForJoinedInheritance == null ) {
+ final String booleanName = getConfigurationProperties()
+ .getProperty( AvailableSettings.IMPLICIT_DISCRIMINATOR_COLUMNS_FOR_JOINED_SUBCLASS );
+ implicitDiscriminatorColumnForJoinedInheritance = Boolean.valueOf( booleanName );
}
- else {
- eventListeners.setPreLoadEventListeners( (PreLoadEventListener[]) listeners );
- }
+ return implicitDiscriminatorColumnForJoinedInheritance;
}
- else if ( "pre-update".equals( type ) ) {
- if ( listeners == null ) {
- eventListeners.setPreUpdateEventListeners( new PreUpdateEventListener[]{} );
+
+
+ private Boolean ignoreExplicitDiscriminatorColumnForJoinedInheritance;
+
+ @Override
+ public boolean ignoreExplicitDiscriminatorColumnForJoinedInheritance() {
+ if ( ignoreExplicitDiscriminatorColumnForJoinedInheritance == null ) {
+ final String booleanName = getConfigurationProperties()
+ .getProperty( AvailableSettings.IGNORE_EXPLICIT_DISCRIMINATOR_COLUMNS_FOR_JOINED_SUBCLASS );
+ ignoreExplicitDiscriminatorColumnForJoinedInheritance = Boolean.valueOf( booleanName );
}
- else {
- eventListeners.setPreUpdateEventListeners( (PreUpdateEventListener[]) listeners );
- }
+ return ignoreExplicitDiscriminatorColumnForJoinedInheritance;
}
- else if ( "pre-delete".equals( type ) ) {
- if ( listeners == null ) {
- eventListeners.setPreDeleteEventListeners( new PreDeleteEventListener[]{} );
+
+
+ private Boolean useNationalizedCharacterData;
+
+ @Override
+ public boolean useNationalizedCharacterData() {
+ if ( useNationalizedCharacterData == null ) {
+ final String booleanName = getConfigurationProperties()
+ .getProperty( AvailableSettings.USE_NATIONALIZED_CHARACTER_DATA );
+ useNationalizedCharacterData = Boolean.valueOf( booleanName );
}
- else {
- eventListeners.setPreDeleteEventListeners( (PreDeleteEventListener[]) listeners );
- }
+ return useNationalizedCharacterData;
}
- else if ( "pre-insert".equals( type ) ) {
- if ( listeners == null ) {
- eventListeners.setPreInsertEventListeners( new PreInsertEventListener[]{} );
+
+ private Boolean forceDiscriminatorInSelectsByDefault;
+
+ @Override
+ public boolean forceDiscriminatorInSelectsByDefault() {
+ if ( forceDiscriminatorInSelectsByDefault == null ) {
+ final String booleanName = getConfigurationProperties()
+ .getProperty( AvailableSettings.FORCE_DISCRIMINATOR_IN_SELECTS_BY_DEFAULT );
+ forceDiscriminatorInSelectsByDefault = Boolean.valueOf( booleanName );
}
- else {
- eventListeners.setPreInsertEventListeners( (PreInsertEventListener[]) listeners );
- }
+ return forceDiscriminatorInSelectsByDefault;
}
- else if ( "pre-collection-recreate".equals( type ) ) {
- if ( listeners == null ) {
- eventListeners.setPreCollectionRecreateEventListeners( new PreCollectionRecreateEventListener[]{} );
+
+ public IdGenerator getGenerator(String name) {
+ return getGenerator( name, null );
+ }
+
+		public IdGenerator getGenerator(String name, Map<String, IdGenerator> localGenerators) {
+ if ( localGenerators != null ) {
+ IdGenerator result = localGenerators.get( name );
+ if ( result != null ) {
+ return result;
+ }
}
- else {
- eventListeners.setPreCollectionRecreateEventListeners( (PreCollectionRecreateEventListener[]) listeners );
- }
+ return namedGenerators.get( name );
}
- else if ( "pre-collection-remove".equals( type ) ) {
- if ( listeners == null ) {
- eventListeners.setPreCollectionRemoveEventListeners( new PreCollectionRemoveEventListener[]{} );
+
+ public void addGenerator(IdGenerator generator) {
+ if ( !defaultNamedGenerators.contains( generator.getName() ) ) {
+ IdGenerator old = namedGenerators.put( generator.getName(), generator );
+ if ( old != null ) {
+ LOG.duplicateGeneratorName( old.getName() );
+ }
}
- else {
- eventListeners.setPreCollectionRemoveEventListeners( ( PreCollectionRemoveEventListener[]) listeners );
- }
}
- else if ( "pre-collection-update".equals( type ) ) {
- if ( listeners == null ) {
- eventListeners.setPreCollectionUpdateEventListeners( new PreCollectionUpdateEventListener[]{} );
+
+ public void addGeneratorTable(String name, Properties params) {
+ Object old = generatorTables.put( name, params );
+ if ( old != null ) {
+ LOG.duplicateGeneratorTable( name );
}
- else {
- eventListeners.setPreCollectionUpdateEventListeners( ( PreCollectionUpdateEventListener[]) listeners );
- }
}
- else if ( "post-load".equals( type ) ) {
- if ( listeners == null ) {
- eventListeners.setPostLoadEventListeners( new PostLoadEventListener[]{} );
+
+		public Properties getGeneratorTableProperties(String name, Map<String, Properties> localGeneratorTables) {
+ if ( localGeneratorTables != null ) {
+ Properties result = localGeneratorTables.get( name );
+ if ( result != null ) {
+ return result;
+ }
}
- else {
- eventListeners.setPostLoadEventListeners( (PostLoadEventListener[]) listeners );
- }
+ return generatorTables.get( name );
}
- else if ( "post-update".equals( type ) ) {
- if ( listeners == null ) {
- eventListeners.setPostUpdateEventListeners( new PostUpdateEventListener[]{} );
+
+ public Map getJoins(String entityName) {
+ return joins.get( entityName );
+ }
+
+ public void addJoins(PersistentClass persistentClass, Map joins) {
+ Object old = Configuration.this.joins.put( persistentClass.getEntityName(), joins );
+ if ( old != null ) {
+ LOG.duplicateJoins( persistentClass.getEntityName() );
}
- else {
- eventListeners.setPostUpdateEventListeners( (PostUpdateEventListener[]) listeners );
- }
}
- else if ( "post-delete".equals( type ) ) {
- if ( listeners == null ) {
- eventListeners.setPostDeleteEventListeners( new PostDeleteEventListener[]{} );
+
+ public AnnotatedClassType getClassType(XClass clazz) {
+ AnnotatedClassType type = classTypes.get( clazz.getName() );
+ if ( type == null ) {
+ return addClassType( clazz );
}
else {
- eventListeners.setPostDeleteEventListeners( (PostDeleteEventListener[]) listeners );
+ return type;
}
}
- else if ( "post-insert".equals( type ) ) {
- if ( listeners == null ) {
- eventListeners.setPostInsertEventListeners( new PostInsertEventListener[]{} );
+
+ //FIXME should be private but is part of the ExtendedMapping contract
+
+ public AnnotatedClassType addClassType(XClass clazz) {
+ AnnotatedClassType type;
+ if ( clazz.isAnnotationPresent( Entity.class ) ) {
+ type = AnnotatedClassType.ENTITY;
}
- else {
- eventListeners.setPostInsertEventListeners( (PostInsertEventListener[]) listeners );
+ else if ( clazz.isAnnotationPresent( Embeddable.class ) ) {
+ type = AnnotatedClassType.EMBEDDABLE;
}
- }
- else if ( "post-commit-update".equals( type ) ) {
- if ( listeners == null ) {
- eventListeners.setPostCommitUpdateEventListeners(
- new PostUpdateEventListener[]{}
- );
+ else if ( clazz.isAnnotationPresent( javax.persistence.MappedSuperclass.class ) ) {
+ type = AnnotatedClassType.EMBEDDABLE_SUPERCLASS;
}
else {
- eventListeners.setPostCommitUpdateEventListeners( (PostUpdateEventListener[]) listeners );
+ type = AnnotatedClassType.NONE;
}
+ classTypes.put( clazz.getName(), type );
+ return type;
}
- else if ( "post-commit-delete".equals( type ) ) {
- if ( listeners == null ) {
- eventListeners.setPostCommitDeleteEventListeners(
- new PostDeleteEventListener[]{}
- );
+
+ /**
+ * {@inheritDoc}
+ */
+		public Map<Table, List<String[]>> getTableUniqueConstraints() {
+			final Map<Table, List<String[]>> deprecatedStructure = new HashMap<Table, List<String[]>>(
+					CollectionHelper.determineProperSizing( getUniqueConstraintHoldersByTable() ),
+					CollectionHelper.LOAD_FACTOR
+			);
+			for ( Map.Entry<Table, List<UniqueConstraintHolder>> entry : getUniqueConstraintHoldersByTable().entrySet() ) {
+				List<String[]> columnsPerConstraint = new ArrayList<String[]>(
+ CollectionHelper.determineProperSizing( entry.getValue().size() )
+ );
+ deprecatedStructure.put( entry.getKey(), columnsPerConstraint );
+ for ( UniqueConstraintHolder holder : entry.getValue() ) {
+ columnsPerConstraint.add( holder.getColumns() );
+ }
}
- else {
- eventListeners.setPostCommitDeleteEventListeners( (PostDeleteEventListener[]) listeners );
- }
+ return deprecatedStructure;
}
- else if ( "post-commit-insert".equals( type ) ) {
- if ( listeners == null ) {
- eventListeners.setPostCommitInsertEventListeners(
- new PostInsertEventListener[]{}
+
+		public Map<Table, List<UniqueConstraintHolder>> getUniqueConstraintHoldersByTable() {
+ return uniqueConstraintHoldersByTable;
+ }
+
+ @SuppressWarnings({ "unchecked" })
+ public void addUniqueConstraints(Table table, List uniqueConstraints) {
+			List<UniqueConstraintHolder> constraintHolders = new ArrayList<UniqueConstraintHolder>(
+ CollectionHelper.determineProperSizing( uniqueConstraints.size() )
+ );
+
+ int keyNameBase = determineCurrentNumberOfUniqueConstraintHolders( table );
+			for ( String[] columns : ( List<String[]> ) uniqueConstraints ) {
+ final String keyName = "key" + keyNameBase++;
+ constraintHolders.add(
+ new UniqueConstraintHolder().setName( keyName ).setColumns( columns )
);
}
- else {
- eventListeners.setPostCommitInsertEventListeners( (PostInsertEventListener[]) listeners );
- }
+ addUniqueConstraintHolders( table, constraintHolders );
}
- else if ( "post-collection-recreate".equals( type ) ) {
- if ( listeners == null ) {
- eventListeners.setPostCollectionRecreateEventListeners( new PostCollectionRecreateEventListener[]{} );
+
+ private int determineCurrentNumberOfUniqueConstraintHolders(Table table) {
+ List currentHolders = getUniqueConstraintHoldersByTable().get( table );
+ return currentHolders == null
+ ? 0
+ : currentHolders.size();
+ }
+
+ public void addUniqueConstraintHolders(Table table, List uniqueConstraintHolders) {
+ List holderList = getUniqueConstraintHoldersByTable().get( table );
+ if ( holderList == null ) {
+ holderList = new ArrayList();
+ getUniqueConstraintHoldersByTable().put( table, holderList );
}
- else {
- eventListeners.setPostCollectionRecreateEventListeners( (PostCollectionRecreateEventListener[]) listeners );
- }
+ holderList.addAll( uniqueConstraintHolders );
}
- else if ( "post-collection-remove".equals( type ) ) {
- if ( listeners == null ) {
- eventListeners.setPostCollectionRemoveEventListeners( new PostCollectionRemoveEventListener[]{} );
+
+ public void addJpaIndexHolders(Table table, List holders) {
+ List holderList = jpaIndexHoldersByTable.get( table );
+ if ( holderList == null ) {
+ holderList = new ArrayList();
+ jpaIndexHoldersByTable.put( table, holderList );
}
- else {
- eventListeners.setPostCollectionRemoveEventListeners( ( PostCollectionRemoveEventListener[]) listeners );
- }
+ holderList.addAll( holders );
}
- else if ( "post-collection-update".equals( type ) ) {
- if ( listeners == null ) {
- eventListeners.setPostCollectionUpdateEventListeners( new PostCollectionUpdateEventListener[]{} );
+
+ public void addMappedBy(String entityName, String propertyName, String inversePropertyName) {
+ mappedByResolver.put( entityName + "." + propertyName, inversePropertyName );
+ }
+
+ public String getFromMappedBy(String entityName, String propertyName) {
+ return mappedByResolver.get( entityName + "." + propertyName );
+ }
+
+ public void addPropertyReferencedAssociation(String entityName, String propertyName, String propertyRef) {
+ propertyRefResolver.put( entityName + "." + propertyName, propertyRef );
+ }
+
+ public String getPropertyReferencedAssociation(String entityName, String propertyName) {
+ return propertyRefResolver.get( entityName + "." + propertyName );
+ }
+
+ public ReflectionManager getReflectionManager() {
+ return reflectionManager;
+ }
+
+ public Map getClasses() {
+ return classes;
+ }
+
+ public void addAnyMetaDef(AnyMetaDef defAnn) throws AnnotationException {
+ if ( anyMetaDefs.containsKey( defAnn.name() ) ) {
+ throw new AnnotationException( "Two @AnyMetaDef with the same name defined: " + defAnn.name() );
}
- else {
- eventListeners.setPostCollectionUpdateEventListeners( ( PostCollectionUpdateEventListener[]) listeners );
- }
+ anyMetaDefs.put( defAnn.name(), defAnn );
}
- else {
- throw new MappingException("Unrecognized listener type [" + type + "]");
+
+ public AnyMetaDef getAnyMetaDef(String name) {
+ return anyMetaDefs.get( name );
}
}
- public EventListeners getEventListeners() {
- return eventListeners;
- }
+ final ObjectNameNormalizer normalizer = new ObjectNameNormalizerImpl();
- RootClass getRootClassMapping(String clazz) throws MappingException {
- try {
- return (RootClass) getClassMapping( clazz );
+ final class ObjectNameNormalizerImpl extends ObjectNameNormalizer implements Serializable {
+ public boolean isUseQuotedIdentifiersGlobally() {
+ //Do not cache this value as we lazily set it in Hibernate Annotation (AnnotationConfiguration)
+			//TODO use a dedicated protected useQuotedIdentifier flag in Configuration (overridden by AnnotationConfiguration)
+ String setting = (String) properties.get( Environment.GLOBALLY_QUOTED_IDENTIFIERS );
+ return setting != null && Boolean.valueOf( setting );
}
- catch (ClassCastException cce) {
- throw new MappingException( "You may only specify a cache for root mappings" );
+
+ public NamingStrategy getNamingStrategy() {
+ return namingStrategy;
}
}
- /**
- * Set up a cache for an entity class
- *
- * @param clazz
- * @param concurrencyStrategy
- * @return Configuration
- * @throws MappingException
- */
- public Configuration setCacheConcurrencyStrategy(String clazz, String concurrencyStrategy)
- throws MappingException {
- setCacheConcurrencyStrategy( clazz, concurrencyStrategy, clazz );
- return this;
- }
+ protected class MetadataSourceQueue implements Serializable {
+ private LinkedHashMap> hbmMetadataToEntityNamesMap
+ = new LinkedHashMap>();
+ private Map hbmMetadataByEntityNameXRef = new HashMap();
- public void setCacheConcurrencyStrategy(String clazz, String concurrencyStrategy, String region)
- throws MappingException {
- setCacheConcurrencyStrategy( clazz, concurrencyStrategy, region, true );
- }
+ //XClass are not serializable by default
+ private transient List annotatedClasses = new ArrayList();
+ //only used during the secondPhaseCompile pass, hence does not need to be serialized
+ private transient Map annotatedClassesByEntityNameMap = new HashMap();
- void setCacheConcurrencyStrategy(String clazz, String concurrencyStrategy, String region, boolean includeLazy)
- throws MappingException {
- RootClass rootClass = getRootClassMapping( clazz );
- if ( rootClass == null ) {
- throw new MappingException( "Cannot cache an unknown entity: " + clazz );
+ private void readObject(ObjectInputStream ois) throws IOException, ClassNotFoundException {
+ ois.defaultReadObject();
+ annotatedClassesByEntityNameMap = new HashMap();
+
+ //build back annotatedClasses
+ @SuppressWarnings( "unchecked" )
+ List serializableAnnotatedClasses = (List) ois.readObject();
+ annotatedClasses = new ArrayList( serializableAnnotatedClasses.size() );
+ for ( Class clazz : serializableAnnotatedClasses ) {
+ annotatedClasses.add( reflectionManager.toXClass( clazz ) );
+ }
}
- rootClass.setCacheConcurrencyStrategy( concurrencyStrategy );
- rootClass.setCacheRegionName( region );
- rootClass.setLazyPropertiesCacheable( includeLazy );
- }
- /**
- * Set up a cache for a collection role
- *
- * @param collectionRole
- * @param concurrencyStrategy
- * @return Configuration
- * @throws MappingException
- */
- public Configuration setCollectionCacheConcurrencyStrategy(String collectionRole, String concurrencyStrategy)
- throws MappingException {
- setCollectionCacheConcurrencyStrategy( collectionRole, concurrencyStrategy, collectionRole );
- return this;
- }
+ private void writeObject(java.io.ObjectOutputStream out) throws IOException {
+ out.defaultWriteObject();
+ List serializableAnnotatedClasses = new ArrayList( annotatedClasses.size() );
+ for ( XClass xClass : annotatedClasses ) {
+ serializableAnnotatedClasses.add( reflectionManager.toClass( xClass ) );
+ }
+ out.writeObject( serializableAnnotatedClasses );
+ }
- public void setCollectionCacheConcurrencyStrategy(String collectionRole, String concurrencyStrategy, String region)
- throws MappingException {
- Collection collection = getCollectionMapping( collectionRole );
- if ( collection == null ) {
- throw new MappingException( "Cannot cache an unknown collection: " + collectionRole );
+ public void add(XmlDocument metadataXml) {
+ final Document document = metadataXml.getDocumentTree();
+ final Element hmNode = document.getRootElement();
+ Attribute packNode = hmNode.attribute( "package" );
+ String defaultPackage = packNode != null ? packNode.getValue() : "";
+ Set entityNames = new HashSet();
+ findClassNames( defaultPackage, hmNode, entityNames );
+ for ( String entity : entityNames ) {
+ hbmMetadataByEntityNameXRef.put( entity, metadataXml );
+ }
+ this.hbmMetadataToEntityNamesMap.put( metadataXml, entityNames );
}
- collection.setCacheConcurrencyStrategy( concurrencyStrategy );
- collection.setCacheRegionName( region );
- }
- /**
- * Get the query language imports
- *
- * @return a mapping from "import" names to fully qualified class names
- */
- public Map getImports() {
- return imports;
- }
+ private void findClassNames(String defaultPackage, Element startNode, Set names) {
+ // if we have some extends we need to check if those classes possibly could be inside the
+ // same hbm.xml file...
+ Iterator[] classes = new Iterator[4];
+ classes[0] = startNode.elementIterator( "class" );
+ classes[1] = startNode.elementIterator( "subclass" );
+ classes[2] = startNode.elementIterator( "joined-subclass" );
+ classes[3] = startNode.elementIterator( "union-subclass" );
- /**
- * Create an object-oriented view of the configuration properties
- */
- public Settings buildSettings() throws HibernateException {
- Properties clone = ( Properties ) properties.clone();
- PropertiesHelper.resolvePlaceHolders( clone );
- return settingsFactory.buildSettings( clone );
- }
+ Iterator classIterator = new JoinedIterator( classes );
+ while ( classIterator.hasNext() ) {
+ Element element = ( Element ) classIterator.next();
+ String entityName = element.attributeValue( "entity-name" );
+ if ( entityName == null ) {
+ entityName = getClassName( element.attribute( "name" ), defaultPackage );
+ }
+ names.add( entityName );
+ findClassNames( defaultPackage, element, names );
+ }
+ }
- public Settings buildSettings(Properties props) throws HibernateException {
- return settingsFactory.buildSettings( props );
- }
+ private String getClassName(Attribute name, String defaultPackage) {
+ if ( name == null ) {
+ return null;
+ }
+ String unqualifiedName = name.getValue();
+ if ( unqualifiedName == null ) {
+ return null;
+ }
+ if ( unqualifiedName.indexOf( '.' ) < 0 && defaultPackage != null ) {
+ return defaultPackage + '.' + unqualifiedName;
+ }
+ return unqualifiedName;
+ }
- public Map getNamedSQLQueries() {
- return namedSqlQueries;
- }
+ public void add(XClass annotatedClass) {
+ annotatedClasses.add( annotatedClass );
+ }
- public Map getSqlResultSetMappings() {
- return sqlResultSetMappings;
- }
+ protected void syncAnnotatedClasses() {
+ final Iterator<XClass> itr = annotatedClasses.iterator();
+ while ( itr.hasNext() ) {
+ final XClass annotatedClass = itr.next();
+ if ( annotatedClass.isAnnotationPresent( Entity.class ) ) {
+ annotatedClassesByEntityNameMap.put( annotatedClass.getName(), annotatedClass );
+ continue;
+ }
- /**
- * @return the NamingStrategy.
- */
- public NamingStrategy getNamingStrategy() {
- return namingStrategy;
- }
+ if ( !annotatedClass.isAnnotationPresent( javax.persistence.MappedSuperclass.class ) ) {
+ itr.remove();
+ }
+ }
+ }
- /**
- * Set a custom naming strategy
- *
- * @param namingStrategy the NamingStrategy to set
- */
- public Configuration setNamingStrategy(NamingStrategy namingStrategy) {
- this.namingStrategy = namingStrategy;
- return this;
- }
+ protected void processMetadata(List<MetadataSourceType> order) {
+ syncAnnotatedClasses();
- public Mapping buildMapping() {
- return new Mapping() {
- /**
- * Returns the identifier type of a mapped class
- */
- public Type getIdentifierType(String persistentClass) throws MappingException {
- PersistentClass pc = ( (PersistentClass) classes.get( persistentClass ) );
- if ( pc == null ) {
- throw new MappingException( "persistent class not known: " + persistentClass );
+ for ( MetadataSourceType type : order ) {
+ if ( MetadataSourceType.HBM.equals( type ) ) {
+ processHbmXmlQueue();
}
- return pc.getIdentifier().getType();
+ else if ( MetadataSourceType.CLASS.equals( type ) ) {
+ processAnnotatedClassesQueue();
+ }
}
+ }
- public String getIdentifierPropertyName(String persistentClass) throws MappingException {
- final PersistentClass pc = (PersistentClass) classes.get( persistentClass );
- if ( pc == null ) {
- throw new MappingException( "persistent class not known: " + persistentClass );
+ private void processHbmXmlQueue() {
+ LOG.debug( "Processing hbm.xml files" );
+ for ( Map.Entry<XmlDocument, Set<String>> entry : hbmMetadataToEntityNamesMap.entrySet() ) {
+ // Unfortunately we have to create a Mappings instance for each iteration here
+ processHbmXml( entry.getKey(), entry.getValue() );
+ }
+ hbmMetadataToEntityNamesMap.clear();
+ hbmMetadataByEntityNameXRef.clear();
+ }
+
+ private void processHbmXml(XmlDocument metadataXml, Set<String> entityNames) {
+ try {
+ HbmBinder.bindRoot( metadataXml, createMappings(), Collections.EMPTY_MAP, entityNames );
+ }
+ catch ( MappingException me ) {
+ throw new InvalidMappingException(
+ metadataXml.getOrigin().getType(),
+ metadataXml.getOrigin().getName(),
+ me
+ );
+ }
+
+ for ( String entityName : entityNames ) {
+ if ( annotatedClassesByEntityNameMap.containsKey( entityName ) ) {
+ annotatedClasses.remove( annotatedClassesByEntityNameMap.get( entityName ) );
+ annotatedClassesByEntityNameMap.remove( entityName );
}
- if ( !pc.hasIdentifierProperty() ) {
- return null;
+ }
+ }
+
+ private void processAnnotatedClassesQueue() {
+ LOG.debug( "Process annotated classes" );
+ //bind classes in the correct order calculating some inheritance state
+ List<XClass> orderedClasses = orderAndFillHierarchy( annotatedClasses );
+ Mappings mappings = createMappings();
+ Map<XClass, InheritanceState> inheritanceStatePerClass = AnnotationBinder.buildInheritanceStates(
+ orderedClasses, mappings
+ );
+
+
+ for ( XClass clazz : orderedClasses ) {
+ AnnotationBinder.bindClass( clazz, inheritanceStatePerClass, mappings );
+
+ final String entityName = clazz.getName();
+ if ( hbmMetadataByEntityNameXRef.containsKey( entityName ) ) {
+ hbmMetadataToEntityNamesMap.remove( hbmMetadataByEntityNameXRef.get( entityName ) );
+ hbmMetadataByEntityNameXRef.remove( entityName );
}
- return pc.getIdentifierProperty().getName();
}
+ annotatedClasses.clear();
+ annotatedClassesByEntityNameMap.clear();
+ }
- public Type getReferencedPropertyType(String persistentClass, String propertyName) throws MappingException {
- final PersistentClass pc = (PersistentClass) classes.get( persistentClass );
- if ( pc == null ) {
- throw new MappingException( "persistent class not known: " + persistentClass );
+ private List<XClass> orderAndFillHierarchy(List<XClass> original) {
+ List<XClass> copy = new ArrayList<XClass>( original );
+ insertMappedSuperclasses( original, copy );
+
+ // order the hierarchy
+ List<XClass> workingCopy = new ArrayList<XClass>( copy );
+ List<XClass> newList = new ArrayList<XClass>( copy.size() );
+ while ( workingCopy.size() > 0 ) {
+ XClass clazz = workingCopy.get( 0 );
+ orderHierarchy( workingCopy, newList, copy, clazz );
+ }
+ return newList;
+ }
+
+ private void insertMappedSuperclasses(List<XClass> original, List<XClass> copy) {
+ for ( XClass clazz : original ) {
+ XClass superClass = clazz.getSuperclass();
+ while ( superClass != null
+ && !reflectionManager.equals( superClass, Object.class )
+ && !copy.contains( superClass ) ) {
+ if ( superClass.isAnnotationPresent( Entity.class )
+ || superClass.isAnnotationPresent( javax.persistence.MappedSuperclass.class ) ) {
+ copy.add( superClass );
+ }
+ superClass = superClass.getSuperclass();
}
- Property prop = pc.getReferencedProperty( propertyName );
- if ( prop == null ) {
- throw new MappingException(
- "property not known: " +
- persistentClass + '.' + propertyName
- );
+ }
+ }
+
+ private void orderHierarchy(List<XClass> copy, List<XClass> newList, List<XClass> original, XClass clazz) {
+ if ( clazz == null || reflectionManager.equals( clazz, Object.class ) ) {
+ return;
+ }
+ //process superclass first
+ orderHierarchy( copy, newList, original, clazz.getSuperclass() );
+ if ( original.contains( clazz ) ) {
+ if ( !newList.contains( clazz ) ) {
+ newList.add( clazz );
}
- return prop.getType();
+ copy.remove( clazz );
}
- };
- }
+ }
- private void readObject(ObjectInputStream ois) throws IOException, ClassNotFoundException {
- ois.defaultReadObject();
- this.mapping = buildMapping();
- xmlHelper = new XMLHelper();
- }
+ public boolean isEmpty() {
+ return hbmMetadataToEntityNamesMap.isEmpty() && annotatedClasses.isEmpty();
+ }
- public Map getFilterDefinitions() {
- return filterDefinitions;
}
- public void addFilterDefinition(FilterDefinition definition) {
- filterDefinitions.put( definition.getFilterName(), definition );
- }
- public void addAuxiliaryDatabaseObject(AuxiliaryDatabaseObject object) {
- auxiliaryDatabaseObjects.add( object );
- }
+ public static final MetadataSourceType[] DEFAULT_ARTEFACT_PROCESSING_ORDER = new MetadataSourceType[] {
+ MetadataSourceType.HBM,
+ MetadataSourceType.CLASS
+ };
- public Map getSqlFunctions() {
- return sqlFunctions;
+ private List<MetadataSourceType> metadataSourcePrecedence;
+
+ private List<MetadataSourceType> determineMetadataSourcePrecedence() {
+ if ( metadataSourcePrecedence.isEmpty()
+ && StringHelper.isNotEmpty( getProperties().getProperty( ARTEFACT_PROCESSING_ORDER ) ) ) {
+ metadataSourcePrecedence = parsePrecedence( getProperties().getProperty( ARTEFACT_PROCESSING_ORDER ) );
+ }
+ if ( metadataSourcePrecedence.isEmpty() ) {
+ metadataSourcePrecedence = Arrays.asList( DEFAULT_ARTEFACT_PROCESSING_ORDER );
+ }
+ metadataSourcePrecedence = Collections.unmodifiableList( metadataSourcePrecedence );
+
+ return metadataSourcePrecedence;
}
- public void addSqlFunction(String functionName, SQLFunction function) {
- sqlFunctions.put( functionName, function );
+ public void setPrecedence(String precedence) {
+ this.metadataSourcePrecedence = parsePrecedence( precedence );
}
- public SessionFactoryObserver getSessionFactoryObserver() {
- return sessionFactoryObserver;
+ private List<MetadataSourceType> parsePrecedence(String s) {
+ if ( StringHelper.isEmpty( s ) ) {
+ return Collections.emptyList();
+ }
+ StringTokenizer precedences = new StringTokenizer( s, ",; ", false );
+ List<MetadataSourceType> tmpPrecedences = new ArrayList<MetadataSourceType>();
+ while ( precedences.hasMoreElements() ) {
+ tmpPrecedences.add( MetadataSourceType.parsePrecedence( ( String ) precedences.nextElement() ) );
+ }
+ return tmpPrecedences;
}
- public void setSessionFactoryObserver(SessionFactoryObserver sessionFactoryObserver) {
- this.sessionFactoryObserver = sessionFactoryObserver;
+ private static class CacheHolder {
+ public CacheHolder(String role, String usage, String region, boolean isClass, boolean cacheLazy) {
+ this.role = role;
+ this.usage = usage;
+ this.region = region;
+ this.isClass = isClass;
+ this.cacheLazy = cacheLazy;
+ }
+
+ public String role;
+ public String usage;
+ public String region;
+ public boolean isClass;
+ public boolean cacheLazy;
}
}
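The serialization changes in this file work around the fact that XClass instances are not serializable: writeObject converts each annotated XClass to its plain java.lang.Class before writing, and readObject rebuilds the transient list through the reflection manager. Below is a minimal, self-contained sketch of the same transient-field plus writeObject/readObject pattern; the ClassWrapper type stands in for XClass and for the reflection manager, and is purely illustrative, not Hibernate API.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.List;

// Hypothetical stand-in for XClass: a non-serializable wrapper around a plain Class.
final class ClassWrapper {
    private final Class<?> target;
    ClassWrapper(Class<?> target) { this.target = target; }
    Class<?> unwrap() { return target; }
}

public class WrapperHolder implements Serializable {
    private static final long serialVersionUID = 1L;

    // The wrappers are not serializable, so the list is transient
    // and rebuilt by readObject() from a serializable surrogate list.
    private transient List<ClassWrapper> wrappers = new ArrayList<ClassWrapper>();

    public void add(Class<?> clazz) {
        wrappers.add( new ClassWrapper( clazz ) );
    }

    public List<ClassWrapper> getWrappers() {
        return wrappers;
    }

    private void writeObject(ObjectOutputStream out) throws IOException {
        out.defaultWriteObject();
        // Convert each wrapper to its serializable java.lang.Class before writing.
        List<Class<?>> plain = new ArrayList<Class<?>>( wrappers.size() );
        for ( ClassWrapper wrapper : wrappers ) {
            plain.add( wrapper.unwrap() );
        }
        out.writeObject( plain );
    }

    @SuppressWarnings("unchecked")
    private void readObject(ObjectInputStream in) throws IOException, ClassNotFoundException {
        in.defaultReadObject();
        // Rebuild the transient wrapper list from the serialized Class list.
        List<Class<?>> plain = (List<Class<?>>) in.readObject();
        wrappers = new ArrayList<ClassWrapper>( plain.size() );
        for ( Class<?> clazz : plain ) {
            wrappers.add( new ClassWrapper( clazz ) );
        }
    }

    public static void main(String[] args) throws Exception {
        WrapperHolder holder = new WrapperHolder();
        holder.add( String.class );

        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        new ObjectOutputStream( bytes ).writeObject( holder );
        WrapperHolder copy = (WrapperHolder) new ObjectInputStream(
                new ByteArrayInputStream( bytes.toByteArray() ) ).readObject();

        System.out.println( copy.getWrappers().get( 0 ).unwrap() ); // class java.lang.String
    }
}

Only the surrogate List of Class objects travels across the stream; the transient modifier keeps the non-serializable wrappers out of default serialization, mirroring how the diff handles annotatedClasses.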
Fisheye: Tag 1.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/cfg/CopyIdentifierComponentSecondPass.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/cfg/CreateKeySecondPass.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/cfg/DefaultComponentSafeNamingStrategy.java'.
Fisheye: No comparison available. Pass `N' to diff?
Index: 3rdParty_sources/hibernate-core/org/hibernate/cfg/DefaultNamingStrategy.java
===================================================================
RCS file: /usr/local/cvsroot/3rdParty_sources/hibernate-core/org/hibernate/cfg/DefaultNamingStrategy.java,v
diff -u -r1.1 -r1.1.2.1
--- 3rdParty_sources/hibernate-core/org/hibernate/cfg/DefaultNamingStrategy.java 17 Aug 2012 14:33:53 -0000 1.1
+++ 3rdParty_sources/hibernate-core/org/hibernate/cfg/DefaultNamingStrategy.java 30 Jul 2014 15:51:04 -0000 1.1.2.1
@@ -1,10 +1,10 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
- * Copyright (c) 2008, Red Hat Middleware LLC or third-party contributors as
+ * Copyright (c) 2010, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
- * distributed under license by Red Hat Middleware LLC.
+ * distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
@@ -20,14 +20,13 @@
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
- *
*/
package org.hibernate.cfg;
import java.io.Serializable;
-import org.hibernate.util.StringHelper;
import org.hibernate.AssertionFailure;
+import org.hibernate.internal.util.StringHelper;
/**
* The default NamingStrategy
@@ -130,4 +129,4 @@
public String logicalCollectionColumnName(String columnName, String propertyName, String referencedColumn) {
return StringHelper.isNotEmpty( columnName ) ? columnName : propertyName + "_" + referencedColumn;
}
-}
\ No newline at end of file
+}
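The only functional content visible in this diff is the naming fallback in the last hunk: an explicit column name wins, otherwise the property name and the referenced column are joined with an underscore. A small usage sketch, assuming the Hibernate jar is on the classpath:

import org.hibernate.cfg.DefaultNamingStrategy;

public class NamingDemo {
    public static void main(String[] args) {
        DefaultNamingStrategy strategy = new DefaultNamingStrategy();

        // An explicit column name wins outright.
        System.out.println( strategy.logicalCollectionColumnName( "ADDR_ID", "address", "id" ) ); // ADDR_ID

        // Otherwise the property name and referenced column are joined with '_'.
        System.out.println( strategy.logicalCollectionColumnName( null, "address", "id" ) );      // address_id
    }
}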
Fisheye: Tag 1.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/cfg/EJB3DTDEntityResolver.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/cfg/EJB3NamingStrategy.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/cfg/Ejb3Column.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/cfg/Ejb3DiscriminatorColumn.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/cfg/Ejb3JoinColumn.java'.
Fisheye: No comparison available. Pass `N' to diff?
Index: 3rdParty_sources/hibernate-core/org/hibernate/cfg/Environment.java
===================================================================
RCS file: /usr/local/cvsroot/3rdParty_sources/hibernate-core/org/hibernate/cfg/Environment.java,v
diff -u -r1.1 -r1.1.2.1
--- 3rdParty_sources/hibernate-core/org/hibernate/cfg/Environment.java 17 Aug 2012 14:33:53 -0000 1.1
+++ 3rdParty_sources/hibernate-core/org/hibernate/cfg/Environment.java 30 Jul 2014 15:51:06 -0000 1.1.2.1
@@ -1,10 +1,10 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
- * Copyright (c) 2008, Red Hat Middleware LLC or third-party contributors as
+ * Copyright (c) 2010, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
- * distributed under license by Red Hat Middleware LLC.
+ * distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
@@ -20,29 +20,28 @@
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
- *
*/
package org.hibernate.cfg;
import java.io.IOException;
import java.io.InputStream;
import java.sql.Connection;
-import java.sql.Statement;
import java.sql.Timestamp;
+import java.util.Collections;
import java.util.HashMap;
-import java.util.Iterator;
import java.util.Map;
import java.util.Properties;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
import org.hibernate.HibernateException;
-import org.hibernate.bytecode.BytecodeProvider;
-import org.hibernate.util.ConfigHelper;
-import org.hibernate.util.PropertiesHelper;
+import org.hibernate.Version;
+import org.hibernate.bytecode.spi.BytecodeProvider;
+import org.hibernate.internal.CoreMessageLogger;
+import org.hibernate.internal.util.ConfigHelper;
+import org.hibernate.internal.util.config.ConfigurationHelper;
+import org.jboss.logging.Logger;
+
/**
* Provides access to configuration info passed in Properties objects.
*
@@ -67,7 +66,7 @@
* Properties may be either be System properties, properties
* defined in a resource named /hibernate.properties or an instance of
* java.util.Properties passed to
- * Configuration.buildSessionFactory()
+ * Configuration.build()
*
*
* property | meaning |
@@ -76,13 +75,8 @@
* classname of org.hibernate.dialect.Dialect subclass |
*
*
- * hibernate.cache.provider_class |
- * classname of org.hibernate.cache.CacheProvider
- * subclass (if not specified EHCache is used) |
- *
- *
* hibernate.connection.provider_class |
- * classname of org.hibernate.connection.ConnectionProvider
+ * | classname of ConnectionProvider
* subclass (if not specified hueristics are used) |
*
* hibernate.connection.username | database username |
@@ -157,14 +151,14 @@
* Session (de)serialization.
*
*
- * hibernate.transaction.manager_lookup_class |
- * classname of org.hibernate.transaction.TransactionManagerLookup
+ * | hibernate.transaction.jta.platform |
+ * classname of org.hibernate.engine.transaction.jta.platform.spi.JtaPlatform
* implementor |
*
*
* hibernate.transaction.factory_class |
* the factory to use for instantiating Transactions.
- * (Defaults to JDBCTransactionFactory.) |
+ * (Defaults to JdbcTransactionFactory.)
*
*
* hibernate.query.substitutions | query language token substitutions |
@@ -174,513 +168,156 @@
* @see org.hibernate.SessionFactory
* @author Gavin King
*/
-public final class Environment {
+public final class Environment implements AvailableSettings {
+ private static final CoreMessageLogger LOG = Logger.getMessageLogger(CoreMessageLogger.class, Environment.class.getName());
- public static final String VERSION = "3.3.1.GA";
-
- /**
- * ConnectionProvider implementor to use when obtaining connections
- */
- public static final String CONNECTION_PROVIDER ="hibernate.connection.provider_class";
- /**
- * JDBC driver class
- */
- public static final String DRIVER ="hibernate.connection.driver_class";
- /**
- * JDBC transaction isolation level
- */
- public static final String ISOLATION ="hibernate.connection.isolation";
- /**
- * JDBC URL
- */
- public static final String URL ="hibernate.connection.url";
- /**
- * JDBC user
- */
- public static final String USER ="hibernate.connection.username";
- /**
- * JDBC password
- */
- public static final String PASS ="hibernate.connection.password";
- /**
- * JDBC autocommit mode
- */
- public static final String AUTOCOMMIT ="hibernate.connection.autocommit";
- /**
- * Maximum number of inactive connections for Hibernate's connection pool
- */
- public static final String POOL_SIZE ="hibernate.connection.pool_size";
- /**
- * java.sql.Datasource JNDI name
- */
- public static final String DATASOURCE ="hibernate.connection.datasource";
- /**
- * prefix for arbitrary JDBC connection properties
- */
- public static final String CONNECTION_PREFIX = "hibernate.connection";
-
- /**
- * JNDI initial context class, Context.INITIAL_CONTEXT_FACTORY
- */
- public static final String JNDI_CLASS ="hibernate.jndi.class";
- /**
- * JNDI provider URL, Context.PROVIDER_URL
- */
- public static final String JNDI_URL ="hibernate.jndi.url";
- /**
- * prefix for arbitrary JNDI InitialContext properties
- */
- public static final String JNDI_PREFIX = "hibernate.jndi";
- /**
- * JNDI name to bind to SessionFactory
- */
- public static final String SESSION_FACTORY_NAME = "hibernate.session_factory_name";
-
- /**
- * Hibernate SQL Dialect class
- */
- public static final String DIALECT ="hibernate.dialect";
- /**
- * A default database schema (owner) name to use for unqualified tablenames
- */
- public static final String DEFAULT_SCHEMA = "hibernate.default_schema";
- /**
- * A default database catalog name to use for unqualified tablenames
- */
- public static final String DEFAULT_CATALOG = "hibernate.default_catalog";
-
- /**
- * Enable logging of generated SQL to the console
- */
- public static final String SHOW_SQL ="hibernate.show_sql";
- /**
- * Enable formatting of SQL logged to the console
- */
- public static final String FORMAT_SQL ="hibernate.format_sql";
- /**
- * Add comments to the generated SQL
- */
- public static final String USE_SQL_COMMENTS ="hibernate.use_sql_comments";
- /**
- * Maximum depth of outer join fetching
- */
- public static final String MAX_FETCH_DEPTH = "hibernate.max_fetch_depth";
- /**
- * The default batch size for batch fetching
- */
- public static final String DEFAULT_BATCH_FETCH_SIZE = "hibernate.default_batch_fetch_size";
- /**
- * Use java.io streams to read / write binary data from / to JDBC
- */
- public static final String USE_STREAMS_FOR_BINARY = "hibernate.jdbc.use_streams_for_binary";
- /**
- * Use JDBC scrollable ResultSets. This property is only necessary when there is
- * no ConnectionProvider, ie. the user is supplying JDBC connections.
- */
- public static final String USE_SCROLLABLE_RESULTSET = "hibernate.jdbc.use_scrollable_resultset";
- /**
- * Tells the JDBC driver to attempt to retrieve row Id with the JDBC 3.0 PreparedStatement.getGeneratedKeys()
- * method. In general, performance will be better if this property is set to true and the underlying
- * JDBC driver supports getGeneratedKeys().
- */
- public static final String USE_GET_GENERATED_KEYS = "hibernate.jdbc.use_get_generated_keys";
- /**
- * Gives the JDBC driver a hint as to the number of rows that should be fetched from the database
- * when more rows are needed. If 0, JDBC driver default settings will be used.
- */
- public static final String STATEMENT_FETCH_SIZE = "hibernate.jdbc.fetch_size";
- /**
- * Maximum JDBC batch size. A nonzero value enables batch updates.
- */
- public static final String STATEMENT_BATCH_SIZE = "hibernate.jdbc.batch_size";
- /**
- * Select a custom batcher.
- */
- public static final String BATCH_STRATEGY = "hibernate.jdbc.factory_class";
- /**
- * Should versioned data be included in batching?
- */
- public static final String BATCH_VERSIONED_DATA = "hibernate.jdbc.batch_versioned_data";
- /**
- * An XSLT resource used to generate "custom" XML
- */
- public static final String OUTPUT_STYLESHEET ="hibernate.xml.output_stylesheet";
-
- /**
- * Maximum size of C3P0 connection pool
- */
- public static final String C3P0_MAX_SIZE = "hibernate.c3p0.max_size";
- /**
- * Minimum size of C3P0 connection pool
- */
- public static final String C3P0_MIN_SIZE = "hibernate.c3p0.min_size";
-
- /**
- * Maximum idle time for C3P0 connection pool
- */
- public static final String C3P0_TIMEOUT = "hibernate.c3p0.timeout";
- /**
- * Maximum size of C3P0 statement cache
- */
- public static final String C3P0_MAX_STATEMENTS = "hibernate.c3p0.max_statements";
- /**
- * Number of connections acquired when pool is exhausted
- */
- public static final String C3P0_ACQUIRE_INCREMENT = "hibernate.c3p0.acquire_increment";
- /**
- * Idle time before a C3P0 pooled connection is validated
- */
- public static final String C3P0_IDLE_TEST_PERIOD = "hibernate.c3p0.idle_test_period";
-
- /**
- * Proxool/Hibernate property prefix
- */
- public static final String PROXOOL_PREFIX = "hibernate.proxool";
- /**
- * Proxool property to configure the Proxool Provider using an XML (/path/to/file.xml)
- */
- public static final String PROXOOL_XML = "hibernate.proxool.xml";
- /**
- * Proxool property to configure the Proxool Provider using a properties file (/path/to/proxool.properties)
- */
- public static final String PROXOOL_PROPERTIES = "hibernate.proxool.properties";
- /**
- * Proxool property to configure the Proxool Provider from an already existing pool (true / false)
- */
- public static final String PROXOOL_EXISTING_POOL = "hibernate.proxool.existing_pool";
- /**
- * Proxool property with the Proxool pool alias to use
- * (Required for PROXOOL_EXISTING_POOL, PROXOOL_PROPERTIES, or
- * PROXOOL_XML)
- */
- public static final String PROXOOL_POOL_ALIAS = "hibernate.proxool.pool_alias";
-
- /**
- * Enable automatic session close at end of transaction
- */
- public static final String AUTO_CLOSE_SESSION = "hibernate.transaction.auto_close_session";
- /**
- * Enable automatic flush during the JTA beforeCompletion() callback
- */
- public static final String FLUSH_BEFORE_COMPLETION = "hibernate.transaction.flush_before_completion";
- /**
- * Specifies how Hibernate should release JDBC connections.
- */
- public static final String RELEASE_CONNECTIONS = "hibernate.connection.release_mode";
- /**
- * Context scoping impl for {@link org.hibernate.SessionFactory#getCurrentSession()} processing.
- */
- public static final String CURRENT_SESSION_CONTEXT_CLASS = "hibernate.current_session_context_class";
- /**
- * TransactionFactory implementor to use for creating Transactions
- */
- public static final String TRANSACTION_STRATEGY = "hibernate.transaction.factory_class";
- /**
- * TransactionManagerLookup implementor to use for obtaining the TransactionManager
- */
- public static final String TRANSACTION_MANAGER_STRATEGY = "hibernate.transaction.manager_lookup_class";
- /**
- * JNDI name of JTA UserTransaction object
- */
- public static final String USER_TRANSACTION = "jta.UserTransaction";
-
- /**
- * The CacheProvider implementation class
- */
- public static final String CACHE_PROVIDER = "hibernate.cache.provider_class";
-
- /**
- * The {@link org.hibernate.cache.RegionFactory} implementation class
- */
- public static final String CACHE_REGION_FACTORY = "hibernate.cache.region.factory_class";
-
- /**
- * The CacheProvider implementation class
- */
- public static final String CACHE_PROVIDER_CONFIG = "hibernate.cache.provider_configuration_file_resource_path";
- /**
- * The CacheProvider JNDI namespace, if pre-bound to JNDI.
- */
- public static final String CACHE_NAMESPACE = "hibernate.cache.jndi";
- /**
- * Enable the query cache (disabled by default)
- */
- public static final String USE_QUERY_CACHE = "hibernate.cache.use_query_cache";
- /**
- * The QueryCacheFactory implementation class.
- */
- public static final String QUERY_CACHE_FACTORY = "hibernate.cache.query_cache_factory";
- /**
- * Enable the second-level cache (enabled by default)
- */
- public static final String USE_SECOND_LEVEL_CACHE = "hibernate.cache.use_second_level_cache";
- /**
- * Optimize the cache for mimimal puts instead of minimal gets
- */
- public static final String USE_MINIMAL_PUTS = "hibernate.cache.use_minimal_puts";
- /**
- * The CacheProvider region name prefix
- */
- public static final String CACHE_REGION_PREFIX = "hibernate.cache.region_prefix";
- /**
- * Enable use of structured second-level cache entries
- */
- public static final String USE_STRUCTURED_CACHE = "hibernate.cache.use_structured_entries";
-
- /**
- * Enable statistics collection
- */
- public static final String GENERATE_STATISTICS = "hibernate.generate_statistics";
-
- public static final String USE_IDENTIFIER_ROLLBACK = "hibernate.use_identifier_rollback";
-
- /**
- * Use bytecode libraries optimized property access
- */
- public static final String USE_REFLECTION_OPTIMIZER = "hibernate.bytecode.use_reflection_optimizer";
-
- /**
- * The classname of the HQL query parser factory
- */
- public static final String QUERY_TRANSLATOR = "hibernate.query.factory_class";
-
- /**
- * A comma-seperated list of token substitutions to use when translating a Hibernate
- * query to SQL
- */
- public static final String QUERY_SUBSTITUTIONS = "hibernate.query.substitutions";
-
- /**
- * Should named queries be checked during startup (the default is enabled).
- *
- * Mainly intended for test environments.
- */
- public static final String QUERY_STARTUP_CHECKING = "hibernate.query.startup_check";
-
- /**
- * Auto export/update schema using hbm2ddl tool. Valid values are update,
- * create, create-drop and validate.
- */
- public static final String HBM2DDL_AUTO = "hibernate.hbm2ddl.auto";
-
- /**
- * The {@link org.hibernate.exception.SQLExceptionConverter} to use for converting SQLExceptions
- * to Hibernate's JDBCException hierarchy. The default is to use the configured
- * {@link org.hibernate.dialect.Dialect}'s preferred SQLExceptionConverter.
- */
- public static final String SQL_EXCEPTION_CONVERTER = "hibernate.jdbc.sql_exception_converter";
-
- /**
- * Enable wrapping of JDBC result sets in order to speed up column name lookups for
- * broken JDBC drivers
- */
- public static final String WRAP_RESULT_SETS = "hibernate.jdbc.wrap_result_sets";
-
- /**
- * Enable ordering of update statements by primary key value
- */
- public static final String ORDER_UPDATES = "hibernate.order_updates";
-
- /**
- * Enable ordering of insert statements for the purpose of more effecient JDBC batching.
- */
- public static final String ORDER_INSERTS = "hibernate.order_inserts";
-
- /**
- * The EntityMode in which set the Session opened from the SessionFactory.
- */
- public static final String DEFAULT_ENTITY_MODE = "hibernate.default_entity_mode";
-
- /**
- * The jacc context id of the deployment
- */
- public static final String JACC_CONTEXTID = "hibernate.jacc_context_id";
-
- public static final String BYTECODE_PROVIDER = "hibernate.bytecode.provider";
-
- public static final String JPAQL_STRICT_COMPLIANCE= "hibernate.query.jpaql_strict_compliance";
-
private static final BytecodeProvider BYTECODE_PROVIDER_INSTANCE;
private static final boolean ENABLE_BINARY_STREAMS;
private static final boolean ENABLE_REFLECTION_OPTIMIZER;
- private static final boolean JVM_SUPPORTS_LINKED_HASH_COLLECTIONS;
private static final boolean JVM_HAS_TIMESTAMP_BUG;
- private static final boolean JVM_HAS_JDK14_TIMESTAMP;
- private static final boolean JVM_SUPPORTS_GET_GENERATED_KEYS;
private static final Properties GLOBAL_PROPERTIES;
- private static final HashMap ISOLATION_LEVELS = new HashMap();
+ private static final Map<Integer,String> ISOLATION_LEVELS;
+
private static final Map OBSOLETE_PROPERTIES = new HashMap();
private static final Map RENAMED_PROPERTIES = new HashMap();
- private static final Logger log = LoggerFactory.getLogger(Environment.class);
-
/**
- * Issues warnings to the user when any obsolete property names are used.
+ * Issues warnings to the user when any obsolete or renamed property names are used.
+ *
+ * @param configurationValues The specified properties.
*/
- public static void verifyProperties(Properties props) {
- Iterator iter = props.keySet().iterator();
- Map propertiesToAdd = new HashMap();
- while ( iter.hasNext() ) {
- final Object propertyName = iter.next();
- Object newPropertyName = OBSOLETE_PROPERTIES.get( propertyName );
- if ( newPropertyName != null ) {
- log.warn( "Usage of obsolete property: " + propertyName + " no longer supported, use: " + newPropertyName );
+ public static void verifyProperties(Map<?,?> configurationValues) {
+ final Map propertiesToAdd = new HashMap();
+ for ( Map.Entry entry : configurationValues.entrySet() ) {
+ final Object replacementKey = OBSOLETE_PROPERTIES.get( entry.getKey() );
+ if ( replacementKey != null ) {
+ LOG.unsupportedProperty( entry.getKey(), replacementKey );
}
- newPropertyName = RENAMED_PROPERTIES.get( propertyName );
- if ( newPropertyName != null ) {
- log.warn( "Property [" + propertyName + "] has been renamed to [" + newPropertyName + "]; update your properties appropriately" );
- if ( ! props.containsKey( newPropertyName ) ) {
- propertiesToAdd.put( newPropertyName, props.get( propertyName ) );
- }
+ final Object renamedKey = RENAMED_PROPERTIES.get( entry.getKey() );
+ if ( renamedKey != null ) {
+ LOG.renamedProperty( entry.getKey(), renamedKey );
+ propertiesToAdd.put( renamedKey, entry.getValue() );
}
}
- props.putAll(propertiesToAdd);
+ configurationValues.putAll( propertiesToAdd );
}
static {
+ Version.logVersion();
- log.info("Hibernate " + VERSION);
-
- RENAMED_PROPERTIES.put( "hibernate.cglib.use_reflection_optimizer", USE_REFLECTION_OPTIMIZER );
-
- ISOLATION_LEVELS.put( new Integer(Connection.TRANSACTION_NONE), "NONE" );
- ISOLATION_LEVELS.put( new Integer(Connection.TRANSACTION_READ_UNCOMMITTED), "READ_UNCOMMITTED" );
- ISOLATION_LEVELS.put( new Integer(Connection.TRANSACTION_READ_COMMITTED), "READ_COMMITTED" );
- ISOLATION_LEVELS.put( new Integer(Connection.TRANSACTION_REPEATABLE_READ), "REPEATABLE_READ" );
- ISOLATION_LEVELS.put( new Integer(Connection.TRANSACTION_SERIALIZABLE), "SERIALIZABLE" );
-
+ Map<Integer,String> temp = new HashMap<Integer,String>();
+ temp.put( Connection.TRANSACTION_NONE, "NONE" );
+ temp.put( Connection.TRANSACTION_READ_UNCOMMITTED, "READ_UNCOMMITTED" );
+ temp.put( Connection.TRANSACTION_READ_COMMITTED, "READ_COMMITTED" );
+ temp.put( Connection.TRANSACTION_REPEATABLE_READ, "REPEATABLE_READ" );
+ temp.put( Connection.TRANSACTION_SERIALIZABLE, "SERIALIZABLE" );
+ ISOLATION_LEVELS = Collections.unmodifiableMap( temp );
GLOBAL_PROPERTIES = new Properties();
//Set USE_REFLECTION_OPTIMIZER to false to fix HHH-227
GLOBAL_PROPERTIES.setProperty( USE_REFLECTION_OPTIMIZER, Boolean.FALSE.toString() );
try {
- InputStream stream = ConfigHelper.getResourceAsStream("/hibernate.properties");
+ InputStream stream = ConfigHelper.getResourceAsStream( "/hibernate.properties" );
try {
GLOBAL_PROPERTIES.load(stream);
- log.info( "loaded properties from resource hibernate.properties: " + PropertiesHelper.maskOut(GLOBAL_PROPERTIES, PASS) );
+ LOG.propertiesLoaded( ConfigurationHelper.maskOut( GLOBAL_PROPERTIES, PASS ) );
}
catch (Exception e) {
- log.error("problem loading properties from hibernate.properties");
+ LOG.unableToLoadProperties();
}
finally {
try{
stream.close();
}
catch (IOException ioe){
- log.error("could not close stream on hibernate.properties", ioe);
+ LOG.unableToCloseStreamError( ioe );
}
}
}
catch (HibernateException he) {
- log.info("hibernate.properties not found");
+ LOG.propertiesNotFound();
}
try {
- GLOBAL_PROPERTIES.putAll( System.getProperties() );
+ Properties systemProperties = System.getProperties();
+ // Must be thread-safe in case an application changes System properties during Hibernate initialization.
+ // See HHH-8383.
+ synchronized (systemProperties) {
+ GLOBAL_PROPERTIES.putAll(systemProperties);
+ }
+ } catch (SecurityException se) {
+ LOG.unableToCopySystemProperties();
}
- catch (SecurityException se) {
- log.warn("could not copy system properties, system properties will be ignored");
- }
verifyProperties(GLOBAL_PROPERTIES);
- ENABLE_BINARY_STREAMS = PropertiesHelper.getBoolean(USE_STREAMS_FOR_BINARY, GLOBAL_PROPERTIES);
- ENABLE_REFLECTION_OPTIMIZER = PropertiesHelper.getBoolean(USE_REFLECTION_OPTIMIZER, GLOBAL_PROPERTIES);
-
- if (ENABLE_BINARY_STREAMS) {
- log.info("using java.io streams to persist binary types");
+ ENABLE_BINARY_STREAMS = ConfigurationHelper.getBoolean(USE_STREAMS_FOR_BINARY, GLOBAL_PROPERTIES);
+ if ( ENABLE_BINARY_STREAMS ) {
+ LOG.usingStreams();
}
- if (ENABLE_REFLECTION_OPTIMIZER) {
- log.info("using bytecode reflection optimizer");
- }
- BYTECODE_PROVIDER_INSTANCE = buildBytecodeProvider( GLOBAL_PROPERTIES );
- boolean getGeneratedKeysSupport;
- try {
- Statement.class.getMethod("getGeneratedKeys", null);
- getGeneratedKeysSupport = true;
+ ENABLE_REFLECTION_OPTIMIZER = ConfigurationHelper.getBoolean(USE_REFLECTION_OPTIMIZER, GLOBAL_PROPERTIES);
+ if ( ENABLE_REFLECTION_OPTIMIZER ) {
+ LOG.usingReflectionOptimizer();
}
- catch (NoSuchMethodException nsme) {
- getGeneratedKeysSupport = false;
- }
- JVM_SUPPORTS_GET_GENERATED_KEYS = getGeneratedKeysSupport;
- if (!JVM_SUPPORTS_GET_GENERATED_KEYS) log.info("JVM does not support Statement.getGeneratedKeys()");
- boolean linkedHashSupport;
- try {
- Class.forName("java.util.LinkedHashSet");
- linkedHashSupport = true;
- }
- catch (ClassNotFoundException cnfe) {
- linkedHashSupport = false;
- }
- JVM_SUPPORTS_LINKED_HASH_COLLECTIONS = linkedHashSupport;
- if (!JVM_SUPPORTS_LINKED_HASH_COLLECTIONS) log.info("JVM does not support LinkedHasMap, LinkedHashSet - ordered maps and sets disabled");
+ BYTECODE_PROVIDER_INSTANCE = buildBytecodeProvider( GLOBAL_PROPERTIES );
- JVM_HAS_TIMESTAMP_BUG = new Timestamp(123456789).getTime() != 123456789;
- if (JVM_HAS_TIMESTAMP_BUG) log.info("using workaround for JVM bug in java.sql.Timestamp");
- Timestamp t = new Timestamp(0);
- t.setNanos(5 * 1000000);
- JVM_HAS_JDK14_TIMESTAMP = t.getTime() == 5;
- if (JVM_HAS_JDK14_TIMESTAMP) {
- log.info("using JDK 1.4 java.sql.Timestamp handling");
+ long x = 123456789;
+ JVM_HAS_TIMESTAMP_BUG = new Timestamp(x).getTime() != x;
+ if ( JVM_HAS_TIMESTAMP_BUG ) {
+ LOG.usingTimestampWorkaround();
}
- else {
- log.info("using pre JDK 1.4 java.sql.Timestamp handling");
- }
}
public static BytecodeProvider getBytecodeProvider() {
return BYTECODE_PROVIDER_INSTANCE;
}
/**
- * Does this JVM have the IBM JDK 1.3.1. The bug is new Timestamp(x).getTime()!=x.
+ * Does this JVM's implementation of {@link java.sql.Timestamp} have a bug in which the following is true:
+ * new java.sql.Timestamp( x ).getTime() != x
+ *
+ *
+ * NOTE : IBM JDK 1.3.1 the only known JVM to exhibit this behavior.
+ *
+ * @return True if the JVM's {@link Timestamp} implementa
*/
public static boolean jvmHasTimestampBug() {
return JVM_HAS_TIMESTAMP_BUG;
}
/**
- * Does this JVM handle Timestamp in the JDK 1.4 compliant way?
+ * Should we use streams to bind binary types to JDBC IN parameters?
+ *
+ * @return True if streams should be used for binary data handling; false otherwise.
+ *
+ * @see #USE_STREAMS_FOR_BINARY
*/
- public static boolean jvmHasJDK14Timestamp() {
- return JVM_HAS_JDK14_TIMESTAMP;
- }
-
- /**
- * Does this JVM support LinkedHashSet, LinkedHashMap.
- * @see java.util.LinkedHashSet
- * @see java.util.LinkedHashMap
- */
- public static boolean jvmSupportsLinkedHashCollections() {
- return JVM_SUPPORTS_LINKED_HASH_COLLECTIONS;
- }
-
- public static boolean jvmSupportsGetGeneratedKeys() {
- return JVM_SUPPORTS_GET_GENERATED_KEYS;
- }
-
- /**
- * Should we use streams to bind binary types to JDBC IN parameters.
- * Property hibernate.jdbc.use_streams_for_binary.
- * @see Environment#USE_STREAMS_FOR_BINARY
- */
public static boolean useStreamsForBinary() {
return ENABLE_BINARY_STREAMS;
}
/**
- * Should we use CGLIB reflection optimizer.
- * Property hibernate.jdbc.use_refection_optimizer.
- * @see Environment#USE_REFLECTION_OPTIMIZER
+ * Should we use reflection optimization?
+ *
+ * @return True if reflection optimization should be used; false otherwise.
+ *
+ * @see #USE_REFLECTION_OPTIMIZER
+ * @see #getBytecodeProvider()
+ * @see BytecodeProvider#getReflectionOptimizer
*/
public static boolean useReflectionOptimizer() {
return ENABLE_REFLECTION_OPTIMIZER;
}
- private Environment() { throw new UnsupportedOperationException(); }
+ /**
+ * Disallow instantiation
+ */
+ private Environment() {
+ throw new UnsupportedOperationException();
+ }
/**
* Return System properties, extended by any properties specified
@@ -701,25 +338,21 @@
* @return a human-readable name
*/
public static String isolationLevelToString(int isolation) {
- return (String) ISOLATION_LEVELS.get( new Integer(isolation) );
+ return ISOLATION_LEVELS.get( isolation );
}
public static BytecodeProvider buildBytecodeProvider(Properties properties) {
- String provider = PropertiesHelper.getString( BYTECODE_PROVIDER, properties, "javassist" );
- log.info( "Bytecode provider name : " + provider );
+ String provider = ConfigurationHelper.getString( BYTECODE_PROVIDER, properties, "javassist" );
+ LOG.bytecodeProvider( provider );
return buildBytecodeProvider( provider );
}
private static BytecodeProvider buildBytecodeProvider(String providerName) {
if ( "javassist".equals( providerName ) ) {
- return new org.hibernate.bytecode.javassist.BytecodeProviderImpl();
+ return new org.hibernate.bytecode.internal.javassist.BytecodeProviderImpl();
}
- else if ( "cglib".equals( providerName ) ) {
- return new org.hibernate.bytecode.cglib.BytecodeProviderImpl();
- }
- log.warn( "unrecognized bytecode provider [" + providerName + "], using javassist by default" );
- return new org.hibernate.bytecode.javassist.BytecodeProviderImpl();
+ LOG.unknownBytecodeProvider( providerName );
+ return new org.hibernate.bytecode.internal.javassist.BytecodeProviderImpl();
}
-
}
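The reworked verifyProperties above walks the configuration map once, warns about obsolete keys, and copies values from renamed keys to their new names via a side map, so the map is never modified while it is being iterated. A standalone sketch of that renamed-key handling follows; the single rename entry mirrors the hibernate.cglib.use_reflection_optimizer mapping removed from the static initializer, and the sketch prints to System.out where Hibernate itself reports through its CoreMessageLogger.

import java.util.Collections;
import java.util.HashMap;
import java.util.Map;

public class PropertyRenameCheck {
    // One illustrative entry in the style of Hibernate's RENAMED_PROPERTIES table.
    private static final Map<String, String> RENAMED_PROPERTIES;
    static {
        Map<String, String> temp = new HashMap<String, String>();
        temp.put(
                "hibernate.cglib.use_reflection_optimizer",
                "hibernate.bytecode.use_reflection_optimizer"
        );
        RENAMED_PROPERTIES = Collections.unmodifiableMap( temp );
    }

    public static void verify(Map<Object, Object> configurationValues) {
        // Collect additions separately so the map is not modified while iterating it.
        Map<Object, Object> propertiesToAdd = new HashMap<Object, Object>();
        for ( Map.Entry<Object, Object> entry : configurationValues.entrySet() ) {
            String renamedKey = RENAMED_PROPERTIES.get( entry.getKey() );
            if ( renamedKey != null ) {
                System.out.println( "Property [" + entry.getKey() + "] has been renamed to [" + renamedKey + "]" );
                propertiesToAdd.put( renamedKey, entry.getValue() );
            }
        }
        configurationValues.putAll( propertiesToAdd );
    }

    public static void main(String[] args) {
        Map<Object, Object> props = new HashMap<Object, Object>();
        props.put( "hibernate.cglib.use_reflection_optimizer", "true" );
        verify( props );
        System.out.println( props.get( "hibernate.bytecode.use_reflection_optimizer" ) ); // true
    }
}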
Fisheye: Tag 1.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/cfg/ExtendedMappings.java'.
Fisheye: No comparison available. Pass `N' to diff?
Index: 3rdParty_sources/hibernate-core/org/hibernate/cfg/ExtendsQueueEntry.java
===================================================================
RCS file: /usr/local/cvsroot/3rdParty_sources/hibernate-core/org/hibernate/cfg/ExtendsQueueEntry.java,v
diff -u -r1.1 -r1.1.2.1
--- 3rdParty_sources/hibernate-core/org/hibernate/cfg/ExtendsQueueEntry.java 17 Aug 2012 14:33:53 -0000 1.1
+++ 3rdParty_sources/hibernate-core/org/hibernate/cfg/ExtendsQueueEntry.java 30 Jul 2014 15:51:05 -0000 1.1.2.1
@@ -1,10 +1,10 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
- * Copyright (c) 2008, Red Hat Middleware LLC or third-party contributors as
+ * Copyright (c) 2010, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
- * distributed under license by Red Hat Middleware LLC.
+ * distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
@@ -20,12 +20,13 @@
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
- *
*/
package org.hibernate.cfg;
-import org.dom4j.Document;
+import java.util.Set;
+import org.hibernate.internal.util.xml.XmlDocument;
+
/**
* Represents a mapping queued for delayed processing to await
* processing of an extends entity upon which it depends.
@@ -35,12 +36,14 @@
public class ExtendsQueueEntry {
private final String explicitName;
private final String mappingPackage;
- private final Document document;
+ private final XmlDocument metadataXml;
+ private final Set<String> entityNames;
- public ExtendsQueueEntry(String explicitName, String mappingPackage, Document document) {
+ public ExtendsQueueEntry(String explicitName, String mappingPackage, XmlDocument metadataXml, Set<String> entityNames) {
this.explicitName = explicitName;
this.mappingPackage = mappingPackage;
- this.document = document;
+ this.metadataXml = metadataXml;
+ this.entityNames = entityNames;
}
public String getExplicitName() {
@@ -51,7 +54,11 @@
return mappingPackage;
}
- public Document getDocument() {
- return document;
+ public XmlDocument getMetadataXml() {
+ return metadataXml;
}
+
+ public Set<String> getEntityNames() {
+ return entityNames;
+ }
}
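ExtendsQueueEntry now carries the XmlDocument plus the set of entity names it declares, so a mapping whose extends target has not been bound yet can be parked and re-processed once that target becomes available. A rough illustration of that defer-and-retry idea, with purely illustrative names rather than Hibernate API:

import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Queue;
import java.util.Set;

public class ExtendsQueueDemo {
    // Illustrative counterpart of a queued entry: which mapping is waiting, and on what.
    static final class QueuedMapping {
        final String entityName;
        final String extendsEntityName;
        QueuedMapping(String entityName, String extendsEntityName) {
            this.entityName = entityName;
            this.extendsEntityName = extendsEntityName;
        }
    }

    public static void main(String[] args) {
        Set<String> boundEntities = new HashSet<String>();
        Queue<QueuedMapping> extendsQueue = new ArrayDeque<QueuedMapping>();

        // "Dog extends Animal", but Animal has not been bound yet, so the mapping is parked.
        extendsQueue.add( new QueuedMapping( "Dog", "Animal" ) );

        // Later on, Animal gets bound ...
        boundEntities.add( "Animal" );

        // ... and the queue is drained for entries whose dependency is now satisfied.
        List<QueuedMapping> ready = new ArrayList<QueuedMapping>();
        for ( QueuedMapping queued : extendsQueue ) {
            if ( boundEntities.contains( queued.extendsEntityName ) ) {
                ready.add( queued );
            }
        }
        for ( QueuedMapping queued : ready ) {
            extendsQueue.remove( queued );
            boundEntities.add( queued.entityName );
            System.out.println( "Bound deferred mapping: " + queued.entityName );
        }
    }
}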
Fisheye: Tag 1.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/cfg/ExternalSessionFactoryConfig.java'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag 1.1 refers to a dead (removed) revision in file `3rdParty_sources/hibernate-core/org/hibernate/cfg/FkSecondPass.java'.
Fisheye: No comparison available. Pass `N' to diff?
Index: 3rdParty_sources/hibernate-core/org/hibernate/cfg/HbmBinder.java
===================================================================
RCS file: /usr/local/cvsroot/3rdParty_sources/hibernate-core/org/hibernate/cfg/HbmBinder.java,v
diff -u -r1.1 -r1.1.2.1
--- 3rdParty_sources/hibernate-core/org/hibernate/cfg/HbmBinder.java 17 Aug 2012 14:33:53 -0000 1.1
+++ 3rdParty_sources/hibernate-core/org/hibernate/cfg/HbmBinder.java 30 Jul 2014 15:51:05 -0000 1.1.2.1
@@ -1,10 +1,10 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
- * Copyright (c) 2008, Red Hat Middleware LLC or third-party contributors as
+ * Copyright (c) 2010, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
- * distributed under license by Red Hat Middleware LLC.
+ * distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
@@ -20,33 +20,33 @@
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
- *
*/
package org.hibernate.cfg;
import java.util.ArrayList;
-import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Properties;
import java.util.StringTokenizer;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.dom4j.Attribute;
-import org.dom4j.Document;
-import org.dom4j.Element;
import org.hibernate.CacheMode;
import org.hibernate.EntityMode;
import org.hibernate.FetchMode;
import org.hibernate.FlushMode;
import org.hibernate.MappingException;
-import org.hibernate.engine.FilterDefinition;
-import org.hibernate.engine.NamedQueryDefinition;
-import org.hibernate.engine.Versioning;
-import org.hibernate.engine.ExecuteUpdateResultCheckStyle;
+import org.hibernate.engine.OptimisticLockStyle;
+import org.hibernate.engine.spi.ExecuteUpdateResultCheckStyle;
+import org.hibernate.engine.spi.FilterDefinition;
+import org.hibernate.engine.spi.NamedQueryDefinition;
+import org.hibernate.engine.spi.NamedQueryDefinitionBuilder;
import org.hibernate.id.PersistentIdentifierGenerator;
+import org.hibernate.internal.CoreMessageLogger;
+import org.hibernate.internal.util.ReflectHelper;
+import org.hibernate.internal.util.StringHelper;
+import org.hibernate.internal.util.collections.JoinedIterator;
+import org.hibernate.internal.util.xml.XmlDocument;
+import org.hibernate.loader.PropertyPath;
import org.hibernate.mapping.Any;
import org.hibernate.mapping.Array;
import org.hibernate.mapping.AuxiliaryDatabaseObject;
@@ -55,7 +55,9 @@
import org.hibernate.mapping.Collection;
import org.hibernate.mapping.Column;
import org.hibernate.mapping.Component;
+import org.hibernate.mapping.Constraint;
import org.hibernate.mapping.DependantValue;
+import org.hibernate.mapping.FetchProfile;
import org.hibernate.mapping.Fetchable;
import org.hibernate.mapping.Filterable;
import org.hibernate.mapping.Formula;
@@ -70,12 +72,12 @@
import org.hibernate.mapping.ManyToOne;
import org.hibernate.mapping.Map;
import org.hibernate.mapping.MetaAttribute;
+import org.hibernate.mapping.MetadataSource;
import org.hibernate.mapping.OneToMany;
import org.hibernate.mapping.OneToOne;
import org.hibernate.mapping.PersistentClass;
import org.hibernate.mapping.PrimitiveArray;
import org.hibernate.mapping.Property;
-import org.hibernate.mapping.PropertyGeneration;
import org.hibernate.mapping.RootClass;
import org.hibernate.mapping.Selectable;
import org.hibernate.mapping.Set;
@@ -89,17 +91,19 @@
import org.hibernate.mapping.UnionSubclass;
import org.hibernate.mapping.UniqueKey;
import org.hibernate.mapping.Value;
-import org.hibernate.persister.entity.JoinedSubclassEntityPersister;
-import org.hibernate.persister.entity.SingleTableEntityPersister;
-import org.hibernate.persister.entity.UnionSubclassEntityPersister;
+import org.hibernate.tuple.GeneratedValueGeneration;
+import org.hibernate.tuple.GenerationTiming;
+import org.hibernate.type.BasicType;
import org.hibernate.type.DiscriminatorType;
import org.hibernate.type.ForeignKeyDirection;
import org.hibernate.type.Type;
-import org.hibernate.type.TypeFactory;
-import org.hibernate.util.JoinedIterator;
-import org.hibernate.util.ReflectHelper;
-import org.hibernate.util.StringHelper;
+import org.jboss.logging.Logger;
+
+import org.dom4j.Attribute;
+import org.dom4j.Document;
+import org.dom4j.Element;
+
/**
* Walks an XML mapping document and produces the Hibernate configuration-time metamodel (the
* classes in the mapping package)
@@ -108,7 +112,7 @@
*/
public final class HbmBinder {
- private static final Logger log = LoggerFactory.getLogger( HbmBinder.class );
+ private static final CoreMessageLogger LOG = Logger.getMessageLogger(CoreMessageLogger.class, HbmBinder.class.getName());
/**
* Private constructor to disallow instantiation.
@@ -120,44 +124,51 @@
* The main contract into the hbm.xml-based binder. Performs necessary binding operations
* represented by the given DOM.
*
- * @param doc The DOM to be parsed and bound.
+ * @param metadataXml The DOM to be parsed and bound.
* @param mappings Current bind state.
* @param inheritedMetas Any inherited meta-tag information.
+ * @param entityNames Any state
+ *
* @throws MappingException
*/
- public static void bindRoot(Document doc, Mappings mappings, java.util.Map inheritedMetas)
- throws MappingException {
+ public static void bindRoot(
+ XmlDocument metadataXml,
+ Mappings mappings,
+ java.util.Map inheritedMetas,
+ java.util.Set<String> entityNames) throws MappingException {
- java.util.List names = HbmBinder.getExtendsNeeded( doc, mappings );
+ final Document doc = metadataXml.getDocumentTree();
+ final Element hibernateMappingElement = doc.getRootElement();
+
+ java.util.List<String> names = HbmBinder.getExtendsNeeded( metadataXml, mappings );
if ( !names.isEmpty() ) {
// classes mentioned in extends not available - so put it in queue
- Element hmNode = doc.getRootElement();
- Attribute packNode = hmNode.attribute( "package" );
- String packageName = null;
- if ( packNode != null ) {
- packageName = packNode.getValue();
+ Attribute packageAttribute = hibernateMappingElement.attribute( "package" );
+ String packageName = packageAttribute == null ? null : packageAttribute.getValue();
+ for ( String name : names ) {
+ mappings.addToExtendsQueue( new ExtendsQueueEntry( name, packageName, metadataXml, entityNames ) );
}
- Iterator itr = names.iterator();
- while ( itr.hasNext() ) {
- String extendsName = (String) itr.next();
- mappings.addToExtendsQueue( new ExtendsQueueEntry( extendsName, packageName, doc ) );
- }
return;
}
- Element hmNode = doc.getRootElement();
// get meta's from
- inheritedMetas = getMetas( hmNode, inheritedMetas, true );
- extractRootAttributes( hmNode, mappings );
+ inheritedMetas = getMetas( hibernateMappingElement, inheritedMetas, true );
+ extractRootAttributes( hibernateMappingElement, mappings );
- Iterator rootChildren = hmNode.elementIterator();
+ Iterator rootChildren = hibernateMappingElement.elementIterator();
while ( rootChildren.hasNext() ) {
final Element element = (Element) rootChildren.next();
final String elementName = element.getName();
if ( "filter-def".equals( elementName ) ) {
parseFilterDef( element, mappings );
}
+ else if ( "fetch-profile".equals( elementName ) ) {
+ parseFetchProfile( element, mappings, null );
+ }
+ else if ( "identifier-generator".equals( elementName ) ) {
+ parseIdentifierGeneratorRegistration( element, mappings );
+ }
else if ( "typedef".equals( elementName ) ) {
bindTypeDef( element, mappings );
}
@@ -196,13 +207,33 @@
}
}
+ private static void parseIdentifierGeneratorRegistration(Element element, Mappings mappings) {
+ String strategy = element.attributeValue( "name" );
+ if ( StringHelper.isEmpty( strategy ) ) {
+ throw new MappingException( "'name' attribute expected for identifier-generator elements" );
+ }
+ String generatorClassName = element.attributeValue( "class" );
+ if ( StringHelper.isEmpty( generatorClassName ) ) {
+ throw new MappingException( "'class' attribute expected for identifier-generator [identifier-generator@name=" + strategy + "]" );
+ }
+
+ try {
+ Class generatorClass = ReflectHelper.classForName( generatorClassName );
+ mappings.getIdentifierGeneratorFactory().register( strategy, generatorClass );
+ }
+ catch ( ClassNotFoundException e ) {
+ throw new MappingException( "Unable to locate identifier-generator class [name=" + strategy + ", class=" + generatorClassName + "]" );
+ }
+
+ }
+
private static void bindImport(Element importNode, Mappings mappings) {
String className = getClassName( importNode.attribute( "class" ), mappings );
Attribute renameNode = importNode.attribute( "rename" );
String rename = ( renameNode == null ) ?
StringHelper.unqualify( className ) :
renameNode.getValue();
- log.debug( "Import: " + rename + " -> " + className );
+ LOG.debugf( "Import: %s -> %s", rename, className );
mappings.addImport( className, rename );
}
@@ -280,7 +311,7 @@
}
/**
- * Responsible for perfoming the bind operation related to an <class/> mapping element.
+ * Responsible for performing the bind operation related to an <class/> mapping element.
*
* @param node The DOM Element for the <class/> element.
* @param rootClass The mapping instance to which to bind the information.
@@ -314,15 +345,14 @@
catalog,
getClassTableName( entity, node, schema, catalog, null, mappings ),
getSubselect( node ),
- entity.isAbstract() != null && entity.isAbstract().booleanValue()
+ entity.isAbstract() != null && entity.isAbstract()
);
entity.setTable( table );
bindComment(table, node);
- log.info(
- "Mapping class: " + entity.getEntityName() +
- " -> " + entity.getTable().getName()
- );
+ if ( LOG.isDebugEnabled() ) {
+ LOG.debugf( "Mapping class: %s -> %s", entity.getEntityName(), entity.getTable().getName() );
+ }
// MUTABLE
Attribute mutableNode = node.attribute( "mutable" );
@@ -385,7 +415,7 @@
java.util.Map inheritedMetas) throws MappingException {
String propertyName = idNode.attributeValue( "name" );
- SimpleValue id = new SimpleValue( entity.getTable() );
+ SimpleValue id = new SimpleValue( mappings, entity.getTable() );
entity.setIdentifier( id );
// if ( propertyName == null || entity.getPojoRepresentation() == null ) {
@@ -428,6 +458,7 @@
prop.setValue( id );
bindProperty( idNode, prop, mappings, inheritedMetas );
entity.setIdentifierProperty( prop );
+ entity.setDeclaredIdentifierProperty( prop );
}
// TODO:
@@ -441,7 +472,7 @@
private static void bindCompositeId(Element idNode, RootClass entity, Mappings mappings,
java.util.Map inheritedMetas) throws MappingException {
String propertyName = idNode.attributeValue( "name" );
- Component id = new Component( entity );
+ Component id = new Component( mappings, entity );
entity.setIdentifier( id );
bindCompositeId( idNode, id, entity, propertyName, mappings, inheritedMetas );
if ( propertyName == null ) {
@@ -461,6 +492,7 @@
prop.setValue( id );
bindProperty( idNode, prop, mappings, inheritedMetas );
entity.setIdentifierProperty( prop );
+ entity.setDeclaredIdentifierProperty( prop );
}
makeIdentifier( idNode, id, mappings );
@@ -471,7 +503,7 @@
String name, RootClass entity, java.util.Map inheritedMetas) {
String propertyName = subnode.attributeValue( "name" );
- SimpleValue val = new SimpleValue( table );
+ SimpleValue val = new SimpleValue( mappings, table );
bindSimpleValue( subnode, val, false, propertyName, mappings );
if ( !val.isTypeSpecified() ) {
// this is either a tag with no type attribute,
@@ -494,8 +526,10 @@
// for version properties marked as being generated, make sure they are "always"
// generated; aka, "insert" is invalid; this is dis-allowed by the DTD,
// but just to make sure...
- if ( prop.getGeneration() == PropertyGeneration.INSERT ) {
- throw new MappingException( "'generated' attribute cannot be 'insert' for versioning property" );
+ if ( prop.getValueGenerationStrategy() != null ) {
+ if ( prop.getValueGenerationStrategy().getGenerationTiming() == GenerationTiming.INSERT ) {
+ throw new MappingException( "'generated' attribute cannot be 'insert' for versioning property" );
+ }
}
makeVersion( subnode, val );
entity.setVersion( prop );
@@ -504,7 +538,7 @@
private static void bindDiscriminatorProperty(Table table, RootClass entity, Element subnode,
Mappings mappings) {
- SimpleValue discrim = new SimpleValue( table );
+ SimpleValue discrim = new SimpleValue( mappings, table );
entity.setDiscriminator( discrim );
bindSimpleValue(
subnode,
@@ -518,10 +552,14 @@
// ( (Column) discrim.getColumnIterator().next() ).setType(type);
}
entity.setPolymorphic( true );
- if ( "true".equals( subnode.attributeValue( "force" ) ) )
- entity.setForceDiscriminator( true );
- if ( "false".equals( subnode.attributeValue( "insert" ) ) )
+ final String explicitForceValue = subnode.attributeValue( "force" );
+ boolean forceDiscriminatorInSelects = explicitForceValue == null
+ ? mappings.forceDiscriminatorInSelectsByDefault()
+ : "true".equals( explicitForceValue );
+ entity.setForceDiscriminator( forceDiscriminatorInSelects );
+ if ( "false".equals( subnode.attributeValue( "insert" ) ) ) {
entity.setDiscriminatorInsertable( false );
+ }
}
public static void bindClass(Element node, PersistentClass persistentClass, Mappings mappings,
@@ -541,13 +579,19 @@
throw new MappingException( "Unable to determine entity name" );
}
persistentClass.setEntityName( entityName );
+ persistentClass.setJpaEntityName( StringHelper.unqualify( entityName ) );
bindPojoRepresentation( node, persistentClass, mappings, inheritedMetas );
bindDom4jRepresentation( node, persistentClass, mappings, inheritedMetas );
bindMapRepresentation( node, persistentClass, mappings, inheritedMetas );
- bindPersistentClassCommonValues( node, persistentClass, mappings, inheritedMetas );
+ Iterator itr = node.elementIterator( "fetch-profile" );
+ while ( itr.hasNext() ) {
+ final Element profileElement = ( Element ) itr.next();
+ parseFetchProfile( profileElement, mappings, entityName );
+ }
+ bindPersistentClassCommonValues( node, persistentClass, mappings, inheritedMetas );
}
private static void bindPojoRepresentation(Element node, PersistentClass entity,
@@ -578,10 +622,10 @@
if (nodeName==null) nodeName = StringHelper.unqualify( entity.getEntityName() );
entity.setNodeName(nodeName);
- Element tuplizer = locateTuplizerDefinition( node, EntityMode.DOM4J );
- if ( tuplizer != null ) {
- entity.addTuplizer( EntityMode.DOM4J, tuplizer.attributeValue( "class" ) );
- }
+// Element tuplizer = locateTuplizerDefinition( node, EntityMode.DOM4J );
+// if ( tuplizer != null ) {
+// entity.addTuplizer( EntityMode.DOM4J, tuplizer.attributeValue( "class" ) );
+// }
}
private static void bindMapRepresentation(Element node, PersistentClass entity,
@@ -649,16 +693,18 @@
// OPTIMISTIC LOCK MODE
Attribute olNode = node.attribute( "optimistic-lock" );
- entity.setOptimisticLockMode( getOptimisticLockMode( olNode ) );
+ entity.setOptimisticLockStyle( getOptimisticLockStyle( olNode ) );
entity.setMetaAttributes( getMetas( node, inheritedMetas ) );
// PERSISTER
Attribute persisterNode = node.attribute( "persister" );
if ( persisterNode != null ) {
try {
- entity.setEntityPersisterClass( ReflectHelper.classForName( persisterNode
- .getValue() ) );
+ entity.setEntityPersisterClass( ReflectHelper.classForName(
+ persisterNode
+ .getValue()
+ ) );
}
catch (ClassNotFoundException cnfe) {
throw new MappingException( "Could not find persister class: "
@@ -780,7 +826,7 @@
// NONE might be a better option moving forward in the case of callable
return ExecuteUpdateResultCheckStyle.COUNT;
}
- return ExecuteUpdateResultCheckStyle.parse( attr.getValue() );
+ return ExecuteUpdateResultCheckStyle.fromExternalName( attr.getValue() );
}
public static void bindUnionSubclass(Element node, UnionSubclass unionSubclass,
@@ -789,11 +835,6 @@
bindClass( node, unionSubclass, mappings, inheritedMetas );
inheritedMetas = getMetas( node, inheritedMetas, true ); // get meta's from
- if ( unionSubclass.getEntityPersisterClass() == null ) {
- unionSubclass.getRootClass().setEntityPersisterClass(
- UnionSubclassEntityPersister.class );
- }
-
Attribute schemaNode = node.attribute( "schema" );
String schema = schemaNode == null ?
mappings.getSchemaName() : schemaNode.getValue();
@@ -807,16 +848,15 @@
schema,
catalog,
getClassTableName(unionSubclass, node, schema, catalog, denormalizedSuperTable, mappings ),
- unionSubclass.isAbstract() != null && unionSubclass.isAbstract().booleanValue(),
+ unionSubclass.isAbstract() != null && unionSubclass.isAbstract(),
getSubselect( node ),
denormalizedSuperTable
);
unionSubclass.setTable( mytable );
- log.info(
- "Mapping union-subclass: " + unionSubclass.getEntityName() +
- " -> " + unionSubclass.getTable().getName()
- );
+ if ( LOG.isDebugEnabled() ) {
+ LOG.debugf( "Mapping union-subclass: %s -> %s", unionSubclass.getEntityName(), unionSubclass.getTable().getName() );
+ }
createClassProperties( node, unionSubclass, mappings, inheritedMetas );
@@ -828,24 +868,21 @@
bindClass( node, subclass, mappings, inheritedMetas );
inheritedMetas = getMetas( node, inheritedMetas, true ); // get meta's from
- if ( subclass.getEntityPersisterClass() == null ) {
- subclass.getRootClass()
- .setEntityPersisterClass( SingleTableEntityPersister.class );
+ if ( LOG.isDebugEnabled() ) {
+ LOG.debugf( "Mapping subclass: %s -> %s", subclass.getEntityName(), subclass.getTable().getName() );
}
- log.info(
- "Mapping subclass: " + subclass.getEntityName() +
- " -> " + subclass.getTable().getName()
- );
-
// properties
createClassProperties( node, subclass, mappings, inheritedMetas );
}
private static String getClassTableName(
- PersistentClass model, Element node, String schema, String catalog, Table denormalizedSuperTable,
- Mappings mappings
- ) {
+ PersistentClass model,
+ Element node,
+ String schema,
+ String catalog,
+ Table denormalizedSuperTable,
+ Mappings mappings) {
Attribute tableNameNode = node.attribute( "table" );
String logicalTableName;
String physicalTableName;
@@ -869,11 +906,6 @@
//
// joined subclasses
- if ( joinedSubclass.getEntityPersisterClass() == null ) {
- joinedSubclass.getRootClass()
- .setEntityPersisterClass( JoinedSubclassEntityPersister.class );
- }
-
Attribute schemaNode = node.attribute( "schema" );
String schema = schemaNode == null ?
mappings.getSchemaName() : schemaNode.getValue();
@@ -892,14 +924,13 @@
joinedSubclass.setTable( mytable );
bindComment(mytable, node);
- log.info(
- "Mapping joined-subclass: " + joinedSubclass.getEntityName() +
- " -> " + joinedSubclass.getTable().getName()
- );
+ if ( LOG.isDebugEnabled() ) {
+ LOG.debugf( "Mapping joined-subclass: %s -> %s", joinedSubclass.getEntityName(), joinedSubclass.getTable().getName() );
+ }
// KEY
Element keyNode = node.element( "key" );
- SimpleValue key = new DependantValue( mytable, joinedSubclass.getIdentifier() );
+ SimpleValue key = new DependantValue( mappings, mytable, joinedSubclass.getIdentifier() );
joinedSubclass.setKey( key );
key.setCascadeDeleteEnabled( "cascade".equals( keyNode.attributeValue( "on-delete" ) ) );
bindSimpleValue( keyNode, key, false, joinedSubclass.getEntityName(), mappings );
@@ -957,14 +988,13 @@
join.setOptional( "true".equals( nullNode.getValue() ) );
}
- log.info(
- "Mapping class join: " + persistentClass.getEntityName() +
- " -> " + join.getTable().getName()
- );
+ if ( LOG.isDebugEnabled() ) {
+ LOG.debugf( "Mapping class join: %s -> %s", persistentClass.getEntityName(), join.getTable().getName() );
+ }
// KEY
Element keyNode = node.element( "key" );
- SimpleValue key = new DependantValue( table, persistentClass.getIdentifier() );
+ SimpleValue key = new DependantValue( mappings, table, persistentClass.getIdentifier() );
join.setKey( key );
key.setCascadeDeleteEnabled( "cascade".equals( keyNode.attributeValue( "on-delete" ) ) );
bindSimpleValue( keyNode, key, false, persistentClass.getEntityName(), mappings );
@@ -982,20 +1012,20 @@
Value value = null;
if ( "many-to-one".equals( name ) ) {
- value = new ManyToOne( table );
+ value = new ManyToOne( mappings, table );
bindManyToOne( subnode, (ManyToOne) value, propertyName, true, mappings );
}
else if ( "any".equals( name ) ) {
- value = new Any( table );
+ value = new Any( mappings, table );
bindAny( subnode, (Any) value, true, mappings );
}
else if ( "property".equals( name ) ) {
- value = new SimpleValue( table );
+ value = new SimpleValue( mappings, table );
bindSimpleValue( subnode, (SimpleValue) value, true, propertyName, mappings );
}
else if ( "component".equals( name ) || "dynamic-component".equals( name ) ) {
String subpath = StringHelper.qualify( path, propertyName );
- value = new Component( join );
+ value = new Component( mappings, join );
bindComponent(
subnode,
(Component) value,
@@ -1033,20 +1063,22 @@
// COLUMN(S)
Attribute columnAttribute = node.attribute( "column" );
if ( columnAttribute == null ) {
- Iterator iter = node.elementIterator();
+ Iterator itr = node.elementIterator();
int count = 0;
- while ( iter.hasNext() ) {
- Element columnElement = (Element) iter.next();
+ while ( itr.hasNext() ) {
+ Element columnElement = (Element) itr.next();
if ( columnElement.getName().equals( "column" ) ) {
Column column = new Column();
column.setValue( simpleValue );
column.setTypeIndex( count++ );
bindColumn( columnElement, column, isNullable );
+ String columnName = columnElement.attributeValue( "name" );
String logicalColumnName = mappings.getNamingStrategy().logicalColumnName(
- columnElement.attributeValue( "name" ), propertyPath
+ columnName, propertyPath
);
- column.setName( mappings.getNamingStrategy().columnName(
- logicalColumnName ) );
+ columnName = mappings.getNamingStrategy().columnName( columnName );
+ columnName = quoteIdentifier( columnName, mappings );
+ column.setName( columnName );
if ( table != null ) {
table.addColumn( column ); // table=null -> an association
// - fill it in later
@@ -1069,6 +1101,16 @@
simpleValue.addFormula( formula );
}
}
+
+ // todo : another GoodThing would be to go back after all parsing and see if all the columns
+ // (and no formulas) are contained in a defined unique key that only contains these columns.
+ // That too would mark this as a logical one-to-one
+ final Attribute uniqueAttribute = node.attribute( "unique" );
+ if ( uniqueAttribute != null
+ && "true".equals( uniqueAttribute.getValue() )
+ && ManyToOne.class.isInstance( simpleValue ) ) {
+ ( (ManyToOne) simpleValue ).markAsLogicalOneToOne();
+ }
}
else {
if ( node.elementIterator( "column" ).hasNext() ) {
@@ -1083,10 +1125,16 @@
Column column = new Column();
column.setValue( simpleValue );
bindColumn( node, column, isNullable );
+ if ( column.isUnique() && ManyToOne.class.isInstance( simpleValue ) ) {
+ ( (ManyToOne) simpleValue ).markAsLogicalOneToOne();
+ }
+ String columnName = columnAttribute.getValue();
String logicalColumnName = mappings.getNamingStrategy().logicalColumnName(
- columnAttribute.getValue(), propertyPath
+ columnName, propertyPath
);
- column.setName( mappings.getNamingStrategy().columnName( logicalColumnName ) );
+ columnName = mappings.getNamingStrategy().columnName( columnName );
+ columnName = quoteIdentifier( columnName, mappings );
+ column.setName( columnName );
if ( table != null ) {
table.addColumn( column ); // table=null -> an association - fill
// it in later
@@ -1102,12 +1150,14 @@
Column column = new Column();
column.setValue( simpleValue );
bindColumn( node, column, isNullable );
- column.setName( mappings.getNamingStrategy().propertyToColumnName( propertyPath ) );
+ String columnName = mappings.getNamingStrategy().propertyToColumnName( propertyPath );
+ columnName = quoteIdentifier( columnName, mappings );
+ column.setName( columnName );
String logicalName = mappings.getNamingStrategy().logicalColumnName( null, propertyPath );
mappings.addColumnBinding( logicalName, column, table );
/* TODO: joinKeyColumnName & foreignKeyColumnName should be called either here or at a
 * slightly higher level in the stack (to get all the information we need)
- * Right now HbmBinder does not support the
+ * Right now HbmMetadataSourceProcessorImpl does not support the
*/
simpleValue.getTable().addColumn( column );
simpleValue.addColumn( column );
@@ -1153,8 +1203,12 @@
Properties parameters = new Properties();
Attribute typeNode = node.attribute( "type" );
- if ( typeNode == null ) typeNode = node.attribute( "id-type" ); // for an any
- if ( typeNode != null ) typeName = typeNode.getValue();
+ if ( typeNode == null ) {
+ typeNode = node.attribute( "id-type" ); // for an any
+ }
+ else {
+ typeName = typeNode.getValue();
+ }
Element typeChild = node.element( "type" );
if ( typeName == null && typeChild != null ) {
@@ -1170,6 +1224,11 @@
}
}
+ resolveAndBindTypeDef(simpleValue, mappings, typeName, parameters);
+ }
+
+ private static void resolveAndBindTypeDef(SimpleValue simpleValue,
+ Mappings mappings, String typeName, Properties parameters) {
TypeDef typeDef = mappings.getTypeDef( typeName );
if ( typeDef != null ) {
typeName = typeDef.getTypeClass();
@@ -1179,6 +1238,19 @@
allParameters.putAll( typeDef.getParameters() );
allParameters.putAll( parameters );
parameters = allParameters;
+ }else if (typeName!=null && !mappings.isInSecondPass()){
+ BasicType basicType=mappings.getTypeResolver().basic(typeName);
+ if (basicType==null) {
+ /*
+ * If the referenced typeName isn't a basic type, it's probably a typedef defined
+ * in a mapping file that has not been read yet, so defer resolving and binding
+ * this type until all mapping files have been read, i.e. during the second passes.
+ * Fixes issue HHH-7300
+ */
+ SecondPass resolveUserTypeMappingSecondPass=new ResolveUserTypeMappingSecondPass(simpleValue,typeName,mappings,parameters);
+ mappings.addSecondPass(resolveUserTypeMappingSecondPass);
+ }
}
if ( !parameters.isEmpty() ) simpleValue.setTypeParameters( parameters );
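The comment above describes the fix: when a type name cannot yet be resolved, binding is postponed to a SecondPass. As a minimal sketch of that pattern (assuming simpleValue, typeName, mappings and parameters are in scope, as in the surrounding method), the deferral amounts to:

    // runs after every mapping document has been parsed, when all typedefs are known
    SecondPass deferredTypeBinding = new SecondPass() {
        @Override
        public void doSecondPass(java.util.Map persistentClasses) throws MappingException {
            resolveAndBindTypeDef( simpleValue, mappings, typeName, parameters );
        }
    };
    mappings.addSecondPass( deferredTypeBinding );

The named ResolveUserTypeMappingSecondPass class added at the end of this file plays exactly that role.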
@@ -1229,47 +1301,52 @@
Attribute generatedNode = node.attribute( "generated" );
String generationName = generatedNode == null ? null : generatedNode.getValue();
- PropertyGeneration generation = PropertyGeneration.parse( generationName );
- property.setGeneration( generation );
- if ( generation == PropertyGeneration.ALWAYS || generation == PropertyGeneration.INSERT ) {
- // generated properties can *never* be insertable...
- if ( property.isInsertable() ) {
- if ( insertNode == null ) {
- // insertable simply because that is the user did not specify
- // anything; just override it
+ // Handle generated properties.
+ GenerationTiming generationTiming = GenerationTiming.parseFromName( generationName );
+ if ( generationTiming == GenerationTiming.ALWAYS || generationTiming == GenerationTiming.INSERT ) {
+ // we had generation specified...
+ // HBM only supports "database generated values"
+ property.setValueGenerationStrategy( new GeneratedValueGeneration( generationTiming ) );
+
+ // generated properties can *never* be insertable...
+ if ( property.isInsertable() ) {
+ if ( insertNode == null ) {
+ // insertable simply because the user did not specify
+ // anything; just override it
property.setInsertable( false );
- }
- else {
- // the user specifically supplied insert="true",
- // which constitutes an illegal combo
+ }
+ else {
+ // the user specifically supplied insert="true",
+ // which constitutes an illegal combo
throw new MappingException(
- "cannot specify both insert=\"true\" and generated=\"" + generation.getName() +
- "\" for property: " +
- propName
+ "cannot specify both insert=\"true\" and generated=\"" + generationTiming.name().toLowerCase() +
+ "\" for property: " +
+ propName
);
- }
- }
+ }
+ }
- // properties generated on update can never be updateable...
- if ( property.isUpdateable() && generation == PropertyGeneration.ALWAYS ) {
- if ( updateNode == null ) {
- // updateable only because the user did not specify
- // anything; just override it
- property.setUpdateable( false );
- }
- else {
- // the user specifically supplied update="true",
- // which constitutes an illegal combo
+ // properties generated on update can never be updateable...
+ if ( property.isUpdateable() && generationTiming == GenerationTiming.ALWAYS ) {
+ if ( updateNode == null ) {
+ // updateable only because the user did not specify
+ // anything; just override it
+ property.setUpdateable( false );
+ }
+ else {
+ // the user specifically supplied update="true",
+ // which constitutes an illegal combo
throw new MappingException(
- "cannot specify both update=\"true\" and generated=\"" + generation.getName() +
- "\" for property: " +
- propName
+ "cannot specify both update=\"true\" and generated=\"" + generationTiming.name().toLowerCase() +
+ "\" for property: " +
+ propName
);
- }
- }
- }
+ }
+ }
+ }
+
boolean isLazyable = "property".equals( node.getName() ) ||
"component".equals( node.getName() ) ||
"many-to-one".equals( node.getName() ) ||
@@ -1280,21 +1357,21 @@
property.setLazy( lazyNode != null && "true".equals( lazyNode.getValue() ) );
}
- if ( log.isDebugEnabled() ) {
+ if ( LOG.isDebugEnabled() ) {
String msg = "Mapped property: " + property.getName();
String columns = columns( property.getValue() );
if ( columns.length() > 0 ) msg += " -> " + columns;
// TODO: this fails if we run with debug on!
// if ( model.getType()!=null ) msg += ", type: " + model.getType().getName();
- log.debug( msg );
+ LOG.debug( msg );
}
property.setMetaAttributes( getMetas( node, inheritedMetas ) );
}
private static String columns(Value val) {
- StringBuffer columns = new StringBuffer();
+ StringBuilder columns = new StringBuilder();
Iterator iter = val.getColumnIterator();
while ( iter.hasNext() ) {
columns.append( ( (Selectable) iter.next() ).getText() );
@@ -1327,12 +1404,7 @@
Attribute orderNode = node.attribute( "order-by" );
if ( orderNode != null ) {
- if ( Environment.jvmSupportsLinkedHashCollections() || ( collection instanceof Bag ) ) {
- collection.setOrderBy( orderNode.getValue() );
- }
- else {
- log.warn( "Attribute \"order-by\" ignored in JDK1.3 or less" );
- }
+ collection.setOrderBy( orderNode.getValue() );
}
Attribute whereNode = node.attribute( "where" );
if ( whereNode != null ) {
@@ -1347,6 +1419,12 @@
if ( nodeName == null ) nodeName = node.attributeValue( "name" );
collection.setNodeName( nodeName );
String embed = node.attributeValue( "embed-xml" );
+ // sometimes embed is set to the default value when not specified in the mapping,
+ // so there is no way to tell whether the attribute was explicitly set;
+ // log a warning if embed has a value different from the default.
+ if ( !StringHelper.isEmpty( embed ) && !"true".equals( embed ) ) {
+ LOG.embedXmlAttributesNoLongerSupported();
+ }
collection.setEmbedded( embed==null || "true".equals(embed) );
@@ -1394,7 +1472,7 @@
Element oneToManyNode = node.element( "one-to-many" );
if ( oneToManyNode != null ) {
- OneToMany oneToMany = new OneToMany( collection.getOwner() );
+ OneToMany oneToMany = new OneToMany( mappings, collection.getOwner() );
collection.setElement( oneToMany );
bindOneToMany( oneToManyNode, oneToMany, mappings );
// we have to set up the table later!! yuck
@@ -1419,6 +1497,9 @@
null,
path
);
+ if ( ownerTable.isQuoted() ) {
+ tableName = StringHelper.quote( tableName );
+ }
}
Attribute schemaNode = node.attribute( "schema" );
String schema = schemaNode == null ?
@@ -1438,10 +1519,9 @@
collection.setCollectionTable( table );
bindComment(table, node);
- log.info(
- "Mapping collection: " + collection.getRole() +
- " -> " + collection.getCollectionTable().getName()
- );
+ if ( LOG.isDebugEnabled() ) {
+ LOG.debugf( "Mapping collection: %s -> %s", collection.getRole(), collection.getCollectionTable().getName() );
+ }
}
// SORT
@@ -1527,11 +1607,11 @@
) {
if ( "no-proxy".equals( node.attributeValue( "lazy" ) ) ) {
fetchable.setUnwrapProxy(true);
- fetchable.setLazy(true);
+ fetchable.setLazy( true );
//TODO: better to degrade to lazy="false" if uninstrumented
}
else {
- initLaziness(node, fetchable, mappings, "proxy", defaultLazy);
+ initLaziness( node, fetchable, mappings, "proxy", defaultLazy );
}
}
@@ -1564,10 +1644,17 @@
if ( ukName != null ) {
manyToOne.setReferencedPropertyName( ukName.getValue() );
}
+ manyToOne.setReferenceToPrimaryKey( manyToOne.getReferencedPropertyName() == null );
manyToOne.setReferencedEntityName( getEntityName( node, mappings ) );
String embed = node.attributeValue( "embed-xml" );
+ // sometimes embed is set to the default value when not specified in the mapping,
+ // so there is no way to tell whether the attribute was explicitly set;
+ // log a warning if embed has a value different from the default.
+ if ( !StringHelper.isEmpty( embed ) && !"true".equals( embed ) ) {
+ LOG.embedXmlAttributesNoLongerSupported();
+ }
manyToOne.setEmbedded( embed == null || "true".equals( embed ) );
String notFound = node.attributeValue( "not-found" );
@@ -1582,13 +1669,13 @@
Attribute fkNode = node.attribute( "foreign-key" );
if ( fkNode != null ) manyToOne.setForeignKeyName( fkNode.getValue() );
- validateCascade( node, path );
- }
-
- private static void validateCascade(Element node, String path) {
String cascade = node.attributeValue( "cascade" );
if ( cascade != null && cascade.indexOf( "delete-orphan" ) >= 0 ) {
- throw new MappingException( "single-valued associations do not support orphan delete: " + path );
+ if ( !manyToOne.isLogicalOneToOne() ) {
+ throw new MappingException(
+ "many-to-one attribute [" + path + "] does not support orphan delete as it is not unique"
+ );
+ }
}
}
@@ -1602,7 +1689,7 @@
Iterator iter = node.elementIterator( "meta-value" );
if ( iter.hasNext() ) {
HashMap values = new HashMap();
- org.hibernate.type.Type metaType = TypeFactory.heuristicType( any.getMetaType() );
+ org.hibernate.type.Type metaType = mappings.getTypeResolver().heuristicType( any.getMetaType() );
while ( iter.hasNext() ) {
Element metaValue = (Element) iter.next();
try {
@@ -1643,19 +1730,34 @@
initOuterJoinFetchSetting( node, oneToOne );
initLaziness( node, oneToOne, mappings, true );
- oneToOne.setEmbedded( "true".equals( node.attributeValue( "embed-xml" ) ) );
+ String embed = node.attributeValue( "embed-xml" );
+ // sometimes embed is set to the default value when not specified in the mapping,
+ // so there is no way to tell whether the attribute was explicitly set;
+ // log a warning if embed has a value different from the default.
+ if ( !StringHelper.isEmpty( embed ) && !"true".equals( embed ) ) {
+ LOG.embedXmlAttributesNoLongerSupported();
+ }
+ oneToOne.setEmbedded( "true".equals( embed ) );
Attribute fkNode = node.attribute( "foreign-key" );
if ( fkNode != null ) oneToOne.setForeignKeyName( fkNode.getValue() );
Attribute ukName = node.attribute( "property-ref" );
if ( ukName != null ) oneToOne.setReferencedPropertyName( ukName.getValue() );
+ oneToOne.setReferenceToPrimaryKey( oneToOne.getReferencedPropertyName() == null );
oneToOne.setPropertyName( node.attributeValue( "name" ) );
oneToOne.setReferencedEntityName( getEntityName( node, mappings ) );
- validateCascade( node, path );
+ String cascade = node.attributeValue( "cascade" );
+ if ( cascade != null && cascade.indexOf( "delete-orphan" ) >= 0 ) {
+ if ( oneToOne.isConstrained() ) {
+ throw new MappingException(
+ "one-to-one attribute [" + path + "] does not support orphan delete as it is constrained"
+ );
+ }
+ }
}
public static void bindOneToMany(Element node, OneToMany oneToMany, Mappings mappings)
@@ -1664,14 +1766,20 @@
oneToMany.setReferencedEntityName( getEntityName( node, mappings ) );
String embed = node.attributeValue( "embed-xml" );
+ // sometimes embed is set to the default value when not specified in the mapping,
+ // so there is no way to tell whether the attribute was explicitly set;
+ // log a warning if embed has a value different from the default.
+ if ( !StringHelper.isEmpty( embed ) && !"true".equals( embed ) ) {
+ LOG.embedXmlAttributesNoLongerSupported();
+ }
oneToMany.setEmbedded( embed == null || "true".equals( embed ) );
String notFound = node.attributeValue( "not-found" );
oneToMany.setIgnoreNotFound( "ignore".equals( notFound ) );
}
- public static void bindColumn(Element node, Column column, boolean isNullable) {
+ public static void bindColumn(Element node, Column column, boolean isNullable) throws MappingException {
Attribute lengthNode = node.attribute( "length" );
if ( lengthNode != null ) column.setLength( Integer.parseInt( lengthNode.getValue() ) );
Attribute scalNode = node.attribute( "scale" );
@@ -1691,6 +1799,13 @@
Attribute typeNode = node.attribute( "sql-type" );
if ( typeNode != null ) column.setSqlType( typeNode.getValue() );
+ String customWrite = node.attributeValue( "write" );
+ if(customWrite != null && !customWrite.matches("[^?]*\\?[^?]*")) {
+ throw new MappingException("write expression must contain exactly one value placeholder ('?') character");
+ }
+ column.setCustomWrite( customWrite );
+ column.setCustomRead( node.attributeValue( "read" ) );
+
Element comment = node.element("comment");
if (comment!=null) column.setComment( comment.getTextTrim() );
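As an aside on the new read/write fragments: the validation added above only checks the shape of the custom write expression. A small illustration of what the regex accepts, with made-up SQL fragments:

    // a custom write fragment must contain exactly one '?' placeholder for the bound value
    String ok  = "encrypt(?)";        // one placeholder  -> accepted
    String bad = "coalesce(?, ?)";    // two placeholders -> MappingException at binding time
    assert  ok.matches( "[^?]*\\?[^?]*" );
    assert !bad.matches( "[^?]*\\?[^?]*" );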
@@ -1729,7 +1844,7 @@
mappings,
inheritedMetas,
false
- );
+ );
}
public static void bindCompositeId(Element node, Component component,
@@ -1760,7 +1875,7 @@
if ( propertyName!=null ) {
throw new MappingException("cannot combine mapped=\"true\" with specified name");
}
- Component mapper = new Component(persistentClass);
+ Component mapper = new Component( mappings, persistentClass );
bindComponent(
node,
mapper,
@@ -1775,7 +1890,7 @@
);
persistentClass.setIdentifierMapper(mapper);
Property property = new Property();
- property.setName("_identifierMapper");
+ property.setName( PropertyPath.IDENTIFIER_MAPPER_PROPERTY );
property.setNodeName("id");
property.setUpdateable(false);
property.setInsertable(false);
@@ -1861,7 +1976,7 @@
value = collection;
}
else if ( "many-to-one".equals( name ) || "key-many-to-one".equals( name ) ) {
- value = new ManyToOne( component.getTable() );
+ value = new ManyToOne( mappings, component.getTable() );
String relativePath;
if (isEmbedded) {
relativePath = propertyName;
@@ -1872,7 +1987,7 @@
bindManyToOne( subnode, (ManyToOne) value, relativePath, isNullable, mappings );
}
else if ( "one-to-one".equals( name ) ) {
- value = new OneToOne( component.getTable(), component.getOwner() );
+ value = new OneToOne( mappings, component.getTable(), component.getOwner() );
String relativePath;
if (isEmbedded) {
relativePath = propertyName;
@@ -1883,11 +1998,11 @@
bindOneToOne( subnode, (OneToOne) value, relativePath, isNullable, mappings );
}
else if ( "any".equals( name ) ) {
- value = new Any( component.getTable() );
+ value = new Any( mappings, component.getTable() );
bindAny( subnode, (Any) value, isNullable, mappings );
}
else if ( "property".equals( name ) || "key-property".equals( name ) ) {
- value = new SimpleValue( component.getTable() );
+ value = new SimpleValue( mappings, component.getTable() );
String relativePath;
if (isEmbedded) {
relativePath = propertyName;
@@ -1900,7 +2015,7 @@
else if ( "component".equals( name )
|| "dynamic-component".equals( name )
|| "nested-composite-element".equals( name ) ) {
- value = new Component( component ); // a nested composite element
+ value = new Component( mappings, component ); // a nested composite element
bindComponent(
subnode,
(Component) value,
@@ -1980,21 +2095,41 @@
}
}
else {
- // use old (HB 2.1) defaults if outer-join is specified
- String eoj = jfNode.getValue();
- if ( "auto".equals( eoj ) ) {
- fetchStyle = FetchMode.DEFAULT;
+ if ( "many-to-many".equals( node.getName() ) ) {
+ //NOTE
- " + collection.getCollectionTable().getName()
- );
+ if ( LOG.isDebugEnabled() ) {
+ LOG.debugf( "Mapping collection: %s -> %s", collection.getRole(), collection.getCollectionTable().getName() );
+ }
}
// CHECK
@@ -2428,7 +2585,7 @@
else {
keyVal = (KeyValue) collection.getOwner().getRecursiveProperty( propRef ).getValue();
}
- SimpleValue key = new DependantValue( collection.getCollectionTable(), keyVal );
+ SimpleValue key = new DependantValue( mappings, collection.getCollectionTable(), keyVal );
key.setCascadeDeleteEnabled( "cascade"
.equals( subnode.attributeValue( "on-delete" ) ) );
bindSimpleValue(
@@ -2449,7 +2606,7 @@
}
else if ( "element".equals( name ) ) {
- SimpleValue elt = new SimpleValue( collection.getCollectionTable() );
+ SimpleValue elt = new SimpleValue( mappings, collection.getCollectionTable() );
collection.setElement( elt );
bindSimpleValue(
subnode,
@@ -2460,7 +2617,7 @@
);
}
else if ( "many-to-many".equals( name ) ) {
- ManyToOne element = new ManyToOne( collection.getCollectionTable() );
+ ManyToOne element = new ManyToOne( mappings, collection.getCollectionTable() );
collection.setElement( element );
bindManyToOne(
subnode,
@@ -2472,7 +2629,7 @@
bindManyToManySubelements( collection, subnode, mappings );
}
else if ( "composite-element".equals( name ) ) {
- Component element = new Component( collection );
+ Component element = new Component( mappings, collection );
collection.setElement( element );
bindComposite(
subnode,
@@ -2484,7 +2641,7 @@
);
}
else if ( "many-to-any".equals( name ) ) {
- Any element = new Any( collection.getCollectionTable() );
+ Any element = new Any( mappings, collection.getCollectionTable() );
collection.setElement( element );
bindAny( subnode, element, true, mappings );
}
@@ -2539,6 +2696,7 @@
"not valid within collection using join fetching [" + collection.getRole() + "]"
);
}
+ final boolean debugEnabled = LOG.isDebugEnabled();
while ( filters.hasNext() ) {
final Element filterElement = ( Element ) filters.next();
final String name = filterElement.attributeValue( "name" );
@@ -2550,84 +2708,56 @@
if ( condition==null) {
throw new MappingException("no filter condition found for filter: " + name);
}
- log.debug(
- "Applying many-to-many filter [" + name +
- "] as [" + condition +
- "] to role [" + collection.getRole() + "]"
- );
- collection.addManyToManyFilter( name, condition );
+ Iterator aliasesIterator = filterElement.elementIterator("aliases");
+ java.util.Map aliasTables = new HashMap();
+ while (aliasesIterator.hasNext()){
+ Element alias = (Element) aliasesIterator.next();
+ aliasTables.put(alias.attributeValue("alias"), alias.attributeValue("table"));
+ }
+ if ( debugEnabled ) {
+ LOG.debugf( "Applying many-to-many filter [%s] as [%s] to role [%s]", name, condition, collection.getRole() );
+ }
+ String autoAliasInjectionText = filterElement.attributeValue("autoAliasInjection");
+ boolean autoAliasInjection = StringHelper.isEmpty(autoAliasInjectionText) ? true : Boolean.parseBoolean(autoAliasInjectionText);
+ collection.addManyToManyFilter(name, condition, autoAliasInjection, aliasTables, null);
}
}
- public static final FlushMode getFlushMode(String flushMode) {
- if ( flushMode == null ) {
- return null;
- }
- else if ( "auto".equals( flushMode ) ) {
- return FlushMode.AUTO;
- }
- else if ( "commit".equals( flushMode ) ) {
- return FlushMode.COMMIT;
- }
- else if ( "never".equals( flushMode ) ) {
- return FlushMode.NEVER;
- }
- else if ( "manual".equals( flushMode ) ) {
- return FlushMode.MANUAL;
- }
- else if ( "always".equals( flushMode ) ) {
- return FlushMode.ALWAYS;
- }
- else {
- throw new MappingException( "unknown flushmode" );
- }
- }
-
private static void bindNamedQuery(Element queryElem, String path, Mappings mappings) {
String queryName = queryElem.attributeValue( "name" );
if (path!=null) queryName = path + '.' + queryName;
String query = queryElem.getText();
- log.debug( "Named query: " + queryName + " -> " + query );
+ LOG.debugf( "Named query: %s -> %s", queryName, query );
boolean cacheable = "true".equals( queryElem.attributeValue( "cacheable" ) );
String region = queryElem.attributeValue( "cache-region" );
Attribute tAtt = queryElem.attribute( "timeout" );
- Integer timeout = tAtt == null ? null : new Integer( tAtt.getValue() );
+ Integer timeout = tAtt == null ? null : Integer.valueOf( tAtt.getValue() );
Attribute fsAtt = queryElem.attribute( "fetch-size" );
- Integer fetchSize = fsAtt == null ? null : new Integer( fsAtt.getValue() );
+ Integer fetchSize = fsAtt == null ? null : Integer.valueOf( fsAtt.getValue() );
Attribute roAttr = queryElem.attribute( "read-only" );
boolean readOnly = roAttr != null && "true".equals( roAttr.getValue() );
Attribute cacheModeAtt = queryElem.attribute( "cache-mode" );
String cacheMode = cacheModeAtt == null ? null : cacheModeAtt.getValue();
Attribute cmAtt = queryElem.attribute( "comment" );
String comment = cmAtt == null ? null : cmAtt.getValue();
- NamedQueryDefinition namedQuery = new NamedQueryDefinition(
- query,
- cacheable,
- region,
- timeout,
- fetchSize,
- getFlushMode( queryElem.attributeValue( "flush-mode" ) ) ,
- getCacheMode( cacheMode ),
- readOnly,
- comment,
- getParameterTypes(queryElem)
- );
+ NamedQueryDefinition namedQuery = new NamedQueryDefinitionBuilder().setName( queryName )
+ .setQuery( query )
+ .setCacheable( cacheable )
+ .setCacheRegion( region )
+ .setTimeout( timeout )
+ .setFetchSize( fetchSize )
+ .setFlushMode( FlushMode.interpretExternalSetting( queryElem.attributeValue( "flush-mode" ) ) )
+ .setCacheMode( CacheMode.interpretExternalSetting( cacheMode ) )
+ .setReadOnly( readOnly )
+ .setComment( comment )
+ .setParameterTypes( getParameterTypes( queryElem ) )
+ .createNamedQueryDefinition();
- mappings.addQuery( queryName, namedQuery );
+ mappings.addQuery( namedQuery.getName(), namedQuery );
}
- public static CacheMode getCacheMode(String cacheMode) {
- if (cacheMode == null) return null;
- if ( "get".equals( cacheMode ) ) return CacheMode.GET;
- if ( "ignore".equals( cacheMode ) ) return CacheMode.IGNORE;
- if ( "normal".equals( cacheMode ) ) return CacheMode.NORMAL;
- if ( "put".equals( cacheMode ) ) return CacheMode.PUT;
- if ( "refresh".equals( cacheMode ) ) return CacheMode.REFRESH;
- throw new MappingException("Unknown Cache Mode: " + cacheMode);
- }
-
public static java.util.Map getParameterTypes(Element queryElem) {
java.util.Map result = new java.util.LinkedHashMap();
Iterator iter = queryElem.elementIterator("query-param");
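The hand-rolled getFlushMode()/getCacheMode() helpers removed in this hunk are superseded by the enums' own parsing, used in the builder chain above. A rough sketch of the equivalence, assuming interpretExternalSetting maps external names onto the constants and passes null through:

    FlushMode flushMode = FlushMode.interpretExternalSetting( "commit" ); // expected: FlushMode.COMMIT
    CacheMode cacheMode = CacheMode.interpretExternalSetting( "get" );    // expected: CacheMode.GET
    FlushMode unset     = FlushMode.interpretExternalSetting( null );     // expected: null (nothing specified)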
@@ -2697,7 +2827,7 @@
(IdentifierCollection) collection,
persistentClasses,
mappings,
- inheritedMetas
+ inheritedMetas
);
}
@@ -2715,7 +2845,7 @@
(Map) collection,
persistentClasses,
mappings,
- inheritedMetas
+ inheritedMetas
);
}
@@ -2734,7 +2864,7 @@
}
}
-
+
static class ListSecondPass extends CollectionSecondPass {
ListSecondPass(Element node, Mappings mappings, List collection, java.util.Map inheritedMetas) {
super( node, mappings, collection, inheritedMetas );
@@ -2747,7 +2877,7 @@
(List) collection,
persistentClasses,
mappings,
- inheritedMetas
+ inheritedMetas
);
}
@@ -2771,55 +2901,55 @@
private static final CollectionType MAP = new CollectionType( "map" ) {
public Collection create(Element node, String path, PersistentClass owner,
Mappings mappings, java.util.Map inheritedMetas) throws MappingException {
- Map map = new Map( owner );
+ Map map = new Map( mappings, owner );
bindCollection( node, map, owner.getEntityName(), path, mappings, inheritedMetas );
return map;
}
};
private static final CollectionType SET = new CollectionType( "set" ) {
public Collection create(Element node, String path, PersistentClass owner,
Mappings mappings, java.util.Map inheritedMetas) throws MappingException {
- Set set = new Set( owner );
+ Set set = new Set( mappings, owner );
bindCollection( node, set, owner.getEntityName(), path, mappings, inheritedMetas );
return set;
}
};
private static final CollectionType LIST = new CollectionType( "list" ) {
public Collection create(Element node, String path, PersistentClass owner,
Mappings mappings, java.util.Map inheritedMetas) throws MappingException {
- List list = new List( owner );
+ List list = new List( mappings, owner );
bindCollection( node, list, owner.getEntityName(), path, mappings, inheritedMetas );
return list;
}
};
private static final CollectionType BAG = new CollectionType( "bag" ) {
public Collection create(Element node, String path, PersistentClass owner,
Mappings mappings, java.util.Map inheritedMetas) throws MappingException {
- Bag bag = new Bag( owner );
+ Bag bag = new Bag( mappings, owner );
bindCollection( node, bag, owner.getEntityName(), path, mappings, inheritedMetas );
return bag;
}
};
private static final CollectionType IDBAG = new CollectionType( "idbag" ) {
public Collection create(Element node, String path, PersistentClass owner,
Mappings mappings, java.util.Map inheritedMetas) throws MappingException {
- IdentifierBag bag = new IdentifierBag( owner );
+ IdentifierBag bag = new IdentifierBag( mappings, owner );
bindCollection( node, bag, owner.getEntityName(), path, mappings, inheritedMetas );
return bag;
}
};
private static final CollectionType ARRAY = new CollectionType( "array" ) {
public Collection create(Element node, String path, PersistentClass owner,
Mappings mappings, java.util.Map inheritedMetas) throws MappingException {
- Array array = new Array( owner );
+ Array array = new Array( mappings, owner );
bindArray( node, array, owner.getEntityName(), path, mappings, inheritedMetas );
return array;
}
};
private static final CollectionType PRIMITIVE_ARRAY = new CollectionType( "primitive-array" ) {
public Collection create(Element node, String path, PersistentClass owner,
Mappings mappings, java.util.Map inheritedMetas) throws MappingException {
- PrimitiveArray array = new PrimitiveArray( owner );
+ PrimitiveArray array = new PrimitiveArray( mappings, owner );
bindArray( node, array, owner.getEntityName(), path, mappings, inheritedMetas );
return array;
}
@@ -2841,21 +2971,23 @@
}
}
- private static int getOptimisticLockMode(Attribute olAtt) throws MappingException {
+ private static OptimisticLockStyle getOptimisticLockStyle(Attribute olAtt) throws MappingException {
+ if ( olAtt == null ) {
+ return OptimisticLockStyle.VERSION;
+ }
- if ( olAtt == null ) return Versioning.OPTIMISTIC_LOCK_VERSION;
- String olMode = olAtt.getValue();
+ final String olMode = olAtt.getValue();
if ( olMode == null || "version".equals( olMode ) ) {
- return Versioning.OPTIMISTIC_LOCK_VERSION;
+ return OptimisticLockStyle.VERSION;
}
else if ( "dirty".equals( olMode ) ) {
- return Versioning.OPTIMISTIC_LOCK_DIRTY;
+ return OptimisticLockStyle.DIRTY;
}
else if ( "all".equals( olMode ) ) {
- return Versioning.OPTIMISTIC_LOCK_ALL;
+ return OptimisticLockStyle.ALL;
}
else if ( "none".equals( olMode ) ) {
- return Versioning.OPTIMISTIC_LOCK_NONE;
+ return OptimisticLockStyle.NONE;
}
else {
throw new MappingException( "Unsupported optimistic-lock style: " + olMode );
@@ -2877,7 +3009,7 @@
boolean inheritable = Boolean
.valueOf( metaNode.attributeValue( "inherit" ) )
.booleanValue();
- if ( onlyInheritable & !inheritable ) {
+ if ( onlyInheritable && !inheritable ) {
continue;
}
String name = metaNode.attributeValue( "attribute" );
@@ -2887,10 +3019,10 @@
if ( meta == null ) {
meta = new MetaAttribute( name );
map.put( name, meta );
- } else if (meta == inheritedAttribute) { // overriding inherited meta attribute. HBX-621 & HBX-793
- meta = new MetaAttribute( name );
- map.put( name, meta );
- }
+ } else if (meta == inheritedAttribute) { // overriding inherited meta attribute. HBX-621 & HBX-793
+ meta = new MetaAttribute( name );
+ map.put( name, meta );
+ }
meta.addValue( metaNode.getText() );
}
return map;
@@ -2920,7 +3052,7 @@
private static void parseFilterDef(Element element, Mappings mappings) {
String name = element.attributeValue( "name" );
- log.debug( "Parsing filter-def [" + name + "]" );
+ LOG.debugf( "Parsing filter-def [%s]", name );
String defaultCondition = element.getTextTrim();
if ( StringHelper.isEmpty( defaultCondition ) ) {
defaultCondition = element.attributeValue( "condition" );
@@ -2931,12 +3063,12 @@
final Element param = (Element) params.next();
final String paramName = param.attributeValue( "name" );
final String paramType = param.attributeValue( "type" );
- log.debug( "adding filter parameter : " + paramName + " -> " + paramType );
- final Type heuristicType = TypeFactory.heuristicType( paramType );
- log.debug( "parameter heuristic type : " + heuristicType );
+ LOG.debugf( "Adding filter parameter : %s -> %s", paramName, paramType );
+ final Type heuristicType = mappings.getTypeResolver().heuristicType( paramType );
+ LOG.debugf( "Parameter heuristic type : %s", heuristicType );
paramMappings.put( paramName, heuristicType );
}
- log.debug( "Parsed filter-def [" + name + "]" );
+ LOG.debugf( "Parsed filter-def [%s]", name );
FilterDefinition def = new FilterDefinition( name, defaultCondition, paramMappings );
mappings.addFilterDefinition( def );
}
@@ -2950,7 +3082,7 @@
//TODO: bad implementation, cos it depends upon ordering of mapping doc
// fixing this requires that Collection/PersistentClass gain access
// to the Mappings reference from Configuration (or the filterDefinitions
- // map directly) sometime during Configuration.buildSessionFactory
+ // map directly) sometime during Configuration.build
// (after all the types/filter-defs are known and before building
// persisters).
if ( StringHelper.isEmpty(condition) ) {
@@ -2959,10 +3091,37 @@
if ( condition==null) {
throw new MappingException("no filter condition found for filter: " + name);
}
- log.debug( "Applying filter [" + name + "] as [" + condition + "]" );
- filterable.addFilter( name, condition );
+ Iterator aliasesIterator = filterElement.elementIterator("aliases");
+ java.util.Map aliasTables = new HashMap();
+ while (aliasesIterator.hasNext()){
+ Element alias = (Element) aliasesIterator.next();
+ aliasTables.put(alias.attributeValue("alias"), alias.attributeValue("table"));
+ }
+ LOG.debugf( "Applying filter [%s] as [%s]", name, condition );
+ String autoAliasInjectionText = filterElement.attributeValue("autoAliasInjection");
+ boolean autoAliasInjection = StringHelper.isEmpty(autoAliasInjectionText) ? true : Boolean.parseBoolean(autoAliasInjectionText);
+ filterable.addFilter(name, condition, autoAliasInjection, aliasTables, null);
}
+ private static void parseFetchProfile(Element element, Mappings mappings, String containingEntityName) {
+ String profileName = element.attributeValue( "name" );
+ FetchProfile profile = mappings.findOrCreateFetchProfile( profileName, MetadataSource.HBM );
+ Iterator itr = element.elementIterator( "fetch" );
+ while ( itr.hasNext() ) {
+ final Element fetchElement = ( Element ) itr.next();
+ final String association = fetchElement.attributeValue( "association" );
+ final String style = fetchElement.attributeValue( "style" );
+ String entityName = fetchElement.attributeValue( "entity" );
+ if ( entityName == null ) {
+ entityName = containingEntityName;
+ }
+ if ( entityName == null ) {
+ throw new MappingException( "could not determine entity for fetch-profile fetch [" + profileName + "]:[" + association + "]" );
+ }
+ profile.addFetch( entityName, association, style );
+ }
+ }
+
private static String getSubselect(Element element) {
String subselect = element.attributeValue( "subselect" );
if ( subselect != null ) {
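A profile registered by parseFetchProfile above behaves like any other fetch profile at runtime and can be switched on per session. A short usage sketch, where the profile name and the sessionFactory variable are hypothetical:

    Session session = sessionFactory.openSession();
    // name as declared in <fetch-profile name="customer-with-orders"> in the hbm.xml
    session.enableFetchProfile( "customer-with-orders" );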
@@ -2978,14 +3137,14 @@
* For the given document, locate all extends attributes which refer to
* entities (entity-name or class-name) not defined within said document.
*
- * @param doc The document to check
+ * @param metadataXml The document to check
* @param mappings The already processed mappings.
* @return The list of unresolved extends names.
*/
- public static java.util.List getExtendsNeeded(Document doc, Mappings mappings) {
- java.util.List extendz = new ArrayList();
+ public static java.util.List<String> getExtendsNeeded(XmlDocument metadataXml, Mappings mappings) {
+ java.util.List<String> extendz = new ArrayList<String>();
Iterator[] subclasses = new Iterator[3];
- final Element hmNode = doc.getRootElement();
+ final Element hmNode = metadataXml.getDocumentTree().getRootElement();
Attribute packNode = hmNode.attribute( "package" );
final String packageName = packNode == null ? null : packNode.getValue();
@@ -3019,7 +3178,7 @@
// extends names which require us to delay processing (i.e.
// external to this document and not yet processed) are contained
// in the returned result
- final java.util.Set set = new HashSet( extendz );
+ final java.util.Set<String> set = new HashSet<String>( extendz );
EntityElementHandler handler = new EntityElementHandler() {
public void handleEntity(String entityName, String className, Mappings mappings) {
if ( entityName != null ) {
@@ -3072,8 +3231,36 @@
recognizeEntities( mappings, element, handler );
}
}
+
+ private static String quoteIdentifier(String identifier, Mappings mappings) {
+ return mappings.getObjectNameNormalizer().isUseQuotedIdentifiersGlobally()
+ ? StringHelper.quote( identifier ) : identifier;
+ }
private static interface EntityElementHandler {
public void handleEntity(String entityName, String className, Mappings mappings);
}
+
+ private static class ResolveUserTypeMappingSecondPass implements SecondPass{
+
+ private SimpleValue simpleValue;
+ private String typeName;
+ private Mappings mappings;
+ private Properties parameters;
+
+ public ResolveUserTypeMappingSecondPass(SimpleValue simpleValue,
+ String typeName, Mappings mappings, Properties parameters) {
+ this.simpleValue=simpleValue;
+ this.typeName=typeName;
+ this.parameters=parameters;
+ this.mappings=mappings;
+ }
+
+ @Override
+ public void doSecondPass(java.util.Map persistentClasses)
+ throws MappingException {
+ resolveAndBindTypeDef(simpleValue, mappings, typeName, parameters);
+ }
+
+ }
}
Index: 3rdParty_sources/hibernate-core/org/hibernate/cfg/ImprovedNamingStrategy.java
===================================================================
RCS file: /usr/local/cvsroot/3rdParty_sources/hibernate-core/org/hibernate/cfg/ImprovedNamingStrategy.java,v
diff -u -r1.1 -r1.1.2.1
--- 3rdParty_sources/hibernate-core/org/hibernate/cfg/ImprovedNamingStrategy.java 17 Aug 2012 14:33:53 -0000 1.1
+++ 3rdParty_sources/hibernate-core/org/hibernate/cfg/ImprovedNamingStrategy.java 30 Jul 2014 15:51:05 -0000 1.1.2.1
@@ -1,10 +1,10 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
- * Copyright (c) 2008, Red Hat Middleware LLC or third-party contributors as
+ * Copyright (c) 2010, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
- * distributed under license by Red Hat Middleware LLC.
+ * distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
@@ -20,14 +20,13 @@
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
- *
*/
package org.hibernate.cfg;
import java.io.Serializable;
-import org.hibernate.util.StringHelper;
import org.hibernate.AssertionFailure;
+import org.hibernate.internal.util.StringHelper;
/**
* An improved naming strategy that prefers embedded
@@ -70,7 +69,7 @@
}
protected static String addUnderscores(String name) {
- StringBuffer buf = new StringBuffer( name.replace('.', '_') );
+ StringBuilder buf = new StringBuilder( name.replace('.', '_') );
 for (int i=1; i<buf.length()-1; i++) {
Index: 3rdParty_sources/hibernate-core/org/hibernate/cfg/Mappings.java
===================================================================
- * <hibernate-mapping> element.)
+ * A collection of mappings from classes and collections to relational database tables. Represents a single
+ * <hibernate-mapping> element.
+ *
+ * todo : the statement about this representing a single mapping element is simply not true if it was ever the case.
+ * this contract actually represents 3 scopes of information:
+ * - bounded state : this is information which is indeed scoped by a single mapping
+ * - unbounded state : this is information which is Configuration wide (think of metadata repository)
+ * - transient state : state which changed at its own pace (naming strategy)
+ *
+ *
* @author Gavin King
+ * @author Steve Ebersole
*/
-public class Mappings implements Serializable {
+public interface Mappings {
+ /**
+ * Retrieve the type resolver in effect.
+ *
+ * @return The type resolver.
+ */
+ public TypeResolver getTypeResolver();
- private static final Logger log = LoggerFactory.getLogger(Mappings.class);
+ /**
+ * Get the current naming strategy.
+ *
+ * @return The current naming strategy.
+ */
+ public NamingStrategy getNamingStrategy();
- protected final Map classes;
- protected final Map collections;
- protected final Map tables;
- protected final Map queries;
- protected final Map sqlqueries;
- protected final Map resultSetMappings;
- protected final Map typeDefs;
- protected final List secondPasses;
- protected final Map imports;
- protected String schemaName;
- protected String catalogName;
- protected String defaultCascade;
- protected String defaultPackage;
- protected String defaultAccess;
- protected boolean autoImport;
- protected boolean defaultLazy;
- protected final List propertyReferences;
- protected final NamingStrategy namingStrategy;
- protected final Map filterDefinitions;
- protected final List auxiliaryDatabaseObjects;
+ /**
+ * Set the current naming strategy.
+ *
+ * @param namingStrategy The naming strategy to use.
+ */
+ public void setNamingStrategy(NamingStrategy namingStrategy);
- protected final Map extendsQueue;
-// private final List extendsQueue;
+ /**
+ * Returns the currently bound default schema name.
+ *
+ * @return The currently bound schema name
+ */
+ public String getSchemaName();
/**
- * binding table between the logical column name and the name out of the naming strategy
- * for each table.
- * According that when the column name is not set, the property name is considered as such
- * This means that while theorically possible through the naming strategy contract, it is
- * forbidden to have 2 real columns having the same logical name
- *
+ * Sets the currently bound default schema name.
+ *
+ * @param schemaName The schema name to bind as the current default.
*/
- protected final Map columnNameBindingPerTable;
+ public void setSchemaName(String schemaName);
+
/**
- * binding between logical table name and physical one (ie after the naming strategy has been applied)
- *
+ * Returns the currently bound default catalog name.
+ *
+ * @return The currently bound catalog name, or null if none.
*/
- protected final Map tableNameBinding;
+ public String getCatalogName();
+ /**
+ * Sets the currently bound default catalog name.
+ *
+ * @param catalogName The catalog name to use as the current default.
+ */
+ public void setCatalogName(String catalogName);
- Mappings(
- final Map classes,
- final Map collections,
- final Map tables,
- final Map queries,
- final Map sqlqueries,
- final Map sqlResultSetMappings,
- final Map imports,
- final List secondPasses,
- final List propertyReferences,
- final NamingStrategy namingStrategy,
- final Map typeDefs,
- final Map filterDefinitions,
-// final List extendsQueue,
- final Map extendsQueue,
- final List auxiliaryDatabaseObjects,
- final Map tableNamebinding,
- final Map columnNameBindingPerTable
- ) {
- this.classes = classes;
- this.collections = collections;
- this.queries = queries;
- this.sqlqueries = sqlqueries;
- this.resultSetMappings = sqlResultSetMappings;
- this.tables = tables;
- this.imports = imports;
- this.secondPasses = secondPasses;
- this.propertyReferences = propertyReferences;
- this.namingStrategy = namingStrategy;
- this.typeDefs = typeDefs;
- this.filterDefinitions = filterDefinitions;
- this.extendsQueue = extendsQueue;
- this.auxiliaryDatabaseObjects = auxiliaryDatabaseObjects;
- this.tableNameBinding = tableNamebinding;
- this.columnNameBindingPerTable = columnNameBindingPerTable;
- }
+ /**
+ * Get the currently bound default package name.
+ *
+ * @return The currently bound default package name
+ */
+ public String getDefaultPackage();
- public void addClass(PersistentClass persistentClass) throws MappingException {
- Object old = classes.put( persistentClass.getEntityName(), persistentClass );
- if ( old!=null ) {
- throw new DuplicateMappingException( "class/entity", persistentClass.getEntityName() );
- }
- }
- public void addCollection(Collection collection) throws MappingException {
- Object old = collections.put( collection.getRole(), collection );
- if ( old!=null ) {
- throw new DuplicateMappingException( "collection role", collection.getRole() );
- }
- }
- public PersistentClass getClass(String className) {
- return (PersistentClass) classes.get(className);
- }
- public Collection getCollection(String role) {
- return (Collection) collections.get(role);
- }
+ /**
+ * Set the current default package name.
+ *
+ * @param defaultPackage The package name to set as the current default.
+ */
+ public void setDefaultPackage(String defaultPackage);
- public void addImport(String className, String rename) throws MappingException {
- String existing = (String) imports.put(rename, className);
- if ( existing!=null ) {
- if ( existing.equals(className) ) {
- log.info( "duplicate import: " + className + "->" + rename );
- }
- else {
- throw new DuplicateMappingException(
- "duplicate import: " + rename +
- " refers to both " + className +
- " and " + existing +
- " (try using auto-import=\"false\")",
- "import",
- rename
- );
- }
- }
- }
+ /**
+ * Determine whether auto importing of entity names is currently enabled.
+ *
+ * @return True if currently enabled; false otherwise.
+ */
+ public boolean isAutoImport();
- public Table addTable(String schema,
- String catalog,
- String name,
- String subselect,
- boolean isAbstract
- ) {
- String key = subselect==null ?
- Table.qualify(catalog, schema, name) :
- subselect;
- Table table = (Table) tables.get(key);
+ /**
+ * Set whether to enable auto importing of entity names.
+ *
+ * @param autoImport True to enable; false to disable.
+ * @see #addImport
+ */
+ public void setAutoImport(boolean autoImport);
- if (table == null) {
- table = new Table();
- table.setAbstract(isAbstract);
- table.setName(name);
- table.setSchema(schema);
- table.setCatalog(catalog);
- table.setSubselect(subselect);
- tables.put(key, table);
- }
- else {
- if (!isAbstract) table.setAbstract(false);
- }
+ /**
+ * Determine whether default laziness is currently enabled.
+ *
+ * @return True if enabled, false otherwise.
+ */
+ public boolean isDefaultLazy();
- return table;
- }
+ /**
+ * Set whether to enable default laziness.
+ *
+ * @param defaultLazy True to enable, false to disable.
+ */
+ public void setDefaultLazy(boolean defaultLazy);
- public Table addDenormalizedTable(
- String schema,
- String catalog,
- String name,
- boolean isAbstract,
- String subselect,
- Table includedTable)
- throws MappingException {
- String key = subselect==null ?
- Table.qualify(catalog, schema, name) :
- subselect;
- if ( tables.containsKey(key) ) {
- throw new DuplicateMappingException("table", name);
- }
-
- Table table = new DenormalizedTable(includedTable);
- table.setAbstract(isAbstract);
- table.setName(name);
- table.setSchema(schema);
- table.setCatalog(catalog);
- table.setSubselect(subselect);
- tables.put(key, table);
- return table;
- }
+ /**
+ * Get the current default cascade style.
+ *
+ * @return The current default cascade style.
+ */
+ public String getDefaultCascade();
- public Table getTable(String schema, String catalog, String name) {
- String key = Table.qualify(catalog, schema, name);
- return (Table) tables.get(key);
- }
+ /**
+ * Sets the current default cascade style.
+ *
+ * @param defaultCascade The cascade style to set as the current default.
+ */
+ public void setDefaultCascade(String defaultCascade);
- public String getSchemaName() {
- return schemaName;
- }
+ /**
+ * Get the current default property access style.
+ *
+ * @return The current default property access style.
+ */
+ public String getDefaultAccess();
- public String getCatalogName() {
- return catalogName;
- }
+ /**
+ * Sets the current default property access style.
+ *
+ * @param defaultAccess The access style to use as the current default.
+ */
+ public void setDefaultAccess(String defaultAccess);
- public String getDefaultCascade() {
- return defaultCascade;
- }
/**
- * Sets the schemaName.
- * @param schemaName The schemaName to set
+ * Retrieves an iterator over the entity metadata present in this repository.
+ *
+ * @return Iterator over class metadata.
*/
- public void setSchemaName(String schemaName) {
- this.schemaName = schemaName;
- }
+ public Iterator iterateClasses();
- /**
- * Sets the catalogName.
- * @param catalogName The catalogName to set
- */
- public void setCatalogName(String catalogName) {
- this.catalogName = catalogName;
- }
+ /**
+ * Retrieves the entity mapping metadata for the given entity name.
+ *
+ * @param entityName The entity name for which to retrieve the metadata.
+ * @return The entity mapping metadata, or null if none found matching given entity name.
+ */
+ public PersistentClass getClass(String entityName);
/**
- * Sets the defaultCascade.
- * @param defaultCascade The defaultCascade to set
+ * Retrieves the entity mapping metadata for the given entity name, potentially accounting
+ * for imports.
+ *
+ * @param entityName The entity name for which to retrieve the metadata.
+ * @return The entity mapping metadata, or null if none found matching given entity name.
*/
- public void setDefaultCascade(String defaultCascade) {
- this.defaultCascade = defaultCascade;
- }
+ public PersistentClass locatePersistentClassByEntityName(String entityName);
/**
- * sets the default access strategy
- * @param defaultAccess the default access strategy.
+ * Add entity mapping metadata.
+ *
+ * @param persistentClass The entity metadata
+ * @throws DuplicateMappingException Indicates there was already an entry
+ * corresponding to the given entity name.
*/
- public void setDefaultAccess(String defaultAccess) {
- this.defaultAccess = defaultAccess;
- }
+ public void addClass(PersistentClass persistentClass) throws DuplicateMappingException;
- public String getDefaultAccess() {
- return defaultAccess;
- }
+ /**
+ * Adds an import (HQL entity rename) to the repository.
+ *
+ * @param entityName The entity name being renamed.
+ * @param rename The rename
+ * @throws DuplicateMappingException If the rename is already mapped to another
+ * entity name in this repository.
+ */
+ public void addImport(String entityName, String rename) throws DuplicateMappingException;
- public void addQuery(String name, NamedQueryDefinition query) throws MappingException {
- checkQueryExist(name);
- queries.put( name.intern(), query );
- }
+ /**
+ * Retrieves the collection mapping metadata for the given collection role.
+ *
+ * @param role The collection role for which to retrieve the metadata.
+ * @return The collection mapping metadata, or null if no matching collection role found.
+ */
+ public Collection getCollection(String role);
- public void addSQLQuery(String name, NamedSQLQueryDefinition query) throws MappingException {
- checkQueryExist(name);
- sqlqueries.put( name.intern(), query );
- }
+ /**
+ * Returns an iterator over collection metadata.
+ *
+ * @return Iterator over collection metadata.
+ */
+ public Iterator iterateCollections();
- private void checkQueryExist(String name) throws MappingException {
- if ( sqlqueries.containsKey(name) || queries.containsKey(name) ) {
- throw new DuplicateMappingException("query", name);
- }
- }
+ /**
+ * Add collection mapping metadata to this repository.
+ *
+ * @param collection The collection metadata
+ * @throws DuplicateMappingException Indicates there was already an entry
+ * corresponding to the given collection role
+ */
+ public void addCollection(Collection collection) throws DuplicateMappingException;
- public void addResultSetMapping(ResultSetMappingDefinition sqlResultSetMapping) {
- final String name = sqlResultSetMapping.getName();
- if ( resultSetMappings.containsKey(name) ) {
- throw new DuplicateMappingException("resultSet", name);
- }
- resultSetMappings.put(name, sqlResultSetMapping);
- }
+ /**
+ * Returns the named table metadata.
+ *
+ * @param schema The named schema in which the table belongs (or null).
+ * @param catalog The named catalog in which the table belongs (or null).
+ * @param name The table name
+ * @return The table metadata, or null.
+ */
+ public Table getTable(String schema, String catalog, String name);
- public ResultSetMappingDefinition getResultSetMapping(String name) {
- return (ResultSetMappingDefinition) resultSetMappings.get(name);
- }
+ /**
+ * Returns an iterator over table metadata.
+ *
+ * @return Iterator over table metadata.
+ */
+ public Iterator iterateTables();
+ /**
+ * Adds table metadata to this repository returning the created
+ * metadata instance.
+ *
+ * @param schema The named schema in which the table belongs (or null).
+ * @param catalog The named catalog in which the table belongs (or null).
+ * @param name The table name
+ * @param subselect A select statement which defines a logical table, much
+ * like a DB view.
+ * @param isAbstract Is the table abstract (i.e. not really existing in the DB)?
+ * @return The created table metadata, or the existing reference.
+ */
+ public Table addTable(String schema, String catalog, String name, String subselect, boolean isAbstract);
- public NamedQueryDefinition getQuery(String name) {
- return (NamedQueryDefinition) queries.get(name);
- }
+ /**
+ * Adds a 'denormalized table' to this repository.
+ *
+ * @param schema The named schema in which the table belongs (or null).
+ * @param catalog The named catalog in which the table belongs (or null).
+ * @param name The table name
+ * @param isAbstract Is the table abstract (i.e. not really existing in the DB)?
+ * @param subselect A select statement which defines a logical table, much
+ * like a DB view.
+ * @param includedTable ???
+ * @return The created table metadata.
+ * @throws DuplicateMappingException If such a table mapping already exists.
+ */
+ public Table addDenormalizedTable(String schema, String catalog, String name, boolean isAbstract, String subselect, Table includedTable)
+ throws DuplicateMappingException;
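A hedged sketch of the table-registry calls declared above (the table name and the `mappings` variable are illustrative assumptions):

    import java.util.Iterator;
    import org.hibernate.mapping.Table;

    // addTable either creates the table metadata or returns the existing reference.
    Table orders = mappings.addTable( null, null, "ORDERS", null, false );
    Table sameRef = mappings.getTable( null, null, "ORDERS" );      // lookup by schema/catalog/name

    for ( Iterator tables = mappings.iterateTables(); tables.hasNext(); ) {
        Table table = (Table) tables.next();
        // e.g. inspect table.getName() / table.getSchema() here
    }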
- public void addSecondPass(SecondPass sp) {
- addSecondPass(sp, false);
- }
-
- public void addSecondPass(SecondPass sp, boolean onTopOfTheQueue) {
- if (onTopOfTheQueue) {
- secondPasses.add(0, sp);
- }
- else {
- secondPasses.add(sp);
- }
- }
+ /**
+ * Get named query metadata by name.
+ *
+ * @param name The named query name
+ * @return The query metadata, or null.
+ */
+ public NamedQueryDefinition getQuery(String name);
/**
- * Returns the autoImport.
- * @return boolean
+ * Adds metadata for a named query to this repository.
+ *
+ * @param name The name
+ * @param query The metadata
+ * @throws DuplicateMappingException If a query already exists with that name.
*/
- public boolean isAutoImport() {
- return autoImport;
- }
+ public void addQuery(String name, NamedQueryDefinition query) throws DuplicateMappingException;
/**
- * Sets the autoImport.
- * @param autoImport The autoImport to set
+ * Get named SQL query metadata.
+ *
+ * @param name The named SQL query name.
+ * @return The metadata, or null if none found.
*/
- public void setAutoImport(boolean autoImport) {
- this.autoImport = autoImport;
- }
+ public NamedSQLQueryDefinition getSQLQuery(String name);
- void addUniquePropertyReference(String referencedClass, String propertyName) {
- PropertyReference upr = new PropertyReference();
- upr.referencedClass = referencedClass;
- upr.propertyName = propertyName;
- upr.unique = true;
- propertyReferences.add(upr);
- }
+ /**
+ * Adds metadata for a named SQL query to this repository.
+ *
+ * @param name The name
+ * @param query The metadata
+ * @throws DuplicateMappingException If a query already exists with that name.
+ */
+ public void addSQLQuery(String name, NamedSQLQueryDefinition query) throws DuplicateMappingException;
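A minimal sketch of registering named query metadata; the NamedQueryDefinition instance `byIdDef`, its construction, and the query name are assumptions made for illustration:

    import org.hibernate.DuplicateMappingException;
    import org.hibernate.engine.spi.NamedQueryDefinition;   // package location assumed for this Hibernate line

    try {
        mappings.addQuery( "Order.byId", byIdDef );          // byIdDef is assumed to be built elsewhere
    }
    catch ( DuplicateMappingException e ) {
        // the name is already claimed by another HQL or native-SQL query
    }
    NamedQueryDefinition registered = mappings.getQuery( "Order.byId" );    // null if never added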
- void addPropertyReference(String referencedClass, String propertyName) {
- PropertyReference upr = new PropertyReference();
- upr.referencedClass = referencedClass;
- upr.propertyName = propertyName;
- propertyReferences.add(upr);
- }
+ /**
+ * Adds metadata for a named stored procedure call to this repository.
+ *
+ * @param definition The procedure call information
+ *
+ * @throws DuplicateMappingException If a query already exists with that name.
+ */
+ public void addNamedProcedureCallDefinition(NamedProcedureCallDefinition definition) throws DuplicateMappingException;
- private String buildTableNameKey(String schema, String catalog, String finalName) {
- StringBuffer keyBuilder = new StringBuffer();
- if (schema != null) keyBuilder.append( schema );
- keyBuilder.append( ".");
- if (catalog != null) keyBuilder.append( catalog );
- keyBuilder.append( ".");
- keyBuilder.append( finalName );
- return keyBuilder.toString();
- }
+ /**
+ * Adds metadata for a default named stored procedure call to this repository.
+ *
+ * @param definition The procedure call information
+ *
+ * @throws DuplicateMappingException If a query already exists with that name.
+ */
+ public void addDefaultNamedProcedureCallDefinition(NamedProcedureCallDefinition definition) throws DuplicateMappingException;
- static final class PropertyReference implements Serializable {
- String referencedClass;
- String propertyName;
- boolean unique;
- }
+
/**
- * @return Returns the defaultPackage.
+ * Adds metadata for a named entity graph to this repository
+ *
+ * @param namedEntityGraphDefinition The named entity graph information
+ *
+ * @throws DuplicateMappingException If an entity graph already exists with that name.
*/
- public String getDefaultPackage() {
- return defaultPackage;
- }
+ public void addNamedEntityGraphDefintion(NamedEntityGraphDefinition namedEntityGraphDefinition);
/**
- * @param defaultPackage The defaultPackage to set.
+ * Get the metadata for a named SQL result set mapping.
+ *
+ * @param name The mapping name.
+ * @return The SQL result set mapping metadata, or null if none found.
*/
- public void setDefaultPackage(String defaultPackage) {
- this.defaultPackage = defaultPackage;
- }
+ public ResultSetMappingDefinition getResultSetMapping(String name);
- public NamingStrategy getNamingStrategy() {
- return namingStrategy;
- }
+ /**
+ * Adds the metadata for a named SQL result set mapping to this repository.
+ *
+ * @param sqlResultSetMapping The metadata
+ * @throws DuplicateMappingException If metadata for another SQL result mapping was
+ * already found under the given name.
+ */
+ public void addResultSetMapping(ResultSetMappingDefinition sqlResultSetMapping) throws DuplicateMappingException;
- public void addTypeDef(String typeName, String typeClass, Properties paramMap) {
- TypeDef def = new TypeDef(typeClass, paramMap);
- typeDefs.put(typeName, def);
- log.debug("Added " + typeName + " with class " + typeClass);
- }
+ /**
+ * Retrieve a type definition by name.
+ *
+ * @param typeName The name of the type definition to retrieve.
+ * @return The type definition, or null if none found.
+ */
+ public TypeDef getTypeDef(String typeName);
- public TypeDef getTypeDef(String typeName) {
- return (TypeDef) typeDefs.get(typeName);
- }
+ /**
+ * Adds a type definition to this metadata repository.
+ *
+ * @param typeName The type name.
+ * @param typeClass The class implementing the {@link org.hibernate.type.Type} contract.
+ * @param paramMap Map of parameters to be used to configure the type after instantiation.
+ */
+ public void addTypeDef(String typeName, String typeClass, Properties paramMap);
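Sketch of type-definition registration; the type name, parameter, and implementation class are hypothetical:

    import java.util.Properties;
    import org.hibernate.mapping.TypeDef;

    Properties params = new Properties();
    params.setProperty( "length", "255" );                   // illustrative parameter only
    mappings.addTypeDef( "encryptedString", "com.example.type.EncryptedStringType", params );

    TypeDef def = mappings.getTypeDef( "encryptedString" );  // null if no such definition was added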
- public Iterator iterateCollections() {
- return collections.values().iterator();
- }
-
- public Iterator iterateTables() {
- return tables.values().iterator();
- }
+ /**
+ * Retrieves the complete map of filter definitions.
+ *
+ * @return The filter definition map.
+ */
+ public Map getFilterDefinitions();
- public Map getFilterDefinitions() {
- return filterDefinitions;
- }
+ /**
+ * Retrieves a filter definition by name.
+ *
+ * @param name The name of the filter definition to retrieve.
+ * @return The filter definition, or null.
+ */
+ public FilterDefinition getFilterDefinition(String name);
- public void addFilterDefinition(FilterDefinition definition) {
- filterDefinitions.put( definition.getFilterName(), definition );
- }
-
- public FilterDefinition getFilterDefinition(String name) {
- return (FilterDefinition) filterDefinitions.get(name);
- }
-
- public boolean isDefaultLazy() {
- return defaultLazy;
- }
- public void setDefaultLazy(boolean defaultLazy) {
- this.defaultLazy = defaultLazy;
- }
+ /**
+ * Adds a filter definition to this repository.
+ *
+ * @param definition The filter definition to add.
+ */
+ public void addFilterDefinition(FilterDefinition definition);
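Sketch of filter-definition handling, assuming a FilterDefinition `activeOnly` built elsewhere:

    import java.util.Map;
    import org.hibernate.engine.spi.FilterDefinition;        // package location assumed for this Hibernate line

    mappings.addFilterDefinition( activeOnly );              // keyed by activeOnly.getFilterName()

    FilterDefinition found = mappings.getFilterDefinition( activeOnly.getFilterName() );
    Map allFilters = mappings.getFilterDefinitions();        // filter name -> FilterDefinition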
- public void addToExtendsQueue(ExtendsQueueEntry entry) {
- extendsQueue.put( entry, null );
- }
+ /**
+ * Retrieves a fetch profile by either finding one currently in this repository matching the given name
+ * or by creating one (and adding it).
+ *
+ * @param name The name of the profile.
+ * @param source The source from which this profile is named.
+ * @return The fetch profile metadata.
+ */
+ public FetchProfile findOrCreateFetchProfile(String name, MetadataSource source);
- public PersistentClass locatePersistentClassByEntityName(String entityName) {
- PersistentClass persistentClass = ( PersistentClass ) classes.get( entityName );
- if ( persistentClass == null ) {
- String actualEntityName = ( String ) imports.get( entityName );
- if ( StringHelper.isNotEmpty( actualEntityName ) ) {
- persistentClass = ( PersistentClass ) classes.get( actualEntityName );
- }
- }
- return persistentClass;
- }
+ /**
+ * @deprecated To fix misspelling; use {@link #iterateAuxiliaryDatabaseObjects} instead
+ */
+ @Deprecated
+ @SuppressWarnings({ "JavaDoc" })
+ public Iterator iterateAuxliaryDatabaseObjects();
- public void addAuxiliaryDatabaseObject(AuxiliaryDatabaseObject auxiliaryDatabaseObject) {
- auxiliaryDatabaseObjects.add( auxiliaryDatabaseObject );
- }
+ /**
+ * Retrieves an iterator over the metadata pertaining to all auxiliary database objects in this repository.
+ *
+ * @return Iterator over the auxiliary database object metadata.
+ */
+ public Iterator iterateAuxiliaryDatabaseObjects();
+ /**
+ * @deprecated To fix misspelling; use {@link #iterateAuxiliaryDatabaseObjectsInReverse} instead
+ */
+ @Deprecated
+ @SuppressWarnings({ "JavaDoc" })
+ public ListIterator iterateAuxliaryDatabaseObjectsInReverse();
+
+ /**
+ * Same as {@link #iterateAuxiliaryDatabaseObjects()} except that here the iterator is reversed.
+ *
+ * @return The reversed iterator.
+ */
+ public ListIterator iterateAuxiliaryDatabaseObjectsInReverse();
+
+ /**
+ * Add metadata pertaining to an auxiliary database object to this repository.
+ *
+ * @param auxiliaryDatabaseObject The metadata.
+ */
+ public void addAuxiliaryDatabaseObject(AuxiliaryDatabaseObject auxiliaryDatabaseObject);
+
+ /**
+ * Get the logical table name mapped for the given physical table.
+ *
+ * @param table The table for which to determine the logical name.
+ * @return The logical name.
+ * @throws MappingException Indicates that no logical name was bound for the given physical table.
+ */
+ public String getLogicalTableName(Table table) throws MappingException;
+
+ /**
+ * Adds a table binding to this repository.
+ *
+ * @param schema The schema in which the table belongs (may be null).
+ * @param catalog The catalog in which the table belongs (may be null).
+ * @param logicalName The logical table name.
+ * @param physicalName The physical table name.
+ * @param denormalizedSuperTable ???
+ * @throws DuplicateMappingException Indicates physical table was already bound to another logical name.
+ */
public void addTableBinding(
- String schema, String catalog, String logicalName, String physicalName, Table denormalizedSuperTable
- ) {
- String key = buildTableNameKey( schema, catalog, physicalName );
- TableDescription tableDescription = new TableDescription(
- logicalName, denormalizedSuperTable
- );
- TableDescription oldDescriptor = (TableDescription) tableNameBinding.put( key, tableDescription );
- if ( oldDescriptor != null && ! oldDescriptor.logicalName.equals( logicalName ) ) {
- //TODO possibly relax that
- throw new MappingException("Same physical table name reference several logical table names: "
- + physicalName + " => " + "'" + oldDescriptor.logicalName + "' and '" + logicalName + "'");
- }
- }
+ String schema,
+ String catalog,
+ String logicalName,
+ String physicalName,
+ Table denormalizedSuperTable) throws DuplicateMappingException;
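Sketch of the logical/physical table-name binding; names are illustrative, and the expected result follows the replaced implementation visible earlier in this diff:

    import org.hibernate.mapping.Table;

    Table customers = mappings.addTable( null, null, "T_CUSTOMER", null, false );
    mappings.addTableBinding( null, null, "Customer", "T_CUSTOMER", null );

    String logical = mappings.getLogicalTableName( customers );   // expected: "Customer"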
- public void addColumnBinding(String logicalName, Column finalColumn, Table table) {
- ColumnNames binding = (ColumnNames) columnNameBindingPerTable.get(table);
- if (binding == null) {
- binding = new ColumnNames();
- columnNameBindingPerTable.put(table, binding);
- }
- String oldFinalName = (String) binding.logicalToPhysical.put(
- logicalName.toLowerCase(),
- finalColumn.getQuotedName()
- );
- if ( oldFinalName != null &&
- ! ( finalColumn.isQuoted() ?
- oldFinalName.equals( finalColumn.getQuotedName() ) :
- oldFinalName.equalsIgnoreCase( finalColumn.getQuotedName() ) ) ) {
- //TODO possibly relax that
- throw new MappingException("Same logical column name referenced by different physical ones: "
- + table.getName() + "." + logicalName + " => '" + oldFinalName + "' and '" + finalColumn.getQuotedName() + "'" );
- }
- String oldLogicalName = (String) binding.physicalToLogical.put(
- finalColumn.getQuotedName(),
- logicalName
- );
- if ( oldLogicalName != null && ! oldLogicalName.equals( logicalName ) ) {
- //TODO possibly relax that
- throw new MappingException("Same physical column represented by different logical column names: "
- + table.getName() + "." + finalColumn.getQuotedName() + " => '" + oldLogicalName + "' and '" + logicalName + "'");
- }
- }
+ /**
+ * Binds the given 'physicalColumn' to the given 'logicalName' within the given 'table'.
+ *
+ * @param logicalName The logical column name binding.
+ * @param physicalColumn The physical column metadata.
+ * @param table The table metadata.
+ * @throws DuplicateMappingException Indicates a duplicate binding for either the physical column name
+ * or the logical column name.
+ */
+ public void addColumnBinding(String logicalName, Column physicalColumn, Table table) throws DuplicateMappingException;
- private String getLogicalTableName(String schema, String catalog, String physicalName) {
- String key = buildTableNameKey( schema, catalog, physicalName );
- TableDescription descriptor = (TableDescription) tableNameBinding.get( key );
- if (descriptor == null) {
- throw new MappingException( "Unable to find physical table: " + physicalName);
- }
- return descriptor.logicalName;
- }
+ /**
+ * Find the physical column name for the given logical column name within the given table.
+ *
+ * @param logicalName The logical name binding.
+ * @param table The table metadata.
+ * @return The physical column name.
+ * @throws MappingException Indicates that no such binding was found.
+ */
+ public String getPhysicalColumnName(String logicalName, Table table) throws MappingException;
- public String getPhysicalColumnName(String logicalName, Table table) {
- logicalName = logicalName.toLowerCase();
- String finalName = null;
- Table currentTable = table;
- do {
- ColumnNames binding = (ColumnNames) columnNameBindingPerTable.get(currentTable);
- if (binding != null) {
- finalName = (String) binding.logicalToPhysical.get( logicalName );
- }
- String key = buildTableNameKey( currentTable.getSchema(), currentTable.getCatalog(), currentTable.getName() );
- TableDescription description = (TableDescription) tableNameBinding.get(key);
- if (description != null) currentTable = description.denormalizedSupertable;
- }
- while (finalName == null && currentTable != null);
- if (finalName == null) {
- throw new MappingException( "Unable to find column with logical name "
- + logicalName + " in table " + table.getName() );
- }
- return finalName;
- }
+ /**
+ * Find the logical column name against which the given physical column name was bound within the given table.
+ *
+ * @param physicalName The physical column name
+ * @param table The table metadata.
+ * @return The logical column name.
+ * @throws MappingException Indicates that no such binding was found.
+ */
+ public String getLogicalColumnName(String physicalName, Table table) throws MappingException;
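Sketch of the per-table column-name binding round trip; the Column/Table setup is illustrative:

    import org.hibernate.mapping.Column;
    import org.hibernate.mapping.Table;

    Table customers = mappings.addTable( null, null, "T_CUSTOMER", null, false );
    Column idColumn = new Column();
    idColumn.setName( "CUST_ID" );                                  // physical column name

    mappings.addColumnBinding( "id", idColumn, customers );         // logical "id" <-> physical "CUST_ID"
    String physical = mappings.getPhysicalColumnName( "id", customers );      // expected: "CUST_ID"
    String logical  = mappings.getLogicalColumnName( "CUST_ID", customers );  // expected: "id"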
- public String getLogicalColumnName(String physicalName, Table table) {
- String logical = null;
- Table currentTable = table;
- TableDescription description = null;
- do {
- ColumnNames binding = (ColumnNames) columnNameBindingPerTable.get(currentTable);
- if (binding != null) {
- logical = (String) binding.physicalToLogical.get( physicalName );
- }
- String key = buildTableNameKey( currentTable.getSchema(), currentTable.getCatalog(), currentTable.getName() );
- description = (TableDescription) tableNameBinding.get(key);
- if (description != null) currentTable = description.denormalizedSupertable;
- }
- while (logical == null && currentTable != null && description != null);
- if (logical == null) {
- throw new MappingException( "Unable to find logical column name from physical name "
- + physicalName + " in table " + table.getName() );
- }
- return logical;
- }
+ /**
+ * Adds a second-pass to the end of the current queue.
+ *
+ * @param sp The second pass to add.
+ */
+ public void addSecondPass(SecondPass sp);
- public String getLogicalTableName(Table table) {
- return getLogicalTableName( table.getQuotedSchema(), table.getCatalog(), table.getQuotedName() );
- }
+ /**
+ * Adds a second pass.
+ * @param sp The second pass to add.
+ * @param onTopOfTheQueue True to add to the beginning of the queue; false to add to the end.
+ */
+ public void addSecondPass(SecondPass sp, boolean onTopOfTheQueue);
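Sketch of queueing deferred work; the callback shown assumes the single doSecondPass(Map) method declared by org.hibernate.cfg.SecondPass:

    import java.util.Map;
    import org.hibernate.MappingException;
    import org.hibernate.cfg.SecondPass;

    mappings.addSecondPass( new SecondPass() {
        public void doSecondPass(Map persistentClasses) throws MappingException {
            // runs after first-pass binding; all PersistentClass entries are available here
        }
    } );
    // addSecondPass( sp, true ) would instead push `sp` to the front of the queue.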
- static public class ColumnNames implements Serializable {
- //
- public Map logicalToPhysical = new HashMap();
- //
- public Map physicalToLogical = new HashMap();
- public ColumnNames() {
+ /**
+ * Locate the AttributeConverterDefinition corresponding to the given AttributeConverter Class.
+ *
+ * @param attributeConverterClass The AttributeConverter Class for which to get the definition
+ *
+ * @return The corresponding AttributeConverter definition; will return {@code null} if no corresponding
+ * definition is found.
+ */
+ public AttributeConverterDefinition locateAttributeConverter(Class attributeConverterClass);
+
+ /**
+ * Get all AttributeConverter definitions
+ *
+ * @return The collection of all AttributeConverter definitions.
+ */
+ public java.util.Collection getAttributeConverters();
+
+ /**
+ * Represents a property-ref mapping.
+ *
+ * TODO : currently needs to be exposed because Configuration needs access to it for second-pass processing
+ */
+ public static final class PropertyReference implements Serializable {
+ public final String referencedClass;
+ public final String propertyName;
+ public final boolean unique;
+
+ public PropertyReference(String referencedClass, String propertyName, boolean unique) {
+ this.referencedClass = referencedClass;
+ this.propertyName = propertyName;
+ this.unique = unique;
}
}
- static public class TableDescription implements Serializable {
- public TableDescription(String logicalName, Table denormalizedSupertable) {
- this.logicalName = logicalName;
- this.denormalizedSupertable = denormalizedSupertable;
- }
+ /**
+ * Adds a property reference binding to this repository.
+ *
+ * @param referencedClass The referenced entity name.
+ * @param propertyName The referenced property name.
+ */
+ public void addPropertyReference(String referencedClass, String propertyName);
- public String logicalName;
- public Table denormalizedSupertable;
- }
-}
\ No newline at end of file
+ /**
+ * Adds a property reference binding to this repository where said property reference is marked as unique.
+ *
+ * @param referencedClass The referenced entity name.
+ * @param propertyName The referenced property name.
+ */
+ public void addUniquePropertyReference(String referencedClass, String propertyName);
+
+ /**
+ * Adds an entry to the extends queue.
+ *
+ * @param entry The entry to add.
+ */
+ public void addToExtendsQueue(ExtendsQueueEntry entry);
+
+ /**
+ * Retrieve the IdentifierGeneratorFactory in effect for this mapping.
+ *
+ * @return The IdentifierGeneratorFactory
+ */
+ public MutableIdentifierGeneratorFactory getIdentifierGeneratorFactory();
+
+ /**
+ * Adds a new MappedSuperclass. This should not be called if a MappedSuperclass
+ * already exists for the given type (the existing entry would be overwritten).
+ *
+ * @param type The class corresponding to the MappedSuperclass
+ * @param mappedSuperclass The MappedSuperclass metadata
+ */
+ public void addMappedSuperclass(Class type, org.hibernate.mapping.MappedSuperclass mappedSuperclass);
+
+ /**
+ * Get a MappedSuperclass or null if not mapped
+ *
+ * @param type class corresponding to the MappedSuperclass
+ * @return the MappedSuperclass
+ */
+ org.hibernate.mapping.MappedSuperclass getMappedSuperclass(Class type);
+
+ /**
+ * Retrieve the database identifier normalizer for this context.
+ *
+ * @return The normalizer.
+ */
+ public ObjectNameNormalizer getObjectNameNormalizer();
+
+ /**
+ * Retrieve the configuration properties currently in effect.
+ *
+ * @return The configuration properties
+ */
+ public Properties getConfigurationProperties();
+
+ /**
+ * Adds a default id generator.
+ *
+ * @param generator The id generator
+ */
+ public void addDefaultGenerator(IdGenerator generator);
+
+ /**
+ * Retrieve the id-generator by name.
+ *
+ * @param name The generator name.
+ *
+ * @return The generator, or null.
+ */
+ public IdGenerator getGenerator(String name);
+
+ /**
+ * Try to find the generator from the localGenerators
+ * and then from the global generator list
+ *
+ * @param name generator name
+ * @param localGenerators local generators
+ *
+ * @return The matching IdGenerator, or null if not found
+ */
+ public IdGenerator getGenerator(String name, Map localGenerators);
+
+ /**
+ * Add a generator.
+ *
+ * @param generator The generator to add.
+ */
+ public void addGenerator(IdGenerator generator);
+
+ /**
+ * Adds properties for a generator table.
+ *
+ * @param name The generator name
+ * @param params The generator table properties.
+ */
+ public void addGeneratorTable(String name, Properties params);
+
+ /**
+ * Retrieve the properties related to a generator table.
+ *
+ * @param name generator name
+ * @param localGeneratorTables local generator tables
+ *
+ * @return The properties, or null.
+ */
+ public Properties getGeneratorTableProperties(String name, Map localGeneratorTables);
+
+ /**
+ * Retrieve join metadata for a particular persistent entity.
+ *
+ * @param entityName The entity name
+ *
+ * @return The join metadata
+ */
+ public Map